/*
 * MIPS emulation for QEMU - main translation routines
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 * Copyright (c) 2020 Philippe Mathieu-Daudé
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
25 #include "qemu/osdep.h"
28 #include "tcg/tcg-op.h"
29 #include "exec/translator.h"
30 #include "exec/helper-proto.h"
31 #include "exec/helper-gen.h"
32 #include "semihosting/semihost.h"
36 #include "qemu/qemu-print.h"
37 #include "fpu_helper.h"
38 #include "translate.h"
41 * Many sysemu-only helpers are not reachable for user-only.
42 * Define stub generators here, so that we need not either sprinkle
43 * ifdefs through the translator, nor provide the helper function.
45 #define STUB_HELPER(NAME, ...) \
46 static inline void gen_helper_##NAME(__VA_ARGS__) \
47 { g_assert_not_reached(); }
49 #ifdef CONFIG_USER_ONLY
50 STUB_HELPER(cache
, TCGv_env env
, TCGv val
, TCGv_i32 reg
)
/* MIPS major opcodes: top 6 bits of the 32-bit instruction word. */
#define MASK_OP_MAJOR(op) (op & (0x3F << 26))

enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_J        = (0x02 << 26), /* NOTE(review): restored; dropped in mangling */
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),
    OPC_DAUI     = (0x1D << 26),
    /* Load and stores */
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC  = (0x06 << 26),
    OPC_BGEZALC  = (0x06 << 26),
    OPC_BGEUC    = (0x06 << 26),
    OPC_BGTZALC  = (0x07 << 26),
    OPC_BLTZALC  = (0x07 << 26),
    OPC_BLTUC    = (0x07 << 26),
    OPC_BOVC     = (0x08 << 26),
    OPC_BEQZALC  = (0x08 << 26),
    OPC_BEQC     = (0x08 << 26),
    OPC_BLEZC    = (0x16 << 26),
    OPC_BGEZC    = (0x16 << 26),
    OPC_BGEC     = (0x16 << 26),
    OPC_BGTZC    = (0x17 << 26),
    OPC_BLTZC    = (0x17 << 26),
    OPC_BLTC     = (0x17 << 26),
    OPC_BNVC     = (0x18 << 26),
    OPC_BNEZALC  = (0x18 << 26),
    OPC_BNEC     = (0x18 << 26),
    OPC_BC       = (0x32 << 26),
    OPC_BEQZC    = (0x36 << 26),
    OPC_JIC      = (0x36 << 26),
    OPC_BALC     = (0x3A << 26),
    OPC_BNEZC    = (0x3E << 26),
    OPC_JIALC    = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL    = (0x3B << 26),
};
157 /* PC-relative address computation / loads */
158 #define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
159 #define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
161 /* Instructions determined by bits 19 and 20 */
162 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
163 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
164 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
166 /* Instructions determined by bits 16 ... 20 */
167 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
168 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
171 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
174 /* MIPS special opcodes */
175 #define MASK_SPECIAL(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
179 OPC_SLL
= 0x00 | OPC_SPECIAL
,
180 /* NOP is SLL r0, r0, 0 */
181 /* SSNOP is SLL r0, r0, 1 */
182 /* EHB is SLL r0, r0, 3 */
183 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
184 OPC_ROTR
= OPC_SRL
| (1 << 21),
185 OPC_SRA
= 0x03 | OPC_SPECIAL
,
186 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
187 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
188 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
189 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
190 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
191 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
192 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
193 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
194 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
195 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
196 OPC_DROTR
= OPC_DSRL
| (1 << 21),
197 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
198 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
199 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
200 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
201 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
202 /* Multiplication / division */
203 OPC_MULT
= 0x18 | OPC_SPECIAL
,
204 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
205 OPC_DIV
= 0x1A | OPC_SPECIAL
,
206 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
207 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
208 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
209 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
210 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
212 /* 2 registers arithmetic / logic */
213 OPC_ADD
= 0x20 | OPC_SPECIAL
,
214 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
215 OPC_SUB
= 0x22 | OPC_SPECIAL
,
216 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
217 OPC_AND
= 0x24 | OPC_SPECIAL
,
218 OPC_OR
= 0x25 | OPC_SPECIAL
,
219 OPC_XOR
= 0x26 | OPC_SPECIAL
,
220 OPC_NOR
= 0x27 | OPC_SPECIAL
,
221 OPC_SLT
= 0x2A | OPC_SPECIAL
,
222 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
223 OPC_DADD
= 0x2C | OPC_SPECIAL
,
224 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
225 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
226 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
228 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
229 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
231 OPC_TGE
= 0x30 | OPC_SPECIAL
,
232 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
233 OPC_TLT
= 0x32 | OPC_SPECIAL
,
234 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
235 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
236 OPC_TNE
= 0x36 | OPC_SPECIAL
,
237 /* HI / LO registers load & stores */
238 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
239 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
240 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
241 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
242 /* Conditional moves */
243 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
244 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
246 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
247 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
249 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
252 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
253 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
254 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
255 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
256 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
258 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
259 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
260 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
261 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
265 * R6 Multiply and Divide instructions have the same opcode
266 * and function field as legacy OPC_MULT[U]/OPC_DIV[U]
268 #define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
271 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
272 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
273 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
274 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
275 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
276 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
277 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
278 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
280 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
281 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
282 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
283 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
284 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
285 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
286 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
287 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
289 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
290 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
291 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
292 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
293 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
296 /* REGIMM (rt field) opcodes */
297 #define MASK_REGIMM(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 16)))
300 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
301 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
302 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
303 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
304 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
305 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
306 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
307 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
308 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
309 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
310 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
311 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
312 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
313 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
314 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
315 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
317 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
318 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
321 /* Special2 opcodes */
322 #define MASK_SPECIAL2(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
325 /* Multiply & xxx operations */
326 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
327 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
328 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
329 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
330 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
332 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
333 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
334 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
335 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
336 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
337 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
338 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
339 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
340 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
341 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
342 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
343 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
345 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
346 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
347 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
348 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
350 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
353 /* Special3 opcodes */
354 #define MASK_SPECIAL3(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
357 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
358 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
359 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
360 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
361 OPC_INS
= 0x04 | OPC_SPECIAL3
,
362 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
363 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
364 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
365 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
366 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
367 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
368 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
369 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
370 OPC_GINV
= 0x3D | OPC_SPECIAL3
,
373 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
374 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
375 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
376 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
377 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
378 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
379 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
380 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
381 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
382 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
383 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
384 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
387 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
388 /* MIPS DSP Arithmetic */
389 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
390 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
391 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
392 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
393 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
394 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
395 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
396 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
397 /* MIPS DSP GPR-Based Shift Sub-class */
398 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
399 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
400 /* MIPS DSP Multiply Sub-class insns */
401 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
402 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
403 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
404 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
405 /* DSP Bit/Manipulation Sub-class */
406 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
407 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
408 /* MIPS DSP Append Sub-class */
409 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
410 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
411 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
412 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
413 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
416 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
417 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
418 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
419 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
420 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
421 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
422 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
423 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
424 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
425 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
426 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
427 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
428 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
429 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
430 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
431 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
434 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
435 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
436 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
437 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
438 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
439 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
442 /* Loongson EXT load/store quad word opcodes */
443 #define MASK_LOONGSON_GSLSQ(op) (MASK_OP_MAJOR(op) | (op & 0x8020))
445 OPC_GSLQ
= 0x0020 | OPC_LWC2
,
446 OPC_GSLQC1
= 0x8020 | OPC_LWC2
,
447 OPC_GSSHFL
= OPC_LWC2
,
448 OPC_GSSQ
= 0x0020 | OPC_SWC2
,
449 OPC_GSSQC1
= 0x8020 | OPC_SWC2
,
450 OPC_GSSHFS
= OPC_SWC2
,
453 /* Loongson EXT shifted load/store opcodes */
454 #define MASK_LOONGSON_GSSHFLS(op) (MASK_OP_MAJOR(op) | (op & 0xc03f))
456 OPC_GSLWLC1
= 0x4 | OPC_GSSHFL
,
457 OPC_GSLWRC1
= 0x5 | OPC_GSSHFL
,
458 OPC_GSLDLC1
= 0x6 | OPC_GSSHFL
,
459 OPC_GSLDRC1
= 0x7 | OPC_GSSHFL
,
460 OPC_GSSWLC1
= 0x4 | OPC_GSSHFS
,
461 OPC_GSSWRC1
= 0x5 | OPC_GSSHFS
,
462 OPC_GSSDLC1
= 0x6 | OPC_GSSHFS
,
463 OPC_GSSDRC1
= 0x7 | OPC_GSSHFS
,
466 /* Loongson EXT LDC2/SDC2 opcodes */
467 #define MASK_LOONGSON_LSDC2(op) (MASK_OP_MAJOR(op) | (op & 0x7))
470 OPC_GSLBX
= 0x0 | OPC_LDC2
,
471 OPC_GSLHX
= 0x1 | OPC_LDC2
,
472 OPC_GSLWX
= 0x2 | OPC_LDC2
,
473 OPC_GSLDX
= 0x3 | OPC_LDC2
,
474 OPC_GSLWXC1
= 0x6 | OPC_LDC2
,
475 OPC_GSLDXC1
= 0x7 | OPC_LDC2
,
476 OPC_GSSBX
= 0x0 | OPC_SDC2
,
477 OPC_GSSHX
= 0x1 | OPC_SDC2
,
478 OPC_GSSWX
= 0x2 | OPC_SDC2
,
479 OPC_GSSDX
= 0x3 | OPC_SDC2
,
480 OPC_GSSWXC1
= 0x6 | OPC_SDC2
,
481 OPC_GSSDXC1
= 0x7 | OPC_SDC2
,
485 #define MASK_BSHFL(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
488 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
489 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
490 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
491 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp (010.00 to 010.11) */
492 OPC_ALIGN_1
= (0x09 << 6) | OPC_BSHFL
,
493 OPC_ALIGN_2
= (0x0A << 6) | OPC_BSHFL
,
494 OPC_ALIGN_3
= (0x0B << 6) | OPC_BSHFL
,
495 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
499 #define MASK_DBSHFL(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
502 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
503 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
504 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp (01.000 to 01.111) */
505 OPC_DALIGN_1
= (0x09 << 6) | OPC_DBSHFL
,
506 OPC_DALIGN_2
= (0x0A << 6) | OPC_DBSHFL
,
507 OPC_DALIGN_3
= (0x0B << 6) | OPC_DBSHFL
,
508 OPC_DALIGN_4
= (0x0C << 6) | OPC_DBSHFL
,
509 OPC_DALIGN_5
= (0x0D << 6) | OPC_DBSHFL
,
510 OPC_DALIGN_6
= (0x0E << 6) | OPC_DBSHFL
,
511 OPC_DALIGN_7
= (0x0F << 6) | OPC_DBSHFL
,
512 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
515 /* MIPS DSP REGIMM opcodes */
517 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
518 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
521 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
524 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
525 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
526 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
527 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
530 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
532 /* MIPS DSP Arithmetic Sub-class */
533 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
534 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
535 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
536 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
537 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
538 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
539 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
540 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
541 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
542 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
543 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
544 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
545 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
546 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
547 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
548 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
549 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
550 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
551 /* MIPS DSP Multiply Sub-class insns */
552 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
553 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
554 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
555 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
556 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
557 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
560 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
561 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
563 /* MIPS DSP Arithmetic Sub-class */
564 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
565 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
566 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
567 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
568 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
569 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
570 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
571 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
572 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
573 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
574 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
575 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
576 /* MIPS DSP Multiply Sub-class insns */
577 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
578 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
579 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
580 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
583 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
585 /* MIPS DSP Arithmetic Sub-class */
586 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
587 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
588 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
589 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
590 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
591 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
592 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
593 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
594 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
595 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
596 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
597 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
598 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
599 /* DSP Bit/Manipulation Sub-class */
600 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
601 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
602 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
603 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
604 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
607 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
609 /* MIPS DSP Arithmetic Sub-class */
610 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
611 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
612 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
613 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
614 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
615 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
616 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
617 /* DSP Compare-Pick Sub-class */
618 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
619 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
620 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
621 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
622 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
623 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
624 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
625 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
626 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
627 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
628 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
629 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
630 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
631 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
632 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
635 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
637 /* MIPS DSP GPR-Based Shift Sub-class */
638 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
639 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
640 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
641 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
642 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
643 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
644 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
645 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
646 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
647 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
648 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
649 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
650 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
651 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
652 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
653 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
654 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
655 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
656 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
657 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
658 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
659 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
662 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
664 /* MIPS DSP Multiply Sub-class insns */
665 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
666 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
667 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
668 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
669 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
670 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
671 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
672 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
673 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
674 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
675 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
676 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
677 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
678 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
679 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
680 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
681 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
682 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
683 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
684 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
685 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
686 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
689 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
691 /* DSP Bit/Manipulation Sub-class */
692 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
695 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
697 /* MIPS DSP Append Sub-class */
698 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
699 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
700 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
703 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
705 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
706 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
707 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
708 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
709 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
710 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
711 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
712 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
713 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
714 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
715 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
716 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
717 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
718 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
719 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
720 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
721 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
722 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
725 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
727 /* MIPS DSP Arithmetic Sub-class */
728 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
729 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
730 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
731 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
732 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
733 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
734 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
735 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
736 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
737 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
738 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
739 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
740 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
741 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
742 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
743 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
744 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
745 /* DSP Bit/Manipulation Sub-class */
746 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
747 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
748 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
749 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
750 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
751 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
754 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
756 /* MIPS DSP Multiply Sub-class insns */
757 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
758 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
759 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
760 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
761 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
762 /* MIPS DSP Arithmetic Sub-class */
763 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
764 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
765 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
766 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
767 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
768 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
769 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
770 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
771 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
772 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
773 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
774 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
775 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
776 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
777 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
778 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
779 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
780 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
781 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
782 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
783 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
786 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
788 /* DSP Compare-Pick Sub-class */
789 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
790 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
791 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
792 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
793 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
794 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
795 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
796 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
797 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
798 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
799 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
800 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
801 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
802 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
803 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
804 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
805 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
806 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
807 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
808 /* MIPS DSP Arithmetic Sub-class */
809 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
810 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
811 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
812 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
813 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
814 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
815 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
816 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
819 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
821 /* DSP Append Sub-class */
822 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
823 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
824 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
825 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
828 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
830 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
831 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
832 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
833 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
834 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
835 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
836 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
837 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
838 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
839 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
840 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
841 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
842 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
843 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
844 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
845 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
846 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
847 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
848 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
849 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
850 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
851 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
854 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
856 /* DSP Bit/Manipulation Sub-class */
857 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
860 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
862 /* MIPS DSP Multiply Sub-class insns */
863 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
864 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
865 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
866 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
867 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
868 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
869 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
870 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
871 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
872 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
873 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
874 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
875 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
876 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
877 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
878 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
879 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
880 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
881 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
882 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
883 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
884 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
885 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
886 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
887 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
888 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
891 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
893 /* MIPS DSP GPR-Based Shift Sub-class */
894 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
895 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
896 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
897 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
898 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
899 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
900 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
901 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
902 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
903 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
904 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
905 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
906 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
907 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
908 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
909 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
910 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
911 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
912 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
913 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
914 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
915 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
916 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
917 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
918 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
919 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
922 /* Coprocessor 0 (rs field) */
923 #define MASK_CP0(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
926 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
927 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
928 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
929 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
930 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
931 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
932 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
933 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
934 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
935 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
936 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
937 OPC_C0
= (0x10 << 21) | OPC_CP0
,
938 OPC_C0_1
= (0x11 << 21) | OPC_CP0
,
939 OPC_C0_2
= (0x12 << 21) | OPC_CP0
,
940 OPC_C0_3
= (0x13 << 21) | OPC_CP0
,
941 OPC_C0_4
= (0x14 << 21) | OPC_CP0
,
942 OPC_C0_5
= (0x15 << 21) | OPC_CP0
,
943 OPC_C0_6
= (0x16 << 21) | OPC_CP0
,
944 OPC_C0_7
= (0x17 << 21) | OPC_CP0
,
945 OPC_C0_8
= (0x18 << 21) | OPC_CP0
,
946 OPC_C0_9
= (0x19 << 21) | OPC_CP0
,
947 OPC_C0_A
= (0x1A << 21) | OPC_CP0
,
948 OPC_C0_B
= (0x1B << 21) | OPC_CP0
,
949 OPC_C0_C
= (0x1C << 21) | OPC_CP0
,
950 OPC_C0_D
= (0x1D << 21) | OPC_CP0
,
951 OPC_C0_E
= (0x1E << 21) | OPC_CP0
,
952 OPC_C0_F
= (0x1F << 21) | OPC_CP0
,
956 #define MASK_MFMC0(op) (MASK_CP0(op) | (op & 0xFFFF))
959 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
960 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
961 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
962 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
963 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
964 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
965 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
966 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
969 /* Coprocessor 0 (with rs == C0) */
970 #define MASK_C0(op) (MASK_CP0(op) | (op & 0x3F))
973 OPC_TLBR
= 0x01 | OPC_C0
,
974 OPC_TLBWI
= 0x02 | OPC_C0
,
975 OPC_TLBINV
= 0x03 | OPC_C0
,
976 OPC_TLBINVF
= 0x04 | OPC_C0
,
977 OPC_TLBWR
= 0x06 | OPC_C0
,
978 OPC_TLBP
= 0x08 | OPC_C0
,
979 OPC_RFE
= 0x10 | OPC_C0
,
980 OPC_ERET
= 0x18 | OPC_C0
,
981 OPC_DERET
= 0x1F | OPC_C0
,
982 OPC_WAIT
= 0x20 | OPC_C0
,
985 #define MASK_CP2(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
988 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
989 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
990 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
991 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
992 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
993 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
994 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
995 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
996 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
997 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
998 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
1001 #define MASK_LMMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1004 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1005 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1006 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1007 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1008 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1009 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1010 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1011 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1013 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1014 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1015 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1016 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1017 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1018 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1019 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1020 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1022 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1023 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1024 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1025 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1026 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1027 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1028 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1029 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1031 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1032 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1033 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1034 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1035 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1036 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1037 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1038 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1040 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1041 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1042 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1043 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1044 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1045 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1047 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1048 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1049 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1050 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1051 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1052 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1054 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1055 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1056 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1057 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1058 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1059 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1061 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1062 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1063 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1064 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1065 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1066 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1068 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1069 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1070 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1071 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1072 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1073 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1075 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1076 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1077 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1078 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1079 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1080 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1082 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1083 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1084 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1085 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1086 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1087 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1089 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1090 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1091 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1092 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1093 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1094 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1098 #define MASK_CP3(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1101 OPC_LWXC1
= 0x00 | OPC_CP3
,
1102 OPC_LDXC1
= 0x01 | OPC_CP3
,
1103 OPC_LUXC1
= 0x05 | OPC_CP3
,
1104 OPC_SWXC1
= 0x08 | OPC_CP3
,
1105 OPC_SDXC1
= 0x09 | OPC_CP3
,
1106 OPC_SUXC1
= 0x0D | OPC_CP3
,
1107 OPC_PREFX
= 0x0F | OPC_CP3
,
1108 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1109 OPC_MADD_S
= 0x20 | OPC_CP3
,
1110 OPC_MADD_D
= 0x21 | OPC_CP3
,
1111 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1112 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1113 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1114 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1115 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1116 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1117 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1118 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1119 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1120 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
/*
 * MMI (MultiMedia Instruction) encodings
 * ======================================
 *
 * MMI instructions encoding table keys:
 *
 *     *   This code is reserved for future use. An attempt to execute it
 *         causes a Reserved Instruction exception.
 *     %   This code indicates an instruction class. The instruction word
 *         must be further decoded by examining additional tables that show
 *         the values for other instruction fields.
 *     #   This code is reserved for the unsupported instructions DMULT,
 *         DMULTU, DDIV, DDIVU, LL, LLD, SC, SCD, LWC2 and SWC2. An attempt
 *         to execute it causes a Reserved Instruction exception.
 *
 * MMI instructions encoded by opcode field (MMI, LQ, SQ):
 *
 *  31    26                                        0
 * +--------+----------------------------------------+
 * | opcode |                                        |
 * +--------+----------------------------------------+
 *
 *   opcode  bits 28..26
 *     bits |   0   |   1   |   2   |   3   |   4   |   5   |   6   |   7
 *   31..29 |  000  |  001  |  010  |  011  |  100  |  101  |  110  |  111
 *   -------+-------+-------+-------+-------+-------+-------+-------+-------
 *    0 000 |SPECIAL| REGIMM|   J   |  JAL  |  BEQ  |  BNE  |  BLEZ |  BGTZ
 *    1 001 |  ADDI | ADDIU |  SLTI | SLTIU |  ANDI |  ORI  |  XORI |  LUI
 *    2 010 |  COP0 |  COP1 |   *   |   *   |  BEQL |  BNEL | BLEZL | BGTZL
 *    3 011 | DADDI | DADDIU|  LDL  |  LDR  |  MMI% |   *   |   LQ  |   SQ
 *    4 100 |   LB  |   LH  |  LWL  |   LW  |  LBU  |  LHU  |  LWR  |  LWU
 *    5 101 |   SB  |   SH  |  SWL  |   SW  |  SDL  |  SDR  |  SWR  | CACHE
 *    6 110 |   #   |  LWC1 |   #   |  PREF |   #   |  LDC1 |   #   |   LD
 *    7 111 |   #   |  SWC1 |   #   |   *   |   #   |  SDC1 |   #   |   SD
 */
enum {
    MMI_OPC_CLASS_MMI = 0x1C << 26,    /* Same as OPC_SPECIAL2 */
    MMI_OPC_SQ        = 0x1F << 26,    /* Same as OPC_SPECIAL3 */
};

/*
 * MMI instructions with opcode field = MMI:
 *
 *  31    26                                 5      0
 * +--------+-------------------------------+--------+
 * |   MMI  |                               |function|
 * +--------+-------------------------------+--------+
 *
 * function  bits 2..0
 *     bits |   0   |   1   |   2   |   3   |   4   |   5   |   6   |   7
 *     5..3 |  000  |  001  |  010  |  011  |  100  |  101  |  110  |  111
 *   -------+-------+-------+-------+-------+-------+-------+-------+-------
 *    0 000 |  MADD | MADDU |   *   |   *   | PLZCW |   *   |   *   |   *
 *    1 001 | MMI0% | MMI2% |   *   |   *   |   *   |   *   |   *   |   *
 *    2 010 | MFHI1 | MTHI1 | MFLO1 | MTLO1 |   *   |   *   |   *   |   *
 *    3 011 | MULT1 | MULTU1|  DIV1 | DIVU1 |   *   |   *   |   *   |   *
 *    4 100 | MADD1 | MADDU1|   *   |   *   |   *   |   *   |   *   |   *
 *    5 101 | MMI1% | MMI3% |   *   |   *   |   *   |   *   |   *   |   *
 *    6 110 | PMFHL | PMTHL |   *   |   *   | PSLLH |   *   | PSRLH | PSRAH
 *    7 111 |   *   |   *   |   *   |   *   | PSLLW |   *   | PSRLW | PSRAW
 */
#define MASK_MMI(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))

enum {
    MMI_OPC_MADD   = 0x00 | MMI_OPC_CLASS_MMI, /* Same as OPC_MADD */
    MMI_OPC_MADDU  = 0x01 | MMI_OPC_CLASS_MMI, /* Same as OPC_MADDU */
    MMI_OPC_MULT1  = 0x18 | MMI_OPC_CLASS_MMI, /* Same minor as OPC_MULT */
    MMI_OPC_MULTU1 = 0x19 | MMI_OPC_CLASS_MMI, /* Same min. as OPC_MULTU */
    MMI_OPC_DIV1   = 0x1A | MMI_OPC_CLASS_MMI, /* Same minor as OPC_DIV */
    MMI_OPC_DIVU1  = 0x1B | MMI_OPC_CLASS_MMI, /* Same minor as OPC_DIVU */
    MMI_OPC_MADD1  = 0x20 | MMI_OPC_CLASS_MMI,
    MMI_OPC_MADDU1 = 0x21 | MMI_OPC_CLASS_MMI,
};
1198 /* global register indices */
1199 TCGv cpu_gpr
[32], cpu_PC
;
1201 * For CPUs using 128-bit GPR registers, we put the lower halves in cpu_gpr[])
1202 * and the upper halves in cpu_gpr_hi[].
1204 TCGv_i64 cpu_gpr_hi
[32];
1205 TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
1206 static TCGv cpu_dspctrl
, btarget
;
1208 static TCGv cpu_lladdr
, cpu_llval
;
1209 static TCGv_i32 hflags
;
1210 TCGv_i32 fpu_fcr0
, fpu_fcr31
;
1211 TCGv_i64 fpu_f64
[32];
1213 #include "exec/gen-icount.h"
/* Register names for the DSP accumulator HI/LO halves. */
static const char regnames_HI[][4] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char regnames_LO[][4] = {
    "LO0", "LO1", "LO2", "LO3",
};
1223 /* General purpose registers moves. */
1224 void gen_load_gpr(TCGv t
, int reg
)
1227 tcg_gen_movi_tl(t
, 0);
1229 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
1233 void gen_store_gpr(TCGv t
, int reg
)
1236 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
#if defined(TARGET_MIPS64)
/* Moves for the upper 64 bits of 128-bit GPRs (cpu_gpr_hi). */
void gen_load_gpr_hi(TCGv_i64 t, int reg)
{
    if (reg == 0) {
        tcg_gen_movi_i64(t, 0);
    } else {
        tcg_gen_mov_i64(t, cpu_gpr_hi[reg]);
    }
}

void gen_store_gpr_hi(TCGv_i64 t, int reg)
{
    if (reg != 0) {
        tcg_gen_mov_i64(cpu_gpr_hi[reg], t);
    }
}
#endif /* TARGET_MIPS64 */
1258 /* Moves to/from shadow registers. */
1259 static inline void gen_load_srsgpr(int from
, int to
)
1261 TCGv t0
= tcg_temp_new();
1264 tcg_gen_movi_tl(t0
, 0);
1266 TCGv_i32 t2
= tcg_temp_new_i32();
1267 TCGv_ptr addr
= tcg_temp_new_ptr();
1269 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1270 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1271 tcg_gen_andi_i32(t2
, t2
, 0xf);
1272 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1273 tcg_gen_ext_i32_ptr(addr
, t2
);
1274 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1276 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
1277 tcg_temp_free_ptr(addr
);
1278 tcg_temp_free_i32(t2
);
1280 gen_store_gpr(t0
, to
);
1284 static inline void gen_store_srsgpr(int from
, int to
)
1287 TCGv t0
= tcg_temp_new();
1288 TCGv_i32 t2
= tcg_temp_new_i32();
1289 TCGv_ptr addr
= tcg_temp_new_ptr();
1291 gen_load_gpr(t0
, from
);
1292 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1293 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1294 tcg_gen_andi_i32(t2
, t2
, 0xf);
1295 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1296 tcg_gen_ext_i32_ptr(addr
, t2
);
1297 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1299 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
1300 tcg_temp_free_ptr(addr
);
1301 tcg_temp_free_i32(t2
);
1307 static inline void gen_save_pc(target_ulong pc
)
1309 tcg_gen_movi_tl(cpu_PC
, pc
);
1312 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
1314 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
1315 if (do_save_pc
&& ctx
->base
.pc_next
!= ctx
->saved_pc
) {
1316 gen_save_pc(ctx
->base
.pc_next
);
1317 ctx
->saved_pc
= ctx
->base
.pc_next
;
1319 if (ctx
->hflags
!= ctx
->saved_hflags
) {
1320 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
1321 ctx
->saved_hflags
= ctx
->hflags
;
1322 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1328 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
1334 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
1336 ctx
->saved_hflags
= ctx
->hflags
;
1337 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1343 ctx
->btarget
= env
->btarget
;
1348 void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
1350 save_cpu_state(ctx
, 1);
1351 gen_helper_raise_exception_err(cpu_env
, tcg_constant_i32(excp
),
1352 tcg_constant_i32(err
));
1353 ctx
->base
.is_jmp
= DISAS_NORETURN
;
1356 void generate_exception(DisasContext
*ctx
, int excp
)
1358 gen_helper_raise_exception(cpu_env
, tcg_constant_i32(excp
));
1361 void generate_exception_end(DisasContext
*ctx
, int excp
)
1363 generate_exception_err(ctx
, excp
, 0);
1366 void generate_exception_break(DisasContext
*ctx
, int code
)
1368 #ifdef CONFIG_USER_ONLY
1369 /* Pass the break code along to cpu_loop. */
1370 tcg_gen_st_i32(tcg_constant_i32(code
), cpu_env
,
1371 offsetof(CPUMIPSState
, error_code
));
1373 generate_exception_end(ctx
, EXCP_BREAK
);
1376 void gen_reserved_instruction(DisasContext
*ctx
)
1378 generate_exception_end(ctx
, EXCP_RI
);
1381 /* Floating point register moves. */
1382 void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1384 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1385 generate_exception(ctx
, EXCP_RI
);
1387 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
1390 void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1393 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1394 generate_exception(ctx
, EXCP_RI
);
1396 t64
= tcg_temp_new_i64();
1397 tcg_gen_extu_i32_i64(t64
, t
);
1398 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
1399 tcg_temp_free_i64(t64
);
1402 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1404 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1405 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
1407 gen_load_fpr32(ctx
, t
, reg
| 1);
1411 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1413 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1414 TCGv_i64 t64
= tcg_temp_new_i64();
1415 tcg_gen_extu_i32_i64(t64
, t
);
1416 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
1417 tcg_temp_free_i64(t64
);
1419 gen_store_fpr32(ctx
, t
, reg
| 1);
1423 void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1425 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1426 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
1428 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
1432 void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1434 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1435 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
1438 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
1439 t0
= tcg_temp_new_i64();
1440 tcg_gen_shri_i64(t0
, t
, 32);
1441 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
1442 tcg_temp_free_i64(t0
);
/*
 * Return the FCSR bit index of FP condition code 'cc':
 * cc 0 lives at bit 23, cc 1..7 at bits 25..31.
 */
int get_fp_bit(int cc)
{
    if (cc) {
        return 24 + cc;
    } else {
        return 23;
    }
}
1455 /* Addresses computation */
1456 void gen_op_addr_add(DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
1458 tcg_gen_add_tl(ret
, arg0
, arg1
);
1460 #if defined(TARGET_MIPS64)
1461 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1462 tcg_gen_ext32s_i64(ret
, ret
);
1467 static inline void gen_op_addr_addi(DisasContext
*ctx
, TCGv ret
, TCGv base
,
1470 tcg_gen_addi_tl(ret
, base
, ofs
);
1472 #if defined(TARGET_MIPS64)
1473 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1474 tcg_gen_ext32s_i64(ret
, ret
);
1479 /* Addresses computation (translation time) */
1480 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
1483 target_long sum
= base
+ offset
;
1485 #if defined(TARGET_MIPS64)
1486 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1493 /* Sign-extract the low 32-bits to a target_long. */
1494 void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
1496 #if defined(TARGET_MIPS64)
1497 tcg_gen_ext32s_i64(ret
, arg
);
1499 tcg_gen_extrl_i64_i32(ret
, arg
);
1503 /* Sign-extract the high 32-bits to a target_long. */
1504 void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
1506 #if defined(TARGET_MIPS64)
1507 tcg_gen_sari_i64(ret
, arg
, 32);
1509 tcg_gen_extrh_i64_i32(ret
, arg
);
1513 bool check_cp0_enabled(DisasContext
*ctx
)
1515 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
1516 generate_exception_end(ctx
, EXCP_CpU
);
1522 void check_cp1_enabled(DisasContext
*ctx
)
1524 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
))) {
1525 generate_exception_err(ctx
, EXCP_CpU
, 1);
1530 * Verify that the processor is running with COP1X instructions enabled.
1531 * This is associated with the nabla symbol in the MIPS32 and MIPS64
1534 void check_cop1x(DisasContext
*ctx
)
1536 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
))) {
1537 gen_reserved_instruction(ctx
);
1542 * Verify that the processor is running with 64-bit floating-point
1543 * operations enabled.
1545 void check_cp1_64bitmode(DisasContext
*ctx
)
1547 if (unlikely(~ctx
->hflags
& MIPS_HFLAG_F64
)) {
1548 gen_reserved_instruction(ctx
);
1553 * Verify if floating point register is valid; an operation is not defined
1554 * if bit 0 of any register specification is set and the FR bit in the
1555 * Status register equals zero, since the register numbers specify an
1556 * even-odd pair of adjacent coprocessor general registers. When the FR bit
1557 * in the Status register equals one, both even and odd register numbers
1558 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
1560 * Multiple 64 bit wide registers can be checked by calling
1561 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
1563 void check_cp1_registers(DisasContext
*ctx
, int regs
)
1565 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1))) {
1566 gen_reserved_instruction(ctx
);
1571 * Verify that the processor is running with DSP instructions enabled.
1572 * This is enabled by CP0 Status register MX(24) bit.
1574 static inline void check_dsp(DisasContext
*ctx
)
1576 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
1577 if (ctx
->insn_flags
& ASE_DSP
) {
1578 generate_exception_end(ctx
, EXCP_DSPDIS
);
1580 gen_reserved_instruction(ctx
);
1585 static inline void check_dsp_r2(DisasContext
*ctx
)
1587 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R2
))) {
1588 if (ctx
->insn_flags
& ASE_DSP
) {
1589 generate_exception_end(ctx
, EXCP_DSPDIS
);
1591 gen_reserved_instruction(ctx
);
1596 static inline void check_dsp_r3(DisasContext
*ctx
)
1598 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R3
))) {
1599 if (ctx
->insn_flags
& ASE_DSP
) {
1600 generate_exception_end(ctx
, EXCP_DSPDIS
);
1602 gen_reserved_instruction(ctx
);
1608 * This code generates a "reserved instruction" exception if the
1609 * CPU does not support the instruction set corresponding to flags.
1611 void check_insn(DisasContext
*ctx
, uint64_t flags
)
1613 if (unlikely(!(ctx
->insn_flags
& flags
))) {
1614 gen_reserved_instruction(ctx
);
1619 * This code generates a "reserved instruction" exception if the
1620 * CPU has corresponding flag set which indicates that the instruction
1623 static inline void check_insn_opc_removed(DisasContext
*ctx
, uint64_t flags
)
1625 if (unlikely(ctx
->insn_flags
& flags
)) {
1626 gen_reserved_instruction(ctx
);
1631 * The Linux kernel traps certain reserved instruction exceptions to
1632 * emulate the corresponding instructions. QEMU is the kernel in user
1633 * mode, so those traps are emulated by accepting the instructions.
1635 * A reserved instruction exception is generated for flagged CPUs if
1636 * QEMU runs in system mode.
1638 static inline void check_insn_opc_user_only(DisasContext
*ctx
, uint64_t flags
)
1640 #ifndef CONFIG_USER_ONLY
1641 check_insn_opc_removed(ctx
, flags
);
1646 * This code generates a "reserved instruction" exception if the
1647 * CPU does not support 64-bit paired-single (PS) floating point data type.
1649 static inline void check_ps(DisasContext
*ctx
)
1651 if (unlikely(!ctx
->ps
)) {
1652 generate_exception(ctx
, EXCP_RI
);
1654 check_cp1_64bitmode(ctx
);
1658 * This code generates a "reserved instruction" exception if cpu is not
1659 * 64-bit or 64-bit instructions are not enabled.
1661 void check_mips_64(DisasContext
*ctx
)
1663 if (unlikely((TARGET_LONG_BITS
!= 64) || !(ctx
->hflags
& MIPS_HFLAG_64
))) {
1664 gen_reserved_instruction(ctx
);
1668 #ifndef CONFIG_USER_ONLY
1669 static inline void check_mvh(DisasContext
*ctx
)
1671 if (unlikely(!ctx
->mvh
)) {
1672 generate_exception(ctx
, EXCP_RI
);
1678 * This code generates a "reserved instruction" exception if the
1679 * Config5 XNP bit is set.
1681 static inline void check_xnp(DisasContext
*ctx
)
1683 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_XNP
))) {
1684 gen_reserved_instruction(ctx
);
1688 #ifndef CONFIG_USER_ONLY
1690 * This code generates a "reserved instruction" exception if the
1691 * Config3 PW bit is NOT set.
1693 static inline void check_pw(DisasContext
*ctx
)
1695 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_PW
)))) {
1696 gen_reserved_instruction(ctx
);
1702 * This code generates a "reserved instruction" exception if the
1703 * Config3 MT bit is NOT set.
1705 static inline void check_mt(DisasContext
*ctx
)
1707 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
1708 gen_reserved_instruction(ctx
);
1712 #ifndef CONFIG_USER_ONLY
1714 * This code generates a "coprocessor unusable" exception if CP0 is not
1715 * available, and, if that is not the case, generates a "reserved instruction"
1716 * exception if the Config5 MT bit is NOT set. This is needed for availability
1717 * control of some of MT ASE instructions.
1719 static inline void check_cp0_mt(DisasContext
*ctx
)
1721 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
1722 generate_exception_end(ctx
, EXCP_CpU
);
1724 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
1725 gen_reserved_instruction(ctx
);
1732 * This code generates a "reserved instruction" exception if the
1733 * Config5 NMS bit is set.
1735 static inline void check_nms(DisasContext
*ctx
)
1737 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
))) {
1738 gen_reserved_instruction(ctx
);
1743 * This code generates a "reserved instruction" exception if the
1744 * Config5 NMS bit is set, and Config1 DL, Config1 IL, Config2 SL,
1745 * Config2 TL, and Config5 L2C are unset.
1747 static inline void check_nms_dl_il_sl_tl_l2c(DisasContext
*ctx
)
1749 if (unlikely((ctx
->CP0_Config5
& (1 << CP0C5_NMS
)) &&
1750 !(ctx
->CP0_Config1
& (1 << CP0C1_DL
)) &&
1751 !(ctx
->CP0_Config1
& (1 << CP0C1_IL
)) &&
1752 !(ctx
->CP0_Config2
& (1 << CP0C2_SL
)) &&
1753 !(ctx
->CP0_Config2
& (1 << CP0C2_TL
)) &&
1754 !(ctx
->CP0_Config5
& (1 << CP0C5_L2C
)))) {
1755 gen_reserved_instruction(ctx
);
1760 * This code generates a "reserved instruction" exception if the
1761 * Config5 EVA bit is NOT set.
1763 static inline void check_eva(DisasContext
*ctx
)
1765 if (unlikely(!(ctx
->CP0_Config5
& (1 << CP0C5_EVA
)))) {
1766 gen_reserved_instruction(ctx
);
1772 * Define small wrappers for gen_load_fpr* so that we have a uniform
1773 * calling interface for 32 and 64-bit FPRs. No sense in changing
1774 * all callers for gen_load_fpr32 when we need the CTX parameter for
1777 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
1778 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
1779 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
1780 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
1781 int ft, int fs, int cc) \
1783 TCGv_i##bits fp0 = tcg_temp_new_i##bits(); \
1784 TCGv_i##bits fp1 = tcg_temp_new_i##bits(); \
1793 check_cp1_registers(ctx, fs | ft); \
1801 gen_ldcmp_fpr##bits(ctx, fp0, fs); \
1802 gen_ldcmp_fpr##bits(ctx, fp1, ft); \
1805 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); \
1808 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); \
1811 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); \
1814 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); \
1817 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); \
1820 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); \
1823 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); \
1826 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); \
1829 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); \
1832 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); \
1835 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); \
1838 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); \
1841 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); \
1844 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); \
1847 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); \
1850 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); \
1855 tcg_temp_free_i##bits(fp0); \
1856 tcg_temp_free_i##bits(fp1); \
1859 FOP_CONDS(, 0, d
, FMT_D
, 64)
1860 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
1861 FOP_CONDS(, 0, s
, FMT_S
, 32)
1862 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
1863 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
1864 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
1867 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
1868 static inline void gen_r6_cmp_ ## fmt(DisasContext *ctx, int n, \
1869 int ft, int fs, int fd) \
1871 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
1872 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
1873 if (ifmt == FMT_D) { \
1874 check_cp1_registers(ctx, fs | ft | fd); \
1876 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
1877 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
1880 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
1883 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
1886 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
1889 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
1892 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
1895 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
1898 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
1901 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
1904 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
1907 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
1910 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
1913 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
1916 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
1919 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
1922 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
1925 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
1928 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
1931 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
1934 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
1937 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
1940 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
1943 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
1949 tcg_temp_free_i ## bits(fp0); \
1950 tcg_temp_free_i ## bits(fp1); \
1953 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
1954 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
1956 #undef gen_ldcmp_fpr32
1957 #undef gen_ldcmp_fpr64
1959 /* load/store instructions. */
1960 #ifdef CONFIG_USER_ONLY
1961 #define OP_LD_ATOMIC(insn, fname) \
1962 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
1963 DisasContext *ctx) \
1965 TCGv t0 = tcg_temp_new(); \
1966 tcg_gen_mov_tl(t0, arg1); \
1967 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
1968 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
1969 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
1970 tcg_temp_free(t0); \
1973 #define OP_LD_ATOMIC(insn, fname) \
1974 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
1975 DisasContext *ctx) \
1977 gen_helper_##insn(ret, cpu_env, arg1, tcg_constant_i32(mem_idx)); \
1980 OP_LD_ATOMIC(ll
, ld32s
);
1981 #if defined(TARGET_MIPS64)
1982 OP_LD_ATOMIC(lld
, ld64
);
1986 void gen_base_offset_addr(DisasContext
*ctx
, TCGv addr
, int base
, int offset
)
1989 tcg_gen_movi_tl(addr
, offset
);
1990 } else if (offset
== 0) {
1991 gen_load_gpr(addr
, base
);
1993 tcg_gen_movi_tl(addr
, offset
);
1994 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
1998 static target_ulong
pc_relative_pc(DisasContext
*ctx
)
2000 target_ulong pc
= ctx
->base
.pc_next
;
2002 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
2003 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
2008 pc
&= ~(target_ulong
)3;
2013 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2014 int rt
, int base
, int offset
)
2017 int mem_idx
= ctx
->mem_idx
;
2019 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
|
2022 * Loongson CPU uses a load to zero register for prefetch.
2023 * We emulate it as a NOP. On other CPU we must perform the
2024 * actual memory access.
2029 t0
= tcg_temp_new();
2030 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2033 #if defined(TARGET_MIPS64)
2035 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
2036 ctx
->default_tcg_memop_mask
);
2037 gen_store_gpr(t0
, rt
);
2040 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUQ
|
2041 ctx
->default_tcg_memop_mask
);
2042 gen_store_gpr(t0
, rt
);
2046 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
2047 gen_store_gpr(t0
, rt
);
2050 t1
= tcg_temp_new();
2052 * Do a byte access to possibly trigger a page
2053 * fault with the unaligned address.
2055 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2056 tcg_gen_andi_tl(t1
, t0
, 7);
2057 if (!cpu_is_bigendian(ctx
)) {
2058 tcg_gen_xori_tl(t1
, t1
, 7);
2060 tcg_gen_shli_tl(t1
, t1
, 3);
2061 tcg_gen_andi_tl(t0
, t0
, ~7);
2062 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUQ
);
2063 tcg_gen_shl_tl(t0
, t0
, t1
);
2064 t2
= tcg_const_tl(-1);
2065 tcg_gen_shl_tl(t2
, t2
, t1
);
2066 gen_load_gpr(t1
, rt
);
2067 tcg_gen_andc_tl(t1
, t1
, t2
);
2069 tcg_gen_or_tl(t0
, t0
, t1
);
2071 gen_store_gpr(t0
, rt
);
2074 t1
= tcg_temp_new();
2076 * Do a byte access to possibly trigger a page
2077 * fault with the unaligned address.
2079 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2080 tcg_gen_andi_tl(t1
, t0
, 7);
2081 if (cpu_is_bigendian(ctx
)) {
2082 tcg_gen_xori_tl(t1
, t1
, 7);
2084 tcg_gen_shli_tl(t1
, t1
, 3);
2085 tcg_gen_andi_tl(t0
, t0
, ~7);
2086 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUQ
);
2087 tcg_gen_shr_tl(t0
, t0
, t1
);
2088 tcg_gen_xori_tl(t1
, t1
, 63);
2089 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2090 tcg_gen_shl_tl(t2
, t2
, t1
);
2091 gen_load_gpr(t1
, rt
);
2092 tcg_gen_and_tl(t1
, t1
, t2
);
2094 tcg_gen_or_tl(t0
, t0
, t1
);
2096 gen_store_gpr(t0
, rt
);
2099 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2100 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2102 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUQ
);
2103 gen_store_gpr(t0
, rt
);
2107 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2108 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2110 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
2111 gen_store_gpr(t0
, rt
);
2114 mem_idx
= MIPS_HFLAG_UM
;
2117 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
2118 ctx
->default_tcg_memop_mask
);
2119 gen_store_gpr(t0
, rt
);
2122 mem_idx
= MIPS_HFLAG_UM
;
2125 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
2126 ctx
->default_tcg_memop_mask
);
2127 gen_store_gpr(t0
, rt
);
2130 mem_idx
= MIPS_HFLAG_UM
;
2133 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
2134 ctx
->default_tcg_memop_mask
);
2135 gen_store_gpr(t0
, rt
);
2138 mem_idx
= MIPS_HFLAG_UM
;
2141 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
2142 gen_store_gpr(t0
, rt
);
2145 mem_idx
= MIPS_HFLAG_UM
;
2148 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
2149 gen_store_gpr(t0
, rt
);
2152 mem_idx
= MIPS_HFLAG_UM
;
2155 t1
= tcg_temp_new();
2157 * Do a byte access to possibly trigger a page
2158 * fault with the unaligned address.
2160 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2161 tcg_gen_andi_tl(t1
, t0
, 3);
2162 if (!cpu_is_bigendian(ctx
)) {
2163 tcg_gen_xori_tl(t1
, t1
, 3);
2165 tcg_gen_shli_tl(t1
, t1
, 3);
2166 tcg_gen_andi_tl(t0
, t0
, ~3);
2167 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2168 tcg_gen_shl_tl(t0
, t0
, t1
);
2169 t2
= tcg_const_tl(-1);
2170 tcg_gen_shl_tl(t2
, t2
, t1
);
2171 gen_load_gpr(t1
, rt
);
2172 tcg_gen_andc_tl(t1
, t1
, t2
);
2174 tcg_gen_or_tl(t0
, t0
, t1
);
2176 tcg_gen_ext32s_tl(t0
, t0
);
2177 gen_store_gpr(t0
, rt
);
2180 mem_idx
= MIPS_HFLAG_UM
;
2183 t1
= tcg_temp_new();
2185 * Do a byte access to possibly trigger a page
2186 * fault with the unaligned address.
2188 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2189 tcg_gen_andi_tl(t1
, t0
, 3);
2190 if (cpu_is_bigendian(ctx
)) {
2191 tcg_gen_xori_tl(t1
, t1
, 3);
2193 tcg_gen_shli_tl(t1
, t1
, 3);
2194 tcg_gen_andi_tl(t0
, t0
, ~3);
2195 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2196 tcg_gen_shr_tl(t0
, t0
, t1
);
2197 tcg_gen_xori_tl(t1
, t1
, 31);
2198 t2
= tcg_const_tl(0xfffffffeull
);
2199 tcg_gen_shl_tl(t2
, t2
, t1
);
2200 gen_load_gpr(t1
, rt
);
2201 tcg_gen_and_tl(t1
, t1
, t2
);
2203 tcg_gen_or_tl(t0
, t0
, t1
);
2205 tcg_gen_ext32s_tl(t0
, t0
);
2206 gen_store_gpr(t0
, rt
);
2209 mem_idx
= MIPS_HFLAG_UM
;
2213 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
2214 gen_store_gpr(t0
, rt
);
2221 static void gen_st(DisasContext
*ctx
, uint32_t opc
, int rt
,
2222 int base
, int offset
)
2224 TCGv t0
= tcg_temp_new();
2225 TCGv t1
= tcg_temp_new();
2226 int mem_idx
= ctx
->mem_idx
;
2228 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2229 gen_load_gpr(t1
, rt
);
2231 #if defined(TARGET_MIPS64)
2233 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUQ
|
2234 ctx
->default_tcg_memop_mask
);
2237 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
2240 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
2244 mem_idx
= MIPS_HFLAG_UM
;
2247 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
2248 ctx
->default_tcg_memop_mask
);
2251 mem_idx
= MIPS_HFLAG_UM
;
2254 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
2255 ctx
->default_tcg_memop_mask
);
2258 mem_idx
= MIPS_HFLAG_UM
;
2261 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
2264 mem_idx
= MIPS_HFLAG_UM
;
2267 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
2270 mem_idx
= MIPS_HFLAG_UM
;
2273 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
2281 /* Store conditional */
2282 static void gen_st_cond(DisasContext
*ctx
, int rt
, int base
, int offset
,
2283 MemOp tcg_mo
, bool eva
)
2286 TCGLabel
*l1
= gen_new_label();
2287 TCGLabel
*done
= gen_new_label();
2289 t0
= tcg_temp_new();
2290 addr
= tcg_temp_new();
2291 /* compare the address against that of the preceding LL */
2292 gen_base_offset_addr(ctx
, addr
, base
, offset
);
2293 tcg_gen_brcond_tl(TCG_COND_EQ
, addr
, cpu_lladdr
, l1
);
2294 tcg_temp_free(addr
);
2295 tcg_gen_movi_tl(t0
, 0);
2296 gen_store_gpr(t0
, rt
);
2300 /* generate cmpxchg */
2301 val
= tcg_temp_new();
2302 gen_load_gpr(val
, rt
);
2303 tcg_gen_atomic_cmpxchg_tl(t0
, cpu_lladdr
, cpu_llval
, val
,
2304 eva
? MIPS_HFLAG_UM
: ctx
->mem_idx
, tcg_mo
);
2305 tcg_gen_setcond_tl(TCG_COND_EQ
, t0
, t0
, cpu_llval
);
2306 gen_store_gpr(t0
, rt
);
2309 gen_set_label(done
);
2313 /* Load and store */
2314 static void gen_flt_ldst(DisasContext
*ctx
, uint32_t opc
, int ft
,
2318 * Don't do NOP if destination is zero: we must perform the actual
2324 TCGv_i32 fp0
= tcg_temp_new_i32();
2325 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2326 ctx
->default_tcg_memop_mask
);
2327 gen_store_fpr32(ctx
, fp0
, ft
);
2328 tcg_temp_free_i32(fp0
);
2333 TCGv_i32 fp0
= tcg_temp_new_i32();
2334 gen_load_fpr32(ctx
, fp0
, ft
);
2335 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2336 ctx
->default_tcg_memop_mask
);
2337 tcg_temp_free_i32(fp0
);
2342 TCGv_i64 fp0
= tcg_temp_new_i64();
2343 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEUQ
|
2344 ctx
->default_tcg_memop_mask
);
2345 gen_store_fpr64(ctx
, fp0
, ft
);
2346 tcg_temp_free_i64(fp0
);
2351 TCGv_i64 fp0
= tcg_temp_new_i64();
2352 gen_load_fpr64(ctx
, fp0
, ft
);
2353 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEUQ
|
2354 ctx
->default_tcg_memop_mask
);
2355 tcg_temp_free_i64(fp0
);
2359 MIPS_INVAL("flt_ldst");
2360 gen_reserved_instruction(ctx
);
2365 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2366 int rs
, int16_t imm
)
2368 TCGv t0
= tcg_temp_new();
2370 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2371 check_cp1_enabled(ctx
);
2375 check_insn(ctx
, ISA_MIPS2
);
2378 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
2379 gen_flt_ldst(ctx
, op
, rt
, t0
);
2382 generate_exception_err(ctx
, EXCP_CpU
, 1);
2387 /* Arithmetic with immediate operand */
2388 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2389 int rt
, int rs
, int imm
)
2391 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2393 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2395 * If no destination, treat it as a NOP.
2396 * For addi, we must generate the overflow exception when needed.
2403 TCGv t0
= tcg_temp_new();
2404 TCGv t1
= tcg_temp_new();
2405 TCGv t2
= tcg_temp_new();
2406 TCGLabel
*l1
= gen_new_label();
2408 gen_load_gpr(t1
, rs
);
2409 tcg_gen_addi_tl(t0
, t1
, uimm
);
2410 tcg_gen_ext32s_tl(t0
, t0
);
2412 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2413 tcg_gen_xori_tl(t2
, t0
, uimm
);
2414 tcg_gen_and_tl(t1
, t1
, t2
);
2416 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2418 /* operands of same sign, result different sign */
2419 generate_exception(ctx
, EXCP_OVERFLOW
);
2421 tcg_gen_ext32s_tl(t0
, t0
);
2422 gen_store_gpr(t0
, rt
);
2428 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2429 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2431 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2434 #if defined(TARGET_MIPS64)
2437 TCGv t0
= tcg_temp_new();
2438 TCGv t1
= tcg_temp_new();
2439 TCGv t2
= tcg_temp_new();
2440 TCGLabel
*l1
= gen_new_label();
2442 gen_load_gpr(t1
, rs
);
2443 tcg_gen_addi_tl(t0
, t1
, uimm
);
2445 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2446 tcg_gen_xori_tl(t2
, t0
, uimm
);
2447 tcg_gen_and_tl(t1
, t1
, t2
);
2449 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2451 /* operands of same sign, result different sign */
2452 generate_exception(ctx
, EXCP_OVERFLOW
);
2454 gen_store_gpr(t0
, rt
);
2460 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2462 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2469 /* Logic with immediate operand */
2470 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2471 int rt
, int rs
, int16_t imm
)
2476 /* If no destination, treat it as a NOP. */
2479 uimm
= (uint16_t)imm
;
2482 if (likely(rs
!= 0)) {
2483 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2485 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2490 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2492 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2496 if (likely(rs
!= 0)) {
2497 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2499 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2503 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS_R6
)) {
2505 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2506 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2508 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2517 /* Set on less than with immediate operand */
2518 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2519 int rt
, int rs
, int16_t imm
)
2521 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2525 /* If no destination, treat it as a NOP. */
2528 t0
= tcg_temp_new();
2529 gen_load_gpr(t0
, rs
);
2532 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2535 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2541 /* Shifts with immediate operand */
2542 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2543 int rt
, int rs
, int16_t imm
)
2545 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2549 /* If no destination, treat it as a NOP. */
2553 t0
= tcg_temp_new();
2554 gen_load_gpr(t0
, rs
);
2557 tcg_gen_shli_tl(t0
, t0
, uimm
);
2558 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2561 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2565 tcg_gen_ext32u_tl(t0
, t0
);
2566 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2568 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2573 TCGv_i32 t1
= tcg_temp_new_i32();
2575 tcg_gen_trunc_tl_i32(t1
, t0
);
2576 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2577 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2578 tcg_temp_free_i32(t1
);
2580 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2583 #if defined(TARGET_MIPS64)
2585 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2588 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2591 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2595 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2597 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2601 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2604 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2607 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2610 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2618 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2619 int rd
, int rs
, int rt
)
2621 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2622 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2624 * If no destination, treat it as a NOP.
2625 * For add & sub, we must generate the overflow exception when needed.
2633 TCGv t0
= tcg_temp_new();
2634 TCGv t1
= tcg_temp_new();
2635 TCGv t2
= tcg_temp_new();
2636 TCGLabel
*l1
= gen_new_label();
2638 gen_load_gpr(t1
, rs
);
2639 gen_load_gpr(t2
, rt
);
2640 tcg_gen_add_tl(t0
, t1
, t2
);
2641 tcg_gen_ext32s_tl(t0
, t0
);
2642 tcg_gen_xor_tl(t1
, t1
, t2
);
2643 tcg_gen_xor_tl(t2
, t0
, t2
);
2644 tcg_gen_andc_tl(t1
, t2
, t1
);
2646 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2648 /* operands of same sign, result different sign */
2649 generate_exception(ctx
, EXCP_OVERFLOW
);
2651 gen_store_gpr(t0
, rd
);
2656 if (rs
!= 0 && rt
!= 0) {
2657 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2658 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2659 } else if (rs
== 0 && rt
!= 0) {
2660 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2661 } else if (rs
!= 0 && rt
== 0) {
2662 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2664 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2669 TCGv t0
= tcg_temp_new();
2670 TCGv t1
= tcg_temp_new();
2671 TCGv t2
= tcg_temp_new();
2672 TCGLabel
*l1
= gen_new_label();
2674 gen_load_gpr(t1
, rs
);
2675 gen_load_gpr(t2
, rt
);
2676 tcg_gen_sub_tl(t0
, t1
, t2
);
2677 tcg_gen_ext32s_tl(t0
, t0
);
2678 tcg_gen_xor_tl(t2
, t1
, t2
);
2679 tcg_gen_xor_tl(t1
, t0
, t1
);
2680 tcg_gen_and_tl(t1
, t1
, t2
);
2682 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2685 * operands of different sign, first operand and the result
2688 generate_exception(ctx
, EXCP_OVERFLOW
);
2690 gen_store_gpr(t0
, rd
);
2695 if (rs
!= 0 && rt
!= 0) {
2696 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2697 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2698 } else if (rs
== 0 && rt
!= 0) {
2699 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2700 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2701 } else if (rs
!= 0 && rt
== 0) {
2702 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2704 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2707 #if defined(TARGET_MIPS64)
2710 TCGv t0
= tcg_temp_new();
2711 TCGv t1
= tcg_temp_new();
2712 TCGv t2
= tcg_temp_new();
2713 TCGLabel
*l1
= gen_new_label();
2715 gen_load_gpr(t1
, rs
);
2716 gen_load_gpr(t2
, rt
);
2717 tcg_gen_add_tl(t0
, t1
, t2
);
2718 tcg_gen_xor_tl(t1
, t1
, t2
);
2719 tcg_gen_xor_tl(t2
, t0
, t2
);
2720 tcg_gen_andc_tl(t1
, t2
, t1
);
2722 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2724 /* operands of same sign, result different sign */
2725 generate_exception(ctx
, EXCP_OVERFLOW
);
2727 gen_store_gpr(t0
, rd
);
2732 if (rs
!= 0 && rt
!= 0) {
2733 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2734 } else if (rs
== 0 && rt
!= 0) {
2735 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2736 } else if (rs
!= 0 && rt
== 0) {
2737 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2739 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2744 TCGv t0
= tcg_temp_new();
2745 TCGv t1
= tcg_temp_new();
2746 TCGv t2
= tcg_temp_new();
2747 TCGLabel
*l1
= gen_new_label();
2749 gen_load_gpr(t1
, rs
);
2750 gen_load_gpr(t2
, rt
);
2751 tcg_gen_sub_tl(t0
, t1
, t2
);
2752 tcg_gen_xor_tl(t2
, t1
, t2
);
2753 tcg_gen_xor_tl(t1
, t0
, t1
);
2754 tcg_gen_and_tl(t1
, t1
, t2
);
2756 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2759 * Operands of different sign, first operand and result different
2762 generate_exception(ctx
, EXCP_OVERFLOW
);
2764 gen_store_gpr(t0
, rd
);
2769 if (rs
!= 0 && rt
!= 0) {
2770 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2771 } else if (rs
== 0 && rt
!= 0) {
2772 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2773 } else if (rs
!= 0 && rt
== 0) {
2774 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2776 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2781 if (likely(rs
!= 0 && rt
!= 0)) {
2782 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2783 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2785 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2791 /* Conditional move */
2792 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2793 int rd
, int rs
, int rt
)
2798 /* If no destination, treat it as a NOP. */
2802 t0
= tcg_temp_new();
2803 gen_load_gpr(t0
, rt
);
2804 t1
= tcg_const_tl(0);
2805 t2
= tcg_temp_new();
2806 gen_load_gpr(t2
, rs
);
2809 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2812 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2815 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2818 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2827 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2828 int rd
, int rs
, int rt
)
2831 /* If no destination, treat it as a NOP. */
2837 if (likely(rs
!= 0 && rt
!= 0)) {
2838 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2840 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2844 if (rs
!= 0 && rt
!= 0) {
2845 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2846 } else if (rs
== 0 && rt
!= 0) {
2847 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2848 } else if (rs
!= 0 && rt
== 0) {
2849 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2851 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2855 if (likely(rs
!= 0 && rt
!= 0)) {
2856 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2857 } else if (rs
== 0 && rt
!= 0) {
2858 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2859 } else if (rs
!= 0 && rt
== 0) {
2860 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2862 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2866 if (likely(rs
!= 0 && rt
!= 0)) {
2867 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2868 } else if (rs
== 0 && rt
!= 0) {
2869 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2870 } else if (rs
!= 0 && rt
== 0) {
2871 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2873 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2879 /* Set on lower than */
2880 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2881 int rd
, int rs
, int rt
)
2886 /* If no destination, treat it as a NOP. */
2890 t0
= tcg_temp_new();
2891 t1
= tcg_temp_new();
2892 gen_load_gpr(t0
, rs
);
2893 gen_load_gpr(t1
, rt
);
2896 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
2899 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
2907 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
2908 int rd
, int rs
, int rt
)
2914 * If no destination, treat it as a NOP.
2915 * For add & sub, we must generate the overflow exception when needed.
2920 t0
= tcg_temp_new();
2921 t1
= tcg_temp_new();
2922 gen_load_gpr(t0
, rs
);
2923 gen_load_gpr(t1
, rt
);
2926 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2927 tcg_gen_shl_tl(t0
, t1
, t0
);
2928 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2931 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2932 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2935 tcg_gen_ext32u_tl(t1
, t1
);
2936 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2937 tcg_gen_shr_tl(t0
, t1
, t0
);
2938 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2942 TCGv_i32 t2
= tcg_temp_new_i32();
2943 TCGv_i32 t3
= tcg_temp_new_i32();
2945 tcg_gen_trunc_tl_i32(t2
, t0
);
2946 tcg_gen_trunc_tl_i32(t3
, t1
);
2947 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2948 tcg_gen_rotr_i32(t2
, t3
, t2
);
2949 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2950 tcg_temp_free_i32(t2
);
2951 tcg_temp_free_i32(t3
);
2954 #if defined(TARGET_MIPS64)
2956 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2957 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2960 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2961 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2964 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2965 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
2968 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2969 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
2977 /* Arithmetic on HI/LO registers */
2978 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
2980 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
2991 #if defined(TARGET_MIPS64)
2993 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
2997 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3001 #if defined(TARGET_MIPS64)
3003 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3007 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3012 #if defined(TARGET_MIPS64)
3014 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3018 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3021 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3026 #if defined(TARGET_MIPS64)
3028 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3032 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3035 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3041 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3044 TCGv t0
= tcg_const_tl(addr
);
3045 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3046 gen_store_gpr(t0
, reg
);
3050 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3056 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3059 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3060 addr
= addr_add(ctx
, pc
, offset
);
3061 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3065 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3066 addr
= addr_add(ctx
, pc
, offset
);
3067 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3069 #if defined(TARGET_MIPS64)
3072 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3073 addr
= addr_add(ctx
, pc
, offset
);
3074 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3078 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3081 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3082 addr
= addr_add(ctx
, pc
, offset
);
3083 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3088 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3089 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3090 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3093 #if defined(TARGET_MIPS64)
3094 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3095 case R6_OPC_LDPC
+ (1 << 16):
3096 case R6_OPC_LDPC
+ (2 << 16):
3097 case R6_OPC_LDPC
+ (3 << 16):
3099 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3100 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3101 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUQ
);
3105 MIPS_INVAL("OPC_PCREL");
3106 gen_reserved_instruction(ctx
);
3113 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3122 t0
= tcg_temp_new();
3123 t1
= tcg_temp_new();
3125 gen_load_gpr(t0
, rs
);
3126 gen_load_gpr(t1
, rt
);
3131 TCGv t2
= tcg_temp_new();
3132 TCGv t3
= tcg_temp_new();
3133 tcg_gen_ext32s_tl(t0
, t0
);
3134 tcg_gen_ext32s_tl(t1
, t1
);
3135 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3136 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3137 tcg_gen_and_tl(t2
, t2
, t3
);
3138 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3139 tcg_gen_or_tl(t2
, t2
, t3
);
3140 tcg_gen_movi_tl(t3
, 0);
3141 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3142 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3143 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3150 TCGv t2
= tcg_temp_new();
3151 TCGv t3
= tcg_temp_new();
3152 tcg_gen_ext32s_tl(t0
, t0
);
3153 tcg_gen_ext32s_tl(t1
, t1
);
3154 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3155 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3156 tcg_gen_and_tl(t2
, t2
, t3
);
3157 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3158 tcg_gen_or_tl(t2
, t2
, t3
);
3159 tcg_gen_movi_tl(t3
, 0);
3160 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3161 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3162 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3169 TCGv t2
= tcg_const_tl(0);
3170 TCGv t3
= tcg_const_tl(1);
3171 tcg_gen_ext32u_tl(t0
, t0
);
3172 tcg_gen_ext32u_tl(t1
, t1
);
3173 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3174 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3175 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3182 TCGv t2
= tcg_const_tl(0);
3183 TCGv t3
= tcg_const_tl(1);
3184 tcg_gen_ext32u_tl(t0
, t0
);
3185 tcg_gen_ext32u_tl(t1
, t1
);
3186 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3187 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3188 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3195 TCGv_i32 t2
= tcg_temp_new_i32();
3196 TCGv_i32 t3
= tcg_temp_new_i32();
3197 tcg_gen_trunc_tl_i32(t2
, t0
);
3198 tcg_gen_trunc_tl_i32(t3
, t1
);
3199 tcg_gen_mul_i32(t2
, t2
, t3
);
3200 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3201 tcg_temp_free_i32(t2
);
3202 tcg_temp_free_i32(t3
);
3207 TCGv_i32 t2
= tcg_temp_new_i32();
3208 TCGv_i32 t3
= tcg_temp_new_i32();
3209 tcg_gen_trunc_tl_i32(t2
, t0
);
3210 tcg_gen_trunc_tl_i32(t3
, t1
);
3211 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3212 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3213 tcg_temp_free_i32(t2
);
3214 tcg_temp_free_i32(t3
);
3219 TCGv_i32 t2
= tcg_temp_new_i32();
3220 TCGv_i32 t3
= tcg_temp_new_i32();
3221 tcg_gen_trunc_tl_i32(t2
, t0
);
3222 tcg_gen_trunc_tl_i32(t3
, t1
);
3223 tcg_gen_mul_i32(t2
, t2
, t3
);
3224 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3225 tcg_temp_free_i32(t2
);
3226 tcg_temp_free_i32(t3
);
3231 TCGv_i32 t2
= tcg_temp_new_i32();
3232 TCGv_i32 t3
= tcg_temp_new_i32();
3233 tcg_gen_trunc_tl_i32(t2
, t0
);
3234 tcg_gen_trunc_tl_i32(t3
, t1
);
3235 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3236 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3237 tcg_temp_free_i32(t2
);
3238 tcg_temp_free_i32(t3
);
3241 #if defined(TARGET_MIPS64)
3244 TCGv t2
= tcg_temp_new();
3245 TCGv t3
= tcg_temp_new();
3246 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3247 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3248 tcg_gen_and_tl(t2
, t2
, t3
);
3249 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3250 tcg_gen_or_tl(t2
, t2
, t3
);
3251 tcg_gen_movi_tl(t3
, 0);
3252 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3253 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3260 TCGv t2
= tcg_temp_new();
3261 TCGv t3
= tcg_temp_new();
3262 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3263 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3264 tcg_gen_and_tl(t2
, t2
, t3
);
3265 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3266 tcg_gen_or_tl(t2
, t2
, t3
);
3267 tcg_gen_movi_tl(t3
, 0);
3268 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3269 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3276 TCGv t2
= tcg_const_tl(0);
3277 TCGv t3
= tcg_const_tl(1);
3278 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3279 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3286 TCGv t2
= tcg_const_tl(0);
3287 TCGv t3
= tcg_const_tl(1);
3288 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3289 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3295 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3299 TCGv t2
= tcg_temp_new();
3300 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3305 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3309 TCGv t2
= tcg_temp_new();
3310 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3316 MIPS_INVAL("r6 mul/div");
3317 gen_reserved_instruction(ctx
);
3325 #if defined(TARGET_MIPS64)
3326 static void gen_div1_tx79(DisasContext
*ctx
, uint32_t opc
, int rs
, int rt
)
3330 t0
= tcg_temp_new();
3331 t1
= tcg_temp_new();
3333 gen_load_gpr(t0
, rs
);
3334 gen_load_gpr(t1
, rt
);
3339 TCGv t2
= tcg_temp_new();
3340 TCGv t3
= tcg_temp_new();
3341 tcg_gen_ext32s_tl(t0
, t0
);
3342 tcg_gen_ext32s_tl(t1
, t1
);
3343 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3344 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3345 tcg_gen_and_tl(t2
, t2
, t3
);
3346 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3347 tcg_gen_or_tl(t2
, t2
, t3
);
3348 tcg_gen_movi_tl(t3
, 0);
3349 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3350 tcg_gen_div_tl(cpu_LO
[1], t0
, t1
);
3351 tcg_gen_rem_tl(cpu_HI
[1], t0
, t1
);
3352 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
3353 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
3360 TCGv t2
= tcg_const_tl(0);
3361 TCGv t3
= tcg_const_tl(1);
3362 tcg_gen_ext32u_tl(t0
, t0
);
3363 tcg_gen_ext32u_tl(t1
, t1
);
3364 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3365 tcg_gen_divu_tl(cpu_LO
[1], t0
, t1
);
3366 tcg_gen_remu_tl(cpu_HI
[1], t0
, t1
);
3367 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
3368 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
3374 MIPS_INVAL("div1 TX79");
3375 gen_reserved_instruction(ctx
);
3384 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3385 int acc
, int rs
, int rt
)
3389 t0
= tcg_temp_new();
3390 t1
= tcg_temp_new();
3392 gen_load_gpr(t0
, rs
);
3393 gen_load_gpr(t1
, rt
);
3402 TCGv t2
= tcg_temp_new();
3403 TCGv t3
= tcg_temp_new();
3404 tcg_gen_ext32s_tl(t0
, t0
);
3405 tcg_gen_ext32s_tl(t1
, t1
);
3406 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3407 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3408 tcg_gen_and_tl(t2
, t2
, t3
);
3409 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3410 tcg_gen_or_tl(t2
, t2
, t3
);
3411 tcg_gen_movi_tl(t3
, 0);
3412 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3413 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3414 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3415 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3416 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3423 TCGv t2
= tcg_const_tl(0);
3424 TCGv t3
= tcg_const_tl(1);
3425 tcg_gen_ext32u_tl(t0
, t0
);
3426 tcg_gen_ext32u_tl(t1
, t1
);
3427 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3428 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3429 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3430 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3431 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3438 TCGv_i32 t2
= tcg_temp_new_i32();
3439 TCGv_i32 t3
= tcg_temp_new_i32();
3440 tcg_gen_trunc_tl_i32(t2
, t0
);
3441 tcg_gen_trunc_tl_i32(t3
, t1
);
3442 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3443 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3444 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3445 tcg_temp_free_i32(t2
);
3446 tcg_temp_free_i32(t3
);
3451 TCGv_i32 t2
= tcg_temp_new_i32();
3452 TCGv_i32 t3
= tcg_temp_new_i32();
3453 tcg_gen_trunc_tl_i32(t2
, t0
);
3454 tcg_gen_trunc_tl_i32(t3
, t1
);
3455 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3456 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3457 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3458 tcg_temp_free_i32(t2
);
3459 tcg_temp_free_i32(t3
);
3462 #if defined(TARGET_MIPS64)
3465 TCGv t2
= tcg_temp_new();
3466 TCGv t3
= tcg_temp_new();
3467 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3468 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3469 tcg_gen_and_tl(t2
, t2
, t3
);
3470 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3471 tcg_gen_or_tl(t2
, t2
, t3
);
3472 tcg_gen_movi_tl(t3
, 0);
3473 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3474 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3475 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3482 TCGv t2
= tcg_const_tl(0);
3483 TCGv t3
= tcg_const_tl(1);
3484 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3485 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3486 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3492 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3495 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3500 TCGv_i64 t2
= tcg_temp_new_i64();
3501 TCGv_i64 t3
= tcg_temp_new_i64();
3503 tcg_gen_ext_tl_i64(t2
, t0
);
3504 tcg_gen_ext_tl_i64(t3
, t1
);
3505 tcg_gen_mul_i64(t2
, t2
, t3
);
3506 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3507 tcg_gen_add_i64(t2
, t2
, t3
);
3508 tcg_temp_free_i64(t3
);
3509 gen_move_low32(cpu_LO
[acc
], t2
);
3510 gen_move_high32(cpu_HI
[acc
], t2
);
3511 tcg_temp_free_i64(t2
);
3516 TCGv_i64 t2
= tcg_temp_new_i64();
3517 TCGv_i64 t3
= tcg_temp_new_i64();
3519 tcg_gen_ext32u_tl(t0
, t0
);
3520 tcg_gen_ext32u_tl(t1
, t1
);
3521 tcg_gen_extu_tl_i64(t2
, t0
);
3522 tcg_gen_extu_tl_i64(t3
, t1
);
3523 tcg_gen_mul_i64(t2
, t2
, t3
);
3524 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3525 tcg_gen_add_i64(t2
, t2
, t3
);
3526 tcg_temp_free_i64(t3
);
3527 gen_move_low32(cpu_LO
[acc
], t2
);
3528 gen_move_high32(cpu_HI
[acc
], t2
);
3529 tcg_temp_free_i64(t2
);
3534 TCGv_i64 t2
= tcg_temp_new_i64();
3535 TCGv_i64 t3
= tcg_temp_new_i64();
3537 tcg_gen_ext_tl_i64(t2
, t0
);
3538 tcg_gen_ext_tl_i64(t3
, t1
);
3539 tcg_gen_mul_i64(t2
, t2
, t3
);
3540 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3541 tcg_gen_sub_i64(t2
, t3
, t2
);
3542 tcg_temp_free_i64(t3
);
3543 gen_move_low32(cpu_LO
[acc
], t2
);
3544 gen_move_high32(cpu_HI
[acc
], t2
);
3545 tcg_temp_free_i64(t2
);
3550 TCGv_i64 t2
= tcg_temp_new_i64();
3551 TCGv_i64 t3
= tcg_temp_new_i64();
3553 tcg_gen_ext32u_tl(t0
, t0
);
3554 tcg_gen_ext32u_tl(t1
, t1
);
3555 tcg_gen_extu_tl_i64(t2
, t0
);
3556 tcg_gen_extu_tl_i64(t3
, t1
);
3557 tcg_gen_mul_i64(t2
, t2
, t3
);
3558 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3559 tcg_gen_sub_i64(t2
, t3
, t2
);
3560 tcg_temp_free_i64(t3
);
3561 gen_move_low32(cpu_LO
[acc
], t2
);
3562 gen_move_high32(cpu_HI
[acc
], t2
);
3563 tcg_temp_free_i64(t2
);
3567 MIPS_INVAL("mul/div");
3568 gen_reserved_instruction(ctx
);
3577 * These MULT[U] and MADD[U] instructions implemented in for example
3578 * the Toshiba/Sony R5900 and the Toshiba TX19, TX39 and TX79 core
3579 * architectures are special three-operand variants with the syntax
3581 * MULT[U][1] rd, rs, rt
3585 * (rd, LO, HI) <- rs * rt
3589 * MADD[U][1] rd, rs, rt
3593 * (rd, LO, HI) <- (LO, HI) + rs * rt
3595 * where the low-order 32-bits of the result is placed into both the
3596 * GPR rd and the special register LO. The high-order 32-bits of the
3597 * result is placed into the special register HI.
3599 * If the GPR rd is omitted in assembly language, it is taken to be 0,
3600 * which is the zero register that always reads as 0.
3602 static void gen_mul_txx9(DisasContext
*ctx
, uint32_t opc
,
3603 int rd
, int rs
, int rt
)
3605 TCGv t0
= tcg_temp_new();
3606 TCGv t1
= tcg_temp_new();
3609 gen_load_gpr(t0
, rs
);
3610 gen_load_gpr(t1
, rt
);
3618 TCGv_i32 t2
= tcg_temp_new_i32();
3619 TCGv_i32 t3
= tcg_temp_new_i32();
3620 tcg_gen_trunc_tl_i32(t2
, t0
);
3621 tcg_gen_trunc_tl_i32(t3
, t1
);
3622 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3624 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3626 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3627 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3628 tcg_temp_free_i32(t2
);
3629 tcg_temp_free_i32(t3
);
3632 case MMI_OPC_MULTU1
:
3637 TCGv_i32 t2
= tcg_temp_new_i32();
3638 TCGv_i32 t3
= tcg_temp_new_i32();
3639 tcg_gen_trunc_tl_i32(t2
, t0
);
3640 tcg_gen_trunc_tl_i32(t3
, t1
);
3641 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3643 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3645 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3646 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3647 tcg_temp_free_i32(t2
);
3648 tcg_temp_free_i32(t3
);
3656 TCGv_i64 t2
= tcg_temp_new_i64();
3657 TCGv_i64 t3
= tcg_temp_new_i64();
3659 tcg_gen_ext_tl_i64(t2
, t0
);
3660 tcg_gen_ext_tl_i64(t3
, t1
);
3661 tcg_gen_mul_i64(t2
, t2
, t3
);
3662 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3663 tcg_gen_add_i64(t2
, t2
, t3
);
3664 tcg_temp_free_i64(t3
);
3665 gen_move_low32(cpu_LO
[acc
], t2
);
3666 gen_move_high32(cpu_HI
[acc
], t2
);
3668 gen_move_low32(cpu_gpr
[rd
], t2
);
3670 tcg_temp_free_i64(t2
);
3673 case MMI_OPC_MADDU1
:
3678 TCGv_i64 t2
= tcg_temp_new_i64();
3679 TCGv_i64 t3
= tcg_temp_new_i64();
3681 tcg_gen_ext32u_tl(t0
, t0
);
3682 tcg_gen_ext32u_tl(t1
, t1
);
3683 tcg_gen_extu_tl_i64(t2
, t0
);
3684 tcg_gen_extu_tl_i64(t3
, t1
);
3685 tcg_gen_mul_i64(t2
, t2
, t3
);
3686 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3687 tcg_gen_add_i64(t2
, t2
, t3
);
3688 tcg_temp_free_i64(t3
);
3689 gen_move_low32(cpu_LO
[acc
], t2
);
3690 gen_move_high32(cpu_HI
[acc
], t2
);
3692 gen_move_low32(cpu_gpr
[rd
], t2
);
3694 tcg_temp_free_i64(t2
);
3698 MIPS_INVAL("mul/madd TXx9");
3699 gen_reserved_instruction(ctx
);
3708 static void gen_cl(DisasContext
*ctx
, uint32_t opc
,
3718 gen_load_gpr(t0
, rs
);
3723 #if defined(TARGET_MIPS64)
3727 tcg_gen_not_tl(t0
, t0
);
3736 tcg_gen_ext32u_tl(t0
, t0
);
3737 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
3738 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
3740 #if defined(TARGET_MIPS64)
3745 tcg_gen_clzi_i64(t0
, t0
, 64);
3751 /* Godson integer instructions */
3752 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3753 int rd
, int rs
, int rt
)
3762 t0
= tcg_temp_new();
3763 t1
= tcg_temp_new();
3764 gen_load_gpr(t0
, rs
);
3765 gen_load_gpr(t1
, rt
);
3770 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3771 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3773 case OPC_MULTU_G_2E
:
3774 case OPC_MULTU_G_2F
:
3775 tcg_gen_ext32u_tl(t0
, t0
);
3776 tcg_gen_ext32u_tl(t1
, t1
);
3777 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3778 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3783 TCGLabel
*l1
= gen_new_label();
3784 TCGLabel
*l2
= gen_new_label();
3785 TCGLabel
*l3
= gen_new_label();
3786 tcg_gen_ext32s_tl(t0
, t0
);
3787 tcg_gen_ext32s_tl(t1
, t1
);
3788 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3789 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3792 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3793 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3794 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3797 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3798 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3805 TCGLabel
*l1
= gen_new_label();
3806 TCGLabel
*l2
= gen_new_label();
3807 tcg_gen_ext32u_tl(t0
, t0
);
3808 tcg_gen_ext32u_tl(t1
, t1
);
3809 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3810 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3813 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3814 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3821 TCGLabel
*l1
= gen_new_label();
3822 TCGLabel
*l2
= gen_new_label();
3823 TCGLabel
*l3
= gen_new_label();
3824 tcg_gen_ext32u_tl(t0
, t0
);
3825 tcg_gen_ext32u_tl(t1
, t1
);
3826 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3827 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3828 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3830 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3833 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3834 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3841 TCGLabel
*l1
= gen_new_label();
3842 TCGLabel
*l2
= gen_new_label();
3843 tcg_gen_ext32u_tl(t0
, t0
);
3844 tcg_gen_ext32u_tl(t1
, t1
);
3845 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3846 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3849 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3850 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3854 #if defined(TARGET_MIPS64)
3855 case OPC_DMULT_G_2E
:
3856 case OPC_DMULT_G_2F
:
3857 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3859 case OPC_DMULTU_G_2E
:
3860 case OPC_DMULTU_G_2F
:
3861 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3866 TCGLabel
*l1
= gen_new_label();
3867 TCGLabel
*l2
= gen_new_label();
3868 TCGLabel
*l3
= gen_new_label();
3869 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3870 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3873 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3874 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3875 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3878 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3882 case OPC_DDIVU_G_2E
:
3883 case OPC_DDIVU_G_2F
:
3885 TCGLabel
*l1
= gen_new_label();
3886 TCGLabel
*l2
= gen_new_label();
3887 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3888 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3891 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3898 TCGLabel
*l1
= gen_new_label();
3899 TCGLabel
*l2
= gen_new_label();
3900 TCGLabel
*l3
= gen_new_label();
3901 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3902 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3903 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3905 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3908 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3912 case OPC_DMODU_G_2E
:
3913 case OPC_DMODU_G_2F
:
3915 TCGLabel
*l1
= gen_new_label();
3916 TCGLabel
*l2
= gen_new_label();
3917 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3918 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3921 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3932 /* Loongson multimedia instructions */
3933 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3935 uint32_t opc
, shift_max
;
3939 opc
= MASK_LMMI(ctx
->opcode
);
3940 check_cp1_enabled(ctx
);
3942 t0
= tcg_temp_new_i64();
3943 t1
= tcg_temp_new_i64();
3944 gen_load_fpr64(ctx
, t0
, rs
);
3945 gen_load_fpr64(ctx
, t1
, rt
);
3949 gen_helper_paddsh(t0
, t0
, t1
);
3952 gen_helper_paddush(t0
, t0
, t1
);
3955 gen_helper_paddh(t0
, t0
, t1
);
3958 gen_helper_paddw(t0
, t0
, t1
);
3961 gen_helper_paddsb(t0
, t0
, t1
);
3964 gen_helper_paddusb(t0
, t0
, t1
);
3967 gen_helper_paddb(t0
, t0
, t1
);
3971 gen_helper_psubsh(t0
, t0
, t1
);
3974 gen_helper_psubush(t0
, t0
, t1
);
3977 gen_helper_psubh(t0
, t0
, t1
);
3980 gen_helper_psubw(t0
, t0
, t1
);
3983 gen_helper_psubsb(t0
, t0
, t1
);
3986 gen_helper_psubusb(t0
, t0
, t1
);
3989 gen_helper_psubb(t0
, t0
, t1
);
3993 gen_helper_pshufh(t0
, t0
, t1
);
3996 gen_helper_packsswh(t0
, t0
, t1
);
3999 gen_helper_packsshb(t0
, t0
, t1
);
4002 gen_helper_packushb(t0
, t0
, t1
);
4006 gen_helper_punpcklhw(t0
, t0
, t1
);
4009 gen_helper_punpckhhw(t0
, t0
, t1
);
4012 gen_helper_punpcklbh(t0
, t0
, t1
);
4015 gen_helper_punpckhbh(t0
, t0
, t1
);
4018 gen_helper_punpcklwd(t0
, t0
, t1
);
4021 gen_helper_punpckhwd(t0
, t0
, t1
);
4025 gen_helper_pavgh(t0
, t0
, t1
);
4028 gen_helper_pavgb(t0
, t0
, t1
);
4031 gen_helper_pmaxsh(t0
, t0
, t1
);
4034 gen_helper_pminsh(t0
, t0
, t1
);
4037 gen_helper_pmaxub(t0
, t0
, t1
);
4040 gen_helper_pminub(t0
, t0
, t1
);
4044 gen_helper_pcmpeqw(t0
, t0
, t1
);
4047 gen_helper_pcmpgtw(t0
, t0
, t1
);
4050 gen_helper_pcmpeqh(t0
, t0
, t1
);
4053 gen_helper_pcmpgth(t0
, t0
, t1
);
4056 gen_helper_pcmpeqb(t0
, t0
, t1
);
4059 gen_helper_pcmpgtb(t0
, t0
, t1
);
4063 gen_helper_psllw(t0
, t0
, t1
);
4066 gen_helper_psllh(t0
, t0
, t1
);
4069 gen_helper_psrlw(t0
, t0
, t1
);
4072 gen_helper_psrlh(t0
, t0
, t1
);
4075 gen_helper_psraw(t0
, t0
, t1
);
4078 gen_helper_psrah(t0
, t0
, t1
);
4082 gen_helper_pmullh(t0
, t0
, t1
);
4085 gen_helper_pmulhh(t0
, t0
, t1
);
4088 gen_helper_pmulhuh(t0
, t0
, t1
);
4091 gen_helper_pmaddhw(t0
, t0
, t1
);
4095 gen_helper_pasubub(t0
, t0
, t1
);
4098 gen_helper_biadd(t0
, t0
);
4101 gen_helper_pmovmskb(t0
, t0
);
4105 tcg_gen_add_i64(t0
, t0
, t1
);
4108 tcg_gen_sub_i64(t0
, t0
, t1
);
4111 tcg_gen_xor_i64(t0
, t0
, t1
);
4114 tcg_gen_nor_i64(t0
, t0
, t1
);
4117 tcg_gen_and_i64(t0
, t0
, t1
);
4120 tcg_gen_or_i64(t0
, t0
, t1
);
4124 tcg_gen_andc_i64(t0
, t1
, t0
);
4128 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
4131 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
4134 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
4137 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
4141 tcg_gen_andi_i64(t1
, t1
, 3);
4142 tcg_gen_shli_i64(t1
, t1
, 4);
4143 tcg_gen_shr_i64(t0
, t0
, t1
);
4144 tcg_gen_ext16u_i64(t0
, t0
);
4148 tcg_gen_add_i64(t0
, t0
, t1
);
4149 tcg_gen_ext32s_i64(t0
, t0
);
4152 tcg_gen_sub_i64(t0
, t0
, t1
);
4153 tcg_gen_ext32s_i64(t0
, t0
);
4175 /* Make sure shift count isn't TCG undefined behaviour. */
4176 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
4181 tcg_gen_shl_i64(t0
, t0
, t1
);
4186 * Since SRA is UndefinedResult without sign-extended inputs,
4187 * we can treat SRA and DSRA the same.
4189 tcg_gen_sar_i64(t0
, t0
, t1
);
4192 /* We want to shift in zeros for SRL; zero-extend first. */
4193 tcg_gen_ext32u_i64(t0
, t0
);
4196 tcg_gen_shr_i64(t0
, t0
, t1
);
4200 if (shift_max
== 32) {
4201 tcg_gen_ext32s_i64(t0
, t0
);
4204 /* Shifts larger than MAX produce zero. */
4205 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
4206 tcg_gen_neg_i64(t1
, t1
);
4207 tcg_gen_and_i64(t0
, t0
, t1
);
4213 TCGv_i64 t2
= tcg_temp_new_i64();
4214 TCGLabel
*lab
= gen_new_label();
4216 tcg_gen_mov_i64(t2
, t0
);
4217 tcg_gen_add_i64(t0
, t1
, t2
);
4218 if (opc
== OPC_ADD_CP2
) {
4219 tcg_gen_ext32s_i64(t0
, t0
);
4221 tcg_gen_xor_i64(t1
, t1
, t2
);
4222 tcg_gen_xor_i64(t2
, t2
, t0
);
4223 tcg_gen_andc_i64(t1
, t2
, t1
);
4224 tcg_temp_free_i64(t2
);
4225 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4226 generate_exception(ctx
, EXCP_OVERFLOW
);
4234 TCGv_i64 t2
= tcg_temp_new_i64();
4235 TCGLabel
*lab
= gen_new_label();
4237 tcg_gen_mov_i64(t2
, t0
);
4238 tcg_gen_sub_i64(t0
, t1
, t2
);
4239 if (opc
== OPC_SUB_CP2
) {
4240 tcg_gen_ext32s_i64(t0
, t0
);
4242 tcg_gen_xor_i64(t1
, t1
, t2
);
4243 tcg_gen_xor_i64(t2
, t2
, t0
);
4244 tcg_gen_and_i64(t1
, t1
, t2
);
4245 tcg_temp_free_i64(t2
);
4246 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4247 generate_exception(ctx
, EXCP_OVERFLOW
);
4253 tcg_gen_ext32u_i64(t0
, t0
);
4254 tcg_gen_ext32u_i64(t1
, t1
);
4255 tcg_gen_mul_i64(t0
, t0
, t1
);
4264 cond
= TCG_COND_LTU
;
4272 cond
= TCG_COND_LEU
;
4279 int cc
= (ctx
->opcode
>> 8) & 0x7;
4280 TCGv_i64 t64
= tcg_temp_new_i64();
4281 TCGv_i32 t32
= tcg_temp_new_i32();
4283 tcg_gen_setcond_i64(cond
, t64
, t0
, t1
);
4284 tcg_gen_extrl_i64_i32(t32
, t64
);
4285 tcg_gen_deposit_i32(fpu_fcr31
, fpu_fcr31
, t32
,
4288 tcg_temp_free_i32(t32
);
4289 tcg_temp_free_i64(t64
);
4294 MIPS_INVAL("loongson_cp2");
4295 gen_reserved_instruction(ctx
);
4299 gen_store_fpr64(ctx
, t0
, rd
);
4302 tcg_temp_free_i64(t0
);
4303 tcg_temp_free_i64(t1
);
4306 static void gen_loongson_lswc2(DisasContext
*ctx
, int rt
,
4311 #if defined(TARGET_MIPS64)
4312 int lsq_rt1
= ctx
->opcode
& 0x1f;
4313 int lsq_offset
= sextract32(ctx
->opcode
, 6, 9) << 4;
4315 int shf_offset
= sextract32(ctx
->opcode
, 6, 8);
4317 t0
= tcg_temp_new();
4319 switch (MASK_LOONGSON_GSLSQ(ctx
->opcode
)) {
4320 #if defined(TARGET_MIPS64)
4322 t1
= tcg_temp_new();
4323 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
);
4324 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4325 ctx
->default_tcg_memop_mask
);
4326 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
+ 8);
4327 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4328 ctx
->default_tcg_memop_mask
);
4329 gen_store_gpr(t1
, rt
);
4330 gen_store_gpr(t0
, lsq_rt1
);
4334 check_cp1_enabled(ctx
);
4335 t1
= tcg_temp_new();
4336 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
);
4337 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4338 ctx
->default_tcg_memop_mask
);
4339 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
+ 8);
4340 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4341 ctx
->default_tcg_memop_mask
);
4342 gen_store_fpr64(ctx
, t1
, rt
);
4343 gen_store_fpr64(ctx
, t0
, lsq_rt1
);
4347 t1
= tcg_temp_new();
4348 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
);
4349 gen_load_gpr(t1
, rt
);
4350 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4351 ctx
->default_tcg_memop_mask
);
4352 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
+ 8);
4353 gen_load_gpr(t1
, lsq_rt1
);
4354 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4355 ctx
->default_tcg_memop_mask
);
4359 check_cp1_enabled(ctx
);
4360 t1
= tcg_temp_new();
4361 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
);
4362 gen_load_fpr64(ctx
, t1
, rt
);
4363 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4364 ctx
->default_tcg_memop_mask
);
4365 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
+ 8);
4366 gen_load_fpr64(ctx
, t1
, lsq_rt1
);
4367 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4368 ctx
->default_tcg_memop_mask
);
4373 switch (MASK_LOONGSON_GSSHFLS(ctx
->opcode
)) {
4375 check_cp1_enabled(ctx
);
4376 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4377 t1
= tcg_temp_new();
4378 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
4379 tcg_gen_andi_tl(t1
, t0
, 3);
4380 if (!cpu_is_bigendian(ctx
)) {
4381 tcg_gen_xori_tl(t1
, t1
, 3);
4383 tcg_gen_shli_tl(t1
, t1
, 3);
4384 tcg_gen_andi_tl(t0
, t0
, ~3);
4385 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
4386 tcg_gen_shl_tl(t0
, t0
, t1
);
4387 t2
= tcg_const_tl(-1);
4388 tcg_gen_shl_tl(t2
, t2
, t1
);
4389 fp0
= tcg_temp_new_i32();
4390 gen_load_fpr32(ctx
, fp0
, rt
);
4391 tcg_gen_ext_i32_tl(t1
, fp0
);
4392 tcg_gen_andc_tl(t1
, t1
, t2
);
4394 tcg_gen_or_tl(t0
, t0
, t1
);
4396 #if defined(TARGET_MIPS64)
4397 tcg_gen_extrl_i64_i32(fp0
, t0
);
4399 tcg_gen_ext32s_tl(fp0
, t0
);
4401 gen_store_fpr32(ctx
, fp0
, rt
);
4402 tcg_temp_free_i32(fp0
);
4405 check_cp1_enabled(ctx
);
4406 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4407 t1
= tcg_temp_new();
4408 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
4409 tcg_gen_andi_tl(t1
, t0
, 3);
4410 if (cpu_is_bigendian(ctx
)) {
4411 tcg_gen_xori_tl(t1
, t1
, 3);
4413 tcg_gen_shli_tl(t1
, t1
, 3);
4414 tcg_gen_andi_tl(t0
, t0
, ~3);
4415 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
4416 tcg_gen_shr_tl(t0
, t0
, t1
);
4417 tcg_gen_xori_tl(t1
, t1
, 31);
4418 t2
= tcg_const_tl(0xfffffffeull
);
4419 tcg_gen_shl_tl(t2
, t2
, t1
);
4420 fp0
= tcg_temp_new_i32();
4421 gen_load_fpr32(ctx
, fp0
, rt
);
4422 tcg_gen_ext_i32_tl(t1
, fp0
);
4423 tcg_gen_and_tl(t1
, t1
, t2
);
4425 tcg_gen_or_tl(t0
, t0
, t1
);
4427 #if defined(TARGET_MIPS64)
4428 tcg_gen_extrl_i64_i32(fp0
, t0
);
4430 tcg_gen_ext32s_tl(fp0
, t0
);
4432 gen_store_fpr32(ctx
, fp0
, rt
);
4433 tcg_temp_free_i32(fp0
);
4435 #if defined(TARGET_MIPS64)
4437 check_cp1_enabled(ctx
);
4438 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4439 t1
= tcg_temp_new();
4440 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
4441 tcg_gen_andi_tl(t1
, t0
, 7);
4442 if (!cpu_is_bigendian(ctx
)) {
4443 tcg_gen_xori_tl(t1
, t1
, 7);
4445 tcg_gen_shli_tl(t1
, t1
, 3);
4446 tcg_gen_andi_tl(t0
, t0
, ~7);
4447 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
4448 tcg_gen_shl_tl(t0
, t0
, t1
);
4449 t2
= tcg_const_tl(-1);
4450 tcg_gen_shl_tl(t2
, t2
, t1
);
4451 gen_load_fpr64(ctx
, t1
, rt
);
4452 tcg_gen_andc_tl(t1
, t1
, t2
);
4454 tcg_gen_or_tl(t0
, t0
, t1
);
4456 gen_store_fpr64(ctx
, t0
, rt
);
4459 check_cp1_enabled(ctx
);
4460 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4461 t1
= tcg_temp_new();
4462 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
4463 tcg_gen_andi_tl(t1
, t0
, 7);
4464 if (cpu_is_bigendian(ctx
)) {
4465 tcg_gen_xori_tl(t1
, t1
, 7);
4467 tcg_gen_shli_tl(t1
, t1
, 3);
4468 tcg_gen_andi_tl(t0
, t0
, ~7);
4469 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
4470 tcg_gen_shr_tl(t0
, t0
, t1
);
4471 tcg_gen_xori_tl(t1
, t1
, 63);
4472 t2
= tcg_const_tl(0xfffffffffffffffeull
);
4473 tcg_gen_shl_tl(t2
, t2
, t1
);
4474 gen_load_fpr64(ctx
, t1
, rt
);
4475 tcg_gen_and_tl(t1
, t1
, t2
);
4477 tcg_gen_or_tl(t0
, t0
, t1
);
4479 gen_store_fpr64(ctx
, t0
, rt
);
4483 MIPS_INVAL("loongson_gsshfl");
4484 gen_reserved_instruction(ctx
);
4489 switch (MASK_LOONGSON_GSSHFLS(ctx
->opcode
)) {
4491 check_cp1_enabled(ctx
);
4492 t1
= tcg_temp_new();
4493 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4494 fp0
= tcg_temp_new_i32();
4495 gen_load_fpr32(ctx
, fp0
, rt
);
4496 tcg_gen_ext_i32_tl(t1
, fp0
);
4497 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
4498 tcg_temp_free_i32(fp0
);
4502 check_cp1_enabled(ctx
);
4503 t1
= tcg_temp_new();
4504 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4505 fp0
= tcg_temp_new_i32();
4506 gen_load_fpr32(ctx
, fp0
, rt
);
4507 tcg_gen_ext_i32_tl(t1
, fp0
);
4508 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
4509 tcg_temp_free_i32(fp0
);
4512 #if defined(TARGET_MIPS64)
4514 check_cp1_enabled(ctx
);
4515 t1
= tcg_temp_new();
4516 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4517 gen_load_fpr64(ctx
, t1
, rt
);
4518 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
4522 check_cp1_enabled(ctx
);
4523 t1
= tcg_temp_new();
4524 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4525 gen_load_fpr64(ctx
, t1
, rt
);
4526 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
4531 MIPS_INVAL("loongson_gsshfs");
4532 gen_reserved_instruction(ctx
);
4537 MIPS_INVAL("loongson_gslsq");
4538 gen_reserved_instruction(ctx
);
4544 /* Loongson EXT LDC2/SDC2 */
4545 static void gen_loongson_lsdc2(DisasContext
*ctx
, int rt
,
4548 int offset
= sextract32(ctx
->opcode
, 3, 8);
4549 uint32_t opc
= MASK_LOONGSON_LSDC2(ctx
->opcode
);
4553 /* Pre-conditions */
4559 /* prefetch, implement as NOP */
4570 #if defined(TARGET_MIPS64)
4573 check_cp1_enabled(ctx
);
4574 /* prefetch, implement as NOP */
4580 #if defined(TARGET_MIPS64)
4583 check_cp1_enabled(ctx
);
4586 MIPS_INVAL("loongson_lsdc2");
4587 gen_reserved_instruction(ctx
);
4592 t0
= tcg_temp_new();
4594 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
4595 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
4599 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
4600 gen_store_gpr(t0
, rt
);
4603 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
4604 ctx
->default_tcg_memop_mask
);
4605 gen_store_gpr(t0
, rt
);
4608 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
4610 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
4612 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
4613 ctx
->default_tcg_memop_mask
);
4614 gen_store_gpr(t0
, rt
);
4616 #if defined(TARGET_MIPS64)
4618 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
4620 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
4622 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4623 ctx
->default_tcg_memop_mask
);
4624 gen_store_gpr(t0
, rt
);
4628 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
4630 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
4632 fp0
= tcg_temp_new_i32();
4633 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
4634 ctx
->default_tcg_memop_mask
);
4635 gen_store_fpr32(ctx
, fp0
, rt
);
4636 tcg_temp_free_i32(fp0
);
4638 #if defined(TARGET_MIPS64)
4640 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
4642 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
4644 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4645 ctx
->default_tcg_memop_mask
);
4646 gen_store_fpr64(ctx
, t0
, rt
);
4650 t1
= tcg_temp_new();
4651 gen_load_gpr(t1
, rt
);
4652 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_SB
);
4656 t1
= tcg_temp_new();
4657 gen_load_gpr(t1
, rt
);
4658 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
4659 ctx
->default_tcg_memop_mask
);
4663 t1
= tcg_temp_new();
4664 gen_load_gpr(t1
, rt
);
4665 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
|
4666 ctx
->default_tcg_memop_mask
);
4669 #if defined(TARGET_MIPS64)
4671 t1
= tcg_temp_new();
4672 gen_load_gpr(t1
, rt
);
4673 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4674 ctx
->default_tcg_memop_mask
);
4679 fp0
= tcg_temp_new_i32();
4680 gen_load_fpr32(ctx
, fp0
, rt
);
4681 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
4682 ctx
->default_tcg_memop_mask
);
4683 tcg_temp_free_i32(fp0
);
4685 #if defined(TARGET_MIPS64)
4687 t1
= tcg_temp_new();
4688 gen_load_fpr64(ctx
, t1
, rt
);
4689 tcg_gen_qemu_st_i64(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4690 ctx
->default_tcg_memop_mask
);
4702 static void gen_trap(DisasContext
*ctx
, uint32_t opc
,
4703 int rs
, int rt
, int16_t imm
, int code
)
4706 TCGv t0
= tcg_temp_new();
4707 TCGv t1
= tcg_temp_new();
4710 /* Load needed operands */
4718 /* Compare two registers */
4720 gen_load_gpr(t0
, rs
);
4721 gen_load_gpr(t1
, rt
);
4731 /* Compare register to immediate */
4732 if (rs
!= 0 || imm
!= 0) {
4733 gen_load_gpr(t0
, rs
);
4734 tcg_gen_movi_tl(t1
, (int32_t)imm
);
4741 case OPC_TEQ
: /* rs == rs */
4742 case OPC_TEQI
: /* r0 == 0 */
4743 case OPC_TGE
: /* rs >= rs */
4744 case OPC_TGEI
: /* r0 >= 0 */
4745 case OPC_TGEU
: /* rs >= rs unsigned */
4746 case OPC_TGEIU
: /* r0 >= 0 unsigned */
4748 #ifdef CONFIG_USER_ONLY
4749 /* Pass the break code along to cpu_loop. */
4750 tcg_gen_st_i32(tcg_constant_i32(code
), cpu_env
,
4751 offsetof(CPUMIPSState
, error_code
));
4753 generate_exception_end(ctx
, EXCP_TRAP
);
4755 case OPC_TLT
: /* rs < rs */
4756 case OPC_TLTI
: /* r0 < 0 */
4757 case OPC_TLTU
: /* rs < rs unsigned */
4758 case OPC_TLTIU
: /* r0 < 0 unsigned */
4759 case OPC_TNE
: /* rs != rs */
4760 case OPC_TNEI
: /* r0 != 0 */
4761 /* Never trap: treat as NOP. */
4765 TCGLabel
*l1
= gen_new_label();
4770 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
4774 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
4778 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
4782 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
4786 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
4790 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
4793 #ifdef CONFIG_USER_ONLY
4794 /* Pass the break code along to cpu_loop. */
4795 tcg_gen_st_i32(tcg_constant_i32(code
), cpu_env
,
4796 offsetof(CPUMIPSState
, error_code
));
4798 /* Like save_cpu_state, only don't update saved values. */
4799 if (ctx
->base
.pc_next
!= ctx
->saved_pc
) {
4800 gen_save_pc(ctx
->base
.pc_next
);
4802 if (ctx
->hflags
!= ctx
->saved_hflags
) {
4803 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
4805 generate_exception(ctx
, EXCP_TRAP
);
4812 static void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
4814 if (translator_use_goto_tb(&ctx
->base
, dest
)) {
4817 tcg_gen_exit_tb(ctx
->base
.tb
, n
);
4820 tcg_gen_lookup_and_goto_ptr();
4824 /* Branches (before delay slot) */
4825 static void gen_compute_branch(DisasContext
*ctx
, uint32_t opc
,
4827 int rs
, int rt
, int32_t offset
,
4830 target_ulong btgt
= -1;
4832 int bcond_compute
= 0;
4833 TCGv t0
= tcg_temp_new();
4834 TCGv t1
= tcg_temp_new();
4836 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
4837 #ifdef MIPS_DEBUG_DISAS
4838 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
4839 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
4841 gen_reserved_instruction(ctx
);
4845 /* Load needed operands */
4851 /* Compare two registers */
4853 gen_load_gpr(t0
, rs
);
4854 gen_load_gpr(t1
, rt
);
4857 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
4871 /* Compare to zero */
4873 gen_load_gpr(t0
, rs
);
4876 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
4879 #if defined(TARGET_MIPS64)
4881 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
4883 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
4886 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
4891 /* Jump to immediate */
4892 int jal_mask
= ctx
->hflags
& MIPS_HFLAG_M16
? 0xF8000000
4894 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & jal_mask
)
4899 /* Jump to immediate */
4900 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
4905 /* Jump to register */
4906 if (offset
!= 0 && offset
!= 16) {
4908 * Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
4909 * others are reserved.
4911 MIPS_INVAL("jump hint");
4912 gen_reserved_instruction(ctx
);
4915 gen_load_gpr(btarget
, rs
);
4918 MIPS_INVAL("branch/jump");
4919 gen_reserved_instruction(ctx
);
4922 if (bcond_compute
== 0) {
4923 /* No condition to be computed */
4925 case OPC_BEQ
: /* rx == rx */
4926 case OPC_BEQL
: /* rx == rx likely */
4927 case OPC_BGEZ
: /* 0 >= 0 */
4928 case OPC_BGEZL
: /* 0 >= 0 likely */
4929 case OPC_BLEZ
: /* 0 <= 0 */
4930 case OPC_BLEZL
: /* 0 <= 0 likely */
4932 ctx
->hflags
|= MIPS_HFLAG_B
;
4934 case OPC_BGEZAL
: /* 0 >= 0 */
4935 case OPC_BGEZALL
: /* 0 >= 0 likely */
4936 /* Always take and link */
4938 ctx
->hflags
|= MIPS_HFLAG_B
;
4940 case OPC_BNE
: /* rx != rx */
4941 case OPC_BGTZ
: /* 0 > 0 */
4942 case OPC_BLTZ
: /* 0 < 0 */
4945 case OPC_BLTZAL
: /* 0 < 0 */
4947 * Handle as an unconditional branch to get correct delay
4951 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
4952 ctx
->hflags
|= MIPS_HFLAG_B
;
4954 case OPC_BLTZALL
: /* 0 < 0 likely */
4955 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
4956 /* Skip the instruction in the delay slot */
4957 ctx
->base
.pc_next
+= 4;
4959 case OPC_BNEL
: /* rx != rx likely */
4960 case OPC_BGTZL
: /* 0 > 0 likely */
4961 case OPC_BLTZL
: /* 0 < 0 likely */
4962 /* Skip the instruction in the delay slot */
4963 ctx
->base
.pc_next
+= 4;
4966 ctx
->hflags
|= MIPS_HFLAG_B
;
4969 ctx
->hflags
|= MIPS_HFLAG_BX
;
4973 ctx
->hflags
|= MIPS_HFLAG_B
;
4976 ctx
->hflags
|= MIPS_HFLAG_BR
;
4980 ctx
->hflags
|= MIPS_HFLAG_BR
;
4983 MIPS_INVAL("branch/jump");
4984 gen_reserved_instruction(ctx
);
4990 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4993 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4996 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4999 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5002 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5005 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5008 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5012 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5016 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5019 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5022 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5025 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5028 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5031 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5034 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
5036 #if defined(TARGET_MIPS64)
5038 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
5042 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5045 ctx
->hflags
|= MIPS_HFLAG_BC
;
5048 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5051 ctx
->hflags
|= MIPS_HFLAG_BL
;
5054 MIPS_INVAL("conditional branch/jump");
5055 gen_reserved_instruction(ctx
);
5060 ctx
->btarget
= btgt
;
5062 switch (delayslot_size
) {
5064 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
5067 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
5072 int post_delay
= insn_bytes
+ delayslot_size
;
5073 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
5075 tcg_gen_movi_tl(cpu_gpr
[blink
],
5076 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
5080 if (insn_bytes
== 2) {
5081 ctx
->hflags
|= MIPS_HFLAG_B16
;
/* special3 bitfield operations */
/*
 * Generate code for the EXT/INS bitfield instructions (and their MIPS64
 * D-variants): extract or deposit the field rt<msb:lsb> using rs as the
 * source operand.  Malformed fields fall through to the reserved
 * instruction path.
 */
static void gen_bitops(DisasContext *ctx, uint32_t opc, int rt,
                       int rs, int lsb, int msb)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t1, rs);
    switch (opc) {
    case OPC_EXT:
        /* Extract (msb + 1) bits starting at bit lsb, zero-extended. */
        if (lsb + msb > 31) {
            goto fail;
        }
        if (msb != 31) {
            tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
        } else {
            /*
             * The two checks together imply that lsb == 0,
             * so this is a simple sign-extension.
             */
            tcg_gen_ext32s_tl(t0, t1);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DEXTM:
        /* DEXTM: field size encoded minus 32 — TODO confirm against decode */
        msb += 32;
        goto do_dext;
    case OPC_DEXTU:
        /* DEXTU: field position encoded minus 32 */
        lsb += 32;
        goto do_dext;
    case OPC_DEXT:
    do_dext:
        if (lsb + msb > 63) {
            goto fail;
        }
        tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
        break;
#endif
    case OPC_INS:
        /* Deposit rs<msb-lsb:0> into rt<msb:lsb>, then sign-extend. */
        if (lsb > msb) {
            goto fail;
        }
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        tcg_gen_ext32s_tl(t0, t0);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DINSU:
        /* Position encoded minus 32; msb shifts along via fallthrough. */
        lsb += 32;
        /* FALLTHRU */
    case OPC_DINSM:
        /* msb encoded minus 32. */
        msb += 32;
        /* FALLTHRU */
    case OPC_DINS:
        if (lsb > msb) {
            goto fail;
        }
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        break;
#endif
    default:
fail:
        MIPS_INVAL("bitops");
        gen_reserved_instruction(ctx);
        tcg_temp_free(t0);
        tcg_temp_free(t1);
        return;
    }
    gen_store_gpr(t0, rt);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
/*
 * Byte/halfword shuffle and sign-extension ops (SEB/SEH/WSBH and, on
 * MIPS64, DSBH/DSHD).  Reads rt, writes the shuffled result to rd.
 */
static void gen_bshfl(DisasContext *ctx, uint32_t op2, int rt, int rd)
{
    TCGv t0;

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    switch (op2) {
    case OPC_WSBH:
        /* Swap the two bytes within each halfword of the low word. */
        {
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x00FF00FF);

            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_temp_free(t2);
            tcg_temp_free(t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        }
        break;
    case OPC_SEB:
        /* Sign-extend byte. */
        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
        break;
    case OPC_SEH:
        /* Sign-extend halfword. */
        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DSBH:
        /* Swap the bytes within each of the four halfwords. */
        {
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x00FF00FF00FF00FFULL);

            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
            tcg_temp_free(t2);
            tcg_temp_free(t1);
        }
        break;
    case OPC_DSHD:
        /* Reverse the order of the four halfwords: swap within words,
         * then swap the two words. */
        {
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x0000FFFF0000FFFFULL);

            tcg_gen_shri_tl(t1, t0, 16);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 16);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_gen_shri_tl(t1, t0, 32);
            tcg_gen_shli_tl(t0, t0, 32);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
            tcg_temp_free(t2);
            tcg_temp_free(t1);
        }
        break;
#endif
    default:
        MIPS_INVAL("bsfhl");
        gen_reserved_instruction(ctx);
        tcg_temp_free(t0);
        return;
    }
    tcg_temp_free(t0);
}
/*
 * Core of ALIGN/DALIGN: write to rd the concatenation rs:rt shifted so
 * that @bits bits of rs enter from the top.  bits == 0 selects rt
 * unchanged; bits == wordsz selects rs unchanged.
 */
static void gen_align_bits(DisasContext *ctx, int wordsz, int rd, int rs,
                           int rt, int bits)
{
    TCGv t0;
    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }
    t0 = tcg_temp_new();
    if (bits == 0 || bits == wordsz) {
        /* Degenerate shift: the result is simply rt (bits == 0) or rs. */
        if (bits == 0) {
            gen_load_gpr(t0, rt);
        } else {
            gen_load_gpr(t0, rs);
        }
        switch (wordsz) {
        case 32:
            tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
            break;
#if defined(TARGET_MIPS64)
        case 64:
            tcg_gen_mov_tl(cpu_gpr[rd], t0);
            break;
#endif
        }
    } else {
        TCGv t1 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_load_gpr(t1, rs);
        switch (wordsz) {
        case 32:
            {
                /* Concatenate rs:rt to 64 bits, pick the middle word. */
                TCGv_i64 t2 = tcg_temp_new_i64();
                tcg_gen_concat_tl_i64(t2, t1, t0);
                tcg_gen_shri_i64(t2, t2, 32 - bits);
                gen_move_low32(cpu_gpr[rd], t2);
                tcg_temp_free_i64(t2);
            }
            break;
#if defined(TARGET_MIPS64)
        case 64:
            /* 64-bit funnel shift done with two shifts and an OR. */
            tcg_gen_shli_tl(t0, t0, bits);
            tcg_gen_shri_tl(t1, t1, 64 - bits);
            tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
            break;
#endif
        }
        tcg_temp_free(t1);
    }
    tcg_temp_free(t0);
}
/*
 * ALIGN/DALIGN entry point: byte-granular wrapper that converts the
 * byte count @bp into a bit count for gen_align_bits.
 */
void gen_align(DisasContext *ctx, int wordsz, int rd, int rs, int rt, int bp)
{
    gen_align_bits(ctx, wordsz, rd, rs, rt, bp * 8);
}
/*
 * BITSWAP (and DBITSWAP on MIPS64): reverse the bits within each byte
 * of rt, via the out-of-line helper, into rd.
 */
static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
{
    TCGv t0;
    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    switch (opc) {
    case OPC_BITSWAP:
        gen_helper_bitswap(cpu_gpr[rd], t0);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DBITSWAP:
        gen_helper_dbitswap(cpu_gpr[rd], t0);
        break;
#endif
    }
    tcg_temp_free(t0);
}
5317 #ifndef CONFIG_USER_ONLY
5318 /* CP0 (MMU and control) */
/*
 * MTHC0 to an EntryLo register: merge the 32-bit guest value into the
 * upper part of the 64-bit env field at offset @off.
 */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    /* Deposit the 32 written bits at bit 30 of the 64-bit EntryLo. */
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
#else
    /* 32-bit target: the written value becomes the high half. */
    tcg_gen_concat32_i64(t1, t1, t0);
#endif
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}
/*
 * MTHC0 to a plain 64-bit CP0 register: @arg becomes the high 32 bits
 * of the field at @off; the low 32 bits are preserved.
 */
static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
    /* concat32 keeps the low half of t1 and places t0 in the high half. */
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}
/*
 * MFHC0 from an EntryLo register: return the bits above the
 * architecturally visible low word.  On 64-bit targets the extension
 * starts at bit 30; on 32-bit targets the high half is a full word.
 */
static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_i64(t0, t0, 30);
#else
    tcg_gen_shri_i64(t0, t0, 32);
#endif
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}
/*
 * MFHC0 from a 64-bit CP0 field at @off: return bits [63+shift:32+shift]
 * as a 32-bit value (@shift accounts for pre-shifted storage, e.g. LLAddr).
 */
static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
    tcg_gen_shri_i64(t0, t0, 32 + shift);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}
/* Load a 32-bit CP0 field at env offset @off into @arg (ext_i32_tl). */
static inline void gen_mfc0_load32(TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);
}
/*
 * Load a target_ulong-wide CP0 field at @off and sign-extend its low
 * 32 bits, as MFC0 returns a sign-extended 32-bit view.
 */
static inline void gen_mfc0_load64(TCGv arg, target_ulong off)
{
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);
}
/* Store the low 32 bits of @arg into the 32-bit CP0 field at @off. */
static inline void gen_mtc0_store32(TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
}
/*
 * Guard for feature-gated CP0 accesses: unless condition c holds, bail
 * out to the enclosing function's cp0_unimplemented label.
 */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
/*
 * MFHC0: read the upper 32 bits of a 64-bit-backed CP0 register
 * (reg, sel) into @arg.  Registers that are absent or feature-gated
 * fall through to cp0_unimplemented, which logs and yields 0.
 */
static void gen_mfhc0(DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *register_name = "invalid";

    switch (reg) {
    case CP0_REGISTER_02:
        switch (sel) {
        case CP0_REG02__ENTRYLO0:
            /* Only with the large-physical-address extension enabled. */
            CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            register_name = "EntryLo0";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_03:
        switch (sel) {
        case CP0_REG03__ENTRYLO1:
            CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            register_name = "EntryLo1";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_09:
        switch (sel) {
        case CP0_REG09__SAAR:
            CP0_CHECK(ctx->saar);
            gen_helper_mfhc0_saar(arg, cpu_env);
            register_name = "SAAR";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_17:
        switch (sel) {
        case CP0_REG17__LLADDR:
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_LLAddr),
                             ctx->CP0_LLAddr_shift);
            register_name = "LLAddr";
            break;
        case CP0_REG17__MAAR:
            CP0_CHECK(ctx->mrp);
            gen_helper_mfhc0_maar(arg, cpu_env);
            register_name = "MAAR";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_19:
        switch (sel) {
        case CP0_REG19__WATCHHI0:
        case CP0_REG19__WATCHHI1:
        case CP0_REG19__WATCHHI2:
        case CP0_REG19__WATCHHI3:
        case CP0_REG19__WATCHHI4:
        case CP0_REG19__WATCHHI5:
        case CP0_REG19__WATCHHI6:
        case CP0_REG19__WATCHHI7:
            /* upper 32 bits are only available when Config5MI != 0 */
            CP0_CHECK(ctx->mi); /* NOTE(review): guard elided in extraction — confirm */
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_WatchHi[sel]), 0);
            register_name = "WatchHi";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_28:
        switch (sel) {
        case 0:
        case 2:
        case 4:
        case 6:
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_TagLo), 0);
            register_name = "TagLo";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    default:
        goto cp0_unimplemented;
    }
    trace_mips_translate_c0("mfhc0", register_name, reg, sel);
    return;

cp0_unimplemented:
    qemu_log_mask(LOG_UNIMP, "mfhc0 %s (reg %d sel %d)\n",
                  register_name, reg, sel);
    tcg_gen_movi_tl(arg, 0);
}
/*
 * MTHC0: write @arg into the upper 32 bits of CP0 register (reg, sel).
 * mask (PAMask >> 36) restricts the written bits to the implemented
 * physical-address range for EntryLo/TagLo.  Unhandled registers log
 * via cp0_unimplemented and are otherwise ignored.
 */
static void gen_mthc0(DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *register_name = "invalid";
    uint64_t mask = ctx->PAMask >> 36;

    switch (reg) {
    case CP0_REGISTER_02:
        switch (sel) {
        case CP0_REG02__ENTRYLO0:
            CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            register_name = "EntryLo0";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_03:
        switch (sel) {
        case CP0_REG03__ENTRYLO1:
            CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            register_name = "EntryLo1";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_09:
        switch (sel) {
        case CP0_REG09__SAAR:
            CP0_CHECK(ctx->saar);
            gen_helper_mthc0_saar(cpu_env, arg);
            register_name = "SAAR";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_17:
        switch (sel) {
        case CP0_REG17__LLADDR:
            /*
             * LLAddr is read-only (the only exception is bit 0 if LLB is
             * supported); the CP0_LLAddr_rw_bitmask does not seem to be
             * relevant for modern MIPS cores supporting MTHC0, therefore
             * treating MTHC0 to LLAddr as NOP.
             */
            register_name = "LLAddr";
            break;
        case CP0_REG17__MAAR:
            CP0_CHECK(ctx->mrp);
            gen_helper_mthc0_maar(cpu_env, arg);
            register_name = "MAAR";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_19:
        switch (sel) {
        case CP0_REG19__WATCHHI0:
        case CP0_REG19__WATCHHI1:
        case CP0_REG19__WATCHHI2:
        case CP0_REG19__WATCHHI3:
        case CP0_REG19__WATCHHI4:
        case CP0_REG19__WATCHHI5:
        case CP0_REG19__WATCHHI6:
        case CP0_REG19__WATCHHI7:
            /* upper 32 bits are only available when Config5MI != 0 */
            CP0_CHECK(ctx->mi); /* NOTE(review): guard elided in extraction — confirm */
            gen_helper_0e1i(mthc0_watchhi, arg, sel);
            register_name = "WatchHi";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_28:
        switch (sel) {
        case 0:
        case 2:
        case 4:
        case 6:
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_store64(arg, offsetof(CPUMIPSState, CP0_TagLo));
            register_name = "TagLo";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    default:
        goto cp0_unimplemented;
    }
    trace_mips_translate_c0("mthc0", register_name, reg, sel);
    return;

cp0_unimplemented:
    qemu_log_mask(LOG_UNIMP, "mthc0 %s (reg %d sel %d)\n",
                  register_name, reg, sel);
}
5607 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
5609 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
5610 tcg_gen_movi_tl(arg
, 0);
5612 tcg_gen_movi_tl(arg
, ~0);
5616 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5618 const char *register_name
= "invalid";
5621 check_insn(ctx
, ISA_MIPS_R1
);
5625 case CP0_REGISTER_00
:
5627 case CP0_REG00__INDEX
:
5628 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
5629 register_name
= "Index";
5631 case CP0_REG00__MVPCONTROL
:
5632 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5633 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
5634 register_name
= "MVPControl";
5636 case CP0_REG00__MVPCONF0
:
5637 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5638 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
5639 register_name
= "MVPConf0";
5641 case CP0_REG00__MVPCONF1
:
5642 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5643 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
5644 register_name
= "MVPConf1";
5646 case CP0_REG00__VPCONTROL
:
5648 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
5649 register_name
= "VPControl";
5652 goto cp0_unimplemented
;
5655 case CP0_REGISTER_01
:
5657 case CP0_REG01__RANDOM
:
5658 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
5659 gen_helper_mfc0_random(arg
, cpu_env
);
5660 register_name
= "Random";
5662 case CP0_REG01__VPECONTROL
:
5663 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5664 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
5665 register_name
= "VPEControl";
5667 case CP0_REG01__VPECONF0
:
5668 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5669 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
5670 register_name
= "VPEConf0";
5672 case CP0_REG01__VPECONF1
:
5673 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5674 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
5675 register_name
= "VPEConf1";
5677 case CP0_REG01__YQMASK
:
5678 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5679 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
5680 register_name
= "YQMask";
5682 case CP0_REG01__VPESCHEDULE
:
5683 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5684 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5685 register_name
= "VPESchedule";
5687 case CP0_REG01__VPESCHEFBACK
:
5688 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5689 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5690 register_name
= "VPEScheFBack";
5692 case CP0_REG01__VPEOPT
:
5693 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5694 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5695 register_name
= "VPEOpt";
5698 goto cp0_unimplemented
;
5701 case CP0_REGISTER_02
:
5703 case CP0_REG02__ENTRYLO0
:
5705 TCGv_i64 tmp
= tcg_temp_new_i64();
5706 tcg_gen_ld_i64(tmp
, cpu_env
,
5707 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5708 #if defined(TARGET_MIPS64)
5710 /* Move RI/XI fields to bits 31:30 */
5711 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5712 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5715 gen_move_low32(arg
, tmp
);
5716 tcg_temp_free_i64(tmp
);
5718 register_name
= "EntryLo0";
5720 case CP0_REG02__TCSTATUS
:
5721 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5722 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5723 register_name
= "TCStatus";
5725 case CP0_REG02__TCBIND
:
5726 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5727 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5728 register_name
= "TCBind";
5730 case CP0_REG02__TCRESTART
:
5731 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5732 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5733 register_name
= "TCRestart";
5735 case CP0_REG02__TCHALT
:
5736 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5737 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5738 register_name
= "TCHalt";
5740 case CP0_REG02__TCCONTEXT
:
5741 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5742 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5743 register_name
= "TCContext";
5745 case CP0_REG02__TCSCHEDULE
:
5746 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5747 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5748 register_name
= "TCSchedule";
5750 case CP0_REG02__TCSCHEFBACK
:
5751 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5752 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5753 register_name
= "TCScheFBack";
5756 goto cp0_unimplemented
;
5759 case CP0_REGISTER_03
:
5761 case CP0_REG03__ENTRYLO1
:
5763 TCGv_i64 tmp
= tcg_temp_new_i64();
5764 tcg_gen_ld_i64(tmp
, cpu_env
,
5765 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5766 #if defined(TARGET_MIPS64)
5768 /* Move RI/XI fields to bits 31:30 */
5769 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5770 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5773 gen_move_low32(arg
, tmp
);
5774 tcg_temp_free_i64(tmp
);
5776 register_name
= "EntryLo1";
5778 case CP0_REG03__GLOBALNUM
:
5780 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
5781 register_name
= "GlobalNumber";
5784 goto cp0_unimplemented
;
5787 case CP0_REGISTER_04
:
5789 case CP0_REG04__CONTEXT
:
5790 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5791 tcg_gen_ext32s_tl(arg
, arg
);
5792 register_name
= "Context";
5794 case CP0_REG04__CONTEXTCONFIG
:
5796 /* gen_helper_mfc0_contextconfig(arg); */
5797 register_name
= "ContextConfig";
5798 goto cp0_unimplemented
;
5799 case CP0_REG04__USERLOCAL
:
5800 CP0_CHECK(ctx
->ulri
);
5801 tcg_gen_ld_tl(arg
, cpu_env
,
5802 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5803 tcg_gen_ext32s_tl(arg
, arg
);
5804 register_name
= "UserLocal";
5806 case CP0_REG04__MMID
:
5808 gen_helper_mtc0_memorymapid(cpu_env
, arg
);
5809 register_name
= "MMID";
5812 goto cp0_unimplemented
;
5815 case CP0_REGISTER_05
:
5817 case CP0_REG05__PAGEMASK
:
5818 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5819 register_name
= "PageMask";
5821 case CP0_REG05__PAGEGRAIN
:
5822 check_insn(ctx
, ISA_MIPS_R2
);
5823 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5824 register_name
= "PageGrain";
5826 case CP0_REG05__SEGCTL0
:
5828 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
5829 tcg_gen_ext32s_tl(arg
, arg
);
5830 register_name
= "SegCtl0";
5832 case CP0_REG05__SEGCTL1
:
5834 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
5835 tcg_gen_ext32s_tl(arg
, arg
);
5836 register_name
= "SegCtl1";
5838 case CP0_REG05__SEGCTL2
:
5840 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
5841 tcg_gen_ext32s_tl(arg
, arg
);
5842 register_name
= "SegCtl2";
5844 case CP0_REG05__PWBASE
:
5846 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
5847 register_name
= "PWBase";
5849 case CP0_REG05__PWFIELD
:
5851 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWField
));
5852 register_name
= "PWField";
5854 case CP0_REG05__PWSIZE
:
5856 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWSize
));
5857 register_name
= "PWSize";
5860 goto cp0_unimplemented
;
5863 case CP0_REGISTER_06
:
5865 case CP0_REG06__WIRED
:
5866 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5867 register_name
= "Wired";
5869 case CP0_REG06__SRSCONF0
:
5870 check_insn(ctx
, ISA_MIPS_R2
);
5871 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5872 register_name
= "SRSConf0";
5874 case CP0_REG06__SRSCONF1
:
5875 check_insn(ctx
, ISA_MIPS_R2
);
5876 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5877 register_name
= "SRSConf1";
5879 case CP0_REG06__SRSCONF2
:
5880 check_insn(ctx
, ISA_MIPS_R2
);
5881 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5882 register_name
= "SRSConf2";
5884 case CP0_REG06__SRSCONF3
:
5885 check_insn(ctx
, ISA_MIPS_R2
);
5886 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5887 register_name
= "SRSConf3";
5889 case CP0_REG06__SRSCONF4
:
5890 check_insn(ctx
, ISA_MIPS_R2
);
5891 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5892 register_name
= "SRSConf4";
5894 case CP0_REG06__PWCTL
:
5896 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
5897 register_name
= "PWCtl";
5900 goto cp0_unimplemented
;
5903 case CP0_REGISTER_07
:
5905 case CP0_REG07__HWRENA
:
5906 check_insn(ctx
, ISA_MIPS_R2
);
5907 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5908 register_name
= "HWREna";
5911 goto cp0_unimplemented
;
5914 case CP0_REGISTER_08
:
5916 case CP0_REG08__BADVADDR
:
5917 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5918 tcg_gen_ext32s_tl(arg
, arg
);
5919 register_name
= "BadVAddr";
5921 case CP0_REG08__BADINSTR
:
5923 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5924 register_name
= "BadInstr";
5926 case CP0_REG08__BADINSTRP
:
5928 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5929 register_name
= "BadInstrP";
5931 case CP0_REG08__BADINSTRX
:
5933 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
5934 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
5935 register_name
= "BadInstrX";
5938 goto cp0_unimplemented
;
5941 case CP0_REGISTER_09
:
5943 case CP0_REG09__COUNT
:
5944 /* Mark as an IO operation because we read the time. */
5945 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
5948 gen_helper_mfc0_count(arg
, cpu_env
);
5950 * Break the TB to be able to take timer interrupts immediately
5951 * after reading count. DISAS_STOP isn't sufficient, we need to
5952 * ensure we break completely out of translated code.
5954 gen_save_pc(ctx
->base
.pc_next
+ 4);
5955 ctx
->base
.is_jmp
= DISAS_EXIT
;
5956 register_name
= "Count";
5958 case CP0_REG09__SAARI
:
5959 CP0_CHECK(ctx
->saar
);
5960 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SAARI
));
5961 register_name
= "SAARI";
5963 case CP0_REG09__SAAR
:
5964 CP0_CHECK(ctx
->saar
);
5965 gen_helper_mfc0_saar(arg
, cpu_env
);
5966 register_name
= "SAAR";
5969 goto cp0_unimplemented
;
5972 case CP0_REGISTER_10
:
5974 case CP0_REG10__ENTRYHI
:
5975 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5976 tcg_gen_ext32s_tl(arg
, arg
);
5977 register_name
= "EntryHi";
5980 goto cp0_unimplemented
;
5983 case CP0_REGISTER_11
:
5985 case CP0_REG11__COMPARE
:
5986 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5987 register_name
= "Compare";
5989 /* 6,7 are implementation dependent */
5991 goto cp0_unimplemented
;
5994 case CP0_REGISTER_12
:
5996 case CP0_REG12__STATUS
:
5997 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5998 register_name
= "Status";
6000 case CP0_REG12__INTCTL
:
6001 check_insn(ctx
, ISA_MIPS_R2
);
6002 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6003 register_name
= "IntCtl";
6005 case CP0_REG12__SRSCTL
:
6006 check_insn(ctx
, ISA_MIPS_R2
);
6007 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6008 register_name
= "SRSCtl";
6010 case CP0_REG12__SRSMAP
:
6011 check_insn(ctx
, ISA_MIPS_R2
);
6012 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6013 register_name
= "SRSMap";
6016 goto cp0_unimplemented
;
6019 case CP0_REGISTER_13
:
6021 case CP0_REG13__CAUSE
:
6022 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6023 register_name
= "Cause";
6026 goto cp0_unimplemented
;
6029 case CP0_REGISTER_14
:
6031 case CP0_REG14__EPC
:
6032 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6033 tcg_gen_ext32s_tl(arg
, arg
);
6034 register_name
= "EPC";
6037 goto cp0_unimplemented
;
6040 case CP0_REGISTER_15
:
6042 case CP0_REG15__PRID
:
6043 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6044 register_name
= "PRid";
6046 case CP0_REG15__EBASE
:
6047 check_insn(ctx
, ISA_MIPS_R2
);
6048 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
6049 tcg_gen_ext32s_tl(arg
, arg
);
6050 register_name
= "EBase";
6052 case CP0_REG15__CMGCRBASE
:
6053 check_insn(ctx
, ISA_MIPS_R2
);
6054 CP0_CHECK(ctx
->cmgcr
);
6055 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6056 tcg_gen_ext32s_tl(arg
, arg
);
6057 register_name
= "CMGCRBase";
6060 goto cp0_unimplemented
;
6063 case CP0_REGISTER_16
:
6065 case CP0_REG16__CONFIG
:
6066 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6067 register_name
= "Config";
6069 case CP0_REG16__CONFIG1
:
6070 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6071 register_name
= "Config1";
6073 case CP0_REG16__CONFIG2
:
6074 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6075 register_name
= "Config2";
6077 case CP0_REG16__CONFIG3
:
6078 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6079 register_name
= "Config3";
6081 case CP0_REG16__CONFIG4
:
6082 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6083 register_name
= "Config4";
6085 case CP0_REG16__CONFIG5
:
6086 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6087 register_name
= "Config5";
6089 /* 6,7 are implementation dependent */
6090 case CP0_REG16__CONFIG6
:
6091 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6092 register_name
= "Config6";
6094 case CP0_REG16__CONFIG7
:
6095 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6096 register_name
= "Config7";
6099 goto cp0_unimplemented
;
6102 case CP0_REGISTER_17
:
6104 case CP0_REG17__LLADDR
:
6105 gen_helper_mfc0_lladdr(arg
, cpu_env
);
6106 register_name
= "LLAddr";
6108 case CP0_REG17__MAAR
:
6109 CP0_CHECK(ctx
->mrp
);
6110 gen_helper_mfc0_maar(arg
, cpu_env
);
6111 register_name
= "MAAR";
6113 case CP0_REG17__MAARI
:
6114 CP0_CHECK(ctx
->mrp
);
6115 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6116 register_name
= "MAARI";
6119 goto cp0_unimplemented
;
6122 case CP0_REGISTER_18
:
6124 case CP0_REG18__WATCHLO0
:
6125 case CP0_REG18__WATCHLO1
:
6126 case CP0_REG18__WATCHLO2
:
6127 case CP0_REG18__WATCHLO3
:
6128 case CP0_REG18__WATCHLO4
:
6129 case CP0_REG18__WATCHLO5
:
6130 case CP0_REG18__WATCHLO6
:
6131 case CP0_REG18__WATCHLO7
:
6132 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6133 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
6134 register_name
= "WatchLo";
6137 goto cp0_unimplemented
;
6140 case CP0_REGISTER_19
:
6142 case CP0_REG19__WATCHHI0
:
6143 case CP0_REG19__WATCHHI1
:
6144 case CP0_REG19__WATCHHI2
:
6145 case CP0_REG19__WATCHHI3
:
6146 case CP0_REG19__WATCHHI4
:
6147 case CP0_REG19__WATCHHI5
:
6148 case CP0_REG19__WATCHHI6
:
6149 case CP0_REG19__WATCHHI7
:
6150 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6151 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6152 register_name
= "WatchHi";
6155 goto cp0_unimplemented
;
6158 case CP0_REGISTER_20
:
6160 case CP0_REG20__XCONTEXT
:
6161 #if defined(TARGET_MIPS64)
6162 check_insn(ctx
, ISA_MIPS3
);
6163 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6164 tcg_gen_ext32s_tl(arg
, arg
);
6165 register_name
= "XContext";
6169 goto cp0_unimplemented
;
6172 case CP0_REGISTER_21
:
6173 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6174 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
6177 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6178 register_name
= "Framemask";
6181 goto cp0_unimplemented
;
6184 case CP0_REGISTER_22
:
6185 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6186 register_name
= "'Diagnostic"; /* implementation dependent */
6188 case CP0_REGISTER_23
:
6190 case CP0_REG23__DEBUG
:
6191 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6192 register_name
= "Debug";
6194 case CP0_REG23__TRACECONTROL
:
6195 /* PDtrace support */
6196 /* gen_helper_mfc0_tracecontrol(arg); */
6197 register_name
= "TraceControl";
6198 goto cp0_unimplemented
;
6199 case CP0_REG23__TRACECONTROL2
:
6200 /* PDtrace support */
6201 /* gen_helper_mfc0_tracecontrol2(arg); */
6202 register_name
= "TraceControl2";
6203 goto cp0_unimplemented
;
6204 case CP0_REG23__USERTRACEDATA1
:
6205 /* PDtrace support */
6206 /* gen_helper_mfc0_usertracedata1(arg);*/
6207 register_name
= "UserTraceData1";
6208 goto cp0_unimplemented
;
6209 case CP0_REG23__TRACEIBPC
:
6210 /* PDtrace support */
6211 /* gen_helper_mfc0_traceibpc(arg); */
6212 register_name
= "TraceIBPC";
6213 goto cp0_unimplemented
;
6214 case CP0_REG23__TRACEDBPC
:
6215 /* PDtrace support */
6216 /* gen_helper_mfc0_tracedbpc(arg); */
6217 register_name
= "TraceDBPC";
6218 goto cp0_unimplemented
;
6220 goto cp0_unimplemented
;
6223 case CP0_REGISTER_24
:
6225 case CP0_REG24__DEPC
:
6227 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6228 tcg_gen_ext32s_tl(arg
, arg
);
6229 register_name
= "DEPC";
6232 goto cp0_unimplemented
;
6235 case CP0_REGISTER_25
:
6237 case CP0_REG25__PERFCTL0
:
6238 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6239 register_name
= "Performance0";
6241 case CP0_REG25__PERFCNT0
:
6242 /* gen_helper_mfc0_performance1(arg); */
6243 register_name
= "Performance1";
6244 goto cp0_unimplemented
;
6245 case CP0_REG25__PERFCTL1
:
6246 /* gen_helper_mfc0_performance2(arg); */
6247 register_name
= "Performance2";
6248 goto cp0_unimplemented
;
6249 case CP0_REG25__PERFCNT1
:
6250 /* gen_helper_mfc0_performance3(arg); */
6251 register_name
= "Performance3";
6252 goto cp0_unimplemented
;
6253 case CP0_REG25__PERFCTL2
:
6254 /* gen_helper_mfc0_performance4(arg); */
6255 register_name
= "Performance4";
6256 goto cp0_unimplemented
;
6257 case CP0_REG25__PERFCNT2
:
6258 /* gen_helper_mfc0_performance5(arg); */
6259 register_name
= "Performance5";
6260 goto cp0_unimplemented
;
6261 case CP0_REG25__PERFCTL3
:
6262 /* gen_helper_mfc0_performance6(arg); */
6263 register_name
= "Performance6";
6264 goto cp0_unimplemented
;
6265 case CP0_REG25__PERFCNT3
:
6266 /* gen_helper_mfc0_performance7(arg); */
6267 register_name
= "Performance7";
6268 goto cp0_unimplemented
;
6270 goto cp0_unimplemented
;
6273 case CP0_REGISTER_26
:
6275 case CP0_REG26__ERRCTL
:
6276 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
6277 register_name
= "ErrCtl";
6280 goto cp0_unimplemented
;
6283 case CP0_REGISTER_27
:
6285 case CP0_REG27__CACHERR
:
6286 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6287 register_name
= "CacheErr";
6290 goto cp0_unimplemented
;
6293 case CP0_REGISTER_28
:
6295 case CP0_REG28__TAGLO
:
6296 case CP0_REG28__TAGLO1
:
6297 case CP0_REG28__TAGLO2
:
6298 case CP0_REG28__TAGLO3
:
6300 TCGv_i64 tmp
= tcg_temp_new_i64();
6301 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
6302 gen_move_low32(arg
, tmp
);
6303 tcg_temp_free_i64(tmp
);
6305 register_name
= "TagLo";
6307 case CP0_REG28__DATALO
:
6308 case CP0_REG28__DATALO1
:
6309 case CP0_REG28__DATALO2
:
6310 case CP0_REG28__DATALO3
:
6311 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6312 register_name
= "DataLo";
6315 goto cp0_unimplemented
;
6318 case CP0_REGISTER_29
:
6320 case CP0_REG29__TAGHI
:
6321 case CP0_REG29__TAGHI1
:
6322 case CP0_REG29__TAGHI2
:
6323 case CP0_REG29__TAGHI3
:
6324 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
6325 register_name
= "TagHi";
6327 case CP0_REG29__DATAHI
:
6328 case CP0_REG29__DATAHI1
:
6329 case CP0_REG29__DATAHI2
:
6330 case CP0_REG29__DATAHI3
:
6331 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
6332 register_name
= "DataHi";
6335 goto cp0_unimplemented
;
6338 case CP0_REGISTER_30
:
6340 case CP0_REG30__ERROREPC
:
6341 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6342 tcg_gen_ext32s_tl(arg
, arg
);
6343 register_name
= "ErrorEPC";
6346 goto cp0_unimplemented
;
6349 case CP0_REGISTER_31
:
6351 case CP0_REG31__DESAVE
:
6353 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6354 register_name
= "DESAVE";
6356 case CP0_REG31__KSCRATCH1
:
6357 case CP0_REG31__KSCRATCH2
:
6358 case CP0_REG31__KSCRATCH3
:
6359 case CP0_REG31__KSCRATCH4
:
6360 case CP0_REG31__KSCRATCH5
:
6361 case CP0_REG31__KSCRATCH6
:
6362 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6363 tcg_gen_ld_tl(arg
, cpu_env
,
6364 offsetof(CPUMIPSState
, CP0_KScratch
[sel
- 2]));
6365 tcg_gen_ext32s_tl(arg
, arg
);
6366 register_name
= "KScratch";
6369 goto cp0_unimplemented
;
6373 goto cp0_unimplemented
;
6375 trace_mips_translate_c0("mfc0", register_name
, reg
, sel
);
6379 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n",
6380 register_name
, reg
, sel
);
6381 gen_mfc0_unimplemented(ctx
, arg
);
6384 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6386 const char *register_name
= "invalid";
6389 check_insn(ctx
, ISA_MIPS_R1
);
6392 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6397 case CP0_REGISTER_00
:
6399 case CP0_REG00__INDEX
:
6400 gen_helper_mtc0_index(cpu_env
, arg
);
6401 register_name
= "Index";
6403 case CP0_REG00__MVPCONTROL
:
6404 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6405 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6406 register_name
= "MVPControl";
6408 case CP0_REG00__MVPCONF0
:
6409 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6411 register_name
= "MVPConf0";
6413 case CP0_REG00__MVPCONF1
:
6414 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6416 register_name
= "MVPConf1";
6418 case CP0_REG00__VPCONTROL
:
6421 register_name
= "VPControl";
6424 goto cp0_unimplemented
;
6427 case CP0_REGISTER_01
:
6429 case CP0_REG01__RANDOM
:
6431 register_name
= "Random";
6433 case CP0_REG01__VPECONTROL
:
6434 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6435 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6436 register_name
= "VPEControl";
6438 case CP0_REG01__VPECONF0
:
6439 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6440 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6441 register_name
= "VPEConf0";
6443 case CP0_REG01__VPECONF1
:
6444 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6445 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
6446 register_name
= "VPEConf1";
6448 case CP0_REG01__YQMASK
:
6449 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6450 gen_helper_mtc0_yqmask(cpu_env
, arg
);
6451 register_name
= "YQMask";
6453 case CP0_REG01__VPESCHEDULE
:
6454 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6455 tcg_gen_st_tl(arg
, cpu_env
,
6456 offsetof(CPUMIPSState
, CP0_VPESchedule
));
6457 register_name
= "VPESchedule";
6459 case CP0_REG01__VPESCHEFBACK
:
6460 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6461 tcg_gen_st_tl(arg
, cpu_env
,
6462 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6463 register_name
= "VPEScheFBack";
6465 case CP0_REG01__VPEOPT
:
6466 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6467 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
6468 register_name
= "VPEOpt";
6471 goto cp0_unimplemented
;
6474 case CP0_REGISTER_02
:
6476 case CP0_REG02__ENTRYLO0
:
6477 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
6478 register_name
= "EntryLo0";
6480 case CP0_REG02__TCSTATUS
:
6481 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6482 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
6483 register_name
= "TCStatus";
6485 case CP0_REG02__TCBIND
:
6486 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6487 gen_helper_mtc0_tcbind(cpu_env
, arg
);
6488 register_name
= "TCBind";
6490 case CP0_REG02__TCRESTART
:
6491 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6492 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
6493 register_name
= "TCRestart";
6495 case CP0_REG02__TCHALT
:
6496 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6497 gen_helper_mtc0_tchalt(cpu_env
, arg
);
6498 register_name
= "TCHalt";
6500 case CP0_REG02__TCCONTEXT
:
6501 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6502 gen_helper_mtc0_tccontext(cpu_env
, arg
);
6503 register_name
= "TCContext";
6505 case CP0_REG02__TCSCHEDULE
:
6506 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6507 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
6508 register_name
= "TCSchedule";
6510 case CP0_REG02__TCSCHEFBACK
:
6511 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6512 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
6513 register_name
= "TCScheFBack";
6516 goto cp0_unimplemented
;
6519 case CP0_REGISTER_03
:
6521 case CP0_REG03__ENTRYLO1
:
6522 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
6523 register_name
= "EntryLo1";
6525 case CP0_REG03__GLOBALNUM
:
6528 register_name
= "GlobalNumber";
6531 goto cp0_unimplemented
;
6534 case CP0_REGISTER_04
:
6536 case CP0_REG04__CONTEXT
:
6537 gen_helper_mtc0_context(cpu_env
, arg
);
6538 register_name
= "Context";
6540 case CP0_REG04__CONTEXTCONFIG
:
6542 /* gen_helper_mtc0_contextconfig(arg); */
6543 register_name
= "ContextConfig";
6544 goto cp0_unimplemented
;
6545 case CP0_REG04__USERLOCAL
:
6546 CP0_CHECK(ctx
->ulri
);
6547 tcg_gen_st_tl(arg
, cpu_env
,
6548 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6549 register_name
= "UserLocal";
6551 case CP0_REG04__MMID
:
6553 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MemoryMapID
));
6554 register_name
= "MMID";
6557 goto cp0_unimplemented
;
6560 case CP0_REGISTER_05
:
6562 case CP0_REG05__PAGEMASK
:
6563 gen_helper_mtc0_pagemask(cpu_env
, arg
);
6564 register_name
= "PageMask";
6566 case CP0_REG05__PAGEGRAIN
:
6567 check_insn(ctx
, ISA_MIPS_R2
);
6568 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
6569 register_name
= "PageGrain";
6570 ctx
->base
.is_jmp
= DISAS_STOP
;
6572 case CP0_REG05__SEGCTL0
:
6574 gen_helper_mtc0_segctl0(cpu_env
, arg
);
6575 register_name
= "SegCtl0";
6577 case CP0_REG05__SEGCTL1
:
6579 gen_helper_mtc0_segctl1(cpu_env
, arg
);
6580 register_name
= "SegCtl1";
6582 case CP0_REG05__SEGCTL2
:
6584 gen_helper_mtc0_segctl2(cpu_env
, arg
);
6585 register_name
= "SegCtl2";
6587 case CP0_REG05__PWBASE
:
6589 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
6590 register_name
= "PWBase";
6592 case CP0_REG05__PWFIELD
:
6594 gen_helper_mtc0_pwfield(cpu_env
, arg
);
6595 register_name
= "PWField";
6597 case CP0_REG05__PWSIZE
:
6599 gen_helper_mtc0_pwsize(cpu_env
, arg
);
6600 register_name
= "PWSize";
6603 goto cp0_unimplemented
;
6606 case CP0_REGISTER_06
:
6608 case CP0_REG06__WIRED
:
6609 gen_helper_mtc0_wired(cpu_env
, arg
);
6610 register_name
= "Wired";
6612 case CP0_REG06__SRSCONF0
:
6613 check_insn(ctx
, ISA_MIPS_R2
);
6614 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
6615 register_name
= "SRSConf0";
6617 case CP0_REG06__SRSCONF1
:
6618 check_insn(ctx
, ISA_MIPS_R2
);
6619 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
6620 register_name
= "SRSConf1";
6622 case CP0_REG06__SRSCONF2
:
6623 check_insn(ctx
, ISA_MIPS_R2
);
6624 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
6625 register_name
= "SRSConf2";
6627 case CP0_REG06__SRSCONF3
:
6628 check_insn(ctx
, ISA_MIPS_R2
);
6629 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
6630 register_name
= "SRSConf3";
6632 case CP0_REG06__SRSCONF4
:
6633 check_insn(ctx
, ISA_MIPS_R2
);
6634 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
6635 register_name
= "SRSConf4";
6637 case CP0_REG06__PWCTL
:
6639 gen_helper_mtc0_pwctl(cpu_env
, arg
);
6640 register_name
= "PWCtl";
6643 goto cp0_unimplemented
;
6646 case CP0_REGISTER_07
:
6648 case CP0_REG07__HWRENA
:
6649 check_insn(ctx
, ISA_MIPS_R2
);
6650 gen_helper_mtc0_hwrena(cpu_env
, arg
);
6651 ctx
->base
.is_jmp
= DISAS_STOP
;
6652 register_name
= "HWREna";
6655 goto cp0_unimplemented
;
6658 case CP0_REGISTER_08
:
6660 case CP0_REG08__BADVADDR
:
6662 register_name
= "BadVAddr";
6664 case CP0_REG08__BADINSTR
:
6666 register_name
= "BadInstr";
6668 case CP0_REG08__BADINSTRP
:
6670 register_name
= "BadInstrP";
6672 case CP0_REG08__BADINSTRX
:
6674 register_name
= "BadInstrX";
6677 goto cp0_unimplemented
;
6680 case CP0_REGISTER_09
:
6682 case CP0_REG09__COUNT
:
6683 gen_helper_mtc0_count(cpu_env
, arg
);
6684 register_name
= "Count";
6686 case CP0_REG09__SAARI
:
6687 CP0_CHECK(ctx
->saar
);
6688 gen_helper_mtc0_saari(cpu_env
, arg
);
6689 register_name
= "SAARI";
6691 case CP0_REG09__SAAR
:
6692 CP0_CHECK(ctx
->saar
);
6693 gen_helper_mtc0_saar(cpu_env
, arg
);
6694 register_name
= "SAAR";
6697 goto cp0_unimplemented
;
6700 case CP0_REGISTER_10
:
6702 case CP0_REG10__ENTRYHI
:
6703 gen_helper_mtc0_entryhi(cpu_env
, arg
);
6704 register_name
= "EntryHi";
6707 goto cp0_unimplemented
;
6710 case CP0_REGISTER_11
:
6712 case CP0_REG11__COMPARE
:
6713 gen_helper_mtc0_compare(cpu_env
, arg
);
6714 register_name
= "Compare";
6716 /* 6,7 are implementation dependent */
6718 goto cp0_unimplemented
;
6721 case CP0_REGISTER_12
:
6723 case CP0_REG12__STATUS
:
6724 save_cpu_state(ctx
, 1);
6725 gen_helper_mtc0_status(cpu_env
, arg
);
6726 /* DISAS_STOP isn't good enough here, hflags may have changed. */
6727 gen_save_pc(ctx
->base
.pc_next
+ 4);
6728 ctx
->base
.is_jmp
= DISAS_EXIT
;
6729 register_name
= "Status";
6731 case CP0_REG12__INTCTL
:
6732 check_insn(ctx
, ISA_MIPS_R2
);
6733 gen_helper_mtc0_intctl(cpu_env
, arg
);
6734 /* Stop translation as we may have switched the execution mode */
6735 ctx
->base
.is_jmp
= DISAS_STOP
;
6736 register_name
= "IntCtl";
6738 case CP0_REG12__SRSCTL
:
6739 check_insn(ctx
, ISA_MIPS_R2
);
6740 gen_helper_mtc0_srsctl(cpu_env
, arg
);
6741 /* Stop translation as we may have switched the execution mode */
6742 ctx
->base
.is_jmp
= DISAS_STOP
;
6743 register_name
= "SRSCtl";
6745 case CP0_REG12__SRSMAP
:
6746 check_insn(ctx
, ISA_MIPS_R2
);
6747 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6748 /* Stop translation as we may have switched the execution mode */
6749 ctx
->base
.is_jmp
= DISAS_STOP
;
6750 register_name
= "SRSMap";
6753 goto cp0_unimplemented
;
6756 case CP0_REGISTER_13
:
6758 case CP0_REG13__CAUSE
:
6759 save_cpu_state(ctx
, 1);
6760 gen_helper_mtc0_cause(cpu_env
, arg
);
6762 * Stop translation as we may have triggered an interrupt.
6763 * DISAS_STOP isn't sufficient, we need to ensure we break out of
6764 * translated code to check for pending interrupts.
6766 gen_save_pc(ctx
->base
.pc_next
+ 4);
6767 ctx
->base
.is_jmp
= DISAS_EXIT
;
6768 register_name
= "Cause";
6771 goto cp0_unimplemented
;
6774 case CP0_REGISTER_14
:
6776 case CP0_REG14__EPC
:
6777 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6778 register_name
= "EPC";
6781 goto cp0_unimplemented
;
6784 case CP0_REGISTER_15
:
6786 case CP0_REG15__PRID
:
6788 register_name
= "PRid";
6790 case CP0_REG15__EBASE
:
6791 check_insn(ctx
, ISA_MIPS_R2
);
6792 gen_helper_mtc0_ebase(cpu_env
, arg
);
6793 register_name
= "EBase";
6796 goto cp0_unimplemented
;
6799 case CP0_REGISTER_16
:
6801 case CP0_REG16__CONFIG
:
6802 gen_helper_mtc0_config0(cpu_env
, arg
);
6803 register_name
= "Config";
6804 /* Stop translation as we may have switched the execution mode */
6805 ctx
->base
.is_jmp
= DISAS_STOP
;
6807 case CP0_REG16__CONFIG1
:
6808 /* ignored, read only */
6809 register_name
= "Config1";
6811 case CP0_REG16__CONFIG2
:
6812 gen_helper_mtc0_config2(cpu_env
, arg
);
6813 register_name
= "Config2";
6814 /* Stop translation as we may have switched the execution mode */
6815 ctx
->base
.is_jmp
= DISAS_STOP
;
6817 case CP0_REG16__CONFIG3
:
6818 gen_helper_mtc0_config3(cpu_env
, arg
);
6819 register_name
= "Config3";
6820 /* Stop translation as we may have switched the execution mode */
6821 ctx
->base
.is_jmp
= DISAS_STOP
;
6823 case CP0_REG16__CONFIG4
:
6824 gen_helper_mtc0_config4(cpu_env
, arg
);
6825 register_name
= "Config4";
6826 ctx
->base
.is_jmp
= DISAS_STOP
;
6828 case CP0_REG16__CONFIG5
:
6829 gen_helper_mtc0_config5(cpu_env
, arg
);
6830 register_name
= "Config5";
6831 /* Stop translation as we may have switched the execution mode */
6832 ctx
->base
.is_jmp
= DISAS_STOP
;
6834 /* 6,7 are implementation dependent */
6835 case CP0_REG16__CONFIG6
:
6837 register_name
= "Config6";
6839 case CP0_REG16__CONFIG7
:
6841 register_name
= "Config7";
6844 register_name
= "Invalid config selector";
6845 goto cp0_unimplemented
;
6848 case CP0_REGISTER_17
:
6850 case CP0_REG17__LLADDR
:
6851 gen_helper_mtc0_lladdr(cpu_env
, arg
);
6852 register_name
= "LLAddr";
6854 case CP0_REG17__MAAR
:
6855 CP0_CHECK(ctx
->mrp
);
6856 gen_helper_mtc0_maar(cpu_env
, arg
);
6857 register_name
= "MAAR";
6859 case CP0_REG17__MAARI
:
6860 CP0_CHECK(ctx
->mrp
);
6861 gen_helper_mtc0_maari(cpu_env
, arg
);
6862 register_name
= "MAARI";
6865 goto cp0_unimplemented
;
6868 case CP0_REGISTER_18
:
6870 case CP0_REG18__WATCHLO0
:
6871 case CP0_REG18__WATCHLO1
:
6872 case CP0_REG18__WATCHLO2
:
6873 case CP0_REG18__WATCHLO3
:
6874 case CP0_REG18__WATCHLO4
:
6875 case CP0_REG18__WATCHLO5
:
6876 case CP0_REG18__WATCHLO6
:
6877 case CP0_REG18__WATCHLO7
:
6878 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6879 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
6880 register_name
= "WatchLo";
6883 goto cp0_unimplemented
;
6886 case CP0_REGISTER_19
:
6888 case CP0_REG19__WATCHHI0
:
6889 case CP0_REG19__WATCHHI1
:
6890 case CP0_REG19__WATCHHI2
:
6891 case CP0_REG19__WATCHHI3
:
6892 case CP0_REG19__WATCHHI4
:
6893 case CP0_REG19__WATCHHI5
:
6894 case CP0_REG19__WATCHHI6
:
6895 case CP0_REG19__WATCHHI7
:
6896 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6897 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
6898 register_name
= "WatchHi";
6901 goto cp0_unimplemented
;
6904 case CP0_REGISTER_20
:
6906 case CP0_REG20__XCONTEXT
:
6907 #if defined(TARGET_MIPS64)
6908 check_insn(ctx
, ISA_MIPS3
);
6909 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6910 register_name
= "XContext";
6914 goto cp0_unimplemented
;
6917 case CP0_REGISTER_21
:
6918 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6919 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
6922 gen_helper_mtc0_framemask(cpu_env
, arg
);
6923 register_name
= "Framemask";
6926 goto cp0_unimplemented
;
6929 case CP0_REGISTER_22
:
6931 register_name
= "Diagnostic"; /* implementation dependent */
6933 case CP0_REGISTER_23
:
6935 case CP0_REG23__DEBUG
:
6936 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6937 /* DISAS_STOP isn't good enough here, hflags may have changed. */
6938 gen_save_pc(ctx
->base
.pc_next
+ 4);
6939 ctx
->base
.is_jmp
= DISAS_EXIT
;
6940 register_name
= "Debug";
6942 case CP0_REG23__TRACECONTROL
:
6943 /* PDtrace support */
6944 /* gen_helper_mtc0_tracecontrol(cpu_env, arg); */
6945 register_name
= "TraceControl";
6946 /* Stop translation as we may have switched the execution mode */
6947 ctx
->base
.is_jmp
= DISAS_STOP
;
6948 goto cp0_unimplemented
;
6949 case CP0_REG23__TRACECONTROL2
:
6950 /* PDtrace support */
6951 /* gen_helper_mtc0_tracecontrol2(cpu_env, arg); */
6952 register_name
= "TraceControl2";
6953 /* Stop translation as we may have switched the execution mode */
6954 ctx
->base
.is_jmp
= DISAS_STOP
;
6955 goto cp0_unimplemented
;
6956 case CP0_REG23__USERTRACEDATA1
:
6957 /* Stop translation as we may have switched the execution mode */
6958 ctx
->base
.is_jmp
= DISAS_STOP
;
6959 /* PDtrace support */
6960 /* gen_helper_mtc0_usertracedata1(cpu_env, arg);*/
6961 register_name
= "UserTraceData";
6962 /* Stop translation as we may have switched the execution mode */
6963 ctx
->base
.is_jmp
= DISAS_STOP
;
6964 goto cp0_unimplemented
;
6965 case CP0_REG23__TRACEIBPC
:
6966 /* PDtrace support */
6967 /* gen_helper_mtc0_traceibpc(cpu_env, arg); */
6968 /* Stop translation as we may have switched the execution mode */
6969 ctx
->base
.is_jmp
= DISAS_STOP
;
6970 register_name
= "TraceIBPC";
6971 goto cp0_unimplemented
;
6972 case CP0_REG23__TRACEDBPC
:
6973 /* PDtrace support */
6974 /* gen_helper_mtc0_tracedbpc(cpu_env, arg); */
6975 /* Stop translation as we may have switched the execution mode */
6976 ctx
->base
.is_jmp
= DISAS_STOP
;
6977 register_name
= "TraceDBPC";
6978 goto cp0_unimplemented
;
6980 goto cp0_unimplemented
;
6983 case CP0_REGISTER_24
:
6985 case CP0_REG24__DEPC
:
6987 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6988 register_name
= "DEPC";
6991 goto cp0_unimplemented
;
6994 case CP0_REGISTER_25
:
6996 case CP0_REG25__PERFCTL0
:
6997 gen_helper_mtc0_performance0(cpu_env
, arg
);
6998 register_name
= "Performance0";
7000 case CP0_REG25__PERFCNT0
:
7001 /* gen_helper_mtc0_performance1(arg); */
7002 register_name
= "Performance1";
7003 goto cp0_unimplemented
;
7004 case CP0_REG25__PERFCTL1
:
7005 /* gen_helper_mtc0_performance2(arg); */
7006 register_name
= "Performance2";
7007 goto cp0_unimplemented
;
7008 case CP0_REG25__PERFCNT1
:
7009 /* gen_helper_mtc0_performance3(arg); */
7010 register_name
= "Performance3";
7011 goto cp0_unimplemented
;
7012 case CP0_REG25__PERFCTL2
:
7013 /* gen_helper_mtc0_performance4(arg); */
7014 register_name
= "Performance4";
7015 goto cp0_unimplemented
;
7016 case CP0_REG25__PERFCNT2
:
7017 /* gen_helper_mtc0_performance5(arg); */
7018 register_name
= "Performance5";
7019 goto cp0_unimplemented
;
7020 case CP0_REG25__PERFCTL3
:
7021 /* gen_helper_mtc0_performance6(arg); */
7022 register_name
= "Performance6";
7023 goto cp0_unimplemented
;
7024 case CP0_REG25__PERFCNT3
:
7025 /* gen_helper_mtc0_performance7(arg); */
7026 register_name
= "Performance7";
7027 goto cp0_unimplemented
;
7029 goto cp0_unimplemented
;
7032 case CP0_REGISTER_26
:
7034 case CP0_REG26__ERRCTL
:
7035 gen_helper_mtc0_errctl(cpu_env
, arg
);
7036 ctx
->base
.is_jmp
= DISAS_STOP
;
7037 register_name
= "ErrCtl";
7040 goto cp0_unimplemented
;
7043 case CP0_REGISTER_27
:
7045 case CP0_REG27__CACHERR
:
7047 register_name
= "CacheErr";
7050 goto cp0_unimplemented
;
7053 case CP0_REGISTER_28
:
7055 case CP0_REG28__TAGLO
:
7056 case CP0_REG28__TAGLO1
:
7057 case CP0_REG28__TAGLO2
:
7058 case CP0_REG28__TAGLO3
:
7059 gen_helper_mtc0_taglo(cpu_env
, arg
);
7060 register_name
= "TagLo";
7062 case CP0_REG28__DATALO
:
7063 case CP0_REG28__DATALO1
:
7064 case CP0_REG28__DATALO2
:
7065 case CP0_REG28__DATALO3
:
7066 gen_helper_mtc0_datalo(cpu_env
, arg
);
7067 register_name
= "DataLo";
7070 goto cp0_unimplemented
;
7073 case CP0_REGISTER_29
:
7075 case CP0_REG29__TAGHI
:
7076 case CP0_REG29__TAGHI1
:
7077 case CP0_REG29__TAGHI2
:
7078 case CP0_REG29__TAGHI3
:
7079 gen_helper_mtc0_taghi(cpu_env
, arg
);
7080 register_name
= "TagHi";
7082 case CP0_REG29__DATAHI
:
7083 case CP0_REG29__DATAHI1
:
7084 case CP0_REG29__DATAHI2
:
7085 case CP0_REG29__DATAHI3
:
7086 gen_helper_mtc0_datahi(cpu_env
, arg
);
7087 register_name
= "DataHi";
7090 register_name
= "invalid sel";
7091 goto cp0_unimplemented
;
7094 case CP0_REGISTER_30
:
7096 case CP0_REG30__ERROREPC
:
7097 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7098 register_name
= "ErrorEPC";
7101 goto cp0_unimplemented
;
7104 case CP0_REGISTER_31
:
7106 case CP0_REG31__DESAVE
:
7108 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7109 register_name
= "DESAVE";
7111 case CP0_REG31__KSCRATCH1
:
7112 case CP0_REG31__KSCRATCH2
:
7113 case CP0_REG31__KSCRATCH3
:
7114 case CP0_REG31__KSCRATCH4
:
7115 case CP0_REG31__KSCRATCH5
:
7116 case CP0_REG31__KSCRATCH6
:
7117 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7118 tcg_gen_st_tl(arg
, cpu_env
,
7119 offsetof(CPUMIPSState
, CP0_KScratch
[sel
- 2]));
7120 register_name
= "KScratch";
7123 goto cp0_unimplemented
;
7127 goto cp0_unimplemented
;
7129 trace_mips_translate_c0("mtc0", register_name
, reg
, sel
);
7131 /* For simplicity assume that all writes can cause interrupts. */
7132 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7134 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7135 * translated code to check for pending interrupts.
7137 gen_save_pc(ctx
->base
.pc_next
+ 4);
7138 ctx
->base
.is_jmp
= DISAS_EXIT
;
7143 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n",
7144 register_name
, reg
, sel
);
7147 #if defined(TARGET_MIPS64)
7148 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7150 const char *register_name
= "invalid";
7153 check_insn(ctx
, ISA_MIPS_R1
);
7157 case CP0_REGISTER_00
:
7159 case CP0_REG00__INDEX
:
7160 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
7161 register_name
= "Index";
7163 case CP0_REG00__MVPCONTROL
:
7164 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7165 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
7166 register_name
= "MVPControl";
7168 case CP0_REG00__MVPCONF0
:
7169 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7170 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
7171 register_name
= "MVPConf0";
7173 case CP0_REG00__MVPCONF1
:
7174 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7175 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
7176 register_name
= "MVPConf1";
7178 case CP0_REG00__VPCONTROL
:
7180 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
7181 register_name
= "VPControl";
7184 goto cp0_unimplemented
;
7187 case CP0_REGISTER_01
:
7189 case CP0_REG01__RANDOM
:
7190 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
7191 gen_helper_mfc0_random(arg
, cpu_env
);
7192 register_name
= "Random";
7194 case CP0_REG01__VPECONTROL
:
7195 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7196 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
7197 register_name
= "VPEControl";
7199 case CP0_REG01__VPECONF0
:
7200 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7201 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
7202 register_name
= "VPEConf0";
7204 case CP0_REG01__VPECONF1
:
7205 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7206 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
7207 register_name
= "VPEConf1";
7209 case CP0_REG01__YQMASK
:
7210 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7211 tcg_gen_ld_tl(arg
, cpu_env
,
7212 offsetof(CPUMIPSState
, CP0_YQMask
));
7213 register_name
= "YQMask";
7215 case CP0_REG01__VPESCHEDULE
:
7216 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7217 tcg_gen_ld_tl(arg
, cpu_env
,
7218 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7219 register_name
= "VPESchedule";
7221 case CP0_REG01__VPESCHEFBACK
:
7222 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7223 tcg_gen_ld_tl(arg
, cpu_env
,
7224 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7225 register_name
= "VPEScheFBack";
7227 case CP0_REG01__VPEOPT
:
7228 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7229 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
7230 register_name
= "VPEOpt";
7233 goto cp0_unimplemented
;
7236 case CP0_REGISTER_02
:
7238 case CP0_REG02__ENTRYLO0
:
7239 tcg_gen_ld_tl(arg
, cpu_env
,
7240 offsetof(CPUMIPSState
, CP0_EntryLo0
));
7241 register_name
= "EntryLo0";
7243 case CP0_REG02__TCSTATUS
:
7244 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7245 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
7246 register_name
= "TCStatus";
7248 case CP0_REG02__TCBIND
:
7249 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7250 gen_helper_mfc0_tcbind(arg
, cpu_env
);
7251 register_name
= "TCBind";
7253 case CP0_REG02__TCRESTART
:
7254 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7255 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
7256 register_name
= "TCRestart";
7258 case CP0_REG02__TCHALT
:
7259 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7260 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
7261 register_name
= "TCHalt";
7263 case CP0_REG02__TCCONTEXT
:
7264 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7265 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
7266 register_name
= "TCContext";
7268 case CP0_REG02__TCSCHEDULE
:
7269 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7270 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
7271 register_name
= "TCSchedule";
7273 case CP0_REG02__TCSCHEFBACK
:
7274 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7275 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
7276 register_name
= "TCScheFBack";
7279 goto cp0_unimplemented
;
7282 case CP0_REGISTER_03
:
7284 case CP0_REG03__ENTRYLO1
:
7285 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
7286 register_name
= "EntryLo1";
7288 case CP0_REG03__GLOBALNUM
:
7290 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
7291 register_name
= "GlobalNumber";
7294 goto cp0_unimplemented
;
7297 case CP0_REGISTER_04
:
7299 case CP0_REG04__CONTEXT
:
7300 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
7301 register_name
= "Context";
7303 case CP0_REG04__CONTEXTCONFIG
:
7305 /* gen_helper_dmfc0_contextconfig(arg); */
7306 register_name
= "ContextConfig";
7307 goto cp0_unimplemented
;
7308 case CP0_REG04__USERLOCAL
:
7309 CP0_CHECK(ctx
->ulri
);
7310 tcg_gen_ld_tl(arg
, cpu_env
,
7311 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7312 register_name
= "UserLocal";
7314 case CP0_REG04__MMID
:
7316 gen_helper_mtc0_memorymapid(cpu_env
, arg
);
7317 register_name
= "MMID";
7320 goto cp0_unimplemented
;
7323 case CP0_REGISTER_05
:
7325 case CP0_REG05__PAGEMASK
:
7326 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
7327 register_name
= "PageMask";
7329 case CP0_REG05__PAGEGRAIN
:
7330 check_insn(ctx
, ISA_MIPS_R2
);
7331 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
7332 register_name
= "PageGrain";
7334 case CP0_REG05__SEGCTL0
:
7336 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
7337 register_name
= "SegCtl0";
7339 case CP0_REG05__SEGCTL1
:
7341 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
7342 register_name
= "SegCtl1";
7344 case CP0_REG05__SEGCTL2
:
7346 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
7347 register_name
= "SegCtl2";
7349 case CP0_REG05__PWBASE
:
7351 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
7352 register_name
= "PWBase";
7354 case CP0_REG05__PWFIELD
:
7356 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWField
));
7357 register_name
= "PWField";
7359 case CP0_REG05__PWSIZE
:
7361 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWSize
));
7362 register_name
= "PWSize";
7365 goto cp0_unimplemented
;
7368 case CP0_REGISTER_06
:
7370 case CP0_REG06__WIRED
:
7371 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
7372 register_name
= "Wired";
7374 case CP0_REG06__SRSCONF0
:
7375 check_insn(ctx
, ISA_MIPS_R2
);
7376 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
7377 register_name
= "SRSConf0";
7379 case CP0_REG06__SRSCONF1
:
7380 check_insn(ctx
, ISA_MIPS_R2
);
7381 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
7382 register_name
= "SRSConf1";
7384 case CP0_REG06__SRSCONF2
:
7385 check_insn(ctx
, ISA_MIPS_R2
);
7386 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
7387 register_name
= "SRSConf2";
7389 case CP0_REG06__SRSCONF3
:
7390 check_insn(ctx
, ISA_MIPS_R2
);
7391 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
7392 register_name
= "SRSConf3";
7394 case CP0_REG06__SRSCONF4
:
7395 check_insn(ctx
, ISA_MIPS_R2
);
7396 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
7397 register_name
= "SRSConf4";
7399 case CP0_REG06__PWCTL
:
7401 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
7402 register_name
= "PWCtl";
7405 goto cp0_unimplemented
;
7408 case CP0_REGISTER_07
:
7410 case CP0_REG07__HWRENA
:
7411 check_insn(ctx
, ISA_MIPS_R2
);
7412 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
7413 register_name
= "HWREna";
7416 goto cp0_unimplemented
;
7419 case CP0_REGISTER_08
:
7421 case CP0_REG08__BADVADDR
:
7422 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
7423 register_name
= "BadVAddr";
7425 case CP0_REG08__BADINSTR
:
7427 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
7428 register_name
= "BadInstr";
7430 case CP0_REG08__BADINSTRP
:
7432 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
7433 register_name
= "BadInstrP";
7435 case CP0_REG08__BADINSTRX
:
7437 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
7438 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
7439 register_name
= "BadInstrX";
7442 goto cp0_unimplemented
;
7445 case CP0_REGISTER_09
:
7447 case CP0_REG09__COUNT
:
7448 /* Mark as an IO operation because we read the time. */
7449 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7452 gen_helper_mfc0_count(arg
, cpu_env
);
7454 * Break the TB to be able to take timer interrupts immediately
7455 * after reading count. DISAS_STOP isn't sufficient, we need to
7456 * ensure we break completely out of translated code.
7458 gen_save_pc(ctx
->base
.pc_next
+ 4);
7459 ctx
->base
.is_jmp
= DISAS_EXIT
;
7460 register_name
= "Count";
7462 case CP0_REG09__SAARI
:
7463 CP0_CHECK(ctx
->saar
);
7464 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SAARI
));
7465 register_name
= "SAARI";
7467 case CP0_REG09__SAAR
:
7468 CP0_CHECK(ctx
->saar
);
7469 gen_helper_dmfc0_saar(arg
, cpu_env
);
7470 register_name
= "SAAR";
7473 goto cp0_unimplemented
;
7476 case CP0_REGISTER_10
:
7478 case CP0_REG10__ENTRYHI
:
7479 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
7480 register_name
= "EntryHi";
7483 goto cp0_unimplemented
;
7486 case CP0_REGISTER_11
:
7488 case CP0_REG11__COMPARE
:
7489 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
7490 register_name
= "Compare";
7492 /* 6,7 are implementation dependent */
7494 goto cp0_unimplemented
;
7497 case CP0_REGISTER_12
:
7499 case CP0_REG12__STATUS
:
7500 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
7501 register_name
= "Status";
7503 case CP0_REG12__INTCTL
:
7504 check_insn(ctx
, ISA_MIPS_R2
);
7505 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
7506 register_name
= "IntCtl";
7508 case CP0_REG12__SRSCTL
:
7509 check_insn(ctx
, ISA_MIPS_R2
);
7510 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
7511 register_name
= "SRSCtl";
7513 case CP0_REG12__SRSMAP
:
7514 check_insn(ctx
, ISA_MIPS_R2
);
7515 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7516 register_name
= "SRSMap";
7519 goto cp0_unimplemented
;
7522 case CP0_REGISTER_13
:
7524 case CP0_REG13__CAUSE
:
7525 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
7526 register_name
= "Cause";
7529 goto cp0_unimplemented
;
7532 case CP0_REGISTER_14
:
7534 case CP0_REG14__EPC
:
7535 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7536 register_name
= "EPC";
7539 goto cp0_unimplemented
;
7542 case CP0_REGISTER_15
:
7544 case CP0_REG15__PRID
:
7545 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
7546 register_name
= "PRid";
7548 case CP0_REG15__EBASE
:
7549 check_insn(ctx
, ISA_MIPS_R2
);
7550 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
7551 register_name
= "EBase";
7553 case CP0_REG15__CMGCRBASE
:
7554 check_insn(ctx
, ISA_MIPS_R2
);
7555 CP0_CHECK(ctx
->cmgcr
);
7556 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
7557 register_name
= "CMGCRBase";
7560 goto cp0_unimplemented
;
7563 case CP0_REGISTER_16
:
7565 case CP0_REG16__CONFIG
:
7566 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
7567 register_name
= "Config";
7569 case CP0_REG16__CONFIG1
:
7570 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
7571 register_name
= "Config1";
7573 case CP0_REG16__CONFIG2
:
7574 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
7575 register_name
= "Config2";
7577 case CP0_REG16__CONFIG3
:
7578 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
7579 register_name
= "Config3";
7581 case CP0_REG16__CONFIG4
:
7582 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
7583 register_name
= "Config4";
7585 case CP0_REG16__CONFIG5
:
7586 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
7587 register_name
= "Config5";
7589 /* 6,7 are implementation dependent */
7590 case CP0_REG16__CONFIG6
:
7591 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
7592 register_name
= "Config6";
7594 case CP0_REG16__CONFIG7
:
7595 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
7596 register_name
= "Config7";
7599 goto cp0_unimplemented
;
7602 case CP0_REGISTER_17
:
7604 case CP0_REG17__LLADDR
:
7605 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
7606 register_name
= "LLAddr";
7608 case CP0_REG17__MAAR
:
7609 CP0_CHECK(ctx
->mrp
);
7610 gen_helper_dmfc0_maar(arg
, cpu_env
);
7611 register_name
= "MAAR";
7613 case CP0_REG17__MAARI
:
7614 CP0_CHECK(ctx
->mrp
);
7615 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
7616 register_name
= "MAARI";
7619 goto cp0_unimplemented
;
7622 case CP0_REGISTER_18
:
7624 case CP0_REG18__WATCHLO0
:
7625 case CP0_REG18__WATCHLO1
:
7626 case CP0_REG18__WATCHLO2
:
7627 case CP0_REG18__WATCHLO3
:
7628 case CP0_REG18__WATCHLO4
:
7629 case CP0_REG18__WATCHLO5
:
7630 case CP0_REG18__WATCHLO6
:
7631 case CP0_REG18__WATCHLO7
:
7632 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7633 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
7634 register_name
= "WatchLo";
7637 goto cp0_unimplemented
;
7640 case CP0_REGISTER_19
:
7642 case CP0_REG19__WATCHHI0
:
7643 case CP0_REG19__WATCHHI1
:
7644 case CP0_REG19__WATCHHI2
:
7645 case CP0_REG19__WATCHHI3
:
7646 case CP0_REG19__WATCHHI4
:
7647 case CP0_REG19__WATCHHI5
:
7648 case CP0_REG19__WATCHHI6
:
7649 case CP0_REG19__WATCHHI7
:
7650 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7651 gen_helper_1e0i(dmfc0_watchhi
, arg
, sel
);
7652 register_name
= "WatchHi";
7655 goto cp0_unimplemented
;
7658 case CP0_REGISTER_20
:
7660 case CP0_REG20__XCONTEXT
:
7661 check_insn(ctx
, ISA_MIPS3
);
7662 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
7663 register_name
= "XContext";
7666 goto cp0_unimplemented
;
7669 case CP0_REGISTER_21
:
7670 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7671 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
7674 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
7675 register_name
= "Framemask";
7678 goto cp0_unimplemented
;
7681 case CP0_REGISTER_22
:
7682 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7683 register_name
= "'Diagnostic"; /* implementation dependent */
7685 case CP0_REGISTER_23
:
7687 case CP0_REG23__DEBUG
:
7688 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
7689 register_name
= "Debug";
7691 case CP0_REG23__TRACECONTROL
:
7692 /* PDtrace support */
7693 /* gen_helper_dmfc0_tracecontrol(arg, cpu_env); */
7694 register_name
= "TraceControl";
7695 goto cp0_unimplemented
;
7696 case CP0_REG23__TRACECONTROL2
:
7697 /* PDtrace support */
7698 /* gen_helper_dmfc0_tracecontrol2(arg, cpu_env); */
7699 register_name
= "TraceControl2";
7700 goto cp0_unimplemented
;
7701 case CP0_REG23__USERTRACEDATA1
:
7702 /* PDtrace support */
7703 /* gen_helper_dmfc0_usertracedata1(arg, cpu_env);*/
7704 register_name
= "UserTraceData1";
7705 goto cp0_unimplemented
;
7706 case CP0_REG23__TRACEIBPC
:
7707 /* PDtrace support */
7708 /* gen_helper_dmfc0_traceibpc(arg, cpu_env); */
7709 register_name
= "TraceIBPC";
7710 goto cp0_unimplemented
;
7711 case CP0_REG23__TRACEDBPC
:
7712 /* PDtrace support */
7713 /* gen_helper_dmfc0_tracedbpc(arg, cpu_env); */
7714 register_name
= "TraceDBPC";
7715 goto cp0_unimplemented
;
7717 goto cp0_unimplemented
;
7720 case CP0_REGISTER_24
:
7722 case CP0_REG24__DEPC
:
7724 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7725 register_name
= "DEPC";
7728 goto cp0_unimplemented
;
7731 case CP0_REGISTER_25
:
7733 case CP0_REG25__PERFCTL0
:
7734 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
7735 register_name
= "Performance0";
7737 case CP0_REG25__PERFCNT0
:
7738 /* gen_helper_dmfc0_performance1(arg); */
7739 register_name
= "Performance1";
7740 goto cp0_unimplemented
;
7741 case CP0_REG25__PERFCTL1
:
7742 /* gen_helper_dmfc0_performance2(arg); */
7743 register_name
= "Performance2";
7744 goto cp0_unimplemented
;
7745 case CP0_REG25__PERFCNT1
:
7746 /* gen_helper_dmfc0_performance3(arg); */
7747 register_name
= "Performance3";
7748 goto cp0_unimplemented
;
7749 case CP0_REG25__PERFCTL2
:
7750 /* gen_helper_dmfc0_performance4(arg); */
7751 register_name
= "Performance4";
7752 goto cp0_unimplemented
;
7753 case CP0_REG25__PERFCNT2
:
7754 /* gen_helper_dmfc0_performance5(arg); */
7755 register_name
= "Performance5";
7756 goto cp0_unimplemented
;
7757 case CP0_REG25__PERFCTL3
:
7758 /* gen_helper_dmfc0_performance6(arg); */
7759 register_name
= "Performance6";
7760 goto cp0_unimplemented
;
7761 case CP0_REG25__PERFCNT3
:
7762 /* gen_helper_dmfc0_performance7(arg); */
7763 register_name
= "Performance7";
7764 goto cp0_unimplemented
;
7766 goto cp0_unimplemented
;
7769 case CP0_REGISTER_26
:
7771 case CP0_REG26__ERRCTL
:
7772 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
7773 register_name
= "ErrCtl";
7776 goto cp0_unimplemented
;
7779 case CP0_REGISTER_27
:
7782 case CP0_REG27__CACHERR
:
7783 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7784 register_name
= "CacheErr";
7787 goto cp0_unimplemented
;
7790 case CP0_REGISTER_28
:
7792 case CP0_REG28__TAGLO
:
7793 case CP0_REG28__TAGLO1
:
7794 case CP0_REG28__TAGLO2
:
7795 case CP0_REG28__TAGLO3
:
7796 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
7797 register_name
= "TagLo";
7799 case CP0_REG28__DATALO
:
7800 case CP0_REG28__DATALO1
:
7801 case CP0_REG28__DATALO2
:
7802 case CP0_REG28__DATALO3
:
7803 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7804 register_name
= "DataLo";
7807 goto cp0_unimplemented
;
7810 case CP0_REGISTER_29
:
7812 case CP0_REG29__TAGHI
:
7813 case CP0_REG29__TAGHI1
:
7814 case CP0_REG29__TAGHI2
:
7815 case CP0_REG29__TAGHI3
:
7816 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7817 register_name
= "TagHi";
7819 case CP0_REG29__DATAHI
:
7820 case CP0_REG29__DATAHI1
:
7821 case CP0_REG29__DATAHI2
:
7822 case CP0_REG29__DATAHI3
:
7823 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7824 register_name
= "DataHi";
7827 goto cp0_unimplemented
;
7830 case CP0_REGISTER_30
:
7832 case CP0_REG30__ERROREPC
:
7833 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7834 register_name
= "ErrorEPC";
7837 goto cp0_unimplemented
;
7840 case CP0_REGISTER_31
:
7842 case CP0_REG31__DESAVE
:
7844 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7845 register_name
= "DESAVE";
7847 case CP0_REG31__KSCRATCH1
:
7848 case CP0_REG31__KSCRATCH2
:
7849 case CP0_REG31__KSCRATCH3
:
7850 case CP0_REG31__KSCRATCH4
:
7851 case CP0_REG31__KSCRATCH5
:
7852 case CP0_REG31__KSCRATCH6
:
7853 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7854 tcg_gen_ld_tl(arg
, cpu_env
,
7855 offsetof(CPUMIPSState
, CP0_KScratch
[sel
- 2]));
7856 register_name
= "KScratch";
7859 goto cp0_unimplemented
;
7863 goto cp0_unimplemented
;
7865 trace_mips_translate_c0("dmfc0", register_name
, reg
, sel
);
7869 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n",
7870 register_name
, reg
, sel
);
7871 gen_mfc0_unimplemented(ctx
, arg
);
7874 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7876 const char *register_name
= "invalid";
7879 check_insn(ctx
, ISA_MIPS_R1
);
7882 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7887 case CP0_REGISTER_00
:
7889 case CP0_REG00__INDEX
:
7890 gen_helper_mtc0_index(cpu_env
, arg
);
7891 register_name
= "Index";
7893 case CP0_REG00__MVPCONTROL
:
7894 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7895 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7896 register_name
= "MVPControl";
7898 case CP0_REG00__MVPCONF0
:
7899 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7901 register_name
= "MVPConf0";
7903 case CP0_REG00__MVPCONF1
:
7904 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7906 register_name
= "MVPConf1";
7908 case CP0_REG00__VPCONTROL
:
7911 register_name
= "VPControl";
7914 goto cp0_unimplemented
;
7917 case CP0_REGISTER_01
:
7919 case CP0_REG01__RANDOM
:
7921 register_name
= "Random";
7923 case CP0_REG01__VPECONTROL
:
7924 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7925 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7926 register_name
= "VPEControl";
7928 case CP0_REG01__VPECONF0
:
7929 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7930 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7931 register_name
= "VPEConf0";
7933 case CP0_REG01__VPECONF1
:
7934 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7935 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7936 register_name
= "VPEConf1";
7938 case CP0_REG01__YQMASK
:
7939 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7940 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7941 register_name
= "YQMask";
7943 case CP0_REG01__VPESCHEDULE
:
7944 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7945 tcg_gen_st_tl(arg
, cpu_env
,
7946 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7947 register_name
= "VPESchedule";
7949 case CP0_REG01__VPESCHEFBACK
:
7950 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7951 tcg_gen_st_tl(arg
, cpu_env
,
7952 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7953 register_name
= "VPEScheFBack";
7955 case CP0_REG01__VPEOPT
:
7956 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7957 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7958 register_name
= "VPEOpt";
7961 goto cp0_unimplemented
;
7964 case CP0_REGISTER_02
:
7966 case CP0_REG02__ENTRYLO0
:
7967 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
7968 register_name
= "EntryLo0";
7970 case CP0_REG02__TCSTATUS
:
7971 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7972 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7973 register_name
= "TCStatus";
7975 case CP0_REG02__TCBIND
:
7976 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7977 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7978 register_name
= "TCBind";
7980 case CP0_REG02__TCRESTART
:
7981 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7982 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7983 register_name
= "TCRestart";
7985 case CP0_REG02__TCHALT
:
7986 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7987 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7988 register_name
= "TCHalt";
7990 case CP0_REG02__TCCONTEXT
:
7991 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7992 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7993 register_name
= "TCContext";
7995 case CP0_REG02__TCSCHEDULE
:
7996 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7997 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7998 register_name
= "TCSchedule";
8000 case CP0_REG02__TCSCHEFBACK
:
8001 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8002 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
8003 register_name
= "TCScheFBack";
8006 goto cp0_unimplemented
;
8009 case CP0_REGISTER_03
:
8011 case CP0_REG03__ENTRYLO1
:
8012 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
8013 register_name
= "EntryLo1";
8015 case CP0_REG03__GLOBALNUM
:
8018 register_name
= "GlobalNumber";
8021 goto cp0_unimplemented
;
8024 case CP0_REGISTER_04
:
8026 case CP0_REG04__CONTEXT
:
8027 gen_helper_mtc0_context(cpu_env
, arg
);
8028 register_name
= "Context";
8030 case CP0_REG04__CONTEXTCONFIG
:
8032 /* gen_helper_dmtc0_contextconfig(arg); */
8033 register_name
= "ContextConfig";
8034 goto cp0_unimplemented
;
8035 case CP0_REG04__USERLOCAL
:
8036 CP0_CHECK(ctx
->ulri
);
8037 tcg_gen_st_tl(arg
, cpu_env
,
8038 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8039 register_name
= "UserLocal";
8041 case CP0_REG04__MMID
:
8043 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MemoryMapID
));
8044 register_name
= "MMID";
8047 goto cp0_unimplemented
;
8050 case CP0_REGISTER_05
:
8052 case CP0_REG05__PAGEMASK
:
8053 gen_helper_mtc0_pagemask(cpu_env
, arg
);
8054 register_name
= "PageMask";
8056 case CP0_REG05__PAGEGRAIN
:
8057 check_insn(ctx
, ISA_MIPS_R2
);
8058 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
8059 register_name
= "PageGrain";
8061 case CP0_REG05__SEGCTL0
:
8063 gen_helper_mtc0_segctl0(cpu_env
, arg
);
8064 register_name
= "SegCtl0";
8066 case CP0_REG05__SEGCTL1
:
8068 gen_helper_mtc0_segctl1(cpu_env
, arg
);
8069 register_name
= "SegCtl1";
8071 case CP0_REG05__SEGCTL2
:
8073 gen_helper_mtc0_segctl2(cpu_env
, arg
);
8074 register_name
= "SegCtl2";
8076 case CP0_REG05__PWBASE
:
8078 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8079 register_name
= "PWBase";
8081 case CP0_REG05__PWFIELD
:
8083 gen_helper_mtc0_pwfield(cpu_env
, arg
);
8084 register_name
= "PWField";
8086 case CP0_REG05__PWSIZE
:
8088 gen_helper_mtc0_pwsize(cpu_env
, arg
);
8089 register_name
= "PWSize";
8092 goto cp0_unimplemented
;
8095 case CP0_REGISTER_06
:
8097 case CP0_REG06__WIRED
:
8098 gen_helper_mtc0_wired(cpu_env
, arg
);
8099 register_name
= "Wired";
8101 case CP0_REG06__SRSCONF0
:
8102 check_insn(ctx
, ISA_MIPS_R2
);
8103 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
8104 register_name
= "SRSConf0";
8106 case CP0_REG06__SRSCONF1
:
8107 check_insn(ctx
, ISA_MIPS_R2
);
8108 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
8109 register_name
= "SRSConf1";
8111 case CP0_REG06__SRSCONF2
:
8112 check_insn(ctx
, ISA_MIPS_R2
);
8113 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
8114 register_name
= "SRSConf2";
8116 case CP0_REG06__SRSCONF3
:
8117 check_insn(ctx
, ISA_MIPS_R2
);
8118 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
8119 register_name
= "SRSConf3";
8121 case CP0_REG06__SRSCONF4
:
8122 check_insn(ctx
, ISA_MIPS_R2
);
8123 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
8124 register_name
= "SRSConf4";
8126 case CP0_REG06__PWCTL
:
8128 gen_helper_mtc0_pwctl(cpu_env
, arg
);
8129 register_name
= "PWCtl";
8132 goto cp0_unimplemented
;
8135 case CP0_REGISTER_07
:
8137 case CP0_REG07__HWRENA
:
8138 check_insn(ctx
, ISA_MIPS_R2
);
8139 gen_helper_mtc0_hwrena(cpu_env
, arg
);
8140 ctx
->base
.is_jmp
= DISAS_STOP
;
8141 register_name
= "HWREna";
8144 goto cp0_unimplemented
;
8147 case CP0_REGISTER_08
:
8149 case CP0_REG08__BADVADDR
:
8151 register_name
= "BadVAddr";
8153 case CP0_REG08__BADINSTR
:
8155 register_name
= "BadInstr";
8157 case CP0_REG08__BADINSTRP
:
8159 register_name
= "BadInstrP";
8161 case CP0_REG08__BADINSTRX
:
8163 register_name
= "BadInstrX";
8166 goto cp0_unimplemented
;
8169 case CP0_REGISTER_09
:
8171 case CP0_REG09__COUNT
:
8172 gen_helper_mtc0_count(cpu_env
, arg
);
8173 register_name
= "Count";
8175 case CP0_REG09__SAARI
:
8176 CP0_CHECK(ctx
->saar
);
8177 gen_helper_mtc0_saari(cpu_env
, arg
);
8178 register_name
= "SAARI";
8180 case CP0_REG09__SAAR
:
8181 CP0_CHECK(ctx
->saar
);
8182 gen_helper_mtc0_saar(cpu_env
, arg
);
8183 register_name
= "SAAR";
8186 goto cp0_unimplemented
;
8188 /* Stop translation as we may have switched the execution mode */
8189 ctx
->base
.is_jmp
= DISAS_STOP
;
8191 case CP0_REGISTER_10
:
8193 case CP0_REG10__ENTRYHI
:
8194 gen_helper_mtc0_entryhi(cpu_env
, arg
);
8195 register_name
= "EntryHi";
8198 goto cp0_unimplemented
;
8201 case CP0_REGISTER_11
:
8203 case CP0_REG11__COMPARE
:
8204 gen_helper_mtc0_compare(cpu_env
, arg
);
8205 register_name
= "Compare";
8207 /* 6,7 are implementation dependent */
8209 goto cp0_unimplemented
;
8211 /* Stop translation as we may have switched the execution mode */
8212 ctx
->base
.is_jmp
= DISAS_STOP
;
8214 case CP0_REGISTER_12
:
8216 case CP0_REG12__STATUS
:
8217 save_cpu_state(ctx
, 1);
8218 gen_helper_mtc0_status(cpu_env
, arg
);
8219 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8220 gen_save_pc(ctx
->base
.pc_next
+ 4);
8221 ctx
->base
.is_jmp
= DISAS_EXIT
;
8222 register_name
= "Status";
8224 case CP0_REG12__INTCTL
:
8225 check_insn(ctx
, ISA_MIPS_R2
);
8226 gen_helper_mtc0_intctl(cpu_env
, arg
);
8227 /* Stop translation as we may have switched the execution mode */
8228 ctx
->base
.is_jmp
= DISAS_STOP
;
8229 register_name
= "IntCtl";
8231 case CP0_REG12__SRSCTL
:
8232 check_insn(ctx
, ISA_MIPS_R2
);
8233 gen_helper_mtc0_srsctl(cpu_env
, arg
);
8234 /* Stop translation as we may have switched the execution mode */
8235 ctx
->base
.is_jmp
= DISAS_STOP
;
8236 register_name
= "SRSCtl";
8238 case CP0_REG12__SRSMAP
:
8239 check_insn(ctx
, ISA_MIPS_R2
);
8240 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8241 /* Stop translation as we may have switched the execution mode */
8242 ctx
->base
.is_jmp
= DISAS_STOP
;
8243 register_name
= "SRSMap";
8246 goto cp0_unimplemented
;
8249 case CP0_REGISTER_13
:
8251 case CP0_REG13__CAUSE
:
8252 save_cpu_state(ctx
, 1);
8253 gen_helper_mtc0_cause(cpu_env
, arg
);
8255 * Stop translation as we may have triggered an interrupt.
8256 * DISAS_STOP isn't sufficient, we need to ensure we break out of
8257 * translated code to check for pending interrupts.
8259 gen_save_pc(ctx
->base
.pc_next
+ 4);
8260 ctx
->base
.is_jmp
= DISAS_EXIT
;
8261 register_name
= "Cause";
8264 goto cp0_unimplemented
;
8267 case CP0_REGISTER_14
:
8269 case CP0_REG14__EPC
:
8270 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8271 register_name
= "EPC";
8274 goto cp0_unimplemented
;
8277 case CP0_REGISTER_15
:
8279 case CP0_REG15__PRID
:
8281 register_name
= "PRid";
8283 case CP0_REG15__EBASE
:
8284 check_insn(ctx
, ISA_MIPS_R2
);
8285 gen_helper_mtc0_ebase(cpu_env
, arg
);
8286 register_name
= "EBase";
8289 goto cp0_unimplemented
;
8292 case CP0_REGISTER_16
:
8294 case CP0_REG16__CONFIG
:
8295 gen_helper_mtc0_config0(cpu_env
, arg
);
8296 register_name
= "Config";
8297 /* Stop translation as we may have switched the execution mode */
8298 ctx
->base
.is_jmp
= DISAS_STOP
;
8300 case CP0_REG16__CONFIG1
:
8301 /* ignored, read only */
8302 register_name
= "Config1";
8304 case CP0_REG16__CONFIG2
:
8305 gen_helper_mtc0_config2(cpu_env
, arg
);
8306 register_name
= "Config2";
8307 /* Stop translation as we may have switched the execution mode */
8308 ctx
->base
.is_jmp
= DISAS_STOP
;
8310 case CP0_REG16__CONFIG3
:
8311 gen_helper_mtc0_config3(cpu_env
, arg
);
8312 register_name
= "Config3";
8313 /* Stop translation as we may have switched the execution mode */
8314 ctx
->base
.is_jmp
= DISAS_STOP
;
8316 case CP0_REG16__CONFIG4
:
8317 /* currently ignored */
8318 register_name
= "Config4";
8320 case CP0_REG16__CONFIG5
:
8321 gen_helper_mtc0_config5(cpu_env
, arg
);
8322 register_name
= "Config5";
8323 /* Stop translation as we may have switched the execution mode */
8324 ctx
->base
.is_jmp
= DISAS_STOP
;
8326 /* 6,7 are implementation dependent */
8328 register_name
= "Invalid config selector";
8329 goto cp0_unimplemented
;
8332 case CP0_REGISTER_17
:
8334 case CP0_REG17__LLADDR
:
8335 gen_helper_mtc0_lladdr(cpu_env
, arg
);
8336 register_name
= "LLAddr";
8338 case CP0_REG17__MAAR
:
8339 CP0_CHECK(ctx
->mrp
);
8340 gen_helper_mtc0_maar(cpu_env
, arg
);
8341 register_name
= "MAAR";
8343 case CP0_REG17__MAARI
:
8344 CP0_CHECK(ctx
->mrp
);
8345 gen_helper_mtc0_maari(cpu_env
, arg
);
8346 register_name
= "MAARI";
8349 goto cp0_unimplemented
;
8352 case CP0_REGISTER_18
:
8354 case CP0_REG18__WATCHLO0
:
8355 case CP0_REG18__WATCHLO1
:
8356 case CP0_REG18__WATCHLO2
:
8357 case CP0_REG18__WATCHLO3
:
8358 case CP0_REG18__WATCHLO4
:
8359 case CP0_REG18__WATCHLO5
:
8360 case CP0_REG18__WATCHLO6
:
8361 case CP0_REG18__WATCHLO7
:
8362 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8363 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
8364 register_name
= "WatchLo";
8367 goto cp0_unimplemented
;
8370 case CP0_REGISTER_19
:
8372 case CP0_REG19__WATCHHI0
:
8373 case CP0_REG19__WATCHHI1
:
8374 case CP0_REG19__WATCHHI2
:
8375 case CP0_REG19__WATCHHI3
:
8376 case CP0_REG19__WATCHHI4
:
8377 case CP0_REG19__WATCHHI5
:
8378 case CP0_REG19__WATCHHI6
:
8379 case CP0_REG19__WATCHHI7
:
8380 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8381 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
8382 register_name
= "WatchHi";
8385 goto cp0_unimplemented
;
8388 case CP0_REGISTER_20
:
8390 case CP0_REG20__XCONTEXT
:
8391 check_insn(ctx
, ISA_MIPS3
);
8392 gen_helper_mtc0_xcontext(cpu_env
, arg
);
8393 register_name
= "XContext";
8396 goto cp0_unimplemented
;
8399 case CP0_REGISTER_21
:
8400 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8401 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
8404 gen_helper_mtc0_framemask(cpu_env
, arg
);
8405 register_name
= "Framemask";
8408 goto cp0_unimplemented
;
8411 case CP0_REGISTER_22
:
8413 register_name
= "Diagnostic"; /* implementation dependent */
8415 case CP0_REGISTER_23
:
8417 case CP0_REG23__DEBUG
:
8418 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
8419 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8420 gen_save_pc(ctx
->base
.pc_next
+ 4);
8421 ctx
->base
.is_jmp
= DISAS_EXIT
;
8422 register_name
= "Debug";
8424 case CP0_REG23__TRACECONTROL
:
8425 /* PDtrace support */
8426 /* gen_helper_mtc0_tracecontrol(cpu_env, arg); */
8427 /* Stop translation as we may have switched the execution mode */
8428 ctx
->base
.is_jmp
= DISAS_STOP
;
8429 register_name
= "TraceControl";
8430 goto cp0_unimplemented
;
8431 case CP0_REG23__TRACECONTROL2
:
8432 /* PDtrace support */
8433 /* gen_helper_mtc0_tracecontrol2(cpu_env, arg); */
8434 /* Stop translation as we may have switched the execution mode */
8435 ctx
->base
.is_jmp
= DISAS_STOP
;
8436 register_name
= "TraceControl2";
8437 goto cp0_unimplemented
;
8438 case CP0_REG23__USERTRACEDATA1
:
8439 /* PDtrace support */
8440 /* gen_helper_mtc0_usertracedata1(cpu_env, arg);*/
8441 /* Stop translation as we may have switched the execution mode */
8442 ctx
->base
.is_jmp
= DISAS_STOP
;
8443 register_name
= "UserTraceData1";
8444 goto cp0_unimplemented
;
8445 case CP0_REG23__TRACEIBPC
:
8446 /* PDtrace support */
8447 /* gen_helper_mtc0_traceibpc(cpu_env, arg); */
8448 /* Stop translation as we may have switched the execution mode */
8449 ctx
->base
.is_jmp
= DISAS_STOP
;
8450 register_name
= "TraceIBPC";
8451 goto cp0_unimplemented
;
8452 case CP0_REG23__TRACEDBPC
:
8453 /* PDtrace support */
8454 /* gen_helper_mtc0_tracedbpc(cpu_env, arg); */
8455 /* Stop translation as we may have switched the execution mode */
8456 ctx
->base
.is_jmp
= DISAS_STOP
;
8457 register_name
= "TraceDBPC";
8458 goto cp0_unimplemented
;
8460 goto cp0_unimplemented
;
8463 case CP0_REGISTER_24
:
8465 case CP0_REG24__DEPC
:
8467 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8468 register_name
= "DEPC";
8471 goto cp0_unimplemented
;
8474 case CP0_REGISTER_25
:
8476 case CP0_REG25__PERFCTL0
:
8477 gen_helper_mtc0_performance0(cpu_env
, arg
);
8478 register_name
= "Performance0";
8480 case CP0_REG25__PERFCNT0
:
8481 /* gen_helper_mtc0_performance1(cpu_env, arg); */
8482 register_name
= "Performance1";
8483 goto cp0_unimplemented
;
8484 case CP0_REG25__PERFCTL1
:
8485 /* gen_helper_mtc0_performance2(cpu_env, arg); */
8486 register_name
= "Performance2";
8487 goto cp0_unimplemented
;
8488 case CP0_REG25__PERFCNT1
:
8489 /* gen_helper_mtc0_performance3(cpu_env, arg); */
8490 register_name
= "Performance3";
8491 goto cp0_unimplemented
;
8492 case CP0_REG25__PERFCTL2
:
8493 /* gen_helper_mtc0_performance4(cpu_env, arg); */
8494 register_name
= "Performance4";
8495 goto cp0_unimplemented
;
8496 case CP0_REG25__PERFCNT2
:
8497 /* gen_helper_mtc0_performance5(cpu_env, arg); */
8498 register_name
= "Performance5";
8499 goto cp0_unimplemented
;
8500 case CP0_REG25__PERFCTL3
:
8501 /* gen_helper_mtc0_performance6(cpu_env, arg); */
8502 register_name
= "Performance6";
8503 goto cp0_unimplemented
;
8504 case CP0_REG25__PERFCNT3
:
8505 /* gen_helper_mtc0_performance7(cpu_env, arg); */
8506 register_name
= "Performance7";
8507 goto cp0_unimplemented
;
8509 goto cp0_unimplemented
;
8512 case CP0_REGISTER_26
:
8514 case CP0_REG26__ERRCTL
:
8515 gen_helper_mtc0_errctl(cpu_env
, arg
);
8516 ctx
->base
.is_jmp
= DISAS_STOP
;
8517 register_name
= "ErrCtl";
8520 goto cp0_unimplemented
;
8523 case CP0_REGISTER_27
:
8525 case CP0_REG27__CACHERR
:
8527 register_name
= "CacheErr";
8530 goto cp0_unimplemented
;
8533 case CP0_REGISTER_28
:
8535 case CP0_REG28__TAGLO
:
8536 case CP0_REG28__TAGLO1
:
8537 case CP0_REG28__TAGLO2
:
8538 case CP0_REG28__TAGLO3
:
8539 gen_helper_mtc0_taglo(cpu_env
, arg
);
8540 register_name
= "TagLo";
8542 case CP0_REG28__DATALO
:
8543 case CP0_REG28__DATALO1
:
8544 case CP0_REG28__DATALO2
:
8545 case CP0_REG28__DATALO3
:
8546 gen_helper_mtc0_datalo(cpu_env
, arg
);
8547 register_name
= "DataLo";
8550 goto cp0_unimplemented
;
8553 case CP0_REGISTER_29
:
8555 case CP0_REG29__TAGHI
:
8556 case CP0_REG29__TAGHI1
:
8557 case CP0_REG29__TAGHI2
:
8558 case CP0_REG29__TAGHI3
:
8559 gen_helper_mtc0_taghi(cpu_env
, arg
);
8560 register_name
= "TagHi";
8562 case CP0_REG29__DATAHI
:
8563 case CP0_REG29__DATAHI1
:
8564 case CP0_REG29__DATAHI2
:
8565 case CP0_REG29__DATAHI3
:
8566 gen_helper_mtc0_datahi(cpu_env
, arg
);
8567 register_name
= "DataHi";
8570 register_name
= "invalid sel";
8571 goto cp0_unimplemented
;
8574 case CP0_REGISTER_30
:
8576 case CP0_REG30__ERROREPC
:
8577 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8578 register_name
= "ErrorEPC";
8581 goto cp0_unimplemented
;
8584 case CP0_REGISTER_31
:
8586 case CP0_REG31__DESAVE
:
8588 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8589 register_name
= "DESAVE";
8591 case CP0_REG31__KSCRATCH1
:
8592 case CP0_REG31__KSCRATCH2
:
8593 case CP0_REG31__KSCRATCH3
:
8594 case CP0_REG31__KSCRATCH4
:
8595 case CP0_REG31__KSCRATCH5
:
8596 case CP0_REG31__KSCRATCH6
:
8597 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8598 tcg_gen_st_tl(arg
, cpu_env
,
8599 offsetof(CPUMIPSState
, CP0_KScratch
[sel
- 2]));
8600 register_name
= "KScratch";
8603 goto cp0_unimplemented
;
8607 goto cp0_unimplemented
;
8609 trace_mips_translate_c0("dmtc0", register_name
, reg
, sel
);
8611 /* For simplicity assume that all writes can cause interrupts. */
8612 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8614 * DISAS_STOP isn't sufficient, we need to ensure we break out of
8615 * translated code to check for pending interrupts.
8617 gen_save_pc(ctx
->base
.pc_next
+ 4);
8618 ctx
->base
.is_jmp
= DISAS_EXIT
;
8623 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n",
8624 register_name
, reg
, sel
);
8626 #endif /* TARGET_MIPS64 */
8628 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
8629 int u
, int sel
, int h
)
8631 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
8632 TCGv t0
= tcg_temp_new();
8634 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
8635 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
8636 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)))) {
8637 tcg_gen_movi_tl(t0
, -1);
8638 } else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
8639 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
))) {
8640 tcg_gen_movi_tl(t0
, -1);
8641 } else if (u
== 0) {
8646 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
8649 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
8659 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
8662 gen_helper_mftc0_tcbind(t0
, cpu_env
);
8665 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
8668 gen_helper_mftc0_tchalt(t0
, cpu_env
);
8671 gen_helper_mftc0_tccontext(t0
, cpu_env
);
8674 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
8677 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
8680 gen_mfc0(ctx
, t0
, rt
, sel
);
8687 gen_helper_mftc0_entryhi(t0
, cpu_env
);
8690 gen_mfc0(ctx
, t0
, rt
, sel
);
8697 gen_helper_mftc0_status(t0
, cpu_env
);
8700 gen_mfc0(ctx
, t0
, rt
, sel
);
8707 gen_helper_mftc0_cause(t0
, cpu_env
);
8717 gen_helper_mftc0_epc(t0
, cpu_env
);
8727 gen_helper_mftc0_ebase(t0
, cpu_env
);
8744 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
8754 gen_helper_mftc0_debug(t0
, cpu_env
);
8757 gen_mfc0(ctx
, t0
, rt
, sel
);
8762 gen_mfc0(ctx
, t0
, rt
, sel
);
8766 /* GPR registers. */
8768 gen_helper_1e0i(mftgpr
, t0
, rt
);
8770 /* Auxiliary CPU registers */
8774 gen_helper_1e0i(mftlo
, t0
, 0);
8777 gen_helper_1e0i(mfthi
, t0
, 0);
8780 gen_helper_1e0i(mftacx
, t0
, 0);
8783 gen_helper_1e0i(mftlo
, t0
, 1);
8786 gen_helper_1e0i(mfthi
, t0
, 1);
8789 gen_helper_1e0i(mftacx
, t0
, 1);
8792 gen_helper_1e0i(mftlo
, t0
, 2);
8795 gen_helper_1e0i(mfthi
, t0
, 2);
8798 gen_helper_1e0i(mftacx
, t0
, 2);
8801 gen_helper_1e0i(mftlo
, t0
, 3);
8804 gen_helper_1e0i(mfthi
, t0
, 3);
8807 gen_helper_1e0i(mftacx
, t0
, 3);
8810 gen_helper_mftdsp(t0
, cpu_env
);
8816 /* Floating point (COP1). */
8818 /* XXX: For now we support only a single FPU context. */
8820 TCGv_i32 fp0
= tcg_temp_new_i32();
8822 gen_load_fpr32(ctx
, fp0
, rt
);
8823 tcg_gen_ext_i32_tl(t0
, fp0
);
8824 tcg_temp_free_i32(fp0
);
8826 TCGv_i32 fp0
= tcg_temp_new_i32();
8828 gen_load_fpr32h(ctx
, fp0
, rt
);
8829 tcg_gen_ext_i32_tl(t0
, fp0
);
8830 tcg_temp_free_i32(fp0
);
8834 /* XXX: For now we support only a single FPU context. */
8835 gen_helper_1e0i(cfc1
, t0
, rt
);
8837 /* COP2: Not implemented. */
8845 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
8846 gen_store_gpr(t0
, rd
);
8852 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
8853 gen_reserved_instruction(ctx
);
8856 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
8857 int u
, int sel
, int h
)
8859 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
8860 TCGv t0
= tcg_temp_new();
8862 gen_load_gpr(t0
, rt
);
8863 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
8864 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
8865 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)))) {
8868 } else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
8869 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
))) {
8872 } else if (u
== 0) {
8877 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
8880 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
8890 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
8893 gen_helper_mttc0_tcbind(cpu_env
, t0
);
8896 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
8899 gen_helper_mttc0_tchalt(cpu_env
, t0
);
8902 gen_helper_mttc0_tccontext(cpu_env
, t0
);
8905 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
8908 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
8911 gen_mtc0(ctx
, t0
, rd
, sel
);
8918 gen_helper_mttc0_entryhi(cpu_env
, t0
);
8921 gen_mtc0(ctx
, t0
, rd
, sel
);
8928 gen_helper_mttc0_status(cpu_env
, t0
);
8931 gen_mtc0(ctx
, t0
, rd
, sel
);
8938 gen_helper_mttc0_cause(cpu_env
, t0
);
8948 gen_helper_mttc0_ebase(cpu_env
, t0
);
8958 gen_helper_mttc0_debug(cpu_env
, t0
);
8961 gen_mtc0(ctx
, t0
, rd
, sel
);
8966 gen_mtc0(ctx
, t0
, rd
, sel
);
8970 /* GPR registers. */
8972 gen_helper_0e1i(mttgpr
, t0
, rd
);
8974 /* Auxiliary CPU registers */
8978 gen_helper_0e1i(mttlo
, t0
, 0);
8981 gen_helper_0e1i(mtthi
, t0
, 0);
8984 gen_helper_0e1i(mttacx
, t0
, 0);
8987 gen_helper_0e1i(mttlo
, t0
, 1);
8990 gen_helper_0e1i(mtthi
, t0
, 1);
8993 gen_helper_0e1i(mttacx
, t0
, 1);
8996 gen_helper_0e1i(mttlo
, t0
, 2);
8999 gen_helper_0e1i(mtthi
, t0
, 2);
9002 gen_helper_0e1i(mttacx
, t0
, 2);
9005 gen_helper_0e1i(mttlo
, t0
, 3);
9008 gen_helper_0e1i(mtthi
, t0
, 3);
9011 gen_helper_0e1i(mttacx
, t0
, 3);
9014 gen_helper_mttdsp(cpu_env
, t0
);
9020 /* Floating point (COP1). */
9022 /* XXX: For now we support only a single FPU context. */
9024 TCGv_i32 fp0
= tcg_temp_new_i32();
9026 tcg_gen_trunc_tl_i32(fp0
, t0
);
9027 gen_store_fpr32(ctx
, fp0
, rd
);
9028 tcg_temp_free_i32(fp0
);
9030 TCGv_i32 fp0
= tcg_temp_new_i32();
9032 tcg_gen_trunc_tl_i32(fp0
, t0
);
9033 gen_store_fpr32h(ctx
, fp0
, rd
);
9034 tcg_temp_free_i32(fp0
);
9038 /* XXX: For now we support only a single FPU context. */
9039 gen_helper_0e2i(ctc1
, t0
, tcg_constant_i32(rd
), rt
);
9040 /* Stop translation as we may have changed hflags */
9041 ctx
->base
.is_jmp
= DISAS_STOP
;
9043 /* COP2: Not implemented. */
9051 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
9057 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
9058 gen_reserved_instruction(ctx
);
9061 static void gen_cp0(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
9064 const char *opn
= "ldst";
9066 check_cp0_enabled(ctx
);
9073 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9078 TCGv t0
= tcg_temp_new();
9080 gen_load_gpr(t0
, rt
);
9081 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9086 #if defined(TARGET_MIPS64)
9088 check_insn(ctx
, ISA_MIPS3
);
9093 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9097 check_insn(ctx
, ISA_MIPS3
);
9099 TCGv t0
= tcg_temp_new();
9101 gen_load_gpr(t0
, rt
);
9102 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9114 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9120 TCGv t0
= tcg_temp_new();
9121 gen_load_gpr(t0
, rt
);
9122 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9128 check_cp0_enabled(ctx
);
9133 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
9134 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
9138 check_cp0_enabled(ctx
);
9139 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
9140 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
9145 if (!env
->tlb
->helper_tlbwi
) {
9148 gen_helper_tlbwi(cpu_env
);
9153 if (!env
->tlb
->helper_tlbinv
) {
9156 gen_helper_tlbinv(cpu_env
);
9157 } /* treat as nop if TLBINV not supported */
9162 if (!env
->tlb
->helper_tlbinvf
) {
9165 gen_helper_tlbinvf(cpu_env
);
9166 } /* treat as nop if TLBINV not supported */
9170 if (!env
->tlb
->helper_tlbwr
) {
9173 gen_helper_tlbwr(cpu_env
);
9177 if (!env
->tlb
->helper_tlbp
) {
9180 gen_helper_tlbp(cpu_env
);
9184 if (!env
->tlb
->helper_tlbr
) {
9187 gen_helper_tlbr(cpu_env
);
9189 case OPC_ERET
: /* OPC_ERETNC */
9190 if ((ctx
->insn_flags
& ISA_MIPS_R6
) &&
9191 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9194 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
9195 if (ctx
->opcode
& (1 << bit_shift
)) {
9198 check_insn(ctx
, ISA_MIPS_R5
);
9199 gen_helper_eretnc(cpu_env
);
9203 check_insn(ctx
, ISA_MIPS2
);
9204 gen_helper_eret(cpu_env
);
9206 ctx
->base
.is_jmp
= DISAS_EXIT
;
9211 check_insn(ctx
, ISA_MIPS_R1
);
9212 if ((ctx
->insn_flags
& ISA_MIPS_R6
) &&
9213 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9216 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
9218 gen_reserved_instruction(ctx
);
9220 gen_helper_deret(cpu_env
);
9221 ctx
->base
.is_jmp
= DISAS_EXIT
;
9226 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS_R1
);
9227 if ((ctx
->insn_flags
& ISA_MIPS_R6
) &&
9228 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9231 /* If we get an exception, we want to restart at next instruction */
9232 ctx
->base
.pc_next
+= 4;
9233 save_cpu_state(ctx
, 1);
9234 ctx
->base
.pc_next
-= 4;
9235 gen_helper_wait(cpu_env
);
9236 ctx
->base
.is_jmp
= DISAS_NORETURN
;
9241 gen_reserved_instruction(ctx
);
9244 (void)opn
; /* avoid a compiler warning */
9246 #endif /* !CONFIG_USER_ONLY */
9248 /* CP1 Branches (before delay slot) */
9249 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
9250 int32_t cc
, int32_t offset
)
9252 target_ulong btarget
;
9253 TCGv_i32 t0
= tcg_temp_new_i32();
9255 if ((ctx
->insn_flags
& ISA_MIPS_R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9256 gen_reserved_instruction(ctx
);
9261 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R1
);
9264 btarget
= ctx
->base
.pc_next
+ 4 + offset
;
9268 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9269 tcg_gen_not_i32(t0
, t0
);
9270 tcg_gen_andi_i32(t0
, t0
, 1);
9271 tcg_gen_extu_i32_tl(bcond
, t0
);
9274 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9275 tcg_gen_not_i32(t0
, t0
);
9276 tcg_gen_andi_i32(t0
, t0
, 1);
9277 tcg_gen_extu_i32_tl(bcond
, t0
);
9280 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9281 tcg_gen_andi_i32(t0
, t0
, 1);
9282 tcg_gen_extu_i32_tl(bcond
, t0
);
9285 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9286 tcg_gen_andi_i32(t0
, t0
, 1);
9287 tcg_gen_extu_i32_tl(bcond
, t0
);
9289 ctx
->hflags
|= MIPS_HFLAG_BL
;
9293 TCGv_i32 t1
= tcg_temp_new_i32();
9294 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9295 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
9296 tcg_gen_nand_i32(t0
, t0
, t1
);
9297 tcg_temp_free_i32(t1
);
9298 tcg_gen_andi_i32(t0
, t0
, 1);
9299 tcg_gen_extu_i32_tl(bcond
, t0
);
9304 TCGv_i32 t1
= tcg_temp_new_i32();
9305 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9306 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
9307 tcg_gen_or_i32(t0
, t0
, t1
);
9308 tcg_temp_free_i32(t1
);
9309 tcg_gen_andi_i32(t0
, t0
, 1);
9310 tcg_gen_extu_i32_tl(bcond
, t0
);
9315 TCGv_i32 t1
= tcg_temp_new_i32();
9316 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9317 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
9318 tcg_gen_and_i32(t0
, t0
, t1
);
9319 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 2));
9320 tcg_gen_and_i32(t0
, t0
, t1
);
9321 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 3));
9322 tcg_gen_nand_i32(t0
, t0
, t1
);
9323 tcg_temp_free_i32(t1
);
9324 tcg_gen_andi_i32(t0
, t0
, 1);
9325 tcg_gen_extu_i32_tl(bcond
, t0
);
9330 TCGv_i32 t1
= tcg_temp_new_i32();
9331 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9332 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
9333 tcg_gen_or_i32(t0
, t0
, t1
);
9334 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 2));
9335 tcg_gen_or_i32(t0
, t0
, t1
);
9336 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 3));
9337 tcg_gen_or_i32(t0
, t0
, t1
);
9338 tcg_temp_free_i32(t1
);
9339 tcg_gen_andi_i32(t0
, t0
, 1);
9340 tcg_gen_extu_i32_tl(bcond
, t0
);
9343 ctx
->hflags
|= MIPS_HFLAG_BC
;
9346 MIPS_INVAL("cp1 cond branch");
9347 gen_reserved_instruction(ctx
);
9350 ctx
->btarget
= btarget
;
9351 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
9353 tcg_temp_free_i32(t0
);
9356 /* R6 CP1 Branches */
9357 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
9358 int32_t ft
, int32_t offset
,
9361 target_ulong btarget
;
9362 TCGv_i64 t0
= tcg_temp_new_i64();
9364 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
9365 #ifdef MIPS_DEBUG_DISAS
9366 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
9367 "\n", ctx
->base
.pc_next
);
9369 gen_reserved_instruction(ctx
);
9373 gen_load_fpr64(ctx
, t0
, ft
);
9374 tcg_gen_andi_i64(t0
, t0
, 1);
9376 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
9380 tcg_gen_xori_i64(t0
, t0
, 1);
9381 ctx
->hflags
|= MIPS_HFLAG_BC
;
9384 /* t0 already set */
9385 ctx
->hflags
|= MIPS_HFLAG_BC
;
9388 MIPS_INVAL("cp1 cond branch");
9389 gen_reserved_instruction(ctx
);
9393 tcg_gen_trunc_i64_tl(bcond
, t0
);
9395 ctx
->btarget
= btarget
;
9397 switch (delayslot_size
) {
9399 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
9402 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
9407 tcg_temp_free_i64(t0
);
9410 /* Coprocessor 1 (FPU) */
9412 #define FOP(func, fmt) (((fmt) << 21) | (func))
9415 OPC_ADD_S
= FOP(0, FMT_S
),
9416 OPC_SUB_S
= FOP(1, FMT_S
),
9417 OPC_MUL_S
= FOP(2, FMT_S
),
9418 OPC_DIV_S
= FOP(3, FMT_S
),
9419 OPC_SQRT_S
= FOP(4, FMT_S
),
9420 OPC_ABS_S
= FOP(5, FMT_S
),
9421 OPC_MOV_S
= FOP(6, FMT_S
),
9422 OPC_NEG_S
= FOP(7, FMT_S
),
9423 OPC_ROUND_L_S
= FOP(8, FMT_S
),
9424 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
9425 OPC_CEIL_L_S
= FOP(10, FMT_S
),
9426 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
9427 OPC_ROUND_W_S
= FOP(12, FMT_S
),
9428 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
9429 OPC_CEIL_W_S
= FOP(14, FMT_S
),
9430 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
9431 OPC_SEL_S
= FOP(16, FMT_S
),
9432 OPC_MOVCF_S
= FOP(17, FMT_S
),
9433 OPC_MOVZ_S
= FOP(18, FMT_S
),
9434 OPC_MOVN_S
= FOP(19, FMT_S
),
9435 OPC_SELEQZ_S
= FOP(20, FMT_S
),
9436 OPC_RECIP_S
= FOP(21, FMT_S
),
9437 OPC_RSQRT_S
= FOP(22, FMT_S
),
9438 OPC_SELNEZ_S
= FOP(23, FMT_S
),
9439 OPC_MADDF_S
= FOP(24, FMT_S
),
9440 OPC_MSUBF_S
= FOP(25, FMT_S
),
9441 OPC_RINT_S
= FOP(26, FMT_S
),
9442 OPC_CLASS_S
= FOP(27, FMT_S
),
9443 OPC_MIN_S
= FOP(28, FMT_S
),
9444 OPC_RECIP2_S
= FOP(28, FMT_S
),
9445 OPC_MINA_S
= FOP(29, FMT_S
),
9446 OPC_RECIP1_S
= FOP(29, FMT_S
),
9447 OPC_MAX_S
= FOP(30, FMT_S
),
9448 OPC_RSQRT1_S
= FOP(30, FMT_S
),
9449 OPC_MAXA_S
= FOP(31, FMT_S
),
9450 OPC_RSQRT2_S
= FOP(31, FMT_S
),
9451 OPC_CVT_D_S
= FOP(33, FMT_S
),
9452 OPC_CVT_W_S
= FOP(36, FMT_S
),
9453 OPC_CVT_L_S
= FOP(37, FMT_S
),
9454 OPC_CVT_PS_S
= FOP(38, FMT_S
),
9455 OPC_CMP_F_S
= FOP(48, FMT_S
),
9456 OPC_CMP_UN_S
= FOP(49, FMT_S
),
9457 OPC_CMP_EQ_S
= FOP(50, FMT_S
),
9458 OPC_CMP_UEQ_S
= FOP(51, FMT_S
),
9459 OPC_CMP_OLT_S
= FOP(52, FMT_S
),
9460 OPC_CMP_ULT_S
= FOP(53, FMT_S
),
9461 OPC_CMP_OLE_S
= FOP(54, FMT_S
),
9462 OPC_CMP_ULE_S
= FOP(55, FMT_S
),
9463 OPC_CMP_SF_S
= FOP(56, FMT_S
),
9464 OPC_CMP_NGLE_S
= FOP(57, FMT_S
),
9465 OPC_CMP_SEQ_S
= FOP(58, FMT_S
),
9466 OPC_CMP_NGL_S
= FOP(59, FMT_S
),
9467 OPC_CMP_LT_S
= FOP(60, FMT_S
),
9468 OPC_CMP_NGE_S
= FOP(61, FMT_S
),
9469 OPC_CMP_LE_S
= FOP(62, FMT_S
),
9470 OPC_CMP_NGT_S
= FOP(63, FMT_S
),
9472 OPC_ADD_D
= FOP(0, FMT_D
),
9473 OPC_SUB_D
= FOP(1, FMT_D
),
9474 OPC_MUL_D
= FOP(2, FMT_D
),
9475 OPC_DIV_D
= FOP(3, FMT_D
),
9476 OPC_SQRT_D
= FOP(4, FMT_D
),
9477 OPC_ABS_D
= FOP(5, FMT_D
),
9478 OPC_MOV_D
= FOP(6, FMT_D
),
9479 OPC_NEG_D
= FOP(7, FMT_D
),
9480 OPC_ROUND_L_D
= FOP(8, FMT_D
),
9481 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
9482 OPC_CEIL_L_D
= FOP(10, FMT_D
),
9483 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
9484 OPC_ROUND_W_D
= FOP(12, FMT_D
),
9485 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
9486 OPC_CEIL_W_D
= FOP(14, FMT_D
),
9487 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
9488 OPC_SEL_D
= FOP(16, FMT_D
),
9489 OPC_MOVCF_D
= FOP(17, FMT_D
),
9490 OPC_MOVZ_D
= FOP(18, FMT_D
),
9491 OPC_MOVN_D
= FOP(19, FMT_D
),
9492 OPC_SELEQZ_D
= FOP(20, FMT_D
),
9493 OPC_RECIP_D
= FOP(21, FMT_D
),
9494 OPC_RSQRT_D
= FOP(22, FMT_D
),
9495 OPC_SELNEZ_D
= FOP(23, FMT_D
),
9496 OPC_MADDF_D
= FOP(24, FMT_D
),
9497 OPC_MSUBF_D
= FOP(25, FMT_D
),
9498 OPC_RINT_D
= FOP(26, FMT_D
),
9499 OPC_CLASS_D
= FOP(27, FMT_D
),
9500 OPC_MIN_D
= FOP(28, FMT_D
),
9501 OPC_RECIP2_D
= FOP(28, FMT_D
),
9502 OPC_MINA_D
= FOP(29, FMT_D
),
9503 OPC_RECIP1_D
= FOP(29, FMT_D
),
9504 OPC_MAX_D
= FOP(30, FMT_D
),
9505 OPC_RSQRT1_D
= FOP(30, FMT_D
),
9506 OPC_MAXA_D
= FOP(31, FMT_D
),
9507 OPC_RSQRT2_D
= FOP(31, FMT_D
),
9508 OPC_CVT_S_D
= FOP(32, FMT_D
),
9509 OPC_CVT_W_D
= FOP(36, FMT_D
),
9510 OPC_CVT_L_D
= FOP(37, FMT_D
),
9511 OPC_CMP_F_D
= FOP(48, FMT_D
),
9512 OPC_CMP_UN_D
= FOP(49, FMT_D
),
9513 OPC_CMP_EQ_D
= FOP(50, FMT_D
),
9514 OPC_CMP_UEQ_D
= FOP(51, FMT_D
),
9515 OPC_CMP_OLT_D
= FOP(52, FMT_D
),
9516 OPC_CMP_ULT_D
= FOP(53, FMT_D
),
9517 OPC_CMP_OLE_D
= FOP(54, FMT_D
),
9518 OPC_CMP_ULE_D
= FOP(55, FMT_D
),
9519 OPC_CMP_SF_D
= FOP(56, FMT_D
),
9520 OPC_CMP_NGLE_D
= FOP(57, FMT_D
),
9521 OPC_CMP_SEQ_D
= FOP(58, FMT_D
),
9522 OPC_CMP_NGL_D
= FOP(59, FMT_D
),
9523 OPC_CMP_LT_D
= FOP(60, FMT_D
),
9524 OPC_CMP_NGE_D
= FOP(61, FMT_D
),
9525 OPC_CMP_LE_D
= FOP(62, FMT_D
),
9526 OPC_CMP_NGT_D
= FOP(63, FMT_D
),
9528 OPC_CVT_S_W
= FOP(32, FMT_W
),
9529 OPC_CVT_D_W
= FOP(33, FMT_W
),
9530 OPC_CVT_S_L
= FOP(32, FMT_L
),
9531 OPC_CVT_D_L
= FOP(33, FMT_L
),
9532 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
9534 OPC_ADD_PS
= FOP(0, FMT_PS
),
9535 OPC_SUB_PS
= FOP(1, FMT_PS
),
9536 OPC_MUL_PS
= FOP(2, FMT_PS
),
9537 OPC_DIV_PS
= FOP(3, FMT_PS
),
9538 OPC_ABS_PS
= FOP(5, FMT_PS
),
9539 OPC_MOV_PS
= FOP(6, FMT_PS
),
9540 OPC_NEG_PS
= FOP(7, FMT_PS
),
9541 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
9542 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
9543 OPC_MOVN_PS
= FOP(19, FMT_PS
),
9544 OPC_ADDR_PS
= FOP(24, FMT_PS
),
9545 OPC_MULR_PS
= FOP(26, FMT_PS
),
9546 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
9547 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
9548 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
9549 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
9551 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
9552 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
9553 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
9554 OPC_PLL_PS
= FOP(44, FMT_PS
),
9555 OPC_PLU_PS
= FOP(45, FMT_PS
),
9556 OPC_PUL_PS
= FOP(46, FMT_PS
),
9557 OPC_PUU_PS
= FOP(47, FMT_PS
),
9558 OPC_CMP_F_PS
= FOP(48, FMT_PS
),
9559 OPC_CMP_UN_PS
= FOP(49, FMT_PS
),
9560 OPC_CMP_EQ_PS
= FOP(50, FMT_PS
),
9561 OPC_CMP_UEQ_PS
= FOP(51, FMT_PS
),
9562 OPC_CMP_OLT_PS
= FOP(52, FMT_PS
),
9563 OPC_CMP_ULT_PS
= FOP(53, FMT_PS
),
9564 OPC_CMP_OLE_PS
= FOP(54, FMT_PS
),
9565 OPC_CMP_ULE_PS
= FOP(55, FMT_PS
),
9566 OPC_CMP_SF_PS
= FOP(56, FMT_PS
),
9567 OPC_CMP_NGLE_PS
= FOP(57, FMT_PS
),
9568 OPC_CMP_SEQ_PS
= FOP(58, FMT_PS
),
9569 OPC_CMP_NGL_PS
= FOP(59, FMT_PS
),
9570 OPC_CMP_LT_PS
= FOP(60, FMT_PS
),
9571 OPC_CMP_NGE_PS
= FOP(61, FMT_PS
),
9572 OPC_CMP_LE_PS
= FOP(62, FMT_PS
),
9573 OPC_CMP_NGT_PS
= FOP(63, FMT_PS
),
9577 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
9578 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
9579 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
9580 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
9581 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
9582 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
9583 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
9584 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
9585 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
9586 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
9587 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
9588 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
9589 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
9590 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
9591 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
9592 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
9593 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
9594 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
9595 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
9596 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
9597 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
9598 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
9600 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
9601 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
9602 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
9603 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
9604 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
9605 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
9606 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
9607 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
9608 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
9609 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
9610 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
9611 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
9612 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
9613 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
9614 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
9615 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
9616 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
9617 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
9618 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
9619 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
9620 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
9621 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
9624 static void gen_cp1(DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
9626 TCGv t0
= tcg_temp_new();
9631 TCGv_i32 fp0
= tcg_temp_new_i32();
9633 gen_load_fpr32(ctx
, fp0
, fs
);
9634 tcg_gen_ext_i32_tl(t0
, fp0
);
9635 tcg_temp_free_i32(fp0
);
9637 gen_store_gpr(t0
, rt
);
9640 gen_load_gpr(t0
, rt
);
9642 TCGv_i32 fp0
= tcg_temp_new_i32();
9644 tcg_gen_trunc_tl_i32(fp0
, t0
);
9645 gen_store_fpr32(ctx
, fp0
, fs
);
9646 tcg_temp_free_i32(fp0
);
9650 gen_helper_1e0i(cfc1
, t0
, fs
);
9651 gen_store_gpr(t0
, rt
);
9654 gen_load_gpr(t0
, rt
);
9655 save_cpu_state(ctx
, 0);
9656 gen_helper_0e2i(ctc1
, t0
, tcg_constant_i32(fs
), rt
);
9657 /* Stop translation as we may have changed hflags */
9658 ctx
->base
.is_jmp
= DISAS_STOP
;
9660 #if defined(TARGET_MIPS64)
9662 gen_load_fpr64(ctx
, t0
, fs
);
9663 gen_store_gpr(t0
, rt
);
9666 gen_load_gpr(t0
, rt
);
9667 gen_store_fpr64(ctx
, t0
, fs
);
9672 TCGv_i32 fp0
= tcg_temp_new_i32();
9674 gen_load_fpr32h(ctx
, fp0
, fs
);
9675 tcg_gen_ext_i32_tl(t0
, fp0
);
9676 tcg_temp_free_i32(fp0
);
9678 gen_store_gpr(t0
, rt
);
9681 gen_load_gpr(t0
, rt
);
9683 TCGv_i32 fp0
= tcg_temp_new_i32();
9685 tcg_gen_trunc_tl_i32(fp0
, t0
);
9686 gen_store_fpr32h(ctx
, fp0
, fs
);
9687 tcg_temp_free_i32(fp0
);
9691 MIPS_INVAL("cp1 move");
9692 gen_reserved_instruction(ctx
);
9700 static void gen_movci(DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
9717 l1
= gen_new_label();
9718 t0
= tcg_temp_new_i32();
9719 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
9720 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
9721 tcg_temp_free_i32(t0
);
9722 gen_load_gpr(cpu_gpr
[rd
], rs
);
9726 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
9730 TCGv_i32 t0
= tcg_temp_new_i32();
9731 TCGLabel
*l1
= gen_new_label();
9739 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
9740 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
9741 gen_load_fpr32(ctx
, t0
, fs
);
9742 gen_store_fpr32(ctx
, t0
, fd
);
9744 tcg_temp_free_i32(t0
);
9747 static inline void gen_movcf_d(DisasContext
*ctx
, int fs
, int fd
, int cc
,
9751 TCGv_i32 t0
= tcg_temp_new_i32();
9753 TCGLabel
*l1
= gen_new_label();
9761 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
9762 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
9763 tcg_temp_free_i32(t0
);
9764 fp0
= tcg_temp_new_i64();
9765 gen_load_fpr64(ctx
, fp0
, fs
);
9766 gen_store_fpr64(ctx
, fp0
, fd
);
9767 tcg_temp_free_i64(fp0
);
9771 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
9775 TCGv_i32 t0
= tcg_temp_new_i32();
9776 TCGLabel
*l1
= gen_new_label();
9777 TCGLabel
*l2
= gen_new_label();
9785 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
9786 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
9787 gen_load_fpr32(ctx
, t0
, fs
);
9788 gen_store_fpr32(ctx
, t0
, fd
);
9791 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+ 1));
9792 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
9793 gen_load_fpr32h(ctx
, t0
, fs
);
9794 gen_store_fpr32h(ctx
, t0
, fd
);
9795 tcg_temp_free_i32(t0
);
9799 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
9802 TCGv_i32 t1
= tcg_const_i32(0);
9803 TCGv_i32 fp0
= tcg_temp_new_i32();
9804 TCGv_i32 fp1
= tcg_temp_new_i32();
9805 TCGv_i32 fp2
= tcg_temp_new_i32();
9806 gen_load_fpr32(ctx
, fp0
, fd
);
9807 gen_load_fpr32(ctx
, fp1
, ft
);
9808 gen_load_fpr32(ctx
, fp2
, fs
);
9812 tcg_gen_andi_i32(fp0
, fp0
, 1);
9813 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
9816 tcg_gen_andi_i32(fp1
, fp1
, 1);
9817 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
9820 tcg_gen_andi_i32(fp1
, fp1
, 1);
9821 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
9824 MIPS_INVAL("gen_sel_s");
9825 gen_reserved_instruction(ctx
);
9829 gen_store_fpr32(ctx
, fp0
, fd
);
9830 tcg_temp_free_i32(fp2
);
9831 tcg_temp_free_i32(fp1
);
9832 tcg_temp_free_i32(fp0
);
9833 tcg_temp_free_i32(t1
);
9836 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
9839 TCGv_i64 t1
= tcg_const_i64(0);
9840 TCGv_i64 fp0
= tcg_temp_new_i64();
9841 TCGv_i64 fp1
= tcg_temp_new_i64();
9842 TCGv_i64 fp2
= tcg_temp_new_i64();
9843 gen_load_fpr64(ctx
, fp0
, fd
);
9844 gen_load_fpr64(ctx
, fp1
, ft
);
9845 gen_load_fpr64(ctx
, fp2
, fs
);
9849 tcg_gen_andi_i64(fp0
, fp0
, 1);
9850 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
9853 tcg_gen_andi_i64(fp1
, fp1
, 1);
9854 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
9857 tcg_gen_andi_i64(fp1
, fp1
, 1);
9858 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
9861 MIPS_INVAL("gen_sel_d");
9862 gen_reserved_instruction(ctx
);
9866 gen_store_fpr64(ctx
, fp0
, fd
);
9867 tcg_temp_free_i64(fp2
);
9868 tcg_temp_free_i64(fp1
);
9869 tcg_temp_free_i64(fp0
);
9870 tcg_temp_free_i64(t1
);
9873 static void gen_farith(DisasContext
*ctx
, enum fopcode op1
,
9874 int ft
, int fs
, int fd
, int cc
)
9876 uint32_t func
= ctx
->opcode
& 0x3f;
9880 TCGv_i32 fp0
= tcg_temp_new_i32();
9881 TCGv_i32 fp1
= tcg_temp_new_i32();
9883 gen_load_fpr32(ctx
, fp0
, fs
);
9884 gen_load_fpr32(ctx
, fp1
, ft
);
9885 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
9886 tcg_temp_free_i32(fp1
);
9887 gen_store_fpr32(ctx
, fp0
, fd
);
9888 tcg_temp_free_i32(fp0
);
9893 TCGv_i32 fp0
= tcg_temp_new_i32();
9894 TCGv_i32 fp1
= tcg_temp_new_i32();
9896 gen_load_fpr32(ctx
, fp0
, fs
);
9897 gen_load_fpr32(ctx
, fp1
, ft
);
9898 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
9899 tcg_temp_free_i32(fp1
);
9900 gen_store_fpr32(ctx
, fp0
, fd
);
9901 tcg_temp_free_i32(fp0
);
9906 TCGv_i32 fp0
= tcg_temp_new_i32();
9907 TCGv_i32 fp1
= tcg_temp_new_i32();
9909 gen_load_fpr32(ctx
, fp0
, fs
);
9910 gen_load_fpr32(ctx
, fp1
, ft
);
9911 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
9912 tcg_temp_free_i32(fp1
);
9913 gen_store_fpr32(ctx
, fp0
, fd
);
9914 tcg_temp_free_i32(fp0
);
9919 TCGv_i32 fp0
= tcg_temp_new_i32();
9920 TCGv_i32 fp1
= tcg_temp_new_i32();
9922 gen_load_fpr32(ctx
, fp0
, fs
);
9923 gen_load_fpr32(ctx
, fp1
, ft
);
9924 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
9925 tcg_temp_free_i32(fp1
);
9926 gen_store_fpr32(ctx
, fp0
, fd
);
9927 tcg_temp_free_i32(fp0
);
9932 TCGv_i32 fp0
= tcg_temp_new_i32();
9934 gen_load_fpr32(ctx
, fp0
, fs
);
9935 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
9936 gen_store_fpr32(ctx
, fp0
, fd
);
9937 tcg_temp_free_i32(fp0
);
9942 TCGv_i32 fp0
= tcg_temp_new_i32();
9944 gen_load_fpr32(ctx
, fp0
, fs
);
9946 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
9948 gen_helper_float_abs_s(fp0
, fp0
);
9950 gen_store_fpr32(ctx
, fp0
, fd
);
9951 tcg_temp_free_i32(fp0
);
9956 TCGv_i32 fp0
= tcg_temp_new_i32();
9958 gen_load_fpr32(ctx
, fp0
, fs
);
9959 gen_store_fpr32(ctx
, fp0
, fd
);
9960 tcg_temp_free_i32(fp0
);
9965 TCGv_i32 fp0
= tcg_temp_new_i32();
9967 gen_load_fpr32(ctx
, fp0
, fs
);
9969 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
9971 gen_helper_float_chs_s(fp0
, fp0
);
9973 gen_store_fpr32(ctx
, fp0
, fd
);
9974 tcg_temp_free_i32(fp0
);
9978 check_cp1_64bitmode(ctx
);
9980 TCGv_i32 fp32
= tcg_temp_new_i32();
9981 TCGv_i64 fp64
= tcg_temp_new_i64();
9983 gen_load_fpr32(ctx
, fp32
, fs
);
9985 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
9987 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
9989 tcg_temp_free_i32(fp32
);
9990 gen_store_fpr64(ctx
, fp64
, fd
);
9991 tcg_temp_free_i64(fp64
);
9995 check_cp1_64bitmode(ctx
);
9997 TCGv_i32 fp32
= tcg_temp_new_i32();
9998 TCGv_i64 fp64
= tcg_temp_new_i64();
10000 gen_load_fpr32(ctx
, fp32
, fs
);
10001 if (ctx
->nan2008
) {
10002 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
10004 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
10006 tcg_temp_free_i32(fp32
);
10007 gen_store_fpr64(ctx
, fp64
, fd
);
10008 tcg_temp_free_i64(fp64
);
10012 check_cp1_64bitmode(ctx
);
10014 TCGv_i32 fp32
= tcg_temp_new_i32();
10015 TCGv_i64 fp64
= tcg_temp_new_i64();
10017 gen_load_fpr32(ctx
, fp32
, fs
);
10018 if (ctx
->nan2008
) {
10019 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
10021 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
10023 tcg_temp_free_i32(fp32
);
10024 gen_store_fpr64(ctx
, fp64
, fd
);
10025 tcg_temp_free_i64(fp64
);
10028 case OPC_FLOOR_L_S
:
10029 check_cp1_64bitmode(ctx
);
10031 TCGv_i32 fp32
= tcg_temp_new_i32();
10032 TCGv_i64 fp64
= tcg_temp_new_i64();
10034 gen_load_fpr32(ctx
, fp32
, fs
);
10035 if (ctx
->nan2008
) {
10036 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
10038 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
10040 tcg_temp_free_i32(fp32
);
10041 gen_store_fpr64(ctx
, fp64
, fd
);
10042 tcg_temp_free_i64(fp64
);
10045 case OPC_ROUND_W_S
:
10047 TCGv_i32 fp0
= tcg_temp_new_i32();
10049 gen_load_fpr32(ctx
, fp0
, fs
);
10050 if (ctx
->nan2008
) {
10051 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
10053 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
10055 gen_store_fpr32(ctx
, fp0
, fd
);
10056 tcg_temp_free_i32(fp0
);
10059 case OPC_TRUNC_W_S
:
10061 TCGv_i32 fp0
= tcg_temp_new_i32();
10063 gen_load_fpr32(ctx
, fp0
, fs
);
10064 if (ctx
->nan2008
) {
10065 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
10067 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
10069 gen_store_fpr32(ctx
, fp0
, fd
);
10070 tcg_temp_free_i32(fp0
);
10075 TCGv_i32 fp0
= tcg_temp_new_i32();
10077 gen_load_fpr32(ctx
, fp0
, fs
);
10078 if (ctx
->nan2008
) {
10079 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
10081 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
10083 gen_store_fpr32(ctx
, fp0
, fd
);
10084 tcg_temp_free_i32(fp0
);
10087 case OPC_FLOOR_W_S
:
10089 TCGv_i32 fp0
= tcg_temp_new_i32();
10091 gen_load_fpr32(ctx
, fp0
, fs
);
10092 if (ctx
->nan2008
) {
10093 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
10095 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
10097 gen_store_fpr32(ctx
, fp0
, fd
);
10098 tcg_temp_free_i32(fp0
);
10102 check_insn(ctx
, ISA_MIPS_R6
);
10103 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10106 check_insn(ctx
, ISA_MIPS_R6
);
10107 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10110 check_insn(ctx
, ISA_MIPS_R6
);
10111 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10114 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10115 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10118 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10120 TCGLabel
*l1
= gen_new_label();
10124 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10126 fp0
= tcg_temp_new_i32();
10127 gen_load_fpr32(ctx
, fp0
, fs
);
10128 gen_store_fpr32(ctx
, fp0
, fd
);
10129 tcg_temp_free_i32(fp0
);
10134 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10136 TCGLabel
*l1
= gen_new_label();
10140 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10141 fp0
= tcg_temp_new_i32();
10142 gen_load_fpr32(ctx
, fp0
, fs
);
10143 gen_store_fpr32(ctx
, fp0
, fd
);
10144 tcg_temp_free_i32(fp0
);
10151 TCGv_i32 fp0
= tcg_temp_new_i32();
10153 gen_load_fpr32(ctx
, fp0
, fs
);
10154 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
10155 gen_store_fpr32(ctx
, fp0
, fd
);
10156 tcg_temp_free_i32(fp0
);
10161 TCGv_i32 fp0
= tcg_temp_new_i32();
10163 gen_load_fpr32(ctx
, fp0
, fs
);
10164 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
10165 gen_store_fpr32(ctx
, fp0
, fd
);
10166 tcg_temp_free_i32(fp0
);
10170 check_insn(ctx
, ISA_MIPS_R6
);
10172 TCGv_i32 fp0
= tcg_temp_new_i32();
10173 TCGv_i32 fp1
= tcg_temp_new_i32();
10174 TCGv_i32 fp2
= tcg_temp_new_i32();
10175 gen_load_fpr32(ctx
, fp0
, fs
);
10176 gen_load_fpr32(ctx
, fp1
, ft
);
10177 gen_load_fpr32(ctx
, fp2
, fd
);
10178 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10179 gen_store_fpr32(ctx
, fp2
, fd
);
10180 tcg_temp_free_i32(fp2
);
10181 tcg_temp_free_i32(fp1
);
10182 tcg_temp_free_i32(fp0
);
10186 check_insn(ctx
, ISA_MIPS_R6
);
10188 TCGv_i32 fp0
= tcg_temp_new_i32();
10189 TCGv_i32 fp1
= tcg_temp_new_i32();
10190 TCGv_i32 fp2
= tcg_temp_new_i32();
10191 gen_load_fpr32(ctx
, fp0
, fs
);
10192 gen_load_fpr32(ctx
, fp1
, ft
);
10193 gen_load_fpr32(ctx
, fp2
, fd
);
10194 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10195 gen_store_fpr32(ctx
, fp2
, fd
);
10196 tcg_temp_free_i32(fp2
);
10197 tcg_temp_free_i32(fp1
);
10198 tcg_temp_free_i32(fp0
);
10202 check_insn(ctx
, ISA_MIPS_R6
);
10204 TCGv_i32 fp0
= tcg_temp_new_i32();
10205 gen_load_fpr32(ctx
, fp0
, fs
);
10206 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
10207 gen_store_fpr32(ctx
, fp0
, fd
);
10208 tcg_temp_free_i32(fp0
);
10212 check_insn(ctx
, ISA_MIPS_R6
);
10214 TCGv_i32 fp0
= tcg_temp_new_i32();
10215 gen_load_fpr32(ctx
, fp0
, fs
);
10216 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
10217 gen_store_fpr32(ctx
, fp0
, fd
);
10218 tcg_temp_free_i32(fp0
);
10221 case OPC_MIN_S
: /* OPC_RECIP2_S */
10222 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10224 TCGv_i32 fp0
= tcg_temp_new_i32();
10225 TCGv_i32 fp1
= tcg_temp_new_i32();
10226 TCGv_i32 fp2
= tcg_temp_new_i32();
10227 gen_load_fpr32(ctx
, fp0
, fs
);
10228 gen_load_fpr32(ctx
, fp1
, ft
);
10229 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
10230 gen_store_fpr32(ctx
, fp2
, fd
);
10231 tcg_temp_free_i32(fp2
);
10232 tcg_temp_free_i32(fp1
);
10233 tcg_temp_free_i32(fp0
);
10236 check_cp1_64bitmode(ctx
);
10238 TCGv_i32 fp0
= tcg_temp_new_i32();
10239 TCGv_i32 fp1
= tcg_temp_new_i32();
10241 gen_load_fpr32(ctx
, fp0
, fs
);
10242 gen_load_fpr32(ctx
, fp1
, ft
);
10243 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
10244 tcg_temp_free_i32(fp1
);
10245 gen_store_fpr32(ctx
, fp0
, fd
);
10246 tcg_temp_free_i32(fp0
);
10250 case OPC_MINA_S
: /* OPC_RECIP1_S */
10251 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10253 TCGv_i32 fp0
= tcg_temp_new_i32();
10254 TCGv_i32 fp1
= tcg_temp_new_i32();
10255 TCGv_i32 fp2
= tcg_temp_new_i32();
10256 gen_load_fpr32(ctx
, fp0
, fs
);
10257 gen_load_fpr32(ctx
, fp1
, ft
);
10258 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
10259 gen_store_fpr32(ctx
, fp2
, fd
);
10260 tcg_temp_free_i32(fp2
);
10261 tcg_temp_free_i32(fp1
);
10262 tcg_temp_free_i32(fp0
);
10265 check_cp1_64bitmode(ctx
);
10267 TCGv_i32 fp0
= tcg_temp_new_i32();
10269 gen_load_fpr32(ctx
, fp0
, fs
);
10270 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
10271 gen_store_fpr32(ctx
, fp0
, fd
);
10272 tcg_temp_free_i32(fp0
);
10276 case OPC_MAX_S
: /* OPC_RSQRT1_S */
10277 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10279 TCGv_i32 fp0
= tcg_temp_new_i32();
10280 TCGv_i32 fp1
= tcg_temp_new_i32();
10281 gen_load_fpr32(ctx
, fp0
, fs
);
10282 gen_load_fpr32(ctx
, fp1
, ft
);
10283 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
10284 gen_store_fpr32(ctx
, fp1
, fd
);
10285 tcg_temp_free_i32(fp1
);
10286 tcg_temp_free_i32(fp0
);
10289 check_cp1_64bitmode(ctx
);
10291 TCGv_i32 fp0
= tcg_temp_new_i32();
10293 gen_load_fpr32(ctx
, fp0
, fs
);
10294 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
10295 gen_store_fpr32(ctx
, fp0
, fd
);
10296 tcg_temp_free_i32(fp0
);
10300 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
10301 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10303 TCGv_i32 fp0
= tcg_temp_new_i32();
10304 TCGv_i32 fp1
= tcg_temp_new_i32();
10305 gen_load_fpr32(ctx
, fp0
, fs
);
10306 gen_load_fpr32(ctx
, fp1
, ft
);
10307 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
10308 gen_store_fpr32(ctx
, fp1
, fd
);
10309 tcg_temp_free_i32(fp1
);
10310 tcg_temp_free_i32(fp0
);
10313 check_cp1_64bitmode(ctx
);
10315 TCGv_i32 fp0
= tcg_temp_new_i32();
10316 TCGv_i32 fp1
= tcg_temp_new_i32();
10318 gen_load_fpr32(ctx
, fp0
, fs
);
10319 gen_load_fpr32(ctx
, fp1
, ft
);
10320 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
10321 tcg_temp_free_i32(fp1
);
10322 gen_store_fpr32(ctx
, fp0
, fd
);
10323 tcg_temp_free_i32(fp0
);
10328 check_cp1_registers(ctx
, fd
);
10330 TCGv_i32 fp32
= tcg_temp_new_i32();
10331 TCGv_i64 fp64
= tcg_temp_new_i64();
10333 gen_load_fpr32(ctx
, fp32
, fs
);
10334 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
10335 tcg_temp_free_i32(fp32
);
10336 gen_store_fpr64(ctx
, fp64
, fd
);
10337 tcg_temp_free_i64(fp64
);
10342 TCGv_i32 fp0
= tcg_temp_new_i32();
10344 gen_load_fpr32(ctx
, fp0
, fs
);
10345 if (ctx
->nan2008
) {
10346 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
10348 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
10350 gen_store_fpr32(ctx
, fp0
, fd
);
10351 tcg_temp_free_i32(fp0
);
10355 check_cp1_64bitmode(ctx
);
10357 TCGv_i32 fp32
= tcg_temp_new_i32();
10358 TCGv_i64 fp64
= tcg_temp_new_i64();
10360 gen_load_fpr32(ctx
, fp32
, fs
);
10361 if (ctx
->nan2008
) {
10362 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
10364 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
10366 tcg_temp_free_i32(fp32
);
10367 gen_store_fpr64(ctx
, fp64
, fd
);
10368 tcg_temp_free_i64(fp64
);
10374 TCGv_i64 fp64
= tcg_temp_new_i64();
10375 TCGv_i32 fp32_0
= tcg_temp_new_i32();
10376 TCGv_i32 fp32_1
= tcg_temp_new_i32();
10378 gen_load_fpr32(ctx
, fp32_0
, fs
);
10379 gen_load_fpr32(ctx
, fp32_1
, ft
);
10380 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
10381 tcg_temp_free_i32(fp32_1
);
10382 tcg_temp_free_i32(fp32_0
);
10383 gen_store_fpr64(ctx
, fp64
, fd
);
10384 tcg_temp_free_i64(fp64
);
10390 case OPC_CMP_UEQ_S
:
10391 case OPC_CMP_OLT_S
:
10392 case OPC_CMP_ULT_S
:
10393 case OPC_CMP_OLE_S
:
10394 case OPC_CMP_ULE_S
:
10396 case OPC_CMP_NGLE_S
:
10397 case OPC_CMP_SEQ_S
:
10398 case OPC_CMP_NGL_S
:
10400 case OPC_CMP_NGE_S
:
10402 case OPC_CMP_NGT_S
:
10403 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10404 if (ctx
->opcode
& (1 << 6)) {
10405 gen_cmpabs_s(ctx
, func
- 48, ft
, fs
, cc
);
10407 gen_cmp_s(ctx
, func
- 48, ft
, fs
, cc
);
10411 check_cp1_registers(ctx
, fs
| ft
| fd
);
10413 TCGv_i64 fp0
= tcg_temp_new_i64();
10414 TCGv_i64 fp1
= tcg_temp_new_i64();
10416 gen_load_fpr64(ctx
, fp0
, fs
);
10417 gen_load_fpr64(ctx
, fp1
, ft
);
10418 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
10419 tcg_temp_free_i64(fp1
);
10420 gen_store_fpr64(ctx
, fp0
, fd
);
10421 tcg_temp_free_i64(fp0
);
10425 check_cp1_registers(ctx
, fs
| ft
| fd
);
10427 TCGv_i64 fp0
= tcg_temp_new_i64();
10428 TCGv_i64 fp1
= tcg_temp_new_i64();
10430 gen_load_fpr64(ctx
, fp0
, fs
);
10431 gen_load_fpr64(ctx
, fp1
, ft
);
10432 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
10433 tcg_temp_free_i64(fp1
);
10434 gen_store_fpr64(ctx
, fp0
, fd
);
10435 tcg_temp_free_i64(fp0
);
10439 check_cp1_registers(ctx
, fs
| ft
| fd
);
10441 TCGv_i64 fp0
= tcg_temp_new_i64();
10442 TCGv_i64 fp1
= tcg_temp_new_i64();
10444 gen_load_fpr64(ctx
, fp0
, fs
);
10445 gen_load_fpr64(ctx
, fp1
, ft
);
10446 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
10447 tcg_temp_free_i64(fp1
);
10448 gen_store_fpr64(ctx
, fp0
, fd
);
10449 tcg_temp_free_i64(fp0
);
10453 check_cp1_registers(ctx
, fs
| ft
| fd
);
10455 TCGv_i64 fp0
= tcg_temp_new_i64();
10456 TCGv_i64 fp1
= tcg_temp_new_i64();
10458 gen_load_fpr64(ctx
, fp0
, fs
);
10459 gen_load_fpr64(ctx
, fp1
, ft
);
10460 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
10461 tcg_temp_free_i64(fp1
);
10462 gen_store_fpr64(ctx
, fp0
, fd
);
10463 tcg_temp_free_i64(fp0
);
10467 check_cp1_registers(ctx
, fs
| fd
);
10469 TCGv_i64 fp0
= tcg_temp_new_i64();
10471 gen_load_fpr64(ctx
, fp0
, fs
);
10472 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
10473 gen_store_fpr64(ctx
, fp0
, fd
);
10474 tcg_temp_free_i64(fp0
);
10478 check_cp1_registers(ctx
, fs
| fd
);
10480 TCGv_i64 fp0
= tcg_temp_new_i64();
10482 gen_load_fpr64(ctx
, fp0
, fs
);
10483 if (ctx
->abs2008
) {
10484 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
10486 gen_helper_float_abs_d(fp0
, fp0
);
10488 gen_store_fpr64(ctx
, fp0
, fd
);
10489 tcg_temp_free_i64(fp0
);
10493 check_cp1_registers(ctx
, fs
| fd
);
10495 TCGv_i64 fp0
= tcg_temp_new_i64();
10497 gen_load_fpr64(ctx
, fp0
, fs
);
10498 gen_store_fpr64(ctx
, fp0
, fd
);
10499 tcg_temp_free_i64(fp0
);
10503 check_cp1_registers(ctx
, fs
| fd
);
10505 TCGv_i64 fp0
= tcg_temp_new_i64();
10507 gen_load_fpr64(ctx
, fp0
, fs
);
10508 if (ctx
->abs2008
) {
10509 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
10511 gen_helper_float_chs_d(fp0
, fp0
);
10513 gen_store_fpr64(ctx
, fp0
, fd
);
10514 tcg_temp_free_i64(fp0
);
10517 case OPC_ROUND_L_D
:
10518 check_cp1_64bitmode(ctx
);
10520 TCGv_i64 fp0
= tcg_temp_new_i64();
10522 gen_load_fpr64(ctx
, fp0
, fs
);
10523 if (ctx
->nan2008
) {
10524 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
10526 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
10528 gen_store_fpr64(ctx
, fp0
, fd
);
10529 tcg_temp_free_i64(fp0
);
10532 case OPC_TRUNC_L_D
:
10533 check_cp1_64bitmode(ctx
);
10535 TCGv_i64 fp0
= tcg_temp_new_i64();
10537 gen_load_fpr64(ctx
, fp0
, fs
);
10538 if (ctx
->nan2008
) {
10539 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
10541 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
10543 gen_store_fpr64(ctx
, fp0
, fd
);
10544 tcg_temp_free_i64(fp0
);
10548 check_cp1_64bitmode(ctx
);
10550 TCGv_i64 fp0
= tcg_temp_new_i64();
10552 gen_load_fpr64(ctx
, fp0
, fs
);
10553 if (ctx
->nan2008
) {
10554 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
10556 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
10558 gen_store_fpr64(ctx
, fp0
, fd
);
10559 tcg_temp_free_i64(fp0
);
10562 case OPC_FLOOR_L_D
:
10563 check_cp1_64bitmode(ctx
);
10565 TCGv_i64 fp0
= tcg_temp_new_i64();
10567 gen_load_fpr64(ctx
, fp0
, fs
);
10568 if (ctx
->nan2008
) {
10569 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
10571 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
10573 gen_store_fpr64(ctx
, fp0
, fd
);
10574 tcg_temp_free_i64(fp0
);
10577 case OPC_ROUND_W_D
:
10578 check_cp1_registers(ctx
, fs
);
10580 TCGv_i32 fp32
= tcg_temp_new_i32();
10581 TCGv_i64 fp64
= tcg_temp_new_i64();
10583 gen_load_fpr64(ctx
, fp64
, fs
);
10584 if (ctx
->nan2008
) {
10585 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
10587 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
10589 tcg_temp_free_i64(fp64
);
10590 gen_store_fpr32(ctx
, fp32
, fd
);
10591 tcg_temp_free_i32(fp32
);
10594 case OPC_TRUNC_W_D
:
10595 check_cp1_registers(ctx
, fs
);
10597 TCGv_i32 fp32
= tcg_temp_new_i32();
10598 TCGv_i64 fp64
= tcg_temp_new_i64();
10600 gen_load_fpr64(ctx
, fp64
, fs
);
10601 if (ctx
->nan2008
) {
10602 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
10604 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
10606 tcg_temp_free_i64(fp64
);
10607 gen_store_fpr32(ctx
, fp32
, fd
);
10608 tcg_temp_free_i32(fp32
);
10612 check_cp1_registers(ctx
, fs
);
10614 TCGv_i32 fp32
= tcg_temp_new_i32();
10615 TCGv_i64 fp64
= tcg_temp_new_i64();
10617 gen_load_fpr64(ctx
, fp64
, fs
);
10618 if (ctx
->nan2008
) {
10619 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
10621 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
10623 tcg_temp_free_i64(fp64
);
10624 gen_store_fpr32(ctx
, fp32
, fd
);
10625 tcg_temp_free_i32(fp32
);
10628 case OPC_FLOOR_W_D
:
10629 check_cp1_registers(ctx
, fs
);
10631 TCGv_i32 fp32
= tcg_temp_new_i32();
10632 TCGv_i64 fp64
= tcg_temp_new_i64();
10634 gen_load_fpr64(ctx
, fp64
, fs
);
10635 if (ctx
->nan2008
) {
10636 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
10638 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
10640 tcg_temp_free_i64(fp64
);
10641 gen_store_fpr32(ctx
, fp32
, fd
);
10642 tcg_temp_free_i32(fp32
);
10646 check_insn(ctx
, ISA_MIPS_R6
);
10647 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
10650 check_insn(ctx
, ISA_MIPS_R6
);
10651 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
10654 check_insn(ctx
, ISA_MIPS_R6
);
10655 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
10658 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10659 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10662 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10664 TCGLabel
*l1
= gen_new_label();
10668 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10670 fp0
= tcg_temp_new_i64();
10671 gen_load_fpr64(ctx
, fp0
, fs
);
10672 gen_store_fpr64(ctx
, fp0
, fd
);
10673 tcg_temp_free_i64(fp0
);
10678 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10680 TCGLabel
*l1
= gen_new_label();
10684 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10685 fp0
= tcg_temp_new_i64();
10686 gen_load_fpr64(ctx
, fp0
, fs
);
10687 gen_store_fpr64(ctx
, fp0
, fd
);
10688 tcg_temp_free_i64(fp0
);
10694 check_cp1_registers(ctx
, fs
| fd
);
10696 TCGv_i64 fp0
= tcg_temp_new_i64();
10698 gen_load_fpr64(ctx
, fp0
, fs
);
10699 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
10700 gen_store_fpr64(ctx
, fp0
, fd
);
10701 tcg_temp_free_i64(fp0
);
10705 check_cp1_registers(ctx
, fs
| fd
);
10707 TCGv_i64 fp0
= tcg_temp_new_i64();
10709 gen_load_fpr64(ctx
, fp0
, fs
);
10710 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
10711 gen_store_fpr64(ctx
, fp0
, fd
);
10712 tcg_temp_free_i64(fp0
);
10716 check_insn(ctx
, ISA_MIPS_R6
);
10718 TCGv_i64 fp0
= tcg_temp_new_i64();
10719 TCGv_i64 fp1
= tcg_temp_new_i64();
10720 TCGv_i64 fp2
= tcg_temp_new_i64();
10721 gen_load_fpr64(ctx
, fp0
, fs
);
10722 gen_load_fpr64(ctx
, fp1
, ft
);
10723 gen_load_fpr64(ctx
, fp2
, fd
);
10724 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10725 gen_store_fpr64(ctx
, fp2
, fd
);
10726 tcg_temp_free_i64(fp2
);
10727 tcg_temp_free_i64(fp1
);
10728 tcg_temp_free_i64(fp0
);
10732 check_insn(ctx
, ISA_MIPS_R6
);
10734 TCGv_i64 fp0
= tcg_temp_new_i64();
10735 TCGv_i64 fp1
= tcg_temp_new_i64();
10736 TCGv_i64 fp2
= tcg_temp_new_i64();
10737 gen_load_fpr64(ctx
, fp0
, fs
);
10738 gen_load_fpr64(ctx
, fp1
, ft
);
10739 gen_load_fpr64(ctx
, fp2
, fd
);
10740 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10741 gen_store_fpr64(ctx
, fp2
, fd
);
10742 tcg_temp_free_i64(fp2
);
10743 tcg_temp_free_i64(fp1
);
10744 tcg_temp_free_i64(fp0
);
10748 check_insn(ctx
, ISA_MIPS_R6
);
10750 TCGv_i64 fp0
= tcg_temp_new_i64();
10751 gen_load_fpr64(ctx
, fp0
, fs
);
10752 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
10753 gen_store_fpr64(ctx
, fp0
, fd
);
10754 tcg_temp_free_i64(fp0
);
10758 check_insn(ctx
, ISA_MIPS_R6
);
10760 TCGv_i64 fp0
= tcg_temp_new_i64();
10761 gen_load_fpr64(ctx
, fp0
, fs
);
10762 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
10763 gen_store_fpr64(ctx
, fp0
, fd
);
10764 tcg_temp_free_i64(fp0
);
10767 case OPC_MIN_D
: /* OPC_RECIP2_D */
10768 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10770 TCGv_i64 fp0
= tcg_temp_new_i64();
10771 TCGv_i64 fp1
= tcg_temp_new_i64();
10772 gen_load_fpr64(ctx
, fp0
, fs
);
10773 gen_load_fpr64(ctx
, fp1
, ft
);
10774 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
10775 gen_store_fpr64(ctx
, fp1
, fd
);
10776 tcg_temp_free_i64(fp1
);
10777 tcg_temp_free_i64(fp0
);
10780 check_cp1_64bitmode(ctx
);
10782 TCGv_i64 fp0
= tcg_temp_new_i64();
10783 TCGv_i64 fp1
= tcg_temp_new_i64();
10785 gen_load_fpr64(ctx
, fp0
, fs
);
10786 gen_load_fpr64(ctx
, fp1
, ft
);
10787 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
10788 tcg_temp_free_i64(fp1
);
10789 gen_store_fpr64(ctx
, fp0
, fd
);
10790 tcg_temp_free_i64(fp0
);
10794 case OPC_MINA_D
: /* OPC_RECIP1_D */
10795 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10797 TCGv_i64 fp0
= tcg_temp_new_i64();
10798 TCGv_i64 fp1
= tcg_temp_new_i64();
10799 gen_load_fpr64(ctx
, fp0
, fs
);
10800 gen_load_fpr64(ctx
, fp1
, ft
);
10801 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
10802 gen_store_fpr64(ctx
, fp1
, fd
);
10803 tcg_temp_free_i64(fp1
);
10804 tcg_temp_free_i64(fp0
);
10807 check_cp1_64bitmode(ctx
);
10809 TCGv_i64 fp0
= tcg_temp_new_i64();
10811 gen_load_fpr64(ctx
, fp0
, fs
);
10812 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
10813 gen_store_fpr64(ctx
, fp0
, fd
);
10814 tcg_temp_free_i64(fp0
);
10818 case OPC_MAX_D
: /* OPC_RSQRT1_D */
10819 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10821 TCGv_i64 fp0
= tcg_temp_new_i64();
10822 TCGv_i64 fp1
= tcg_temp_new_i64();
10823 gen_load_fpr64(ctx
, fp0
, fs
);
10824 gen_load_fpr64(ctx
, fp1
, ft
);
10825 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
10826 gen_store_fpr64(ctx
, fp1
, fd
);
10827 tcg_temp_free_i64(fp1
);
10828 tcg_temp_free_i64(fp0
);
10831 check_cp1_64bitmode(ctx
);
10833 TCGv_i64 fp0
= tcg_temp_new_i64();
10835 gen_load_fpr64(ctx
, fp0
, fs
);
10836 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
10837 gen_store_fpr64(ctx
, fp0
, fd
);
10838 tcg_temp_free_i64(fp0
);
10842 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
10843 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10845 TCGv_i64 fp0
= tcg_temp_new_i64();
10846 TCGv_i64 fp1
= tcg_temp_new_i64();
10847 gen_load_fpr64(ctx
, fp0
, fs
);
10848 gen_load_fpr64(ctx
, fp1
, ft
);
10849 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
10850 gen_store_fpr64(ctx
, fp1
, fd
);
10851 tcg_temp_free_i64(fp1
);
10852 tcg_temp_free_i64(fp0
);
10855 check_cp1_64bitmode(ctx
);
10857 TCGv_i64 fp0
= tcg_temp_new_i64();
10858 TCGv_i64 fp1
= tcg_temp_new_i64();
10860 gen_load_fpr64(ctx
, fp0
, fs
);
10861 gen_load_fpr64(ctx
, fp1
, ft
);
10862 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
10863 tcg_temp_free_i64(fp1
);
10864 gen_store_fpr64(ctx
, fp0
, fd
);
10865 tcg_temp_free_i64(fp0
);
10872 case OPC_CMP_UEQ_D
:
10873 case OPC_CMP_OLT_D
:
10874 case OPC_CMP_ULT_D
:
10875 case OPC_CMP_OLE_D
:
10876 case OPC_CMP_ULE_D
:
10878 case OPC_CMP_NGLE_D
:
10879 case OPC_CMP_SEQ_D
:
10880 case OPC_CMP_NGL_D
:
10882 case OPC_CMP_NGE_D
:
10884 case OPC_CMP_NGT_D
:
10885 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10886 if (ctx
->opcode
& (1 << 6)) {
10887 gen_cmpabs_d(ctx
, func
- 48, ft
, fs
, cc
);
10889 gen_cmp_d(ctx
, func
- 48, ft
, fs
, cc
);
10893 check_cp1_registers(ctx
, fs
);
10895 TCGv_i32 fp32
= tcg_temp_new_i32();
10896 TCGv_i64 fp64
= tcg_temp_new_i64();
10898 gen_load_fpr64(ctx
, fp64
, fs
);
10899 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
10900 tcg_temp_free_i64(fp64
);
10901 gen_store_fpr32(ctx
, fp32
, fd
);
10902 tcg_temp_free_i32(fp32
);
10906 check_cp1_registers(ctx
, fs
);
10908 TCGv_i32 fp32
= tcg_temp_new_i32();
10909 TCGv_i64 fp64
= tcg_temp_new_i64();
10911 gen_load_fpr64(ctx
, fp64
, fs
);
10912 if (ctx
->nan2008
) {
10913 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
10915 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
10917 tcg_temp_free_i64(fp64
);
10918 gen_store_fpr32(ctx
, fp32
, fd
);
10919 tcg_temp_free_i32(fp32
);
10923 check_cp1_64bitmode(ctx
);
10925 TCGv_i64 fp0
= tcg_temp_new_i64();
10927 gen_load_fpr64(ctx
, fp0
, fs
);
10928 if (ctx
->nan2008
) {
10929 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
10931 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
10933 gen_store_fpr64(ctx
, fp0
, fd
);
10934 tcg_temp_free_i64(fp0
);
10939 TCGv_i32 fp0
= tcg_temp_new_i32();
10941 gen_load_fpr32(ctx
, fp0
, fs
);
10942 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
10943 gen_store_fpr32(ctx
, fp0
, fd
);
10944 tcg_temp_free_i32(fp0
);
10948 check_cp1_registers(ctx
, fd
);
10950 TCGv_i32 fp32
= tcg_temp_new_i32();
10951 TCGv_i64 fp64
= tcg_temp_new_i64();
10953 gen_load_fpr32(ctx
, fp32
, fs
);
10954 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
10955 tcg_temp_free_i32(fp32
);
10956 gen_store_fpr64(ctx
, fp64
, fd
);
10957 tcg_temp_free_i64(fp64
);
10961 check_cp1_64bitmode(ctx
);
10963 TCGv_i32 fp32
= tcg_temp_new_i32();
10964 TCGv_i64 fp64
= tcg_temp_new_i64();
10966 gen_load_fpr64(ctx
, fp64
, fs
);
10967 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
10968 tcg_temp_free_i64(fp64
);
10969 gen_store_fpr32(ctx
, fp32
, fd
);
10970 tcg_temp_free_i32(fp32
);
10974 check_cp1_64bitmode(ctx
);
10976 TCGv_i64 fp0
= tcg_temp_new_i64();
10978 gen_load_fpr64(ctx
, fp0
, fs
);
10979 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
10980 gen_store_fpr64(ctx
, fp0
, fd
);
10981 tcg_temp_free_i64(fp0
);
10984 case OPC_CVT_PS_PW
:
10987 TCGv_i64 fp0
= tcg_temp_new_i64();
10989 gen_load_fpr64(ctx
, fp0
, fs
);
10990 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
10991 gen_store_fpr64(ctx
, fp0
, fd
);
10992 tcg_temp_free_i64(fp0
);
10998 TCGv_i64 fp0
= tcg_temp_new_i64();
10999 TCGv_i64 fp1
= tcg_temp_new_i64();
11001 gen_load_fpr64(ctx
, fp0
, fs
);
11002 gen_load_fpr64(ctx
, fp1
, ft
);
11003 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
11004 tcg_temp_free_i64(fp1
);
11005 gen_store_fpr64(ctx
, fp0
, fd
);
11006 tcg_temp_free_i64(fp0
);
11012 TCGv_i64 fp0
= tcg_temp_new_i64();
11013 TCGv_i64 fp1
= tcg_temp_new_i64();
11015 gen_load_fpr64(ctx
, fp0
, fs
);
11016 gen_load_fpr64(ctx
, fp1
, ft
);
11017 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
11018 tcg_temp_free_i64(fp1
);
11019 gen_store_fpr64(ctx
, fp0
, fd
);
11020 tcg_temp_free_i64(fp0
);
11026 TCGv_i64 fp0
= tcg_temp_new_i64();
11027 TCGv_i64 fp1
= tcg_temp_new_i64();
11029 gen_load_fpr64(ctx
, fp0
, fs
);
11030 gen_load_fpr64(ctx
, fp1
, ft
);
11031 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
11032 tcg_temp_free_i64(fp1
);
11033 gen_store_fpr64(ctx
, fp0
, fd
);
11034 tcg_temp_free_i64(fp0
);
11040 TCGv_i64 fp0
= tcg_temp_new_i64();
11042 gen_load_fpr64(ctx
, fp0
, fs
);
11043 gen_helper_float_abs_ps(fp0
, fp0
);
11044 gen_store_fpr64(ctx
, fp0
, fd
);
11045 tcg_temp_free_i64(fp0
);
11051 TCGv_i64 fp0
= tcg_temp_new_i64();
11053 gen_load_fpr64(ctx
, fp0
, fs
);
11054 gen_store_fpr64(ctx
, fp0
, fd
);
11055 tcg_temp_free_i64(fp0
);
11061 TCGv_i64 fp0
= tcg_temp_new_i64();
11063 gen_load_fpr64(ctx
, fp0
, fs
);
11064 gen_helper_float_chs_ps(fp0
, fp0
);
11065 gen_store_fpr64(ctx
, fp0
, fd
);
11066 tcg_temp_free_i64(fp0
);
11071 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11076 TCGLabel
*l1
= gen_new_label();
11080 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11082 fp0
= tcg_temp_new_i64();
11083 gen_load_fpr64(ctx
, fp0
, fs
);
11084 gen_store_fpr64(ctx
, fp0
, fd
);
11085 tcg_temp_free_i64(fp0
);
11092 TCGLabel
*l1
= gen_new_label();
11096 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11097 fp0
= tcg_temp_new_i64();
11098 gen_load_fpr64(ctx
, fp0
, fs
);
11099 gen_store_fpr64(ctx
, fp0
, fd
);
11100 tcg_temp_free_i64(fp0
);
11108 TCGv_i64 fp0
= tcg_temp_new_i64();
11109 TCGv_i64 fp1
= tcg_temp_new_i64();
11111 gen_load_fpr64(ctx
, fp0
, ft
);
11112 gen_load_fpr64(ctx
, fp1
, fs
);
11113 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
11114 tcg_temp_free_i64(fp1
);
11115 gen_store_fpr64(ctx
, fp0
, fd
);
11116 tcg_temp_free_i64(fp0
);
11122 TCGv_i64 fp0
= tcg_temp_new_i64();
11123 TCGv_i64 fp1
= tcg_temp_new_i64();
11125 gen_load_fpr64(ctx
, fp0
, ft
);
11126 gen_load_fpr64(ctx
, fp1
, fs
);
11127 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
11128 tcg_temp_free_i64(fp1
);
11129 gen_store_fpr64(ctx
, fp0
, fd
);
11130 tcg_temp_free_i64(fp0
);
11133 case OPC_RECIP2_PS
:
11136 TCGv_i64 fp0
= tcg_temp_new_i64();
11137 TCGv_i64 fp1
= tcg_temp_new_i64();
11139 gen_load_fpr64(ctx
, fp0
, fs
);
11140 gen_load_fpr64(ctx
, fp1
, ft
);
11141 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
11142 tcg_temp_free_i64(fp1
);
11143 gen_store_fpr64(ctx
, fp0
, fd
);
11144 tcg_temp_free_i64(fp0
);
11147 case OPC_RECIP1_PS
:
11150 TCGv_i64 fp0
= tcg_temp_new_i64();
11152 gen_load_fpr64(ctx
, fp0
, fs
);
11153 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
11154 gen_store_fpr64(ctx
, fp0
, fd
);
11155 tcg_temp_free_i64(fp0
);
11158 case OPC_RSQRT1_PS
:
11161 TCGv_i64 fp0
= tcg_temp_new_i64();
11163 gen_load_fpr64(ctx
, fp0
, fs
);
11164 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
11165 gen_store_fpr64(ctx
, fp0
, fd
);
11166 tcg_temp_free_i64(fp0
);
11169 case OPC_RSQRT2_PS
:
11172 TCGv_i64 fp0
= tcg_temp_new_i64();
11173 TCGv_i64 fp1
= tcg_temp_new_i64();
11175 gen_load_fpr64(ctx
, fp0
, fs
);
11176 gen_load_fpr64(ctx
, fp1
, ft
);
11177 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
11178 tcg_temp_free_i64(fp1
);
11179 gen_store_fpr64(ctx
, fp0
, fd
);
11180 tcg_temp_free_i64(fp0
);
11184 check_cp1_64bitmode(ctx
);
11186 TCGv_i32 fp0
= tcg_temp_new_i32();
11188 gen_load_fpr32h(ctx
, fp0
, fs
);
11189 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
11190 gen_store_fpr32(ctx
, fp0
, fd
);
11191 tcg_temp_free_i32(fp0
);
11194 case OPC_CVT_PW_PS
:
11197 TCGv_i64 fp0
= tcg_temp_new_i64();
11199 gen_load_fpr64(ctx
, fp0
, fs
);
11200 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
11201 gen_store_fpr64(ctx
, fp0
, fd
);
11202 tcg_temp_free_i64(fp0
);
11206 check_cp1_64bitmode(ctx
);
11208 TCGv_i32 fp0
= tcg_temp_new_i32();
11210 gen_load_fpr32(ctx
, fp0
, fs
);
11211 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
11212 gen_store_fpr32(ctx
, fp0
, fd
);
11213 tcg_temp_free_i32(fp0
);
11219 TCGv_i32 fp0
= tcg_temp_new_i32();
11220 TCGv_i32 fp1
= tcg_temp_new_i32();
11222 gen_load_fpr32(ctx
, fp0
, fs
);
11223 gen_load_fpr32(ctx
, fp1
, ft
);
11224 gen_store_fpr32h(ctx
, fp0
, fd
);
11225 gen_store_fpr32(ctx
, fp1
, fd
);
11226 tcg_temp_free_i32(fp0
);
11227 tcg_temp_free_i32(fp1
);
11233 TCGv_i32 fp0
= tcg_temp_new_i32();
11234 TCGv_i32 fp1
= tcg_temp_new_i32();
11236 gen_load_fpr32(ctx
, fp0
, fs
);
11237 gen_load_fpr32h(ctx
, fp1
, ft
);
11238 gen_store_fpr32(ctx
, fp1
, fd
);
11239 gen_store_fpr32h(ctx
, fp0
, fd
);
11240 tcg_temp_free_i32(fp0
);
11241 tcg_temp_free_i32(fp1
);
11247 TCGv_i32 fp0
= tcg_temp_new_i32();
11248 TCGv_i32 fp1
= tcg_temp_new_i32();
11250 gen_load_fpr32h(ctx
, fp0
, fs
);
11251 gen_load_fpr32(ctx
, fp1
, ft
);
11252 gen_store_fpr32(ctx
, fp1
, fd
);
11253 gen_store_fpr32h(ctx
, fp0
, fd
);
11254 tcg_temp_free_i32(fp0
);
11255 tcg_temp_free_i32(fp1
);
11261 TCGv_i32 fp0
= tcg_temp_new_i32();
11262 TCGv_i32 fp1
= tcg_temp_new_i32();
11264 gen_load_fpr32h(ctx
, fp0
, fs
);
11265 gen_load_fpr32h(ctx
, fp1
, ft
);
11266 gen_store_fpr32(ctx
, fp1
, fd
);
11267 gen_store_fpr32h(ctx
, fp0
, fd
);
11268 tcg_temp_free_i32(fp0
);
11269 tcg_temp_free_i32(fp1
);
11273 case OPC_CMP_UN_PS
:
11274 case OPC_CMP_EQ_PS
:
11275 case OPC_CMP_UEQ_PS
:
11276 case OPC_CMP_OLT_PS
:
11277 case OPC_CMP_ULT_PS
:
11278 case OPC_CMP_OLE_PS
:
11279 case OPC_CMP_ULE_PS
:
11280 case OPC_CMP_SF_PS
:
11281 case OPC_CMP_NGLE_PS
:
11282 case OPC_CMP_SEQ_PS
:
11283 case OPC_CMP_NGL_PS
:
11284 case OPC_CMP_LT_PS
:
11285 case OPC_CMP_NGE_PS
:
11286 case OPC_CMP_LE_PS
:
11287 case OPC_CMP_NGT_PS
:
11288 if (ctx
->opcode
& (1 << 6)) {
11289 gen_cmpabs_ps(ctx
, func
- 48, ft
, fs
, cc
);
11291 gen_cmp_ps(ctx
, func
- 48, ft
, fs
, cc
);
11295 MIPS_INVAL("farith");
11296 gen_reserved_instruction(ctx
);
11301 /* Coprocessor 3 (FPU) */
11302 static void gen_flt3_ldst(DisasContext
*ctx
, uint32_t opc
,
11303 int fd
, int fs
, int base
, int index
)
11305 TCGv t0
= tcg_temp_new();
11308 gen_load_gpr(t0
, index
);
11309 } else if (index
== 0) {
11310 gen_load_gpr(t0
, base
);
11312 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
11315 * Don't do NOP if destination is zero: we must perform the actual
11322 TCGv_i32 fp0
= tcg_temp_new_i32();
11324 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
11325 tcg_gen_trunc_tl_i32(fp0
, t0
);
11326 gen_store_fpr32(ctx
, fp0
, fd
);
11327 tcg_temp_free_i32(fp0
);
11332 check_cp1_registers(ctx
, fd
);
11334 TCGv_i64 fp0
= tcg_temp_new_i64();
11335 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
11336 gen_store_fpr64(ctx
, fp0
, fd
);
11337 tcg_temp_free_i64(fp0
);
11341 check_cp1_64bitmode(ctx
);
11342 tcg_gen_andi_tl(t0
, t0
, ~0x7);
11344 TCGv_i64 fp0
= tcg_temp_new_i64();
11346 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
11347 gen_store_fpr64(ctx
, fp0
, fd
);
11348 tcg_temp_free_i64(fp0
);
11354 TCGv_i32 fp0
= tcg_temp_new_i32();
11355 gen_load_fpr32(ctx
, fp0
, fs
);
11356 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
11357 tcg_temp_free_i32(fp0
);
11362 check_cp1_registers(ctx
, fs
);
11364 TCGv_i64 fp0
= tcg_temp_new_i64();
11365 gen_load_fpr64(ctx
, fp0
, fs
);
11366 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
11367 tcg_temp_free_i64(fp0
);
11371 check_cp1_64bitmode(ctx
);
11372 tcg_gen_andi_tl(t0
, t0
, ~0x7);
11374 TCGv_i64 fp0
= tcg_temp_new_i64();
11375 gen_load_fpr64(ctx
, fp0
, fs
);
11376 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
11377 tcg_temp_free_i64(fp0
);
11384 static void gen_flt3_arith(DisasContext
*ctx
, uint32_t opc
,
11385 int fd
, int fr
, int fs
, int ft
)
11391 TCGv t0
= tcg_temp_new();
11392 TCGv_i32 fp
= tcg_temp_new_i32();
11393 TCGv_i32 fph
= tcg_temp_new_i32();
11394 TCGLabel
*l1
= gen_new_label();
11395 TCGLabel
*l2
= gen_new_label();
11397 gen_load_gpr(t0
, fr
);
11398 tcg_gen_andi_tl(t0
, t0
, 0x7);
11400 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
11401 gen_load_fpr32(ctx
, fp
, fs
);
11402 gen_load_fpr32h(ctx
, fph
, fs
);
11403 gen_store_fpr32(ctx
, fp
, fd
);
11404 gen_store_fpr32h(ctx
, fph
, fd
);
11407 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
11409 if (cpu_is_bigendian(ctx
)) {
11410 gen_load_fpr32(ctx
, fp
, fs
);
11411 gen_load_fpr32h(ctx
, fph
, ft
);
11412 gen_store_fpr32h(ctx
, fp
, fd
);
11413 gen_store_fpr32(ctx
, fph
, fd
);
11415 gen_load_fpr32h(ctx
, fph
, fs
);
11416 gen_load_fpr32(ctx
, fp
, ft
);
11417 gen_store_fpr32(ctx
, fph
, fd
);
11418 gen_store_fpr32h(ctx
, fp
, fd
);
11421 tcg_temp_free_i32(fp
);
11422 tcg_temp_free_i32(fph
);
11428 TCGv_i32 fp0
= tcg_temp_new_i32();
11429 TCGv_i32 fp1
= tcg_temp_new_i32();
11430 TCGv_i32 fp2
= tcg_temp_new_i32();
11432 gen_load_fpr32(ctx
, fp0
, fs
);
11433 gen_load_fpr32(ctx
, fp1
, ft
);
11434 gen_load_fpr32(ctx
, fp2
, fr
);
11435 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11436 tcg_temp_free_i32(fp0
);
11437 tcg_temp_free_i32(fp1
);
11438 gen_store_fpr32(ctx
, fp2
, fd
);
11439 tcg_temp_free_i32(fp2
);
11444 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
11446 TCGv_i64 fp0
= tcg_temp_new_i64();
11447 TCGv_i64 fp1
= tcg_temp_new_i64();
11448 TCGv_i64 fp2
= tcg_temp_new_i64();
11450 gen_load_fpr64(ctx
, fp0
, fs
);
11451 gen_load_fpr64(ctx
, fp1
, ft
);
11452 gen_load_fpr64(ctx
, fp2
, fr
);
11453 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11454 tcg_temp_free_i64(fp0
);
11455 tcg_temp_free_i64(fp1
);
11456 gen_store_fpr64(ctx
, fp2
, fd
);
11457 tcg_temp_free_i64(fp2
);
11463 TCGv_i64 fp0
= tcg_temp_new_i64();
11464 TCGv_i64 fp1
= tcg_temp_new_i64();
11465 TCGv_i64 fp2
= tcg_temp_new_i64();
11467 gen_load_fpr64(ctx
, fp0
, fs
);
11468 gen_load_fpr64(ctx
, fp1
, ft
);
11469 gen_load_fpr64(ctx
, fp2
, fr
);
11470 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11471 tcg_temp_free_i64(fp0
);
11472 tcg_temp_free_i64(fp1
);
11473 gen_store_fpr64(ctx
, fp2
, fd
);
11474 tcg_temp_free_i64(fp2
);
11480 TCGv_i32 fp0
= tcg_temp_new_i32();
11481 TCGv_i32 fp1
= tcg_temp_new_i32();
11482 TCGv_i32 fp2
= tcg_temp_new_i32();
11484 gen_load_fpr32(ctx
, fp0
, fs
);
11485 gen_load_fpr32(ctx
, fp1
, ft
);
11486 gen_load_fpr32(ctx
, fp2
, fr
);
11487 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11488 tcg_temp_free_i32(fp0
);
11489 tcg_temp_free_i32(fp1
);
11490 gen_store_fpr32(ctx
, fp2
, fd
);
11491 tcg_temp_free_i32(fp2
);
11496 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
11498 TCGv_i64 fp0
= tcg_temp_new_i64();
11499 TCGv_i64 fp1
= tcg_temp_new_i64();
11500 TCGv_i64 fp2
= tcg_temp_new_i64();
11502 gen_load_fpr64(ctx
, fp0
, fs
);
11503 gen_load_fpr64(ctx
, fp1
, ft
);
11504 gen_load_fpr64(ctx
, fp2
, fr
);
11505 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11506 tcg_temp_free_i64(fp0
);
11507 tcg_temp_free_i64(fp1
);
11508 gen_store_fpr64(ctx
, fp2
, fd
);
11509 tcg_temp_free_i64(fp2
);
11515 TCGv_i64 fp0
= tcg_temp_new_i64();
11516 TCGv_i64 fp1
= tcg_temp_new_i64();
11517 TCGv_i64 fp2
= tcg_temp_new_i64();
11519 gen_load_fpr64(ctx
, fp0
, fs
);
11520 gen_load_fpr64(ctx
, fp1
, ft
);
11521 gen_load_fpr64(ctx
, fp2
, fr
);
11522 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11523 tcg_temp_free_i64(fp0
);
11524 tcg_temp_free_i64(fp1
);
11525 gen_store_fpr64(ctx
, fp2
, fd
);
11526 tcg_temp_free_i64(fp2
);
11532 TCGv_i32 fp0
= tcg_temp_new_i32();
11533 TCGv_i32 fp1
= tcg_temp_new_i32();
11534 TCGv_i32 fp2
= tcg_temp_new_i32();
11536 gen_load_fpr32(ctx
, fp0
, fs
);
11537 gen_load_fpr32(ctx
, fp1
, ft
);
11538 gen_load_fpr32(ctx
, fp2
, fr
);
11539 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11540 tcg_temp_free_i32(fp0
);
11541 tcg_temp_free_i32(fp1
);
11542 gen_store_fpr32(ctx
, fp2
, fd
);
11543 tcg_temp_free_i32(fp2
);
11548 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
11550 TCGv_i64 fp0
= tcg_temp_new_i64();
11551 TCGv_i64 fp1
= tcg_temp_new_i64();
11552 TCGv_i64 fp2
= tcg_temp_new_i64();
11554 gen_load_fpr64(ctx
, fp0
, fs
);
11555 gen_load_fpr64(ctx
, fp1
, ft
);
11556 gen_load_fpr64(ctx
, fp2
, fr
);
11557 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11558 tcg_temp_free_i64(fp0
);
11559 tcg_temp_free_i64(fp1
);
11560 gen_store_fpr64(ctx
, fp2
, fd
);
11561 tcg_temp_free_i64(fp2
);
11567 TCGv_i64 fp0
= tcg_temp_new_i64();
11568 TCGv_i64 fp1
= tcg_temp_new_i64();
11569 TCGv_i64 fp2
= tcg_temp_new_i64();
11571 gen_load_fpr64(ctx
, fp0
, fs
);
11572 gen_load_fpr64(ctx
, fp1
, ft
);
11573 gen_load_fpr64(ctx
, fp2
, fr
);
11574 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11575 tcg_temp_free_i64(fp0
);
11576 tcg_temp_free_i64(fp1
);
11577 gen_store_fpr64(ctx
, fp2
, fd
);
11578 tcg_temp_free_i64(fp2
);
11584 TCGv_i32 fp0
= tcg_temp_new_i32();
11585 TCGv_i32 fp1
= tcg_temp_new_i32();
11586 TCGv_i32 fp2
= tcg_temp_new_i32();
11588 gen_load_fpr32(ctx
, fp0
, fs
);
11589 gen_load_fpr32(ctx
, fp1
, ft
);
11590 gen_load_fpr32(ctx
, fp2
, fr
);
11591 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11592 tcg_temp_free_i32(fp0
);
11593 tcg_temp_free_i32(fp1
);
11594 gen_store_fpr32(ctx
, fp2
, fd
);
11595 tcg_temp_free_i32(fp2
);
11600 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
11602 TCGv_i64 fp0
= tcg_temp_new_i64();
11603 TCGv_i64 fp1
= tcg_temp_new_i64();
11604 TCGv_i64 fp2
= tcg_temp_new_i64();
11606 gen_load_fpr64(ctx
, fp0
, fs
);
11607 gen_load_fpr64(ctx
, fp1
, ft
);
11608 gen_load_fpr64(ctx
, fp2
, fr
);
11609 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11610 tcg_temp_free_i64(fp0
);
11611 tcg_temp_free_i64(fp1
);
11612 gen_store_fpr64(ctx
, fp2
, fd
);
11613 tcg_temp_free_i64(fp2
);
11619 TCGv_i64 fp0
= tcg_temp_new_i64();
11620 TCGv_i64 fp1
= tcg_temp_new_i64();
11621 TCGv_i64 fp2
= tcg_temp_new_i64();
11623 gen_load_fpr64(ctx
, fp0
, fs
);
11624 gen_load_fpr64(ctx
, fp1
, ft
);
11625 gen_load_fpr64(ctx
, fp2
, fr
);
11626 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11627 tcg_temp_free_i64(fp0
);
11628 tcg_temp_free_i64(fp1
);
11629 gen_store_fpr64(ctx
, fp2
, fd
);
11630 tcg_temp_free_i64(fp2
);
11634 MIPS_INVAL("flt3_arith");
11635 gen_reserved_instruction(ctx
);
11640 void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
11644 #if !defined(CONFIG_USER_ONLY)
11646 * The Linux kernel will emulate rdhwr if it's not supported natively.
11647 * Therefore only check the ISA in system mode.
11649 check_insn(ctx
, ISA_MIPS_R2
);
11651 t0
= tcg_temp_new();
11655 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
11656 gen_store_gpr(t0
, rt
);
11659 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
11660 gen_store_gpr(t0
, rt
);
11663 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
11666 gen_helper_rdhwr_cc(t0
, cpu_env
);
11667 gen_store_gpr(t0
, rt
);
11669 * Break the TB to be able to take timer interrupts immediately
11670 * after reading count. DISAS_STOP isn't sufficient, we need to ensure
11671 * we break completely out of translated code.
11673 gen_save_pc(ctx
->base
.pc_next
+ 4);
11674 ctx
->base
.is_jmp
= DISAS_EXIT
;
11677 gen_helper_rdhwr_ccres(t0
, cpu_env
);
11678 gen_store_gpr(t0
, rt
);
11681 check_insn(ctx
, ISA_MIPS_R6
);
11684 * Performance counter registers are not implemented other than
11685 * control register 0.
11687 generate_exception(ctx
, EXCP_RI
);
11689 gen_helper_rdhwr_performance(t0
, cpu_env
);
11690 gen_store_gpr(t0
, rt
);
11693 check_insn(ctx
, ISA_MIPS_R6
);
11694 gen_helper_rdhwr_xnp(t0
, cpu_env
);
11695 gen_store_gpr(t0
, rt
);
11698 #if defined(CONFIG_USER_ONLY)
11699 tcg_gen_ld_tl(t0
, cpu_env
,
11700 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
11701 gen_store_gpr(t0
, rt
);
11704 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
11705 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
11706 tcg_gen_ld_tl(t0
, cpu_env
,
11707 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
11708 gen_store_gpr(t0
, rt
);
11710 gen_reserved_instruction(ctx
);
11714 default: /* Invalid */
11715 MIPS_INVAL("rdhwr");
11716 gen_reserved_instruction(ctx
);
11722 static inline void clear_branch_hflags(DisasContext
*ctx
)
11724 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
11725 if (ctx
->base
.is_jmp
== DISAS_NEXT
) {
11726 save_cpu_state(ctx
, 0);
11729 * It is not safe to save ctx->hflags as hflags may be changed
11730 * in execution time by the instruction in delay / forbidden slot.
11732 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
11736 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
11738 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
11739 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
11740 /* Branches completion */
11741 clear_branch_hflags(ctx
);
11742 ctx
->base
.is_jmp
= DISAS_NORETURN
;
11743 /* FIXME: Need to clear can_do_io. */
11744 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
11745 case MIPS_HFLAG_FBNSLOT
:
11746 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ insn_bytes
);
11749 /* unconditional branch */
11750 if (proc_hflags
& MIPS_HFLAG_BX
) {
11751 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
11753 gen_goto_tb(ctx
, 0, ctx
->btarget
);
11755 case MIPS_HFLAG_BL
:
11756 /* blikely taken case */
11757 gen_goto_tb(ctx
, 0, ctx
->btarget
);
11759 case MIPS_HFLAG_BC
:
11760 /* Conditional branch */
11762 TCGLabel
*l1
= gen_new_label();
11764 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
11765 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ insn_bytes
);
11767 gen_goto_tb(ctx
, 0, ctx
->btarget
);
11770 case MIPS_HFLAG_BR
:
11771 /* unconditional branch to register */
11772 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
11773 TCGv t0
= tcg_temp_new();
11774 TCGv_i32 t1
= tcg_temp_new_i32();
11776 tcg_gen_andi_tl(t0
, btarget
, 0x1);
11777 tcg_gen_trunc_tl_i32(t1
, t0
);
11779 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
11780 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
11781 tcg_gen_or_i32(hflags
, hflags
, t1
);
11782 tcg_temp_free_i32(t1
);
11784 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
11786 tcg_gen_mov_tl(cpu_PC
, btarget
);
11788 tcg_gen_lookup_and_goto_ptr();
11791 LOG_DISAS("unknown branch 0x%x\n", proc_hflags
);
11792 gen_reserved_instruction(ctx
);
11797 /* Compact Branches */
11798 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
11799 int rs
, int rt
, int32_t offset
)
11801 int bcond_compute
= 0;
11802 TCGv t0
= tcg_temp_new();
11803 TCGv t1
= tcg_temp_new();
11804 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
11806 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
11807 #ifdef MIPS_DEBUG_DISAS
11808 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
11809 "\n", ctx
->base
.pc_next
);
11811 gen_reserved_instruction(ctx
);
11815 /* Load needed operands and calculate btarget */
11817 /* compact branch */
11818 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
11819 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
11820 gen_load_gpr(t0
, rs
);
11821 gen_load_gpr(t1
, rt
);
11823 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11824 if (rs
<= rt
&& rs
== 0) {
11825 /* OPC_BEQZALC, OPC_BNEZALC */
11826 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
11829 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
11830 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
11831 gen_load_gpr(t0
, rs
);
11832 gen_load_gpr(t1
, rt
);
11834 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11836 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
11837 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
11838 if (rs
== 0 || rs
== rt
) {
11839 /* OPC_BLEZALC, OPC_BGEZALC */
11840 /* OPC_BGTZALC, OPC_BLTZALC */
11841 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
11843 gen_load_gpr(t0
, rs
);
11844 gen_load_gpr(t1
, rt
);
11846 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11850 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11855 /* OPC_BEQZC, OPC_BNEZC */
11856 gen_load_gpr(t0
, rs
);
11858 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11860 /* OPC_JIC, OPC_JIALC */
11861 TCGv tbase
= tcg_temp_new();
11862 TCGv toffset
= tcg_constant_tl(offset
);
11864 gen_load_gpr(tbase
, rt
);
11865 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
11866 tcg_temp_free(tbase
);
11870 MIPS_INVAL("Compact branch/jump");
11871 gen_reserved_instruction(ctx
);
11875 if (bcond_compute
== 0) {
11876 /* Unconditional compact branch */
11879 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
11882 ctx
->hflags
|= MIPS_HFLAG_BR
;
11885 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
11888 ctx
->hflags
|= MIPS_HFLAG_B
;
11891 MIPS_INVAL("Compact branch/jump");
11892 gen_reserved_instruction(ctx
);
11896 /* Generating branch here as compact branches don't have delay slot */
11897 gen_branch(ctx
, 4);
11899 /* Conditional compact branch */
11900 TCGLabel
*fs
= gen_new_label();
11901 save_cpu_state(ctx
, 0);
11904 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
11905 if (rs
== 0 && rt
!= 0) {
11907 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
11908 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11910 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
11913 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
11916 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
11917 if (rs
== 0 && rt
!= 0) {
11919 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
11920 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11922 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
11925 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
11928 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
11929 if (rs
== 0 && rt
!= 0) {
11931 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
11932 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11934 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
11937 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
11940 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
11941 if (rs
== 0 && rt
!= 0) {
11943 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
11944 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11946 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
11949 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
11952 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
11953 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
11955 /* OPC_BOVC, OPC_BNVC */
11956 TCGv t2
= tcg_temp_new();
11957 TCGv t3
= tcg_temp_new();
11958 TCGv t4
= tcg_temp_new();
11959 TCGv input_overflow
= tcg_temp_new();
11961 gen_load_gpr(t0
, rs
);
11962 gen_load_gpr(t1
, rt
);
11963 tcg_gen_ext32s_tl(t2
, t0
);
11964 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
11965 tcg_gen_ext32s_tl(t3
, t1
);
11966 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
11967 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
11969 tcg_gen_add_tl(t4
, t2
, t3
);
11970 tcg_gen_ext32s_tl(t4
, t4
);
11971 tcg_gen_xor_tl(t2
, t2
, t3
);
11972 tcg_gen_xor_tl(t3
, t4
, t3
);
11973 tcg_gen_andc_tl(t2
, t3
, t2
);
11974 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
11975 tcg_gen_or_tl(t4
, t4
, input_overflow
);
11976 if (opc
== OPC_BOVC
) {
11978 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
11981 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
11983 tcg_temp_free(input_overflow
);
11987 } else if (rs
< rt
&& rs
== 0) {
11988 /* OPC_BEQZALC, OPC_BNEZALC */
11989 if (opc
== OPC_BEQZALC
) {
11991 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
11994 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
11997 /* OPC_BEQC, OPC_BNEC */
11998 if (opc
== OPC_BEQC
) {
12000 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
12003 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
12008 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
12011 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
12014 MIPS_INVAL("Compact conditional branch/jump");
12015 gen_reserved_instruction(ctx
);
12019 /* Generating branch here as compact branches don't have delay slot */
12020 gen_goto_tb(ctx
, 1, ctx
->btarget
);
12023 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
12031 void gen_addiupc(DisasContext
*ctx
, int rx
, int imm
,
12032 int is_64_bit
, int extended
)
12036 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
12037 gen_reserved_instruction(ctx
);
12041 t0
= tcg_temp_new();
12043 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
12044 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
12046 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12052 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
12055 TCGv_i32 t0
= tcg_const_i32(op
);
12056 TCGv t1
= tcg_temp_new();
12057 gen_base_offset_addr(ctx
, t1
, base
, offset
);
12058 gen_helper_cache(cpu_env
, t1
, t0
);
12060 tcg_temp_free_i32(t0
);
12063 static inline bool is_uhi(DisasContext
*ctx
, int sdbbp_code
)
12065 #ifdef CONFIG_USER_ONLY
12068 bool is_user
= (ctx
->hflags
& MIPS_HFLAG_KSU
) == MIPS_HFLAG_UM
;
12069 return semihosting_enabled(is_user
) && sdbbp_code
== 1;
12073 void gen_ldxs(DisasContext
*ctx
, int base
, int index
, int rd
)
12075 TCGv t0
= tcg_temp_new();
12076 TCGv t1
= tcg_temp_new();
12078 gen_load_gpr(t0
, base
);
12081 gen_load_gpr(t1
, index
);
12082 tcg_gen_shli_tl(t1
, t1
, 2);
12083 gen_op_addr_add(ctx
, t0
, t1
, t0
);
12086 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12087 gen_store_gpr(t1
, rd
);
12093 static void gen_sync(int stype
)
12095 TCGBar tcg_mo
= TCG_BAR_SC
;
12098 case 0x4: /* SYNC_WMB */
12099 tcg_mo
|= TCG_MO_ST_ST
;
12101 case 0x10: /* SYNC_MB */
12102 tcg_mo
|= TCG_MO_ALL
;
12104 case 0x11: /* SYNC_ACQUIRE */
12105 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
12107 case 0x12: /* SYNC_RELEASE */
12108 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
12110 case 0x13: /* SYNC_RMB */
12111 tcg_mo
|= TCG_MO_LD_LD
;
12114 tcg_mo
|= TCG_MO_ALL
;
12118 tcg_gen_mb(tcg_mo
);
12121 /* ISA extensions (ASEs) */
12123 /* MIPS16 extension to MIPS32 */
12124 #include "mips16e_translate.c.inc"
12126 /* microMIPS extension to MIPS32/MIPS64 */
12129 * Values for microMIPS fmt field. Variable-width, depending on which
12130 * formats the instruction supports.
12149 #include "micromips_translate.c.inc"
12151 #include "nanomips_translate.c.inc"
12153 /* MIPSDSP functions. */
12155 /* Indexed load is not for DSP only */
12156 static void gen_mips_lx(DisasContext
*ctx
, uint32_t opc
,
12157 int rd
, int base
, int offset
)
12161 if (!(ctx
->insn_flags
& INSN_OCTEON
)) {
12164 t0
= tcg_temp_new();
12167 gen_load_gpr(t0
, offset
);
12168 } else if (offset
== 0) {
12169 gen_load_gpr(t0
, base
);
12171 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
12176 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
12177 gen_store_gpr(t0
, rd
);
12180 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
12181 gen_store_gpr(t0
, rd
);
12184 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
12185 gen_store_gpr(t0
, rd
);
12187 #if defined(TARGET_MIPS64)
12189 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
12190 gen_store_gpr(t0
, rd
);
12197 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
12198 int ret
, int v1
, int v2
)
12204 /* Treat as NOP. */
12208 v1_t
= tcg_temp_new();
12209 v2_t
= tcg_temp_new();
12211 gen_load_gpr(v1_t
, v1
);
12212 gen_load_gpr(v2_t
, v2
);
12215 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
12216 case OPC_MULT_G_2E
:
12220 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12222 case OPC_ADDUH_R_QB
:
12223 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12226 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12228 case OPC_ADDQH_R_PH
:
12229 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12232 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
12234 case OPC_ADDQH_R_W
:
12235 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
12238 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12240 case OPC_SUBUH_R_QB
:
12241 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12244 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12246 case OPC_SUBQH_R_PH
:
12247 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12250 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
12252 case OPC_SUBQH_R_W
:
12253 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
12257 case OPC_ABSQ_S_PH_DSP
:
12259 case OPC_ABSQ_S_QB
:
12261 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
12263 case OPC_ABSQ_S_PH
:
12265 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
12269 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
12271 case OPC_PRECEQ_W_PHL
:
12273 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
12274 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
12276 case OPC_PRECEQ_W_PHR
:
12278 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
12279 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
12280 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
12282 case OPC_PRECEQU_PH_QBL
:
12284 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
12286 case OPC_PRECEQU_PH_QBR
:
12288 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
12290 case OPC_PRECEQU_PH_QBLA
:
12292 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
12294 case OPC_PRECEQU_PH_QBRA
:
12296 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
12298 case OPC_PRECEU_PH_QBL
:
12300 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
12302 case OPC_PRECEU_PH_QBR
:
12304 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
12306 case OPC_PRECEU_PH_QBLA
:
12308 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
12310 case OPC_PRECEU_PH_QBRA
:
12312 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
12316 case OPC_ADDU_QB_DSP
:
12320 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12322 case OPC_ADDQ_S_PH
:
12324 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12328 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12332 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12334 case OPC_ADDU_S_QB
:
12336 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12340 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12342 case OPC_ADDU_S_PH
:
12344 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12348 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12350 case OPC_SUBQ_S_PH
:
12352 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12356 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12360 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12362 case OPC_SUBU_S_QB
:
12364 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12368 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12370 case OPC_SUBU_S_PH
:
12372 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12376 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12380 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12384 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
12386 case OPC_RADDU_W_QB
:
12388 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
12392 case OPC_CMPU_EQ_QB_DSP
:
12394 case OPC_PRECR_QB_PH
:
12396 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12398 case OPC_PRECRQ_QB_PH
:
12400 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12402 case OPC_PRECR_SRA_PH_W
:
12405 TCGv_i32 sa_t
= tcg_const_i32(v2
);
12406 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
12408 tcg_temp_free_i32(sa_t
);
12411 case OPC_PRECR_SRA_R_PH_W
:
12414 TCGv_i32 sa_t
= tcg_const_i32(v2
);
12415 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
12417 tcg_temp_free_i32(sa_t
);
12420 case OPC_PRECRQ_PH_W
:
12422 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
12424 case OPC_PRECRQ_RS_PH_W
:
12426 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12428 case OPC_PRECRQU_S_QB_PH
:
12430 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12434 #ifdef TARGET_MIPS64
12435 case OPC_ABSQ_S_QH_DSP
:
12437 case OPC_PRECEQ_L_PWL
:
12439 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
12441 case OPC_PRECEQ_L_PWR
:
12443 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
12445 case OPC_PRECEQ_PW_QHL
:
12447 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
12449 case OPC_PRECEQ_PW_QHR
:
12451 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
12453 case OPC_PRECEQ_PW_QHLA
:
12455 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
12457 case OPC_PRECEQ_PW_QHRA
:
12459 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
12461 case OPC_PRECEQU_QH_OBL
:
12463 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
12465 case OPC_PRECEQU_QH_OBR
:
12467 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
12469 case OPC_PRECEQU_QH_OBLA
:
12471 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
12473 case OPC_PRECEQU_QH_OBRA
:
12475 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
12477 case OPC_PRECEU_QH_OBL
:
12479 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
12481 case OPC_PRECEU_QH_OBR
:
12483 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
12485 case OPC_PRECEU_QH_OBLA
:
12487 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
12489 case OPC_PRECEU_QH_OBRA
:
12491 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
12493 case OPC_ABSQ_S_OB
:
12495 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
12497 case OPC_ABSQ_S_PW
:
12499 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
12501 case OPC_ABSQ_S_QH
:
12503 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
12507 case OPC_ADDU_OB_DSP
:
12509 case OPC_RADDU_L_OB
:
12511 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
12515 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12517 case OPC_SUBQ_S_PW
:
12519 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12523 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12525 case OPC_SUBQ_S_QH
:
12527 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12531 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12533 case OPC_SUBU_S_OB
:
12535 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12539 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12541 case OPC_SUBU_S_QH
:
12543 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12547 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
12549 case OPC_SUBUH_R_OB
:
12551 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
12555 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12557 case OPC_ADDQ_S_PW
:
12559 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12563 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12565 case OPC_ADDQ_S_QH
:
12567 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12571 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12573 case OPC_ADDU_S_OB
:
12575 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12579 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12581 case OPC_ADDU_S_QH
:
12583 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12587 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
12589 case OPC_ADDUH_R_OB
:
12591 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
12595 case OPC_CMPU_EQ_OB_DSP
:
12597 case OPC_PRECR_OB_QH
:
12599 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
12601 case OPC_PRECR_SRA_QH_PW
:
12604 TCGv_i32 ret_t
= tcg_const_i32(ret
);
12605 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
12606 tcg_temp_free_i32(ret_t
);
12609 case OPC_PRECR_SRA_R_QH_PW
:
12612 TCGv_i32 sa_v
= tcg_const_i32(ret
);
12613 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
12614 tcg_temp_free_i32(sa_v
);
12617 case OPC_PRECRQ_OB_QH
:
12619 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
12621 case OPC_PRECRQ_PW_L
:
12623 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
12625 case OPC_PRECRQ_QH_PW
:
12627 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
12629 case OPC_PRECRQ_RS_QH_PW
:
12631 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12633 case OPC_PRECRQU_S_OB_QH
:
12635 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12642 tcg_temp_free(v1_t
);
12643 tcg_temp_free(v2_t
);
12646 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
12647 int ret
, int v1
, int v2
)
12655 /* Treat as NOP. */
12659 t0
= tcg_temp_new();
12660 v1_t
= tcg_temp_new();
12661 v2_t
= tcg_temp_new();
12663 tcg_gen_movi_tl(t0
, v1
);
12664 gen_load_gpr(v1_t
, v1
);
12665 gen_load_gpr(v2_t
, v2
);
12668 case OPC_SHLL_QB_DSP
:
12670 op2
= MASK_SHLL_QB(ctx
->opcode
);
12674 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
12678 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12682 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
12686 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12688 case OPC_SHLL_S_PH
:
12690 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
12692 case OPC_SHLLV_S_PH
:
12694 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12698 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
12700 case OPC_SHLLV_S_W
:
12702 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12706 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
12710 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12714 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
12718 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12722 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
12724 case OPC_SHRA_R_QB
:
12726 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
12730 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12732 case OPC_SHRAV_R_QB
:
12734 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12738 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
12740 case OPC_SHRA_R_PH
:
12742 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
12746 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12748 case OPC_SHRAV_R_PH
:
12750 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12754 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
12756 case OPC_SHRAV_R_W
:
12758 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
12760 default: /* Invalid */
12761 MIPS_INVAL("MASK SHLL.QB");
12762 gen_reserved_instruction(ctx
);
12767 #ifdef TARGET_MIPS64
12768 case OPC_SHLL_OB_DSP
:
12769 op2
= MASK_SHLL_OB(ctx
->opcode
);
12773 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
12777 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
12779 case OPC_SHLL_S_PW
:
12781 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
12783 case OPC_SHLLV_S_PW
:
12785 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
12789 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
12793 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
12797 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
12801 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
12803 case OPC_SHLL_S_QH
:
12805 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
12807 case OPC_SHLLV_S_QH
:
12809 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
12813 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
12817 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
12819 case OPC_SHRA_R_OB
:
12821 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
12823 case OPC_SHRAV_R_OB
:
12825 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
12829 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
12833 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
12835 case OPC_SHRA_R_PW
:
12837 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
12839 case OPC_SHRAV_R_PW
:
12841 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
12845 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
12849 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
12851 case OPC_SHRA_R_QH
:
12853 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
12855 case OPC_SHRAV_R_QH
:
12857 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
12861 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
12865 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
12869 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
12873 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
12875 default: /* Invalid */
12876 MIPS_INVAL("MASK SHLL.OB");
12877 gen_reserved_instruction(ctx
);
12885 tcg_temp_free(v1_t
);
12886 tcg_temp_free(v2_t
);
12889 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
12890 int ret
, int v1
, int v2
, int check_ret
)
12896 if ((ret
== 0) && (check_ret
== 1)) {
12897 /* Treat as NOP. */
12901 t0
= tcg_temp_new_i32();
12902 v1_t
= tcg_temp_new();
12903 v2_t
= tcg_temp_new();
12905 tcg_gen_movi_i32(t0
, ret
);
12906 gen_load_gpr(v1_t
, v1
);
12907 gen_load_gpr(v2_t
, v2
);
12911 * OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
12912 * the same mask and op1.
12914 case OPC_MULT_G_2E
:
12918 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12921 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12924 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12926 case OPC_MULQ_RS_W
:
12927 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12931 case OPC_DPA_W_PH_DSP
:
12933 case OPC_DPAU_H_QBL
:
12935 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
12937 case OPC_DPAU_H_QBR
:
12939 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
12941 case OPC_DPSU_H_QBL
:
12943 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
12945 case OPC_DPSU_H_QBR
:
12947 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
12951 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12953 case OPC_DPAX_W_PH
:
12955 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12957 case OPC_DPAQ_S_W_PH
:
12959 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12961 case OPC_DPAQX_S_W_PH
:
12963 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12965 case OPC_DPAQX_SA_W_PH
:
12967 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12971 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12973 case OPC_DPSX_W_PH
:
12975 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12977 case OPC_DPSQ_S_W_PH
:
12979 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12981 case OPC_DPSQX_S_W_PH
:
12983 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12985 case OPC_DPSQX_SA_W_PH
:
12987 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12989 case OPC_MULSAQ_S_W_PH
:
12991 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12993 case OPC_DPAQ_SA_L_W
:
12995 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
12997 case OPC_DPSQ_SA_L_W
:
12999 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
13001 case OPC_MAQ_S_W_PHL
:
13003 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
13005 case OPC_MAQ_S_W_PHR
:
13007 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
13009 case OPC_MAQ_SA_W_PHL
:
13011 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
13013 case OPC_MAQ_SA_W_PHR
:
13015 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
13017 case OPC_MULSA_W_PH
:
13019 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
13023 #ifdef TARGET_MIPS64
13024 case OPC_DPAQ_W_QH_DSP
:
13026 int ac
= ret
& 0x03;
13027 tcg_gen_movi_i32(t0
, ac
);
13032 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
13036 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
13040 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
13044 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
13048 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
13050 case OPC_DPAQ_S_W_QH
:
13052 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
13054 case OPC_DPAQ_SA_L_PW
:
13056 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
13058 case OPC_DPAU_H_OBL
:
13060 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
13062 case OPC_DPAU_H_OBR
:
13064 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
13068 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
13070 case OPC_DPSQ_S_W_QH
:
13072 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
13074 case OPC_DPSQ_SA_L_PW
:
13076 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
13078 case OPC_DPSU_H_OBL
:
13080 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
13082 case OPC_DPSU_H_OBR
:
13084 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
13086 case OPC_MAQ_S_L_PWL
:
13088 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
13090 case OPC_MAQ_S_L_PWR
:
13092 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
13094 case OPC_MAQ_S_W_QHLL
:
13096 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
13098 case OPC_MAQ_SA_W_QHLL
:
13100 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
13102 case OPC_MAQ_S_W_QHLR
:
13104 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
13106 case OPC_MAQ_SA_W_QHLR
:
13108 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
13110 case OPC_MAQ_S_W_QHRL
:
13112 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
13114 case OPC_MAQ_SA_W_QHRL
:
13116 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
13118 case OPC_MAQ_S_W_QHRR
:
13120 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
13122 case OPC_MAQ_SA_W_QHRR
:
13124 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
13126 case OPC_MULSAQ_S_L_PW
:
13128 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
13130 case OPC_MULSAQ_S_W_QH
:
13132 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
13138 case OPC_ADDU_QB_DSP
:
13140 case OPC_MULEU_S_PH_QBL
:
13142 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13144 case OPC_MULEU_S_PH_QBR
:
13146 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13148 case OPC_MULQ_RS_PH
:
13150 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13152 case OPC_MULEQ_S_W_PHL
:
13154 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13156 case OPC_MULEQ_S_W_PHR
:
13158 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13160 case OPC_MULQ_S_PH
:
13162 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13166 #ifdef TARGET_MIPS64
13167 case OPC_ADDU_OB_DSP
:
13169 case OPC_MULEQ_S_PW_QHL
:
13171 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13173 case OPC_MULEQ_S_PW_QHR
:
13175 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13177 case OPC_MULEU_S_QH_OBL
:
13179 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13181 case OPC_MULEU_S_QH_OBR
:
13183 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13185 case OPC_MULQ_RS_QH
:
13187 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13194 tcg_temp_free_i32(t0
);
13195 tcg_temp_free(v1_t
);
13196 tcg_temp_free(v2_t
);
13199 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
13207 /* Treat as NOP. */
13211 t0
= tcg_temp_new();
13212 val_t
= tcg_temp_new();
13213 gen_load_gpr(val_t
, val
);
13216 case OPC_ABSQ_S_PH_DSP
:
13220 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
13225 target_long result
;
13226 imm
= (ctx
->opcode
>> 16) & 0xFF;
13227 result
= (uint32_t)imm
<< 24 |
13228 (uint32_t)imm
<< 16 |
13229 (uint32_t)imm
<< 8 |
13231 result
= (int32_t)result
;
13232 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
13237 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
13238 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
13239 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13240 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
13241 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13242 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
13247 imm
= (ctx
->opcode
>> 16) & 0x03FF;
13248 imm
= (int16_t)(imm
<< 6) >> 6;
13249 tcg_gen_movi_tl(cpu_gpr
[ret
], \
13250 (target_long
)((int32_t)imm
<< 16 | \
13256 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
13257 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
13258 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13259 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
13263 #ifdef TARGET_MIPS64
13264 case OPC_ABSQ_S_QH_DSP
:
13271 imm
= (ctx
->opcode
>> 16) & 0xFF;
13272 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
13273 temp
= (temp
<< 16) | temp
;
13274 temp
= (temp
<< 32) | temp
;
13275 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
13283 imm
= (ctx
->opcode
>> 16) & 0x03FF;
13284 imm
= (int16_t)(imm
<< 6) >> 6;
13285 temp
= ((target_long
)imm
<< 32) \
13286 | ((target_long
)imm
& 0xFFFFFFFF);
13287 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
13295 imm
= (ctx
->opcode
>> 16) & 0x03FF;
13296 imm
= (int16_t)(imm
<< 6) >> 6;
13298 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
13299 ((uint64_t)(uint16_t)imm
<< 32) |
13300 ((uint64_t)(uint16_t)imm
<< 16) |
13301 (uint64_t)(uint16_t)imm
;
13302 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
13307 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
13308 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
13309 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13310 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
13311 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13312 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
13313 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13317 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
13318 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
13319 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13323 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
13324 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
13325 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13326 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
13327 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13334 tcg_temp_free(val_t
);
13337 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
13338 uint32_t op1
, uint32_t op2
,
13339 int ret
, int v1
, int v2
, int check_ret
)
13345 if ((ret
== 0) && (check_ret
== 1)) {
13346 /* Treat as NOP. */
13350 t1
= tcg_temp_new();
13351 v1_t
= tcg_temp_new();
13352 v2_t
= tcg_temp_new();
13354 gen_load_gpr(v1_t
, v1
);
13355 gen_load_gpr(v2_t
, v2
);
13358 case OPC_CMPU_EQ_QB_DSP
:
13360 case OPC_CMPU_EQ_QB
:
13362 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
13364 case OPC_CMPU_LT_QB
:
13366 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
13368 case OPC_CMPU_LE_QB
:
13370 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
13372 case OPC_CMPGU_EQ_QB
:
13374 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
13376 case OPC_CMPGU_LT_QB
:
13378 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
13380 case OPC_CMPGU_LE_QB
:
13382 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
13384 case OPC_CMPGDU_EQ_QB
:
13386 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
13387 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
13388 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
13389 tcg_gen_shli_tl(t1
, t1
, 24);
13390 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
13392 case OPC_CMPGDU_LT_QB
:
13394 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
13395 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
13396 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
13397 tcg_gen_shli_tl(t1
, t1
, 24);
13398 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
13400 case OPC_CMPGDU_LE_QB
:
13402 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
13403 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
13404 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
13405 tcg_gen_shli_tl(t1
, t1
, 24);
13406 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
13408 case OPC_CMP_EQ_PH
:
13410 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
13412 case OPC_CMP_LT_PH
:
13414 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
13416 case OPC_CMP_LE_PH
:
13418 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
13422 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13426 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13428 case OPC_PACKRL_PH
:
13430 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
13434 #ifdef TARGET_MIPS64
13435 case OPC_CMPU_EQ_OB_DSP
:
13437 case OPC_CMP_EQ_PW
:
13439 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
13441 case OPC_CMP_LT_PW
:
13443 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
13445 case OPC_CMP_LE_PW
:
13447 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
13449 case OPC_CMP_EQ_QH
:
13451 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
13453 case OPC_CMP_LT_QH
:
13455 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
13457 case OPC_CMP_LE_QH
:
13459 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
13461 case OPC_CMPGDU_EQ_OB
:
13463 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13465 case OPC_CMPGDU_LT_OB
:
13467 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13469 case OPC_CMPGDU_LE_OB
:
13471 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13473 case OPC_CMPGU_EQ_OB
:
13475 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
13477 case OPC_CMPGU_LT_OB
:
13479 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
13481 case OPC_CMPGU_LE_OB
:
13483 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
13485 case OPC_CMPU_EQ_OB
:
13487 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
13489 case OPC_CMPU_LT_OB
:
13491 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
13493 case OPC_CMPU_LE_OB
:
13495 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
13497 case OPC_PACKRL_PW
:
13499 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
13503 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13507 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13511 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13519 tcg_temp_free(v1_t
);
13520 tcg_temp_free(v2_t
);
13523 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
13524 uint32_t op1
, int rt
, int rs
, int sa
)
13531 /* Treat as NOP. */
13535 t0
= tcg_temp_new();
13536 gen_load_gpr(t0
, rs
);
13539 case OPC_APPEND_DSP
:
13540 switch (MASK_APPEND(ctx
->opcode
)) {
13543 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
13545 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
13549 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
13550 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
13551 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
13552 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
13554 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
13558 if (sa
!= 0 && sa
!= 2) {
13559 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
13560 tcg_gen_ext32u_tl(t0
, t0
);
13561 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
13562 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
13564 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
13566 default: /* Invalid */
13567 MIPS_INVAL("MASK APPEND");
13568 gen_reserved_instruction(ctx
);
13572 #ifdef TARGET_MIPS64
13573 case OPC_DAPPEND_DSP
:
13574 switch (MASK_DAPPEND(ctx
->opcode
)) {
13577 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
13581 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
13582 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
13583 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
13587 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
13588 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
13589 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
13594 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
13595 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
13596 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
13597 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
13600 default: /* Invalid */
13601 MIPS_INVAL("MASK DAPPEND");
13602 gen_reserved_instruction(ctx
);
13611 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
13612 int ret
, int v1
, int v2
, int check_ret
)
13620 if ((ret
== 0) && (check_ret
== 1)) {
13621 /* Treat as NOP. */
13625 t0
= tcg_temp_new();
13626 t1
= tcg_temp_new();
13627 v1_t
= tcg_temp_new();
13629 gen_load_gpr(v1_t
, v1
);
13632 case OPC_EXTR_W_DSP
:
13636 tcg_gen_movi_tl(t0
, v2
);
13637 tcg_gen_movi_tl(t1
, v1
);
13638 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13641 tcg_gen_movi_tl(t0
, v2
);
13642 tcg_gen_movi_tl(t1
, v1
);
13643 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13645 case OPC_EXTR_RS_W
:
13646 tcg_gen_movi_tl(t0
, v2
);
13647 tcg_gen_movi_tl(t1
, v1
);
13648 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13651 tcg_gen_movi_tl(t0
, v2
);
13652 tcg_gen_movi_tl(t1
, v1
);
13653 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13655 case OPC_EXTRV_S_H
:
13656 tcg_gen_movi_tl(t0
, v2
);
13657 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13660 tcg_gen_movi_tl(t0
, v2
);
13661 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13663 case OPC_EXTRV_R_W
:
13664 tcg_gen_movi_tl(t0
, v2
);
13665 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13667 case OPC_EXTRV_RS_W
:
13668 tcg_gen_movi_tl(t0
, v2
);
13669 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13672 tcg_gen_movi_tl(t0
, v2
);
13673 tcg_gen_movi_tl(t1
, v1
);
13674 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13677 tcg_gen_movi_tl(t0
, v2
);
13678 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13681 tcg_gen_movi_tl(t0
, v2
);
13682 tcg_gen_movi_tl(t1
, v1
);
13683 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13686 tcg_gen_movi_tl(t0
, v2
);
13687 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13690 imm
= (ctx
->opcode
>> 20) & 0x3F;
13691 tcg_gen_movi_tl(t0
, ret
);
13692 tcg_gen_movi_tl(t1
, imm
);
13693 gen_helper_shilo(t0
, t1
, cpu_env
);
13696 tcg_gen_movi_tl(t0
, ret
);
13697 gen_helper_shilo(t0
, v1_t
, cpu_env
);
13700 tcg_gen_movi_tl(t0
, ret
);
13701 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
13704 imm
= (ctx
->opcode
>> 11) & 0x3FF;
13705 tcg_gen_movi_tl(t0
, imm
);
13706 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
13709 imm
= (ctx
->opcode
>> 16) & 0x03FF;
13710 tcg_gen_movi_tl(t0
, imm
);
13711 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
13715 #ifdef TARGET_MIPS64
13716 case OPC_DEXTR_W_DSP
:
13720 tcg_gen_movi_tl(t0
, ret
);
13721 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
13725 int shift
= (ctx
->opcode
>> 19) & 0x7F;
13726 int ac
= (ctx
->opcode
>> 11) & 0x03;
13727 tcg_gen_movi_tl(t0
, shift
);
13728 tcg_gen_movi_tl(t1
, ac
);
13729 gen_helper_dshilo(t0
, t1
, cpu_env
);
13734 int ac
= (ctx
->opcode
>> 11) & 0x03;
13735 tcg_gen_movi_tl(t0
, ac
);
13736 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
13740 tcg_gen_movi_tl(t0
, v2
);
13741 tcg_gen_movi_tl(t1
, v1
);
13743 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13746 tcg_gen_movi_tl(t0
, v2
);
13747 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13750 tcg_gen_movi_tl(t0
, v2
);
13751 tcg_gen_movi_tl(t1
, v1
);
13752 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13755 tcg_gen_movi_tl(t0
, v2
);
13756 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13759 tcg_gen_movi_tl(t0
, v2
);
13760 tcg_gen_movi_tl(t1
, v1
);
13761 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13763 case OPC_DEXTR_R_L
:
13764 tcg_gen_movi_tl(t0
, v2
);
13765 tcg_gen_movi_tl(t1
, v1
);
13766 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13768 case OPC_DEXTR_RS_L
:
13769 tcg_gen_movi_tl(t0
, v2
);
13770 tcg_gen_movi_tl(t1
, v1
);
13771 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13774 tcg_gen_movi_tl(t0
, v2
);
13775 tcg_gen_movi_tl(t1
, v1
);
13776 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13778 case OPC_DEXTR_R_W
:
13779 tcg_gen_movi_tl(t0
, v2
);
13780 tcg_gen_movi_tl(t1
, v1
);
13781 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13783 case OPC_DEXTR_RS_W
:
13784 tcg_gen_movi_tl(t0
, v2
);
13785 tcg_gen_movi_tl(t1
, v1
);
13786 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13788 case OPC_DEXTR_S_H
:
13789 tcg_gen_movi_tl(t0
, v2
);
13790 tcg_gen_movi_tl(t1
, v1
);
13791 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13793 case OPC_DEXTRV_S_H
:
13794 tcg_gen_movi_tl(t0
, v2
);
13795 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13798 tcg_gen_movi_tl(t0
, v2
);
13799 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13801 case OPC_DEXTRV_R_L
:
13802 tcg_gen_movi_tl(t0
, v2
);
13803 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13805 case OPC_DEXTRV_RS_L
:
13806 tcg_gen_movi_tl(t0
, v2
);
13807 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13810 tcg_gen_movi_tl(t0
, v2
);
13811 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13813 case OPC_DEXTRV_R_W
:
13814 tcg_gen_movi_tl(t0
, v2
);
13815 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13817 case OPC_DEXTRV_RS_W
:
13818 tcg_gen_movi_tl(t0
, v2
);
13819 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13828 tcg_temp_free(v1_t
);
/* End MIPSDSP functions. */
13833 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
13835 int rs
, rt
, rd
, sa
;
13838 rs
= (ctx
->opcode
>> 21) & 0x1f;
13839 rt
= (ctx
->opcode
>> 16) & 0x1f;
13840 rd
= (ctx
->opcode
>> 11) & 0x1f;
13841 sa
= (ctx
->opcode
>> 6) & 0x1f;
13843 op1
= MASK_SPECIAL(ctx
->opcode
);
13849 op2
= MASK_R6_MULDIV(ctx
->opcode
);
13859 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
13862 MIPS_INVAL("special_r6 muldiv");
13863 gen_reserved_instruction(ctx
);
13869 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
13873 if (rt
== 0 && sa
== 1) {
13875 * Major opcode and function field is shared with preR6 MFHI/MTHI.
13876 * We need additionally to check other fields.
13878 gen_cl(ctx
, op1
, rd
, rs
);
13880 gen_reserved_instruction(ctx
);
13884 if (is_uhi(ctx
, extract32(ctx
->opcode
, 6, 20))) {
13885 ctx
->base
.is_jmp
= DISAS_SEMIHOST
;
13887 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13888 gen_reserved_instruction(ctx
);
13890 generate_exception_end(ctx
, EXCP_DBp
);
13894 #if defined(TARGET_MIPS64)
13897 if (rt
== 0 && sa
== 1) {
13899 * Major opcode and function field is shared with preR6 MFHI/MTHI.
13900 * We need additionally to check other fields.
13902 check_mips_64(ctx
);
13903 gen_cl(ctx
, op1
, rd
, rs
);
13905 gen_reserved_instruction(ctx
);
13913 op2
= MASK_R6_MULDIV(ctx
->opcode
);
13923 check_mips_64(ctx
);
13924 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
13927 MIPS_INVAL("special_r6 muldiv");
13928 gen_reserved_instruction(ctx
);
13933 default: /* Invalid */
13934 MIPS_INVAL("special_r6");
13935 gen_reserved_instruction(ctx
);
13940 static void decode_opc_special_tx79(CPUMIPSState
*env
, DisasContext
*ctx
)
13942 int rs
= extract32(ctx
->opcode
, 21, 5);
13943 int rt
= extract32(ctx
->opcode
, 16, 5);
13944 int rd
= extract32(ctx
->opcode
, 11, 5);
13945 uint32_t op1
= MASK_SPECIAL(ctx
->opcode
);
13948 case OPC_MOVN
: /* Conditional move */
13950 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
13952 case OPC_MFHI
: /* Move from HI/LO */
13954 gen_HILO(ctx
, op1
, 0, rd
);
13957 case OPC_MTLO
: /* Move to HI/LO */
13958 gen_HILO(ctx
, op1
, 0, rs
);
13962 gen_mul_txx9(ctx
, op1
, rd
, rs
, rt
);
13966 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
13968 #if defined(TARGET_MIPS64)
13973 check_insn_opc_user_only(ctx
, INSN_R5900
);
13974 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
13978 gen_compute_branch(ctx
, op1
, 4, rs
, 0, 0, 4);
13980 default: /* Invalid */
13981 MIPS_INVAL("special_tx79");
13982 gen_reserved_instruction(ctx
);
13987 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
13992 rs
= (ctx
->opcode
>> 21) & 0x1f;
13993 rt
= (ctx
->opcode
>> 16) & 0x1f;
13994 rd
= (ctx
->opcode
>> 11) & 0x1f;
13996 op1
= MASK_SPECIAL(ctx
->opcode
);
13998 case OPC_MOVN
: /* Conditional move */
14000 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R1
|
14001 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
14002 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
14004 case OPC_MFHI
: /* Move from HI/LO */
14006 gen_HILO(ctx
, op1
, rs
& 3, rd
);
14009 case OPC_MTLO
: /* Move to HI/LO */
14010 gen_HILO(ctx
, op1
, rd
& 3, rs
);
14013 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R1
);
14014 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14015 check_cp1_enabled(ctx
);
14016 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
14017 (ctx
->opcode
>> 16) & 1);
14019 generate_exception_err(ctx
, EXCP_CpU
, 1);
14024 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
14028 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
14030 #if defined(TARGET_MIPS64)
14035 check_insn(ctx
, ISA_MIPS3
);
14036 check_mips_64(ctx
);
14037 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
14041 gen_compute_branch(ctx
, op1
, 4, rs
, 0, 0, 4);
14044 #ifdef MIPS_STRICT_STANDARD
14045 MIPS_INVAL("SPIM");
14046 gen_reserved_instruction(ctx
);
14048 /* Implemented as RI exception for now. */
14049 MIPS_INVAL("spim (unofficial)");
14050 gen_reserved_instruction(ctx
);
14053 default: /* Invalid */
14054 MIPS_INVAL("special_legacy");
14055 gen_reserved_instruction(ctx
);
14060 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
14062 int rs
, rt
, rd
, sa
;
14065 rs
= (ctx
->opcode
>> 21) & 0x1f;
14066 rt
= (ctx
->opcode
>> 16) & 0x1f;
14067 rd
= (ctx
->opcode
>> 11) & 0x1f;
14068 sa
= (ctx
->opcode
>> 6) & 0x1f;
14070 op1
= MASK_SPECIAL(ctx
->opcode
);
14072 case OPC_SLL
: /* Shift with immediate */
14073 if (sa
== 5 && rd
== 0 &&
14074 rs
== 0 && rt
== 0) { /* PAUSE */
14075 if ((ctx
->insn_flags
& ISA_MIPS_R6
) &&
14076 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
14077 gen_reserved_instruction(ctx
);
14083 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14086 switch ((ctx
->opcode
>> 21) & 0x1f) {
14088 /* rotr is decoded as srl on non-R2 CPUs */
14089 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
14094 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14097 gen_reserved_instruction(ctx
);
14105 gen_arith(ctx
, op1
, rd
, rs
, rt
);
14107 case OPC_SLLV
: /* Shifts */
14109 gen_shift(ctx
, op1
, rd
, rs
, rt
);
14112 switch ((ctx
->opcode
>> 6) & 0x1f) {
14114 /* rotrv is decoded as srlv on non-R2 CPUs */
14115 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
14120 gen_shift(ctx
, op1
, rd
, rs
, rt
);
14123 gen_reserved_instruction(ctx
);
14127 case OPC_SLT
: /* Set on less than */
14129 gen_slt(ctx
, op1
, rd
, rs
, rt
);
14131 case OPC_AND
: /* Logic*/
14135 gen_logic(ctx
, op1
, rd
, rs
, rt
);
14138 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
14140 case OPC_TGE
: /* Traps */
14146 check_insn(ctx
, ISA_MIPS2
);
14147 gen_trap(ctx
, op1
, rs
, rt
, -1, extract32(ctx
->opcode
, 6, 10));
14150 /* Pmon entry point, also R4010 selsl */
14151 #ifdef MIPS_STRICT_STANDARD
14152 MIPS_INVAL("PMON / selsl");
14153 gen_reserved_instruction(ctx
);
14155 gen_helper_pmon(cpu_env
, tcg_constant_i32(sa
));
14159 generate_exception_end(ctx
, EXCP_SYSCALL
);
14162 generate_exception_break(ctx
, extract32(ctx
->opcode
, 6, 20));
14165 check_insn(ctx
, ISA_MIPS2
);
14166 gen_sync(extract32(ctx
->opcode
, 6, 5));
14169 #if defined(TARGET_MIPS64)
14170 /* MIPS64 specific opcodes */
14175 check_insn(ctx
, ISA_MIPS3
);
14176 check_mips_64(ctx
);
14177 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14180 switch ((ctx
->opcode
>> 21) & 0x1f) {
14182 /* drotr is decoded as dsrl on non-R2 CPUs */
14183 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
14188 check_insn(ctx
, ISA_MIPS3
);
14189 check_mips_64(ctx
);
14190 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14193 gen_reserved_instruction(ctx
);
14198 switch ((ctx
->opcode
>> 21) & 0x1f) {
14200 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
14201 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
14206 check_insn(ctx
, ISA_MIPS3
);
14207 check_mips_64(ctx
);
14208 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14211 gen_reserved_instruction(ctx
);
14219 check_insn(ctx
, ISA_MIPS3
);
14220 check_mips_64(ctx
);
14221 gen_arith(ctx
, op1
, rd
, rs
, rt
);
14225 check_insn(ctx
, ISA_MIPS3
);
14226 check_mips_64(ctx
);
14227 gen_shift(ctx
, op1
, rd
, rs
, rt
);
14230 switch ((ctx
->opcode
>> 6) & 0x1f) {
14232 /* drotrv is decoded as dsrlv on non-R2 CPUs */
14233 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
14238 check_insn(ctx
, ISA_MIPS3
);
14239 check_mips_64(ctx
);
14240 gen_shift(ctx
, op1
, rd
, rs
, rt
);
14243 gen_reserved_instruction(ctx
);
14249 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
14250 decode_opc_special_r6(env
, ctx
);
14251 } else if (ctx
->insn_flags
& INSN_R5900
) {
14252 decode_opc_special_tx79(env
, ctx
);
14254 decode_opc_special_legacy(env
, ctx
);
14260 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
14265 rs
= (ctx
->opcode
>> 21) & 0x1f;
14266 rt
= (ctx
->opcode
>> 16) & 0x1f;
14267 rd
= (ctx
->opcode
>> 11) & 0x1f;
14269 op1
= MASK_SPECIAL2(ctx
->opcode
);
14271 case OPC_MADD
: /* Multiply and add/sub */
14275 check_insn(ctx
, ISA_MIPS_R1
);
14276 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
14279 gen_arith(ctx
, op1
, rd
, rs
, rt
);
14282 case OPC_DIVU_G_2F
:
14283 case OPC_MULT_G_2F
:
14284 case OPC_MULTU_G_2F
:
14286 case OPC_MODU_G_2F
:
14287 check_insn(ctx
, INSN_LOONGSON2F
| ASE_LEXT
);
14288 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
14292 check_insn(ctx
, ISA_MIPS_R1
);
14293 gen_cl(ctx
, op1
, rd
, rs
);
14296 if (is_uhi(ctx
, extract32(ctx
->opcode
, 6, 20))) {
14297 ctx
->base
.is_jmp
= DISAS_SEMIHOST
;
14300 * XXX: not clear which exception should be raised
14301 * when in debug mode...
14303 check_insn(ctx
, ISA_MIPS_R1
);
14304 generate_exception_end(ctx
, EXCP_DBp
);
14307 #if defined(TARGET_MIPS64)
14310 check_insn(ctx
, ISA_MIPS_R1
);
14311 check_mips_64(ctx
);
14312 gen_cl(ctx
, op1
, rd
, rs
);
14314 case OPC_DMULT_G_2F
:
14315 case OPC_DMULTU_G_2F
:
14316 case OPC_DDIV_G_2F
:
14317 case OPC_DDIVU_G_2F
:
14318 case OPC_DMOD_G_2F
:
14319 case OPC_DMODU_G_2F
:
14320 check_insn(ctx
, INSN_LOONGSON2F
| ASE_LEXT
);
14321 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
14324 default: /* Invalid */
14325 MIPS_INVAL("special2_legacy");
14326 gen_reserved_instruction(ctx
);
14331 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
14333 int rs
, rt
, rd
, sa
;
14337 rs
= (ctx
->opcode
>> 21) & 0x1f;
14338 rt
= (ctx
->opcode
>> 16) & 0x1f;
14339 rd
= (ctx
->opcode
>> 11) & 0x1f;
14340 sa
= (ctx
->opcode
>> 6) & 0x1f;
14341 imm
= (int16_t)ctx
->opcode
>> 7;
14343 op1
= MASK_SPECIAL3(ctx
->opcode
);
14347 /* hint codes 24-31 are reserved and signal RI */
14348 gen_reserved_instruction(ctx
);
14350 /* Treat as NOP. */
14353 check_cp0_enabled(ctx
);
14354 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
14355 gen_cache_operation(ctx
, rt
, rs
, imm
);
14359 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, false);
14362 gen_ld(ctx
, op1
, rt
, rs
, imm
);
14367 /* Treat as NOP. */
14370 op2
= MASK_BSHFL(ctx
->opcode
);
14376 gen_align(ctx
, 32, rd
, rs
, rt
, sa
& 3);
14379 gen_bitswap(ctx
, op2
, rd
, rt
);
14384 #ifndef CONFIG_USER_ONLY
14386 if (unlikely(ctx
->gi
<= 1)) {
14387 gen_reserved_instruction(ctx
);
14389 check_cp0_enabled(ctx
);
14390 switch ((ctx
->opcode
>> 6) & 3) {
14391 case 0: /* GINVI */
14392 /* Treat as NOP. */
14394 case 2: /* GINVT */
14395 gen_helper_0e1i(ginvt
, cpu_gpr
[rs
], extract32(ctx
->opcode
, 8, 2));
14398 gen_reserved_instruction(ctx
);
14403 #if defined(TARGET_MIPS64)
14405 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TEUQ
, false);
14408 gen_ld(ctx
, op1
, rt
, rs
, imm
);
14411 check_mips_64(ctx
);
14414 /* Treat as NOP. */
14417 op2
= MASK_DBSHFL(ctx
->opcode
);
14427 gen_align(ctx
, 64, rd
, rs
, rt
, sa
& 7);
14430 gen_bitswap(ctx
, op2
, rd
, rt
);
14437 default: /* Invalid */
14438 MIPS_INVAL("special3_r6");
14439 gen_reserved_instruction(ctx
);
14444 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
14449 rs
= (ctx
->opcode
>> 21) & 0x1f;
14450 rt
= (ctx
->opcode
>> 16) & 0x1f;
14451 rd
= (ctx
->opcode
>> 11) & 0x1f;
14453 op1
= MASK_SPECIAL3(ctx
->opcode
);
14456 case OPC_DIVU_G_2E
:
14458 case OPC_MODU_G_2E
:
14459 case OPC_MULT_G_2E
:
14460 case OPC_MULTU_G_2E
:
14462 * OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
14463 * the same mask and op1.
14465 if ((ctx
->insn_flags
& ASE_DSP_R2
) && (op1
== OPC_MULT_G_2E
)) {
14466 op2
= MASK_ADDUH_QB(ctx
->opcode
);
14469 case OPC_ADDUH_R_QB
:
14471 case OPC_ADDQH_R_PH
:
14473 case OPC_ADDQH_R_W
:
14475 case OPC_SUBUH_R_QB
:
14477 case OPC_SUBQH_R_PH
:
14479 case OPC_SUBQH_R_W
:
14480 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14485 case OPC_MULQ_RS_W
:
14486 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14489 MIPS_INVAL("MASK ADDUH.QB");
14490 gen_reserved_instruction(ctx
);
14493 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
14494 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
14496 gen_reserved_instruction(ctx
);
14500 op2
= MASK_LX(ctx
->opcode
);
14502 #if defined(TARGET_MIPS64)
14508 gen_mips_lx(ctx
, op2
, rd
, rs
, rt
);
14510 default: /* Invalid */
14511 MIPS_INVAL("MASK LX");
14512 gen_reserved_instruction(ctx
);
14516 case OPC_ABSQ_S_PH_DSP
:
14517 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
14519 case OPC_ABSQ_S_QB
:
14520 case OPC_ABSQ_S_PH
:
14522 case OPC_PRECEQ_W_PHL
:
14523 case OPC_PRECEQ_W_PHR
:
14524 case OPC_PRECEQU_PH_QBL
:
14525 case OPC_PRECEQU_PH_QBR
:
14526 case OPC_PRECEQU_PH_QBLA
:
14527 case OPC_PRECEQU_PH_QBRA
:
14528 case OPC_PRECEU_PH_QBL
:
14529 case OPC_PRECEU_PH_QBR
:
14530 case OPC_PRECEU_PH_QBLA
:
14531 case OPC_PRECEU_PH_QBRA
:
14532 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14539 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
14542 MIPS_INVAL("MASK ABSQ_S.PH");
14543 gen_reserved_instruction(ctx
);
14547 case OPC_ADDU_QB_DSP
:
14548 op2
= MASK_ADDU_QB(ctx
->opcode
);
14551 case OPC_ADDQ_S_PH
:
14554 case OPC_ADDU_S_QB
:
14556 case OPC_ADDU_S_PH
:
14558 case OPC_SUBQ_S_PH
:
14561 case OPC_SUBU_S_QB
:
14563 case OPC_SUBU_S_PH
:
14567 case OPC_RADDU_W_QB
:
14568 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14570 case OPC_MULEU_S_PH_QBL
:
14571 case OPC_MULEU_S_PH_QBR
:
14572 case OPC_MULQ_RS_PH
:
14573 case OPC_MULEQ_S_W_PHL
:
14574 case OPC_MULEQ_S_W_PHR
:
14575 case OPC_MULQ_S_PH
:
14576 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14578 default: /* Invalid */
14579 MIPS_INVAL("MASK ADDU.QB");
14580 gen_reserved_instruction(ctx
);
14585 case OPC_CMPU_EQ_QB_DSP
:
14586 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
14588 case OPC_PRECR_SRA_PH_W
:
14589 case OPC_PRECR_SRA_R_PH_W
:
14590 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
14592 case OPC_PRECR_QB_PH
:
14593 case OPC_PRECRQ_QB_PH
:
14594 case OPC_PRECRQ_PH_W
:
14595 case OPC_PRECRQ_RS_PH_W
:
14596 case OPC_PRECRQU_S_QB_PH
:
14597 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14599 case OPC_CMPU_EQ_QB
:
14600 case OPC_CMPU_LT_QB
:
14601 case OPC_CMPU_LE_QB
:
14602 case OPC_CMP_EQ_PH
:
14603 case OPC_CMP_LT_PH
:
14604 case OPC_CMP_LE_PH
:
14605 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14607 case OPC_CMPGU_EQ_QB
:
14608 case OPC_CMPGU_LT_QB
:
14609 case OPC_CMPGU_LE_QB
:
14610 case OPC_CMPGDU_EQ_QB
:
14611 case OPC_CMPGDU_LT_QB
:
14612 case OPC_CMPGDU_LE_QB
:
14615 case OPC_PACKRL_PH
:
14616 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14618 default: /* Invalid */
14619 MIPS_INVAL("MASK CMPU.EQ.QB");
14620 gen_reserved_instruction(ctx
);
14624 case OPC_SHLL_QB_DSP
:
14625 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
14627 case OPC_DPA_W_PH_DSP
:
14628 op2
= MASK_DPA_W_PH(ctx
->opcode
);
14630 case OPC_DPAU_H_QBL
:
14631 case OPC_DPAU_H_QBR
:
14632 case OPC_DPSU_H_QBL
:
14633 case OPC_DPSU_H_QBR
:
14635 case OPC_DPAX_W_PH
:
14636 case OPC_DPAQ_S_W_PH
:
14637 case OPC_DPAQX_S_W_PH
:
14638 case OPC_DPAQX_SA_W_PH
:
14640 case OPC_DPSX_W_PH
:
14641 case OPC_DPSQ_S_W_PH
:
14642 case OPC_DPSQX_S_W_PH
:
14643 case OPC_DPSQX_SA_W_PH
:
14644 case OPC_MULSAQ_S_W_PH
:
14645 case OPC_DPAQ_SA_L_W
:
14646 case OPC_DPSQ_SA_L_W
:
14647 case OPC_MAQ_S_W_PHL
:
14648 case OPC_MAQ_S_W_PHR
:
14649 case OPC_MAQ_SA_W_PHL
:
14650 case OPC_MAQ_SA_W_PHR
:
14651 case OPC_MULSA_W_PH
:
14652 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14654 default: /* Invalid */
14655 MIPS_INVAL("MASK DPAW.PH");
14656 gen_reserved_instruction(ctx
);
14661 op2
= MASK_INSV(ctx
->opcode
);
14672 t0
= tcg_temp_new();
14673 t1
= tcg_temp_new();
14675 gen_load_gpr(t0
, rt
);
14676 gen_load_gpr(t1
, rs
);
14678 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
14684 default: /* Invalid */
14685 MIPS_INVAL("MASK INSV");
14686 gen_reserved_instruction(ctx
);
14690 case OPC_APPEND_DSP
:
14691 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
14693 case OPC_EXTR_W_DSP
:
14694 op2
= MASK_EXTR_W(ctx
->opcode
);
14698 case OPC_EXTR_RS_W
:
14700 case OPC_EXTRV_S_H
:
14702 case OPC_EXTRV_R_W
:
14703 case OPC_EXTRV_RS_W
:
14708 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
14711 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14717 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14719 default: /* Invalid */
14720 MIPS_INVAL("MASK EXTR.W");
14721 gen_reserved_instruction(ctx
);
14725 #if defined(TARGET_MIPS64)
14726 case OPC_DDIV_G_2E
:
14727 case OPC_DDIVU_G_2E
:
14728 case OPC_DMULT_G_2E
:
14729 case OPC_DMULTU_G_2E
:
14730 case OPC_DMOD_G_2E
:
14731 case OPC_DMODU_G_2E
:
14732 check_insn(ctx
, INSN_LOONGSON2E
);
14733 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
14735 case OPC_ABSQ_S_QH_DSP
:
14736 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
14738 case OPC_PRECEQ_L_PWL
:
14739 case OPC_PRECEQ_L_PWR
:
14740 case OPC_PRECEQ_PW_QHL
:
14741 case OPC_PRECEQ_PW_QHR
:
14742 case OPC_PRECEQ_PW_QHLA
:
14743 case OPC_PRECEQ_PW_QHRA
:
14744 case OPC_PRECEQU_QH_OBL
:
14745 case OPC_PRECEQU_QH_OBR
:
14746 case OPC_PRECEQU_QH_OBLA
:
14747 case OPC_PRECEQU_QH_OBRA
:
14748 case OPC_PRECEU_QH_OBL
:
14749 case OPC_PRECEU_QH_OBR
:
14750 case OPC_PRECEU_QH_OBLA
:
14751 case OPC_PRECEU_QH_OBRA
:
14752 case OPC_ABSQ_S_OB
:
14753 case OPC_ABSQ_S_PW
:
14754 case OPC_ABSQ_S_QH
:
14755 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14763 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
14765 default: /* Invalid */
14766 MIPS_INVAL("MASK ABSQ_S.QH");
14767 gen_reserved_instruction(ctx
);
14771 case OPC_ADDU_OB_DSP
:
14772 op2
= MASK_ADDU_OB(ctx
->opcode
);
14774 case OPC_RADDU_L_OB
:
14776 case OPC_SUBQ_S_PW
:
14778 case OPC_SUBQ_S_QH
:
14780 case OPC_SUBU_S_OB
:
14782 case OPC_SUBU_S_QH
:
14784 case OPC_SUBUH_R_OB
:
14786 case OPC_ADDQ_S_PW
:
14788 case OPC_ADDQ_S_QH
:
14790 case OPC_ADDU_S_OB
:
14792 case OPC_ADDU_S_QH
:
14794 case OPC_ADDUH_R_OB
:
14795 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14797 case OPC_MULEQ_S_PW_QHL
:
14798 case OPC_MULEQ_S_PW_QHR
:
14799 case OPC_MULEU_S_QH_OBL
:
14800 case OPC_MULEU_S_QH_OBR
:
14801 case OPC_MULQ_RS_QH
:
14802 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14804 default: /* Invalid */
14805 MIPS_INVAL("MASK ADDU.OB");
14806 gen_reserved_instruction(ctx
);
14810 case OPC_CMPU_EQ_OB_DSP
:
14811 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
14813 case OPC_PRECR_SRA_QH_PW
:
14814 case OPC_PRECR_SRA_R_QH_PW
:
14815 /* Return value is rt. */
14816 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
14818 case OPC_PRECR_OB_QH
:
14819 case OPC_PRECRQ_OB_QH
:
14820 case OPC_PRECRQ_PW_L
:
14821 case OPC_PRECRQ_QH_PW
:
14822 case OPC_PRECRQ_RS_QH_PW
:
14823 case OPC_PRECRQU_S_OB_QH
:
14824 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14826 case OPC_CMPU_EQ_OB
:
14827 case OPC_CMPU_LT_OB
:
14828 case OPC_CMPU_LE_OB
:
14829 case OPC_CMP_EQ_QH
:
14830 case OPC_CMP_LT_QH
:
14831 case OPC_CMP_LE_QH
:
14832 case OPC_CMP_EQ_PW
:
14833 case OPC_CMP_LT_PW
:
14834 case OPC_CMP_LE_PW
:
14835 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14837 case OPC_CMPGDU_EQ_OB
:
14838 case OPC_CMPGDU_LT_OB
:
14839 case OPC_CMPGDU_LE_OB
:
14840 case OPC_CMPGU_EQ_OB
:
14841 case OPC_CMPGU_LT_OB
:
14842 case OPC_CMPGU_LE_OB
:
14843 case OPC_PACKRL_PW
:
14847 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14849 default: /* Invalid */
14850 MIPS_INVAL("MASK CMPU_EQ.OB");
14851 gen_reserved_instruction(ctx
);
14855 case OPC_DAPPEND_DSP
:
14856 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
14858 case OPC_DEXTR_W_DSP
:
14859 op2
= MASK_DEXTR_W(ctx
->opcode
);
14866 case OPC_DEXTR_R_L
:
14867 case OPC_DEXTR_RS_L
:
14869 case OPC_DEXTR_R_W
:
14870 case OPC_DEXTR_RS_W
:
14871 case OPC_DEXTR_S_H
:
14873 case OPC_DEXTRV_R_L
:
14874 case OPC_DEXTRV_RS_L
:
14875 case OPC_DEXTRV_S_H
:
14877 case OPC_DEXTRV_R_W
:
14878 case OPC_DEXTRV_RS_W
:
14879 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
14884 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14886 default: /* Invalid */
14887 MIPS_INVAL("MASK EXTR.W");
14888 gen_reserved_instruction(ctx
);
14892 case OPC_DPAQ_W_QH_DSP
:
14893 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
14895 case OPC_DPAU_H_OBL
:
14896 case OPC_DPAU_H_OBR
:
14897 case OPC_DPSU_H_OBL
:
14898 case OPC_DPSU_H_OBR
:
14900 case OPC_DPAQ_S_W_QH
:
14902 case OPC_DPSQ_S_W_QH
:
14903 case OPC_MULSAQ_S_W_QH
:
14904 case OPC_DPAQ_SA_L_PW
:
14905 case OPC_DPSQ_SA_L_PW
:
14906 case OPC_MULSAQ_S_L_PW
:
14907 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14909 case OPC_MAQ_S_W_QHLL
:
14910 case OPC_MAQ_S_W_QHLR
:
14911 case OPC_MAQ_S_W_QHRL
:
14912 case OPC_MAQ_S_W_QHRR
:
14913 case OPC_MAQ_SA_W_QHLL
:
14914 case OPC_MAQ_SA_W_QHLR
:
14915 case OPC_MAQ_SA_W_QHRL
:
14916 case OPC_MAQ_SA_W_QHRR
:
14917 case OPC_MAQ_S_L_PWL
:
14918 case OPC_MAQ_S_L_PWR
:
14923 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14925 default: /* Invalid */
14926 MIPS_INVAL("MASK DPAQ.W.QH");
14927 gen_reserved_instruction(ctx
);
14931 case OPC_DINSV_DSP
:
14932 op2
= MASK_INSV(ctx
->opcode
);
14944 t0
= tcg_temp_new();
14945 t1
= tcg_temp_new();
14947 gen_load_gpr(t0
, rt
);
14948 gen_load_gpr(t1
, rs
);
14950 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
14956 default: /* Invalid */
14957 MIPS_INVAL("MASK DINSV");
14958 gen_reserved_instruction(ctx
);
14962 case OPC_SHLL_OB_DSP
:
14963 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
14966 default: /* Invalid */
14967 MIPS_INVAL("special3_legacy");
14968 gen_reserved_instruction(ctx
);
14974 #if defined(TARGET_MIPS64)
14976 static void decode_mmi(CPUMIPSState
*env
, DisasContext
*ctx
)
14978 uint32_t opc
= MASK_MMI(ctx
->opcode
);
14979 int rs
= extract32(ctx
->opcode
, 21, 5);
14980 int rt
= extract32(ctx
->opcode
, 16, 5);
14981 int rd
= extract32(ctx
->opcode
, 11, 5);
14984 case MMI_OPC_MULT1
:
14985 case MMI_OPC_MULTU1
:
14987 case MMI_OPC_MADDU
:
14988 case MMI_OPC_MADD1
:
14989 case MMI_OPC_MADDU1
:
14990 gen_mul_txx9(ctx
, opc
, rd
, rs
, rt
);
14993 case MMI_OPC_DIVU1
:
14994 gen_div1_tx79(ctx
, opc
, rs
, rt
);
14997 MIPS_INVAL("TX79 MMI class");
14998 gen_reserved_instruction(ctx
);
15003 static void gen_mmi_sq(DisasContext
*ctx
, int base
, int rt
, int offset
)
15005 gen_reserved_instruction(ctx
); /* TODO: MMI_OPC_SQ */
/*
 * The TX79-specific instruction Store Quadword
 *
 * +--------+-------+-------+------------------------+
 * | 011111 |  base |   rt  |           offset       | SQ
 * +--------+-------+-------+------------------------+
 *      6       5       5                 16
 *
 * has the same opcode as the Read Hardware Register instruction
 *
 * +--------+-------+-------+-------+-------+--------+
 * | 011111 | 00000 |   rt  |   rd  | 00000 | 111011 | RDHWR
 * +--------+-------+-------+-------+-------+--------+
 *      6       5       5       5       5        6
 *
 * that is required, trapped and emulated by the Linux kernel. However, all
 * RDHWR encodings yield address error exceptions on the TX79 since the SQ
 * offset is odd. Therefore all valid SQ instructions can execute normally.
 * In user mode, QEMU must verify the upper and lower 11 bits to distinguish
 * between SQ and RDHWR, as the Linux kernel does.
 */
15029 static void decode_mmi_sq(CPUMIPSState
*env
, DisasContext
*ctx
)
15031 int base
= extract32(ctx
->opcode
, 21, 5);
15032 int rt
= extract32(ctx
->opcode
, 16, 5);
15033 int offset
= extract32(ctx
->opcode
, 0, 16);
15035 #ifdef CONFIG_USER_ONLY
15036 uint32_t op1
= MASK_SPECIAL3(ctx
->opcode
);
15037 uint32_t op2
= extract32(ctx
->opcode
, 6, 5);
15039 if (base
== 0 && op2
== 0 && op1
== OPC_RDHWR
) {
15040 int rd
= extract32(ctx
->opcode
, 11, 5);
15042 gen_rdhwr(ctx
, rt
, rd
, 0);
15047 gen_mmi_sq(ctx
, base
, rt
, offset
);
15052 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
15054 int rs
, rt
, rd
, sa
;
15058 rs
= (ctx
->opcode
>> 21) & 0x1f;
15059 rt
= (ctx
->opcode
>> 16) & 0x1f;
15060 rd
= (ctx
->opcode
>> 11) & 0x1f;
15061 sa
= (ctx
->opcode
>> 6) & 0x1f;
15062 imm
= sextract32(ctx
->opcode
, 7, 9);
15064 op1
= MASK_SPECIAL3(ctx
->opcode
);
15067 * EVA loads and stores overlap Loongson 2E instructions decoded by
15068 * decode_opc_special3_legacy(), so be careful to allow their decoding when
15081 check_cp0_enabled(ctx
);
15082 gen_ld(ctx
, op1
, rt
, rs
, imm
);
15089 check_cp0_enabled(ctx
);
15090 gen_st(ctx
, op1
, rt
, rs
, imm
);
15093 check_cp0_enabled(ctx
);
15094 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, true);
15098 check_cp0_enabled(ctx
);
15099 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
15100 gen_cache_operation(ctx
, rt
, rs
, imm
);
15104 check_cp0_enabled(ctx
);
15105 /* Treat as NOP. */
15113 check_insn(ctx
, ISA_MIPS_R2
);
15114 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
15117 op2
= MASK_BSHFL(ctx
->opcode
);
15124 check_insn(ctx
, ISA_MIPS_R6
);
15125 decode_opc_special3_r6(env
, ctx
);
15128 check_insn(ctx
, ISA_MIPS_R2
);
15129 gen_bshfl(ctx
, op2
, rt
, rd
);
15133 #if defined(TARGET_MIPS64)
15140 check_insn(ctx
, ISA_MIPS_R2
);
15141 check_mips_64(ctx
);
15142 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
15145 op2
= MASK_DBSHFL(ctx
->opcode
);
15156 check_insn(ctx
, ISA_MIPS_R6
);
15157 decode_opc_special3_r6(env
, ctx
);
15160 check_insn(ctx
, ISA_MIPS_R2
);
15161 check_mips_64(ctx
);
15162 op2
= MASK_DBSHFL(ctx
->opcode
);
15163 gen_bshfl(ctx
, op2
, rt
, rd
);
15169 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
15174 TCGv t0
= tcg_temp_new();
15175 TCGv t1
= tcg_temp_new();
15177 gen_load_gpr(t0
, rt
);
15178 gen_load_gpr(t1
, rs
);
15179 gen_helper_fork(t0
, t1
);
15187 TCGv t0
= tcg_temp_new();
15189 gen_load_gpr(t0
, rs
);
15190 gen_helper_yield(t0
, cpu_env
, t0
);
15191 gen_store_gpr(t0
, rd
);
15196 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15197 decode_opc_special3_r6(env
, ctx
);
15199 decode_opc_special3_legacy(env
, ctx
);
15204 static bool decode_opc_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
15207 int rs
, rt
, rd
, sa
;
15211 op
= MASK_OP_MAJOR(ctx
->opcode
);
15212 rs
= (ctx
->opcode
>> 21) & 0x1f;
15213 rt
= (ctx
->opcode
>> 16) & 0x1f;
15214 rd
= (ctx
->opcode
>> 11) & 0x1f;
15215 sa
= (ctx
->opcode
>> 6) & 0x1f;
15216 imm
= (int16_t)ctx
->opcode
;
15219 decode_opc_special(env
, ctx
);
15222 #if defined(TARGET_MIPS64)
15223 if ((ctx
->insn_flags
& INSN_R5900
) && (ctx
->insn_flags
& ASE_MMI
)) {
15224 decode_mmi(env
, ctx
);
15228 if (TARGET_LONG_BITS
== 32 && (ctx
->insn_flags
& ASE_MXU
)) {
15229 if (MASK_SPECIAL2(ctx
->opcode
) == OPC_MUL
) {
15230 gen_arith(ctx
, OPC_MUL
, rd
, rs
, rt
);
15232 decode_ase_mxu(ctx
, ctx
->opcode
);
15236 decode_opc_special2_legacy(env
, ctx
);
15239 #if defined(TARGET_MIPS64)
15240 if (ctx
->insn_flags
& INSN_R5900
) {
15241 decode_mmi_sq(env
, ctx
); /* MMI_OPC_SQ */
15243 decode_opc_special3(env
, ctx
);
15246 decode_opc_special3(env
, ctx
);
15250 op1
= MASK_REGIMM(ctx
->opcode
);
15252 case OPC_BLTZL
: /* REGIMM branches */
15256 check_insn(ctx
, ISA_MIPS2
);
15257 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15261 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
15265 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15267 /* OPC_NAL, OPC_BAL */
15268 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
15270 gen_reserved_instruction(ctx
);
15273 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
15276 case OPC_TGEI
: /* REGIMM traps */
15282 check_insn(ctx
, ISA_MIPS2
);
15283 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15284 gen_trap(ctx
, op1
, rs
, -1, imm
, 0);
15287 check_insn(ctx
, ISA_MIPS_R6
);
15288 gen_reserved_instruction(ctx
);
15291 check_insn(ctx
, ISA_MIPS_R2
);
15293 * Break the TB to be able to sync copied instructions
15296 ctx
->base
.is_jmp
= DISAS_STOP
;
15298 case OPC_BPOSGE32
: /* MIPS DSP branch */
15299 #if defined(TARGET_MIPS64)
15303 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
15305 #if defined(TARGET_MIPS64)
15307 check_insn(ctx
, ISA_MIPS_R6
);
15308 check_mips_64(ctx
);
15310 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
15314 check_insn(ctx
, ISA_MIPS_R6
);
15315 check_mips_64(ctx
);
15317 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
15321 default: /* Invalid */
15322 MIPS_INVAL("regimm");
15323 gen_reserved_instruction(ctx
);
15328 check_cp0_enabled(ctx
);
15329 op1
= MASK_CP0(ctx
->opcode
);
15337 #if defined(TARGET_MIPS64)
15341 #ifndef CONFIG_USER_ONLY
15342 gen_cp0(env
, ctx
, op1
, rt
, rd
);
15343 #endif /* !CONFIG_USER_ONLY */
15361 #ifndef CONFIG_USER_ONLY
15362 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
15363 #endif /* !CONFIG_USER_ONLY */
15366 #ifndef CONFIG_USER_ONLY
15369 TCGv t0
= tcg_temp_new();
15371 op2
= MASK_MFMC0(ctx
->opcode
);
15375 gen_helper_dmt(t0
);
15376 gen_store_gpr(t0
, rt
);
15380 gen_helper_emt(t0
);
15381 gen_store_gpr(t0
, rt
);
15385 gen_helper_dvpe(t0
, cpu_env
);
15386 gen_store_gpr(t0
, rt
);
15390 gen_helper_evpe(t0
, cpu_env
);
15391 gen_store_gpr(t0
, rt
);
15394 check_insn(ctx
, ISA_MIPS_R6
);
15396 gen_helper_dvp(t0
, cpu_env
);
15397 gen_store_gpr(t0
, rt
);
15401 check_insn(ctx
, ISA_MIPS_R6
);
15403 gen_helper_evp(t0
, cpu_env
);
15404 gen_store_gpr(t0
, rt
);
15408 check_insn(ctx
, ISA_MIPS_R2
);
15409 save_cpu_state(ctx
, 1);
15410 gen_helper_di(t0
, cpu_env
);
15411 gen_store_gpr(t0
, rt
);
15413 * Stop translation as we may have switched
15414 * the execution mode.
15416 ctx
->base
.is_jmp
= DISAS_STOP
;
15419 check_insn(ctx
, ISA_MIPS_R2
);
15420 save_cpu_state(ctx
, 1);
15421 gen_helper_ei(t0
, cpu_env
);
15422 gen_store_gpr(t0
, rt
);
15424 * DISAS_STOP isn't sufficient, we need to ensure we break
15425 * out of translated code to check for pending interrupts.
15427 gen_save_pc(ctx
->base
.pc_next
+ 4);
15428 ctx
->base
.is_jmp
= DISAS_EXIT
;
15430 default: /* Invalid */
15431 MIPS_INVAL("mfmc0");
15432 gen_reserved_instruction(ctx
);
15437 #endif /* !CONFIG_USER_ONLY */
15440 check_insn(ctx
, ISA_MIPS_R2
);
15441 gen_load_srsgpr(rt
, rd
);
15444 check_insn(ctx
, ISA_MIPS_R2
);
15445 gen_store_srsgpr(rt
, rd
);
15449 gen_reserved_instruction(ctx
);
15453 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
15454 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15455 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
15456 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15459 /* Arithmetic with immediate opcode */
15460 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
15464 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
15466 case OPC_SLTI
: /* Set on less than with immediate opcode */
15468 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
15470 case OPC_ANDI
: /* Arithmetic with immediate opcode */
15471 case OPC_LUI
: /* OPC_AUI */
15474 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
15476 case OPC_J
: /* Jump */
15478 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
15479 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
15482 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
15483 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15485 gen_reserved_instruction(ctx
);
15488 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
15489 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15492 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
15495 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
15496 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15498 gen_reserved_instruction(ctx
);
15501 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
15502 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15505 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
15508 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
15511 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
15513 check_insn(ctx
, ISA_MIPS_R6
);
15514 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
15515 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15518 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
15521 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
15523 check_insn(ctx
, ISA_MIPS_R6
);
15524 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
15525 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15530 check_insn(ctx
, ISA_MIPS2
);
15531 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15535 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
15537 case OPC_LL
: /* Load and stores */
15538 check_insn(ctx
, ISA_MIPS2
);
15539 if (ctx
->insn_flags
& INSN_R5900
) {
15540 check_insn_opc_user_only(ctx
, INSN_R5900
);
15551 gen_ld(ctx
, op
, rt
, rs
, imm
);
15558 gen_st(ctx
, op
, rt
, rs
, imm
);
15561 check_insn(ctx
, ISA_MIPS2
);
15562 if (ctx
->insn_flags
& INSN_R5900
) {
15563 check_insn_opc_user_only(ctx
, INSN_R5900
);
15565 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, false);
15568 check_cp0_enabled(ctx
);
15569 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS_R1
);
15570 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
15571 gen_cache_operation(ctx
, rt
, rs
, imm
);
15573 /* Treat as NOP. */
15576 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R1
| INSN_R5900
);
15577 /* Treat as NOP. */
15580 /* Floating point (COP1). */
15585 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
15589 op1
= MASK_CP1(ctx
->opcode
);
15594 check_cp1_enabled(ctx
);
15595 check_insn(ctx
, ISA_MIPS_R2
);
15601 check_cp1_enabled(ctx
);
15602 gen_cp1(ctx
, op1
, rt
, rd
);
15604 #if defined(TARGET_MIPS64)
15607 check_cp1_enabled(ctx
);
15608 check_insn(ctx
, ISA_MIPS3
);
15609 check_mips_64(ctx
);
15610 gen_cp1(ctx
, op1
, rt
, rd
);
15613 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
15614 check_cp1_enabled(ctx
);
15615 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15617 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
15622 check_insn(ctx
, ASE_MIPS3D
);
15623 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
15624 (rt
>> 2) & 0x7, imm
<< 2);
15628 check_cp1_enabled(ctx
);
15629 check_insn(ctx
, ISA_MIPS_R6
);
15630 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
15634 check_cp1_enabled(ctx
);
15635 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15637 check_insn(ctx
, ASE_MIPS3D
);
15640 check_cp1_enabled(ctx
);
15641 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15642 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
15643 (rt
>> 2) & 0x7, imm
<< 2);
15650 check_cp1_enabled(ctx
);
15651 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
15657 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
15658 check_cp1_enabled(ctx
);
15659 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15661 case R6_OPC_CMP_AF_S
:
15662 case R6_OPC_CMP_UN_S
:
15663 case R6_OPC_CMP_EQ_S
:
15664 case R6_OPC_CMP_UEQ_S
:
15665 case R6_OPC_CMP_LT_S
:
15666 case R6_OPC_CMP_ULT_S
:
15667 case R6_OPC_CMP_LE_S
:
15668 case R6_OPC_CMP_ULE_S
:
15669 case R6_OPC_CMP_SAF_S
:
15670 case R6_OPC_CMP_SUN_S
:
15671 case R6_OPC_CMP_SEQ_S
:
15672 case R6_OPC_CMP_SEUQ_S
:
15673 case R6_OPC_CMP_SLT_S
:
15674 case R6_OPC_CMP_SULT_S
:
15675 case R6_OPC_CMP_SLE_S
:
15676 case R6_OPC_CMP_SULE_S
:
15677 case R6_OPC_CMP_OR_S
:
15678 case R6_OPC_CMP_UNE_S
:
15679 case R6_OPC_CMP_NE_S
:
15680 case R6_OPC_CMP_SOR_S
:
15681 case R6_OPC_CMP_SUNE_S
:
15682 case R6_OPC_CMP_SNE_S
:
15683 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
15685 case R6_OPC_CMP_AF_D
:
15686 case R6_OPC_CMP_UN_D
:
15687 case R6_OPC_CMP_EQ_D
:
15688 case R6_OPC_CMP_UEQ_D
:
15689 case R6_OPC_CMP_LT_D
:
15690 case R6_OPC_CMP_ULT_D
:
15691 case R6_OPC_CMP_LE_D
:
15692 case R6_OPC_CMP_ULE_D
:
15693 case R6_OPC_CMP_SAF_D
:
15694 case R6_OPC_CMP_SUN_D
:
15695 case R6_OPC_CMP_SEQ_D
:
15696 case R6_OPC_CMP_SEUQ_D
:
15697 case R6_OPC_CMP_SLT_D
:
15698 case R6_OPC_CMP_SULT_D
:
15699 case R6_OPC_CMP_SLE_D
:
15700 case R6_OPC_CMP_SULE_D
:
15701 case R6_OPC_CMP_OR_D
:
15702 case R6_OPC_CMP_UNE_D
:
15703 case R6_OPC_CMP_NE_D
:
15704 case R6_OPC_CMP_SOR_D
:
15705 case R6_OPC_CMP_SUNE_D
:
15706 case R6_OPC_CMP_SNE_D
:
15707 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
15710 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
15711 rt
, rd
, sa
, (imm
>> 8) & 0x7);
15716 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
15723 gen_reserved_instruction(ctx
);
15728 /* Compact branches [R6] and COP2 [non-R6] */
15729 case OPC_BC
: /* OPC_LWC2 */
15730 case OPC_BALC
: /* OPC_SWC2 */
15731 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15732 /* OPC_BC, OPC_BALC */
15733 gen_compute_compact_branch(ctx
, op
, 0, 0,
15734 sextract32(ctx
->opcode
<< 2, 0, 28));
15735 } else if (ctx
->insn_flags
& ASE_LEXT
) {
15736 gen_loongson_lswc2(ctx
, rt
, rs
, rd
);
15738 /* OPC_LWC2, OPC_SWC2 */
15739 /* COP2: Not implemented. */
15740 generate_exception_err(ctx
, EXCP_CpU
, 2);
15743 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
15744 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
15745 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15747 /* OPC_BEQZC, OPC_BNEZC */
15748 gen_compute_compact_branch(ctx
, op
, rs
, 0,
15749 sextract32(ctx
->opcode
<< 2, 0, 23));
15751 /* OPC_JIC, OPC_JIALC */
15752 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
15754 } else if (ctx
->insn_flags
& ASE_LEXT
) {
15755 gen_loongson_lsdc2(ctx
, rt
, rs
, rd
);
15757 /* OPC_LWC2, OPC_SWC2 */
15758 /* COP2: Not implemented. */
15759 generate_exception_err(ctx
, EXCP_CpU
, 2);
15763 check_insn(ctx
, ASE_LMMI
);
15764 /* Note that these instructions use different fields. */
15765 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
15769 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
15770 check_cp1_enabled(ctx
);
15771 op1
= MASK_CP3(ctx
->opcode
);
15775 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS_R2
);
15781 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R2
);
15782 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
15785 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R2
);
15786 /* Treat as NOP. */
15789 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS_R2
);
15803 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R2
);
15804 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
15808 gen_reserved_instruction(ctx
);
15812 generate_exception_err(ctx
, EXCP_CpU
, 1);
15816 #if defined(TARGET_MIPS64)
15817 /* MIPS64 opcodes */
15819 if (ctx
->insn_flags
& INSN_R5900
) {
15820 check_insn_opc_user_only(ctx
, INSN_R5900
);
15827 check_insn(ctx
, ISA_MIPS3
);
15828 check_mips_64(ctx
);
15829 gen_ld(ctx
, op
, rt
, rs
, imm
);
15834 check_insn(ctx
, ISA_MIPS3
);
15835 check_mips_64(ctx
);
15836 gen_st(ctx
, op
, rt
, rs
, imm
);
15839 check_insn(ctx
, ISA_MIPS3
);
15840 if (ctx
->insn_flags
& INSN_R5900
) {
15841 check_insn_opc_user_only(ctx
, INSN_R5900
);
15843 check_mips_64(ctx
);
15844 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TEUQ
, false);
15846 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
15847 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15848 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
15849 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15852 check_insn(ctx
, ISA_MIPS3
);
15853 check_mips_64(ctx
);
15854 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
15858 check_insn(ctx
, ISA_MIPS3
);
15859 check_mips_64(ctx
);
15860 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
15863 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
15864 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15865 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15867 MIPS_INVAL("major opcode");
15868 gen_reserved_instruction(ctx
);
15872 case OPC_DAUI
: /* OPC_JALX */
15873 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15874 #if defined(TARGET_MIPS64)
15876 check_mips_64(ctx
);
15878 generate_exception(ctx
, EXCP_RI
);
15879 } else if (rt
!= 0) {
15880 TCGv t0
= tcg_temp_new();
15881 gen_load_gpr(t0
, rs
);
15882 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
15886 gen_reserved_instruction(ctx
);
15887 MIPS_INVAL("major opcode");
15891 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
15892 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
15893 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
15897 /* MDMX: Not implemented. */
15900 check_insn(ctx
, ISA_MIPS_R6
);
15901 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
15903 default: /* Invalid */
15904 MIPS_INVAL("major opcode");
15910 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
15912 /* make sure instructions are on a word boundary */
15913 if (ctx
->base
.pc_next
& 0x3) {
15914 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
15915 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
15919 /* Handle blikely not taken case */
15920 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
15921 TCGLabel
*l1
= gen_new_label();
15923 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
15924 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
15925 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
15929 /* Transition to the auto-generated decoder. */
15931 /* Vendor specific extensions */
15932 if (cpu_supports_isa(env
, INSN_R5900
) && decode_ext_txx9(ctx
, ctx
->opcode
)) {
15935 if (cpu_supports_isa(env
, INSN_VR54XX
) && decode_ext_vr54xx(ctx
, ctx
->opcode
)) {
15938 #if defined(TARGET_MIPS64)
15939 if (cpu_supports_isa(env
, INSN_OCTEON
) && decode_ext_octeon(ctx
, ctx
->opcode
)) {
15944 /* ISA extensions */
15945 if (ase_msa_available(env
) && decode_ase_msa(ctx
, ctx
->opcode
)) {
15949 /* ISA (from latest to oldest) */
15950 if (cpu_supports_isa(env
, ISA_MIPS_R6
) && decode_isa_rel6(ctx
, ctx
->opcode
)) {
15954 if (decode_opc_legacy(env
, ctx
)) {
15958 gen_reserved_instruction(ctx
);
15961 static void mips_tr_init_disas_context(DisasContextBase
*dcbase
, CPUState
*cs
)
15963 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
15964 CPUMIPSState
*env
= cs
->env_ptr
;
15966 ctx
->page_start
= ctx
->base
.pc_first
& TARGET_PAGE_MASK
;
15967 ctx
->saved_pc
= -1;
15968 ctx
->insn_flags
= env
->insn_flags
;
15969 ctx
->CP0_Config0
= env
->CP0_Config0
;
15970 ctx
->CP0_Config1
= env
->CP0_Config1
;
15971 ctx
->CP0_Config2
= env
->CP0_Config2
;
15972 ctx
->CP0_Config3
= env
->CP0_Config3
;
15973 ctx
->CP0_Config5
= env
->CP0_Config5
;
15975 ctx
->kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
15976 ctx
->rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
15977 ctx
->ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
15978 ctx
->bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
15979 ctx
->bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
15980 ctx
->PAMask
= env
->PAMask
;
15981 ctx
->mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
15982 ctx
->eva
= (env
->CP0_Config5
>> CP0C5_EVA
) & 1;
15983 ctx
->sc
= (env
->CP0_Config3
>> CP0C3_SC
) & 1;
15984 ctx
->CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
15985 ctx
->cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
15986 /* Restore delay slot state from the tb context. */
15987 ctx
->hflags
= (uint32_t)ctx
->base
.tb
->flags
; /* FIXME: maybe use 64 bits? */
15988 ctx
->ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
15989 ctx
->ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
15990 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
15991 ctx
->vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
15992 ctx
->mrp
= (env
->CP0_Config5
>> CP0C5_MRP
) & 1;
15993 ctx
->nan2008
= (env
->active_fpu
.fcr31
>> FCR31_NAN2008
) & 1;
15994 ctx
->abs2008
= (env
->active_fpu
.fcr31
>> FCR31_ABS2008
) & 1;
15995 ctx
->mi
= (env
->CP0_Config5
>> CP0C5_MI
) & 1;
15996 ctx
->gi
= (env
->CP0_Config5
>> CP0C5_GI
) & 3;
15997 restore_cpu_state(env
, ctx
);
15998 #ifdef CONFIG_USER_ONLY
15999 ctx
->mem_idx
= MIPS_HFLAG_UM
;
16001 ctx
->mem_idx
= hflags_mmu_index(ctx
->hflags
);
16003 ctx
->default_tcg_memop_mask
= (!(ctx
->insn_flags
& ISA_NANOMIPS32
) &&
16004 (ctx
->insn_flags
& (ISA_MIPS_R6
|
16005 INSN_LOONGSON3A
))) ? MO_UNALN
: MO_ALIGN
;
16008 * Execute a branch and its delay slot as a single instruction.
16009 * This is what GDB expects and is consistent with what the
16010 * hardware does (e.g. if a delay slot instruction faults, the
16011 * reported PC is the PC of the branch).
16013 if (ctx
->base
.singlestep_enabled
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
16014 ctx
->base
.max_insns
= 2;
16017 LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx
->base
.tb
, ctx
->mem_idx
,
16021 static void mips_tr_tb_start(DisasContextBase
*dcbase
, CPUState
*cs
)
16025 static void mips_tr_insn_start(DisasContextBase
*dcbase
, CPUState
*cs
)
16027 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
16029 tcg_gen_insn_start(ctx
->base
.pc_next
, ctx
->hflags
& MIPS_HFLAG_BMASK
,
16033 static void mips_tr_translate_insn(DisasContextBase
*dcbase
, CPUState
*cs
)
16035 CPUMIPSState
*env
= cs
->env_ptr
;
16036 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
16040 is_slot
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
16041 if (ctx
->insn_flags
& ISA_NANOMIPS32
) {
16042 ctx
->opcode
= translator_lduw(env
, &ctx
->base
, ctx
->base
.pc_next
);
16043 insn_bytes
= decode_isa_nanomips(env
, ctx
);
16044 } else if (!(ctx
->hflags
& MIPS_HFLAG_M16
)) {
16045 ctx
->opcode
= translator_ldl(env
, &ctx
->base
, ctx
->base
.pc_next
);
16047 decode_opc(env
, ctx
);
16048 } else if (ctx
->insn_flags
& ASE_MICROMIPS
) {
16049 ctx
->opcode
= translator_lduw(env
, &ctx
->base
, ctx
->base
.pc_next
);
16050 insn_bytes
= decode_isa_micromips(env
, ctx
);
16051 } else if (ctx
->insn_flags
& ASE_MIPS16
) {
16052 ctx
->opcode
= translator_lduw(env
, &ctx
->base
, ctx
->base
.pc_next
);
16053 insn_bytes
= decode_ase_mips16e(env
, ctx
);
16055 gen_reserved_instruction(ctx
);
16056 g_assert(ctx
->base
.is_jmp
== DISAS_NORETURN
);
16060 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
16061 if (!(ctx
->hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
16062 MIPS_HFLAG_FBNSLOT
))) {
16064 * Force to generate branch as there is neither delay nor
16069 if ((ctx
->hflags
& MIPS_HFLAG_M16
) &&
16070 (ctx
->hflags
& MIPS_HFLAG_FBNSLOT
)) {
16072 * Force to generate branch as microMIPS R6 doesn't restrict
16073 * branches in the forbidden slot.
16079 gen_branch(ctx
, insn_bytes
);
16081 if (ctx
->base
.is_jmp
== DISAS_SEMIHOST
) {
16082 generate_exception_err(ctx
, EXCP_SEMIHOST
, insn_bytes
);
16084 ctx
->base
.pc_next
+= insn_bytes
;
16086 if (ctx
->base
.is_jmp
!= DISAS_NEXT
) {
16091 * End the TB on (most) page crossings.
16092 * See mips_tr_init_disas_context about single-stepping a branch
16093 * together with its delay slot.
16095 if (ctx
->base
.pc_next
- ctx
->page_start
>= TARGET_PAGE_SIZE
16096 && !ctx
->base
.singlestep_enabled
) {
16097 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
16101 static void mips_tr_tb_stop(DisasContextBase
*dcbase
, CPUState
*cs
)
16103 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
16105 switch (ctx
->base
.is_jmp
) {
16107 gen_save_pc(ctx
->base
.pc_next
);
16108 tcg_gen_lookup_and_goto_ptr();
16111 case DISAS_TOO_MANY
:
16112 save_cpu_state(ctx
, 0);
16113 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
);
16116 tcg_gen_exit_tb(NULL
, 0);
16118 case DISAS_NORETURN
:
16121 g_assert_not_reached();
16125 static void mips_tr_disas_log(const DisasContextBase
*dcbase
,
16126 CPUState
*cs
, FILE *logfile
)
16128 fprintf(logfile
, "IN: %s\n", lookup_symbol(dcbase
->pc_first
));
16129 target_disas(logfile
, cs
, dcbase
->pc_first
, dcbase
->tb
->size
);
16132 static const TranslatorOps mips_tr_ops
= {
16133 .init_disas_context
= mips_tr_init_disas_context
,
16134 .tb_start
= mips_tr_tb_start
,
16135 .insn_start
= mips_tr_insn_start
,
16136 .translate_insn
= mips_tr_translate_insn
,
16137 .tb_stop
= mips_tr_tb_stop
,
16138 .disas_log
= mips_tr_disas_log
,
16141 void gen_intermediate_code(CPUState
*cs
, TranslationBlock
*tb
, int *max_insns
,
16142 target_ulong pc
, void *host_pc
)
16146 translator_loop(cs
, tb
, max_insns
, pc
, host_pc
, &mips_tr_ops
, &ctx
.base
);
16149 void mips_tcg_init(void)
16154 for (i
= 1; i
< 32; i
++)
16155 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
16156 offsetof(CPUMIPSState
,
16159 #if defined(TARGET_MIPS64)
16160 cpu_gpr_hi
[0] = NULL
;
16162 for (unsigned i
= 1; i
< 32; i
++) {
16163 g_autofree
char *rname
= g_strdup_printf("%s[hi]", regnames
[i
]);
16165 cpu_gpr_hi
[i
] = tcg_global_mem_new_i64(cpu_env
,
16166 offsetof(CPUMIPSState
,
16167 active_tc
.gpr_hi
[i
]),
16170 #endif /* !TARGET_MIPS64 */
16171 for (i
= 0; i
< 32; i
++) {
16172 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
16174 fpu_f64
[i
] = tcg_global_mem_new_i64(cpu_env
, off
, fregnames
[i
]);
16176 msa_translate_init();
16177 cpu_PC
= tcg_global_mem_new(cpu_env
,
16178 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
16179 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
16180 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
16181 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
16183 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
16184 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
16187 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
16188 offsetof(CPUMIPSState
,
16189 active_tc
.DSPControl
),
16191 bcond
= tcg_global_mem_new(cpu_env
,
16192 offsetof(CPUMIPSState
, bcond
), "bcond");
16193 btarget
= tcg_global_mem_new(cpu_env
,
16194 offsetof(CPUMIPSState
, btarget
), "btarget");
16195 hflags
= tcg_global_mem_new_i32(cpu_env
,
16196 offsetof(CPUMIPSState
, hflags
), "hflags");
16198 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
16199 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
16201 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
16202 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
16204 cpu_lladdr
= tcg_global_mem_new(cpu_env
, offsetof(CPUMIPSState
, lladdr
),
16206 cpu_llval
= tcg_global_mem_new(cpu_env
, offsetof(CPUMIPSState
, llval
),
16209 if (TARGET_LONG_BITS
== 32) {
16210 mxu_translate_init();
16214 void mips_restore_state_to_opc(CPUState
*cs
,
16215 const TranslationBlock
*tb
,
16216 const uint64_t *data
)
16218 MIPSCPU
*cpu
= MIPS_CPU(cs
);
16219 CPUMIPSState
*env
= &cpu
->env
;
16221 env
->active_tc
.PC
= data
[0];
16222 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
16223 env
->hflags
|= data
[1];
16224 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
16225 case MIPS_HFLAG_BR
:
16227 case MIPS_HFLAG_BC
:
16228 case MIPS_HFLAG_BL
:
16230 env
->btarget
= data
[2];