1 /*
2 * m68k translation
3 *
4 * Copyright (c) 2005-2007 CodeSourcery
5 * Written by Paul Brook
6 *
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
11 *
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * General Public License for more details.
16 *
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, write to the Free Software
19 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 */
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
26
27 #include "config.h"
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "m68k-qreg.h"
32
33 //#define DEBUG_DISPATCH 1
34
35 static inline void qemu_assert(int cond, const char *msg)
36 {
37 if (!cond) {
38 fprintf (stderr, "badness: %s\n", msg);
39 abort();
40 }
41 }
42
43 /* internal defines */
44 typedef struct DisasContext {
45 CPUM68KState *env;
46 target_ulong insn_pc; /* Start of the current instruction. */
47 target_ulong pc;
48 int is_jmp;
49 int cc_op;
50 int user;
51 uint32_t fpcr;
52 struct TranslationBlock *tb;
53 int singlestep_enabled;
54 } DisasContext;
55
56 #define DISAS_JUMP_NEXT 4
57
58 #if defined(CONFIG_USER_ONLY)
59 #define IS_USER(s) 1
60 #else
61 #define IS_USER(s) s->user
62 #endif
63
64 /* XXX: move that elsewhere */
65 /* ??? Fix exceptions. */
66 static void *gen_throws_exception;
67 #define gen_last_qop NULL
68
69 static uint16_t *gen_opc_ptr;
70 static uint32_t *gen_opparam_ptr;
71 extern FILE *logfile;
72 extern int loglevel;
73
74 enum {
75 #define DEF(s, n, copy_size) INDEX_op_ ## s,
76 #include "opc.h"
77 #undef DEF
78 NB_OPS,
79 };
80
81 #include "gen-op.h"
82
83 #if defined(CONFIG_USER_ONLY)
84 #define gen_st(s, name, addr, val) gen_op_st##name##_raw(addr, val)
85 #define gen_ld(s, name, val, addr) gen_op_ld##name##_raw(val, addr)
86 #else
87 #define gen_st(s, name, addr, val) do { \
88 if (IS_USER(s)) \
89 gen_op_st##name##_user(addr, val); \
90 else \
91 gen_op_st##name##_kernel(addr, val); \
92 } while (0)
93 #define gen_ld(s, name, val, addr) do { \
94 if (IS_USER(s)) \
95 gen_op_ld##name##_user(val, addr); \
96 else \
97 gen_op_ld##name##_kernel(val, addr); \
98 } while (0)
99 #endif
100
101 #include "op-hacks.h"
102
103 #define OS_BYTE 0
104 #define OS_WORD 1
105 #define OS_LONG 2
106 #define OS_SINGLE 4
107 #define OS_DOUBLE 5
108
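/* Extract a 3-bit register field from instruction or extension word INSN
   at bit position POS and map it onto the matching data, address or
   floating-point qreg. */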
109 #define DREG(insn, pos) (((insn >> pos) & 7) + QREG_D0)
110 #define AREG(insn, pos) (((insn >> pos) & 7) + QREG_A0)
111 #define FREG(insn, pos) (((insn >> pos) & 7) + QREG_F0)
112
113 typedef void (*disas_proc)(DisasContext *, uint16_t);
114
115 #ifdef DEBUG_DISPATCH
116 #define DISAS_INSN(name) \
117 static void real_disas_##name (DisasContext *s, uint16_t insn); \
118 static void disas_##name (DisasContext *s, uint16_t insn) { \
119 if (logfile) fprintf(logfile, "Dispatch " #name "\n"); \
120 real_disas_##name(s, insn); } \
121 static void real_disas_##name (DisasContext *s, uint16_t insn)
122 #else
123 #define DISAS_INSN(name) \
124 static void disas_##name (DisasContext *s, uint16_t insn)
125 #endif
126
127 /* Generate a load from the specified address. Narrow values are
128 sign extended to full register width. */
129 static inline int gen_load(DisasContext * s, int opsize, int addr, int sign)
130 {
131 int tmp;
132 switch(opsize) {
133 case OS_BYTE:
134 tmp = gen_new_qreg(QMODE_I32);
135 if (sign)
136 gen_ld(s, 8s32, tmp, addr);
137 else
138 gen_ld(s, 8u32, tmp, addr);
139 break;
140 case OS_WORD:
141 tmp = gen_new_qreg(QMODE_I32);
142 if (sign)
143 gen_ld(s, 16s32, tmp, addr);
144 else
145 gen_ld(s, 16u32, tmp, addr);
146 break;
147 case OS_LONG:
148 tmp = gen_new_qreg(QMODE_I32);
149 gen_ld(s, 32, tmp, addr);
150 break;
151 case OS_SINGLE:
152 tmp = gen_new_qreg(QMODE_F32);
153 gen_ld(s, f32, tmp, addr);
154 break;
155 case OS_DOUBLE:
156 tmp = gen_new_qreg(QMODE_F64);
157 gen_ld(s, f64, tmp, addr);
158 break;
159 default:
160 qemu_assert(0, "bad load size");
161 }
162 gen_throws_exception = gen_last_qop;
163 return tmp;
164 }
165
166 /* Generate a store. */
167 static inline void gen_store(DisasContext *s, int opsize, int addr, int val)
168 {
169 switch(opsize) {
170 case OS_BYTE:
171 gen_st(s, 8, addr, val);
172 break;
173 case OS_WORD:
174 gen_st(s, 16, addr, val);
175 break;
176 case OS_LONG:
177 gen_st(s, 32, addr, val);
178 break;
179 case OS_SINGLE:
180 gen_st(s, f32, addr, val);
181 break;
182 case OS_DOUBLE:
183 gen_st(s, f64, addr, val);
184 break;
185 default:
186 qemu_assert(0, "bad store size");
187 }
188 gen_throws_exception = gen_last_qop;
189 }
190
191 /* Generate an unsigned load if VAL is 0, a signed load if VAL is -1,
192    otherwise generate a store. */
193 static int gen_ldst(DisasContext *s, int opsize, int addr, int val)
194 {
195 if (val > 0) {
196 gen_store(s, opsize, addr, val);
197 return 0;
198 } else {
199 return gen_load(s, opsize, addr, val != 0);
200 }
201 }
202
203 /* Read a 32-bit immediate constant. */
204 static inline uint32_t read_im32(DisasContext *s)
205 {
206 uint32_t im;
207 im = ((uint32_t)lduw_code(s->pc)) << 16;
208 s->pc += 2;
209 im |= lduw_code(s->pc);
210 s->pc += 2;
211 return im;
212 }
213
214 /* Calculate an address index from the extension word EXT. */
215 static int gen_addr_index(uint16_t ext, int tmp)
216 {
217 int add;
218 int scale;
219
220 add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
221 if ((ext & 0x800) == 0) {
222 gen_op_ext16s32(tmp, add);
223 add = tmp;
224 }
225 scale = (ext >> 9) & 3;
226 if (scale != 0) {
227 gen_op_shl32(tmp, add, gen_im32(scale));
228 add = tmp;
229 }
230 return add;
231 }
232
233 /* Handle a base + index + displacement effective address.  A base of
234 -1 means pc-relative. */
235 static int gen_lea_indexed(DisasContext *s, int opsize, int base)
236 {
237 uint32_t offset;
238 uint16_t ext;
239 int add;
240 int tmp;
241 uint32_t bd, od;
242
243 offset = s->pc;
244 ext = lduw_code(s->pc);
245 s->pc += 2;
246
247 if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
248 return -1;
249
250 if (ext & 0x100) {
251 /* full extension word format */
252 if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
253 return -1;
254
255 if ((ext & 0x30) > 0x10) {
256 /* base displacement */
257 if ((ext & 0x30) == 0x20) {
258 bd = (int16_t)lduw_code(s->pc);
259 s->pc += 2;
260 } else {
261 bd = read_im32(s);
262 }
263 } else {
264 bd = 0;
265 }
266 tmp = gen_new_qreg(QMODE_I32);
267 if ((ext & 0x44) == 0) {
268 /* pre-index */
269 add = gen_addr_index(ext, tmp);
270 } else {
271 add = QREG_NULL;
272 }
273 if ((ext & 0x80) == 0) {
274 /* base not suppressed */
275 if (base == -1) {
276 base = gen_im32(offset + bd);
277 bd = 0;
278 }
279 if (add) {
280 gen_op_add32(tmp, add, base);
281 add = tmp;
282 } else {
283 add = base;
284 }
285 }
286 if (add) {
287 if (bd != 0) {
288 gen_op_add32(tmp, add, gen_im32(bd));
289 add = tmp;
290 }
291 } else {
292 add = gen_im32(bd);
293 }
294 if ((ext & 3) != 0) {
295 /* memory indirect */
296 base = gen_load(s, OS_LONG, add, 0);
297 if ((ext & 0x44) == 4) {
298 add = gen_addr_index(ext, tmp);
299 gen_op_add32(tmp, add, base);
300 add = tmp;
301 } else {
302 add = base;
303 }
304 if ((ext & 3) > 1) {
305 /* outer displacement */
306 if ((ext & 3) == 2) {
307 od = (int16_t)lduw_code(s->pc);
308 s->pc += 2;
309 } else {
310 od = read_im32(s);
311 }
312 } else {
313 od = 0;
314 }
315 if (od != 0) {
316 gen_op_add32(tmp, add, gen_im32(od));
317 add = tmp;
318 }
319 }
320 } else {
321 /* brief extension word format */
322 tmp = gen_new_qreg(QMODE_I32);
323 add = gen_addr_index(ext, tmp);
324 if (base != -1) {
325 gen_op_add32(tmp, add, base);
326 if ((int8_t)ext)
327 gen_op_add32(tmp, tmp, gen_im32((int8_t)ext));
328 } else {
329 gen_op_add32(tmp, add, gen_im32(offset + (int8_t)ext));
330 }
331 add = tmp;
332 }
333 return add;
334 }
335
336 /* Update the CPU env CC_OP state. */
337 static inline void gen_flush_cc_op(DisasContext *s)
338 {
339 if (s->cc_op != CC_OP_DYNAMIC)
340 gen_op_mov32(QREG_CC_OP, gen_im32(s->cc_op));
341 }
342
343 /* Evaluate all the CC flags. */
344 static inline void gen_flush_flags(DisasContext *s)
345 {
346 if (s->cc_op == CC_OP_FLAGS)
347 return;
348 gen_op_flush_flags(s->cc_op);
349 s->cc_op = CC_OP_FLAGS;
350 }
351
352 static inline int opsize_bytes(int opsize)
353 {
354 switch (opsize) {
355 case OS_BYTE: return 1;
356 case OS_WORD: return 2;
357 case OS_LONG: return 4;
358 case OS_SINGLE: return 4;
359 case OS_DOUBLE: return 8;
360 default:
361 qemu_assert(0, "bad operand size");
362 }
363 }
364
365 /* Assign value to a register. If the width is less than the register width
366 only the low part of the register is set. */
367 static void gen_partset_reg(int opsize, int reg, int val)
368 {
369 int tmp;
370 switch (opsize) {
371 case OS_BYTE:
372 gen_op_and32(reg, reg, gen_im32(0xffffff00));
373 tmp = gen_new_qreg(QMODE_I32);
374 gen_op_and32(tmp, val, gen_im32(0xff));
375 gen_op_or32(reg, reg, tmp);
376 break;
377 case OS_WORD:
378 gen_op_and32(reg, reg, gen_im32(0xffff0000));
379 tmp = gen_new_qreg(QMODE_I32);
380 gen_op_and32(tmp, val, gen_im32(0xffff));
381 gen_op_or32(reg, reg, tmp);
382 break;
383 case OS_LONG:
384 gen_op_mov32(reg, val);
385 break;
386 case OS_SINGLE:
387 gen_op_pack_32_f32(reg, val);
388 break;
389 default:
390 qemu_assert(0, "Bad operand size");
391 break;
392 }
393 }
394
395 /* Sign or zero extend a value. */
396 static inline int gen_extend(int val, int opsize, int sign)
397 {
398 int tmp;
399
400 switch (opsize) {
401 case OS_BYTE:
402 tmp = gen_new_qreg(QMODE_I32);
403 if (sign)
404 gen_op_ext8s32(tmp, val);
405 else
406 gen_op_ext8u32(tmp, val);
407 break;
408 case OS_WORD:
409 tmp = gen_new_qreg(QMODE_I32);
410 if (sign)
411 gen_op_ext16s32(tmp, val);
412 else
413 gen_op_ext16u32(tmp, val);
414 break;
415 case OS_LONG:
416 tmp = val;
417 break;
418 case OS_SINGLE:
419 tmp = gen_new_qreg(QMODE_F32);
420 gen_op_pack_f32_32(tmp, val);
421 break;
422 default:
423 qemu_assert(0, "Bad operand size");
424 }
425 return tmp;
426 }
427
428 /* Generate code for an "effective address". Does not adjust the base
429    register for autoincrement addressing modes. */
430 static int gen_lea(DisasContext *s, uint16_t insn, int opsize)
431 {
432 int reg;
433 int tmp;
434 uint16_t ext;
435 uint32_t offset;
436
437 reg = insn & 7;
438 switch ((insn >> 3) & 7) {
439 case 0: /* Data register direct. */
440 case 1: /* Address register direct. */
441 return -1;
442 case 2: /* Indirect register */
443 case 3: /* Indirect postincrement. */
444 reg += QREG_A0;
445 return reg;
446     case 4: /* Indirect predecrement. */
447 reg += QREG_A0;
448 tmp = gen_new_qreg(QMODE_I32);
449 gen_op_sub32(tmp, reg, gen_im32(opsize_bytes(opsize)));
450 return tmp;
451 case 5: /* Indirect displacement. */
452 reg += QREG_A0;
453 tmp = gen_new_qreg(QMODE_I32);
454 ext = lduw_code(s->pc);
455 s->pc += 2;
456 gen_op_add32(tmp, reg, gen_im32((int16_t)ext));
457 return tmp;
458 case 6: /* Indirect index + displacement. */
459 reg += QREG_A0;
460 return gen_lea_indexed(s, opsize, reg);
461 case 7: /* Other */
462 switch (reg) {
463 case 0: /* Absolute short. */
464 offset = ldsw_code(s->pc);
465 s->pc += 2;
466 return gen_im32(offset);
467 case 1: /* Absolute long. */
468 offset = read_im32(s);
469 return gen_im32(offset);
470 case 2: /* pc displacement */
471 tmp = gen_new_qreg(QMODE_I32);
472 offset = s->pc;
473 offset += ldsw_code(s->pc);
474 s->pc += 2;
475 return gen_im32(offset);
476 case 3: /* pc index+displacement. */
477 return gen_lea_indexed(s, opsize, -1);
478 case 4: /* Immediate. */
479 default:
480 return -1;
481 }
482 }
483 /* Should never happen. */
484 return -1;
485 }
486
487 /* Helper function for gen_ea.  Reuse the computed address between the
488    read and write accesses of read/write operands. */
489 static inline int gen_ea_once(DisasContext *s, uint16_t insn, int opsize,
490 int val, int *addrp)
491 {
492 int tmp;
493
494 if (addrp && val > 0) {
495 tmp = *addrp;
496 } else {
497 tmp = gen_lea(s, insn, opsize);
498 if (tmp == -1)
499 return -1;
500 if (addrp)
501 *addrp = tmp;
502 }
503 return gen_ldst(s, opsize, tmp, val);
504 }
505
506 /* Generate code to load/store a value to/from an EA.  If VAL > 0 this is
507    a write, otherwise it is a read (-1 == sign extend, 0 == zero extend).
508    ADDRP is non-null for read/write operands. */
509 static int gen_ea(DisasContext *s, uint16_t insn, int opsize, int val,
510 int *addrp)
511 {
512 int reg;
513 int result;
514 uint32_t offset;
515
516 reg = insn & 7;
517 switch ((insn >> 3) & 7) {
518 case 0: /* Data register direct. */
519 reg += QREG_D0;
520 if (val > 0) {
521 gen_partset_reg(opsize, reg, val);
522 return 0;
523 } else {
524 return gen_extend(reg, opsize, val);
525 }
526 case 1: /* Address register direct. */
527 reg += QREG_A0;
528 if (val > 0) {
529 gen_op_mov32(reg, val);
530 return 0;
531 } else {
532 return gen_extend(reg, opsize, val);
533 }
534 case 2: /* Indirect register */
535 reg += QREG_A0;
536 return gen_ldst(s, opsize, reg, val);
537 case 3: /* Indirect postincrement. */
538 reg += QREG_A0;
539 result = gen_ldst(s, opsize, reg, val);
540 /* ??? This is not exception safe. The instruction may still
541 fault after this point. */
542 if (val > 0 || !addrp)
543 gen_op_add32(reg, reg, gen_im32(opsize_bytes(opsize)));
544 return result;
545     case 4: /* Indirect predecrement. */
546 {
547 int tmp;
548 if (addrp && val > 0) {
549 tmp = *addrp;
550 } else {
551 tmp = gen_lea(s, insn, opsize);
552 if (tmp == -1)
553 return -1;
554 if (addrp)
555 *addrp = tmp;
556 }
557 result = gen_ldst(s, opsize, tmp, val);
558 /* ??? This is not exception safe. The instruction may still
559 fault after this point. */
560 if (val > 0 || !addrp) {
561 reg += QREG_A0;
562 gen_op_mov32(reg, tmp);
563 }
564 }
565 return result;
566 case 5: /* Indirect displacement. */
567 case 6: /* Indirect index + displacement. */
568 return gen_ea_once(s, insn, opsize, val, addrp);
569 case 7: /* Other */
570 switch (reg) {
571 case 0: /* Absolute short. */
572 case 1: /* Absolute long. */
573 case 2: /* pc displacement */
574 case 3: /* pc index+displacement. */
575 return gen_ea_once(s, insn, opsize, val, addrp);
576 case 4: /* Immediate. */
577 /* Sign extend values for consistency. */
578 switch (opsize) {
579 case OS_BYTE:
580 if (val)
581 offset = ldsb_code(s->pc + 1);
582 else
583 offset = ldub_code(s->pc + 1);
584 s->pc += 2;
585 break;
586 case OS_WORD:
587 if (val)
588 offset = ldsw_code(s->pc);
589 else
590 offset = lduw_code(s->pc);
591 s->pc += 2;
592 break;
593 case OS_LONG:
594 offset = read_im32(s);
595 break;
596 default:
597 qemu_assert(0, "Bad immediate operand");
598 }
599 return gen_im32(offset);
600 default:
601 return -1;
602 }
603 }
604 /* Should never happen. */
605 return -1;
606 }
607
608 static void gen_logic_cc(DisasContext *s, int val)
609 {
610 gen_op_logic_cc(val);
611 s->cc_op = CC_OP_LOGIC;
612 }
613
614 static void gen_jmpcc(DisasContext *s, int cond, int l1)
615 {
616 int tmp;
617
618 gen_flush_flags(s);
619 switch (cond) {
620 case 0: /* T */
621 gen_op_jmp(l1);
622 break;
623 case 1: /* F */
624 break;
625 case 2: /* HI (!C && !Z) */
626 tmp = gen_new_qreg(QMODE_I32);
627 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_C | CCF_Z));
628 gen_op_jmp_z32(tmp, l1);
629 break;
630 case 3: /* LS (C || Z) */
631 tmp = gen_new_qreg(QMODE_I32);
632 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_C | CCF_Z));
633 gen_op_jmp_nz32(tmp, l1);
634 break;
635 case 4: /* CC (!C) */
636 tmp = gen_new_qreg(QMODE_I32);
637 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_C));
638 gen_op_jmp_z32(tmp, l1);
639 break;
640 case 5: /* CS (C) */
641 tmp = gen_new_qreg(QMODE_I32);
642 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_C));
643 gen_op_jmp_nz32(tmp, l1);
644 break;
645 case 6: /* NE (!Z) */
646 tmp = gen_new_qreg(QMODE_I32);
647 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_Z));
648 gen_op_jmp_z32(tmp, l1);
649 break;
650 case 7: /* EQ (Z) */
651 tmp = gen_new_qreg(QMODE_I32);
652 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_Z));
653 gen_op_jmp_nz32(tmp, l1);
654 break;
655 case 8: /* VC (!V) */
656 tmp = gen_new_qreg(QMODE_I32);
657 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_V));
658 gen_op_jmp_z32(tmp, l1);
659 break;
660 case 9: /* VS (V) */
661 tmp = gen_new_qreg(QMODE_I32);
662 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_V));
663 gen_op_jmp_nz32(tmp, l1);
664 break;
665 case 10: /* PL (!N) */
666 tmp = gen_new_qreg(QMODE_I32);
667 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_N));
668 gen_op_jmp_z32(tmp, l1);
669 break;
670 case 11: /* MI (N) */
671 tmp = gen_new_qreg(QMODE_I32);
672 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_N));
673 gen_op_jmp_nz32(tmp, l1);
674 break;
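        /* GE/LT/GT/LE need N ^ V.  CCF_N sits two bits above CCF_V in
           CC_DEST, so shifting right by 2 lines N up with V and the XOR
           leaves N ^ V in the CCF_V bit position. */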
675 case 12: /* GE (!(N ^ V)) */
676 tmp = gen_new_qreg(QMODE_I32);
677 gen_op_shr32(tmp, QREG_CC_DEST, gen_im32(2));
678 gen_op_xor32(tmp, tmp, QREG_CC_DEST);
679 gen_op_and32(tmp, tmp, gen_im32(CCF_V));
680 gen_op_jmp_z32(tmp, l1);
681 break;
682 case 13: /* LT (N ^ V) */
683 tmp = gen_new_qreg(QMODE_I32);
684 gen_op_shr32(tmp, QREG_CC_DEST, gen_im32(2));
685 gen_op_xor32(tmp, tmp, QREG_CC_DEST);
686 gen_op_and32(tmp, tmp, gen_im32(CCF_V));
687 gen_op_jmp_nz32(tmp, l1);
688 break;
689 case 14: /* GT (!(Z || (N ^ V))) */
690 {
691 int l2;
692 l2 = gen_new_label();
693 tmp = gen_new_qreg(QMODE_I32);
694 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_Z));
695 gen_op_jmp_nz32(tmp, l2);
696 tmp = gen_new_qreg(QMODE_I32);
697 gen_op_shr32(tmp, QREG_CC_DEST, gen_im32(2));
698 gen_op_xor32(tmp, tmp, QREG_CC_DEST);
699 gen_op_and32(tmp, tmp, gen_im32(CCF_V));
700 gen_op_jmp_nz32(tmp, l2);
701 gen_op_jmp(l1);
702 gen_set_label(l2);
703 }
704 break;
705 case 15: /* LE (Z || (N ^ V)) */
706 tmp = gen_new_qreg(QMODE_I32);
707 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_Z));
708 gen_op_jmp_nz32(tmp, l1);
709 tmp = gen_new_qreg(QMODE_I32);
710 gen_op_shr32(tmp, QREG_CC_DEST, gen_im32(2));
711 gen_op_xor32(tmp, tmp, QREG_CC_DEST);
712 gen_op_and32(tmp, tmp, gen_im32(CCF_V));
713 gen_op_jmp_nz32(tmp, l1);
714 break;
715 default:
716         /* Should never happen. */
717 abort();
718 }
719 }
720
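/* Scc: set the low byte of Dn to 0xff if the condition holds, otherwise
   to 0x00.  The condition is tested inverted so the 0xff store can be
   jumped over. */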
721 DISAS_INSN(scc)
722 {
723 int l1;
724 int cond;
725 int reg;
726
727 l1 = gen_new_label();
728 cond = (insn >> 8) & 0xf;
729 reg = DREG(insn, 0);
730 gen_op_and32(reg, reg, gen_im32(0xffffff00));
731 gen_jmpcc(s, cond ^ 1, l1);
732 gen_op_or32(reg, reg, gen_im32(0xff));
733 gen_set_label(l1);
734 }
735
736 /* Force a TB lookup after an instruction that changes the CPU state. */
737 static void gen_lookup_tb(DisasContext *s)
738 {
739 gen_flush_cc_op(s);
740 gen_op_mov32(QREG_PC, gen_im32(s->pc));
741 s->is_jmp = DISAS_UPDATE;
742 }
743
744 /* Generate a jump to the address in qreg DEST. */
745 static void gen_jmp(DisasContext *s, int dest)
746 {
747 gen_flush_cc_op(s);
748 gen_op_mov32(QREG_PC, dest);
749 s->is_jmp = DISAS_JUMP;
750 }
751
752 static void gen_exception(DisasContext *s, uint32_t where, int nr)
753 {
754 gen_flush_cc_op(s);
755 gen_jmp(s, gen_im32(where));
756 gen_op_raise_exception(nr);
757 }
758
759 static inline void gen_addr_fault(DisasContext *s)
760 {
761 gen_exception(s, s->insn_pc, EXCP_ADDRESS);
762 }
763
764 #define SRC_EA(result, opsize, val, addrp) do { \
765 result = gen_ea(s, insn, opsize, val, addrp); \
766 if (result == -1) { \
767 gen_addr_fault(s); \
768 return; \
769 } \
770 } while (0)
771
772 #define DEST_EA(insn, opsize, val, addrp) do { \
773 int ea_result = gen_ea(s, insn, opsize, val, addrp); \
774 if (ea_result == -1) { \
775 gen_addr_fault(s); \
776 return; \
777 } \
778 } while (0)
779
780 /* Generate a jump to an immediate address. */
781 static void gen_jmp_tb(DisasContext *s, int n, uint32_t dest)
782 {
783 TranslationBlock *tb;
784
785 tb = s->tb;
786 if (__builtin_expect (s->singlestep_enabled, 0)) {
787 gen_exception(s, dest, EXCP_DEBUG);
788 } else if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) ||
789 (s->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
790 gen_op_goto_tb(0, n, (long)tb);
791 gen_op_mov32(QREG_PC, gen_im32(dest));
792 gen_op_mov32(QREG_T0, gen_im32((long)tb + n));
793 gen_op_exit_tb();
794 } else {
795 gen_jmp(s, gen_im32(dest));
796 gen_op_mov32(QREG_T0, gen_im32(0));
797 gen_op_exit_tb();
798 }
799 s->is_jmp = DISAS_TB_JUMP;
800 }
801
802 DISAS_INSN(undef_mac)
803 {
804 gen_exception(s, s->pc - 2, EXCP_LINEA);
805 }
806
807 DISAS_INSN(undef_fpu)
808 {
809 gen_exception(s, s->pc - 2, EXCP_LINEF);
810 }
811
812 DISAS_INSN(undef)
813 {
814 gen_exception(s, s->pc - 2, EXCP_UNSUPPORTED);
815 cpu_abort(cpu_single_env, "Illegal instruction: %04x @ %08x",
816 insn, s->pc - 2);
817 }
818
819 DISAS_INSN(mulw)
820 {
821 int reg;
822 int tmp;
823 int src;
824 int sign;
825
826 sign = (insn & 0x100) != 0;
827 reg = DREG(insn, 9);
828 tmp = gen_new_qreg(QMODE_I32);
829 if (sign)
830 gen_op_ext16s32(tmp, reg);
831 else
832 gen_op_ext16u32(tmp, reg);
833 SRC_EA(src, OS_WORD, sign ? -1 : 0, NULL);
834 gen_op_mul32(tmp, tmp, src);
835 gen_op_mov32(reg, tmp);
836 /* Unlike m68k, coldfire always clears the overflow bit. */
837 gen_logic_cc(s, tmp);
838 }
839
840 DISAS_INSN(divw)
841 {
842 int reg;
843 int tmp;
844 int src;
845 int sign;
846
847 sign = (insn & 0x100) != 0;
848 reg = DREG(insn, 9);
849 if (sign) {
850 gen_op_ext16s32(QREG_DIV1, reg);
851 } else {
852 gen_op_ext16u32(QREG_DIV1, reg);
853 }
854 SRC_EA(src, OS_WORD, sign ? -1 : 0, NULL);
855 gen_op_mov32(QREG_DIV2, src);
856 if (sign) {
857 gen_op_divs(1);
858 } else {
859 gen_op_divu(1);
860 }
861
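    /* Pack the result: the 16-bit quotient (DIV1) goes into the low half
       and the remainder (DIV2) into the high half of the destination. */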
862 tmp = gen_new_qreg(QMODE_I32);
863 src = gen_new_qreg(QMODE_I32);
864 gen_op_ext16u32(tmp, QREG_DIV1);
865 gen_op_shl32(src, QREG_DIV2, gen_im32(16));
866 gen_op_or32(reg, tmp, src);
867 gen_op_flags_set();
868 s->cc_op = CC_OP_FLAGS;
869 }
870
871 DISAS_INSN(divl)
872 {
873 int num;
874 int den;
875 int reg;
876 uint16_t ext;
877
878 ext = lduw_code(s->pc);
879 s->pc += 2;
880 if (ext & 0x87f8) {
881 gen_exception(s, s->pc - 4, EXCP_UNSUPPORTED);
882 return;
883 }
884 num = DREG(ext, 12);
885 reg = DREG(ext, 0);
886 gen_op_mov32(QREG_DIV1, num);
887 SRC_EA(den, OS_LONG, 0, NULL);
888 gen_op_mov32(QREG_DIV2, den);
889 if (ext & 0x0800) {
890 gen_op_divs(2);
891 } else {
892 gen_op_divu(2);
893 }
894 if (num == reg) {
895 /* div */
896 gen_op_mov32 (reg, QREG_DIV1);
897 } else {
898 /* rem */
899 gen_op_mov32 (reg, QREG_DIV2);
900 }
901 gen_op_flags_set();
902 s->cc_op = CC_OP_FLAGS;
903 }
904
905 DISAS_INSN(addsub)
906 {
907 int reg;
908 int dest;
909 int src;
910 int tmp;
911 int addr;
912 int add;
913
914 add = (insn & 0x4000) != 0;
915 reg = DREG(insn, 9);
916 dest = gen_new_qreg(QMODE_I32);
917 if (insn & 0x100) {
918 SRC_EA(tmp, OS_LONG, 0, &addr);
919 src = reg;
920 } else {
921 tmp = reg;
922 SRC_EA(src, OS_LONG, 0, NULL);
923 }
924 if (add) {
925 gen_op_add32(dest, tmp, src);
926 gen_op_update_xflag_lt(dest, src);
927 s->cc_op = CC_OP_ADD;
928 } else {
929 gen_op_update_xflag_lt(tmp, src);
930 gen_op_sub32(dest, tmp, src);
931 s->cc_op = CC_OP_SUB;
932 }
933 gen_op_update_cc_add(dest, src);
934 if (insn & 0x100) {
935 DEST_EA(insn, OS_LONG, dest, &addr);
936 } else {
937 gen_op_mov32(reg, dest);
938 }
939 }
940
941
942 /* Reverse the order of the bits in REG. */
943 DISAS_INSN(bitrev)
944 {
945 int val;
946 int tmp1;
947 int tmp2;
948 int reg;
949
950 val = gen_new_qreg(QMODE_I32);
951 tmp1 = gen_new_qreg(QMODE_I32);
952 tmp2 = gen_new_qreg(QMODE_I32);
953 reg = DREG(insn, 0);
954 gen_op_mov32(val, reg);
955 /* Reverse bits within each nibble. */
956 gen_op_shl32(tmp1, val, gen_im32(3));
957 gen_op_and32(tmp1, tmp1, gen_im32(0x88888888));
958 gen_op_shl32(tmp2, val, gen_im32(1));
959 gen_op_and32(tmp2, tmp2, gen_im32(0x44444444));
960 gen_op_or32(tmp1, tmp1, tmp2);
961 gen_op_shr32(tmp2, val, gen_im32(1));
962 gen_op_and32(tmp2, tmp2, gen_im32(0x22222222));
963 gen_op_or32(tmp1, tmp1, tmp2);
964 gen_op_shr32(tmp2, val, gen_im32(3));
965 gen_op_and32(tmp2, tmp2, gen_im32(0x11111111));
966 gen_op_or32(tmp1, tmp1, tmp2);
967     /* Reverse nibbles within bytes. */
968 gen_op_shl32(val, tmp1, gen_im32(4));
969 gen_op_and32(val, val, gen_im32(0xf0f0f0f0));
970 gen_op_shr32(tmp2, tmp1, gen_im32(4));
971 gen_op_and32(tmp2, tmp2, gen_im32(0x0f0f0f0f));
972 gen_op_or32(val, val, tmp2);
973 /* Reverse bytes. */
974 gen_op_bswap32(reg, val);
975 gen_op_mov32(reg, val);
976 }
977
978 DISAS_INSN(bitop_reg)
979 {
980 int opsize;
981 int op;
982 int src1;
983 int src2;
984 int tmp;
985 int addr;
986 int dest;
987
988 if ((insn & 0x38) != 0)
989 opsize = OS_BYTE;
990 else
991 opsize = OS_LONG;
992 op = (insn >> 6) & 3;
993 SRC_EA(src1, opsize, 0, op ? &addr: NULL);
994 src2 = DREG(insn, 9);
995 dest = gen_new_qreg(QMODE_I32);
996
997 gen_flush_flags(s);
998 tmp = gen_new_qreg(QMODE_I32);
999 if (opsize == OS_BYTE)
1000 gen_op_and32(tmp, src2, gen_im32(7));
1001 else
1002 gen_op_and32(tmp, src2, gen_im32(31));
1003 src2 = tmp;
1004 tmp = gen_new_qreg(QMODE_I32);
1005 gen_op_shl32(tmp, gen_im32(1), src2);
1006
1007 gen_op_btest(src1, tmp);
1008 switch (op) {
1009 case 1: /* bchg */
1010 gen_op_xor32(dest, src1, tmp);
1011 break;
1012 case 2: /* bclr */
1013 gen_op_not32(tmp, tmp);
1014 gen_op_and32(dest, src1, tmp);
1015 break;
1016 case 3: /* bset */
1017 gen_op_or32(dest, src1, tmp);
1018 break;
1019 default: /* btst */
1020 break;
1021 }
1022 if (op)
1023 DEST_EA(insn, opsize, dest, &addr);
1024 }
1025
1026 DISAS_INSN(sats)
1027 {
1028 int reg;
1029 int tmp;
1030 int l1;
1031
1032 reg = DREG(insn, 0);
1033 tmp = gen_new_qreg(QMODE_I32);
1034 gen_flush_flags(s);
1035 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_V));
1036 l1 = gen_new_label();
1037 gen_op_jmp_z32(tmp, l1);
1038 tmp = gen_new_qreg(QMODE_I32);
1039 gen_op_shr32(tmp, reg, gen_im32(31));
1040 gen_op_xor32(tmp, tmp, gen_im32(0x80000000));
1041 gen_op_mov32(reg, tmp);
1042 gen_set_label(l1);
1043 gen_logic_cc(s, tmp);
1044 }
1045
1046 static void gen_push(DisasContext *s, int val)
1047 {
1048 int tmp;
1049
1050 tmp = gen_new_qreg(QMODE_I32);
1051 gen_op_sub32(tmp, QREG_SP, gen_im32(4));
1052 gen_store(s, OS_LONG, tmp, val);
1053 gen_op_mov32(QREG_SP, tmp);
1054 }
1055
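/* movem.l: the extension word is a register mask with D0-D7 in bits 0-7
   and A0-A7 in bits 8-15; registers are transferred in ascending order
   starting at the effective address. */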
1056 DISAS_INSN(movem)
1057 {
1058 int addr;
1059 int i;
1060 uint16_t mask;
1061 int reg;
1062 int tmp;
1063 int is_load;
1064
1065 mask = lduw_code(s->pc);
1066 s->pc += 2;
1067 tmp = gen_lea(s, insn, OS_LONG);
1068 if (tmp == -1) {
1069 gen_addr_fault(s);
1070 return;
1071 }
1072 addr = gen_new_qreg(QMODE_I32);
1073 gen_op_mov32(addr, tmp);
1074 is_load = ((insn & 0x0400) != 0);
1075 for (i = 0; i < 16; i++, mask >>= 1) {
1076 if (mask & 1) {
1077 if (i < 8)
1078 reg = DREG(i, 0);
1079 else
1080 reg = AREG(i, 0);
1081 if (is_load) {
1082 tmp = gen_load(s, OS_LONG, addr, 0);
1083 gen_op_mov32(reg, tmp);
1084 } else {
1085 gen_store(s, OS_LONG, addr, reg);
1086 }
1087 if (mask != 1)
1088 gen_op_add32(addr, addr, gen_im32(4));
1089 }
1090 }
1091 }
1092
1093 DISAS_INSN(bitop_im)
1094 {
1095 int opsize;
1096 int op;
1097 int src1;
1098 uint32_t mask;
1099 int bitnum;
1100 int tmp;
1101 int addr;
1102 int dest;
1103
1104 if ((insn & 0x38) != 0)
1105 opsize = OS_BYTE;
1106 else
1107 opsize = OS_LONG;
1108 op = (insn >> 6) & 3;
1109
1110 bitnum = lduw_code(s->pc);
1111 s->pc += 2;
1112 if (bitnum & 0xff00) {
1113 disas_undef(s, insn);
1114 return;
1115 }
1116
1117 SRC_EA(src1, opsize, 0, op ? &addr: NULL);
1118
1119 gen_flush_flags(s);
1120 tmp = gen_new_qreg(QMODE_I32);
1121 if (opsize == OS_BYTE)
1122 bitnum &= 7;
1123 else
1124 bitnum &= 31;
1125 mask = 1 << bitnum;
1126
1127 gen_op_btest(src1, gen_im32(mask));
1128 if (op)
1129 dest = gen_new_qreg(QMODE_I32);
1130 else
1131 dest = -1;
1132
1133 switch (op) {
1134 case 1: /* bchg */
1135 gen_op_xor32(dest, src1, gen_im32(mask));
1136 break;
1137 case 2: /* bclr */
1138 gen_op_and32(dest, src1, gen_im32(~mask));
1139 break;
1140 case 3: /* bset */
1141 gen_op_or32(dest, src1, gen_im32(mask));
1142 break;
1143 default: /* btst */
1144 break;
1145 }
1146 if (op)
1147 DEST_EA(insn, opsize, dest, &addr);
1148 }
1149
1150 DISAS_INSN(arith_im)
1151 {
1152 int op;
1153 int src1;
1154 int dest;
1155 int src2;
1156 int addr;
1157
1158 op = (insn >> 9) & 7;
1159 SRC_EA(src1, OS_LONG, 0, (op == 6) ? NULL : &addr);
1160 src2 = gen_im32(read_im32(s));
1161 dest = gen_new_qreg(QMODE_I32);
1162 switch (op) {
1163 case 0: /* ori */
1164 gen_op_or32(dest, src1, src2);
1165 gen_logic_cc(s, dest);
1166 break;
1167 case 1: /* andi */
1168 gen_op_and32(dest, src1, src2);
1169 gen_logic_cc(s, dest);
1170 break;
1171 case 2: /* subi */
1172 gen_op_mov32(dest, src1);
1173 gen_op_update_xflag_lt(dest, src2);
1174 gen_op_sub32(dest, dest, src2);
1175 gen_op_update_cc_add(dest, src2);
1176 s->cc_op = CC_OP_SUB;
1177 break;
1178 case 3: /* addi */
1179 gen_op_mov32(dest, src1);
1180 gen_op_add32(dest, dest, src2);
1181 gen_op_update_cc_add(dest, src2);
1182 gen_op_update_xflag_lt(dest, src2);
1183 s->cc_op = CC_OP_ADD;
1184 break;
1185 case 5: /* eori */
1186 gen_op_xor32(dest, src1, src2);
1187 gen_logic_cc(s, dest);
1188 break;
1189 case 6: /* cmpi */
1190 gen_op_mov32(dest, src1);
1191 gen_op_sub32(dest, dest, src2);
1192 gen_op_update_cc_add(dest, src2);
1193 s->cc_op = CC_OP_SUB;
1194 break;
1195 default:
1196 abort();
1197 }
1198 if (op != 6) {
1199 DEST_EA(insn, OS_LONG, dest, &addr);
1200 }
1201 }
1202
1203 DISAS_INSN(byterev)
1204 {
1205 int reg;
1206
1207 reg = DREG(insn, 0);
1208 gen_op_bswap32(reg, reg);
1209 }
1210
1211 DISAS_INSN(move)
1212 {
1213 int src;
1214 int dest;
1215 int op;
1216 int opsize;
1217
1218 switch (insn >> 12) {
1219 case 1: /* move.b */
1220 opsize = OS_BYTE;
1221 break;
1222 case 2: /* move.l */
1223 opsize = OS_LONG;
1224 break;
1225 case 3: /* move.w */
1226 opsize = OS_WORD;
1227 break;
1228 default:
1229 abort();
1230 }
1231 SRC_EA(src, opsize, -1, NULL);
1232 op = (insn >> 6) & 7;
1233 if (op == 1) {
1234 /* movea */
1235 /* The value will already have been sign extended. */
1236 dest = AREG(insn, 9);
1237 gen_op_mov32(dest, src);
1238 } else {
1239 /* normal move */
1240 uint16_t dest_ea;
1241 dest_ea = ((insn >> 9) & 7) | (op << 3);
1242 DEST_EA(dest_ea, opsize, src, NULL);
1243 /* This will be correct because loads sign extend. */
1244 gen_logic_cc(s, src);
1245 }
1246 }
1247
1248 DISAS_INSN(negx)
1249 {
1250 int reg;
1251 int dest;
1252 int tmp;
1253
1254 gen_flush_flags(s);
1255 reg = DREG(insn, 0);
1256 dest = gen_new_qreg(QMODE_I32);
1257 gen_op_mov32 (dest, gen_im32(0));
1258 gen_op_subx_cc(dest, reg);
1259 /* !Z is sticky. */
1260 tmp = gen_new_qreg(QMODE_I32);
1261 gen_op_mov32 (tmp, QREG_CC_DEST);
1262 gen_op_update_cc_add(dest, reg);
1263 gen_op_mov32(reg, dest);
1264 s->cc_op = CC_OP_DYNAMIC;
1265 gen_flush_flags(s);
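    /* Z in the freshly computed flags may only stay set if it was also set
       before: AND CC_DEST with (old flags | ~CCF_Z), which leaves every
       other flag bit untouched. */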
1266 gen_op_or32(tmp, tmp, gen_im32(~CCF_Z));
1267 gen_op_and32(QREG_CC_DEST, QREG_CC_DEST, tmp);
1268 s->cc_op = CC_OP_FLAGS;
1269 }
1270
1271 DISAS_INSN(lea)
1272 {
1273 int reg;
1274 int tmp;
1275
1276 reg = AREG(insn, 9);
1277 tmp = gen_lea(s, insn, OS_LONG);
1278 if (tmp == -1) {
1279 gen_addr_fault(s);
1280 return;
1281 }
1282 gen_op_mov32(reg, tmp);
1283 }
1284
1285 DISAS_INSN(clr)
1286 {
1287 int opsize;
1288
1289 switch ((insn >> 6) & 3) {
1290 case 0: /* clr.b */
1291 opsize = OS_BYTE;
1292 break;
1293 case 1: /* clr.w */
1294 opsize = OS_WORD;
1295 break;
1296 case 2: /* clr.l */
1297 opsize = OS_LONG;
1298 break;
1299 default:
1300 abort();
1301 }
1302 DEST_EA(insn, opsize, gen_im32(0), NULL);
1303 gen_logic_cc(s, gen_im32(0));
1304 }
1305
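/* Build the CCR value from the lazily evaluated flags: X goes into bit 4
   above the N/Z/V/C bits held in CC_DEST. */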
1306 static int gen_get_ccr(DisasContext *s)
1307 {
1308 int dest;
1309
1310 gen_flush_flags(s);
1311 dest = gen_new_qreg(QMODE_I32);
1312 gen_op_get_xflag(dest);
1313 gen_op_shl32(dest, dest, gen_im32(4));
1314 gen_op_or32(dest, dest, QREG_CC_DEST);
1315 return dest;
1316 }
1317
1318 DISAS_INSN(move_from_ccr)
1319 {
1320 int reg;
1321 int ccr;
1322
1323 ccr = gen_get_ccr(s);
1324 reg = DREG(insn, 0);
1325 gen_partset_reg(OS_WORD, reg, ccr);
1326 }
1327
1328 DISAS_INSN(neg)
1329 {
1330 int reg;
1331 int src1;
1332
1333 reg = DREG(insn, 0);
1334 src1 = gen_new_qreg(QMODE_I32);
1335 gen_op_mov32(src1, reg);
1336 gen_op_neg32(reg, src1);
1337 s->cc_op = CC_OP_SUB;
1338 gen_op_update_cc_add(reg, src1);
1339 gen_op_update_xflag_lt(gen_im32(0), src1);
1340 s->cc_op = CC_OP_SUB;
1341 }
1342
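/* Set the condition codes (and, unless CCR_ONLY, the rest of SR) from the
   immediate value VAL. */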
1343 static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
1344 {
1345 gen_op_logic_cc(gen_im32(val & 0xf));
1346 gen_op_update_xflag_tst(gen_im32((val & 0x10) >> 4));
1347 if (!ccr_only) {
1348 gen_op_mov32(QREG_SR, gen_im32(val & 0xff00));
1349 }
1350 }
1351
1352 static void gen_set_sr(DisasContext *s, uint16_t insn, int ccr_only)
1353 {
1354 int src1;
1355 int reg;
1356
1357 s->cc_op = CC_OP_FLAGS;
1358 if ((insn & 0x38) == 0)
1359 {
1360 src1 = gen_new_qreg(QMODE_I32);
1361 reg = DREG(insn, 0);
1362 gen_op_and32(src1, reg, gen_im32(0xf));
1363 gen_op_logic_cc(src1);
1364 gen_op_shr32(src1, reg, gen_im32(4));
1365 gen_op_and32(src1, src1, gen_im32(1));
1366 gen_op_update_xflag_tst(src1);
1367 if (!ccr_only) {
1368 gen_op_and32(QREG_SR, reg, gen_im32(0xff00));
1369 }
1370 }
1371 else if ((insn & 0x3f) == 0x3c)
1372 {
1373 uint16_t val;
1374 val = lduw_code(s->pc);
1375 s->pc += 2;
1376 gen_set_sr_im(s, val, ccr_only);
1377 }
1378 else
1379 disas_undef(s, insn);
1380 }
1381
1382 DISAS_INSN(move_to_ccr)
1383 {
1384 gen_set_sr(s, insn, 1);
1385 }
1386
1387 DISAS_INSN(not)
1388 {
1389 int reg;
1390
1391 reg = DREG(insn, 0);
1392 gen_op_not32(reg, reg);
1393 gen_logic_cc(s, reg);
1394 }
1395
1396 DISAS_INSN(swap)
1397 {
1398 int dest;
1399 int src1;
1400 int src2;
1401 int reg;
1402
1403 dest = gen_new_qreg(QMODE_I32);
1404 src1 = gen_new_qreg(QMODE_I32);
1405 src2 = gen_new_qreg(QMODE_I32);
1406 reg = DREG(insn, 0);
1407 gen_op_shl32(src1, reg, gen_im32(16));
1408 gen_op_shr32(src2, reg, gen_im32(16));
1409 gen_op_or32(dest, src1, src2);
1410 gen_op_mov32(reg, dest);
1411 gen_logic_cc(s, dest);
1412 }
1413
1414 DISAS_INSN(pea)
1415 {
1416 int tmp;
1417
1418 tmp = gen_lea(s, insn, OS_LONG);
1419 if (tmp == -1) {
1420 gen_addr_fault(s);
1421 return;
1422 }
1423 gen_push(s, tmp);
1424 }
1425
1426 DISAS_INSN(ext)
1427 {
1428 int reg;
1429 int op;
1430 int tmp;
1431
1432 reg = DREG(insn, 0);
1433 op = (insn >> 6) & 7;
1434 tmp = gen_new_qreg(QMODE_I32);
1435 if (op == 3)
1436 gen_op_ext16s32(tmp, reg);
1437 else
1438 gen_op_ext8s32(tmp, reg);
1439 if (op == 2)
1440 gen_partset_reg(OS_WORD, reg, tmp);
1441 else
1442 gen_op_mov32(reg, tmp);
1443 gen_logic_cc(s, tmp);
1444 }
1445
1446 DISAS_INSN(tst)
1447 {
1448 int opsize;
1449 int tmp;
1450
1451 switch ((insn >> 6) & 3) {
1452 case 0: /* tst.b */
1453 opsize = OS_BYTE;
1454 break;
1455 case 1: /* tst.w */
1456 opsize = OS_WORD;
1457 break;
1458 case 2: /* tst.l */
1459 opsize = OS_LONG;
1460 break;
1461 default:
1462 abort();
1463 }
1464 SRC_EA(tmp, opsize, -1, NULL);
1465 gen_logic_cc(s, tmp);
1466 }
1467
1468 DISAS_INSN(pulse)
1469 {
1470 /* Implemented as a NOP. */
1471 }
1472
1473 DISAS_INSN(illegal)
1474 {
1475 gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
1476 }
1477
1478 /* ??? This should be atomic. */
1479 DISAS_INSN(tas)
1480 {
1481 int dest;
1482 int src1;
1483 int addr;
1484
1485 dest = gen_new_qreg(QMODE_I32);
1486 SRC_EA(src1, OS_BYTE, -1, &addr);
1487 gen_logic_cc(s, src1);
1488 gen_op_or32(dest, src1, gen_im32(0x80));
1489 DEST_EA(insn, OS_BYTE, dest, &addr);
1490 }
1491
1492 DISAS_INSN(mull)
1493 {
1494 uint16_t ext;
1495 int reg;
1496 int src1;
1497 int dest;
1498
1499 /* The upper 32 bits of the product are discarded, so
1500 muls.l and mulu.l are functionally equivalent. */
1501 ext = lduw_code(s->pc);
1502 s->pc += 2;
1503 if (ext & 0x87ff) {
1504 gen_exception(s, s->pc - 4, EXCP_UNSUPPORTED);
1505 return;
1506 }
1507 reg = DREG(ext, 12);
1508 SRC_EA(src1, OS_LONG, 0, NULL);
1509 dest = gen_new_qreg(QMODE_I32);
1510 gen_op_mul32(dest, src1, reg);
1511 gen_op_mov32(reg, dest);
1512 /* Unlike m68k, coldfire always clears the overflow bit. */
1513 gen_logic_cc(s, dest);
1514 }
1515
1516 DISAS_INSN(link)
1517 {
1518 int16_t offset;
1519 int reg;
1520 int tmp;
1521
1522 offset = ldsw_code(s->pc);
1523 s->pc += 2;
1524 reg = AREG(insn, 0);
1525 tmp = gen_new_qreg(QMODE_I32);
1526 gen_op_sub32(tmp, QREG_SP, gen_im32(4));
1527 gen_store(s, OS_LONG, tmp, reg);
1528 if (reg != QREG_SP)
1529 gen_op_mov32(reg, tmp);
1530 gen_op_add32(QREG_SP, tmp, gen_im32(offset));
1531 }
1532
1533 DISAS_INSN(unlk)
1534 {
1535 int src;
1536 int reg;
1537 int tmp;
1538
1539 src = gen_new_qreg(QMODE_I32);
1540 reg = AREG(insn, 0);
1541 gen_op_mov32(src, reg);
1542 tmp = gen_load(s, OS_LONG, src, 0);
1543 gen_op_mov32(reg, tmp);
1544 gen_op_add32(QREG_SP, src, gen_im32(4));
1545 }
1546
1547 DISAS_INSN(nop)
1548 {
1549 }
1550
1551 DISAS_INSN(rts)
1552 {
1553 int tmp;
1554
1555 tmp = gen_load(s, OS_LONG, QREG_SP, 0);
1556 gen_op_add32(QREG_SP, QREG_SP, gen_im32(4));
1557 gen_jmp(s, tmp);
1558 }
1559
1560 DISAS_INSN(jump)
1561 {
1562 int tmp;
1563
1564 /* Load the target address first to ensure correct exception
1565 behavior. */
1566 tmp = gen_lea(s, insn, OS_LONG);
1567 if (tmp == -1) {
1568 gen_addr_fault(s);
1569 return;
1570 }
1571 if ((insn & 0x40) == 0) {
1572 /* jsr */
1573 gen_push(s, gen_im32(s->pc));
1574 }
1575 gen_jmp(s, tmp);
1576 }
1577
1578 DISAS_INSN(addsubq)
1579 {
1580 int src1;
1581 int src2;
1582 int dest;
1583 int val;
1584 int addr;
1585
1586 SRC_EA(src1, OS_LONG, 0, &addr);
1587 val = (insn >> 9) & 7;
1588 if (val == 0)
1589 val = 8;
1590 src2 = gen_im32(val);
1591 dest = gen_new_qreg(QMODE_I32);
1592 gen_op_mov32(dest, src1);
1593 if ((insn & 0x38) == 0x08) {
1594 /* Don't update condition codes if the destination is an
1595 address register. */
1596 if (insn & 0x0100) {
1597 gen_op_sub32(dest, dest, src2);
1598 } else {
1599 gen_op_add32(dest, dest, src2);
1600 }
1601 } else {
1602 if (insn & 0x0100) {
1603 gen_op_update_xflag_lt(dest, src2);
1604 gen_op_sub32(dest, dest, src2);
1605 s->cc_op = CC_OP_SUB;
1606 } else {
1607 gen_op_add32(dest, dest, src2);
1608 gen_op_update_xflag_lt(dest, src2);
1609 s->cc_op = CC_OP_ADD;
1610 }
1611 gen_op_update_cc_add(dest, src2);
1612 }
1613 DEST_EA(insn, OS_LONG, dest, &addr);
1614 }
1615
1616 DISAS_INSN(tpf)
1617 {
1618 switch (insn & 7) {
1619 case 2: /* One extension word. */
1620 s->pc += 2;
1621 break;
1622 case 3: /* Two extension words. */
1623 s->pc += 4;
1624 break;
1625 case 4: /* No extension words. */
1626 break;
1627 default:
1628 disas_undef(s, insn);
1629 }
1630 }
1631
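/* BRA/BSR/Bcc: an 8-bit displacement of 0 means a 16-bit displacement
   word follows, and 0xff means a 32-bit displacement follows. */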
1632 DISAS_INSN(branch)
1633 {
1634 int32_t offset;
1635 uint32_t base;
1636 int op;
1637 int l1;
1638
1639 base = s->pc;
1640 op = (insn >> 8) & 0xf;
1641 offset = (int8_t)insn;
1642 if (offset == 0) {
1643 offset = ldsw_code(s->pc);
1644 s->pc += 2;
1645 } else if (offset == -1) {
1646 offset = read_im32(s);
1647 }
1648 if (op == 1) {
1649 /* bsr */
1650 gen_push(s, gen_im32(s->pc));
1651 }
1652 gen_flush_cc_op(s);
1653 if (op > 1) {
1654 /* Bcc */
1655 l1 = gen_new_label();
1656 gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
1657 gen_jmp_tb(s, 1, base + offset);
1658 gen_set_label(l1);
1659 gen_jmp_tb(s, 0, s->pc);
1660 } else {
1661 /* Unconditional branch. */
1662 gen_jmp_tb(s, 0, base + offset);
1663 }
1664 }
1665
1666 DISAS_INSN(moveq)
1667 {
1668 int tmp;
1669
1670 tmp = gen_im32((int8_t)insn);
1671 gen_op_mov32(DREG(insn, 9), tmp);
1672 gen_logic_cc(s, tmp);
1673 }
1674
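/* mvz/mvs: move a byte or word to a data register with zero or sign
   extension; bit 7 of the opcode selects zero extension (mvz) and bit 6
   selects word size. */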
1675 DISAS_INSN(mvzs)
1676 {
1677 int opsize;
1678 int src;
1679 int reg;
1680
1681 if (insn & 0x40)
1682 opsize = OS_WORD;
1683 else
1684 opsize = OS_BYTE;
1685 SRC_EA(src, opsize, (insn & 0x80) ? 0 : -1, NULL);
1686 reg = DREG(insn, 9);
1687 gen_op_mov32(reg, src);
1688 gen_logic_cc(s, src);
1689 }
1690
1691 DISAS_INSN(or)
1692 {
1693 int reg;
1694 int dest;
1695 int src;
1696 int addr;
1697
1698 reg = DREG(insn, 9);
1699 dest = gen_new_qreg(QMODE_I32);
1700 if (insn & 0x100) {
1701 SRC_EA(src, OS_LONG, 0, &addr);
1702 gen_op_or32(dest, src, reg);
1703 DEST_EA(insn, OS_LONG, dest, &addr);
1704 } else {
1705 SRC_EA(src, OS_LONG, 0, NULL);
1706 gen_op_or32(dest, src, reg);
1707 gen_op_mov32(reg, dest);
1708 }
1709 gen_logic_cc(s, dest);
1710 }
1711
1712 DISAS_INSN(suba)
1713 {
1714 int src;
1715 int reg;
1716
1717 SRC_EA(src, OS_LONG, 0, NULL);
1718 reg = AREG(insn, 9);
1719 gen_op_sub32(reg, reg, src);
1720 }
1721
1722 DISAS_INSN(subx)
1723 {
1724 int reg;
1725 int src;
1726 int dest;
1727 int tmp;
1728
1729 gen_flush_flags(s);
1730 reg = DREG(insn, 9);
1731 src = DREG(insn, 0);
1732 dest = gen_new_qreg(QMODE_I32);
1733 gen_op_mov32 (dest, reg);
1734 gen_op_subx_cc(dest, src);
1735 /* !Z is sticky. */
1736 tmp = gen_new_qreg(QMODE_I32);
1737 gen_op_mov32 (tmp, QREG_CC_DEST);
1738 gen_op_update_cc_add(dest, src);
1739 gen_op_mov32(reg, dest);
1740 s->cc_op = CC_OP_DYNAMIC;
1741 gen_flush_flags(s);
1742 gen_op_or32(tmp, tmp, gen_im32(~CCF_Z));
1743 gen_op_and32(QREG_CC_DEST, QREG_CC_DEST, tmp);
1744 s->cc_op = CC_OP_FLAGS;
1745 }
1746
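/* mov3q: move a 3-bit immediate to the destination EA; an encoded value
   of 0 stands for -1. */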
1747 DISAS_INSN(mov3q)
1748 {
1749 int src;
1750 int val;
1751
1752 val = (insn >> 9) & 7;
1753 if (val == 0)
1754 val = -1;
1755 src = gen_im32(val);
1756 gen_logic_cc(s, src);
1757 DEST_EA(insn, OS_LONG, src, NULL);
1758 }
1759
1760 DISAS_INSN(cmp)
1761 {
1762 int op;
1763 int src;
1764 int reg;
1765 int dest;
1766 int opsize;
1767
1768 op = (insn >> 6) & 3;
1769 switch (op) {
1770 case 0: /* cmp.b */
1771 opsize = OS_BYTE;
1772 s->cc_op = CC_OP_CMPB;
1773 break;
1774 case 1: /* cmp.w */
1775 opsize = OS_WORD;
1776 s->cc_op = CC_OP_CMPW;
1777 break;
1778 case 2: /* cmp.l */
1779 opsize = OS_LONG;
1780 s->cc_op = CC_OP_SUB;
1781 break;
1782 default:
1783 abort();
1784 }
1785 SRC_EA(src, opsize, -1, NULL);
1786 reg = DREG(insn, 9);
1787 dest = gen_new_qreg(QMODE_I32);
1788 gen_op_sub32(dest, reg, src);
1789 gen_op_update_cc_add(dest, src);
1790 }
1791
1792 DISAS_INSN(cmpa)
1793 {
1794 int opsize;
1795 int src;
1796 int reg;
1797 int dest;
1798
1799 if (insn & 0x100) {
1800 opsize = OS_LONG;
1801 } else {
1802 opsize = OS_WORD;
1803 }
1804 SRC_EA(src, opsize, -1, NULL);
1805 reg = AREG(insn, 9);
1806 dest = gen_new_qreg(QMODE_I32);
1807 gen_op_sub32(dest, reg, src);
1808 gen_op_update_cc_add(dest, src);
1809 s->cc_op = CC_OP_SUB;
1810 }
1811
1812 DISAS_INSN(eor)
1813 {
1814 int src;
1815 int reg;
1816 int dest;
1817 int addr;
1818
1819 SRC_EA(src, OS_LONG, 0, &addr);
1820 reg = DREG(insn, 9);
1821 dest = gen_new_qreg(QMODE_I32);
1822 gen_op_xor32(dest, src, reg);
1823 gen_logic_cc(s, dest);
1824 DEST_EA(insn, OS_LONG, dest, &addr);
1825 }
1826
1827 DISAS_INSN(and)
1828 {
1829 int src;
1830 int reg;
1831 int dest;
1832 int addr;
1833
1834 reg = DREG(insn, 9);
1835 dest = gen_new_qreg(QMODE_I32);
1836 if (insn & 0x100) {
1837 SRC_EA(src, OS_LONG, 0, &addr);
1838 gen_op_and32(dest, src, reg);
1839 DEST_EA(insn, OS_LONG, dest, &addr);
1840 } else {
1841 SRC_EA(src, OS_LONG, 0, NULL);
1842 gen_op_and32(dest, src, reg);
1843 gen_op_mov32(reg, dest);
1844 }
1845 gen_logic_cc(s, dest);
1846 }
1847
1848 DISAS_INSN(adda)
1849 {
1850 int src;
1851 int reg;
1852
1853 SRC_EA(src, OS_LONG, 0, NULL);
1854 reg = AREG(insn, 9);
1855 gen_op_add32(reg, reg, src);
1856 }
1857
1858 DISAS_INSN(addx)
1859 {
1860 int reg;
1861 int src;
1862 int dest;
1863 int tmp;
1864
1865 gen_flush_flags(s);
1866 reg = DREG(insn, 9);
1867 src = DREG(insn, 0);
1868 dest = gen_new_qreg(QMODE_I32);
1869 gen_op_mov32 (dest, reg);
1870 gen_op_addx_cc(dest, src);
1871 /* !Z is sticky. */
1872 tmp = gen_new_qreg(QMODE_I32);
1873 gen_op_mov32 (tmp, QREG_CC_DEST);
1874 gen_op_update_cc_add(dest, src);
1875 gen_op_mov32(reg, dest);
1876 s->cc_op = CC_OP_DYNAMIC;
1877 gen_flush_flags(s);
1878 gen_op_or32(tmp, tmp, gen_im32(~CCF_Z));
1879 gen_op_and32(QREG_CC_DEST, QREG_CC_DEST, tmp);
1880 s->cc_op = CC_OP_FLAGS;
1881 }
1882
1883 DISAS_INSN(shift_im)
1884 {
1885 int reg;
1886 int tmp;
1887
1888 reg = DREG(insn, 0);
1889 tmp = (insn >> 9) & 7;
1890 if (tmp == 0)
1891 tmp = 8;
1892 if (insn & 0x100) {
1893 gen_op_shl_im_cc(reg, tmp);
1894 s->cc_op = CC_OP_SHL;
1895 } else {
1896 if (insn & 8) {
1897 gen_op_shr_im_cc(reg, tmp);
1898 s->cc_op = CC_OP_SHR;
1899 } else {
1900 gen_op_sar_im_cc(reg, tmp);
1901 s->cc_op = CC_OP_SAR;
1902 }
1903 }
1904 }
1905
1906 DISAS_INSN(shift_reg)
1907 {
1908 int reg;
1909 int src;
1910 int tmp;
1911
1912 reg = DREG(insn, 0);
1913 src = DREG(insn, 9);
1914 tmp = gen_new_qreg(QMODE_I32);
1915 gen_op_and32(tmp, src, gen_im32(63));
1916 if (insn & 0x100) {
1917 gen_op_shl_cc(reg, tmp);
1918 s->cc_op = CC_OP_SHL;
1919 } else {
1920 if (insn & 8) {
1921 gen_op_shr_cc(reg, tmp);
1922 s->cc_op = CC_OP_SHR;
1923 } else {
1924 gen_op_sar_cc(reg, tmp);
1925 s->cc_op = CC_OP_SAR;
1926 }
1927 }
1928 }
1929
1930 DISAS_INSN(ff1)
1931 {
1932 cpu_abort(NULL, "Unimplemented insn: ff1");
1933 }
1934
1935 static int gen_get_sr(DisasContext *s)
1936 {
1937 int ccr;
1938 int sr;
1939
1940 ccr = gen_get_ccr(s);
1941 sr = gen_new_qreg(QMODE_I32);
1942 gen_op_and32(sr, QREG_SR, gen_im32(0xffe0));
1943 gen_op_or32(sr, sr, ccr);
1944 return sr;
1945 }
1946
1947 DISAS_INSN(strldsr)
1948 {
1949 uint16_t ext;
1950 uint32_t addr;
1951
1952 addr = s->pc - 2;
1953 ext = lduw_code(s->pc);
1954 s->pc += 2;
1955 if (ext != 0x46FC) {
1956 gen_exception(s, addr, EXCP_UNSUPPORTED);
1957 return;
1958 }
1959 ext = lduw_code(s->pc);
1960 s->pc += 2;
1961 if (IS_USER(s) || (ext & SR_S) == 0) {
1962 gen_exception(s, addr, EXCP_PRIVILEGE);
1963 return;
1964 }
1965 gen_push(s, gen_get_sr(s));
1966 gen_set_sr_im(s, ext, 0);
1967 }
1968
1969 DISAS_INSN(move_from_sr)
1970 {
1971 int reg;
1972 int sr;
1973
1974 if (IS_USER(s)) {
1975 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
1976 return;
1977 }
1978 sr = gen_get_sr(s);
1979 reg = DREG(insn, 0);
1980 gen_partset_reg(OS_WORD, reg, sr);
1981 }
1982
1983 DISAS_INSN(move_to_sr)
1984 {
1985 if (IS_USER(s)) {
1986 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
1987 return;
1988 }
1989 gen_set_sr(s, insn, 0);
1990 gen_lookup_tb(s);
1991 }
1992
1993 DISAS_INSN(move_from_usp)
1994 {
1995 if (IS_USER(s)) {
1996 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
1997 return;
1998 }
1999 /* TODO: Implement USP. */
2000 gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
2001 }
2002
2003 DISAS_INSN(move_to_usp)
2004 {
2005 if (IS_USER(s)) {
2006 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
2007 return;
2008 }
2009 /* TODO: Implement USP. */
2010 gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
2011 }
2012
2013 DISAS_INSN(halt)
2014 {
2015 gen_jmp(s, gen_im32(s->pc));
2016 gen_op_halt();
2017 }
2018
2019 DISAS_INSN(stop)
2020 {
2021 uint16_t ext;
2022
2023 if (IS_USER(s)) {
2024 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
2025 return;
2026 }
2027
2028 ext = lduw_code(s->pc);
2029 s->pc += 2;
2030
2031 gen_set_sr_im(s, ext, 0);
2032 gen_jmp(s, gen_im32(s->pc));
2033 gen_op_stop();
2034 }
2035
2036 DISAS_INSN(rte)
2037 {
2038 if (IS_USER(s)) {
2039 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
2040 return;
2041 }
2042 gen_exception(s, s->pc - 2, EXCP_RTE);
2043 }
2044
2045 DISAS_INSN(movec)
2046 {
2047 uint16_t ext;
2048 int reg;
2049
2050 if (IS_USER(s)) {
2051 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
2052 return;
2053 }
2054
2055 ext = lduw_code(s->pc);
2056 s->pc += 2;
2057
2058 if (ext & 0x8000) {
2059 reg = AREG(ext, 12);
2060 } else {
2061 reg = DREG(ext, 12);
2062 }
2063 gen_op_movec(gen_im32(ext & 0xfff), reg);
2064 gen_lookup_tb(s);
2065 }
2066
2067 DISAS_INSN(intouch)
2068 {
2069 if (IS_USER(s)) {
2070 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
2071 return;
2072 }
2073 /* ICache fetch. Implement as no-op. */
2074 }
2075
2076 DISAS_INSN(cpushl)
2077 {
2078 if (IS_USER(s)) {
2079 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
2080 return;
2081 }
2082 /* Cache push/invalidate. Implement as no-op. */
2083 }
2084
2085 DISAS_INSN(wddata)
2086 {
2087 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
2088 }
2089
2090 DISAS_INSN(wdebug)
2091 {
2092 if (IS_USER(s)) {
2093 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
2094 return;
2095 }
2096 /* TODO: Implement wdebug. */
2097 qemu_assert(0, "WDEBUG not implemented");
2098 }
2099
2100 DISAS_INSN(trap)
2101 {
2102 gen_exception(s, s->pc - 2, EXCP_TRAP0 + (insn & 0xf));
2103 }
2104
2105 /* ??? FP exceptions are not implemented. Most exceptions are deferred until
2106 immediately before the next FP instruction is executed. */
2107 DISAS_INSN(fpu)
2108 {
2109 uint16_t ext;
2110 int opmode;
2111 int src;
2112 int dest;
2113 int res;
2114 int round;
2115 int opsize;
2116
2117 ext = lduw_code(s->pc);
2118 s->pc += 2;
2119 opmode = ext & 0x7f;
2120 switch ((ext >> 13) & 7) {
2121 case 0: case 2:
2122 break;
2123 case 1:
2124 goto undef;
2125 case 3: /* fmove out */
2126 src = FREG(ext, 7);
2127 /* fmove */
2128 /* ??? TODO: Proper behavior on overflow. */
2129 switch ((ext >> 10) & 7) {
2130 case 0:
2131 opsize = OS_LONG;
2132 res = gen_new_qreg(QMODE_I32);
2133 gen_op_f64_to_i32(res, src);
2134 break;
2135 case 1:
2136 opsize = OS_SINGLE;
2137 res = gen_new_qreg(QMODE_F32);
2138 gen_op_f64_to_f32(res, src);
2139 break;
2140 case 4:
2141 opsize = OS_WORD;
2142 res = gen_new_qreg(QMODE_I32);
2143 gen_op_f64_to_i32(res, src);
2144 break;
2145 case 5:
2146 opsize = OS_DOUBLE;
2147 res = src;
2148 break;
2149 case 6:
2150 opsize = OS_BYTE;
2151 res = gen_new_qreg(QMODE_I32);
2152 gen_op_f64_to_i32(res, src);
2153 break;
2154 default:
2155 goto undef;
2156 }
2157 DEST_EA(insn, opsize, res, NULL);
2158 return;
2159 case 4: /* fmove to control register. */
2160 switch ((ext >> 10) & 7) {
2161 case 4: /* FPCR */
2162 /* Not implemented. Ignore writes. */
2163 break;
2164 case 1: /* FPIAR */
2165 case 2: /* FPSR */
2166 default:
2167 cpu_abort(NULL, "Unimplemented: fmove to control %d",
2168 (ext >> 10) & 7);
2169 }
2170 break;
2171 case 5: /* fmove from control register. */
2172 switch ((ext >> 10) & 7) {
2173 case 4: /* FPCR */
2174 /* Not implemented. Always return zero. */
2175 res = gen_im32(0);
2176 break;
2177 case 1: /* FPIAR */
2178 case 2: /* FPSR */
2179 default:
2180 cpu_abort(NULL, "Unimplemented: fmove from control %d",
2181 (ext >> 10) & 7);
2182 goto undef;
2183 }
2184 DEST_EA(insn, OS_LONG, res, NULL);
2185 break;
2186 case 6: /* fmovem */
2187 case 7:
2188 {
2189 int addr;
2190 uint16_t mask;
2191 if ((ext & 0x1f00) != 0x1000 || (ext & 0xff) == 0)
2192 goto undef;
2193 src = gen_lea(s, insn, OS_LONG);
2194 if (src == -1) {
2195 gen_addr_fault(s);
2196 return;
2197 }
2198 addr = gen_new_qreg(QMODE_I32);
2199 gen_op_mov32(addr, src);
2200 mask = 0x80;
2201 dest = QREG_F0;
2202 while (mask) {
2203 if (ext & mask) {
2204 if (ext & (1 << 13)) {
2205 /* store */
2206 gen_st(s, f64, addr, dest);
2207 } else {
2208 /* load */
2209 gen_ld(s, f64, dest, addr);
2210 }
2211 if (ext & (mask - 1))
2212 gen_op_add32(addr, addr, gen_im32(8));
2213 }
2214 mask >>= 1;
2215 dest++;
2216 }
2217 }
2218 return;
2219 }
2220 if (ext & (1 << 14)) {
2221 int tmp;
2222
2223 /* Source effective address. */
2224 switch ((ext >> 10) & 7) {
2225 case 0: opsize = OS_LONG; break;
2226 case 1: opsize = OS_SINGLE; break;
2227 case 4: opsize = OS_WORD; break;
2228 case 5: opsize = OS_DOUBLE; break;
2229 case 6: opsize = OS_BYTE; break;
2230 default:
2231 goto undef;
2232 }
2233 SRC_EA(tmp, opsize, -1, NULL);
2234 if (opsize == OS_DOUBLE) {
2235 src = tmp;
2236 } else {
2237 src = gen_new_qreg(QMODE_F64);
2238 switch (opsize) {
2239 case OS_LONG:
2240 case OS_WORD:
2241 case OS_BYTE:
2242 gen_op_i32_to_f64(src, tmp);
2243 break;
2244 case OS_SINGLE:
2245 gen_op_f32_to_f64(src, tmp);
2246 break;
2247 }
2248 }
2249 } else {
2250 /* Source register. */
2251 src = FREG(ext, 10);
2252 }
2253 dest = FREG(ext, 7);
2254 res = gen_new_qreg(QMODE_F64);
2255 if (opmode != 0x3a)
2256 gen_op_movf64(res, dest);
2257 round = 1;
2258 switch (opmode) {
2259 case 0: case 0x40: case 0x44: /* fmove */
2260 gen_op_movf64(res, src);
2261 break;
2262 case 1: /* fint */
2263 gen_op_iround_f64(res, src);
2264 round = 0;
2265 break;
2266 case 3: /* fintrz */
2267 gen_op_itrunc_f64(res, src);
2268 round = 0;
2269 break;
2270 case 4: case 0x41: case 0x45: /* fsqrt */
2271 gen_op_sqrtf64(res, src);
2272 break;
2273 case 0x18: case 0x58: case 0x5c: /* fabs */
2274 gen_op_absf64(res, src);
2275 break;
2276 case 0x1a: case 0x5a: case 0x5e: /* fneg */
2277 gen_op_chsf64(res, src);
2278 break;
2279 case 0x20: case 0x60: case 0x64: /* fdiv */
2280 gen_op_divf64(res, res, src);
2281 break;
2282 case 0x22: case 0x62: case 0x66: /* fadd */
2283 gen_op_addf64(res, res, src);
2284 break;
2285 case 0x23: case 0x63: case 0x67: /* fmul */
2286 gen_op_mulf64(res, res, src);
2287 break;
2288 case 0x28: case 0x68: case 0x6c: /* fsub */
2289 gen_op_subf64(res, res, src);
2290 break;
2291 case 0x38: /* fcmp */
2292 gen_op_sub_cmpf64(res, res, src);
2293 dest = 0;
2294 round = 0;
2295 break;
2296 case 0x3a: /* ftst */
2297 gen_op_movf64(res, src);
2298 dest = 0;
2299 round = 0;
2300 break;
2301 default:
2302 goto undef;
2303 }
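    /* Decide whether the result must be rounded to single precision:
       opmodes with bit 6 set round unless bit 2 is also set; otherwise
       rounding only happens when the FPCR precision bit is set.  The
       rounding itself is a round trip through single precision below. */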
2304 if (round) {
2305 if (opmode & 0x40) {
2306 if ((opmode & 0x4) != 0)
2307 round = 0;
2308 } else if ((s->fpcr & M68K_FPCR_PREC) == 0) {
2309 round = 0;
2310 }
2311 }
2312 if (round) {
2313 int tmp;
2314
2315 tmp = gen_new_qreg(QMODE_F32);
2316 gen_op_f64_to_f32(tmp, res);
2317 gen_op_f32_to_f64(res, tmp);
2318 }
2319 gen_op_fp_result(res);
2320 if (dest) {
2321 gen_op_movf64(dest, res);
2322 }
2323 return;
2324 undef:
2325 s->pc -= 2;
2326 disas_undef_fpu(s, insn);
2327 }
2328
2329 DISAS_INSN(fbcc)
2330 {
2331 uint32_t offset;
2332 uint32_t addr;
2333 int flag;
2334 int zero;
2335 int l1;
2336
2337 addr = s->pc;
2338 offset = ldsw_code(s->pc);
2339 s->pc += 2;
2340 if (insn & (1 << 6)) {
2341 offset = (offset << 16) | lduw_code(s->pc);
2342 s->pc += 2;
2343 }
2344
2345 l1 = gen_new_label();
2346 /* TODO: Raise BSUN exception. */
2347 flag = gen_new_qreg(QMODE_I32);
2348 zero = gen_new_qreg(QMODE_F64);
2349 gen_op_zerof64(zero);
2350 gen_op_compare_quietf64(flag, QREG_FP_RESULT, zero);
2351 /* Jump to l1 if condition is true. */
2352 switch (insn & 0xf) {
2353 case 0: /* f */
2354 break;
2355 case 1: /* eq (=0) */
2356 gen_op_jmp_z32(flag, l1);
2357 break;
2358 case 2: /* ogt (=1) */
2359 gen_op_sub32(flag, flag, gen_im32(1));
2360 gen_op_jmp_z32(flag, l1);
2361 break;
2362 case 3: /* oge (=0 or =1) */
2363 gen_op_jmp_z32(flag, l1);
2364 gen_op_sub32(flag, flag, gen_im32(1));
2365 gen_op_jmp_z32(flag, l1);
2366 break;
2367 case 4: /* olt (=-1) */
2368 gen_op_jmp_s32(flag, l1);
2369 break;
2370 case 5: /* ole (=-1 or =0) */
2371 gen_op_jmp_s32(flag, l1);
2372 gen_op_jmp_z32(flag, l1);
2373 break;
2374 case 6: /* ogl (=-1 or =1) */
2375 gen_op_jmp_s32(flag, l1);
2376 gen_op_sub32(flag, flag, gen_im32(1));
2377 gen_op_jmp_z32(flag, l1);
2378 break;
2379 case 7: /* or (=2) */
2380 gen_op_sub32(flag, flag, gen_im32(2));
2381 gen_op_jmp_z32(flag, l1);
2382 break;
2383 case 8: /* un (<2) */
2384 gen_op_sub32(flag, flag, gen_im32(2));
2385 gen_op_jmp_s32(flag, l1);
2386 break;
2387 case 9: /* ueq (=0 or =2) */
2388 gen_op_jmp_z32(flag, l1);
2389 gen_op_sub32(flag, flag, gen_im32(2));
2390 gen_op_jmp_z32(flag, l1);
2391 break;
2392 case 10: /* ugt (>0) */
2393 /* ??? Add jmp_gtu. */
2394 gen_op_sub32(flag, flag, gen_im32(1));
2395 gen_op_jmp_ns32(flag, l1);
2396 break;
2397 case 11: /* uge (>=0) */
2398 gen_op_jmp_ns32(flag, l1);
2399 break;
2400 case 12: /* ult (=-1 or =2) */
2401 gen_op_jmp_s32(flag, l1);
2402 gen_op_sub32(flag, flag, gen_im32(2));
2403 gen_op_jmp_z32(flag, l1);
2404 break;
2405 case 13: /* ule (!=1) */
2406 gen_op_sub32(flag, flag, gen_im32(1));
2407 gen_op_jmp_nz32(flag, l1);
2408 break;
2409 case 14: /* ne (!=0) */
2410 gen_op_jmp_nz32(flag, l1);
2411 break;
2412 case 15: /* t */
2413 gen_op_mov32(flag, gen_im32(1));
2414 break;
2415 }
2416 gen_jmp_tb(s, 0, s->pc);
2417 gen_set_label(l1);
2418 gen_jmp_tb(s, 1, addr + offset);
2419 }
2420
2421 DISAS_INSN(frestore)
2422 {
2423 /* TODO: Implement frestore. */
2424 qemu_assert(0, "FRESTORE not implemented");
2425 }
2426
2427 DISAS_INSN(fsave)
2428 {
2429 /* TODO: Implement fsave. */
2430 qemu_assert(0, "FSAVE not implemented");
2431 }
2432
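/* Dispatch table with one handler per possible 16-bit opcode value,
   populated by register_opcode() below. */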
2433 static disas_proc opcode_table[65536];
2434
2435 static void
2436 register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
2437 {
2438 int i;
2439 int from;
2440 int to;
2441
2442 /* Sanity check. All set bits must be included in the mask. */
2443 if (opcode & ~mask) {
2444 fprintf(stderr,
2445 "qemu internal error: bogus opcode definition %04x/%04x\n",
2446 opcode, mask);
2447 abort();
2448 }
2449 /* This could probably be cleverer. For now just optimize the case where
2450 the top bits are known. */
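/* Worked example: scc is registered as opcode 50c0, mask f0f8.  The first
   clear bit of the mask from the top is 0x0800, so i becomes 0x1000 and
   entries 5000..5fff are scanned, filling those that match the mask.  */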
2451 /* Find the first zero bit in the mask. */
2452 i = 0x8000;
2453 while ((i & mask) != 0)
2454 i >>= 1;
2455 /* Iterate over all combinations of this and lower bits. */
2456 if (i == 0)
2457 i = 1;
2458 else
2459 i <<= 1;
2460 from = opcode & ~(i - 1);
2461 to = from + i;
2462 for (i = from; i < to; i++) {
2463 if ((i & mask) == opcode)
2464 opcode_table[i] = proc;
2465 }
2466 }
2467
2468 /* Register m68k opcode handlers. Order is important.
2469 Later insns override earlier ones. */
2470 void register_m68k_insns (CPUM68KState *env)
2471 {
2472 #define INSN(name, opcode, mask, feature) \
2473 if (m68k_feature(env, M68K_FEATURE_##feature)) \
2474 register_opcode(disas_##name, 0x##opcode, 0x##mask)
2475 INSN(undef, 0000, 0000, CF_ISA_A);
2476 INSN(arith_im, 0080, fff8, CF_ISA_A);
2477 INSN(bitrev, 00c0, fff8, CF_ISA_C);
2478 INSN(bitop_reg, 0100, f1c0, CF_ISA_A);
2479 INSN(bitop_reg, 0140, f1c0, CF_ISA_A);
2480 INSN(bitop_reg, 0180, f1c0, CF_ISA_A);
2481 INSN(bitop_reg, 01c0, f1c0, CF_ISA_A);
2482 INSN(arith_im, 0280, fff8, CF_ISA_A);
2483 INSN(byterev, 02c0, fff8, CF_ISA_A);
2484 INSN(arith_im, 0480, fff8, CF_ISA_A);
2485 INSN(ff1, 04c0, fff8, CF_ISA_C);
2486 INSN(arith_im, 0680, fff8, CF_ISA_A);
2487 INSN(bitop_im, 0800, ffc0, CF_ISA_A);
2488 INSN(bitop_im, 0840, ffc0, CF_ISA_A);
2489 INSN(bitop_im, 0880, ffc0, CF_ISA_A);
2490 INSN(bitop_im, 08c0, ffc0, CF_ISA_A);
2491 INSN(arith_im, 0a80, fff8, CF_ISA_A);
2492 INSN(arith_im, 0c00, ff38, CF_ISA_A);
2493 INSN(move, 1000, f000, CF_ISA_A);
2494 INSN(move, 2000, f000, CF_ISA_A);
2495 INSN(move, 3000, f000, CF_ISA_A);
2496 INSN(strldsr, 40e7, ffff, CF_ISA_A);
2497 INSN(negx, 4080, fff8, CF_ISA_A);
2498 INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
2499 INSN(lea, 41c0, f1c0, CF_ISA_A);
2500 INSN(clr, 4200, ff00, CF_ISA_A);
2501 INSN(undef, 42c0, ffc0, CF_ISA_A);
2502 INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
2503 INSN(neg, 4480, fff8, CF_ISA_A);
2504 INSN(move_to_ccr, 44c0, ffc0, CF_ISA_A);
2505 INSN(not, 4680, fff8, CF_ISA_A);
2506 INSN(move_to_sr, 46c0, ffc0, CF_ISA_A);
2507 INSN(pea, 4840, ffc0, CF_ISA_A);
2508 INSN(swap, 4840, fff8, CF_ISA_A);
2509 INSN(movem, 48c0, fbc0, CF_ISA_A);
2510 INSN(ext, 4880, fff8, CF_ISA_A);
2511 INSN(ext, 48c0, fff8, CF_ISA_A);
2512 INSN(ext, 49c0, fff8, CF_ISA_A);
2513 INSN(tst, 4a00, ff00, CF_ISA_A);
2514 INSN(tas, 4ac0, ffc0, CF_ISA_B);
2515 INSN(halt, 4ac8, ffff, CF_ISA_A);
2516 INSN(pulse, 4acc, ffff, CF_ISA_A);
2517 INSN(illegal, 4afc, ffff, CF_ISA_A);
2518 INSN(mull, 4c00, ffc0, CF_ISA_A);
2519 INSN(divl, 4c40, ffc0, CF_ISA_A);
2520 INSN(sats, 4c80, fff8, CF_ISA_B);
2521 INSN(trap, 4e40, fff0, CF_ISA_A);
2522 INSN(link, 4e50, fff8, CF_ISA_A);
2523 INSN(unlk, 4e58, fff8, CF_ISA_A);
2524 INSN(move_to_usp, 4e60, fff8, CF_ISA_B);
2525 INSN(move_from_usp, 4e68, fff8, CF_ISA_B);
2526 INSN(nop, 4e71, ffff, CF_ISA_A);
2527 INSN(stop, 4e72, ffff, CF_ISA_A);
2528 INSN(rte, 4e73, ffff, CF_ISA_A);
2529 INSN(rts, 4e75, ffff, CF_ISA_A);
2530 INSN(movec, 4e7b, ffff, CF_ISA_A);
2531 INSN(jump, 4e80, ffc0, CF_ISA_A);
2532 INSN(jump, 4ec0, ffc0, CF_ISA_A);
2533 INSN(addsubq, 5180, f1c0, CF_ISA_A);
2534 INSN(scc, 50c0, f0f8, CF_ISA_A);
2535 INSN(addsubq, 5080, f1c0, CF_ISA_A);
2536 INSN(tpf, 51f8, fff8, CF_ISA_A);
2537 INSN(branch, 6000, f000, CF_ISA_A);
2538 INSN(moveq, 7000, f100, CF_ISA_A);
2539 INSN(mvzs, 7100, f100, CF_ISA_B);
2540 INSN(or, 8000, f000, CF_ISA_A);
2541 INSN(divw, 80c0, f0c0, CF_ISA_A);
2542 INSN(addsub, 9000, f000, CF_ISA_A);
2543 INSN(subx, 9180, f1f8, CF_ISA_A);
2544 INSN(suba, 91c0, f1c0, CF_ISA_A);
2545 INSN(undef_mac, a000, f000, CF_ISA_A);
2546 INSN(mov3q, a140, f1c0, CF_ISA_B);
2547 INSN(cmp, b000, f1c0, CF_ISA_B); /* cmp.b */
2548 INSN(cmp, b040, f1c0, CF_ISA_B); /* cmp.w */
2549 INSN(cmpa, b0c0, f1c0, CF_ISA_B); /* cmpa.w */
2550 INSN(cmp, b080, f1c0, CF_ISA_A);
2551 INSN(cmpa, b1c0, f1c0, CF_ISA_A);
2552 INSN(eor, b180, f1c0, CF_ISA_A);
2553 INSN(and, c000, f000, CF_ISA_A);
2554 INSN(mulw, c0c0, f0c0, CF_ISA_A);
2555 INSN(addsub, d000, f000, CF_ISA_A);
2556 INSN(addx, d180, f1f8, CF_ISA_A);
2557 INSN(adda, d1c0, f1c0, CF_ISA_A);
2558 INSN(shift_im, e080, f0f0, CF_ISA_A);
2559 INSN(shift_reg, e0a0, f0f0, CF_ISA_A);
2560 INSN(undef_fpu, f000, f000, CF_ISA_A);
2561 INSN(fpu, f200, ffc0, CF_FPU);
2562 INSN(fbcc, f280, ffc0, CF_FPU);
2563 INSN(frestore, f340, ffc0, CF_FPU);
2564 INSN(fsave, f340, ffc0, CF_FPU);
2565 INSN(intouch, f340, ffc0, CF_ISA_A);
2566 INSN(cpushl, f428, ff38, CF_ISA_A);
2567 INSN(wddata, fb00, ff00, CF_ISA_A);
2568 INSN(wdebug, fbc0, ffc0, CF_ISA_A);
2569 #undef INSN
2570 }
2571
2572 /* ??? Some of this implementation is not exception safe. We should always
2573 write back the result to memory before setting the condition codes. */
2574 static void disas_m68k_insn(CPUState * env, DisasContext *s)
2575 {
2576 uint16_t insn;
2577
2578 insn = lduw_code(s->pc);
2579 s->pc += 2;
2580
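/* For ISA_A cores the catch-all undef handler (opcode 0000, mask 0000)
   fills the whole table first, so this indirect call always has a target.  */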
2581 opcode_table[insn](s, insn);
2582 }
2583
2584 #if 0
2585 /* Save the result of a floating point operation. */
2586 static void expand_op_fp_result(qOP *qop)
2587 {
2588 gen_op_movf64(QREG_FP_RESULT, qop->args[0]);
2589 }
2590
2591 /* Dummy op to indicate that the flags have been set. */
2592 static void expand_op_flags_set(qOP *qop)
2593 {
2594 }
2595
2596 /* Convert the condition codes into CC_OP_FLAGS format. */
2597 static void expand_op_flush_flags(qOP *qop)
2598 {
2599 int cc_opreg;
2600
2601 if (qop->args[0] == CC_OP_DYNAMIC)
2602 cc_opreg = QREG_CC_OP;
2603 else
2604 cc_opreg = gen_im32(qop->args[0]);
2605 gen_op_helper32(QREG_NULL, cc_opreg, HELPER_flush_flags);
2606 }
2607
2608 /* Set CC_DEST after a logical or direct flag setting operation. */
2609 static void expand_op_logic_cc(qOP *qop)
2610 {
2611 gen_op_mov32(QREG_CC_DEST, qop->args[0]);
2612 }
2613
2614 /* Set CC_SRC and CC_DEST after an arithmetic operation. */
2615 static void expand_op_update_cc_add(qOP *qop)
2616 {
2617 gen_op_mov32(QREG_CC_DEST, qop->args[0]);
2618 gen_op_mov32(QREG_CC_SRC, qop->args[1]);
2619 }
2620
2621 /* Update the X flag. */
2622 static void expand_op_update_xflag(qOP *qop)
2623 {
2624 int arg0;
2625 int arg1;
2626
2627 arg0 = qop->args[0];
2628 arg1 = qop->args[1];
2629 if (arg1 == QREG_NULL) {
2630 /* CC_X = arg0. */
2631 gen_op_mov32(QREG_CC_X, arg0);
2632 } else {
2633 /* CC_X = arg0 < (unsigned)arg1. */
2634 gen_op_set_ltu32(QREG_CC_X, arg0, arg1);
2635 }
2636 }
2637
2638 /* Set arg0 to the contents of the X flag. */
2639 static void expand_op_get_xflag(qOP *qop)
2640 {
2641 gen_op_mov32(qop->args[0], QREG_CC_X);
2642 }
2643
2644 /* Expand a shift by immediate. The ISA only allows shifts by 1-8, so we
2645 already know the shift is within range. */
2646 static inline void expand_shift_im(qOP *qop, int right, int arith)
2647 {
2648 int val;
2649 int reg;
2650 int tmp;
2651 int im;
2652
2653 reg = qop->args[0];
2654 im = qop->args[1];
2655 tmp = gen_im32(im);
2656 val = gen_new_qreg(QMODE_I32);
2657 gen_op_mov32(val, reg);
2658 gen_op_mov32(QREG_CC_DEST, val);
2659 gen_op_mov32(QREG_CC_SRC, tmp);
2660 if (right) {
2661 if (arith) {
2662 gen_op_sar32(reg, val, tmp);
2663 } else {
2664 gen_op_shr32(reg, val, tmp);
2665 }
2666 if (im == 1)
2667 tmp = QREG_NULL;
2668 else
2669 tmp = gen_im32(im - 1);
2670 } else {
2671 gen_op_shl32(reg, val, tmp);
2672 tmp = gen_im32(32 - im);
2673 }
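/* Move the last bit shifted out down to bit 0 of val and copy it into
   the X flag.  */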
2674 if (tmp != QREG_NULL)
2675 gen_op_shr32(val, val, tmp);
2676 gen_op_and32(QREG_CC_X, val, gen_im32(1));
2677 }
2678
2679 static void expand_op_shl_im_cc(qOP *qop)
2680 {
2681 expand_shift_im(qop, 0, 0);
2682 }
2683
2684 static void expand_op_shr_im_cc(qOP *qop)
2685 {
2686 expand_shift_im(qop, 1, 0);
2687 }
2688
2689 static void expand_op_sar_im_cc(qOP *qop)
2690 {
2691 expand_shift_im(qop, 1, 1);
2692 }
2693
2694 /* Expand a shift by register. */
2695 /* ??? This gives incorrect answers for shifts by 0 or >= 32 */
2696 static inline void expand_shift_reg(qOP *qop, int right, int arith)
2697 {
2698 int val;
2699 int reg;
2700 int shift;
2701 int tmp;
2702
2703 reg = qop->args[0];
2704 shift = qop->args[1];
2705 val = gen_new_qreg(QMODE_I32);
2706 gen_op_mov32(val, reg);
2707 gen_op_mov32(QREG_CC_DEST, val);
2708 gen_op_mov32(QREG_CC_SRC, shift);
2709 tmp = gen_new_qreg(QMODE_I32);
2710 if (right) {
2711 if (arith) {
2712 gen_op_sar32(reg, val, shift);
2713 } else {
2714 gen_op_shr32(reg, val, shift);
2715 }
2716 gen_op_sub32(tmp, shift, gen_im32(1));
2717 } else {
2718 gen_op_shl32(reg, val, shift);
2719 gen_op_sub32(tmp, gen_im32(31), shift);
2720 }
2721 gen_op_shl32(val, val, tmp);
2722 gen_op_and32(QREG_CC_X, val, gen_im32(1));
2723 }
2724
2725 static void expand_op_shl_cc(qOP *qop)
2726 {
2727 expand_shift_reg(qop, 0, 0);
2728 }
2729
2730 static void expand_op_shr_cc(qOP *qop)
2731 {
2732 expand_shift_reg(qop, 1, 0);
2733 }
2734
2735 static void expand_op_sar_cc(qOP *qop)
2736 {
2737 expand_shift_reg(qop, 1, 1);
2738 }
2739
2740 /* Set the Z flag to (arg0 & arg1) == 0. */
2741 static void expand_op_btest(qOP *qop)
2742 {
2743 int tmp;
2744 int l1;
2745
2746 l1 = gen_new_label();
2747 tmp = gen_new_qreg(QMODE_I32);
2748 gen_op_and32(tmp, qop->args[0], qop->args[1]);
2749 gen_op_and32(QREG_CC_DEST, QREG_CC_DEST, gen_im32(~(uint32_t)CCF_Z));
2750 gen_op_jmp_nz32(tmp, l1);
2751 gen_op_or32(QREG_CC_DEST, QREG_CC_DEST, gen_im32(CCF_Z));
2752 gen_op_label(l1);
2753 }
2754
2755 /* arg0 += arg1 + CC_X */
2756 static void expand_op_addx_cc(qOP *qop)
2757 {
2758 int arg0 = qop->args[0];
2759 int arg1 = qop->args[1];
2760 int l1, l2;
2761
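/* Add the operands, adding one more when X is set.  The carry out
   (new X) is then result <= arg1 when the extra one was added,
   result < arg1 otherwise, and CC_OP is set to match.  */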
2762 gen_op_add32 (arg0, arg0, arg1);
2763 l1 = gen_new_label();
2764 l2 = gen_new_label();
2765 gen_op_jmp_z32(QREG_CC_X, l1);
2766 gen_op_add32(arg0, arg0, gen_im32(1));
2767 gen_op_mov32(QREG_CC_OP, gen_im32(CC_OP_ADDX));
2768 gen_op_set_leu32(QREG_CC_X, arg0, arg1);
2769 gen_op_jmp(l2);
2770 gen_set_label(l1);
2771 gen_op_mov32(QREG_CC_OP, gen_im32(CC_OP_ADD));
2772 gen_op_set_ltu32(QREG_CC_X, arg0, arg1);
2773 gen_set_label(l2);
2774 }
2775
2776 /* arg0 -= arg1 + CC_X */
2777 static void expand_op_subx_cc(qOP *qop)
2778 {
2779 int arg0 = qop->args[0];
2780 int arg1 = qop->args[1];
2781 int l1, l2;
2782
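/* The borrow (new X) is computed from the original operands before
   arg1 is subtracted: arg0 <= arg1 when the extra one is taken off,
   arg0 < arg1 otherwise.  */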
2783 l1 = gen_new_label();
2784 l2 = gen_new_label();
2785 gen_op_jmp_z32(QREG_CC_X, l1);
2786 gen_op_set_leu32(QREG_CC_X, arg0, arg1);
2787 gen_op_sub32(arg0, arg0, gen_im32(1));
2788 gen_op_mov32(QREG_CC_OP, gen_im32(CC_OP_SUBX));
2789 gen_op_jmp(l2);
2790 gen_set_label(l1);
2791 gen_op_set_ltu32(QREG_CC_X, arg0, arg1);
2792 gen_op_mov32(QREG_CC_OP, gen_im32(CC_OP_SUB));
2793 gen_set_label(l2);
2794 gen_op_sub32 (arg0, arg0, arg1);
2795 }
2796
2797 /* Expand target specific ops to generic qops. */
2798 static void expand_target_qops(void)
2799 {
2800 qOP *qop;
2801 qOP *next;
2802 int c;
2803
2804 /* Copy the list of qops, expanding target specific ops as we go. */
2805 qop = gen_first_qop;
2806 gen_first_qop = NULL;
2807 gen_last_qop = NULL;
2808 for (; qop; qop = next) {
2809 c = qop->opcode;
2810 next = qop->next;
2811 if (c < FIRST_TARGET_OP) {
2812 qop->prev = gen_last_qop;
2813 qop->next = NULL;
2814 if (gen_last_qop)
2815 gen_last_qop->next = qop;
2816 else
2817 gen_first_qop = qop;
2818 gen_last_qop = qop;
2819 continue;
2820 }
2821 switch (c) {
2822 #define DEF(name, nargs, barrier) \
2823 case INDEX_op_##name: \
2824 expand_op_##name(qop); \
2825 break;
2826 #include "qop-target.def"
2827 #undef DEF
2828 default:
2829 cpu_abort(NULL, "Unexpanded target qop");
2830 }
2831 }
2832 }
2833
2834 /* ??? Implement this. */
2835 static void
2836 optimize_flags(void)
2837 {
2838 }
2839 #endif
2840
2841 /* generate intermediate code for basic block 'tb'. */
2842 static inline int
2843 gen_intermediate_code_internal(CPUState *env, TranslationBlock *tb,
2844 int search_pc)
2845 {
2846 DisasContext dc1, *dc = &dc1;
2847 uint16_t *gen_opc_end;
2848 int j, lj;
2849 target_ulong pc_start;
2850 int pc_offset;
2851 int last_cc_op;
2852
2853 /* generate intermediate code */
2854 pc_start = tb->pc;
2855
2856 dc->tb = tb;
2857
2858 gen_opc_ptr = gen_opc_buf;
2859 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
2860 gen_opparam_ptr = gen_opparam_buf;
2861
2862 dc->env = env;
2863 dc->is_jmp = DISAS_NEXT;
2864 dc->pc = pc_start;
2865 dc->cc_op = CC_OP_DYNAMIC;
2866 dc->singlestep_enabled = env->singlestep_enabled;
2867 dc->fpcr = env->fpcr;
2868 dc->user = (env->sr & SR_S) == 0;
2869 nb_gen_labels = 0;
2870 lj = -1;
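/* Translate one insn at a time until we hit a jump, exhaust the op
   buffer, single-step, or have translated nearly a page of code.  */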
2871 do {
2872 free_qreg = 0;
2873 pc_offset = dc->pc - pc_start;
2874 gen_throws_exception = NULL;
2875 if (env->nb_breakpoints > 0) {
2876 for(j = 0; j < env->nb_breakpoints; j++) {
2877 if (env->breakpoints[j] == dc->pc) {
2878 gen_exception(dc, dc->pc, EXCP_DEBUG);
2879 dc->is_jmp = DISAS_JUMP;
2880 break;
2881 }
2882 }
2883 if (dc->is_jmp)
2884 break;
2885 }
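/* In search_pc mode, record the guest PC at which each insn's ops begin
   so the PC can be recovered when retranslating after an exception.  */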
2886 if (search_pc) {
2887 j = gen_opc_ptr - gen_opc_buf;
2888 if (lj < j) {
2889 lj++;
2890 while (lj < j)
2891 gen_opc_instr_start[lj++] = 0;
2892 }
2893 gen_opc_pc[lj] = dc->pc;
2894 gen_opc_instr_start[lj] = 1;
2895 }
2896 last_cc_op = dc->cc_op;
2897 dc->insn_pc = dc->pc;
2898 disas_m68k_insn(env, dc);
2899 } while (!dc->is_jmp && gen_opc_ptr < gen_opc_end &&
2900 !env->singlestep_enabled &&
2901 (pc_offset) < (TARGET_PAGE_SIZE - 32));
2902
2903 if (__builtin_expect(env->singlestep_enabled, 0)) {
2904 /* Make sure the pc is updated, and raise a debug exception. */
2905 if (!dc->is_jmp) {
2906 gen_flush_cc_op(dc);
2907 gen_op_mov32(QREG_PC, gen_im32((long)dc->pc));
2908 }
2909 gen_op_raise_exception(EXCP_DEBUG);
2910 } else {
2911 switch(dc->is_jmp) {
2912 case DISAS_NEXT:
2913 gen_flush_cc_op(dc);
2914 gen_jmp_tb(dc, 0, dc->pc);
2915 break;
2916 default:
2917 case DISAS_JUMP:
2918 case DISAS_UPDATE:
2919 gen_flush_cc_op(dc);
2920 /* indicate that the hash table must be used to find the next TB */
2921 gen_op_mov32(QREG_T0, gen_im32(0));
2922 gen_op_exit_tb();
2923 break;
2924 case DISAS_TB_JUMP:
2925 /* nothing more to generate */
2926 break;
2927 }
2928 }
2929 *gen_opc_ptr = INDEX_op_end;
2930
2931 #ifdef DEBUG_DISAS
2932 if (loglevel & CPU_LOG_TB_IN_ASM) {
2933 fprintf(logfile, "----------------\n");
2934 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
2935 target_disas(logfile, pc_start, dc->pc - pc_start, 0);
2936 fprintf(logfile, "\n");
2937 if (loglevel & (CPU_LOG_TB_OP)) {
2938 fprintf(logfile, "OP:\n");
2939 dump_ops(gen_opc_buf, gen_opparam_buf);
2940 fprintf(logfile, "\n");
2941 }
2942 }
2943 #endif
2944 if (search_pc) {
2945 j = gen_opc_ptr - gen_opc_buf;
2946 lj++;
2947 while (lj <= j)
2948 gen_opc_instr_start[lj++] = 0;
2949 tb->size = 0;
2950 } else {
2951 tb->size = dc->pc - pc_start;
2952 }
2953
2954 //optimize_flags();
2955 //expand_target_qops();
2956 return 0;
2957 }
2958
2959 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
2960 {
2961 return gen_intermediate_code_internal(env, tb, 0);
2962 }
2963
2964 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
2965 {
2966 return gen_intermediate_code_internal(env, tb, 1);
2967 }
2968
2969 void cpu_reset(CPUM68KState *env)
2970 {
2971 memset(env, 0, offsetof(CPUM68KState, breakpoints));
2972 #if !defined (CONFIG_USER_ONLY)
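/* Start in supervisor mode with all interrupts masked.  */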
2973 env->sr = 0x2700;
2974 #endif
2975 /* ??? FP regs should be initialized to NaN. */
2976 env->cc_op = CC_OP_FLAGS;
2977 /* TODO: We should set PC from the interrupt vector. */
2978 env->pc = 0;
2979 tlb_flush(env, 1);
2980 }
2981
2982 CPUM68KState *cpu_m68k_init(void)
2983 {
2984 CPUM68KState *env;
2985
2986 env = malloc(sizeof(CPUM68KState));
2987 if (!env)
2988 return NULL;
2989 cpu_exec_init(env);
2990
2991 cpu_reset(env);
2992 return env;
2993 }
2994
2995 void cpu_m68k_close(CPUM68KState *env)
2996 {
2997 free(env);
2998 }
2999
3000 void cpu_dump_state(CPUState *env, FILE *f,
3001 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
3002 int flags)
3003 {
3004 int i;
3005 uint16_t sr;
3006 CPU_DoubleU u;
3007 for (i = 0; i < 8; i++)
3008 {
3009 u.d = env->fregs[i];
3010 cpu_fprintf (f, "D%d = %08x A%d = %08x F%d = %08x%08x (%12g)\n",
3011 i, env->dregs[i], i, env->aregs[i],
3012 i, u.l.upper, u.l.lower, u.d);
3013 }
3014 cpu_fprintf (f, "PC = %08x ", env->pc);
3015 sr = env->sr;
3016 cpu_fprintf (f, "SR = %04x %c%c%c%c%c ", sr, (sr & 0x10) ? 'X' : '-',
3017 (sr & CCF_N) ? 'N' : '-', (sr & CCF_Z) ? 'Z' : '-',
3018 (sr & CCF_V) ? 'V' : '-', (sr & CCF_C) ? 'C' : '-');
3019 cpu_fprintf (f, "FPRESULT = %12g\n", env->fp_result);
3020 }
3021