1 /*
2    SPARC translation
3
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2 of the License, or (at your option) any later version.
11
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
26
27 #include "cpu.h"
28 #include "disas/disas.h"
29 #include "exec/helper-proto.h"
30 #include "tcg-op.h"
31 #include "exec/cpu_ldst.h"
32
33 #include "exec/helper-gen.h"
34
35 #include "trace-tcg.h"
36
37
38 #define DEBUG_DISAS
39
40 #define DYNAMIC_PC  1 /* dynamic pc value */
41 #define JUMP_PC     2 /* dynamic pc value which takes only one of the two
42                          values recorded in jump_pc[0]/jump_pc[1] */
43
44 /* global register indexes */
45 static TCGv_ptr cpu_env, cpu_regwptr;
46 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
47 static TCGv_i32 cpu_cc_op;
48 static TCGv_i32 cpu_psr;
49 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
50 static TCGv cpu_y;
51 #ifndef CONFIG_USER_ONLY
52 static TCGv cpu_tbr;
53 #endif
54 static TCGv cpu_cond;
55 #ifdef TARGET_SPARC64
56 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
57 static TCGv cpu_gsr;
58 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
59 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
60 static TCGv_i32 cpu_softint;
61 #else
62 static TCGv cpu_wim;
63 #endif
64 /* Floating point registers */
65 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
66
67 static target_ulong gen_opc_npc[OPC_BUF_SIZE];
68 static target_ulong gen_opc_jump_pc[2];
69
70 #include "exec/gen-icount.h"
71
72 typedef struct DisasContext {
73     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
74     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
75     target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
76     int is_br;
77     int mem_idx;
78     int fpu_enabled;
79     int address_mask_32bit;
80     int singlestep;
81     uint32_t cc_op;  /* current CC operation */
82     struct TranslationBlock *tb;
83     sparc_def_t *def;
84     TCGv_i32 t32[3];
85     TCGv ttl[5];
86     int n_t32;
87     int n_ttl;
88 } DisasContext;
89
90 typedef struct {
91     TCGCond cond;
92     bool is_bool;
93     bool g1, g2;
94     TCGv c1, c2;
95 } DisasCompare;
96
97 // This macro uses non-native bit order, i.e. bit 0 is the MSB (bit 31)
98 #define GET_FIELD(X, FROM, TO)                                  \
99     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
100
101 // This macro uses the order in the manuals, i.e. bit 0 is 2^0 (the LSB)
102 #define GET_FIELD_SP(X, FROM, TO)               \
103     GET_FIELD(X, 31 - (TO), 31 - (FROM))
104
105 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
106 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
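/* For example, GET_FIELD(insn, 3, 6) and GET_FIELD_SP(insn, 25, 28) both
   extract the same four bits of the instruction word (bits 28..25 in the
   LSB-0 numbering used by the SPARC manuals). */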
107
108 #ifdef TARGET_SPARC64
109 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
110 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
111 #else
112 #define DFPREG(r) (r & 0x1e)
113 #define QFPREG(r) (r & 0x1c)
114 #endif
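/* On SPARC64 the 5-bit register field encodes double/quad registers with its
   low bit acting as bit 5 of the register number, so odd encodings select the
   upper bank (%f32..%f62); e.g. DFPREG(3) == 34.  On 32-bit SPARC the number
   is simply aligned to an even (double) or multiple-of-4 (quad) boundary. */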
115
116 #define UA2005_HTRAP_MASK 0xff
117 #define V8_TRAP_MASK 0x7f
118
119 static int sign_extend(int x, int len)
120 {
121     len = 32 - len;
122     return (x << len) >> len;
123 }
124
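/* The "i" bit (bit 13) of an instruction: i=1 selects an immediate second
   operand, i=0 a register operand; for alternate-space accesses it also
   selects where the ASI comes from (see gen_get_asi below). */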
125 #define IS_IMM (insn & (1<<13))
126
127 static inline TCGv_i32 get_temp_i32(DisasContext *dc)
128 {
129     TCGv_i32 t;
130     assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
131     dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
132     return t;
133 }
134
135 static inline TCGv get_temp_tl(DisasContext *dc)
136 {
137     TCGv t;
138     assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
139     dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
140     return t;
141 }
142
143 static inline void gen_update_fprs_dirty(int rd)
144 {
145 #if defined(TARGET_SPARC64)
146     tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
147 #endif
148 }
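/* Bit 0 is FPRS.DL (lower half, %f0..%f31, dirty) and bit 1 is FPRS.DU
   (upper half, %f32..%f62, dirty); only SPARC64 tracks these bits. */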
149
150 /* floating point registers moves */
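/* Single-precision registers are packed two per 64-bit cpu_fpr element:
   the even-numbered register lives in the high 32 bits and the odd-numbered
   one in the low 32 bits of cpu_fpr[n / 2]. */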
151 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
152 {
153 #if TCG_TARGET_REG_BITS == 32
154     if (src & 1) {
155         return TCGV_LOW(cpu_fpr[src / 2]);
156     } else {
157         return TCGV_HIGH(cpu_fpr[src / 2]);
158     }
159 #else
160     if (src & 1) {
161         return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
162     } else {
163         TCGv_i32 ret = get_temp_i32(dc);
164         TCGv_i64 t = tcg_temp_new_i64();
165
166         tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
167         tcg_gen_trunc_i64_i32(ret, t);
168         tcg_temp_free_i64(t);
169
170         return ret;
171     }
172 #endif
173 }
174
175 static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
176 {
177 #if TCG_TARGET_REG_BITS == 32
178     if (dst & 1) {
179         tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
180     } else {
181         tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
182     }
183 #else
184     TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
185     tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
186                         (dst & 1 ? 0 : 32), 32);
187 #endif
188     gen_update_fprs_dirty(dst);
189 }
190
191 static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
192 {
193     return get_temp_i32(dc);
194 }
195
196 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
197 {
198     src = DFPREG(src);
199     return cpu_fpr[src / 2];
200 }
201
202 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
203 {
204     dst = DFPREG(dst);
205     tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
206     gen_update_fprs_dirty(dst);
207 }
208
209 static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
210 {
211     return cpu_fpr[DFPREG(dst) / 2];
212 }
213
214 static void gen_op_load_fpr_QT0(unsigned int src)
215 {
216     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
217                    offsetof(CPU_QuadU, ll.upper));
218     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
219                    offsetof(CPU_QuadU, ll.lower));
220 }
221
222 static void gen_op_load_fpr_QT1(unsigned int src)
223 {
224     tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
225                    offsetof(CPU_QuadU, ll.upper));
226     tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
227                    offsetof(CPU_QuadU, ll.lower));
228 }
229
230 static void gen_op_store_QT0_fpr(unsigned int dst)
231 {
232     tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
233                    offsetof(CPU_QuadU, ll.upper));
234     tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
235                    offsetof(CPU_QuadU, ll.lower));
236 }
237
238 #ifdef TARGET_SPARC64
239 static void gen_move_Q(unsigned int rd, unsigned int rs)
240 {
241     rd = QFPREG(rd);
242     rs = QFPREG(rs);
243
244     tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
245     tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
246     gen_update_fprs_dirty(rd);
247 }
248 #endif
249
250 /* moves */
251 #ifdef CONFIG_USER_ONLY
252 #define supervisor(dc) 0
253 #ifdef TARGET_SPARC64
254 #define hypervisor(dc) 0
255 #endif
256 #else
257 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
258 #ifdef TARGET_SPARC64
259 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
260 #else
261 #endif
262 #endif
263
264 #ifdef TARGET_SPARC64
265 #ifndef TARGET_ABI32
266 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
267 #else
268 #define AM_CHECK(dc) (1)
269 #endif
270 #endif
271
272 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
273 {
274 #ifdef TARGET_SPARC64
275     if (AM_CHECK(dc))
276         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
277 #endif
278 }
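/* With PSTATE.AM set (or always for the 32-bit ABI), 64-bit CPUs truncate
   virtual addresses to 32 bits; gen_address_mask applies that truncation. */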
279
280 static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
281 {
282     if (reg == 0 || reg >= 8) {
283         TCGv t = get_temp_tl(dc);
284         if (reg == 0) {
285             tcg_gen_movi_tl(t, 0);
286         } else {
287             tcg_gen_ld_tl(t, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
288         }
289         return t;
290     } else {
291         return cpu_gregs[reg];
292     }
293 }
294
295 static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
296 {
297     if (reg > 0) {
298         if (reg < 8) {
299             tcg_gen_mov_tl(cpu_gregs[reg], v);
300         } else {
301             tcg_gen_st_tl(v, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
302         }
303     }
304 }
305
306 static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
307 {
308     if (reg == 0 || reg >= 8) {
309         return get_temp_tl(dc);
310     } else {
311         return cpu_gregs[reg];
312     }
313 }
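/* %g0 is hard-wired to zero: loads of it produce a zeroed temporary and
   stores to it are discarded.  %g1..%g7 live in TCG globals (cpu_gregs),
   while the current window's %o, %l and %i registers are reached through
   cpu_regwptr. */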
314
315 static inline void gen_goto_tb(DisasContext *s, int tb_num,
316                                target_ulong pc, target_ulong npc)
317 {
318     TranslationBlock *tb;
319
320     tb = s->tb;
321     if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
322         (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
323         !s->singlestep)  {
324         /* jump to same page: we can use a direct jump */
325         tcg_gen_goto_tb(tb_num);
326         tcg_gen_movi_tl(cpu_pc, pc);
327         tcg_gen_movi_tl(cpu_npc, npc);
328         tcg_gen_exit_tb((uintptr_t)tb + tb_num);
329     } else {
330         /* jump to another page: currently not optimized */
331         tcg_gen_movi_tl(cpu_pc, pc);
332         tcg_gen_movi_tl(cpu_npc, npc);
333         tcg_gen_exit_tb(0);
334     }
335 }
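/* When both pc and npc stay on the translation block's page (and we are not
   single-stepping), the TB can be chained directly via goto_tb/exit_tb(tb + n);
   otherwise we exit with tcg_gen_exit_tb(0) so the main loop looks up the
   next TB at run time. */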
336
337 // XXX suboptimal
338 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
339 {
340     tcg_gen_extu_i32_tl(reg, src);
341     tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
342     tcg_gen_andi_tl(reg, reg, 0x1);
343 }
344
345 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
346 {
347     tcg_gen_extu_i32_tl(reg, src);
348     tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
349     tcg_gen_andi_tl(reg, reg, 0x1);
350 }
351
352 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
353 {
354     tcg_gen_extu_i32_tl(reg, src);
355     tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
356     tcg_gen_andi_tl(reg, reg, 0x1);
357 }
358
359 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
360 {
361     tcg_gen_extu_i32_tl(reg, src);
362     tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
363     tcg_gen_andi_tl(reg, reg, 0x1);
364 }
365
366 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
367 {
368     tcg_gen_mov_tl(cpu_cc_src, src1);
369     tcg_gen_mov_tl(cpu_cc_src2, src2);
370     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
371     tcg_gen_mov_tl(dst, cpu_cc_dst);
372 }
373
374 static TCGv_i32 gen_add32_carry32(void)
375 {
376     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
377
378     /* Carry is computed from a previous add: (dst < src)  */
379 #if TARGET_LONG_BITS == 64
380     cc_src1_32 = tcg_temp_new_i32();
381     cc_src2_32 = tcg_temp_new_i32();
382     tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
383     tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
384 #else
385     cc_src1_32 = cpu_cc_dst;
386     cc_src2_32 = cpu_cc_src;
387 #endif
388
389     carry_32 = tcg_temp_new_i32();
390     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
391
392 #if TARGET_LONG_BITS == 64
393     tcg_temp_free_i32(cc_src1_32);
394     tcg_temp_free_i32(cc_src2_32);
395 #endif
396
397     return carry_32;
398 }
399
400 static TCGv_i32 gen_sub32_carry32(void)
401 {
402     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
403
404     /* Carry is computed from a previous borrow: (src1 < src2)  */
405 #if TARGET_LONG_BITS == 64
406     cc_src1_32 = tcg_temp_new_i32();
407     cc_src2_32 = tcg_temp_new_i32();
408     tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
409     tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
410 #else
411     cc_src1_32 = cpu_cc_src;
412     cc_src2_32 = cpu_cc_src2;
413 #endif
414
415     carry_32 = tcg_temp_new_i32();
416     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
417
418 #if TARGET_LONG_BITS == 64
419     tcg_temp_free_i32(cc_src1_32);
420     tcg_temp_free_i32(cc_src2_32);
421 #endif
422
423     return carry_32;
424 }
425
426 static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
427                             TCGv src2, int update_cc)
428 {
429     TCGv_i32 carry_32;
430     TCGv carry;
431
432     switch (dc->cc_op) {
433     case CC_OP_DIV:
434     case CC_OP_LOGIC:
435         /* Carry is known to be zero.  Fall back to plain ADD.  */
436         if (update_cc) {
437             gen_op_add_cc(dst, src1, src2);
438         } else {
439             tcg_gen_add_tl(dst, src1, src2);
440         }
441         return;
442
443     case CC_OP_ADD:
444     case CC_OP_TADD:
445     case CC_OP_TADDTV:
446         if (TARGET_LONG_BITS == 32) {
447             /* We can re-use the host's hardware carry generation by using
448                an ADD2 opcode.  We discard the low part of the output.
449                Ideally we'd combine this operation with the add that
450                generated the carry in the first place.  */
451             carry = tcg_temp_new();
452             tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
453             tcg_temp_free(carry);
454             goto add_done;
455         }
456         carry_32 = gen_add32_carry32();
457         break;
458
459     case CC_OP_SUB:
460     case CC_OP_TSUB:
461     case CC_OP_TSUBTV:
462         carry_32 = gen_sub32_carry32();
463         break;
464
465     default:
466         /* We need external help to produce the carry.  */
467         carry_32 = tcg_temp_new_i32();
468         gen_helper_compute_C_icc(carry_32, cpu_env);
469         break;
470     }
471
472 #if TARGET_LONG_BITS == 64
473     carry = tcg_temp_new();
474     tcg_gen_extu_i32_i64(carry, carry_32);
475 #else
476     carry = carry_32;
477 #endif
478
479     tcg_gen_add_tl(dst, src1, src2);
480     tcg_gen_add_tl(dst, dst, carry);
481
482     tcg_temp_free_i32(carry_32);
483 #if TARGET_LONG_BITS == 64
484     tcg_temp_free(carry);
485 #endif
486
487  add_done:
488     if (update_cc) {
489         tcg_gen_mov_tl(cpu_cc_src, src1);
490         tcg_gen_mov_tl(cpu_cc_src2, src2);
491         tcg_gen_mov_tl(cpu_cc_dst, dst);
492         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
493         dc->cc_op = CC_OP_ADDX;
494     }
495 }
496
497 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
498 {
499     tcg_gen_mov_tl(cpu_cc_src, src1);
500     tcg_gen_mov_tl(cpu_cc_src2, src2);
501     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
502     tcg_gen_mov_tl(dst, cpu_cc_dst);
503 }
504
505 static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
506                             TCGv src2, int update_cc)
507 {
508     TCGv_i32 carry_32;
509     TCGv carry;
510
511     switch (dc->cc_op) {
512     case CC_OP_DIV:
513     case CC_OP_LOGIC:
514         /* Carry is known to be zero.  Fall back to plain SUB.  */
515         if (update_cc) {
516             gen_op_sub_cc(dst, src1, src2);
517         } else {
518             tcg_gen_sub_tl(dst, src1, src2);
519         }
520         return;
521
522     case CC_OP_ADD:
523     case CC_OP_TADD:
524     case CC_OP_TADDTV:
525         carry_32 = gen_add32_carry32();
526         break;
527
528     case CC_OP_SUB:
529     case CC_OP_TSUB:
530     case CC_OP_TSUBTV:
531         if (TARGET_LONG_BITS == 32) {
532             /* We can re-use the host's hardware carry generation by using
533                a SUB2 opcode.  We discard the low part of the output.
534            Ideally we'd combine this operation with the subtraction that
535                generated the carry in the first place.  */
536             carry = tcg_temp_new();
537             tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
538             tcg_temp_free(carry);
539             goto sub_done;
540         }
541         carry_32 = gen_sub32_carry32();
542         break;
543
544     default:
545         /* We need external help to produce the carry.  */
546         carry_32 = tcg_temp_new_i32();
547         gen_helper_compute_C_icc(carry_32, cpu_env);
548         break;
549     }
550
551 #if TARGET_LONG_BITS == 64
552     carry = tcg_temp_new();
553     tcg_gen_extu_i32_i64(carry, carry_32);
554 #else
555     carry = carry_32;
556 #endif
557
558     tcg_gen_sub_tl(dst, src1, src2);
559     tcg_gen_sub_tl(dst, dst, carry);
560
561     tcg_temp_free_i32(carry_32);
562 #if TARGET_LONG_BITS == 64
563     tcg_temp_free(carry);
564 #endif
565
566  sub_done:
567     if (update_cc) {
568         tcg_gen_mov_tl(cpu_cc_src, src1);
569         tcg_gen_mov_tl(cpu_cc_src2, src2);
570         tcg_gen_mov_tl(cpu_cc_dst, dst);
571         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
572         dc->cc_op = CC_OP_SUBX;
573     }
574 }
575
576 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
577 {
578     TCGv r_temp, zero, t0;
579
580     r_temp = tcg_temp_new();
581     t0 = tcg_temp_new();
582
583     /* old op:
584     if (!(env->y & 1))
585         T1 = 0;
586     */
587     zero = tcg_const_tl(0);
588     tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
589     tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
590     tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
591     tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
592                        zero, cpu_cc_src2);
593     tcg_temp_free(zero);
594
595     // b2 = T0 & 1;
596     // env->y = (b2 << 31) | (env->y >> 1);
597     tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
598     tcg_gen_shli_tl(r_temp, r_temp, 31);
599     tcg_gen_shri_tl(t0, cpu_y, 1);
600     tcg_gen_andi_tl(t0, t0, 0x7fffffff);
601     tcg_gen_or_tl(t0, t0, r_temp);
602     tcg_gen_andi_tl(cpu_y, t0, 0xffffffff);
603
604     // b1 = N ^ V;
605     gen_mov_reg_N(t0, cpu_psr);
606     gen_mov_reg_V(r_temp, cpu_psr);
607     tcg_gen_xor_tl(t0, t0, r_temp);
608     tcg_temp_free(r_temp);
609
610     // T0 = (b1 << 31) | (T0 >> 1);
611     // src1 = T0;
612     tcg_gen_shli_tl(t0, t0, 31);
613     tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
614     tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
615     tcg_temp_free(t0);
616
617     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
618
619     tcg_gen_mov_tl(dst, cpu_cc_dst);
620 }
621
622 static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
623 {
624 #if TARGET_LONG_BITS == 32
625     if (sign_ext) {
626         tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
627     } else {
628         tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
629     }
630 #else
631     TCGv t0 = tcg_temp_new_i64();
632     TCGv t1 = tcg_temp_new_i64();
633
634     if (sign_ext) {
635         tcg_gen_ext32s_i64(t0, src1);
636         tcg_gen_ext32s_i64(t1, src2);
637     } else {
638         tcg_gen_ext32u_i64(t0, src1);
639         tcg_gen_ext32u_i64(t1, src2);
640     }
641
642     tcg_gen_mul_i64(dst, t0, t1);
643     tcg_temp_free(t0);
644     tcg_temp_free(t1);
645
646     tcg_gen_shri_i64(cpu_y, dst, 32);
647 #endif
648 }
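/* UMUL/SMUL produce a 64-bit product: the low 32 bits (the full 64-bit value
   on SPARC64) go to the destination register, and the high 32 bits are copied
   into %y, which is what the two variants above model. */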
649
650 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
651 {
652     /* zero-extend truncated operands before multiplication */
653     gen_op_multiply(dst, src1, src2, 0);
654 }
655
656 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
657 {
658     /* sign-extend truncated operands before multiplication */
659     gen_op_multiply(dst, src1, src2, 1);
660 }
661
662 // 1
663 static inline void gen_op_eval_ba(TCGv dst)
664 {
665     tcg_gen_movi_tl(dst, 1);
666 }
667
668 // Z
669 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
670 {
671     gen_mov_reg_Z(dst, src);
672 }
673
674 // Z | (N ^ V)
675 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
676 {
677     TCGv t0 = tcg_temp_new();
678     gen_mov_reg_N(t0, src);
679     gen_mov_reg_V(dst, src);
680     tcg_gen_xor_tl(dst, dst, t0);
681     gen_mov_reg_Z(t0, src);
682     tcg_gen_or_tl(dst, dst, t0);
683     tcg_temp_free(t0);
684 }
685
686 // N ^ V
687 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
688 {
689     TCGv t0 = tcg_temp_new();
690     gen_mov_reg_V(t0, src);
691     gen_mov_reg_N(dst, src);
692     tcg_gen_xor_tl(dst, dst, t0);
693     tcg_temp_free(t0);
694 }
695
696 // C | Z
697 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
698 {
699     TCGv t0 = tcg_temp_new();
700     gen_mov_reg_Z(t0, src);
701     gen_mov_reg_C(dst, src);
702     tcg_gen_or_tl(dst, dst, t0);
703     tcg_temp_free(t0);
704 }
705
706 // C
707 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
708 {
709     gen_mov_reg_C(dst, src);
710 }
711
712 // V
713 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
714 {
715     gen_mov_reg_V(dst, src);
716 }
717
718 // 0
719 static inline void gen_op_eval_bn(TCGv dst)
720 {
721     tcg_gen_movi_tl(dst, 0);
722 }
723
724 // N
725 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
726 {
727     gen_mov_reg_N(dst, src);
728 }
729
730 // !Z
731 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
732 {
733     gen_mov_reg_Z(dst, src);
734     tcg_gen_xori_tl(dst, dst, 0x1);
735 }
736
737 // !(Z | (N ^ V))
738 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
739 {
740     gen_op_eval_ble(dst, src);
741     tcg_gen_xori_tl(dst, dst, 0x1);
742 }
743
744 // !(N ^ V)
745 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
746 {
747     gen_op_eval_bl(dst, src);
748     tcg_gen_xori_tl(dst, dst, 0x1);
749 }
750
751 // !(C | Z)
752 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
753 {
754     gen_op_eval_bleu(dst, src);
755     tcg_gen_xori_tl(dst, dst, 0x1);
756 }
757
758 // !C
759 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
760 {
761     gen_mov_reg_C(dst, src);
762     tcg_gen_xori_tl(dst, dst, 0x1);
763 }
764
765 // !N
766 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
767 {
768     gen_mov_reg_N(dst, src);
769     tcg_gen_xori_tl(dst, dst, 0x1);
770 }
771
772 // !V
773 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
774 {
775     gen_mov_reg_V(dst, src);
776     tcg_gen_xori_tl(dst, dst, 0x1);
777 }
778
779 /*
780   FPSR bit field FCC1 | FCC0:
781    0 =
782    1 <
783    2 >
784    3 unordered
785 */
786 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
787                                     unsigned int fcc_offset)
788 {
789     tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
790     tcg_gen_andi_tl(reg, reg, 0x1);
791 }
792
793 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
794                                     unsigned int fcc_offset)
795 {
796     tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
797     tcg_gen_andi_tl(reg, reg, 0x1);
798 }
799
800 // !0: FCC0 | FCC1
801 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
802                                     unsigned int fcc_offset)
803 {
804     TCGv t0 = tcg_temp_new();
805     gen_mov_reg_FCC0(dst, src, fcc_offset);
806     gen_mov_reg_FCC1(t0, src, fcc_offset);
807     tcg_gen_or_tl(dst, dst, t0);
808     tcg_temp_free(t0);
809 }
810
811 // 1 or 2: FCC0 ^ FCC1
812 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
813                                     unsigned int fcc_offset)
814 {
815     TCGv t0 = tcg_temp_new();
816     gen_mov_reg_FCC0(dst, src, fcc_offset);
817     gen_mov_reg_FCC1(t0, src, fcc_offset);
818     tcg_gen_xor_tl(dst, dst, t0);
819     tcg_temp_free(t0);
820 }
821
822 // 1 or 3: FCC0
823 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
824                                     unsigned int fcc_offset)
825 {
826     gen_mov_reg_FCC0(dst, src, fcc_offset);
827 }
828
829 // 1: FCC0 & !FCC1
830 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
831                                     unsigned int fcc_offset)
832 {
833     TCGv t0 = tcg_temp_new();
834     gen_mov_reg_FCC0(dst, src, fcc_offset);
835     gen_mov_reg_FCC1(t0, src, fcc_offset);
836     tcg_gen_andc_tl(dst, dst, t0);
837     tcg_temp_free(t0);
838 }
839
840 // 2 or 3: FCC1
841 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
842                                     unsigned int fcc_offset)
843 {
844     gen_mov_reg_FCC1(dst, src, fcc_offset);
845 }
846
847 // 2: !FCC0 & FCC1
848 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
849                                     unsigned int fcc_offset)
850 {
851     TCGv t0 = tcg_temp_new();
852     gen_mov_reg_FCC0(dst, src, fcc_offset);
853     gen_mov_reg_FCC1(t0, src, fcc_offset);
854     tcg_gen_andc_tl(dst, t0, dst);
855     tcg_temp_free(t0);
856 }
857
858 // 3: FCC0 & FCC1
859 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
860                                     unsigned int fcc_offset)
861 {
862     TCGv t0 = tcg_temp_new();
863     gen_mov_reg_FCC0(dst, src, fcc_offset);
864     gen_mov_reg_FCC1(t0, src, fcc_offset);
865     tcg_gen_and_tl(dst, dst, t0);
866     tcg_temp_free(t0);
867 }
868
869 // 0: !(FCC0 | FCC1)
870 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
871                                     unsigned int fcc_offset)
872 {
873     TCGv t0 = tcg_temp_new();
874     gen_mov_reg_FCC0(dst, src, fcc_offset);
875     gen_mov_reg_FCC1(t0, src, fcc_offset);
876     tcg_gen_or_tl(dst, dst, t0);
877     tcg_gen_xori_tl(dst, dst, 0x1);
878     tcg_temp_free(t0);
879 }
880
881 // 0 or 3: !(FCC0 ^ FCC1)
882 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
883                                     unsigned int fcc_offset)
884 {
885     TCGv t0 = tcg_temp_new();
886     gen_mov_reg_FCC0(dst, src, fcc_offset);
887     gen_mov_reg_FCC1(t0, src, fcc_offset);
888     tcg_gen_xor_tl(dst, dst, t0);
889     tcg_gen_xori_tl(dst, dst, 0x1);
890     tcg_temp_free(t0);
891 }
892
893 // 0 or 2: !FCC0
894 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
895                                     unsigned int fcc_offset)
896 {
897     gen_mov_reg_FCC0(dst, src, fcc_offset);
898     tcg_gen_xori_tl(dst, dst, 0x1);
899 }
900
901 // !1: !(FCC0 & !FCC1)
902 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
903                                     unsigned int fcc_offset)
904 {
905     TCGv t0 = tcg_temp_new();
906     gen_mov_reg_FCC0(dst, src, fcc_offset);
907     gen_mov_reg_FCC1(t0, src, fcc_offset);
908     tcg_gen_andc_tl(dst, dst, t0);
909     tcg_gen_xori_tl(dst, dst, 0x1);
910     tcg_temp_free(t0);
911 }
912
913 // 0 or 1: !FCC1
914 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
915                                     unsigned int fcc_offset)
916 {
917     gen_mov_reg_FCC1(dst, src, fcc_offset);
918     tcg_gen_xori_tl(dst, dst, 0x1);
919 }
920
921 // !2: !(!FCC0 & FCC1)
922 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
923                                     unsigned int fcc_offset)
924 {
925     TCGv t0 = tcg_temp_new();
926     gen_mov_reg_FCC0(dst, src, fcc_offset);
927     gen_mov_reg_FCC1(t0, src, fcc_offset);
928     tcg_gen_andc_tl(dst, t0, dst);
929     tcg_gen_xori_tl(dst, dst, 0x1);
930     tcg_temp_free(t0);
931 }
932
933 // !3: !(FCC0 & FCC1)
934 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
935                                     unsigned int fcc_offset)
936 {
937     TCGv t0 = tcg_temp_new();
938     gen_mov_reg_FCC0(dst, src, fcc_offset);
939     gen_mov_reg_FCC1(t0, src, fcc_offset);
940     tcg_gen_and_tl(dst, dst, t0);
941     tcg_gen_xori_tl(dst, dst, 0x1);
942     tcg_temp_free(t0);
943 }
944
945 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
946                                target_ulong pc2, TCGv r_cond)
947 {
948     TCGLabel *l1 = gen_new_label();
949
950     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
951
952     gen_goto_tb(dc, 0, pc1, pc1 + 4);
953
954     gen_set_label(l1);
955     gen_goto_tb(dc, 1, pc2, pc2 + 4);
956 }
957
958 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
959                                 target_ulong pc2, TCGv r_cond)
960 {
961     TCGLabel *l1 = gen_new_label();
962
963     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
964
965     gen_goto_tb(dc, 0, pc2, pc1);
966
967     gen_set_label(l1);
968     gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
969 }
970
971 static inline void gen_generic_branch(DisasContext *dc)
972 {
973     TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
974     TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
975     TCGv zero = tcg_const_tl(0);
976
977     tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
978
979     tcg_temp_free(npc0);
980     tcg_temp_free(npc1);
981     tcg_temp_free(zero);
982 }
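/* Resolve a conditional delayed branch whose direction is only known at run
   time: select between the two candidate npc values with a movcond on the
   previously computed cpu_cond. */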
983
984 /* call this function before using the condition register as it may
985    have been set for a jump */
986 static inline void flush_cond(DisasContext *dc)
987 {
988     if (dc->npc == JUMP_PC) {
989         gen_generic_branch(dc);
990         dc->npc = DYNAMIC_PC;
991     }
992 }
993
994 static inline void save_npc(DisasContext *dc)
995 {
996     if (dc->npc == JUMP_PC) {
997         gen_generic_branch(dc);
998         dc->npc = DYNAMIC_PC;
999     } else if (dc->npc != DYNAMIC_PC) {
1000         tcg_gen_movi_tl(cpu_npc, dc->npc);
1001     }
1002 }
1003
1004 static inline void update_psr(DisasContext *dc)
1005 {
1006     if (dc->cc_op != CC_OP_FLAGS) {
1007         dc->cc_op = CC_OP_FLAGS;
1008         gen_helper_compute_psr(cpu_env);
1009     }
1010 }
1011
1012 static inline void save_state(DisasContext *dc)
1013 {
1014     tcg_gen_movi_tl(cpu_pc, dc->pc);
1015     save_npc(dc);
1016 }
1017
1018 static inline void gen_mov_pc_npc(DisasContext *dc)
1019 {
1020     if (dc->npc == JUMP_PC) {
1021         gen_generic_branch(dc);
1022         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1023         dc->pc = DYNAMIC_PC;
1024     } else if (dc->npc == DYNAMIC_PC) {
1025         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1026         dc->pc = DYNAMIC_PC;
1027     } else {
1028         dc->pc = dc->npc;
1029     }
1030 }
1031
1032 static inline void gen_op_next_insn(void)
1033 {
1034     tcg_gen_mov_tl(cpu_pc, cpu_npc);
1035     tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1036 }
1037
1038 static void free_compare(DisasCompare *cmp)
1039 {
1040     if (!cmp->g1) {
1041         tcg_temp_free(cmp->c1);
1042     }
1043     if (!cmp->g2) {
1044         tcg_temp_free(cmp->c2);
1045     }
1046 }
1047
1048 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1049                         DisasContext *dc)
1050 {
1051     static int subcc_cond[16] = {
1052         TCG_COND_NEVER,
1053         TCG_COND_EQ,
1054         TCG_COND_LE,
1055         TCG_COND_LT,
1056         TCG_COND_LEU,
1057         TCG_COND_LTU,
1058         -1, /* neg */
1059         -1, /* overflow */
1060         TCG_COND_ALWAYS,
1061         TCG_COND_NE,
1062         TCG_COND_GT,
1063         TCG_COND_GE,
1064         TCG_COND_GTU,
1065         TCG_COND_GEU,
1066         -1, /* pos */
1067         -1, /* no overflow */
1068     };
1069
1070     static int logic_cond[16] = {
1071         TCG_COND_NEVER,
1072         TCG_COND_EQ,     /* eq:  Z */
1073         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1074         TCG_COND_LT,     /* lt:  N ^ V -> N */
1075         TCG_COND_EQ,     /* leu: C | Z -> Z */
1076         TCG_COND_NEVER,  /* ltu: C -> 0 */
1077         TCG_COND_LT,     /* neg: N */
1078         TCG_COND_NEVER,  /* vs:  V -> 0 */
1079         TCG_COND_ALWAYS,
1080         TCG_COND_NE,     /* ne:  !Z */
1081         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1082         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1083         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1084         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1085         TCG_COND_GE,     /* pos: !N */
1086         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1087     };
1088
1089     TCGv_i32 r_src;
1090     TCGv r_dst;
1091
1092 #ifdef TARGET_SPARC64
1093     if (xcc) {
1094         r_src = cpu_xcc;
1095     } else {
1096         r_src = cpu_psr;
1097     }
1098 #else
1099     r_src = cpu_psr;
1100 #endif
1101
1102     switch (dc->cc_op) {
1103     case CC_OP_LOGIC:
1104         cmp->cond = logic_cond[cond];
1105     do_compare_dst_0:
1106         cmp->is_bool = false;
1107         cmp->g2 = false;
1108         cmp->c2 = tcg_const_tl(0);
1109 #ifdef TARGET_SPARC64
1110         if (!xcc) {
1111             cmp->g1 = false;
1112             cmp->c1 = tcg_temp_new();
1113             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1114             break;
1115         }
1116 #endif
1117         cmp->g1 = true;
1118         cmp->c1 = cpu_cc_dst;
1119         break;
1120
1121     case CC_OP_SUB:
1122         switch (cond) {
1123         case 6:  /* neg */
1124         case 14: /* pos */
1125             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1126             goto do_compare_dst_0;
1127
1128         case 7: /* overflow */
1129         case 15: /* !overflow */
1130             goto do_dynamic;
1131
1132         default:
1133             cmp->cond = subcc_cond[cond];
1134             cmp->is_bool = false;
1135 #ifdef TARGET_SPARC64
1136             if (!xcc) {
1137                 /* Note that sign-extension works for unsigned compares as
1138                    long as both operands are sign-extended.  */
1139                 cmp->g1 = cmp->g2 = false;
1140                 cmp->c1 = tcg_temp_new();
1141                 cmp->c2 = tcg_temp_new();
1142                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1143                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1144                 break;
1145             }
1146 #endif
1147             cmp->g1 = cmp->g2 = true;
1148             cmp->c1 = cpu_cc_src;
1149             cmp->c2 = cpu_cc_src2;
1150             break;
1151         }
1152         break;
1153
1154     default:
1155     do_dynamic:
1156         gen_helper_compute_psr(cpu_env);
1157         dc->cc_op = CC_OP_FLAGS;
1158         /* FALLTHRU */
1159
1160     case CC_OP_FLAGS:
1161         /* We're going to generate a boolean result.  */
1162         cmp->cond = TCG_COND_NE;
1163         cmp->is_bool = true;
1164         cmp->g1 = cmp->g2 = false;
1165         cmp->c1 = r_dst = tcg_temp_new();
1166         cmp->c2 = tcg_const_tl(0);
1167
1168         switch (cond) {
1169         case 0x0:
1170             gen_op_eval_bn(r_dst);
1171             break;
1172         case 0x1:
1173             gen_op_eval_be(r_dst, r_src);
1174             break;
1175         case 0x2:
1176             gen_op_eval_ble(r_dst, r_src);
1177             break;
1178         case 0x3:
1179             gen_op_eval_bl(r_dst, r_src);
1180             break;
1181         case 0x4:
1182             gen_op_eval_bleu(r_dst, r_src);
1183             break;
1184         case 0x5:
1185             gen_op_eval_bcs(r_dst, r_src);
1186             break;
1187         case 0x6:
1188             gen_op_eval_bneg(r_dst, r_src);
1189             break;
1190         case 0x7:
1191             gen_op_eval_bvs(r_dst, r_src);
1192             break;
1193         case 0x8:
1194             gen_op_eval_ba(r_dst);
1195             break;
1196         case 0x9:
1197             gen_op_eval_bne(r_dst, r_src);
1198             break;
1199         case 0xa:
1200             gen_op_eval_bg(r_dst, r_src);
1201             break;
1202         case 0xb:
1203             gen_op_eval_bge(r_dst, r_src);
1204             break;
1205         case 0xc:
1206             gen_op_eval_bgu(r_dst, r_src);
1207             break;
1208         case 0xd:
1209             gen_op_eval_bcc(r_dst, r_src);
1210             break;
1211         case 0xe:
1212             gen_op_eval_bpos(r_dst, r_src);
1213             break;
1214         case 0xf:
1215             gen_op_eval_bvc(r_dst, r_src);
1216             break;
1217         }
1218         break;
1219     }
1220 }
1221
1222 static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
1223 {
1224     unsigned int offset;
1225     TCGv r_dst;
1226
1227     /* For now we still generate a straight boolean result.  */
1228     cmp->cond = TCG_COND_NE;
1229     cmp->is_bool = true;
1230     cmp->g1 = cmp->g2 = false;
1231     cmp->c1 = r_dst = tcg_temp_new();
1232     cmp->c2 = tcg_const_tl(0);
1233
1234     switch (cc) {
1235     default:
1236     case 0x0:
1237         offset = 0;
1238         break;
1239     case 0x1:
1240         offset = 32 - 10;
1241         break;
1242     case 0x2:
1243         offset = 34 - 10;
1244         break;
1245     case 0x3:
1246         offset = 36 - 10;
1247         break;
1248     }
1249
1250     switch (cond) {
1251     case 0x0:
1252         gen_op_eval_bn(r_dst);
1253         break;
1254     case 0x1:
1255         gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1256         break;
1257     case 0x2:
1258         gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1259         break;
1260     case 0x3:
1261         gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1262         break;
1263     case 0x4:
1264         gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1265         break;
1266     case 0x5:
1267         gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1268         break;
1269     case 0x6:
1270         gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1271         break;
1272     case 0x7:
1273         gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1274         break;
1275     case 0x8:
1276         gen_op_eval_ba(r_dst);
1277         break;
1278     case 0x9:
1279         gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1280         break;
1281     case 0xa:
1282         gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1283         break;
1284     case 0xb:
1285         gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1286         break;
1287     case 0xc:
1288         gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1289         break;
1290     case 0xd:
1291         gen_op_eval_fble(r_dst, cpu_fsr, offset);
1292         break;
1293     case 0xe:
1294         gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1295         break;
1296     case 0xf:
1297         gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1298         break;
1299     }
1300 }
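/* The offsets above select one of the four FSR condition-code fields:
   fcc0 sits at FSR bits 11:10, fcc1 at 33:32, fcc2 at 35:34 and fcc3 at
   37:36, hence the additional shifts of 22, 24 and 26 bits. */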
1301
1302 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1303                      DisasContext *dc)
1304 {
1305     DisasCompare cmp;
1306     gen_compare(&cmp, cc, cond, dc);
1307
1308     /* The interface is to return a boolean in r_dst.  */
1309     if (cmp.is_bool) {
1310         tcg_gen_mov_tl(r_dst, cmp.c1);
1311     } else {
1312         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1313     }
1314
1315     free_compare(&cmp);
1316 }
1317
1318 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1319 {
1320     DisasCompare cmp;
1321     gen_fcompare(&cmp, cc, cond);
1322
1323     /* The interface is to return a boolean in r_dst.  */
1324     if (cmp.is_bool) {
1325         tcg_gen_mov_tl(r_dst, cmp.c1);
1326     } else {
1327         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1328     }
1329
1330     free_compare(&cmp);
1331 }
1332
1333 #ifdef TARGET_SPARC64
1334 // Inverted logic: the table holds the negation of each register
1335 // condition; gen_compare_reg() inverts it back with tcg_invert_cond().
1335 static const int gen_tcg_cond_reg[8] = {
1336     -1,
1337     TCG_COND_NE,
1338     TCG_COND_GT,
1339     TCG_COND_GE,
1340     -1,
1341     TCG_COND_EQ,
1342     TCG_COND_LE,
1343     TCG_COND_LT,
1344 };
1345
1346 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1347 {
1348     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1349     cmp->is_bool = false;
1350     cmp->g1 = true;
1351     cmp->g2 = false;
1352     cmp->c1 = r_src;
1353     cmp->c2 = tcg_const_tl(0);
1354 }
1355
1356 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1357 {
1358     DisasCompare cmp;
1359     gen_compare_reg(&cmp, cond, r_src);
1360
1361     /* The interface is to return a boolean in r_dst.  */
1362     tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1363
1364     free_compare(&cmp);
1365 }
1366 #endif
1367
1368 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1369 {
1370     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1371     target_ulong target = dc->pc + offset;
1372
1373 #ifdef TARGET_SPARC64
1374     if (unlikely(AM_CHECK(dc))) {
1375         target &= 0xffffffffULL;
1376     }
1377 #endif
1378     if (cond == 0x0) {
1379         /* unconditional not taken */
1380         if (a) {
1381             dc->pc = dc->npc + 4;
1382             dc->npc = dc->pc + 4;
1383         } else {
1384             dc->pc = dc->npc;
1385             dc->npc = dc->pc + 4;
1386         }
1387     } else if (cond == 0x8) {
1388         /* unconditional taken */
1389         if (a) {
1390             dc->pc = target;
1391             dc->npc = dc->pc + 4;
1392         } else {
1393             dc->pc = dc->npc;
1394             dc->npc = target;
1395             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1396         }
1397     } else {
1398         flush_cond(dc);
1399         gen_cond(cpu_cond, cc, cond, dc);
1400         if (a) {
1401             gen_branch_a(dc, target, dc->npc, cpu_cond);
1402             dc->is_br = 1;
1403         } else {
1404             dc->pc = dc->npc;
1405             dc->jump_pc[0] = target;
1406             if (unlikely(dc->npc == DYNAMIC_PC)) {
1407                 dc->jump_pc[1] = DYNAMIC_PC;
1408                 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1409             } else {
1410                 dc->jump_pc[1] = dc->npc + 4;
1411                 dc->npc = JUMP_PC;
1412             }
1413         }
1414     }
1415 }
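/* Summary of the Bicc cases above: "branch never" with the annul bit skips
   the delay slot, "branch always" with annul jumps immediately, and a
   conditional branch either uses gen_branch_a (annul: delay slot executed
   only if the branch is taken) or defers the decision by recording both
   candidate npc values and setting npc to JUMP_PC. */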
1416
1417 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1418 {
1419     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1420     target_ulong target = dc->pc + offset;
1421
1422 #ifdef TARGET_SPARC64
1423     if (unlikely(AM_CHECK(dc))) {
1424         target &= 0xffffffffULL;
1425     }
1426 #endif
1427     if (cond == 0x0) {
1428         /* unconditional not taken */
1429         if (a) {
1430             dc->pc = dc->npc + 4;
1431             dc->npc = dc->pc + 4;
1432         } else {
1433             dc->pc = dc->npc;
1434             dc->npc = dc->pc + 4;
1435         }
1436     } else if (cond == 0x8) {
1437         /* unconditional taken */
1438         if (a) {
1439             dc->pc = target;
1440             dc->npc = dc->pc + 4;
1441         } else {
1442             dc->pc = dc->npc;
1443             dc->npc = target;
1444             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1445         }
1446     } else {
1447         flush_cond(dc);
1448         gen_fcond(cpu_cond, cc, cond);
1449         if (a) {
1450             gen_branch_a(dc, target, dc->npc, cpu_cond);
1451             dc->is_br = 1;
1452         } else {
1453             dc->pc = dc->npc;
1454             dc->jump_pc[0] = target;
1455             if (unlikely(dc->npc == DYNAMIC_PC)) {
1456                 dc->jump_pc[1] = DYNAMIC_PC;
1457                 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1458             } else {
1459                 dc->jump_pc[1] = dc->npc + 4;
1460                 dc->npc = JUMP_PC;
1461             }
1462         }
1463     }
1464 }
1465
1466 #ifdef TARGET_SPARC64
1467 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1468                           TCGv r_reg)
1469 {
1470     unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1471     target_ulong target = dc->pc + offset;
1472
1473     if (unlikely(AM_CHECK(dc))) {
1474         target &= 0xffffffffULL;
1475     }
1476     flush_cond(dc);
1477     gen_cond_reg(cpu_cond, cond, r_reg);
1478     if (a) {
1479         gen_branch_a(dc, target, dc->npc, cpu_cond);
1480         dc->is_br = 1;
1481     } else {
1482         dc->pc = dc->npc;
1483         dc->jump_pc[0] = target;
1484         if (unlikely(dc->npc == DYNAMIC_PC)) {
1485             dc->jump_pc[1] = DYNAMIC_PC;
1486             tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1487         } else {
1488             dc->jump_pc[1] = dc->npc + 4;
1489             dc->npc = JUMP_PC;
1490         }
1491     }
1492 }
1493
1494 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1495 {
1496     switch (fccno) {
1497     case 0:
1498         gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1499         break;
1500     case 1:
1501         gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1502         break;
1503     case 2:
1504         gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1505         break;
1506     case 3:
1507         gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1508         break;
1509     }
1510 }
1511
1512 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1513 {
1514     switch (fccno) {
1515     case 0:
1516         gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1517         break;
1518     case 1:
1519         gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
1520         break;
1521     case 2:
1522         gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
1523         break;
1524     case 3:
1525         gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
1526         break;
1527     }
1528 }
1529
1530 static inline void gen_op_fcmpq(int fccno)
1531 {
1532     switch (fccno) {
1533     case 0:
1534         gen_helper_fcmpq(cpu_env);
1535         break;
1536     case 1:
1537         gen_helper_fcmpq_fcc1(cpu_env);
1538         break;
1539     case 2:
1540         gen_helper_fcmpq_fcc2(cpu_env);
1541         break;
1542     case 3:
1543         gen_helper_fcmpq_fcc3(cpu_env);
1544         break;
1545     }
1546 }
1547
1548 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1549 {
1550     switch (fccno) {
1551     case 0:
1552         gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1553         break;
1554     case 1:
1555         gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1556         break;
1557     case 2:
1558         gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1559         break;
1560     case 3:
1561         gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1562         break;
1563     }
1564 }
1565
1566 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1567 {
1568     switch (fccno) {
1569     case 0:
1570         gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1571         break;
1572     case 1:
1573         gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
1574         break;
1575     case 2:
1576         gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
1577         break;
1578     case 3:
1579         gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
1580         break;
1581     }
1582 }
1583
1584 static inline void gen_op_fcmpeq(int fccno)
1585 {
1586     switch (fccno) {
1587     case 0:
1588         gen_helper_fcmpeq(cpu_env);
1589         break;
1590     case 1:
1591         gen_helper_fcmpeq_fcc1(cpu_env);
1592         break;
1593     case 2:
1594         gen_helper_fcmpeq_fcc2(cpu_env);
1595         break;
1596     case 3:
1597         gen_helper_fcmpeq_fcc3(cpu_env);
1598         break;
1599     }
1600 }
1601
1602 #else
1603
1604 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1605 {
1606     gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1607 }
1608
1609 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1610 {
1611     gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1612 }
1613
1614 static inline void gen_op_fcmpq(int fccno)
1615 {
1616     gen_helper_fcmpq(cpu_env);
1617 }
1618
1619 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1620 {
1621     gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1622 }
1623
1624 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1625 {
1626     gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1627 }
1628
1629 static inline void gen_op_fcmpeq(int fccno)
1630 {
1631     gen_helper_fcmpeq(cpu_env);
1632 }
1633 #endif
1634
1635 static inline void gen_op_fpexception_im(int fsr_flags)
1636 {
1637     TCGv_i32 r_const;
1638
1639     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1640     tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1641     r_const = tcg_const_i32(TT_FP_EXCP);
1642     gen_helper_raise_exception(cpu_env, r_const);
1643     tcg_temp_free_i32(r_const);
1644 }
1645
1646 static int gen_trap_ifnofpu(DisasContext *dc)
1647 {
1648 #if !defined(CONFIG_USER_ONLY)
1649     if (!dc->fpu_enabled) {
1650         TCGv_i32 r_const;
1651
1652         save_state(dc);
1653         r_const = tcg_const_i32(TT_NFPU_INSN);
1654         gen_helper_raise_exception(cpu_env, r_const);
1655         tcg_temp_free_i32(r_const);
1656         dc->is_br = 1;
1657         return 1;
1658     }
1659 #endif
1660     return 0;
1661 }
1662
1663 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1664 {
1665     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1666 }
1667
1668 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1669                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1670 {
1671     TCGv_i32 dst, src;
1672
1673     src = gen_load_fpr_F(dc, rs);
1674     dst = gen_dest_fpr_F(dc);
1675
1676     gen(dst, cpu_env, src);
1677
1678     gen_store_fpr_F(dc, rd, dst);
1679 }
1680
1681 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1682                                  void (*gen)(TCGv_i32, TCGv_i32))
1683 {
1684     TCGv_i32 dst, src;
1685
1686     src = gen_load_fpr_F(dc, rs);
1687     dst = gen_dest_fpr_F(dc);
1688
1689     gen(dst, src);
1690
1691     gen_store_fpr_F(dc, rd, dst);
1692 }
1693
1694 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1695                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1696 {
1697     TCGv_i32 dst, src1, src2;
1698
1699     src1 = gen_load_fpr_F(dc, rs1);
1700     src2 = gen_load_fpr_F(dc, rs2);
1701     dst = gen_dest_fpr_F(dc);
1702
1703     gen(dst, cpu_env, src1, src2);
1704
1705     gen_store_fpr_F(dc, rd, dst);
1706 }
1707
1708 #ifdef TARGET_SPARC64
1709 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1710                                   void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1711 {
1712     TCGv_i32 dst, src1, src2;
1713
1714     src1 = gen_load_fpr_F(dc, rs1);
1715     src2 = gen_load_fpr_F(dc, rs2);
1716     dst = gen_dest_fpr_F(dc);
1717
1718     gen(dst, src1, src2);
1719
1720     gen_store_fpr_F(dc, rd, dst);
1721 }
1722 #endif
1723
1724 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1725                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1726 {
1727     TCGv_i64 dst, src;
1728
1729     src = gen_load_fpr_D(dc, rs);
1730     dst = gen_dest_fpr_D(dc, rd);
1731
1732     gen(dst, cpu_env, src);
1733
1734     gen_store_fpr_D(dc, rd, dst);
1735 }
1736
1737 #ifdef TARGET_SPARC64
1738 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1739                                  void (*gen)(TCGv_i64, TCGv_i64))
1740 {
1741     TCGv_i64 dst, src;
1742
1743     src = gen_load_fpr_D(dc, rs);
1744     dst = gen_dest_fpr_D(dc, rd);
1745
1746     gen(dst, src);
1747
1748     gen_store_fpr_D(dc, rd, dst);
1749 }
1750 #endif
1751
1752 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1753                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1754 {
1755     TCGv_i64 dst, src1, src2;
1756
1757     src1 = gen_load_fpr_D(dc, rs1);
1758     src2 = gen_load_fpr_D(dc, rs2);
1759     dst = gen_dest_fpr_D(dc, rd);
1760
1761     gen(dst, cpu_env, src1, src2);
1762
1763     gen_store_fpr_D(dc, rd, dst);
1764 }
1765
1766 #ifdef TARGET_SPARC64
1767 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1768                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1769 {
1770     TCGv_i64 dst, src1, src2;
1771
1772     src1 = gen_load_fpr_D(dc, rs1);
1773     src2 = gen_load_fpr_D(dc, rs2);
1774     dst = gen_dest_fpr_D(dc, rd);
1775
1776     gen(dst, src1, src2);
1777
1778     gen_store_fpr_D(dc, rd, dst);
1779 }
1780
1781 static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1782                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1783 {
1784     TCGv_i64 dst, src1, src2;
1785
1786     src1 = gen_load_fpr_D(dc, rs1);
1787     src2 = gen_load_fpr_D(dc, rs2);
1788     dst = gen_dest_fpr_D(dc, rd);
1789
1790     gen(dst, cpu_gsr, src1, src2);
1791
1792     gen_store_fpr_D(dc, rd, dst);
1793 }
1794
1795 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1796                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1797 {
1798     TCGv_i64 dst, src0, src1, src2;
1799
1800     src1 = gen_load_fpr_D(dc, rs1);
1801     src2 = gen_load_fpr_D(dc, rs2);
1802     src0 = gen_load_fpr_D(dc, rd);
1803     dst = gen_dest_fpr_D(dc, rd);
1804
1805     gen(dst, src0, src1, src2);
1806
1807     gen_store_fpr_D(dc, rd, dst);
1808 }
1809 #endif
1810
1811 static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
1812                               void (*gen)(TCGv_ptr))
1813 {
1814     gen_op_load_fpr_QT1(QFPREG(rs));
1815
1816     gen(cpu_env);
1817
1818     gen_op_store_QT0_fpr(QFPREG(rd));
1819     gen_update_fprs_dirty(QFPREG(rd));
1820 }
1821
1822 #ifdef TARGET_SPARC64
1823 static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
1824                                  void (*gen)(TCGv_ptr))
1825 {
1826     gen_op_load_fpr_QT1(QFPREG(rs));
1827
1828     gen(cpu_env);
1829
1830     gen_op_store_QT0_fpr(QFPREG(rd));
1831     gen_update_fprs_dirty(QFPREG(rd));
1832 }
1833 #endif
1834
1835 static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
1836                                void (*gen)(TCGv_ptr))
1837 {
1838     gen_op_load_fpr_QT0(QFPREG(rs1));
1839     gen_op_load_fpr_QT1(QFPREG(rs2));
1840
1841     gen(cpu_env);
1842
1843     gen_op_store_QT0_fpr(QFPREG(rd));
1844     gen_update_fprs_dirty(QFPREG(rd));
1845 }
1846
1847 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1848                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1849 {
1850     TCGv_i64 dst;
1851     TCGv_i32 src1, src2;
1852
1853     src1 = gen_load_fpr_F(dc, rs1);
1854     src2 = gen_load_fpr_F(dc, rs2);
1855     dst = gen_dest_fpr_D(dc, rd);
1856
1857     gen(dst, cpu_env, src1, src2);
1858
1859     gen_store_fpr_D(dc, rd, dst);
1860 }
1861
1862 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1863                                void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1864 {
1865     TCGv_i64 src1, src2;
1866
1867     src1 = gen_load_fpr_D(dc, rs1);
1868     src2 = gen_load_fpr_D(dc, rs2);
1869
1870     gen(cpu_env, src1, src2);
1871
1872     gen_op_store_QT0_fpr(QFPREG(rd));
1873     gen_update_fprs_dirty(QFPREG(rd));
1874 }
1875
1876 #ifdef TARGET_SPARC64
1877 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1878                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1879 {
1880     TCGv_i64 dst;
1881     TCGv_i32 src;
1882
1883     src = gen_load_fpr_F(dc, rs);
1884     dst = gen_dest_fpr_D(dc, rd);
1885
1886     gen(dst, cpu_env, src);
1887
1888     gen_store_fpr_D(dc, rd, dst);
1889 }
1890 #endif
1891
1892 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1893                                  void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1894 {
1895     TCGv_i64 dst;
1896     TCGv_i32 src;
1897
1898     src = gen_load_fpr_F(dc, rs);
1899     dst = gen_dest_fpr_D(dc, rd);
1900
1901     gen(dst, cpu_env, src);
1902
1903     gen_store_fpr_D(dc, rd, dst);
1904 }
1905
1906 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1907                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1908 {
1909     TCGv_i32 dst;
1910     TCGv_i64 src;
1911
1912     src = gen_load_fpr_D(dc, rs);
1913     dst = gen_dest_fpr_F(dc);
1914
1915     gen(dst, cpu_env, src);
1916
1917     gen_store_fpr_F(dc, rd, dst);
1918 }
1919
1920 static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1921                               void (*gen)(TCGv_i32, TCGv_ptr))
1922 {
1923     TCGv_i32 dst;
1924
1925     gen_op_load_fpr_QT1(QFPREG(rs));
1926     dst = gen_dest_fpr_F(dc);
1927
1928     gen(dst, cpu_env);
1929
1930     gen_store_fpr_F(dc, rd, dst);
1931 }
1932
1933 static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1934                               void (*gen)(TCGv_i64, TCGv_ptr))
1935 {
1936     TCGv_i64 dst;
1937
1938     gen_op_load_fpr_QT1(QFPREG(rs));
1939     dst = gen_dest_fpr_D(dc, rd);
1940
1941     gen(dst, cpu_env);
1942
1943     gen_store_fpr_D(dc, rd, dst);
1944 }
1945
1946 static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1947                                  void (*gen)(TCGv_ptr, TCGv_i32))
1948 {
1949     TCGv_i32 src;
1950
1951     src = gen_load_fpr_F(dc, rs);
1952
1953     gen(cpu_env, src);
1954
1955     gen_op_store_QT0_fpr(QFPREG(rd));
1956     gen_update_fprs_dirty(QFPREG(rd));
1957 }
1958
1959 static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1960                                  void (*gen)(TCGv_ptr, TCGv_i64))
1961 {
1962     TCGv_i64 src;
1963
1964     src = gen_load_fpr_D(dc, rs);
1965
1966     gen(cpu_env, src);
1967
1968     gen_op_store_QT0_fpr(QFPREG(rd));
1969     gen_update_fprs_dirty(QFPREG(rd));
1970 }
1971
1972 /* asi moves */
1973 #ifdef TARGET_SPARC64
1974 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1975 {
1976     int asi;
1977     TCGv_i32 r_asi;
1978
1979     if (IS_IMM) {
1980         r_asi = tcg_temp_new_i32();
1981         tcg_gen_mov_i32(r_asi, cpu_asi);
1982     } else {
1983         asi = GET_FIELD(insn, 19, 26);
1984         r_asi = tcg_const_i32(asi);
1985     }
1986     return r_asi;
1987 }
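/* Illustrative sketch (not part of the original file): with the i bit clear,
 * the ASI used above comes from the 8-bit immediate field in instruction
 * bits 12..5 (GET_FIELD(insn, 19, 26)); with the i bit set, the current
 * value of the %asi register is used instead.  A plain C model of the
 * immediate case:
 */
static inline unsigned example_imm_asi(uint32_t insn)
{
    return (insn >> 5) & 0xff;      /* imm_asi field, instruction bits 12..5 */
}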
1988
1989 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1990                               int sign)
1991 {
1992     TCGv_i32 r_asi, r_size, r_sign;
1993
1994     r_asi = gen_get_asi(insn, addr);
1995     r_size = tcg_const_i32(size);
1996     r_sign = tcg_const_i32(sign);
1997     gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
1998     tcg_temp_free_i32(r_sign);
1999     tcg_temp_free_i32(r_size);
2000     tcg_temp_free_i32(r_asi);
2001 }
2002
2003 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2004 {
2005     TCGv_i32 r_asi, r_size;
2006
2007     r_asi = gen_get_asi(insn, addr);
2008     r_size = tcg_const_i32(size);
2009     gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
2010     tcg_temp_free_i32(r_size);
2011     tcg_temp_free_i32(r_asi);
2012 }
2013
2014 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
2015 {
2016     TCGv_i32 r_asi, r_size, r_rd;
2017
2018     r_asi = gen_get_asi(insn, addr);
2019     r_size = tcg_const_i32(size);
2020     r_rd = tcg_const_i32(rd);
2021     gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
2022     tcg_temp_free_i32(r_rd);
2023     tcg_temp_free_i32(r_size);
2024     tcg_temp_free_i32(r_asi);
2025 }
2026
2027 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
2028 {
2029     TCGv_i32 r_asi, r_size, r_rd;
2030
2031     r_asi = gen_get_asi(insn, addr);
2032     r_size = tcg_const_i32(size);
2033     r_rd = tcg_const_i32(rd);
2034     gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
2035     tcg_temp_free_i32(r_rd);
2036     tcg_temp_free_i32(r_size);
2037     tcg_temp_free_i32(r_asi);
2038 }
2039
2040 static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
2041 {
2042     TCGv_i32 r_asi, r_size, r_sign;
2043     TCGv_i64 t64 = tcg_temp_new_i64();
2044
2045     r_asi = gen_get_asi(insn, addr);
2046     r_size = tcg_const_i32(4);
2047     r_sign = tcg_const_i32(0);
2048     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2049     tcg_temp_free_i32(r_sign);
2050     gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
2051     tcg_temp_free_i32(r_size);
2052     tcg_temp_free_i32(r_asi);
2053     tcg_gen_trunc_i64_tl(dst, t64);
2054     tcg_temp_free_i64(t64);
2055 }
2056
2057 static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2058                                 int insn, int rd)
2059 {
2060     TCGv_i32 r_asi, r_rd;
2061
2062     r_asi = gen_get_asi(insn, addr);
2063     r_rd = tcg_const_i32(rd);
2064     gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
2065     tcg_temp_free_i32(r_rd);
2066     tcg_temp_free_i32(r_asi);
2067 }
2068
2069 static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2070                                 int insn, int rd)
2071 {
2072     TCGv_i32 r_asi, r_size;
2073     TCGv lo = gen_load_gpr(dc, rd + 1);
2074     TCGv_i64 t64 = tcg_temp_new_i64();
2075
2076     tcg_gen_concat_tl_i64(t64, lo, hi);
2077     r_asi = gen_get_asi(insn, addr);
2078     r_size = tcg_const_i32(8);
2079     gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
2080     tcg_temp_free_i32(r_size);
2081     tcg_temp_free_i32(r_asi);
2082     tcg_temp_free_i64(t64);
2083 }
2084
2085 static inline void gen_casx_asi(DisasContext *dc, TCGv addr,
2086                                 TCGv val2, int insn, int rd)
2087 {
2088     TCGv val1 = gen_load_gpr(dc, rd);
2089     TCGv dst = gen_dest_gpr(dc, rd);
2090     TCGv_i32 r_asi = gen_get_asi(insn, addr);
2091
2092     gen_helper_casx_asi(dst, cpu_env, addr, val1, val2, r_asi);
2093     tcg_temp_free_i32(r_asi);
2094     gen_store_gpr(dc, rd, dst);
2095 }
2096
2097 #elif !defined(CONFIG_USER_ONLY)
2098
2099 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2100                               int sign)
2101 {
2102     TCGv_i32 r_asi, r_size, r_sign;
2103     TCGv_i64 t64 = tcg_temp_new_i64();
2104
2105     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2106     r_size = tcg_const_i32(size);
2107     r_sign = tcg_const_i32(sign);
2108     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2109     tcg_temp_free_i32(r_sign);
2110     tcg_temp_free_i32(r_size);
2111     tcg_temp_free_i32(r_asi);
2112     tcg_gen_trunc_i64_tl(dst, t64);
2113     tcg_temp_free_i64(t64);
2114 }
2115
2116 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2117 {
2118     TCGv_i32 r_asi, r_size;
2119     TCGv_i64 t64 = tcg_temp_new_i64();
2120
2121     tcg_gen_extu_tl_i64(t64, src);
2122     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2123     r_size = tcg_const_i32(size);
2124     gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
2125     tcg_temp_free_i32(r_size);
2126     tcg_temp_free_i32(r_asi);
2127     tcg_temp_free_i64(t64);
2128 }
2129
2130 static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
2131 {
2132     TCGv_i32 r_asi, r_size, r_sign;
2133     TCGv_i64 r_val, t64;
2134
2135     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2136     r_size = tcg_const_i32(4);
2137     r_sign = tcg_const_i32(0);
2138     t64 = tcg_temp_new_i64();
2139     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2140     tcg_temp_free_i32(r_sign);
2141     r_val = tcg_temp_new_i64();
2142     tcg_gen_extu_tl_i64(r_val, src);
2143     gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2144     tcg_temp_free_i64(r_val);
2145     tcg_temp_free_i32(r_size);
2146     tcg_temp_free_i32(r_asi);
2147     tcg_gen_trunc_i64_tl(dst, t64);
2148     tcg_temp_free_i64(t64);
2149 }
2150
2151 static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2152                                 int insn, int rd)
2153 {
2154     TCGv_i32 r_asi, r_size, r_sign;
2155     TCGv t;
2156     TCGv_i64 t64;
2157
2158     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2159     r_size = tcg_const_i32(8);
2160     r_sign = tcg_const_i32(0);
2161     t64 = tcg_temp_new_i64();
2162     gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
2163     tcg_temp_free_i32(r_sign);
2164     tcg_temp_free_i32(r_size);
2165     tcg_temp_free_i32(r_asi);
2166
2167     t = gen_dest_gpr(dc, rd + 1);
2168     tcg_gen_trunc_i64_tl(t, t64);
2169     gen_store_gpr(dc, rd + 1, t);
2170
2171     tcg_gen_shri_i64(t64, t64, 32);
2172     tcg_gen_trunc_i64_tl(hi, t64);
2173     tcg_temp_free_i64(t64);
2174     gen_store_gpr(dc, rd, hi);
2175 }
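/* Illustrative sketch (assumption, not from the original source): the 64-bit
 * value produced by the ldda helper is split across the register pair as
 * above, the even register rd taking bits 63..32 and rd + 1 taking bits
 * 31..0.  In plain C:
 */
static inline void example_ldda_split(uint64_t val, uint32_t *rd, uint32_t *rd_plus_1)
{
    *rd_plus_1 = (uint32_t)val;          /* low word  -> rd + 1 */
    *rd = (uint32_t)(val >> 32);         /* high word -> rd     */
}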
2176
2177 static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
2178                                 int insn, int rd)
2179 {
2180     TCGv_i32 r_asi, r_size;
2181     TCGv lo = gen_load_gpr(dc, rd + 1);
2182     TCGv_i64 t64 = tcg_temp_new_i64();
2183
2184     tcg_gen_concat_tl_i64(t64, lo, hi);
2185     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2186     r_size = tcg_const_i32(8);
2187     gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
2188     tcg_temp_free_i32(r_size);
2189     tcg_temp_free_i32(r_asi);
2190     tcg_temp_free_i64(t64);
2191 }
2192 #endif
2193
2194 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
2195 static inline void gen_cas_asi(DisasContext *dc, TCGv addr,
2196                                TCGv val2, int insn, int rd)
2197 {
2198     TCGv val1 = gen_load_gpr(dc, rd);
2199     TCGv dst = gen_dest_gpr(dc, rd);
2200 #ifdef TARGET_SPARC64
2201     TCGv_i32 r_asi = gen_get_asi(insn, addr);
2202 #else
2203     TCGv_i32 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2204 #endif
2205
2206     gen_helper_cas_asi(dst, cpu_env, addr, val1, val2, r_asi);
2207     tcg_temp_free_i32(r_asi);
2208     gen_store_gpr(dc, rd, dst);
2209 }
2210
2211 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
2212 {
2213     TCGv_i64 r_val;
2214     TCGv_i32 r_asi, r_size;
2215
2216     gen_ld_asi(dst, addr, insn, 1, 0);
2217
2218     r_val = tcg_const_i64(0xffULL);
2219     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2220     r_size = tcg_const_i32(1);
2221     gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2222     tcg_temp_free_i32(r_size);
2223     tcg_temp_free_i32(r_asi);
2224     tcg_temp_free_i64(r_val);
2225 }
2226 #endif
2227
2228 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2229 {
2230     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2231     return gen_load_gpr(dc, rs1);
2232 }
2233
2234 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2235 {
2236     if (IS_IMM) { /* immediate */
2237         target_long simm = GET_FIELDs(insn, 19, 31);
2238         TCGv t = get_temp_tl(dc);
2239         tcg_gen_movi_tl(t, simm);
2240         return t;
2241     } else {      /* register */
2242         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2243         return gen_load_gpr(dc, rs2);
2244     }
2245 }
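/* Illustrative decode sketch (not part of the original file): when the i bit
 * is set, get_src2() above uses the signed 13-bit immediate held in
 * instruction bits 12..0, giving a range of -4096..4095.  In plain C:
 */
static inline int32_t example_simm13(uint32_t insn)
{
    int32_t simm = insn & 0x1fff;

    if (simm & 0x1000) {
        simm -= 0x2000;                  /* sign-extend from 13 bits */
    }
    return simm;
}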
2246
2247 #ifdef TARGET_SPARC64
2248 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2249 {
2250     TCGv_i32 c32, zero, dst, s1, s2;
2251
2252     /* We have two choices here: extend the 32 bit data and use movcond_i64,
2253        or fold the comparison down to 32 bits and use movcond_i32.  Choose
2254        the latter.  */
2255     c32 = tcg_temp_new_i32();
2256     if (cmp->is_bool) {
2257         tcg_gen_trunc_i64_i32(c32, cmp->c1);
2258     } else {
2259         TCGv_i64 c64 = tcg_temp_new_i64();
2260         tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2261         tcg_gen_trunc_i64_i32(c32, c64);
2262         tcg_temp_free_i64(c64);
2263     }
2264
2265     s1 = gen_load_fpr_F(dc, rs);
2266     s2 = gen_load_fpr_F(dc, rd);
2267     dst = gen_dest_fpr_F(dc);
2268     zero = tcg_const_i32(0);
2269
2270     tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2271
2272     tcg_temp_free_i32(c32);
2273     tcg_temp_free_i32(zero);
2274     gen_store_fpr_F(dc, rd, dst);
2275 }
2276
2277 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2278 {
2279     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2280     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2281                         gen_load_fpr_D(dc, rs),
2282                         gen_load_fpr_D(dc, rd));
2283     gen_store_fpr_D(dc, rd, dst);
2284 }
2285
2286 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2287 {
2288     int qd = QFPREG(rd);
2289     int qs = QFPREG(rs);
2290
2291     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2292                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2293     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2294                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2295
2296     gen_update_fprs_dirty(qd);
2297 }
2298
2299 #ifndef CONFIG_USER_ONLY
2300 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
2301 {
2302     TCGv_i32 r_tl = tcg_temp_new_i32();
2303
2304     /* load env->tl into r_tl */
2305     tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2306
2307     /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be a power of 2 minus 1 */
2308     tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2309
2310     /* calculate offset to current trap state from env->ts, reuse r_tl */
2311     tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2312     tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));
2313
2314     /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2315     {
2316         TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2317         tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2318         tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2319         tcg_temp_free_ptr(r_tl_tmp);
2320     }
2321
2322     tcg_temp_free_i32(r_tl);
2323 }
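/* For reference (illustrative only, not used by the translator), the pointer
 * arithmetic above is the TCG equivalent of:
 */
static inline trap_state *example_current_trap_state(CPUSPARCState *env)
{
    return &env->ts[env->tl & MAXTL_MASK];
}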
2324 #endif
2325
2326 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2327                      int width, bool cc, bool left)
2328 {
2329     TCGv lo1, lo2, t1, t2;
2330     uint64_t amask, tabl, tabr;
2331     int shift, imask, omask;
2332
2333     if (cc) {
2334         tcg_gen_mov_tl(cpu_cc_src, s1);
2335         tcg_gen_mov_tl(cpu_cc_src2, s2);
2336         tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2337         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2338         dc->cc_op = CC_OP_SUB;
2339     }
2340
2341     /* Theory of operation: there are two tables, left and right (not to
2342        be confused with the left and right versions of the opcode).  These
2343        are indexed by the low 3 bits of the inputs.  To make things "easy",
2344        these tables are loaded into two constants, TABL and TABR below.
2345        The operation index = (input & imask) << shift calculates the index
2346        into the constant, while val = (table >> index) & omask calculates
2347        the value we're looking for.  */
2348     switch (width) {
2349     case 8:
2350         imask = 0x7;
2351         shift = 3;
2352         omask = 0xff;
2353         if (left) {
2354             tabl = 0x80c0e0f0f8fcfeffULL;
2355             tabr = 0xff7f3f1f0f070301ULL;
2356         } else {
2357             tabl = 0x0103070f1f3f7fffULL;
2358             tabr = 0xfffefcf8f0e0c080ULL;
2359         }
2360         break;
2361     case 16:
2362         imask = 0x6;
2363         shift = 1;
2364         omask = 0xf;
2365         if (left) {
2366             tabl = 0x8cef;
2367             tabr = 0xf731;
2368         } else {
2369             tabl = 0x137f;
2370             tabr = 0xfec8;
2371         }
2372         break;
2373     case 32:
2374         imask = 0x4;
2375         shift = 0;
2376         omask = 0x3;
2377         if (left) {
2378             tabl = (2 << 2) | 3;
2379             tabr = (3 << 2) | 1;
2380         } else {
2381             tabl = (1 << 2) | 3;
2382             tabr = (3 << 2) | 2;
2383         }
2384         break;
2385     default:
2386         abort();
2387     }
2388
2389     lo1 = tcg_temp_new();
2390     lo2 = tcg_temp_new();
2391     tcg_gen_andi_tl(lo1, s1, imask);
2392     tcg_gen_andi_tl(lo2, s2, imask);
2393     tcg_gen_shli_tl(lo1, lo1, shift);
2394     tcg_gen_shli_tl(lo2, lo2, shift);
2395
2396     t1 = tcg_const_tl(tabl);
2397     t2 = tcg_const_tl(tabr);
2398     tcg_gen_shr_tl(lo1, t1, lo1);
2399     tcg_gen_shr_tl(lo2, t2, lo2);
2400     tcg_gen_andi_tl(dst, lo1, omask);
2401     tcg_gen_andi_tl(lo2, lo2, omask);
2402
2403     amask = -8;
2404     if (AM_CHECK(dc)) {
2405         amask &= 0xffffffffULL;
2406     }
2407     tcg_gen_andi_tl(s1, s1, amask);
2408     tcg_gen_andi_tl(s2, s2, amask);
2409
2410     /* We want to compute
2411         dst = (s1 == s2 ? lo1 : lo1 & lo2).
2412        We've already done dst = lo1, so this reduces to
2413         dst &= (s1 == s2 ? -1 : lo2)
2414        Which we perform by
2415         lo2 |= -(s1 == s2)
2416         dst &= lo2
2417     */
2418     tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
2419     tcg_gen_neg_tl(t1, t1);
2420     tcg_gen_or_tl(lo2, lo2, t1);
2421     tcg_gen_and_tl(dst, dst, lo2);
2422
2423     tcg_temp_free(lo1);
2424     tcg_temp_free(lo2);
2425     tcg_temp_free(t1);
2426     tcg_temp_free(t2);
2427 }
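/* Illustrative plain-C model of the table scheme described in gen_edge above,
 * for width 8 and the "left" tables (assumption: not part of the original
 * file).  index = (addr & imask) << shift selects one byte of the 64-bit
 * constant, and that byte is the partial edge mask; the two partial masks
 * are then merged exactly as in the comment near the end of gen_edge.
 */
static inline uint8_t example_edge8_mask(uint64_t s1, uint64_t s2)
{
    const uint64_t tabl = 0x80c0e0f0f8fcfeffULL;
    const uint64_t tabr = 0xff7f3f1f0f070301ULL;
    uint8_t lo1 = (tabl >> ((s1 & 7) << 3)) & 0xff;
    uint8_t lo2 = (tabr >> ((s2 & 7) << 3)) & 0xff;

    /* dst = (s1 == s2 ? lo1 : lo1 & lo2), comparing the 8-byte-aligned
       addresses as the generated code does.  */
    return (s1 & ~7ULL) == (s2 & ~7ULL) ? lo1 : (lo1 & lo2);
}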
2428
2429 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2430 {
2431     TCGv tmp = tcg_temp_new();
2432
2433     tcg_gen_add_tl(tmp, s1, s2);
2434     tcg_gen_andi_tl(dst, tmp, -8);
2435     if (left) {
2436         tcg_gen_neg_tl(tmp, tmp);
2437     }
2438     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2439
2440     tcg_temp_free(tmp);
2441 }
2442
2443 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2444 {
2445     TCGv t1, t2, shift;
2446
2447     t1 = tcg_temp_new();
2448     t2 = tcg_temp_new();
2449     shift = tcg_temp_new();
2450
2451     tcg_gen_andi_tl(shift, gsr, 7);
2452     tcg_gen_shli_tl(shift, shift, 3);
2453     tcg_gen_shl_tl(t1, s1, shift);
2454
2455     /* A shift of 64 does not produce 0 in TCG.  Divide this into a
2456        shift of (up to 63) followed by a constant shift of 1.  */
2457     tcg_gen_xori_tl(shift, shift, 63);
2458     tcg_gen_shr_tl(t2, s2, shift);
2459     tcg_gen_shri_tl(t2, t2, 1);
2460
2461     tcg_gen_or_tl(dst, t1, t2);
2462
2463     tcg_temp_free(t1);
2464     tcg_temp_free(t2);
2465     tcg_temp_free(shift);
2466 }
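/* Illustrative plain-C model of the shift splitting used above (assumption:
 * not part of the original file).  GSR.align is 0..7, so the byte shift is
 * 0..56 bits; a right shift of s2 by 64 - shift would be a shift by 64 when
 * GSR.align == 0, which TCG (like C) leaves undefined, hence the
 * xor-with-63 shift followed by a further constant shift of 1.
 */
static inline uint64_t example_faligndata(uint64_t gsr, uint64_t s1, uint64_t s2)
{
    unsigned shift = (gsr & 7) * 8;

    return (s1 << shift) | ((s2 >> (shift ^ 63)) >> 1);
}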
2467 #endif
2468
2469 #define CHECK_IU_FEATURE(dc, FEATURE)                      \
2470     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2471         goto illegal_insn;
2472 #define CHECK_FPU_FEATURE(dc, FEATURE)                     \
2473     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2474         goto nfpu_insn;
2475
2476 /* before an instruction, dc->pc must be static */
2477 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2478 {
2479     unsigned int opc, rs1, rs2, rd;
2480     TCGv cpu_src1, cpu_src2;
2481     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2482     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2483     target_long simm;
2484
2485     if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
2486         tcg_gen_debug_insn_start(dc->pc);
2487     }
2488
2489     opc = GET_FIELD(insn, 0, 1);
2490     rd = GET_FIELD(insn, 2, 6);
2491
2492     switch (opc) {
2493     case 0:                     /* branches/sethi */
2494         {
2495             unsigned int xop = GET_FIELD(insn, 7, 9);
2496             int32_t target;
2497             switch (xop) {
2498 #ifdef TARGET_SPARC64
2499             case 0x1:           /* V9 BPcc */
2500                 {
2501                     int cc;
2502
2503                     target = GET_FIELD_SP(insn, 0, 18);
2504                     target = sign_extend(target, 19);
2505                     target <<= 2;
2506                     cc = GET_FIELD_SP(insn, 20, 21);
2507                     if (cc == 0)
2508                         do_branch(dc, target, insn, 0);
2509                     else if (cc == 2)
2510                         do_branch(dc, target, insn, 1);
2511                     else
2512                         goto illegal_insn;
2513                     goto jmp_insn;
2514                 }
2515             case 0x3:           /* V9 BPr */
2516                 {
2517                     target = GET_FIELD_SP(insn, 0, 13) |
2518                         (GET_FIELD_SP(insn, 20, 21) << 14);
2519                     target = sign_extend(target, 16);
2520                     target <<= 2;
2521                     cpu_src1 = get_src1(dc, insn);
2522                     do_branch_reg(dc, target, insn, cpu_src1);
2523                     goto jmp_insn;
2524                 }
2525             case 0x5:           /* V9 FBPcc */
2526                 {
2527                     int cc = GET_FIELD_SP(insn, 20, 21);
2528                     if (gen_trap_ifnofpu(dc)) {
2529                         goto jmp_insn;
2530                     }
2531                     target = GET_FIELD_SP(insn, 0, 18);
2532                     target = sign_extend(target, 19);
2533                     target <<= 2;
2534                     do_fbranch(dc, target, insn, cc);
2535                     goto jmp_insn;
2536                 }
2537 #else
2538             case 0x7:           /* CBN+x */
2539                 {
2540                     goto ncp_insn;
2541                 }
2542 #endif
2543             case 0x2:           /* BN+x */
2544                 {
2545                     target = GET_FIELD(insn, 10, 31);
2546                     target = sign_extend(target, 22);
2547                     target <<= 2;
2548                     do_branch(dc, target, insn, 0);
2549                     goto jmp_insn;
2550                 }
2551             case 0x6:           /* FBN+x */
2552                 {
2553                     if (gen_trap_ifnofpu(dc)) {
2554                         goto jmp_insn;
2555                     }
2556                     target = GET_FIELD(insn, 10, 31);
2557                     target = sign_extend(target, 22);
2558                     target <<= 2;
2559                     do_fbranch(dc, target, insn, 0);
2560                     goto jmp_insn;
2561                 }
2562             case 0x4:           /* SETHI */
2563                 /* Special-case %g0 because that's the canonical nop.  */
2564                 if (rd) {
2565                     uint32_t value = GET_FIELD(insn, 10, 31);
2566                     TCGv t = gen_dest_gpr(dc, rd);
2567                     tcg_gen_movi_tl(t, value << 10);
2568                     gen_store_gpr(dc, rd, t);
2569                 }
2570                 break;
2571             case 0x0:           /* UNIMPL */
2572             default:
2573                 goto illegal_insn;
2574             }
2575             break;
2576         }
2577         break;
2578     case 1:                     /*CALL*/
2579         {
2580             target_long target = GET_FIELDs(insn, 2, 31) << 2;
2581             TCGv o7 = gen_dest_gpr(dc, 15);
2582
2583             tcg_gen_movi_tl(o7, dc->pc);
2584             gen_store_gpr(dc, 15, o7);
2585             target += dc->pc;
2586             gen_mov_pc_npc(dc);
2587 #ifdef TARGET_SPARC64
2588             if (unlikely(AM_CHECK(dc))) {
2589                 target &= 0xffffffffULL;
2590             }
2591 #endif
2592             dc->npc = target;
2593         }
2594         goto jmp_insn;
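    /* Worked example (illustrative, not from the original source): for the
       encoding 0x40000004 (CALL with disp30 = 4) at pc 0x1000, %o7 is set to
       0x1000 and the target is 0x1000 + (4 << 2) = 0x1010; on SPARC64 with
       PSTATE.AM set the target is additionally truncated to 32 bits, as the
       AM_CHECK test above does. */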
2595     case 2:                     /* FPU & Logical Operations */
2596         {
2597             unsigned int xop = GET_FIELD(insn, 7, 12);
2598             TCGv cpu_dst = get_temp_tl(dc);
2599             TCGv cpu_tmp0;
2600
2601             if (xop == 0x3a) {  /* generate trap */
2602                 int cond = GET_FIELD(insn, 3, 6);
2603                 TCGv_i32 trap;
2604                 TCGLabel *l1 = NULL;
2605                 int mask;
2606
2607                 if (cond == 0) {
2608                     /* Trap never.  */
2609                     break;
2610                 }
2611
2612                 save_state(dc);
2613
2614                 if (cond != 8) {
2615                     /* Conditional trap.  */
2616                     DisasCompare cmp;
2617 #ifdef TARGET_SPARC64
2618                     /* V9 icc/xcc */
2619                     int cc = GET_FIELD_SP(insn, 11, 12);
2620                     if (cc == 0) {
2621                         gen_compare(&cmp, 0, cond, dc);
2622                     } else if (cc == 2) {
2623                         gen_compare(&cmp, 1, cond, dc);
2624                     } else {
2625                         goto illegal_insn;
2626                     }
2627 #else
2628                     gen_compare(&cmp, 0, cond, dc);
2629 #endif
2630                     l1 = gen_new_label();
2631                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
2632                                       cmp.c1, cmp.c2, l1);
2633                     free_compare(&cmp);
2634                 }
2635
2636                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
2637                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
2638
2639                 /* Don't use the normal temporaries, as they may well have
2640                    gone out of scope with the branch above.  While we're
2641                    doing that we might as well pre-truncate to 32-bit.  */
2642                 trap = tcg_temp_new_i32();
2643
2644                 rs1 = GET_FIELD_SP(insn, 14, 18);
2645                 if (IS_IMM) {
2646                     rs2 = GET_FIELD_SP(insn, 0, 6);
2647                     if (rs1 == 0) {
2648                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
2649                         /* Signal that the trap value is fully constant.  */
2650                         mask = 0;
2651                     } else {
2652                         TCGv t1 = gen_load_gpr(dc, rs1);
2653                         tcg_gen_trunc_tl_i32(trap, t1);
2654                         tcg_gen_addi_i32(trap, trap, rs2);
2655                     }
2656                 } else {
2657                     TCGv t1, t2;
2658                     rs2 = GET_FIELD_SP(insn, 0, 4);
2659                     t1 = gen_load_gpr(dc, rs1);
2660                     t2 = gen_load_gpr(dc, rs2);
2661                     tcg_gen_add_tl(t1, t1, t2);
2662                     tcg_gen_trunc_tl_i32(trap, t1);
2663                 }
2664                 if (mask != 0) {
2665                     tcg_gen_andi_i32(trap, trap, mask);
2666                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
2667                 }
2668
2669                 gen_helper_raise_exception(cpu_env, trap);
2670                 tcg_temp_free_i32(trap);
2671
2672                 if (cond == 8) {
2673                     /* An unconditional trap ends the TB.  */
2674                     dc->is_br = 1;
2675                     goto jmp_insn;
2676                 } else {
2677                     /* A conditional trap falls through to the next insn.  */
2678                     gen_set_label(l1);
2679                     break;
2680                 }
2681             } else if (xop == 0x28) {
2682                 rs1 = GET_FIELD(insn, 13, 17);
2683                 switch(rs1) {
2684                 case 0: /* rdy */
2685 #ifndef TARGET_SPARC64
2686                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2687                                        manual, rdy on the microSPARC
2688                                        II */
2689                 case 0x0f:          /* stbar in the SPARCv8 manual,
2690                                        rdy on the microSPARC II */
2691                 case 0x10 ... 0x1f: /* implementation-dependent in the
2692                                        SPARCv8 manual, rdy on the
2693                                        microSPARC II */
2694                     /* Read Asr17 */
2695                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2696                         TCGv t = gen_dest_gpr(dc, rd);
2697                         /* Read Asr17 for a Leon3 monoprocessor */
2698                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
2699                         gen_store_gpr(dc, rd, t);
2700                         break;
2701                     }
2702 #endif
2703                     gen_store_gpr(dc, rd, cpu_y);
2704                     break;
2705 #ifdef TARGET_SPARC64
2706                 case 0x2: /* V9 rdccr */
2707                     update_psr(dc);
2708                     gen_helper_rdccr(cpu_dst, cpu_env);
2709                     gen_store_gpr(dc, rd, cpu_dst);
2710                     break;
2711                 case 0x3: /* V9 rdasi */
2712                     tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2713                     gen_store_gpr(dc, rd, cpu_dst);
2714                     break;
2715                 case 0x4: /* V9 rdtick */
2716                     {
2717                         TCGv_ptr r_tickptr;
2718
2719                         r_tickptr = tcg_temp_new_ptr();
2720                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2721                                        offsetof(CPUSPARCState, tick));
2722                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2723                         tcg_temp_free_ptr(r_tickptr);
2724                         gen_store_gpr(dc, rd, cpu_dst);
2725                     }
2726                     break;
2727                 case 0x5: /* V9 rdpc */
2728                     {
2729                         TCGv t = gen_dest_gpr(dc, rd);
2730                         if (unlikely(AM_CHECK(dc))) {
2731                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
2732                         } else {
2733                             tcg_gen_movi_tl(t, dc->pc);
2734                         }
2735                         gen_store_gpr(dc, rd, t);
2736                     }
2737                     break;
2738                 case 0x6: /* V9 rdfprs */
2739                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2740                     gen_store_gpr(dc, rd, cpu_dst);
2741                     break;
2742                 case 0xf: /* V9 membar */
2743                     break; /* no effect */
2744                 case 0x13: /* Graphics Status */
2745                     if (gen_trap_ifnofpu(dc)) {
2746                         goto jmp_insn;
2747                     }
2748                     gen_store_gpr(dc, rd, cpu_gsr);
2749                     break;
2750                 case 0x16: /* Softint */
2751                     tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2752                     gen_store_gpr(dc, rd, cpu_dst);
2753                     break;
2754                 case 0x17: /* Tick compare */
2755                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
2756                     break;
2757                 case 0x18: /* System tick */
2758                     {
2759                         TCGv_ptr r_tickptr;
2760
2761                         r_tickptr = tcg_temp_new_ptr();
2762                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2763                                        offsetof(CPUSPARCState, stick));
2764                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2765                         tcg_temp_free_ptr(r_tickptr);
2766                         gen_store_gpr(dc, rd, cpu_dst);
2767                     }
2768                     break;
2769                 case 0x19: /* System tick compare */
2770                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
2771                     break;
2772                 case 0x10: /* Performance Control */
2773                 case 0x11: /* Performance Instrumentation Counter */
2774                 case 0x12: /* Dispatch Control */
2775                 case 0x14: /* Softint set, WO */
2776                 case 0x15: /* Softint clear, WO */
2777 #endif
2778                 default:
2779                     goto illegal_insn;
2780                 }
2781 #if !defined(CONFIG_USER_ONLY)
2782             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2783 #ifndef TARGET_SPARC64
2784                 if (!supervisor(dc)) {
2785                     goto priv_insn;
2786                 }
2787                 update_psr(dc);
2788                 gen_helper_rdpsr(cpu_dst, cpu_env);
2789 #else
2790                 CHECK_IU_FEATURE(dc, HYPV);
2791                 if (!hypervisor(dc))
2792                     goto priv_insn;
2793                 rs1 = GET_FIELD(insn, 13, 17);
2794                 switch (rs1) {
2795                 case 0: // hpstate
2796                     // gen_op_rdhpstate();
2797                     break;
2798                 case 1: // htstate
2799                     // gen_op_rdhtstate();
2800                     break;
2801                 case 3: // hintp
2802                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2803                     break;
2804                 case 5: // htba
2805                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
2806                     break;
2807                 case 6: // hver
2808                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
2809                     break;
2810                 case 31: // hstick_cmpr
2811                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2812                     break;
2813                 default:
2814                     goto illegal_insn;
2815                 }
2816 #endif
2817                 gen_store_gpr(dc, rd, cpu_dst);
2818                 break;
2819             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2820                 if (!supervisor(dc)) {
2821                     goto priv_insn;
2822                 }
2823                 cpu_tmp0 = get_temp_tl(dc);
2824 #ifdef TARGET_SPARC64
2825                 rs1 = GET_FIELD(insn, 13, 17);
2826                 switch (rs1) {
2827                 case 0: // tpc
2828                     {
2829                         TCGv_ptr r_tsptr;
2830
2831                         r_tsptr = tcg_temp_new_ptr();
2832                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2833                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2834                                       offsetof(trap_state, tpc));
2835                         tcg_temp_free_ptr(r_tsptr);
2836                     }
2837                     break;
2838                 case 1: // tnpc
2839                     {
2840                         TCGv_ptr r_tsptr;
2841
2842                         r_tsptr = tcg_temp_new_ptr();
2843                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2844                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2845                                       offsetof(trap_state, tnpc));
2846                         tcg_temp_free_ptr(r_tsptr);
2847                     }
2848                     break;
2849                 case 2: // tstate
2850                     {
2851                         TCGv_ptr r_tsptr;
2852
2853                         r_tsptr = tcg_temp_new_ptr();
2854                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2855                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2856                                       offsetof(trap_state, tstate));
2857                         tcg_temp_free_ptr(r_tsptr);
2858                     }
2859                     break;
2860                 case 3: // tt
2861                     {
2862                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
2863
2864                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2865                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
2866                                          offsetof(trap_state, tt));
2867                         tcg_temp_free_ptr(r_tsptr);
2868                     }
2869                     break;
2870                 case 4: // tick
2871                     {
2872                         TCGv_ptr r_tickptr;
2873
2874                         r_tickptr = tcg_temp_new_ptr();
2875                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2876                                        offsetof(CPUSPARCState, tick));
2877                         gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2878                         tcg_temp_free_ptr(r_tickptr);
2879                     }
2880                     break;
2881                 case 5: // tba
2882                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2883                     break;
2884                 case 6: // pstate
2885                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2886                                      offsetof(CPUSPARCState, pstate));
2887                     break;
2888                 case 7: // tl
2889                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2890                                      offsetof(CPUSPARCState, tl));
2891                     break;
2892                 case 8: // pil
2893                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2894                                      offsetof(CPUSPARCState, psrpil));
2895                     break;
2896                 case 9: // cwp
2897                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
2898                     break;
2899                 case 10: // cansave
2900                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2901                                      offsetof(CPUSPARCState, cansave));
2902                     break;
2903                 case 11: // canrestore
2904                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2905                                      offsetof(CPUSPARCState, canrestore));
2906                     break;
2907                 case 12: // cleanwin
2908                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2909                                      offsetof(CPUSPARCState, cleanwin));
2910                     break;
2911                 case 13: // otherwin
2912                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2913                                      offsetof(CPUSPARCState, otherwin));
2914                     break;
2915                 case 14: // wstate
2916                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2917                                      offsetof(CPUSPARCState, wstate));
2918                     break;
2919                 case 16: // UA2005 gl
2920                     CHECK_IU_FEATURE(dc, GL);
2921                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2922                                      offsetof(CPUSPARCState, gl));
2923                     break;
2924                 case 26: // UA2005 strand status
2925                     CHECK_IU_FEATURE(dc, HYPV);
2926                     if (!hypervisor(dc))
2927                         goto priv_insn;
2928                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2929                     break;
2930                 case 31: // ver
2931                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2932                     break;
2933                 case 15: // fq
2934                 default:
2935                     goto illegal_insn;
2936                 }
2937 #else
2938                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2939 #endif
2940                 gen_store_gpr(dc, rd, cpu_tmp0);
2941                 break;
2942             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2943 #ifdef TARGET_SPARC64
2944                 save_state(dc);
2945                 gen_helper_flushw(cpu_env);
2946 #else
2947                 if (!supervisor(dc))
2948                     goto priv_insn;
2949                 gen_store_gpr(dc, rd, cpu_tbr);
2950 #endif
2951                 break;
2952 #endif
2953             } else if (xop == 0x34) {   /* FPU Operations */
2954                 if (gen_trap_ifnofpu(dc)) {
2955                     goto jmp_insn;
2956                 }
2957                 gen_op_clear_ieee_excp_and_FTT();
2958                 rs1 = GET_FIELD(insn, 13, 17);
2959                 rs2 = GET_FIELD(insn, 27, 31);
2960                 xop = GET_FIELD(insn, 18, 26);
2961                 save_state(dc);
2962                 switch (xop) {
2963                 case 0x1: /* fmovs */
2964                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2965                     gen_store_fpr_F(dc, rd, cpu_src1_32);
2966                     break;
2967                 case 0x5: /* fnegs */
2968                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2969                     break;
2970                 case 0x9: /* fabss */
2971                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2972                     break;
2973                 case 0x29: /* fsqrts */
2974                     CHECK_FPU_FEATURE(dc, FSQRT);
2975                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2976                     break;
2977                 case 0x2a: /* fsqrtd */
2978                     CHECK_FPU_FEATURE(dc, FSQRT);
2979                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2980                     break;
2981                 case 0x2b: /* fsqrtq */
2982                     CHECK_FPU_FEATURE(dc, FLOAT128);
2983                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
2984                     break;
2985                 case 0x41: /* fadds */
2986                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
2987                     break;
2988                 case 0x42: /* faddd */
2989                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
2990                     break;
2991                 case 0x43: /* faddq */
2992                     CHECK_FPU_FEATURE(dc, FLOAT128);
2993                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
2994                     break;
2995                 case 0x45: /* fsubs */
2996                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
2997                     break;
2998                 case 0x46: /* fsubd */
2999                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3000                     break;
3001                 case 0x47: /* fsubq */
3002                     CHECK_FPU_FEATURE(dc, FLOAT128);
3003                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3004                     break;
3005                 case 0x49: /* fmuls */
3006                     CHECK_FPU_FEATURE(dc, FMUL);
3007                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3008                     break;
3009                 case 0x4a: /* fmuld */
3010                     CHECK_FPU_FEATURE(dc, FMUL);
3011                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3012                     break;
3013                 case 0x4b: /* fmulq */
3014                     CHECK_FPU_FEATURE(dc, FLOAT128);
3015                     CHECK_FPU_FEATURE(dc, FMUL);
3016                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3017                     break;
3018                 case 0x4d: /* fdivs */
3019                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3020                     break;
3021                 case 0x4e: /* fdivd */
3022                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3023                     break;
3024                 case 0x4f: /* fdivq */
3025                     CHECK_FPU_FEATURE(dc, FLOAT128);
3026                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3027                     break;
3028                 case 0x69: /* fsmuld */
3029                     CHECK_FPU_FEATURE(dc, FSMULD);
3030                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3031                     break;
3032                 case 0x6e: /* fdmulq */
3033                     CHECK_FPU_FEATURE(dc, FLOAT128);
3034                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3035                     break;
3036                 case 0xc4: /* fitos */
3037                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3038                     break;
3039                 case 0xc6: /* fdtos */
3040                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3041                     break;
3042                 case 0xc7: /* fqtos */
3043                     CHECK_FPU_FEATURE(dc, FLOAT128);
3044                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3045                     break;
3046                 case 0xc8: /* fitod */
3047                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3048                     break;
3049                 case 0xc9: /* fstod */
3050                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3051                     break;
3052                 case 0xcb: /* fqtod */
3053                     CHECK_FPU_FEATURE(dc, FLOAT128);
3054                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3055                     break;
3056                 case 0xcc: /* fitoq */
3057                     CHECK_FPU_FEATURE(dc, FLOAT128);
3058                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3059                     break;
3060                 case 0xcd: /* fstoq */
3061                     CHECK_FPU_FEATURE(dc, FLOAT128);
3062                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3063                     break;
3064                 case 0xce: /* fdtoq */
3065                     CHECK_FPU_FEATURE(dc, FLOAT128);
3066                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3067                     break;
3068                 case 0xd1: /* fstoi */
3069                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3070                     break;
3071                 case 0xd2: /* fdtoi */
3072                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3073                     break;
3074                 case 0xd3: /* fqtoi */
3075                     CHECK_FPU_FEATURE(dc, FLOAT128);
3076                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3077                     break;
3078 #ifdef TARGET_SPARC64
3079                 case 0x2: /* V9 fmovd */
3080                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3081                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3082                     break;
3083                 case 0x3: /* V9 fmovq */
3084                     CHECK_FPU_FEATURE(dc, FLOAT128);
3085                     gen_move_Q(rd, rs2);
3086                     break;
3087                 case 0x6: /* V9 fnegd */
3088                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3089                     break;
3090                 case 0x7: /* V9 fnegq */
3091                     CHECK_FPU_FEATURE(dc, FLOAT128);
3092                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3093                     break;
3094                 case 0xa: /* V9 fabsd */
3095                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3096                     break;
3097                 case 0xb: /* V9 fabsq */
3098                     CHECK_FPU_FEATURE(dc, FLOAT128);
3099                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3100                     break;
3101                 case 0x81: /* V9 fstox */
3102                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3103                     break;
3104                 case 0x82: /* V9 fdtox */
3105                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3106                     break;
3107                 case 0x83: /* V9 fqtox */
3108                     CHECK_FPU_FEATURE(dc, FLOAT128);
3109                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3110                     break;
3111                 case 0x84: /* V9 fxtos */
3112                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3113                     break;
3114                 case 0x88: /* V9 fxtod */
3115                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3116                     break;
3117                 case 0x8c: /* V9 fxtoq */
3118                     CHECK_FPU_FEATURE(dc, FLOAT128);
3119                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3120                     break;
3121 #endif
3122                 default:
3123                     goto illegal_insn;
3124                 }
3125             } else if (xop == 0x35) {   /* FPU Operations */
3126 #ifdef TARGET_SPARC64
3127                 int cond;
3128 #endif
3129                 if (gen_trap_ifnofpu(dc)) {
3130                     goto jmp_insn;
3131                 }
3132                 gen_op_clear_ieee_excp_and_FTT();
3133                 rs1 = GET_FIELD(insn, 13, 17);
3134                 rs2 = GET_FIELD(insn, 27, 31);
3135                 xop = GET_FIELD(insn, 18, 26);
3136                 save_state(dc);
3137
3138 #ifdef TARGET_SPARC64
3139 #define FMOVR(sz)                                                  \
3140                 do {                                               \
3141                     DisasCompare cmp;                              \
3142                     cond = GET_FIELD_SP(insn, 10, 12);             \
3143                     cpu_src1 = get_src1(dc, insn);                 \
3144                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3145                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3146                     free_compare(&cmp);                            \
3147                 } while (0)
3148
3149                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3150                     FMOVR(s);
3151                     break;
3152                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3153                     FMOVR(d);
3154                     break;
3155                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3156                     CHECK_FPU_FEATURE(dc, FLOAT128);
3157                     FMOVR(q);
3158                     break;
3159                 }
3160 #undef FMOVR
3161 #endif
3162                 switch (xop) {
3163 #ifdef TARGET_SPARC64
3164 #define FMOVCC(fcc, sz)                                                 \
3165                     do {                                                \
3166                         DisasCompare cmp;                               \
3167                         cond = GET_FIELD_SP(insn, 14, 17);              \
3168                         gen_fcompare(&cmp, fcc, cond);                  \
3169                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3170                         free_compare(&cmp);                             \
3171                     } while (0)
3172
3173                     case 0x001: /* V9 fmovscc %fcc0 */
3174                         FMOVCC(0, s);
3175                         break;
3176                     case 0x002: /* V9 fmovdcc %fcc0 */
3177                         FMOVCC(0, d);
3178                         break;
3179                     case 0x003: /* V9 fmovqcc %fcc0 */
3180                         CHECK_FPU_FEATURE(dc, FLOAT128);
3181                         FMOVCC(0, q);
3182                         break;
3183                     case 0x041: /* V9 fmovscc %fcc1 */
3184                         FMOVCC(1, s);
3185                         break;
3186                     case 0x042: /* V9 fmovdcc %fcc1 */
3187                         FMOVCC(1, d);
3188                         break;
3189                     case 0x043: /* V9 fmovqcc %fcc1 */
3190                         CHECK_FPU_FEATURE(dc, FLOAT128);
3191                         FMOVCC(1, q);
3192                         break;
3193                     case 0x081: /* V9 fmovscc %fcc2 */
3194                         FMOVCC(2, s);
3195                         break;
3196                     case 0x082: /* V9 fmovdcc %fcc2 */
3197                         FMOVCC(2, d);
3198                         break;
3199                     case 0x083: /* V9 fmovqcc %fcc2 */
3200                         CHECK_FPU_FEATURE(dc, FLOAT128);
3201                         FMOVCC(2, q);
3202                         break;
3203                     case 0x0c1: /* V9 fmovscc %fcc3 */
3204                         FMOVCC(3, s);
3205                         break;
3206                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3207                         FMOVCC(3, d);
3208                         break;
3209                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3210                         CHECK_FPU_FEATURE(dc, FLOAT128);
3211                         FMOVCC(3, q);
3212                         break;
3213 #undef FMOVCC
3214 #define FMOVCC(xcc, sz)                                                 \
3215                     do {                                                \
3216                         DisasCompare cmp;                               \
3217                         cond = GET_FIELD_SP(insn, 14, 17);              \
3218                         gen_compare(&cmp, xcc, cond, dc);               \
3219                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3220                         free_compare(&cmp);                             \
3221                     } while (0)
3222
3223                     case 0x101: /* V9 fmovscc %icc */
3224                         FMOVCC(0, s);
3225                         break;
3226                     case 0x102: /* V9 fmovdcc %icc */
3227                         FMOVCC(0, d);
3228                         break;
3229                     case 0x103: /* V9 fmovqcc %icc */
3230                         CHECK_FPU_FEATURE(dc, FLOAT128);
3231                         FMOVCC(0, q);
3232                         break;
3233                     case 0x181: /* V9 fmovscc %xcc */
3234                         FMOVCC(1, s);
3235                         break;
3236                     case 0x182: /* V9 fmovdcc %xcc */
3237                         FMOVCC(1, d);
3238                         break;
3239                     case 0x183: /* V9 fmovqcc %xcc */
3240                         CHECK_FPU_FEATURE(dc, FLOAT128);
3241                         FMOVCC(1, q);
3242                         break;
3243 #undef FMOVCC
3244 #endif
3245                     case 0x51: /* fcmps, V9 %fcc */
3246                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3247                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3248                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3249                         break;
3250                     case 0x52: /* fcmpd, V9 %fcc */
3251                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3252                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3253                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3254                         break;
3255                     case 0x53: /* fcmpq, V9 %fcc */
3256                         CHECK_FPU_FEATURE(dc, FLOAT128);
3257                         gen_op_load_fpr_QT0(QFPREG(rs1));
3258                         gen_op_load_fpr_QT1(QFPREG(rs2));
3259                         gen_op_fcmpq(rd & 3);
3260                         break;
3261                     case 0x55: /* fcmpes, V9 %fcc */
3262                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3263                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3264                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3265                         break;
3266                     case 0x56: /* fcmped, V9 %fcc */
3267                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3268                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3269                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3270                         break;
3271                     case 0x57: /* fcmpeq, V9 %fcc */
3272                         CHECK_FPU_FEATURE(dc, FLOAT128);
3273                         gen_op_load_fpr_QT0(QFPREG(rs1));
3274                         gen_op_load_fpr_QT1(QFPREG(rs2));
3275                         gen_op_fcmpeq(rd & 3);
3276                         break;
3277                     default:
3278                         goto illegal_insn;
3279                 }
3280             } else if (xop == 0x2) {
3281                 TCGv dst = gen_dest_gpr(dc, rd);
3282                 rs1 = GET_FIELD(insn, 13, 17);
3283                 if (rs1 == 0) {
3284                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3285                     if (IS_IMM) {       /* immediate */
3286                         simm = GET_FIELDs(insn, 19, 31);
3287                         tcg_gen_movi_tl(dst, simm);
3288                         gen_store_gpr(dc, rd, dst);
3289                     } else {            /* register */
3290                         rs2 = GET_FIELD(insn, 27, 31);
3291                         if (rs2 == 0) {
3292                             tcg_gen_movi_tl(dst, 0);
3293                             gen_store_gpr(dc, rd, dst);
3294                         } else {
3295                             cpu_src2 = gen_load_gpr(dc, rs2);
3296                             gen_store_gpr(dc, rd, cpu_src2);
3297                         }
3298                     }
3299                 } else {
3300                     cpu_src1 = get_src1(dc, insn);
3301                     if (IS_IMM) {       /* immediate */
3302                         simm = GET_FIELDs(insn, 19, 31);
3303                         tcg_gen_ori_tl(dst, cpu_src1, simm);
3304                         gen_store_gpr(dc, rd, dst);
3305                     } else {            /* register */
3306                         rs2 = GET_FIELD(insn, 27, 31);
3307                         if (rs2 == 0) {
3308                             /* mov shortcut:  or x, %g0, y -> mov x, y */
3309                             gen_store_gpr(dc, rd, cpu_src1);
3310                         } else {
3311                             cpu_src2 = gen_load_gpr(dc, rs2);
3312                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3313                             gen_store_gpr(dc, rd, dst);
3314                         }
3315                     }
3316                 }
3317 #ifdef TARGET_SPARC64
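                 /* V9 shift group: instruction bit 12 selects the 64-bit "x"
                    forms, which use a 6-bit shift count (mask 0x3f); the
                    32-bit forms mask the count to 5 bits, and srl/sra first
                    zero- or sign-extend the 32-bit source value.  */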
3318             } else if (xop == 0x25) { /* sll, V9 sllx */
3319                 cpu_src1 = get_src1(dc, insn);
3320                 if (IS_IMM) {   /* immediate */
3321                     simm = GET_FIELDs(insn, 20, 31);
3322                     if (insn & (1 << 12)) {
3323                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3324                     } else {
3325                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3326                     }
3327                 } else {                /* register */
3328                     rs2 = GET_FIELD(insn, 27, 31);
3329                     cpu_src2 = gen_load_gpr(dc, rs2);
3330                     cpu_tmp0 = get_temp_tl(dc);
3331                     if (insn & (1 << 12)) {
3332                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3333                     } else {
3334                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3335                     }
3336                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3337                 }
3338                 gen_store_gpr(dc, rd, cpu_dst);
3339             } else if (xop == 0x26) { /* srl, V9 srlx */
3340                 cpu_src1 = get_src1(dc, insn);
3341                 if (IS_IMM) {   /* immediate */
3342                     simm = GET_FIELDs(insn, 20, 31);
3343                     if (insn & (1 << 12)) {
3344                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3345                     } else {
3346                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3347                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3348                     }
3349                 } else {                /* register */
3350                     rs2 = GET_FIELD(insn, 27, 31);
3351                     cpu_src2 = gen_load_gpr(dc, rs2);
3352                     cpu_tmp0 = get_temp_tl(dc);
3353                     if (insn & (1 << 12)) {
3354                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3355                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3356                     } else {
3357                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3358                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3359                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3360                     }
3361                 }
3362                 gen_store_gpr(dc, rd, cpu_dst);
3363             } else if (xop == 0x27) { /* sra, V9 srax */
3364                 cpu_src1 = get_src1(dc, insn);
3365                 if (IS_IMM) {   /* immediate */
3366                     simm = GET_FIELDs(insn, 20, 31);
3367                     if (insn & (1 << 12)) {
3368                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3369                     } else {
3370                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3371                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3372                     }
3373                 } else {                /* register */
3374                     rs2 = GET_FIELD(insn, 27, 31);
3375                     cpu_src2 = gen_load_gpr(dc, rs2);
3376                     cpu_tmp0 = get_temp_tl(dc);
3377                     if (insn & (1 << 12)) {
3378                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3379                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3380                     } else {
3381                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3382                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3383                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3384                     }
3385                 }
3386                 gen_store_gpr(dc, rd, cpu_dst);
3387 #endif
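                 /* Arithmetic/logical group: for xop < 0x20 the opcode is
                    decoded modulo 0x10, with bit 4 (0x10) selecting the
                    flag-setting "cc" variant of each operation.  */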
3388             } else if (xop < 0x36) {
3389                 if (xop < 0x20) {
3390                     cpu_src1 = get_src1(dc, insn);
3391                     cpu_src2 = get_src2(dc, insn);
3392                     switch (xop & ~0x10) {
3393                     case 0x0: /* add */
3394                         if (xop & 0x10) {
3395                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3396                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3397                             dc->cc_op = CC_OP_ADD;
3398                         } else {
3399                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3400                         }
3401                         break;
3402                     case 0x1: /* and */
3403                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3404                         if (xop & 0x10) {
3405                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3406                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3407                             dc->cc_op = CC_OP_LOGIC;
3408                         }
3409                         break;
3410                     case 0x2: /* or */
3411                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3412                         if (xop & 0x10) {
3413                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3414                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3415                             dc->cc_op = CC_OP_LOGIC;
3416                         }
3417                         break;
3418                     case 0x3: /* xor */
3419                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3420                         if (xop & 0x10) {
3421                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3422                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3423                             dc->cc_op = CC_OP_LOGIC;
3424                         }
3425                         break;
3426                     case 0x4: /* sub */
3427                         if (xop & 0x10) {
3428                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3429                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3430                             dc->cc_op = CC_OP_SUB;
3431                         } else {
3432                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3433                         }
3434                         break;
3435                     case 0x5: /* andn */
3436                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3437                         if (xop & 0x10) {
3438                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3439                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3440                             dc->cc_op = CC_OP_LOGIC;
3441                         }
3442                         break;
3443                     case 0x6: /* orn */
3444                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3445                         if (xop & 0x10) {
3446                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3447                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3448                             dc->cc_op = CC_OP_LOGIC;
3449                         }
3450                         break;
3451                     case 0x7: /* xorn */
3452                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3453                         if (xop & 0x10) {
3454                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3455                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3456                             dc->cc_op = CC_OP_LOGIC;
3457                         }
3458                         break;
3459                     case 0x8: /* addx, V9 addc */
3460                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3461                                         (xop & 0x10));
3462                         break;
3463 #ifdef TARGET_SPARC64
3464                     case 0x9: /* V9 mulx */
3465                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3466                         break;
3467 #endif
3468                     case 0xa: /* umul */
3469                         CHECK_IU_FEATURE(dc, MUL);
3470                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3471                         if (xop & 0x10) {
3472                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3473                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3474                             dc->cc_op = CC_OP_LOGIC;
3475                         }
3476                         break;
3477                     case 0xb: /* smul */
3478                         CHECK_IU_FEATURE(dc, MUL);
3479                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3480                         if (xop & 0x10) {
3481                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3482                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3483                             dc->cc_op = CC_OP_LOGIC;
3484                         }
3485                         break;
3486                     case 0xc: /* subx, V9 subc */
3487                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3488                                         (xop & 0x10));
3489                         break;
3490 #ifdef TARGET_SPARC64
3491                     case 0xd: /* V9 udivx */
3492                         gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3493                         break;
3494 #endif
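                         /* The cc-setting division helpers are assumed to
                            update the condition-code state (including cc_op)
                            in env themselves, so no explicit move to
                            cpu_cc_op is emitted; only the translator's
                            cached cc_op is refreshed below.  */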
3495                     case 0xe: /* udiv */
3496                         CHECK_IU_FEATURE(dc, DIV);
3497                         if (xop & 0x10) {
3498                             gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3499                                                cpu_src2);
3500                             dc->cc_op = CC_OP_DIV;
3501                         } else {
3502                             gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3503                                             cpu_src2);
3504                         }
3505                         break;
3506                     case 0xf: /* sdiv */
3507                         CHECK_IU_FEATURE(dc, DIV);
3508                         if (xop & 0x10) {
3509                             gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3510                                                cpu_src2);
3511                             dc->cc_op = CC_OP_DIV;
3512                         } else {
3513                             gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3514                                             cpu_src2);
3515                         }
3516                         break;
3517                     default:
3518                         goto illegal_insn;
3519                     }
3520                     gen_store_gpr(dc, rd, cpu_dst);
3521                 } else {
3522                     cpu_src1 = get_src1(dc, insn);
3523                     cpu_src2 = get_src2(dc, insn);
3524                     switch (xop) {
3525                     case 0x20: /* taddcc */
3526                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3527                         gen_store_gpr(dc, rd, cpu_dst);
3528                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3529                         dc->cc_op = CC_OP_TADD;
3530                         break;
3531                     case 0x21: /* tsubcc */
3532                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3533                         gen_store_gpr(dc, rd, cpu_dst);
3534                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3535                         dc->cc_op = CC_OP_TSUB;
3536                         break;
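                         /* Like the divisions above, taddcctv/tsubcctv rely
                            on their helpers (which may trap on tag overflow)
                            to update the flag state in env; only dc->cc_op
                            is updated here.  */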
3537                     case 0x22: /* taddcctv */
3538                         gen_helper_taddcctv(cpu_dst, cpu_env,
3539                                             cpu_src1, cpu_src2);
3540                         gen_store_gpr(dc, rd, cpu_dst);
3541                         dc->cc_op = CC_OP_TADDTV;
3542                         break;
3543                     case 0x23: /* tsubcctv */
3544                         gen_helper_tsubcctv(cpu_dst, cpu_env,
3545                                             cpu_src1, cpu_src2);
3546                         gen_store_gpr(dc, rd, cpu_dst);
3547                         dc->cc_op = CC_OP_TSUBTV;
3548                         break;
3549                     case 0x24: /* mulscc */
3550                         update_psr(dc);
3551                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3552                         gen_store_gpr(dc, rd, cpu_dst);
3553                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3554                         dc->cc_op = CC_OP_ADD;
3555                         break;
3556 #ifndef TARGET_SPARC64
3557                     case 0x25:  /* sll */
3558                         if (IS_IMM) { /* immediate */
3559                             simm = GET_FIELDs(insn, 20, 31);
3560                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3561                         } else { /* register */
3562                             cpu_tmp0 = get_temp_tl(dc);
3563                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3564                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3565                         }
3566                         gen_store_gpr(dc, rd, cpu_dst);
3567                         break;
3568                     case 0x26:  /* srl */
3569                         if (IS_IMM) { /* immediate */
3570                             simm = GET_FIELDs(insn, 20, 31);
3571                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3572                         } else { /* register */
3573                             cpu_tmp0 = get_temp_tl(dc);
3574                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3575                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3576                         }
3577                         gen_store_gpr(dc, rd, cpu_dst);
3578                         break;
3579                     case 0x27:  /* sra */
3580                         if (IS_IMM) { /* immediate */
3581                             simm = GET_FIELDs(insn, 20, 31);
3582                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3583                         } else { /* register */
3584                             cpu_tmp0 = get_temp_tl(dc);
3585                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3586                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3587                         }
3588                         gen_store_gpr(dc, rd, cpu_dst);
3589                         break;
3590 #endif
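                         /* xop 0x30 is wr %y / wr %asr: rd selects the
                            ancillary state register and, per the SPARC wr
                            definition, the value written is rs1 XOR
                            (rs2 or the sign-extended immediate).  */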
3591                     case 0x30:
3592                         {
3593                             cpu_tmp0 = get_temp_tl(dc);
3594                             switch(rd) {
3595                             case 0: /* wry */
3596                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3597                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3598                                 break;
3599 #ifndef TARGET_SPARC64
3600                             case 0x01 ... 0x0f: /* undefined in the
3601                                                    SPARCv8 manual, nop
3602                                                    on the microSPARC
3603                                                    II */
3604                             case 0x10 ... 0x1f: /* implementation-dependent
3605                                                    in the SPARCv8
3606                                                    manual, nop on the
3607                                                    microSPARC II */
3608                                 if ((rd == 0x13) && (dc->def->features &
3609                                                      CPU_FEATURE_POWERDOWN)) {
3610                                     /* LEON3 power-down */
3611                                     save_state(dc);
3612                                     gen_helper_power_down(cpu_env);
3613                                 }
3614                                 break;
3615 #else
3616                             case 0x2: /* V9 wrccr */
3617                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3618                                 gen_helper_wrccr(cpu_env, cpu_tmp0);
3619                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3620                                 dc->cc_op = CC_OP_FLAGS;
3621                                 break;
3622                             case 0x3: /* V9 wrasi */
3623                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3624                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
3625                                 tcg_gen_trunc_tl_i32(cpu_asi, cpu_tmp0);
3626                                 break;
3627                             case 0x6: /* V9 wrfprs */
3628                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3629                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
3630                                 save_state(dc);
3631                                 gen_op_next_insn();
3632                                 tcg_gen_exit_tb(0);
3633                                 dc->is_br = 1;
3634                                 break;
3635                             case 0xf: /* V9 sir (software-initiated reset), nop if user */
3636 #if !defined(CONFIG_USER_ONLY)
3637                                 if (supervisor(dc)) {
3638                                     ; // XXX: SIR not implemented
3639                                 }
3640 #endif
3641                                 break;
3642                             case 0x13: /* Graphics Status */
3643                                 if (gen_trap_ifnofpu(dc)) {
3644                                     goto jmp_insn;
3645                                 }
3646                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3647                                 break;
3648                             case 0x14: /* Softint set */
3649                                 if (!supervisor(dc))
3650                                     goto illegal_insn;
3651                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3652                                 gen_helper_set_softint(cpu_env, cpu_tmp0);
3653                                 break;
3654                             case 0x15: /* Softint clear */
3655                                 if (!supervisor(dc))
3656                                     goto illegal_insn;
3657                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3658                                 gen_helper_clear_softint(cpu_env, cpu_tmp0);
3659                                 break;
3660                             case 0x16: /* Softint write */
3661                                 if (!supervisor(dc))
3662                                     goto illegal_insn;
3663                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3664                                 gen_helper_write_softint(cpu_env, cpu_tmp0);
3665                                 break;
3666                             case 0x17: /* Tick compare */
3667 #if !defined(CONFIG_USER_ONLY)
3668                                 if (!supervisor(dc))
3669                                     goto illegal_insn;
3670 #endif
3671                                 {
3672                                     TCGv_ptr r_tickptr;
3673
3674                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3675                                                    cpu_src2);
3676                                     r_tickptr = tcg_temp_new_ptr();
3677                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3678                                                    offsetof(CPUSPARCState, tick));
3679                                     gen_helper_tick_set_limit(r_tickptr,
3680                                                               cpu_tick_cmpr);
3681                                     tcg_temp_free_ptr(r_tickptr);
3682                                 }
3683                                 break;
3684                             case 0x18: /* System tick */
3685 #if !defined(CONFIG_USER_ONLY)
3686                                 if (!supervisor(dc))
3687                                     goto illegal_insn;
3688 #endif
3689                                 {
3690                                     TCGv_ptr r_tickptr;
3691
3692                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
3693                                                    cpu_src2);
3694                                     r_tickptr = tcg_temp_new_ptr();
3695                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3696                                                    offsetof(CPUSPARCState, stick));
3697                                     gen_helper_tick_set_count(r_tickptr,
3698                                                               cpu_tmp0);
3699                                     tcg_temp_free_ptr(r_tickptr);
3700                                 }
3701                                 break;
3702                             case 0x19: /* System tick compare */
3703 #if !defined(CONFIG_USER_ONLY)
3704                                 if (!supervisor(dc))
3705                                     goto illegal_insn;
3706 #endif
3707                                 {
3708                                     TCGv_ptr r_tickptr;
3709
3710                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3711                                                    cpu_src2);
3712                                     r_tickptr = tcg_temp_new_ptr();
3713                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3714                                                    offsetof(CPUSPARCState, stick));
3715                                     gen_helper_tick_set_limit(r_tickptr,
3716                                                               cpu_stick_cmpr);
3717                                     tcg_temp_free_ptr(r_tickptr);
3718                                 }
3719                                 break;
3720
3721                             case 0x10: /* Performance Control */
3722                             case 0x11: /* Performance Instrumentation
3723                                           Counter */
3724                             case 0x12: /* Dispatch Control */
3725 #endif
3726                             default:
3727                                 goto illegal_insn;
3728                             }
3729                         }
3730                         break;
3731 #if !defined(CONFIG_USER_ONLY)
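                         /* wrpsr (V9: saved/restored) is privileged; on
                            32-bit SPARC the write goes through a helper and
                            the TB is terminated, since the new PSR may
                            change CWP, PIL or the trap-enable state.  */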
3732                     case 0x31: /* wrpsr, V9 saved, restored */
3733                         {
3734                             if (!supervisor(dc))
3735                                 goto priv_insn;
3736 #ifdef TARGET_SPARC64
3737                             switch (rd) {
3738                             case 0:
3739                                 gen_helper_saved(cpu_env);
3740                                 break;
3741                             case 1:
3742                                 gen_helper_restored(cpu_env);
3743                                 break;
3744                             case 2: /* UA2005 allclean */
3745                             case 3: /* UA2005 otherw */
3746                             case 4: /* UA2005 normalw */
3747                             case 5: /* UA2005 invalw */
3748                                 // XXX: not implemented; fall through to illegal_insn
3749                             default:
3750                                 goto illegal_insn;
3751                             }
3752 #else
3753                             cpu_tmp0 = get_temp_tl(dc);
3754                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3755                             gen_helper_wrpsr(cpu_env, cpu_tmp0);
3756                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3757                             dc->cc_op = CC_OP_FLAGS;
3758                             save_state(dc);
3759                             gen_op_next_insn();
3760                             tcg_gen_exit_tb(0);
3761                             dc->is_br = 1;
3762 #endif
3763                         }
3764                         break;
3765                     case 0x32: /* wrwim, V9 wrpr */
3766                         {
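                             /* wrwim on 32-bit SPARC, wrpr on V9 where rd
                                selects the privileged register; note that
                                writes to %pstate and %tl set npc to
                                DYNAMIC_PC so translation does not continue
                                with state the write may have changed.  */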
3767                             if (!supervisor(dc))
3768                                 goto priv_insn;
3769                             cpu_tmp0 = get_temp_tl(dc);
3770                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3771 #ifdef TARGET_SPARC64
3772                             switch (rd) {
3773                             case 0: // tpc
3774                                 {
3775                                     TCGv_ptr r_tsptr;
3776
3777                                     r_tsptr = tcg_temp_new_ptr();
3778                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3779                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3780                                                   offsetof(trap_state, tpc));
3781                                     tcg_temp_free_ptr(r_tsptr);
3782                                 }
3783                                 break;
3784                             case 1: // tnpc
3785                                 {
3786                                     TCGv_ptr r_tsptr;
3787
3788                                     r_tsptr = tcg_temp_new_ptr();
3789                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3790                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3791                                                   offsetof(trap_state, tnpc));
3792                                     tcg_temp_free_ptr(r_tsptr);
3793                                 }
3794                                 break;
3795                             case 2: // tstate
3796                                 {
3797                                     TCGv_ptr r_tsptr;
3798
3799                                     r_tsptr = tcg_temp_new_ptr();
3800                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3801                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3802                                                   offsetof(trap_state,
3803                                                            tstate));
3804                                     tcg_temp_free_ptr(r_tsptr);
3805                                 }
3806                                 break;
3807                             case 3: // tt
3808                                 {
3809                                     TCGv_ptr r_tsptr;
3810
3811                                     r_tsptr = tcg_temp_new_ptr();
3812                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3813                                     tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
3814                                                     offsetof(trap_state, tt));
3815                                     tcg_temp_free_ptr(r_tsptr);
3816                                 }
3817                                 break;
3818                             case 4: // tick
3819                                 {
3820                                     TCGv_ptr r_tickptr;
3821
3822                                     r_tickptr = tcg_temp_new_ptr();
3823                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3824                                                    offsetof(CPUSPARCState, tick));
3825                                     gen_helper_tick_set_count(r_tickptr,
3826                                                               cpu_tmp0);
3827                                     tcg_temp_free_ptr(r_tickptr);
3828                                 }
3829                                 break;
3830                             case 5: // tba
3831                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3832                                 break;
3833                             case 6: // pstate
3834                                 save_state(dc);
3835                                 gen_helper_wrpstate(cpu_env, cpu_tmp0);
3836                                 dc->npc = DYNAMIC_PC;
3837                                 break;
3838                             case 7: // tl
3839                                 save_state(dc);
3840                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3841                                                offsetof(CPUSPARCState, tl));
3842                                 dc->npc = DYNAMIC_PC;
3843                                 break;
3844                             case 8: // pil
3845                                 gen_helper_wrpil(cpu_env, cpu_tmp0);
3846                                 break;
3847                             case 9: // cwp
3848                                 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3849                                 break;
3850                             case 10: // cansave
3851                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3852                                                 offsetof(CPUSPARCState,
3853                                                          cansave));
3854                                 break;
3855                             case 11: // canrestore
3856                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3857                                                 offsetof(CPUSPARCState,
3858                                                          canrestore));
3859                                 break;
3860                             case 12: // cleanwin
3861                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3862                                                 offsetof(CPUSPARCState,
3863                                                          cleanwin));
3864                                 break;
3865                             case 13: // otherwin
3866                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3867                                                 offsetof(CPUSPARCState,
3868                                                          otherwin));
3869                                 break;
3870                             case 14: // wstate
3871                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3872                                                 offsetof(CPUSPARCState,
3873                                                          wstate));
3874                                 break;
3875                             case 16: // UA2005 gl
3876                                 CHECK_IU_FEATURE(dc, GL);
3877                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3878                                                 offsetof(CPUSPARCState, gl));
3879                                 break;
3880                             case 26: // UA2005 strand status
3881                                 CHECK_IU_FEATURE(dc, HYPV);
3882                                 if (!hypervisor(dc))
3883                                     goto priv_insn;
3884                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3885                                 break;
3886                             default:
3887                                 goto illegal_insn;
3888                             }
3889 #else
3890                             tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
3891                             if (dc->def->nwindows != 32) {
3892                                 tcg_gen_andi_tl(cpu_wim, cpu_wim,
3893                                                 (1 << dc->def->nwindows) - 1);
3894                             }
3895 #endif
3896                         }
3897                         break;
3898                     case 0x33: /* wrtbr, UA2005 wrhpr */
3899                         {
3900 #ifndef TARGET_SPARC64
3901                             if (!supervisor(dc))
3902                                 goto priv_insn;
3903                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3904 #else
3905                             CHECK_IU_FEATURE(dc, HYPV);
3906                             if (!hypervisor(dc))
3907                                 goto priv_insn;
3908                             cpu_tmp0 = get_temp_tl(dc);
3909                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3910                             switch (rd) {
3911                             case 0: // hpstate
3912                                 // XXX gen_op_wrhpstate();
3913                                 save_state(dc);
3914                                 gen_op_next_insn();
3915                                 tcg_gen_exit_tb(0);
3916                                 dc->is_br = 1;
3917                                 break;
3918                             case 1: // htstate
3919                                 // XXX gen_op_wrhtstate();
3920                                 break;
3921                             case 3: // hintp
3922                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3923                                 break;
3924                             case 5: // htba
3925                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3926                                 break;
3927                             case 31: // hstick_cmpr
3928                                 {
3929                                     TCGv_ptr r_tickptr;
3930
3931                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3932                                     r_tickptr = tcg_temp_new_ptr();
3933                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3934                                                    offsetof(CPUSPARCState, hstick));
3935                                     gen_helper_tick_set_limit(r_tickptr,
3936                                                               cpu_hstick_cmpr);
3937                                     tcg_temp_free_ptr(r_tickptr);
3938                                 }
3939                                 break;
3940                             case 6: // hver is read-only; writes fall through to illegal_insn
3941                             default:
3942                                 goto illegal_insn;
3943                             }
3944 #endif
3945                         }
3946                         break;
3947 #endif
3948 #ifdef TARGET_SPARC64
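                         /* movcc: instruction bit 18 selects the integer
                            condition codes (cc field 0 = %icc, 2 = %xcc)
                            versus the floating-point %fccN codes.  */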
3949                     case 0x2c: /* V9 movcc */
3950                         {
3951                             int cc = GET_FIELD_SP(insn, 11, 12);
3952                             int cond = GET_FIELD_SP(insn, 14, 17);
3953                             DisasCompare cmp;
3954                             TCGv dst;
3955
3956                             if (insn & (1 << 18)) {
3957                                 if (cc == 0) {
3958                                     gen_compare(&cmp, 0, cond, dc);
3959                                 } else if (cc == 2) {
3960                                     gen_compare(&cmp, 1, cond, dc);
3961                                 } else {
3962                                     goto illegal_insn;
3963                                 }
3964                             } else {
3965                                 gen_fcompare(&cmp, cc, cond);
3966                             }
3967
3968                             /* The get_src2 above loaded the normal 13-bit
3969                                immediate field, not the 11-bit field we have
3970                                in movcc.  But it did handle the reg case.  */
3971                             if (IS_IMM) {
3972                                 simm = GET_FIELD_SPs(insn, 0, 10);
3973                                 tcg_gen_movi_tl(cpu_src2, simm);
3974                             }
3975
3976                             dst = gen_load_gpr(dc, rd);
3977                             tcg_gen_movcond_tl(cmp.cond, dst,
3978                                                cmp.c1, cmp.c2,
3979                                                cpu_src2, dst);
3980                             free_compare(&cmp);
3981                             gen_store_gpr(dc, rd, dst);
3982                             break;
3983                         }
3984                     case 0x2d: /* V9 sdivx */
3985                         gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3986                         gen_store_gpr(dc, rd, cpu_dst);
3987                         break;
3988                     case 0x2e: /* V9 popc */
3989                         gen_helper_popc(cpu_dst, cpu_src2);
3990                         gen_store_gpr(dc, rd, cpu_dst);
3991                         break;
3992                     case 0x2f: /* V9 movr */
3993                         {
3994                             int cond = GET_FIELD_SP(insn, 10, 12);
3995                             DisasCompare cmp;
3996                             TCGv dst;
3997
3998                             gen_compare_reg(&cmp, cond, cpu_src1);
3999
4000                             /* The get_src2 above loaded the normal 13-bit
4001                                immediate field, not the 10-bit field we have
4002                                in movr.  But it did handle the reg case.  */
4003                             if (IS_IMM) {
4004                                 simm = GET_FIELD_SPs(insn, 0, 9);
4005                                 tcg_gen_movi_tl(cpu_src2, simm);
4006                             }
4007
4008                             dst = gen_load_gpr(dc, rd);
4009                             tcg_gen_movcond_tl(cmp.cond, dst,
4010                                                cmp.c1, cmp.c2,
4011                                                cpu_src2, dst);
4012                             free_compare(&cmp);
4013                             gen_store_gpr(dc, rd, dst);
4014                             break;
4015                         }
4016 #endif
4017                     default:
4018                         goto illegal_insn;
4019                     }
4020                 }
4021             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4022 #ifdef TARGET_SPARC64
4023                 int opf = GET_FIELD_SP(insn, 5, 13);
4024                 rs1 = GET_FIELD(insn, 13, 17);
4025                 rs2 = GET_FIELD(insn, 27, 31);
4026                 if (gen_trap_ifnofpu(dc)) {
4027                     goto jmp_insn;
4028                 }
4029
4030                 switch (opf) {
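                     /* VIS edge* family: the gen_edge() flags below give the
                        element width (8/16/32), whether the integer condition
                        codes are set (the VIS I "cc" forms) and whether the
                        little-endian "l" form is used; the non-cc "n" forms
                        are VIS II.  */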
4031                 case 0x000: /* VIS I edge8cc */
4032                     CHECK_FPU_FEATURE(dc, VIS1);
4033                     cpu_src1 = gen_load_gpr(dc, rs1);
4034                     cpu_src2 = gen_load_gpr(dc, rs2);
4035                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4036                     gen_store_gpr(dc, rd, cpu_dst);
4037                     break;
4038                 case 0x001: /* VIS II edge8n */
4039                     CHECK_FPU_FEATURE(dc, VIS2);
4040                     cpu_src1 = gen_load_gpr(dc, rs1);
4041                     cpu_src2 = gen_load_gpr(dc, rs2);
4042                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4043                     gen_store_gpr(dc, rd, cpu_dst);
4044                     break;
4045                 case 0x002: /* VIS I edge8lcc */
4046                     CHECK_FPU_FEATURE(dc, VIS1);
4047                     cpu_src1 = gen_load_gpr(dc, rs1);
4048                     cpu_src2 = gen_load_gpr(dc, rs2);
4049                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4050                     gen_store_gpr(dc, rd, cpu_dst);
4051                     break;
4052                 case 0x003: /* VIS II edge8ln */
4053                     CHECK_FPU_FEATURE(dc, VIS2);
4054                     cpu_src1 = gen_load_gpr(dc, rs1);
4055                     cpu_src2 = gen_load_gpr(dc, rs2);
4056                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4057                     gen_store_gpr(dc, rd, cpu_dst);
4058                     break;
4059                 case 0x004: /* VIS I edge16cc */
4060                     CHECK_FPU_FEATURE(dc, VIS1);
4061                     cpu_src1 = gen_load_gpr(dc, rs1);
4062                     cpu_src2 = gen_load_gpr(dc, rs2);
4063                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4064                     gen_store_gpr(dc, rd, cpu_dst);
4065                     break;
4066                 case 0x005: /* VIS II edge16n */
4067                     CHECK_FPU_FEATURE(dc, VIS2);
4068                     cpu_src1 = gen_load_gpr(dc, rs1);
4069                     cpu_src2 = gen_load_gpr(dc, rs2);
4070                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4071                     gen_store_gpr(dc, rd, cpu_dst);
4072                     break;
4073                 case 0x006: /* VIS I edge16lcc */
4074                     CHECK_FPU_FEATURE(dc, VIS1);
4075                     cpu_src1 = gen_load_gpr(dc, rs1);
4076                     cpu_src2 = gen_load_gpr(dc, rs2);
4077                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4078                     gen_store_gpr(dc, rd, cpu_dst);
4079                     break;
4080                 case 0x007: /* VIS II edge16ln */
4081                     CHECK_FPU_FEATURE(dc, VIS2);
4082                     cpu_src1 = gen_load_gpr(dc, rs1);
4083                     cpu_src2 = gen_load_gpr(dc, rs2);
4084                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4085                     gen_store_gpr(dc, rd, cpu_dst);
4086                     break;
4087                 case 0x008: /* VIS I edge32cc */
4088                     CHECK_FPU_FEATURE(dc, VIS1);
4089                     cpu_src1 = gen_load_gpr(dc, rs1);
4090                     cpu_src2 = gen_load_gpr(dc, rs2);
4091                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4092                     gen_store_gpr(dc, rd, cpu_dst);
4093                     break;
4094                 case 0x009: /* VIS II edge32n */
4095                     CHECK_FPU_FEATURE(dc, VIS2);
4096                     cpu_src1 = gen_load_gpr(dc, rs1);
4097                     cpu_src2 = gen_load_gpr(dc, rs2);
4098                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4099                     gen_store_gpr(dc, rd, cpu_dst);
4100                     break;
4101                 case 0x00a: /* VIS I edge32lcc */
4102                     CHECK_FPU_FEATURE(dc, VIS1);
4103                     cpu_src1 = gen_load_gpr(dc, rs1);
4104                     cpu_src2 = gen_load_gpr(dc, rs2);
4105                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4106                     gen_store_gpr(dc, rd, cpu_dst);
4107                     break;
4108                 case 0x00b: /* VIS II edge32ln */
4109                     CHECK_FPU_FEATURE(dc, VIS2);
4110                     cpu_src1 = gen_load_gpr(dc, rs1);
4111                     cpu_src2 = gen_load_gpr(dc, rs2);
4112                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4113                     gen_store_gpr(dc, rd, cpu_dst);
4114                     break;
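                     /* array8/array16/array32 share one helper: the 16- and
                        32-bit forms simply scale the array8 result by
                        shifting it left by 1 or 2 bits.  */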
4115                 case 0x010: /* VIS I array8 */
4116                     CHECK_FPU_FEATURE(dc, VIS1);
4117                     cpu_src1 = gen_load_gpr(dc, rs1);
4118                     cpu_src2 = gen_load_gpr(dc, rs2);
4119                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4120                     gen_store_gpr(dc, rd, cpu_dst);
4121                     break;
4122                 case 0x012: /* VIS I array16 */
4123                     CHECK_FPU_FEATURE(dc, VIS1);
4124                     cpu_src1 = gen_load_gpr(dc, rs1);
4125                     cpu_src2 = gen_load_gpr(dc, rs2);
4126                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4127                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4128                     gen_store_gpr(dc, rd, cpu_dst);
4129                     break;
4130                 case 0x014: /* VIS I array32 */
4131                     CHECK_FPU_FEATURE(dc, VIS1);
4132                     cpu_src1 = gen_load_gpr(dc, rs1);
4133                     cpu_src2 = gen_load_gpr(dc, rs2);
4134                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4135                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4136                     gen_store_gpr(dc, rd, cpu_dst);
4137                     break;
4138                 case 0x018: /* VIS I alignaddr */
4139                     CHECK_FPU_FEATURE(dc, VIS1);
4140                     cpu_src1 = gen_load_gpr(dc, rs1);
4141                     cpu_src2 = gen_load_gpr(dc, rs2);
4142                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4143                     gen_store_gpr(dc, rd, cpu_dst);
4144                     break;
4145                 case 0x01a: /* VIS I alignaddrl */
4146                     CHECK_FPU_FEATURE(dc, VIS1);
4147                     cpu_src1 = gen_load_gpr(dc, rs1);
4148                     cpu_src2 = gen_load_gpr(dc, rs2);
4149                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4150                     gen_store_gpr(dc, rd, cpu_dst);
4151                     break;
4152                 case 0x019: /* VIS II bmask */
4153                     CHECK_FPU_FEATURE(dc, VIS2);
4154                     cpu_src1 = gen_load_gpr(dc, rs1);
4155                     cpu_src2 = gen_load_gpr(dc, rs2);
4156                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4157                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4158                     gen_store_gpr(dc, rd, cpu_dst);
4159                     break;
4160                 case 0x020: /* VIS I fcmple16 */
4161                     CHECK_FPU_FEATURE(dc, VIS1);
4162                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4163                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4164                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4165                     gen_store_gpr(dc, rd, cpu_dst);
4166                     break;
4167                 case 0x022: /* VIS I fcmpne16 */
4168                     CHECK_FPU_FEATURE(dc, VIS1);
4169                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4170                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4171                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4172                     gen_store_gpr(dc, rd, cpu_dst);
4173                     break;
4174                 case 0x024: /* VIS I fcmple32 */
4175                     CHECK_FPU_FEATURE(dc, VIS1);
4176                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4177                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4178                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4179                     gen_store_gpr(dc, rd, cpu_dst);
4180                     break;
4181                 case 0x026: /* VIS I fcmpne32 */
4182                     CHECK_FPU_FEATURE(dc, VIS1);
4183                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4184                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4185                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4186                     gen_store_gpr(dc, rd, cpu_dst);
4187                     break;
4188                 case 0x028: /* VIS I fcmpgt16 */
4189                     CHECK_FPU_FEATURE(dc, VIS1);
4190                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4191                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4192                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4193                     gen_store_gpr(dc, rd, cpu_dst);
4194                     break;
4195                 case 0x02a: /* VIS I fcmpeq16 */
4196                     CHECK_FPU_FEATURE(dc, VIS1);
4197                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4198                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4199                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4200                     gen_store_gpr(dc, rd, cpu_dst);
4201                     break;
4202                 case 0x02c: /* VIS I fcmpgt32 */
4203                     CHECK_FPU_FEATURE(dc, VIS1);
4204                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4205                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4206                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4207                     gen_store_gpr(dc, rd, cpu_dst);
4208                     break;
4209                 case 0x02e: /* VIS I fcmpeq32 */
4210                     CHECK_FPU_FEATURE(dc, VIS1);
4211                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4212                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4213                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4214                     gen_store_gpr(dc, rd, cpu_dst);
4215                     break;
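                     /* The partitioned multiply, pack and align ops below go
                        through the gen_ne_fop_ and gen_gsr_fop_ wrappers
                        (defined earlier in this file), which load the FP
                        operands, call the given generator and store the
                        result; the gsr variants additionally pass %gsr.  */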
4216                 case 0x031: /* VIS I fmul8x16 */
4217                     CHECK_FPU_FEATURE(dc, VIS1);
4218                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4219                     break;
4220                 case 0x033: /* VIS I fmul8x16au */
4221                     CHECK_FPU_FEATURE(dc, VIS1);
4222                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4223                     break;
4224                 case 0x035: /* VIS I fmul8x16al */
4225                     CHECK_FPU_FEATURE(dc, VIS1);
4226                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4227                     break;
4228                 case 0x036: /* VIS I fmul8sux16 */
4229                     CHECK_FPU_FEATURE(dc, VIS1);
4230                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4231                     break;
4232                 case 0x037: /* VIS I fmul8ulx16 */
4233                     CHECK_FPU_FEATURE(dc, VIS1);
4234                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4235                     break;
4236                 case 0x038: /* VIS I fmuld8sux16 */
4237                     CHECK_FPU_FEATURE(dc, VIS1);
4238                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4239                     break;
4240                 case 0x039: /* VIS I fmuld8ulx16 */
4241                     CHECK_FPU_FEATURE(dc, VIS1);
4242                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4243                     break;
4244                 case 0x03a: /* VIS I fpack32 */
4245                     CHECK_FPU_FEATURE(dc, VIS1);
4246                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4247                     break;
4248                 case 0x03b: /* VIS I fpack16 */
4249                     CHECK_FPU_FEATURE(dc, VIS1);
4250                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4251                     cpu_dst_32 = gen_dest_fpr_F(dc);
4252                     gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4253                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4254                     break;
4255                 case 0x03d: /* VIS I fpackfix */
4256                     CHECK_FPU_FEATURE(dc, VIS1);
4257                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4258                     cpu_dst_32 = gen_dest_fpr_F(dc);
4259                     gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4260                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4261                     break;
4262                 case 0x03e: /* VIS I pdist */
4263                     CHECK_FPU_FEATURE(dc, VIS1);
4264                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4265                     break;
4266                 case 0x048: /* VIS I faligndata */
4267                     CHECK_FPU_FEATURE(dc, VIS1);
4268                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4269                     break;
4270                 case 0x04b: /* VIS I fpmerge */
4271                     CHECK_FPU_FEATURE(dc, VIS1);
4272                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4273                     break;
4274                 case 0x04c: /* VIS II bshuffle */
4275                     CHECK_FPU_FEATURE(dc, VIS2);
4276                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4277                     break;
4278                 case 0x04d: /* VIS I fexpand */
4279                     CHECK_FPU_FEATURE(dc, VIS1);
4280                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4281                     break;
4282                 case 0x050: /* VIS I fpadd16 */
4283                     CHECK_FPU_FEATURE(dc, VIS1);
4284                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4285                     break;
4286                 case 0x051: /* VIS I fpadd16s */
4287                     CHECK_FPU_FEATURE(dc, VIS1);
4288                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4289                     break;
4290                 case 0x052: /* VIS I fpadd32 */
4291                     CHECK_FPU_FEATURE(dc, VIS1);
4292                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4293                     break;
4294                 case 0x053: /* VIS I fpadd32s */
4295                     CHECK_FPU_FEATURE(dc, VIS1);
4296                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4297                     break;
4298                 case 0x054: /* VIS I fpsub16 */
4299                     CHECK_FPU_FEATURE(dc, VIS1);
4300                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4301                     break;
4302                 case 0x055: /* VIS I fpsub16s */
4303                     CHECK_FPU_FEATURE(dc, VIS1);
4304                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4305                     break;
4306                 case 0x056: /* VIS I fpsub32 */
4307                     CHECK_FPU_FEATURE(dc, VIS1);
4308                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4309                     break;
4310                 case 0x057: /* VIS I fpsub32s */
4311                     CHECK_FPU_FEATURE(dc, VIS1);
4312                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4313                     break;
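                     /* The VIS logical operations below (fzero, fnor, fand,
                        fxor, ...) are emitted directly as TCG ops on the
                        64-bit double ("D") or 32-bit single ("s") FP
                        registers, with no helper calls.  */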
4314                 case 0x060: /* VIS I fzero */
4315                     CHECK_FPU_FEATURE(dc, VIS1);
4316                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4317                     tcg_gen_movi_i64(cpu_dst_64, 0);
4318                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4319                     break;
4320                 case 0x061: /* VIS I fzeros */
4321                     CHECK_FPU_FEATURE(dc, VIS1);
4322                     cpu_dst_32 = gen_dest_fpr_F(dc);
4323                     tcg_gen_movi_i32(cpu_dst_32, 0);
4324                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4325                     break;
4326                 case 0x062: /* VIS I fnor */
4327                     CHECK_FPU_FEATURE(dc, VIS1);
4328                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4329                     break;
4330                 case 0x063: /* VIS I fnors */
4331                     CHECK_FPU_FEATURE(dc, VIS1);
4332                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4333                     break;
4334                 case 0x064: /* VIS I fandnot2 */
4335                     CHECK_FPU_FEATURE(dc, VIS1);
4336                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4337                     break;
4338                 case 0x065: /* VIS I fandnot2s */
4339                     CHECK_FPU_FEATURE(dc, VIS1);
4340                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4341                     break;
4342                 case 0x066: /* VIS I fnot2 */
4343                     CHECK_FPU_FEATURE(dc, VIS1);
4344                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4345                     break;
4346                 case 0x067: /* VIS I fnot2s */
4347                     CHECK_FPU_FEATURE(dc, VIS1);
4348                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4349                     break;
4350                 case 0x068: /* VIS I fandnot1 */
4351                     CHECK_FPU_FEATURE(dc, VIS1);
4352                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4353                     break;
4354                 case 0x069: /* VIS I fandnot1s */
4355                     CHECK_FPU_FEATURE(dc, VIS1);
4356                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4357                     break;
4358                 case 0x06a: /* VIS I fnot1 */
4359                     CHECK_FPU_FEATURE(dc, VIS1);
4360                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4361                     break;
4362                 case 0x06b: /* VIS I fnot1s */
4363                     CHECK_FPU_FEATURE(dc, VIS1);
4364                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4365                     break;
4366                 case 0x06c: /* VIS I fxor */
4367                     CHECK_FPU_FEATURE(dc, VIS1);
4368                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4369                     break;
4370                 case 0x06d: /* VIS I fxors */
4371                     CHECK_FPU_FEATURE(dc, VIS1);
4372                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4373                     break;
4374                 case 0x06e: /* VIS I fnand */
4375                     CHECK_FPU_FEATURE(dc, VIS1);
4376                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4377                     break;
4378                 case 0x06f: /* VIS I fnands */
4379                     CHECK_FPU_FEATURE(dc, VIS1);
4380                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4381                     break;
4382                 case 0x070: /* VIS I fand */
4383                     CHECK_FPU_FEATURE(dc, VIS1);
4384                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4385                     break;
4386                 case 0x071: /* VIS I fands */
4387                     CHECK_FPU_FEATURE(dc, VIS1);
4388                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4389                     break;
4390                 case 0x072: /* VIS I fxnor */
4391                     CHECK_FPU_FEATURE(dc, VIS1);
4392                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4393                     break;
4394                 case 0x073: /* VIS I fxnors */
4395                     CHECK_FPU_FEATURE(dc, VIS1);
4396                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4397                     break;
4398                 case 0x074: /* VIS I fsrc1 */
4399                     CHECK_FPU_FEATURE(dc, VIS1);
4400                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4401                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4402                     break;
4403                 case 0x075: /* VIS I fsrc1s */
4404                     CHECK_FPU_FEATURE(dc, VIS1);
4405                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4406                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4407                     break;
4408                 case 0x076: /* VIS I fornot2 */
4409                     CHECK_FPU_FEATURE(dc, VIS1);
4410                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4411                     break;
4412                 case 0x077: /* VIS I fornot2s */
4413                     CHECK_FPU_FEATURE(dc, VIS1);
4414                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4415                     break;
4416                 case 0x078: /* VIS I fsrc2 */
4417                     CHECK_FPU_FEATURE(dc, VIS1);
4418                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4419                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4420                     break;
4421                 case 0x079: /* VIS I fsrc2s */
4422                     CHECK_FPU_FEATURE(dc, VIS1);
4423                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4424                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4425                     break;
4426                 case 0x07a: /* VIS I fornot1 */
4427                     CHECK_FPU_FEATURE(dc, VIS1);
4428                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4429                     break;
4430                 case 0x07b: /* VIS I fornot1s */
4431                     CHECK_FPU_FEATURE(dc, VIS1);
4432                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4433                     break;
4434                 case 0x07c: /* VIS I for */
4435                     CHECK_FPU_FEATURE(dc, VIS1);
4436                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4437                     break;
4438                 case 0x07d: /* VIS I fors */
4439                     CHECK_FPU_FEATURE(dc, VIS1);
4440                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4441                     break;
4442                 case 0x07e: /* VIS I fone */
4443                     CHECK_FPU_FEATURE(dc, VIS1);
4444                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4445                     tcg_gen_movi_i64(cpu_dst_64, -1);
4446                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4447                     break;
4448                 case 0x07f: /* VIS I fones */
4449                     CHECK_FPU_FEATURE(dc, VIS1);
4450                     cpu_dst_32 = gen_dest_fpr_F(dc);
4451                     tcg_gen_movi_i32(cpu_dst_32, -1);
4452                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4453                     break;
4454                 case 0x080: /* VIS I shutdown */
4455                 case 0x081: /* VIS II siam */
4456                     // XXX: not implemented, raise an illegal instruction trap
4457                     goto illegal_insn;
4458                 default:
4459                     goto illegal_insn;
4460                 }
4461 #else
4462                 goto ncp_insn;
4463 #endif
4464             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4465 #ifdef TARGET_SPARC64
4466                 goto illegal_insn;
4467 #else
4468                 goto ncp_insn;
4469 #endif
4470 #ifdef TARGET_SPARC64
4471             } else if (xop == 0x39) { /* V9 return */
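                /* V9 return: compute the target from rs1 (+ simm or rs2),
                   pop the register window via the restore helper, then
                   transfer control there after checking 4-byte alignment. */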
4472                 TCGv_i32 r_const;
4473
4474                 save_state(dc);
4475                 cpu_src1 = get_src1(dc, insn);
4476                 cpu_tmp0 = get_temp_tl(dc);
4477                 if (IS_IMM) {   /* immediate */
4478                     simm = GET_FIELDs(insn, 19, 31);
4479                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4480                 } else {                /* register */
4481                     rs2 = GET_FIELD(insn, 27, 31);
4482                     if (rs2) {
4483                         cpu_src2 = gen_load_gpr(dc, rs2);
4484                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4485                     } else {
4486                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4487                     }
4488                 }
4489                 gen_helper_restore(cpu_env);
4490                 gen_mov_pc_npc(dc);
4491                 r_const = tcg_const_i32(3);
4492                 gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4493                 tcg_temp_free_i32(r_const);
4494                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4495                 dc->npc = DYNAMIC_PC;
4496                 goto jmp_insn;
4497 #endif
4498             } else {
4499                 cpu_src1 = get_src1(dc, insn);
4500                 cpu_tmp0 = get_temp_tl(dc);
4501                 if (IS_IMM) {   /* immediate */
4502                     simm = GET_FIELDs(insn, 19, 31);
4503                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4504                 } else {                /* register */
4505                     rs2 = GET_FIELD(insn, 27, 31);
4506                     if (rs2) {
4507                         cpu_src2 = gen_load_gpr(dc, rs2);
4508                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4509                     } else {
4510                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4511                     }
4512                 }
4513                 switch (xop) {
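                /* jmpl: write the address of the jmpl instruction itself to
                   r[rd], then jump to the computed target through npc
                   (delayed transfer); the target must be word aligned. */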
4514                 case 0x38:      /* jmpl */
4515                     {
4516                         TCGv t;
4517                         TCGv_i32 r_const;
4518
4519                         t = gen_dest_gpr(dc, rd);
4520                         tcg_gen_movi_tl(t, dc->pc);
4521                         gen_store_gpr(dc, rd, t);
4522                         gen_mov_pc_npc(dc);
4523                         r_const = tcg_const_i32(3);
4524                         gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4525                         tcg_temp_free_i32(r_const);
4526                         gen_address_mask(dc, cpu_tmp0);
4527                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4528                         dc->npc = DYNAMIC_PC;
4529                     }
4530                     goto jmp_insn;
4531 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4532                 case 0x39:      /* rett, V9 return */
4533                     {
4534                         TCGv_i32 r_const;
4535
4536                         if (!supervisor(dc))
4537                             goto priv_insn;
4538                         gen_mov_pc_npc(dc);
4539                         r_const = tcg_const_i32(3);
4540                         gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4541                         tcg_temp_free_i32(r_const);
4542                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4543                         dc->npc = DYNAMIC_PC;
4544                         gen_helper_rett(cpu_env);
4545                     }
4546                     goto jmp_insn;
4547 #endif
4548                 case 0x3b: /* flush */
4549                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4550                         goto unimp_flush;
4551                     /* nop */
4552                     break;
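                /* save/restore: the helpers rotate the register window,
                   raising the appropriate window trap when necessary; the
                   rs1 + rs2/simm sum computed above is then written to r[rd]
                   as seen from the new window. */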
4553                 case 0x3c:      /* save */
4554                     save_state(dc);
4555                     gen_helper_save(cpu_env);
4556                     gen_store_gpr(dc, rd, cpu_tmp0);
4557                     break;
4558                 case 0x3d:      /* restore */
4559                     save_state(dc);
4560                     gen_helper_restore(cpu_env);
4561                     gen_store_gpr(dc, rd, cpu_tmp0);
4562                     break;
4563 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4564                 case 0x3e:      /* V9 done/retry */
4565                     {
4566                         switch (rd) {
4567                         case 0:
4568                             if (!supervisor(dc))
4569                                 goto priv_insn;
4570                             dc->npc = DYNAMIC_PC;
4571                             dc->pc = DYNAMIC_PC;
4572                             gen_helper_done(cpu_env);
4573                             goto jmp_insn;
4574                         case 1:
4575                             if (!supervisor(dc))
4576                                 goto priv_insn;
4577                             dc->npc = DYNAMIC_PC;
4578                             dc->pc = DYNAMIC_PC;
4579                             gen_helper_retry(cpu_env);
4580                             goto jmp_insn;
4581                         default:
4582                             goto illegal_insn;
4583                         }
4584                     }
4585                     break;
4586 #endif
4587                 default:
4588                     goto illegal_insn;
4589                 }
4590             }
4591             break;
4592         }
4593         break;
4594     case 3:                     /* load/store instructions */
4595         {
4596             unsigned int xop = GET_FIELD(insn, 7, 12);
4597             /* ??? gen_address_mask prevents us from using a source
4598                register directly.  Always generate a temporary.  */
4599             TCGv cpu_addr = get_temp_tl(dc);
4600
4601             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
4602             if (xop == 0x3c || xop == 0x3e) {
4603                 /* V9 casa/casxa : no offset */
4604             } else if (IS_IMM) {     /* immediate */
4605                 simm = GET_FIELDs(insn, 19, 31);
4606                 if (simm != 0) {
4607                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
4608                 }
4609             } else {            /* register */
4610                 rs2 = GET_FIELD(insn, 27, 31);
4611                 if (rs2 != 0) {
4612                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
4613                 }
4614             }
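            /* Integer loads and load-like ops (including ldstub/swap and,
               on V9, the ASI FP loads and prefetches that bypass the final
               register write via skip_move). */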
4615             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4616                 (xop > 0x17 && xop <= 0x1d ) ||
4617                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4618                 TCGv cpu_val = gen_dest_gpr(dc, rd);
4619
4620                 switch (xop) {
4621                 case 0x0:       /* ld, V9 lduw, load unsigned word */
4622                     gen_address_mask(dc, cpu_addr);
4623                     tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4624                     break;
4625                 case 0x1:       /* ldub, load unsigned byte */
4626                     gen_address_mask(dc, cpu_addr);
4627                     tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4628                     break;
4629                 case 0x2:       /* lduh, load unsigned halfword */
4630                     gen_address_mask(dc, cpu_addr);
4631                     tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4632                     break;
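                /* ldd: rd must be even; after the 64-bit load, the word at
                   the lower address ends up in r[rd] and the word at
                   addr + 4 in r[rd + 1]. */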
4633                 case 0x3:       /* ldd, load double word */
4634                     if (rd & 1)
4635                         goto illegal_insn;
4636                     else {
4637                         TCGv_i32 r_const;
4638                         TCGv_i64 t64;
4639
4640                         save_state(dc);
4641                         r_const = tcg_const_i32(7);
4642                         /* XXX remove alignment check */
4643                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
4644                         tcg_temp_free_i32(r_const);
4645                         gen_address_mask(dc, cpu_addr);
4646                         t64 = tcg_temp_new_i64();
4647                         tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4648                         tcg_gen_trunc_i64_tl(cpu_val, t64);
4649                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
4650                         gen_store_gpr(dc, rd + 1, cpu_val);
4651                         tcg_gen_shri_i64(t64, t64, 32);
4652                         tcg_gen_trunc_i64_tl(cpu_val, t64);
4653                         tcg_temp_free_i64(t64);
4654                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
4655                     }
4656                     break;
4657                 case 0x9:       /* ldsb, load signed byte */
4658                     gen_address_mask(dc, cpu_addr);
4659                     tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4660                     break;
4661                 case 0xa:       /* ldsh, load signed halfword */
4662                     gen_address_mask(dc, cpu_addr);
4663                     tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4664                     break;
4665                 case 0xd:       /* ldstub -- XXX: should be done atomically */
4666                     {
4667                         TCGv r_const;
4668
4669                         gen_address_mask(dc, cpu_addr);
4670                         tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4671                         r_const = tcg_const_tl(0xff);
4672                         tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4673                         tcg_temp_free(r_const);
4674                     }
4675                     break;
4676                 case 0x0f:
4677                     /* swap: swap register with memory.  XXX: should also be done atomically */
4678                     {
4679                         TCGv t0 = get_temp_tl(dc);
4680                         CHECK_IU_FEATURE(dc, SWAP);
4681                         cpu_src1 = gen_load_gpr(dc, rd);
4682                         gen_address_mask(dc, cpu_addr);
4683                         tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4684                         tcg_gen_qemu_st32(cpu_src1, cpu_addr, dc->mem_idx);
4685                         tcg_gen_mov_tl(cpu_val, t0);
4686                     }
4687                     break;
4688 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4689                 case 0x10:      /* lda, V9 lduwa, load word alternate */
4690 #ifndef TARGET_SPARC64
4691                     if (IS_IMM)
4692                         goto illegal_insn;
4693                     if (!supervisor(dc))
4694                         goto priv_insn;
4695 #endif
4696                     save_state(dc);
4697                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4698                     break;
4699                 case 0x11:      /* lduba, load unsigned byte alternate */
4700 #ifndef TARGET_SPARC64
4701                     if (IS_IMM)
4702                         goto illegal_insn;
4703                     if (!supervisor(dc))
4704                         goto priv_insn;
4705 #endif
4706                     save_state(dc);
4707                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4708                     break;
4709                 case 0x12:      /* lduha, load unsigned halfword alternate */
4710 #ifndef TARGET_SPARC64
4711                     if (IS_IMM)
4712                         goto illegal_insn;
4713                     if (!supervisor(dc))
4714                         goto priv_insn;
4715 #endif
4716                     save_state(dc);
4717                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4718                     break;
4719                 case 0x13:      /* ldda, load double word alternate */
4720 #ifndef TARGET_SPARC64
4721                     if (IS_IMM)
4722                         goto illegal_insn;
4723                     if (!supervisor(dc))
4724                         goto priv_insn;
4725 #endif
4726                     if (rd & 1)
4727                         goto illegal_insn;
4728                     save_state(dc);
4729                     gen_ldda_asi(dc, cpu_val, cpu_addr, insn, rd);
4730                     goto skip_move;
4731                 case 0x19:      /* ldsba, load signed byte alternate */
4732 #ifndef TARGET_SPARC64
4733                     if (IS_IMM)
4734                         goto illegal_insn;
4735                     if (!supervisor(dc))
4736                         goto priv_insn;
4737 #endif
4738                     save_state(dc);
4739                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4740                     break;
4741                 case 0x1a:      /* ldsha, load signed halfword alternate */
4742 #ifndef TARGET_SPARC64
4743                     if (IS_IMM)
4744                         goto illegal_insn;
4745                     if (!supervisor(dc))
4746                         goto priv_insn;
4747 #endif
4748                     save_state(dc);
4749                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4750                     break;
4751                 case 0x1d:      /* ldstuba -- XXX: should be done atomically */
4752 #ifndef TARGET_SPARC64
4753                     if (IS_IMM)
4754                         goto illegal_insn;
4755                     if (!supervisor(dc))
4756                         goto priv_insn;
4757 #endif
4758                     save_state(dc);
4759                     gen_ldstub_asi(cpu_val, cpu_addr, insn);
4760                     break;
4761                 case 0x1f:      /* swapa: swap register with alternate-space
4762                                    memory.  XXX: should also be done atomically */
4763                     CHECK_IU_FEATURE(dc, SWAP);
4764 #ifndef TARGET_SPARC64
4765                     if (IS_IMM)
4766                         goto illegal_insn;
4767                     if (!supervisor(dc))
4768                         goto priv_insn;
4769 #endif
4770                     save_state(dc);
4771                     cpu_src1 = gen_load_gpr(dc, rd);
4772                     gen_swap_asi(cpu_val, cpu_src1, cpu_addr, insn);
4773                     break;
4774
4775 #ifndef TARGET_SPARC64
4776                 case 0x30: /* ldc */
4777                 case 0x31: /* ldcsr */
4778                 case 0x33: /* lddc */
4779                     goto ncp_insn;
4780 #endif
4781 #endif
4782 #ifdef TARGET_SPARC64
4783                 case 0x08: /* V9 ldsw */
4784                     gen_address_mask(dc, cpu_addr);
4785                     tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4786                     break;
4787                 case 0x0b: /* V9 ldx */
4788                     gen_address_mask(dc, cpu_addr);
4789                     tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4790                     break;
4791                 case 0x18: /* V9 ldswa */
4792                     save_state(dc);
4793                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4794                     break;
4795                 case 0x1b: /* V9 ldxa */
4796                     save_state(dc);
4797                     gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4798                     break;
4799                 case 0x2d: /* V9 prefetch, no effect */
4800                     goto skip_move;
4801                 case 0x30: /* V9 ldfa */
4802                     if (gen_trap_ifnofpu(dc)) {
4803                         goto jmp_insn;
4804                     }
4805                     save_state(dc);
4806                     gen_ldf_asi(cpu_addr, insn, 4, rd);
4807                     gen_update_fprs_dirty(rd);
4808                     goto skip_move;
4809                 case 0x33: /* V9 lddfa */
4810                     if (gen_trap_ifnofpu(dc)) {
4811                         goto jmp_insn;
4812                     }
4813                     save_state(dc);
4814                     gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4815                     gen_update_fprs_dirty(DFPREG(rd));
4816                     goto skip_move;
4817                 case 0x3d: /* V9 prefetcha, no effect */
4818                     goto skip_move;
4819                 case 0x32: /* V9 ldqfa */
4820                     CHECK_FPU_FEATURE(dc, FLOAT128);
4821                     if (gen_trap_ifnofpu(dc)) {
4822                         goto jmp_insn;
4823                     }
4824                     save_state(dc);
4825                     gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4826                     gen_update_fprs_dirty(QFPREG(rd));
4827                     goto skip_move;
4828 #endif
4829                 default:
4830                     goto illegal_insn;
4831                 }
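                /* Write the loaded value to r[rd].  Cases with no integer
                   destination, or which store their results themselves
                   (ldda, prefetch, the ASI FP loads), jump to skip_move. */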
4832                 gen_store_gpr(dc, rd, cpu_val);
4833 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4834             skip_move: ;
4835 #endif
4836             } else if (xop >= 0x20 && xop < 0x24) {
4837                 TCGv t0;
4838
4839                 if (gen_trap_ifnofpu(dc)) {
4840                     goto jmp_insn;
4841                 }
4842                 save_state(dc);
4843                 switch (xop) {
4844                 case 0x20:      /* ldf, load fpreg */
4845                     gen_address_mask(dc, cpu_addr);
4846                     t0 = get_temp_tl(dc);
4847                     tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4848                     cpu_dst_32 = gen_dest_fpr_F(dc);
4849                     tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4850                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4851                     break;
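                /* ldfsr / V9 ldxfsr: on SPARC64, rd == 1 selects the 64-bit
                   ldxfsr form; otherwise only the low 32 bits of the FSR
                   are loaded. */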
4852                 case 0x21:      /* ldfsr, V9 ldxfsr */
4853 #ifdef TARGET_SPARC64
4854                     gen_address_mask(dc, cpu_addr);
4855                     if (rd == 1) {
4856                         TCGv_i64 t64 = tcg_temp_new_i64();
4857                         tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4858                         gen_helper_ldxfsr(cpu_env, t64);
4859                         tcg_temp_free_i64(t64);
4860                         break;
4861                     }
4862 #endif
4863                     cpu_dst_32 = get_temp_i32(dc);
4864                     t0 = get_temp_tl(dc);
4865                     tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4866                     tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4867                     gen_helper_ldfsr(cpu_env, cpu_dst_32);
4868                     break;
4869                 case 0x22:      /* ldqf, load quad fpreg */
4870                     {
4871                         TCGv_i32 r_const;
4872
4873                         CHECK_FPU_FEATURE(dc, FLOAT128);
4874                         r_const = tcg_const_i32(dc->mem_idx);
4875                         gen_address_mask(dc, cpu_addr);
4876                         gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4877                         tcg_temp_free_i32(r_const);
4878                         gen_op_store_QT0_fpr(QFPREG(rd));
4879                         gen_update_fprs_dirty(QFPREG(rd));
4880                     }
4881                     break;
4882                 case 0x23:      /* lddf, load double fpreg */
4883                     gen_address_mask(dc, cpu_addr);
4884                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4885                     tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4886                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4887                     break;
4888                 default:
4889                     goto illegal_insn;
4890                 }
4891             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4892                        xop == 0xe || xop == 0x1e) {
4893                 TCGv cpu_val = gen_load_gpr(dc, rd);
4894
4895                 switch (xop) {
4896                 case 0x4: /* st, store word */
4897                     gen_address_mask(dc, cpu_addr);
4898                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4899                     break;
4900                 case 0x5: /* stb, store byte */
4901                     gen_address_mask(dc, cpu_addr);
4902                     tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4903                     break;
4904                 case 0x6: /* sth, store halfword */
4905                     gen_address_mask(dc, cpu_addr);
4906                     tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4907                     break;
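                /* std: rd must be even; r[rd] supplies the word stored at
                   the lower address and r[rd + 1] the word stored at
                   addr + 4. */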
4908                 case 0x7: /* std, store double word */
4909                     if (rd & 1)
4910                         goto illegal_insn;
4911                     else {
4912                         TCGv_i32 r_const;
4913                         TCGv_i64 t64;
4914                         TCGv lo;
4915
4916                         save_state(dc);
4917                         gen_address_mask(dc, cpu_addr);
4918                         r_const = tcg_const_i32(7);
4919                         /* XXX remove alignment check */
4920                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
4921                         tcg_temp_free_i32(r_const);
4922                         lo = gen_load_gpr(dc, rd + 1);
4923
4924                         t64 = tcg_temp_new_i64();
4925                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
4926                         tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
4927                         tcg_temp_free_i64(t64);
4928                     }
4929                     break;
4930 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4931                 case 0x14: /* sta, V9 stwa, store word alternate */
4932 #ifndef TARGET_SPARC64
4933                     if (IS_IMM)
4934                         goto illegal_insn;
4935                     if (!supervisor(dc))
4936                         goto priv_insn;
4937 #endif
4938                     save_state(dc);
4939                     gen_st_asi(cpu_val, cpu_addr, insn, 4);
4940                     dc->npc = DYNAMIC_PC;
4941                     break;
4942                 case 0x15: /* stba, store byte alternate */
4943 #ifndef TARGET_SPARC64
4944                     if (IS_IMM)
4945                         goto illegal_insn;
4946                     if (!supervisor(dc))
4947                         goto priv_insn;
4948 #endif
4949                     save_state(dc);
4950                     gen_st_asi(cpu_val, cpu_addr, insn, 1);
4951                     dc->npc = DYNAMIC_PC;
4952                     break;
4953                 case 0x16: /* stha, store halfword alternate */
4954 #ifndef TARGET_SPARC64
4955                     if (IS_IMM)
4956                         goto illegal_insn;
4957                     if (!supervisor(dc))
4958                         goto priv_insn;
4959 #endif
4960                     save_state(dc);
4961                     gen_st_asi(cpu_val, cpu_addr, insn, 2);
4962                     dc->npc = DYNAMIC_PC;
4963                     break;
4964                 case 0x17: /* stda, store double word alternate */
4965 #ifndef TARGET_SPARC64
4966                     if (IS_IMM)
4967                         goto illegal_insn;
4968                     if (!supervisor(dc))
4969                         goto priv_insn;
4970 #endif
4971                     if (rd & 1)
4972                         goto illegal_insn;
4973                     else {
4974                         save_state(dc);
4975                         gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
4976                     }
4977                     break;
4978 #endif
4979 #ifdef TARGET_SPARC64
4980                 case 0x0e: /* V9 stx */
4981                     gen_address_mask(dc, cpu_addr);
4982                     tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4983                     break;
4984                 case 0x1e: /* V9 stxa */
4985                     save_state(dc);
4986                     gen_st_asi(cpu_val, cpu_addr, insn, 8);
4987                     dc->npc = DYNAMIC_PC;
4988                     break;
4989 #endif
4990                 default:
4991                     goto illegal_insn;
4992                 }
4993             } else if (xop > 0x23 && xop < 0x28) {
4994                 if (gen_trap_ifnofpu(dc)) {
4995                     goto jmp_insn;
4996                 }
4997                 save_state(dc);
4998                 switch (xop) {
4999                 case 0x24: /* stf, store fpreg */
5000                     {
5001                         TCGv t = get_temp_tl(dc);
5002                         gen_address_mask(dc, cpu_addr);
5003                         cpu_src1_32 = gen_load_fpr_F(dc, rd);
5004                         tcg_gen_ext_i32_tl(t, cpu_src1_32);
5005                         tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5006                     }
5007                     break;
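                /* stfsr / V9 stxfsr: on SPARC64, rd == 1 selects the 64-bit
                   stxfsr form; plain stfsr stores only the low 32 bits of
                   the FSR. */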
5008                 case 0x25: /* stfsr, V9 stxfsr */
5009                     {
5010                         TCGv t = get_temp_tl(dc);
5011
5012                         tcg_gen_ld_tl(t, cpu_env, offsetof(CPUSPARCState, fsr));
5013 #ifdef TARGET_SPARC64
5014                         gen_address_mask(dc, cpu_addr);
5015                         if (rd == 1) {
5016                             tcg_gen_qemu_st64(t, cpu_addr, dc->mem_idx);
5017                             break;
5018                         }
5019 #endif
5020                         tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5021                     }
5022                     break;
5023                 case 0x26:
5024 #ifdef TARGET_SPARC64
5025                     /* V9 stqf, store quad fpreg */
5026                     {
5027                         TCGv_i32 r_const;
5028
5029                         CHECK_FPU_FEATURE(dc, FLOAT128);
5030                         gen_op_load_fpr_QT0(QFPREG(rd));
5031                         r_const = tcg_const_i32(dc->mem_idx);
5032                         gen_address_mask(dc, cpu_addr);
5033                         gen_helper_stqf(cpu_env, cpu_addr, r_const);
5034                         tcg_temp_free_i32(r_const);
5035                     }
5036                     break;
5037 #else /* !TARGET_SPARC64 */
5038                     /* stdfq, store floating point queue */
5039 #if defined(CONFIG_USER_ONLY)
5040                     goto illegal_insn;
5041 #else
5042                     if (!supervisor(dc))
5043                         goto priv_insn;
5044                     if (gen_trap_ifnofpu(dc)) {
5045                         goto jmp_insn;
5046                     }
5047                     goto nfq_insn;
5048 #endif
5049 #endif
5050                 case 0x27: /* stdf, store double fpreg */
5051                     gen_address_mask(dc, cpu_addr);
5052                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5053                     tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5054                     break;
5055                 default:
5056                     goto illegal_insn;
5057                 }
5058             } else if (xop > 0x33 && xop < 0x3f) {
5059                 save_state(dc);
5060                 switch (xop) {
5061 #ifdef TARGET_SPARC64
5062                 case 0x34: /* V9 stfa */
5063                     if (gen_trap_ifnofpu(dc)) {
5064                         goto jmp_insn;
5065                     }
5066                     gen_stf_asi(cpu_addr, insn, 4, rd);
5067                     break;
5068                 case 0x36: /* V9 stqfa */
5069                     {
5070                         TCGv_i32 r_const;
5071
5072                         CHECK_FPU_FEATURE(dc, FLOAT128);
5073                         if (gen_trap_ifnofpu(dc)) {
5074                             goto jmp_insn;
5075                         }
5076                         r_const = tcg_const_i32(7);
5077                         gen_helper_check_align(cpu_env, cpu_addr, r_const);
5078                         tcg_temp_free_i32(r_const);
5079                         gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5080                     }
5081                     break;
5082                 case 0x37: /* V9 stdfa */
5083                     if (gen_trap_ifnofpu(dc)) {
5084                         goto jmp_insn;
5085                     }
5086                     gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5087                     break;
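                /* casxa (and casa below): compare-and-swap.  The doubleword
                   or word at [rs1] in the specified ASI is compared with
                   r[rs2]; if equal it is replaced by r[rd], and r[rd] always
                   receives the old memory value. */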
5088                 case 0x3e: /* V9 casxa */
5089                     rs2 = GET_FIELD(insn, 27, 31);
5090                     cpu_src2 = gen_load_gpr(dc, rs2);
5091                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5092                     break;
5093 #else
5094                 case 0x34: /* stc */
5095                 case 0x35: /* stcsr */
5096                 case 0x36: /* stdcq */
5097                 case 0x37: /* stdc */
5098                     goto ncp_insn;
5099 #endif
5100 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5101                 case 0x3c: /* V9 or LEON3 casa */
5102 #ifndef TARGET_SPARC64
5103                     CHECK_IU_FEATURE(dc, CASA);
5104                     if (IS_IMM) {
5105                         goto illegal_insn;
5106                     }
5107                     if (!supervisor(dc)) {
5108                         goto priv_insn;
5109                     }
5110 #endif
5111                     rs2 = GET_FIELD(insn, 27, 31);
5112                     cpu_src2 = gen_load_gpr(dc, rs2);
5113                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5114                     break;
5115 #endif
5116                 default:
5117                     goto illegal_insn;
5118                 }
5119             } else {
5120                 goto illegal_insn;
5121             }
5122         }
5123         break;
5124     }
5125     /* default case for non jump instructions */
5126     if (dc->npc == DYNAMIC_PC) {
5127         dc->pc = DYNAMIC_PC;
5128         gen_op_next_insn();
5129     } else if (dc->npc == JUMP_PC) {
5130         /* we can do a static jump */
5131         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5132         dc->is_br = 1;
5133     } else {
5134         dc->pc = dc->npc;
5135         dc->npc = dc->npc + 4;
5136     }
5137  jmp_insn:
5138     goto egress;
5139  illegal_insn:
5140     {
5141         TCGv_i32 r_const;
5142
5143         save_state(dc);
5144         r_const = tcg_const_i32(TT_ILL_INSN);
5145         gen_helper_raise_exception(cpu_env, r_const);
5146         tcg_temp_free_i32(r_const);
5147         dc->is_br = 1;
5148     }
5149     goto egress;
5150  unimp_flush:
5151     {
5152         TCGv_i32 r_const;
5153
5154         save_state(dc);
5155         r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5156         gen_helper_raise_exception(cpu_env, r_const);
5157         tcg_temp_free_i32(r_const);
5158         dc->is_br = 1;
5159     }
5160     goto egress;
5161 #if !defined(CONFIG_USER_ONLY)
5162  priv_insn:
5163     {
5164         TCGv_i32 r_const;
5165
5166         save_state(dc);
5167         r_const = tcg_const_i32(TT_PRIV_INSN);
5168         gen_helper_raise_exception(cpu_env, r_const);
5169         tcg_temp_free_i32(r_const);
5170         dc->is_br = 1;
5171     }
5172     goto egress;
5173 #endif
5174  nfpu_insn:
5175     save_state(dc);
5176     gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5177     dc->is_br = 1;
5178     goto egress;
5179 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5180  nfq_insn:
5181     save_state(dc);
5182     gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5183     dc->is_br = 1;
5184     goto egress;
5185 #endif
5186 #ifndef TARGET_SPARC64
5187  ncp_insn:
5188     {
5189         TCGv_i32 r_const;
5190
5191         save_state(dc);
5192         r_const = tcg_const_i32(TT_NCP_INSN);
5193         gen_helper_raise_exception(cpu_env, r_const);
5194         tcg_temp_free_i32(r_const);
5195         dc->is_br = 1;
5196     }
5197     goto egress;
5198 #endif
5199  egress:
5200     if (dc->n_t32 != 0) {
5201         int i;
5202         for (i = dc->n_t32 - 1; i >= 0; --i) {
5203             tcg_temp_free_i32(dc->t32[i]);
5204         }
5205         dc->n_t32 = 0;
5206     }
5207     if (dc->n_ttl != 0) {
5208         int i;
5209         for (i = dc->n_ttl - 1; i >= 0; --i) {
5210             tcg_temp_free(dc->ttl[i]);
5211         }
5212         dc->n_ttl = 0;
5213     }
5214 }
5215
5216 static inline void gen_intermediate_code_internal(SPARCCPU *cpu,
5217                                                   TranslationBlock *tb,
5218                                                   bool spc)
5219 {
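    /* Translate one TranslationBlock.  When 'spc' is true we run in
       "search PC" mode: per-instruction PC, NPC and icount values are
       recorded in the gen_opc_* arrays so that restore_state_to_opc()
       can recover the guest PC/NPC of a faulting instruction. */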
5220     CPUState *cs = CPU(cpu);
5221     CPUSPARCState *env = &cpu->env;
5222     target_ulong pc_start, last_pc;
5223     DisasContext dc1, *dc = &dc1;
5224     CPUBreakpoint *bp;
5225     int j, lj = -1;
5226     int num_insns;
5227     int max_insns;
5228     unsigned int insn;
5229
5230     memset(dc, 0, sizeof(DisasContext));
5231     dc->tb = tb;
5232     pc_start = tb->pc;
5233     dc->pc = pc_start;
5234     last_pc = dc->pc;
5235     dc->npc = (target_ulong) tb->cs_base;
5236     dc->cc_op = CC_OP_DYNAMIC;
5237     dc->mem_idx = cpu_mmu_index(env);
5238     dc->def = env->def;
5239     dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5240     dc->address_mask_32bit = tb_am_enabled(tb->flags);
5241     dc->singlestep = (cs->singlestep_enabled || singlestep);
5242
5243     num_insns = 0;
5244     max_insns = tb->cflags & CF_COUNT_MASK;
5245     if (max_insns == 0)
5246         max_insns = CF_COUNT_MASK;
5247     gen_tb_start(tb);
5248     do {
5249         if (unlikely(!QTAILQ_EMPTY(&cs->breakpoints))) {
5250             QTAILQ_FOREACH(bp, &cs->breakpoints, entry) {
5251                 if (bp->pc == dc->pc) {
5252                     if (dc->pc != pc_start)
5253                         save_state(dc);
5254                     gen_helper_debug(cpu_env);
5255                     tcg_gen_exit_tb(0);
5256                     dc->is_br = 1;
5257                     goto exit_gen_loop;
5258                 }
5259             }
5260         }
5261         if (spc) {
5262             qemu_log("Search PC...\n");
5263             j = tcg_op_buf_count();
5264             if (lj < j) {
5265                 lj++;
5266                 while (lj < j)
5267                     tcg_ctx.gen_opc_instr_start[lj++] = 0;
5268                 tcg_ctx.gen_opc_pc[lj] = dc->pc;
5269                 gen_opc_npc[lj] = dc->npc;
5270                 tcg_ctx.gen_opc_instr_start[lj] = 1;
5271                 tcg_ctx.gen_opc_icount[lj] = num_insns;
5272             }
5273         }
5274         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5275             gen_io_start();
5276         last_pc = dc->pc;
5277         insn = cpu_ldl_code(env, dc->pc);
5278
5279         disas_sparc_insn(dc, insn);
5280         num_insns++;
5281
5282         if (dc->is_br)
5283             break;
5284         /* if the next PC is different, we abort now */
5285         if (dc->pc != (last_pc + 4))
5286         /* if the PC did not simply advance to the next instruction
5287            (a branch was generated or the PC became dynamic), stop now */
5287         /* if we reach a page boundary, we stop generation so that the
5288            PC of a TT_TFAULT exception is always in the right page */
5289         if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5290             break;
5291         /* if single step mode, we generate only one instruction and
5292            generate an exception */
5293         if (dc->singlestep) {
5294             break;
5295         }
5296     } while (!tcg_op_buf_full() &&
5297              (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5298              num_insns < max_insns);
5299
5300  exit_gen_loop:
5301     if (tb->cflags & CF_LAST_IO) {
5302         gen_io_end();
5303     }
5304     if (!dc->is_br) {
5305         if (dc->pc != DYNAMIC_PC &&
5306             (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5307             /* static PC and NPC: we can use direct chaining */
5308             gen_goto_tb(dc, 0, dc->pc, dc->npc);
5309         } else {
5310             if (dc->pc != DYNAMIC_PC) {
5311                 tcg_gen_movi_tl(cpu_pc, dc->pc);
5312             }
5313             save_npc(dc);
5314             tcg_gen_exit_tb(0);
5315         }
5316     }
5317     gen_tb_end(tb, num_insns);
5318
5319     if (spc) {
5320         j = tcg_op_buf_count();
5321         lj++;
5322         while (lj <= j)
5323             tcg_ctx.gen_opc_instr_start[lj++] = 0;
5324 #if 0
5325         log_page_dump();
5326 #endif
5327         gen_opc_jump_pc[0] = dc->jump_pc[0];
5328         gen_opc_jump_pc[1] = dc->jump_pc[1];
5329     } else {
5330         tb->size = last_pc + 4 - pc_start;
5331         tb->icount = num_insns;
5332     }
5333 #ifdef DEBUG_DISAS
5334     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5335         qemu_log("--------------\n");
5336         qemu_log("IN: %s\n", lookup_symbol(pc_start));
5337         log_target_disas(cs, pc_start, last_pc + 4 - pc_start, 0);
5338         qemu_log("\n");
5339     }
5340 #endif
5341 }
5342
5343 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5344 {
5345     gen_intermediate_code_internal(sparc_env_get_cpu(env), tb, false);
5346 }
5347
5348 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5349 {
5350     gen_intermediate_code_internal(sparc_env_get_cpu(env), tb, true);
5351 }
5352
5353 void gen_intermediate_code_init(CPUSPARCState *env)
5354 {
5355     unsigned int i;
5356     static int inited;
5357     static const char * const gregnames[8] = {
5358         NULL, // g0 not used
5359         "g1",
5360         "g2",
5361         "g3",
5362         "g4",
5363         "g5",
5364         "g6",
5365         "g7",
5366     };
5367     static const char * const fregnames[32] = {
5368         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5369         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5370         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5371         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5372     };
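    /* Only even-numbered single-precision names are listed: the FP state
       is modelled as TARGET_DPREGS 64-bit registers, each backing a pair
       of 32-bit registers. */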
5373
5374     /* init various static tables */
5375     if (!inited) {
5376         inited = 1;
5377
5378         cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5379         cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5380                                              offsetof(CPUSPARCState, regwptr),
5381                                              "regwptr");
5382 #ifdef TARGET_SPARC64
5383         cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
5384                                          "xcc");
5385         cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
5386                                          "asi");
5387         cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
5388                                           "fprs");
5389         cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
5390                                      "gsr");
5391         cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5392                                            offsetof(CPUSPARCState, tick_cmpr),
5393                                            "tick_cmpr");
5394         cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5395                                             offsetof(CPUSPARCState, stick_cmpr),
5396                                             "stick_cmpr");
5397         cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5398                                              offsetof(CPUSPARCState, hstick_cmpr),
5399                                              "hstick_cmpr");
5400         cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
5401                                        "hintp");
5402         cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
5403                                       "htba");
5404         cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
5405                                       "hver");
5406         cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5407                                      offsetof(CPUSPARCState, ssr), "ssr");
5408         cpu_ver = tcg_global_mem_new(TCG_AREG0,
5409                                      offsetof(CPUSPARCState, version), "ver");
5410         cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5411                                              offsetof(CPUSPARCState, softint),
5412                                              "softint");
5413 #else
5414         cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
5415                                      "wim");
5416 #endif
5417         cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
5418                                       "cond");
5419         cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
5420                                         "cc_src");
5421         cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5422                                          offsetof(CPUSPARCState, cc_src2),
5423                                          "cc_src2");
5424         cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
5425                                         "cc_dst");
5426         cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
5427                                            "cc_op");
5428         cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
5429                                          "psr");
5430         cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
5431                                      "fsr");
5432         cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
5433                                     "pc");
5434         cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
5435                                      "npc");
5436         cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
5437 #ifndef CONFIG_USER_ONLY
5438         cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
5439                                      "tbr");
5440 #endif
5441         for (i = 1; i < 8; i++) {
5442             cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5443                                               offsetof(CPUSPARCState, gregs[i]),
5444                                               gregnames[i]);
5445         }
5446         for (i = 0; i < TARGET_DPREGS; i++) {
5447             cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
5448                                                 offsetof(CPUSPARCState, fpr[i]),
5449                                                 fregnames[i]);
5450         }
5451     }
5452 }
5453
5454 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
5455 {
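    /* pc_pos indexes the gen_opc_* arrays filled in by a search-PC
       retranslation of the block containing the faulting instruction. */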
5456     target_ulong npc;
5457     env->pc = tcg_ctx.gen_opc_pc[pc_pos];
5458     npc = gen_opc_npc[pc_pos];
5459     if (npc == DYNAMIC_PC) {
5460         /* dynamic NPC: already stored */
5461     } else if (npc == JUMP_PC) {
5462         /* jump PC: use 'cond' and the jump targets of the translation */
5463         if (env->cond) {
5464             env->npc = gen_opc_jump_pc[0];
5465         } else {
5466             env->npc = gen_opc_jump_pc[1];
5467         }
5468     } else {
5469         env->npc = npc;
5470     }
5471 }