Add qemu 2.4.0
[kvmfornfv.git] / qemu / tcg / tcg-op.c
1 /*
2  * Tiny Code Generator for QEMU
3  *
4  * Copyright (c) 2008 Fabrice Bellard
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to deal
8  * in the Software without restriction, including without limitation the rights
9  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10  * copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in
14  * all copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22  * THE SOFTWARE.
23  */
24
25 #include "tcg.h"
26 #include "tcg-op.h"
27
/* Reduce the number of ifdefs below.  This assumes that all uses of
   TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
   the compiler can eliminate.  */
#if TCG_TARGET_REG_BITS == 64
/* On 64-bit hosts these accessors must never be reachable; redirect
   them to deliberately-undefined symbols so any stray use fails at
   link time instead of silently miscompiling.  */
extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
#define TCGV_LOW  TCGV_LOW_link_error
#define TCGV_HIGH TCGV_HIGH_link_error
#endif
37
/* Note that this is optimized for sequential allocation during translate.
   Up to and including filling in the forward link immediately.  We'll do
   proper termination of the end of the list after we finish translation.  */

/* Append one op (whose operands start at gen_opparam_buf[args]) to the
   doubly-linked op list in ctx->gen_op_buf.  */
static void tcg_emit_op(TCGContext *ctx, TCGOpcode opc, int args)
{
    int oi = ctx->gen_next_op_idx;  /* slot this op occupies */
    int ni = oi + 1;                /* provisional forward link */
    int pi = oi - 1;                /* backward link; -1 for the first op */

    tcg_debug_assert(oi < OPC_BUF_SIZE);
    ctx->gen_last_op_idx = oi;
    ctx->gen_next_op_idx = ni;

    ctx->gen_op_buf[oi] = (TCGOp){
        .opc = opc,
        .args = args,
        .prev = pi,
        .next = ni
    };
}
59
60 void tcg_gen_op1(TCGContext *ctx, TCGOpcode opc, TCGArg a1)
61 {
62     int pi = ctx->gen_next_parm_idx;
63
64     tcg_debug_assert(pi + 1 <= OPPARAM_BUF_SIZE);
65     ctx->gen_next_parm_idx = pi + 1;
66     ctx->gen_opparam_buf[pi] = a1;
67
68     tcg_emit_op(ctx, opc, pi);
69 }
70
71 void tcg_gen_op2(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2)
72 {
73     int pi = ctx->gen_next_parm_idx;
74
75     tcg_debug_assert(pi + 2 <= OPPARAM_BUF_SIZE);
76     ctx->gen_next_parm_idx = pi + 2;
77     ctx->gen_opparam_buf[pi + 0] = a1;
78     ctx->gen_opparam_buf[pi + 1] = a2;
79
80     tcg_emit_op(ctx, opc, pi);
81 }
82
83 void tcg_gen_op3(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
84                  TCGArg a2, TCGArg a3)
85 {
86     int pi = ctx->gen_next_parm_idx;
87
88     tcg_debug_assert(pi + 3 <= OPPARAM_BUF_SIZE);
89     ctx->gen_next_parm_idx = pi + 3;
90     ctx->gen_opparam_buf[pi + 0] = a1;
91     ctx->gen_opparam_buf[pi + 1] = a2;
92     ctx->gen_opparam_buf[pi + 2] = a3;
93
94     tcg_emit_op(ctx, opc, pi);
95 }
96
97 void tcg_gen_op4(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
98                  TCGArg a2, TCGArg a3, TCGArg a4)
99 {
100     int pi = ctx->gen_next_parm_idx;
101
102     tcg_debug_assert(pi + 4 <= OPPARAM_BUF_SIZE);
103     ctx->gen_next_parm_idx = pi + 4;
104     ctx->gen_opparam_buf[pi + 0] = a1;
105     ctx->gen_opparam_buf[pi + 1] = a2;
106     ctx->gen_opparam_buf[pi + 2] = a3;
107     ctx->gen_opparam_buf[pi + 3] = a4;
108
109     tcg_emit_op(ctx, opc, pi);
110 }
111
112 void tcg_gen_op5(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
113                  TCGArg a2, TCGArg a3, TCGArg a4, TCGArg a5)
114 {
115     int pi = ctx->gen_next_parm_idx;
116
117     tcg_debug_assert(pi + 5 <= OPPARAM_BUF_SIZE);
118     ctx->gen_next_parm_idx = pi + 5;
119     ctx->gen_opparam_buf[pi + 0] = a1;
120     ctx->gen_opparam_buf[pi + 1] = a2;
121     ctx->gen_opparam_buf[pi + 2] = a3;
122     ctx->gen_opparam_buf[pi + 3] = a4;
123     ctx->gen_opparam_buf[pi + 4] = a5;
124
125     tcg_emit_op(ctx, opc, pi);
126 }
127
128 void tcg_gen_op6(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2,
129                  TCGArg a3, TCGArg a4, TCGArg a5, TCGArg a6)
130 {
131     int pi = ctx->gen_next_parm_idx;
132
133     tcg_debug_assert(pi + 6 <= OPPARAM_BUF_SIZE);
134     ctx->gen_next_parm_idx = pi + 6;
135     ctx->gen_opparam_buf[pi + 0] = a1;
136     ctx->gen_opparam_buf[pi + 1] = a2;
137     ctx->gen_opparam_buf[pi + 2] = a3;
138     ctx->gen_opparam_buf[pi + 3] = a4;
139     ctx->gen_opparam_buf[pi + 4] = a5;
140     ctx->gen_opparam_buf[pi + 5] = a6;
141
142     tcg_emit_op(ctx, opc, pi);
143 }
144
145 /* 32 bit ops */
146
147 void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
148 {
149     /* some cases can be optimized here */
150     if (arg2 == 0) {
151         tcg_gen_mov_i32(ret, arg1);
152     } else {
153         TCGv_i32 t0 = tcg_const_i32(arg2);
154         tcg_gen_add_i32(ret, arg1, t0);
155         tcg_temp_free_i32(t0);
156     }
157 }
158
159 void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
160 {
161     if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
162         /* Don't recurse with tcg_gen_neg_i32.  */
163         tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
164     } else {
165         TCGv_i32 t0 = tcg_const_i32(arg1);
166         tcg_gen_sub_i32(ret, t0, arg2);
167         tcg_temp_free_i32(t0);
168     }
169 }
170
171 void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
172 {
173     /* some cases can be optimized here */
174     if (arg2 == 0) {
175         tcg_gen_mov_i32(ret, arg1);
176     } else {
177         TCGv_i32 t0 = tcg_const_i32(arg2);
178         tcg_gen_sub_i32(ret, arg1, t0);
179         tcg_temp_free_i32(t0);
180     }
181 }
182
/* ret = arg1 & arg2 (immediate), with strength reduction of common masks.  */
void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t0;
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        /* x & 0 == 0.  */
        tcg_gen_movi_i32(ret, 0);
        return;
    case 0xffffffffu:
        /* x & -1 == x.  */
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xffu:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }
    /* Generic case: materialize the constant and AND.  */
    t0 = tcg_const_i32(arg2);
    tcg_gen_and_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}
212
213 void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
214 {
215     /* Some cases can be optimized here.  */
216     if (arg2 == -1) {
217         tcg_gen_movi_i32(ret, -1);
218     } else if (arg2 == 0) {
219         tcg_gen_mov_i32(ret, arg1);
220     } else {
221         TCGv_i32 t0 = tcg_const_i32(arg2);
222         tcg_gen_or_i32(ret, arg1, t0);
223         tcg_temp_free_i32(t0);
224     }
225 }
226
227 void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
228 {
229     /* Some cases can be optimized here.  */
230     if (arg2 == 0) {
231         tcg_gen_mov_i32(ret, arg1);
232     } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
233         /* Don't recurse with tcg_gen_not_i32.  */
234         tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
235     } else {
236         TCGv_i32 t0 = tcg_const_i32(arg2);
237         tcg_gen_xor_i32(ret, arg1, t0);
238         tcg_temp_free_i32(t0);
239     }
240 }
241
242 void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
243 {
244     tcg_debug_assert(arg2 < 32);
245     if (arg2 == 0) {
246         tcg_gen_mov_i32(ret, arg1);
247     } else {
248         TCGv_i32 t0 = tcg_const_i32(arg2);
249         tcg_gen_shl_i32(ret, arg1, t0);
250         tcg_temp_free_i32(t0);
251     }
252 }
253
254 void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
255 {
256     tcg_debug_assert(arg2 < 32);
257     if (arg2 == 0) {
258         tcg_gen_mov_i32(ret, arg1);
259     } else {
260         TCGv_i32 t0 = tcg_const_i32(arg2);
261         tcg_gen_shr_i32(ret, arg1, t0);
262         tcg_temp_free_i32(t0);
263     }
264 }
265
266 void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
267 {
268     tcg_debug_assert(arg2 < 32);
269     if (arg2 == 0) {
270         tcg_gen_mov_i32(ret, arg1);
271     } else {
272         TCGv_i32 t0 = tcg_const_i32(arg2);
273         tcg_gen_sar_i32(ret, arg1, t0);
274         tcg_temp_free_i32(t0);
275     }
276 }
277
/* Branch to label L if (arg1 cond arg2).  Constant-folds the trivial
   always/never conditions.  */
void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
    }
    /* TCG_COND_NEVER: emit nothing.  */
}
286
287 void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
288 {
289     if (cond == TCG_COND_ALWAYS) {
290         tcg_gen_br(l);
291     } else if (cond != TCG_COND_NEVER) {
292         TCGv_i32 t0 = tcg_const_i32(arg2);
293         tcg_gen_brcond_i32(cond, arg1, t0, l);
294         tcg_temp_free_i32(t0);
295     }
296 }
297
/* ret = (arg1 cond arg2) ? 1 : 0.  Constant-folds always/never.  */
void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}
309
310 void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
311                           TCGv_i32 arg1, int32_t arg2)
312 {
313     TCGv_i32 t0 = tcg_const_i32(arg2);
314     tcg_gen_setcond_i32(cond, ret, arg1, t0);
315     tcg_temp_free_i32(t0);
316 }
317
318 void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
319 {
320     TCGv_i32 t0 = tcg_const_i32(arg2);
321     tcg_gen_mul_i32(ret, arg1, t0);
322     tcg_temp_free_i32(t0);
323 }
324
/* ret = arg1 / arg2 (signed).  Prefer a native div opcode, then the
   double-word div2 opcode, and fall back to a helper call.  */
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* t0 = sign-extension of arg1, used as the high input word.
           div2 writes the quotient to its first output (ret here) and
           the remainder to its second (t0, discarded).  */
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
338
/* ret = arg1 % arg2 (signed).  */
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        /* Compute via r = a - (a / b) * b.  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* t0 = sign-extension of arg1 as the high input word; here the
           quotient (first output) goes to the scratch t0 and the
           remainder (second output) goes to ret.  */
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
358
/* ret = arg1 / arg2 (unsigned).  */
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* Unsigned: the high input word is zero, not a sign copy.  */
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}
372
/* ret = arg1 % arg2 (unsigned).  */
void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        /* Compute via r = a - (a / b) * b.  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* Zero high input word; quotient to scratch t0, remainder to ret.  */
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
392
393 void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
394 {
395     if (TCG_TARGET_HAS_andc_i32) {
396         tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
397     } else {
398         TCGv_i32 t0 = tcg_temp_new_i32();
399         tcg_gen_not_i32(t0, arg2);
400         tcg_gen_and_i32(ret, arg1, t0);
401         tcg_temp_free_i32(t0);
402     }
403 }
404
405 void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
406 {
407     if (TCG_TARGET_HAS_eqv_i32) {
408         tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
409     } else {
410         tcg_gen_xor_i32(ret, arg1, arg2);
411         tcg_gen_not_i32(ret, ret);
412     }
413 }
414
415 void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
416 {
417     if (TCG_TARGET_HAS_nand_i32) {
418         tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
419     } else {
420         tcg_gen_and_i32(ret, arg1, arg2);
421         tcg_gen_not_i32(ret, ret);
422     }
423 }
424
425 void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
426 {
427     if (TCG_TARGET_HAS_nor_i32) {
428         tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
429     } else {
430         tcg_gen_or_i32(ret, arg1, arg2);
431         tcg_gen_not_i32(ret, ret);
432     }
433 }
434
435 void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
436 {
437     if (TCG_TARGET_HAS_orc_i32) {
438         tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
439     } else {
440         TCGv_i32 t0 = tcg_temp_new_i32();
441         tcg_gen_not_i32(t0, arg2);
442         tcg_gen_or_i32(ret, arg1, t0);
443         tcg_temp_free_i32(t0);
444     }
445 }
446
/* ret = arg1 rotated left by arg2.  */
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        /* Fallback: (arg1 << arg2) | (arg1 >> (32 - arg2)).
           NOTE(review): for a runtime rotate amount of 0 this emits a
           shift by 32, which TCG leaves unspecified — presumably the
           targets using this path guarantee a nonzero amount; confirm.  */
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
464
/* ret = arg1 rotated left by the immediate arg2 (must be < 32).  */
void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_rotl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else {
        /* Fallback: (arg1 << c) | (arg1 >> (32 - c)); both shift
           amounts are in [1,31] since c != 0 here.  */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
486
/* ret = arg1 rotated right by arg2.  */
void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        /* Fallback: (arg1 >> arg2) | (arg1 << (32 - arg2)).
           NOTE(review): same caveat as tcg_gen_rotl_i32 for a runtime
           rotate amount of 0 (shift by 32 is unspecified in TCG).  */
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
504
505 void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
506 {
507     tcg_debug_assert(arg2 < 32);
508     /* some cases can be optimized here */
509     if (arg2 == 0) {
510         tcg_gen_mov_i32(ret, arg1);
511     } else {
512         tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
513     }
514 }
515
/* Deposit the low LEN bits of ARG2 into ARG1 at bit offset OFS,
   leaving the remaining bits of ARG1 unchanged; result in RET.  */
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Full-width deposit is just a move of arg2.  */
    if (ofs == 0 && len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    /* Generic fallback.  len < 32 here (len == 32 forces ofs == 0,
       handled above), so the shift below is well-defined.  */
    mask = (1u << len) - 1;
    t1 = tcg_temp_new_i32();

    if (ofs + len < 32) {
        /* Mask arg2 first so stray high bits cannot leak into ret.  */
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        /* Field reaches bit 31: the shift itself discards high bits.  */
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    /* Clear the destination field in arg1, then OR in the new bits.  */
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);

    tcg_temp_free_i32(t1);
}
549
/* ret = (c1 cond c2) ? v1 : v2.  */
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        /* Fallback: build an all-ones/all-zeros mask from the condition
           and blend: ret = (v1 & mask) | (v2 & ~mask).  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);            /* 1 -> -1 (all ones), 0 -> 0 */
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
571
/* 64-bit add of the 32-bit pairs (ah:al) + (bh:bl) into (rh:rl).  */
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        /* Fallback: widen both pairs to i64, add, split the result.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
588
/* 64-bit subtract of the 32-bit pairs (ah:al) - (bh:bl) into (rh:rl).  */
void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        /* Fallback: widen both pairs to i64, subtract, split the result.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
605
/* Unsigned widening multiply: (rh:rl) = arg1 * arg2.  */
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        /* Low half via mul, high half via muluh.  Compute the low part
           into a temp first, in case rl aliases arg1/arg2.  */
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        /* Fallback: zero-extend to i64, multiply, split the result.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
627
/* Signed widening multiply: (rh:rl) = arg1 * arg2.  */
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        /* Low half via mul, high half via mulsh; temp guards against
           rl aliasing an input.  */
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* Start from the unsigned product, then correct the high half:
           signed_high = unsigned_high - (arg1 < 0 ? arg2 : 0)
                                       - (arg2 < 0 ? arg1 : 0).  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);   /* t2 = arg1 < 0 ? -1 : 0 */
        tcg_gen_sari_i32(t3, arg2, 31);   /* t3 = arg2 < 0 ? -1 : 0 */
        tcg_gen_and_i32(t2, t2, arg2);    /* t2 = arg1 < 0 ? arg2 : 0 */
        tcg_gen_and_i32(t3, t3, arg1);    /* t3 = arg2 < 0 ? arg1 : 0 */
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        /* 64-bit host fallback: sign-extend, multiply, split.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
667
668 void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
669 {
670     if (TCG_TARGET_HAS_ext8s_i32) {
671         tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
672     } else {
673         tcg_gen_shli_i32(ret, arg, 24);
674         tcg_gen_sari_i32(ret, ret, 24);
675     }
676 }
677
678 void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
679 {
680     if (TCG_TARGET_HAS_ext16s_i32) {
681         tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
682     } else {
683         tcg_gen_shli_i32(ret, arg, 16);
684         tcg_gen_sari_i32(ret, ret, 16);
685     }
686 }
687
688 void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
689 {
690     if (TCG_TARGET_HAS_ext8u_i32) {
691         tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
692     } else {
693         tcg_gen_andi_i32(ret, arg, 0xffu);
694     }
695 }
696
697 void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
698 {
699     if (TCG_TARGET_HAS_ext16u_i32) {
700         tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
701     } else {
702         tcg_gen_andi_i32(ret, arg, 0xffffu);
703     }
704 }
705
/* Note: we assume the two high bytes are set to zero */
/* Byte-swap the low 16 bits: ret = ((arg & 0xff) << 8) | (arg >> 8).  */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        tcg_gen_ext8u_i32(t0, arg);       /* t0 = arg & 0xff */
        tcg_gen_shli_i32(t0, t0, 8);      /* t0 <<= 8 */
        tcg_gen_shri_i32(ret, arg, 8);    /* ret = arg >> 8 (high bytes 0) */
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}
721
/* Byte-swap all 32 bits of arg into ret.  */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        /* Fallback: assemble the four bytes one at a time into t0.  */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_shli_i32(t0, arg, 24);            /* byte 0 -> byte 3 */

        tcg_gen_andi_i32(t1, arg, 0x0000ff00);    /* byte 1 -> byte 2 */
        tcg_gen_shli_i32(t1, t1, 8);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 8);             /* byte 2 -> byte 1 */
        tcg_gen_andi_i32(t1, t1, 0x0000ff00);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 24);            /* byte 3 -> byte 0 */
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
747
748 /* 64-bit ops */
749
750 #if TCG_TARGET_REG_BITS == 32
751 /* These are all inline for TCG_TARGET_REG_BITS == 64.  */
752
/* Discard both 32-bit halves of the i64 temp.  */
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}
758
/* Copy an i64 as two 32-bit half moves.  */
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
}
764
/* Load a 64-bit constant as two 32-bit halves.  */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);        /* low 32 bits (truncated) */
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32); /* high 32 bits */
}
770
/* Load a zero-extended 8-bit value: byte into the low half, zero high.  */
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
776
777 void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
778 {
779     tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
780     tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), 31);
781 }
782
/* Load a zero-extended 16-bit value: halfword into the low half, zero high.  */
void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
788
/* Load a sign-extended 16-bit value: high half replicates the sign bit
   of the loaded low half.  */
void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
794
/* Load a zero-extended 32-bit value: word into the low half, zero high.  */
void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
800
/* Load a sign-extended 32-bit value: high half replicates the sign bit
   of the loaded low half.  */
void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
806
/* Load a full 64-bit value as two 32-bit loads, ordered by host
   endianness so the halves land in the correct temps.  */
void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}
819
/* Store a full 64-bit value as two 32-bit stores, ordered by host
   endianness (mirror of tcg_gen_ld_i64).  */
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}
830
/* 64-bit AND as two independent 32-bit half ANDs.  */
void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
836
/* 64-bit OR as two independent 32-bit half ORs.  */
void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
842
/* 64-bit XOR as two independent 32-bit half XORs.  */
void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
848
/* 64-bit variable shift left: no 32-bit decomposition, call the helper.  */
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}
853
/* 64-bit variable logical shift right via helper.  */
void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}
858
/* 64-bit variable arithmetic shift right via helper.  */
void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}
863
/* 64-bit multiply on a 32-bit host: low x low gives the 64-bit base
   product, then the two cross products are added into the high half
   (their own high parts fall outside 64 bits and are dropped).  */
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    /* t0 = (uint64_t)low1 * low2 */
    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    /* high += low1 * high2 + high1 * low2 */
    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    /* Build in t0 then move, in case ret aliases arg1/arg2.  */
    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
#endif /* TCG_TARGET_REG_BITS == 32 */
885
886 void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
887 {
888     /* some cases can be optimized here */
889     if (arg2 == 0) {
890         tcg_gen_mov_i64(ret, arg1);
891     } else {
892         TCGv_i64 t0 = tcg_const_i64(arg2);
893         tcg_gen_add_i64(ret, arg1, t0);
894         tcg_temp_free_i64(t0);
895     }
896 }
897
898 void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
899 {
900     if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
901         /* Don't recurse with tcg_gen_neg_i64.  */
902         tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
903     } else {
904         TCGv_i64 t0 = tcg_const_i64(arg1);
905         tcg_gen_sub_i64(ret, t0, arg2);
906         tcg_temp_free_i64(t0);
907     }
908 }
909
910 void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
911 {
912     /* some cases can be optimized here */
913     if (arg2 == 0) {
914         tcg_gen_mov_i64(ret, arg1);
915     } else {
916         TCGv_i64 t0 = tcg_const_i64(arg2);
917         tcg_gen_sub_i64(ret, arg1, t0);
918         tcg_temp_free_i64(t0);
919     }
920 }
921
/* ret = arg1 & arg2 (immediate), with strength reduction of common masks.  */
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    TCGv_i64 t0;

    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: AND each half with the matching constant half.  */
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        /* x & 0 == 0.  */
        tcg_gen_movi_i64(ret, 0);
        return;
    case 0xffffffffffffffffull:
        /* x & -1 == x.  */
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xffull:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffull:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }
    /* Generic case: materialize the constant and AND.  */
    t0 = tcg_const_i64(arg2);
    tcg_gen_and_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}
964
/* ret = arg1 | arg2 (immediate).  */
void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: OR each half with the matching constant half.  */
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        /* x | -1 == -1.  */
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        /* x | 0 == x.  */
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
983
/* XOR with a 64-bit immediate: ret = arg1 ^ arg2.  */
void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: XOR each half with the matching immediate half.  */
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        /* x ^ 0 == x.  */
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* x ^ ~0 == ~x.  Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_xor_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1003
/* Emit a 64-bit shift by constant C, synthesized from 32-bit ops on the
   low/high halves.  RIGHT selects shift direction; ARITH selects an
   arithmetic (sign-propagating) vs logical right shift.  Only used on
   32-bit hosts (callers guard on TCG_TARGET_REG_BITS == 32).  */
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        /* Shift by zero: copy both halves unchanged.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        /* Whole-word shift: each result half derives from a single
           source half; the vacated half is filled with 0 or sign bits.  */
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else {
        /* 0 < c < 32: each result half combines bits from both halves.  */
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        if (right) {
            /* t0 = high-half bits that cross over into the low half.  */
            tcg_gen_shli_i32(t0, TCGV_HIGH(arg1), 32 - c);
            if (arith) {
                tcg_gen_sari_i32(t1, TCGV_HIGH(arg1), c);
            } else {
                tcg_gen_shri_i32(t1, TCGV_HIGH(arg1), c);
            }
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t0);
            tcg_gen_mov_i32(TCGV_HIGH(ret), t1);
        } else {
            /* t0 = low-half bits that cross over into the high half.  */
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            /* Note: ret can be the same as arg1, so we use t1 */
            tcg_gen_shli_i32(t1, TCGV_LOW(arg1), c);
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t0);
            tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1052
/* Left shift by an immediate: ret = arg1 << arg2, 0 <= arg2 < 64.  */
void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        /* right = 0, arith = 0: logical left shift from 32-bit halves.  */
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        /* Shift by zero is a move.  */
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1066
/* Logical right shift by an immediate: ret = arg1 >> arg2 (zero fill).  */
void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        /* right = 1, arith = 0: logical right shift from 32-bit halves.  */
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        /* Shift by zero is a move.  */
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shr_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1080
/* Arithmetic right shift by an immediate: ret = arg1 >> arg2 (sign fill).  */
void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        /* right = 1, arith = 1: arithmetic right shift from 32-bit halves.  */
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        /* Shift by zero is a move.  */
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sar_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1094
/* Conditional branch to label L if (arg1 COND arg2) holds.  */
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        /* Fold a trivially-true condition into an unconditional branch.  */
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        /* TCG_COND_NEVER emits nothing at all.  */
        if (TCG_TARGET_REG_BITS == 32) {
            /* 32-bit host: use the double-word compare-and-branch op.  */
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
    }
}
1110
/* Conditional branch against an immediate: branch to L if (arg1 COND arg2).  */
void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        /* Materialize the constant only when a compare is really emitted.  */
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_brcond_i64(cond, arg1, t0, l);
        tcg_temp_free_i64(t0);
    }
}
1121
/* Set ret to 1 if (arg1 COND arg2) holds, else 0.  */
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        /* Constant-fold trivially true/false conditions.  */
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* 32-bit host: double-word compare sets the low half;
               the high half of a 0/1 result is always zero.  */
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}
1140
/* Set ret to 1 if (arg1 COND arg2) holds, with arg2 an immediate.  */
void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_setcond_i64(cond, ret, arg1, t0);
    tcg_temp_free_i64(t0);
}
1148
/* Multiply by an immediate: ret = arg1 * arg2 (low 64 bits).  */
void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_mul_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}
1155
/* Signed 64-bit division: ret = arg1 / arg2.  Falls back from a native
   div op, to a div2 op (128/64 divide fed with the sign extension of
   arg1 as the high word), to a helper call.  */
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        /* t0 = sign bits of arg1: high word of the sign-extended dividend.  */
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}
1169
/* Signed 64-bit remainder: ret = arg1 % arg2.  Tries a native rem op,
   then rem-via-div (a - (a/b)*b), then div2 (which produces the
   remainder as its second output), then a helper call.  */
void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        /* ret = arg1 - (arg1 / arg2) * arg2.  */
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        /* div2 outputs quotient first, remainder second: keep only ret.  */
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}
1189
/* Unsigned 64-bit division: ret = arg1 / arg2.  The div2 fallback uses
   zero as the high word of the 128-bit dividend.  */
void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}
1203
/* Unsigned 64-bit remainder: ret = arg1 % arg2.  Same fallback chain as
   tcg_gen_rem_i64, using the unsigned ops/helper.  */
void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        /* ret = arg1 - (arg1 / arg2) * arg2.  */
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        /* divu2 outputs quotient first, remainder second: keep only ret.  */
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}
1223
/* Sign-extend the low 8 bits of ARG into RET.  */
void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low half, then replicate its sign bit.  */
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        /* Classic shift-up/shift-down sign extension.  */
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}
1236
/* Sign-extend the low 16 bits of ARG into RET.  */
void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low half, then replicate its sign bit.  */
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        /* Classic shift-up/shift-down sign extension.  */
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}
1249
/* Sign-extend the low 32 bits of ARG into RET.  */
void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Low half is already the value; high half is its sign bits.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        /* Classic shift-up/shift-down sign extension.  */
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}
1262
/* Zero-extend the low 8 bits of ARG into RET.  */
void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        /* andi will not recurse back here for this mask (see andi).  */
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}
1274
/* Zero-extend the low 16 bits of ARG into RET.  */
void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        /* andi will not recurse back here for this mask (see andi).  */
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}
1286
/* Zero-extend the low 32 bits of ARG into RET.  */
void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy the low half, clear the high half.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        /* andi will not recurse back here for this mask (see andi).  */
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}
1298
/* Byte-swap the low 16 bits of ARG into RET.
   Note: we assume the six high bytes are set to zero */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();

        /* t0 = byte 0 moved up to position 1; ret = byte 1 moved down.
           The shri relies on the high-bytes-zero precondition above.  */
        tcg_gen_ext8u_i64(t0, arg);
        tcg_gen_shli_i64(t0, t0, 8);
        tcg_gen_shri_i64(ret, arg, 8);
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}
1317
/* Byte-swap the low 32 bits of ARG into RET.
   Note: we assume the four high bytes are set to zero */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        /* Move each of the four low bytes to its mirrored position
           and OR the pieces together.  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

        /* Byte 0 -> byte 3 (ext32u discards bits shifted above bit 31).  */
        tcg_gen_shli_i64(t0, arg, 24);
        tcg_gen_ext32u_i64(t0, t0);

        /* Byte 1 -> byte 2.  */
        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 2 -> byte 1.  */
        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 3 -> byte 0 (relies on the high bytes being zero).  */
        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1348
/* Byte-swap all 64 bits of ARG into RET.  */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap bytes within each half, then swap the halves.  Temps are
           needed because ret may alias arg.  */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        /* Move each byte to its mirrored position and OR the pieces.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        /* Byte 0 -> byte 7.  */
        tcg_gen_shli_i64(t0, arg, 56);

        /* Byte 1 -> byte 6.  */
        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 40);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 2 -> byte 5.  */
        tcg_gen_andi_i64(t1, arg, 0x00ff0000);
        tcg_gen_shli_i64(t1, t1, 24);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 3 -> byte 4.  */
        tcg_gen_andi_i64(t1, arg, 0xff000000);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 4 -> byte 3.  */
        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0xff000000);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 5 -> byte 2.  */
        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_andi_i64(t1, t1, 0x00ff0000);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 6 -> byte 1.  */
        tcg_gen_shri_i64(t1, arg, 40);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 7 -> byte 0.  */
        tcg_gen_shri_i64(t1, arg, 56);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1400
/* Bitwise NOT: ret = ~arg.  */
void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        /* ~x == x ^ -1; xori will not recurse back here (it checks
           TCG_TARGET_HAS_not_i64 itself).  */
        tcg_gen_xori_i64(ret, arg, -1);
    }
}
1412
/* AND with complement: ret = arg1 & ~arg2.  */
void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        /* Fallback: invert arg2 into a temp, then AND.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1427
/* Equivalence (XNOR): ret = ~(arg1 ^ arg2).  */
void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        /* Fallback: XOR then invert in place.  */
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1440
/* NAND: ret = ~(arg1 & arg2).  */
void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        /* Fallback: AND then invert in place.  */
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1453
/* NOR: ret = ~(arg1 | arg2).  */
void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        /* Fallback: OR then invert in place.  */
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1466
/* OR with complement: ret = arg1 | ~arg2.  */
void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        /* Fallback: invert arg2 into a temp, then OR.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1481
/* Rotate left by a variable amount: ret = arg1 rol arg2.  */
void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        /* Synthesize: (arg1 << n) | (arg1 >> (64 - n)).  NOTE(review):
           assumes the shift ops tolerate the n == 0 case (shift by 64)
           per TCG shift-count semantics — confirm against tcg/README.  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1498
/* Rotate left by an immediate: ret = arg1 rol arg2, 0 <= arg2 < 64.  */
void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        /* Rotate by zero is a move.  */
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_rotl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else {
        /* Synthesize: (arg1 << n) | (arg1 >> (64 - n)); n != 0 here so
           both shift counts are in range.  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1520
/* Rotate right by a variable amount: ret = arg1 ror arg2.  */
void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        /* Synthesize: (arg1 >> n) | (arg1 << (64 - n)).  NOTE(review):
           same n == 0 shift-count caveat as tcg_gen_rotl_i64.  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1537
1538 void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1539 {
1540     tcg_debug_assert(arg2 < 64);
1541     /* some cases can be optimized here */
1542     if (arg2 == 0) {
1543         tcg_gen_mov_i64(ret, arg1);
1544     } else {
1545         tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
1546     }
1547 }
1548
/* Deposit LEN bits of ARG2 into ARG1 at bit offset OFS:
   ret = (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs),
   where mask = (1 << len) - 1.  */
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs == 0 && len == 64) {
        /* Full-width deposit replaces arg1 entirely.  This also keeps
           the len == 64 case away from the 1ull << len shift below.  */
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* If the field lies wholly within one 32-bit half, deposit into
           that half and copy the other through.  */
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    /* Generic mask-shift-combine fallback.  */
    mask = (1ull << len) - 1;
    t1 = tcg_temp_new_i64();

    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        /* Field reaches bit 63: the shift alone discards the excess.  */
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);

    tcg_temp_free_i64(t1);
}
1597
/* Conditional move: ret = (c1 COND c2) ? v1 : v2.  */
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        /* Constant-fold trivially true/false conditions.  */
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: compute the condition as 0/1 in t0 first.  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            /* Select each half against t0 != 0.  */
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            /* Branchless select: turn 0/1 into an all-zeros/all-ones
               mask, then ret = (v1 & mask) | (v2 & ~mask) per half.  */
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        /* Branchless select via a full-width mask, as above but 64-bit.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1645
/* 128-bit add from 64-bit halves: (rh:rl) = (ah:al) + (bh:bl).  */
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        /* Low-half sum in a temp (rl may alias an input); carry-out is
           detected by the unsigned wraparound test t0 < al.  */
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1663
/* 128-bit subtract from 64-bit halves: (rh:rl) = (ah:al) - (bh:bl).  */
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        /* Low-half difference in a temp (rl may alias an input);
           a borrow occurred iff al < bl (unsigned).  */
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1681
/* Unsigned 64x64 -> 128 multiply: (rh:rl) = arg1 * arg2.  */
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        /* Compose from low-half mul and unsigned high-half mul; the low
           result goes through a temp since rl may alias an input.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        /* High half comes from a runtime helper.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
1700
/* Signed 64x64 -> 128 multiply: (rh:rl) = arg1 * arg2.  */
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        /* Compose from low-half mul and signed high-half mul.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        /* Derive the signed product from the unsigned one:
           signed_high = unsigned_high - (arg1 < 0 ? arg2 : 0)
                                       - (arg2 < 0 ? arg1 : 0).
           The low 64 bits are identical for signed and unsigned.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        /* High half comes from a runtime helper.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
1737
1738 /* Size changing operations.  */
1739
/* Truncate ARG to 32 bits after shifting it right by COUNT:
   ret = (uint32_t)(arg >> count).  */
void tcg_gen_trunc_shr_i64_i32(TCGv_i32 ret, TCGv_i64 arg, unsigned count)
{
    tcg_debug_assert(count < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        if (count >= 32) {
            /* Result comes entirely from the high half.  */
            tcg_gen_shri_i32(ret, TCGV_HIGH(arg), count - 32);
        } else if (count == 0) {
            /* No shift: result is the low half.  */
            tcg_gen_mov_i32(ret, TCGV_LOW(arg));
        } else {
            /* Straddling shift: do it in 64 bits, keep the low half.  */
            TCGv_i64 t = tcg_temp_new_i64();
            tcg_gen_shri_i64(t, arg, count);
            tcg_gen_mov_i32(ret, TCGV_LOW(t));
            tcg_temp_free_i64(t);
        }
    } else if (TCG_TARGET_HAS_trunc_shr_i32) {
        /* The MAKE_TCGV_I32/GET_TCGV_I64 round-trip reinterprets the
           64-bit temp index as a 32-bit one for the op arguments.  */
        tcg_gen_op3i_i32(INDEX_op_trunc_shr_i32, ret,
                         MAKE_TCGV_I32(GET_TCGV_I64(arg)), count);
    } else if (count == 0) {
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
    } else {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_shri_i64(t, arg, count);
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(t)));
        tcg_temp_free_i64(t);
    }
}
1766
/* Zero-extend a 32-bit value into a 64-bit destination.  */
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        /* Note: we assume the target supports move between
           32 and 64 bit registers.  */
        tcg_gen_ext32u_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
    }
}
1778
/* Sign-extend a 32-bit value into a 64-bit destination.  */
void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        /* Note: we assume the target supports move between
           32 and 64 bit registers.  */
        tcg_gen_ext32s_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
    }
}
1790
/* Build a 64-bit value from two 32-bit halves: dest = high:low.  */
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        /* On a 32-bit host the halves map directly.  */
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        /* Shift the high part into place and OR; no mask is needed
           because both inputs were zero-extended.  */
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}
1816
/* Split a 64-bit value into two 32-bit outputs: lo = low half,
   hi = high half.  */
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        /* Truncate at shift 0 and shift 32 respectively.  */
        tcg_gen_trunc_shr_i64_i32(lo, arg, 0);
        tcg_gen_trunc_shr_i64_i32(hi, arg, 32);
    }
}
1827
/* Split a 64-bit value into two 64-bit destinations: lo gets the
   zero-extended low 32 bits, hi gets the high 32 bits shifted down.
   NOTE(review): the low half is written first, so this presumably
   requires lo != arg (the shift would otherwise read the clobbered
   value) — confirm at call sites.  */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
1833
1834 /* QEMU specific operations.  */
1835
/* Emit a goto_tb op for translation-block chaining; idx selects which
   of the TB's numbered exits is being taken.  */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= 1);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx.goto_tb_issue_mask |= 1 << idx;
#endif
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
1847
1848 static inline TCGMemOp tcg_canonicalize_memop(TCGMemOp op, bool is64, bool st)
1849 {
1850     switch (op & MO_SIZE) {
1851     case MO_8:
1852         op &= ~MO_BSWAP;
1853         break;
1854     case MO_16:
1855         break;
1856     case MO_32:
1857         if (!is64) {
1858             op &= ~MO_SIGN;
1859         }
1860         break;
1861     case MO_64:
1862         if (!is64) {
1863             tcg_abort();
1864         }
1865         break;
1866     }
1867     if (st) {
1868         op &= ~MO_SIGN;
1869     }
1870     return op;
1871 }
1872
/* Emit a guest-memory load/store op with a 32-bit value operand.  The
   number of host words needed for the address depends on
   TARGET_LONG_BITS and TCG_TARGET_REG_BITS, hence the variants.  */
static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    /* Pack the memory op and mmu index into one constant operand.  */
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(opc, val, addr, oi);
#else
    if (TCG_TARGET_REG_BITS == 32) {
        /* 64-bit guest address is split across a host register pair.  */
        tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        /* Mixed 32-bit value / 64-bit address: emit raw arg indexes.  */
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I32(val), GET_TCGV_I64(addr), oi);
    }
#endif
}
1887
/* Emit a guest-memory load/store op with a 64-bit value operand.  Both
   the value and the address may each occupy one or two host words,
   depending on TARGET_LONG_BITS and TCG_TARGET_REG_BITS.  */
static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    /* Pack the memory op and mmu index into one constant operand.  */
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    if (TCG_TARGET_REG_BITS == 32) {
        /* 64-bit value is a host register pair; address is one word.  */
        tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
    } else {
        /* Mixed 64-bit value / 32-bit address: emit raw arg indexes.  */
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I64(val), GET_TCGV_I32(addr), oi);
    }
#else
    if (TCG_TARGET_REG_BITS == 32) {
        /* Both value and address are host register pairs.  */
        tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
                         TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3i_i64(opc, val, addr, oi);
    }
#endif
}
1907
1908 void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1909 {
1910     memop = tcg_canonicalize_memop(memop, 0, 0);
1911     gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
1912 }
1913
1914 void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1915 {
1916     memop = tcg_canonicalize_memop(memop, 0, 1);
1917     gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
1918 }
1919
1920 void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1921 {
1922     if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
1923         tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
1924         if (memop & MO_SIGN) {
1925             tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
1926         } else {
1927             tcg_gen_movi_i32(TCGV_HIGH(val), 0);
1928         }
1929         return;
1930     }
1931
1932     memop = tcg_canonicalize_memop(memop, 1, 0);
1933     gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
1934 }
1935
1936 void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1937 {
1938     if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
1939         tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
1940         return;
1941     }
1942
1943     memop = tcg_canonicalize_memop(memop, 1, 1);
1944     gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
1945 }