author    Richard Henderson <richard.henderson@linaro.org>   2021-02-01 08:28:06 -1000
committer Richard Henderson <richard.henderson@linaro.org>   2021-03-17 07:24:44 -0600
commit    ae216c9747840f6365b97286e04fa3bc54e7ccd4 (patch)
tree      93a73bcfb8014c673af04df55a0cddc40ce81e63 /tcg/tci
parent    59964b4f98c74921d184d0d1119efcd055ce2881 (diff)
tcg/tci: Push opcode emit into each case
We're about to split out bytecode output into helpers, but we can't do
that one at a time if tcg_out_op_t is being done outside of the switch.

Reviewed-by: Philippe Mathieu-Daudé <f4bug@amsat.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
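For context, here is a minimal sketch of the kind of per-case helper this change makes possible once the opcode emit lives inside each case. The helper name and operand shape below are assumptions for illustration and are not part of this patch; the sketch only reuses the tcg_out_op_t / tcg_out_r / length-byte pattern visible in the diff.

/*
 * Illustrative only -- assumed helper name and shape, not in this patch.
 * Once each case emits its own opcode, a recurring operand pattern can be
 * folded into a helper that writes the opcode, the operands, and then
 * back-patches the length byte, as the cases below currently do inline.
 */
static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);                          /* opcode */
    tcg_out_r(s, r0);                             /* output register */
    tcg_out_r(s, r1);                             /* input register 1 */
    tcg_out_r(s, r2);                             /* input register 2 */
    old_code_ptr[1] = s->code_ptr - old_code_ptr; /* insn length byte */
}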
Diffstat (limited to 'tcg/tci')
 -rw-r--r--   tcg/tci/tcg-target.c.inc   35
 1 file changed, 32 insertions(+), 3 deletions(-)
diff --git a/tcg/tci/tcg-target.c.inc b/tcg/tci/tcg-target.c.inc
index 7fb3b04eaf..c5b061fe76 100644
--- a/tcg/tci/tcg-target.c.inc
+++ b/tcg/tci/tcg-target.c.inc
@@ -385,40 +385,48 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
{
uint8_t *old_code_ptr = s->code_ptr;
- tcg_out_op_t(s, opc);
-
switch (opc) {
case INDEX_op_exit_tb:
+ tcg_out_op_t(s, opc);
tcg_out_i(s, args[0]);
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
case INDEX_op_goto_tb:
tcg_debug_assert(s->tb_jmp_insn_offset == 0);
/* indirect jump method. */
+ tcg_out_op_t(s, opc);
tcg_out_i(s, (uintptr_t)(s->tb_jmp_target_addr + args[0]));
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
set_jmp_reset_offset(s, args[0]);
break;
case INDEX_op_br:
+ tcg_out_op_t(s, opc);
tci_out_label(s, arg_label(args[0]));
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
CASE_32_64(setcond)
+ tcg_out_op_t(s, opc);
tcg_out_r(s, args[0]);
tcg_out_r(s, args[1]);
tcg_out_r(s, args[2]);
tcg_out8(s, args[3]); /* condition */
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
#if TCG_TARGET_REG_BITS == 32
case INDEX_op_setcond2_i32:
/* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
+ tcg_out_op_t(s, opc);
tcg_out_r(s, args[0]);
tcg_out_r(s, args[1]);
tcg_out_r(s, args[2]);
tcg_out_r(s, args[3]);
tcg_out_r(s, args[4]);
tcg_out8(s, args[5]); /* condition */
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
#endif
@@ -436,10 +444,12 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
CASE_64(st32)
CASE_64(st)
stack_bounds_check(args[1], args[2]);
+ tcg_out_op_t(s, opc);
tcg_out_r(s, args[0]);
tcg_out_r(s, args[1]);
tcg_debug_assert(args[2] == (int32_t)args[2]);
tcg_out32(s, args[2]);
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
CASE_32_64(add)
@@ -462,12 +472,15 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
CASE_32_64(divu) /* Optional (TCG_TARGET_HAS_div_*). */
CASE_32_64(rem) /* Optional (TCG_TARGET_HAS_div_*). */
CASE_32_64(remu) /* Optional (TCG_TARGET_HAS_div_*). */
+ tcg_out_op_t(s, opc);
tcg_out_r(s, args[0]);
tcg_out_r(s, args[1]);
tcg_out_r(s, args[2]);
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
CASE_32_64(deposit) /* Optional (TCG_TARGET_HAS_deposit_*). */
+ tcg_out_op_t(s, opc);
{
TCGArg pos = args[3], len = args[4];
TCGArg max = opc == INDEX_op_deposit_i32 ? 32 : 64;
@@ -481,13 +494,16 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
tcg_out8(s, pos);
tcg_out8(s, len);
}
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
CASE_32_64(brcond)
+ tcg_out_op_t(s, opc);
tcg_out_r(s, args[0]);
tcg_out_r(s, args[1]);
tcg_out8(s, args[2]); /* condition */
tci_out_label(s, arg_label(args[3]));
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
CASE_32_64(neg) /* Optional (TCG_TARGET_HAS_neg_*). */
@@ -503,48 +519,59 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
CASE_32_64(bswap16) /* Optional (TCG_TARGET_HAS_bswap16_*). */
CASE_32_64(bswap32) /* Optional (TCG_TARGET_HAS_bswap32_*). */
CASE_64(bswap64) /* Optional (TCG_TARGET_HAS_bswap64_i64). */
+ tcg_out_op_t(s, opc);
tcg_out_r(s, args[0]);
tcg_out_r(s, args[1]);
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
#if TCG_TARGET_REG_BITS == 32
case INDEX_op_add2_i32:
case INDEX_op_sub2_i32:
+ tcg_out_op_t(s, opc);
tcg_out_r(s, args[0]);
tcg_out_r(s, args[1]);
tcg_out_r(s, args[2]);
tcg_out_r(s, args[3]);
tcg_out_r(s, args[4]);
tcg_out_r(s, args[5]);
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
case INDEX_op_brcond2_i32:
+ tcg_out_op_t(s, opc);
tcg_out_r(s, args[0]);
tcg_out_r(s, args[1]);
tcg_out_r(s, args[2]);
tcg_out_r(s, args[3]);
tcg_out8(s, args[4]); /* condition */
tci_out_label(s, arg_label(args[5]));
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
case INDEX_op_mulu2_i32:
+ tcg_out_op_t(s, opc);
tcg_out_r(s, args[0]);
tcg_out_r(s, args[1]);
tcg_out_r(s, args[2]);
tcg_out_r(s, args[3]);
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
#endif
case INDEX_op_qemu_ld_i32:
case INDEX_op_qemu_st_i32:
+ tcg_out_op_t(s, opc);
tcg_out_r(s, *args++);
tcg_out_r(s, *args++);
if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
tcg_out_r(s, *args++);
}
tcg_out32(s, *args++);
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
case INDEX_op_qemu_ld_i64:
case INDEX_op_qemu_st_i64:
+ tcg_out_op_t(s, opc);
tcg_out_r(s, *args++);
if (TCG_TARGET_REG_BITS == 32) {
tcg_out_r(s, *args++);
@@ -554,9 +581,12 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
tcg_out_r(s, *args++);
}
tcg_out32(s, *args++);
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
case INDEX_op_mb:
+ tcg_out_op_t(s, opc);
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
@@ -565,7 +595,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
default:
tcg_abort();
}
- old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,