author     David Hildenbrand <david@redhat.com>    2019-02-25 21:03:16 +0100
committer  Cornelia Huck <cohuck@redhat.com>       2019-03-04 11:49:31 +0100
commit     2a1cf844520748a18740a905e6286987d3e4e62d
tree       3db516b2ffb2978b40037af99efd2cb0ec0e0a38
parent     27197fec128d5ee97ec060f99b95df2c48668783
download   qemu-2a1cf844520748a18740a905e6286987d3e4e62d.zip
s390x/tcg: Factor out gen_addi_and_wrap_i64() from get_address()
Also properly wrap in 24bit mode. While at it, convert the comment (and drop
the comment about fundamental TCG optimizations).

Reviewed-by: Richard Henderson <richard.henderson@linaro.org>
Signed-off-by: David Hildenbrand <david@redhat.com>
Message-Id: <20190225200318.16102-6-david@redhat.com>
Signed-off-by: Cornelia Huck <cohuck@redhat.com>
-rw-r--r--  target/s390x/translate.c | 41
1 file changed, 26 insertions(+), 15 deletions(-)
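For readers not familiar with s390x address generation: outside of 64-bit addressing mode the CPU ignores the high bits of a computed address, keeping the low 31 bits in 31-bit mode and the low 24 bits in 24-bit mode, and that is the rule the new helper applies after every addition. Below is a minimal, standalone C sketch of that rule on the host side; the addi_and_wrap() function and the AddrMode enum are illustrative only and are not part of QEMU, which derives the mode from the TB flags FLAG_MASK_64 and FLAG_MASK_32 instead.

#include <stdint.h>

/* Illustrative addressing modes; QEMU derives these from the TB flags
 * (FLAG_MASK_64 / FLAG_MASK_32) rather than from an enum like this. */
typedef enum {
    MODE_24BIT,
    MODE_31BIT,
    MODE_64BIT,
} AddrMode;

/* Hypothetical host-side model of what gen_addi_and_wrap_i64() emits as
 * TCG ops: add the displacement, then mask the result according to the
 * current addressing mode. */
static uint64_t addi_and_wrap(uint64_t src, int64_t imm, AddrMode mode)
{
    uint64_t dst = src + (uint64_t)imm;

    switch (mode) {
    case MODE_31BIT:
        return dst & 0x7fffffffULL;  /* keep the low 31 bits */
    case MODE_24BIT:
        return dst & 0x00ffffffULL;  /* keep the low 24 bits */
    default:
        return dst;                  /* 64-bit mode: no wrapping */
    }
}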
diff --git a/target/s390x/translate.c b/target/s390x/translate.c
index 933bf29331..f7de77192c 100644
--- a/target/s390x/translate.c
+++ b/target/s390x/translate.c
@@ -382,32 +382,43 @@ static inline void gen_trap(DisasContext *s)
     gen_data_exception(0xff);
 }
 
+static void gen_addi_and_wrap_i64(DisasContext *s, TCGv_i64 dst, TCGv_i64 src,
+                                  int64_t imm)
+{
+    tcg_gen_addi_i64(dst, src, imm);
+    if (!(s->base.tb->flags & FLAG_MASK_64)) {
+        if (s->base.tb->flags & FLAG_MASK_32) {
+            tcg_gen_andi_i64(dst, dst, 0x7fffffff);
+        } else {
+            tcg_gen_andi_i64(dst, dst, 0x00ffffff);
+        }
+    }
+}
+
 static TCGv_i64 get_address(DisasContext *s, int x2, int b2, int d2)
 {
     TCGv_i64 tmp = tcg_temp_new_i64();
-    bool need_31 = !(s->base.tb->flags & FLAG_MASK_64);
-
-    /* Note that d2 is limited to 20 bits, signed. If we crop negative
-       displacements early we create larger immedate addends. */
-    /* Note that addi optimizes the imm==0 case. */
+    /*
+     * Note that d2 is limited to 20 bits, signed. If we crop negative
+     * displacements early we create larger immedate addends.
+     */
     if (b2 && x2) {
         tcg_gen_add_i64(tmp, regs[b2], regs[x2]);
-        tcg_gen_addi_i64(tmp, tmp, d2);
+        gen_addi_and_wrap_i64(s, tmp, tmp, d2);
     } else if (b2) {
-        tcg_gen_addi_i64(tmp, regs[b2], d2);
+        gen_addi_and_wrap_i64(s, tmp, regs[b2], d2);
     } else if (x2) {
-        tcg_gen_addi_i64(tmp, regs[x2], d2);
-    } else {
-        if (need_31) {
-            d2 &= 0x7fffffff;
-            need_31 = false;
+        gen_addi_and_wrap_i64(s, tmp, regs[x2], d2);
+    } else if (!(s->base.tb->flags & FLAG_MASK_64)) {
+        if (s->base.tb->flags & FLAG_MASK_32) {
+            tcg_gen_movi_i64(tmp, d2 & 0x7fffffff);
+        } else {
+            tcg_gen_movi_i64(tmp, d2 & 0x00ffffff);
         }
+    } else {
         tcg_gen_movi_i64(tmp, d2);
     }
-    if (need_31) {
-        tcg_gen_andi_i64(tmp, tmp, 0x7fffffff);
-    }
     return tmp;
 }
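As a hypothetical worked case for the fix named in the subject ("Also properly wrap in 24bit mode"), using illustrative values: with a base register holding 0x00fffff0 and a displacement of +0x20, the raw sum is 0x01000010. Before this patch only the 31-bit mask was ever applied, so a guest running in 24-bit mode would have been handed 0x01000010; with gen_addi_and_wrap_i64() the result is wrapped to 0x00000010. In 31-bit mode the same sum already fits in 31 bits and is returned unchanged, and in 64-bit mode no mask is applied at all.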