Implement real ext_i32_i64 and extu_i32_i64 ops. They guarantee that a
32-bit value is always explicitly converted to a 64-bit value, rather
than being silently propagated through the register allocator or the
optimizer.
Cc: Andrzej Zaborowski <address@hidden>
Cc: Alexander Graf <address@hidden>
Cc: Blue Swirl <address@hidden>
Cc: Claudio Fontana <address@hidden>
Cc: Claudio Fontana <address@hidden>
Cc: Richard Henderson <address@hidden>
Cc: Stefan Weil <address@hidden>
Signed-off-by: Aurelien Jarno <address@hidden>
---
tcg/aarch64/tcg-target.c | 4 ++++
tcg/i386/tcg-target.c | 5 +++++
tcg/ia64/tcg-target.c | 4 ++++
tcg/ppc/tcg-target.c | 6 ++++++
tcg/s390/tcg-target.c | 5 +++++
tcg/sparc/tcg-target.c | 8 ++++++--
tcg/tcg-op.c | 10 ++++------
tcg/tcg-opc.h | 3 +++
tcg/tci/tcg-target.c | 4 ++++
tci.c | 6 ++++--
10 files changed, 45 insertions(+), 10 deletions(-)
diff --git a/tcg/aarch64/tcg-target.c b/tcg/aarch64/tcg-target.c
index b7ec4f5..7f7ab7e 100644
--- a/tcg/aarch64/tcg-target.c
+++ b/tcg/aarch64/tcg-target.c
@@ -1556,6 +1556,7 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext16s_i32:
tcg_out_sxt(s, ext, MO_16, a0, a1);
break;
+ case INDEX_op_ext_i32_i64:
case INDEX_op_ext32s_i64:
tcg_out_sxt(s, TCG_TYPE_I64, MO_32, a0, a1);
break;
@@ -1567,6 +1568,7 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext16u_i32:
tcg_out_uxt(s, MO_16, a0, a1);
break;
+ case INDEX_op_extu_i32_i64:
case INDEX_op_ext32u_i64:
tcg_out_movr(s, TCG_TYPE_I32, a0, a1);