Perform constant folding for NOT and EXT{8,16,32}{S,U} operations.

Signed-off-by: Kirill Batuzov <batuz...@ispras.ru>
---
 tcg/optimize.c |   82 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 files changed, 82 insertions(+), 0 deletions(-)
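(Note for reviewers, not part of the commit message: the sign-extension
folds below all use the same "test the sign bit, then OR in the ones mask"
pattern. A minimal standalone sketch of the ext8s case, assuming a 64-bit
TCGArg; the fold_ext8s/main names are hypothetical, used only to
illustrate the expression the patch adds:

    #include <inttypes.h>
    #include <stdio.h>

    /* Same expression the patch uses for INDEX_op_ext8s_i64: if bit 7 is
     * set, OR ones into everything above the low byte, otherwise mask
     * down to the low byte. */
    static uint64_t fold_ext8s(uint64_t x)
    {
        return x & (1 << 7) ? x | ~0xffULL : x & 0xff;
    }

    int main(void)
    {
        printf("0x%" PRIx64 "\n", fold_ext8s(0x80)); /* 0xffffffffffffff80 */
        printf("0x%" PRIx64 "\n", fold_ext8s(0x7f)); /* 0x7f */
        return 0;
    }
)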
diff --git a/tcg/optimize.c b/tcg/optimize.c
index b6b0dc4..bda469a 100644
--- a/tcg/optimize.c
+++ b/tcg/optimize.c
@@ -104,6 +104,11 @@ static int op_bits(int op)
     case INDEX_op_sar_i32:
     case INDEX_op_rotl_i32:
     case INDEX_op_rotr_i32:
+    case INDEX_op_not_i32:
+    case INDEX_op_ext8s_i32:
+    case INDEX_op_ext16s_i32:
+    case INDEX_op_ext8u_i32:
+    case INDEX_op_ext16u_i32:
        return 32;
 #if TCG_TARGET_REG_BITS == 64
     case INDEX_op_mov_i64:
@@ -118,6 +123,13 @@ static int op_bits(int op)
     case INDEX_op_sar_i64:
     case INDEX_op_rotl_i64:
     case INDEX_op_rotr_i64:
+    case INDEX_op_not_i64:
+    case INDEX_op_ext8s_i64:
+    case INDEX_op_ext16s_i64:
+    case INDEX_op_ext32s_i64:
+    case INDEX_op_ext8u_i64:
+    case INDEX_op_ext16u_i64:
+    case INDEX_op_ext32u_i64:
        return 64;
 #endif
     default:
@@ -245,6 +257,44 @@ static TCGArg do_constant_folding_2(int op, TCGArg x, TCGArg y)
        return x;
 #endif
 
+    case INDEX_op_not_i32:
+#if TCG_TARGET_REG_BITS == 64
+    case INDEX_op_not_i64:
+#endif
+        return ~x;
+
+    case INDEX_op_ext8s_i32:
+        return x & (1 << 7) ? x | ~0xff : x & 0xff;
+
+    case INDEX_op_ext16s_i32:
+        return x & (1 << 15) ? x | ~0xffff : x & 0xffff;
+
+    case INDEX_op_ext8u_i32:
+        return x & 0xff;
+
+    case INDEX_op_ext16u_i32:
+        return x & 0xffff;
+
+#if TCG_TARGET_REG_BITS == 64
+    case INDEX_op_ext8s_i64:
+        return x & (1 << 7) ? x | ~0xffULL : x & 0xff;
+
+    case INDEX_op_ext16s_i64:
+        return x & (1 << 15) ? x | ~0xffffULL : x & 0xffff;
+
+    case INDEX_op_ext32s_i64:
+        return x & (1U << 31) ? x | ~0xffffffffULL : x & 0xffffffff;
+
+    case INDEX_op_ext8u_i64:
+        return x & 0xff;
+
+    case INDEX_op_ext16u_i64:
+        return x & 0xffff;
+
+    case INDEX_op_ext32u_i64:
+        return x & 0xffffffff;
+#endif
+
     default:
         fprintf(stderr,
                 "Unrecognized operation %d in do_constant_folding.\n", op);
@@ -345,6 +395,38 @@ static TCGArg *tcg_constant_folding(TCGContext *s, uint16_t *tcg_opc_ptr,
             gen_args += 2;
             args += 2;
             break;
+        case INDEX_op_not_i32:
+        case INDEX_op_ext8s_i32:
+        case INDEX_op_ext16s_i32:
+        case INDEX_op_ext8u_i32:
+        case INDEX_op_ext16u_i32:
+#if TCG_TARGET_REG_BITS == 64
+        case INDEX_op_not_i64:
+        case INDEX_op_ext8s_i64:
+        case INDEX_op_ext16s_i64:
+        case INDEX_op_ext32s_i64:
+        case INDEX_op_ext8u_i64:
+        case INDEX_op_ext16u_i64:
+        case INDEX_op_ext32u_i64:
+#endif
+            if (state[args[1]] == TCG_TEMP_CONST) {
+                gen_opc_buf[op_index] = op_to_movi(op);
+                gen_args[0] = args[0];
+                gen_args[1] = do_constant_folding(op, vals[args[1]], 0);
+                reset_temp(state, vals, gen_args[0], nb_temps, nb_globals);
+                state[gen_args[0]] = TCG_TEMP_CONST;
+                vals[gen_args[0]] = gen_args[1];
+                gen_args += 2;
+                args += 2;
+                break;
+            } else {
+                reset_temp(state, vals, args[0], nb_temps, nb_globals);
+                gen_args[0] = args[0];
+                gen_args[1] = args[1];
+                gen_args += 2;
+                args += 2;
+                break;
+            }
         case INDEX_op_or_i32:
         case INDEX_op_and_i32:
 #if TCG_TARGET_REG_BITS == 64
-- 
1.7.4.1
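(Postscript for reviewers: a quick illustration of what the new
tcg_constant_folding arm does. This is a standalone sketch, not real TCG
code; struct temp, fold_not and main are hypothetical names, while the
real pass rewrites gen_opc_buf/gen_args as in the last hunk above:

    #include <inttypes.h>
    #include <stdio.h>

    /* Miniature of the pass's new unary-op arm: if the source temp is a
     * known constant, the op becomes a movi of the folded value;
     * otherwise whatever was known about the destination is forgotten. */
    enum { TEMP_UNDEF, TEMP_CONST };

    struct temp { int state; uint64_t val; };

    static void fold_not(struct temp *dst, const struct temp *src)
    {
        if (src->state == TEMP_CONST) {
            dst->state = TEMP_CONST;   /* emit movi dst, ~const */
            dst->val = ~src->val;      /* same fold as do_constant_folding_2 */
        } else {
            dst->state = TEMP_UNDEF;   /* keep the not; reset dst's state */
        }
    }

    int main(void)
    {
        struct temp src = { TEMP_CONST, 0xfffffffbULL }, dst;
        fold_not(&dst, &src);
        /* Prints "const 0xffffffff00000004"; the low 32 bits, 0x4, are
         * what a not_i32 consumer would observe.  As in the patch, ~x is
         * not masked, since the high bits of an i32 temp are don't-care. */
        printf("%s 0x%" PRIx64 "\n",
               dst.state == TEMP_CONST ? "const" : "undef", dst.val);
        return 0;
    }
)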