Make tcg_constant_folding do copy and constant propagation. It is preparatory work before actual constant folding.
Signed-off-by: Kirill Batuzov <batuz...@ispras.ru> --- tcg/optimize.c | 161 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 files changed, 161 insertions(+), 0 deletions(-) diff --git a/tcg/optimize.c b/tcg/optimize.c index 5daf131..7996798 100644 --- a/tcg/optimize.c +++ b/tcg/optimize.c @@ -31,23 +31,178 @@ #include "qemu-common.h" #include "tcg-op.h" +typedef enum { + TCG_TEMP_UNDEF = 0, + TCG_TEMP_CONST, + TCG_TEMP_COPY, + TCG_TEMP_ANY +} tcg_temp_state; + +struct tcg_temp_info { + tcg_temp_state state; + uint16_t num_copies; + tcg_target_ulong val; +}; + +/* Reset TEMP's state to TCG_TEMP_ANY. If TEMP was a representative of some + class of equivalent temp's, a new representative should be chosen in this + class. */ +static void reset_temp(struct tcg_temp_info *temps, TCGArg temp, int nb_temps, + int nb_globals) +{ + int i; + TCGArg new_base; + if (temps[temp].state == TCG_TEMP_COPY) { + temps[temps[temp].val].num_copies--; + } + if (temps[temp].num_copies > 0) { + new_base = (TCGArg)-1; + for (i = nb_globals; i < nb_temps; i++) { + if (temps[i].state == TCG_TEMP_COPY && temps[i].val == temp) { + if (new_base == ((TCGArg)-1)) { + new_base = (TCGArg)i; + temps[i].state = TCG_TEMP_ANY; + temps[i].num_copies = 0; + } else { + temps[i].val = new_base; + temps[new_base].num_copies++; + } + } + } + for (i = 0; i < nb_globals; i++) { + if (temps[i].state == TCG_TEMP_COPY && temps[i].val == temp) { + if (new_base == ((TCGArg)-1)) { + temps[i].state = TCG_TEMP_ANY; + temps[i].num_copies = 0; + } else { + temps[i].val = new_base; + temps[new_base].num_copies++; + } + } + } + } + temps[temp].state = TCG_TEMP_ANY; + temps[temp].num_copies = 0; +} + +static int op_bits(int op) +{ + switch (op) { + case INDEX_op_mov_i32: + return 32; +#if TCG_TARGET_REG_BITS == 64 + case INDEX_op_mov_i64: + return 64; +#endif + default: + fprintf(stderr, "Unrecognized operation %d in op_bits.\n", op); + tcg_abort(); + } +} + +static int op_to_movi(int op) +{ + if (op_bits(op) == 
32) { + return INDEX_op_movi_i32; + } +#if TCG_TARGET_REG_BITS == 64 + if (op_bits(op) == 64) { + return INDEX_op_movi_i64; + } +#endif + tcg_abort(); +} + +/* Propagate constants and copies, fold constant expressions. */ static TCGArg *tcg_constant_folding(TCGContext *s, uint16_t *tcg_opc_ptr, TCGArg *args, TCGOpDef *tcg_op_defs) { int i, nb_ops, op_index, op, nb_temps, nb_globals; const TCGOpDef *def; TCGArg *gen_args; + /* Array VALS has an element for each temp. + If this temp holds a constant then its value is kept in VALS' element. + If this temp is a copy of other ones then this equivalence class' + representative is kept in VALS' element. + If this temp is neither copy nor constant then corresponding VALS' + element is unused. */ + struct tcg_temp_info temps[TCG_MAX_TEMPS]; nb_temps = s->nb_temps; nb_globals = s->nb_globals; + memset(temps, 0, nb_temps * sizeof(struct tcg_temp_info)); nb_ops = tcg_opc_ptr - gen_opc_buf; gen_args = args; for (op_index = 0; op_index < nb_ops; op_index++) { op = gen_opc_buf[op_index]; def = &tcg_op_defs[op]; + /* Do copy propagation */ + if (!(def->flags & (TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS))) { + assert(op != INDEX_op_call); + for (i = def->nb_oargs; i < def->nb_oargs + def->nb_iargs; i++) { + if (temps[args[i]].state == TCG_TEMP_COPY + && !(def->args_ct[i].ct & TCG_CT_IALIAS) + && (def->args_ct[i].ct & TCG_CT_REG)) { + args[i] = temps[args[i]].val; + } + } + } + + /* Propagate constants through copy operations and do constant + folding. Constants will be substituted to arguments by register + allocator where needed and possible. Also detect copies. 
*/ switch (op) { + case INDEX_op_mov_i32: +#if TCG_TARGET_REG_BITS == 64 + case INDEX_op_mov_i64: +#endif + if ((temps[args[1]].state == TCG_TEMP_COPY + && temps[args[1]].val == args[0]) + || args[0] == args[1]) { + args += 2; + gen_opc_buf[op_index] = INDEX_op_nop; + break; + } + if (temps[args[1]].state != TCG_TEMP_CONST) { + reset_temp(temps, args[0], nb_temps, nb_globals); + if (args[1] >= s->nb_globals) { + temps[args[0]].state = TCG_TEMP_COPY; + temps[args[0]].val = args[1]; + temps[args[1]].num_copies++; + } + gen_args[0] = args[0]; + gen_args[1] = args[1]; + gen_args += 2; + args += 2; + break; + } + /* Source argument is constant. Rewrite the operation and + let movi case handle it. */ + op = op_to_movi(op); + gen_opc_buf[op_index] = op; + args[1] = temps[args[1]].val; + /* fallthrough */ + case INDEX_op_movi_i32: +#if TCG_TARGET_REG_BITS == 64 + case INDEX_op_movi_i64: +#endif + reset_temp(temps, args[0], nb_temps, nb_globals); + temps[args[0]].state = TCG_TEMP_CONST; + temps[args[0]].val = args[1]; + assert(temps[args[0]].num_copies == 0); + gen_args[0] = args[0]; + gen_args[1] = args[1]; + gen_args += 2; + args += 2; + break; case INDEX_op_call: + for (i = 0; i < nb_globals; i++) { + reset_temp(temps, i, nb_temps, nb_globals); + } + for (i = 0; i < (args[0] >> 16); i++) { + reset_temp(temps, args[i + 1], nb_temps, nb_globals); + } i = (args[0] >> 16) + (args[0] & 0xffff) + 3; while (i) { *gen_args = *args; @@ -63,6 +218,7 @@ static TCGArg *tcg_constant_folding(TCGContext *s, uint16_t *tcg_opc_ptr, #if TCG_TARGET_REG_BITS == 64 case INDEX_op_brcond_i64: #endif + memset(temps, 0, nb_temps * sizeof(struct tcg_temp_info)); for (i = 0; i < def->nb_args; i++) { *gen_args = *args; args++; @@ -70,6 +226,11 @@ static TCGArg *tcg_constant_folding(TCGContext *s, uint16_t *tcg_opc_ptr, } break; default: + /* Default case: we do know nothing about operation so no + propagation is done. We only trash output args. 
*/ + for (i = 0; i < def->nb_oargs; i++) { + reset_temp(temps, args[i], nb_temps, nb_globals); + } for (i = 0; i < def->nb_args; i++) { gen_args[i] = args[i]; } -- 1.7.4.1