Hi,

In the expression reassociation pass, statements may get moved downwards to ensure that dependences are not violated after reassociation. This can lengthen live ranges and, in a tight loop, result in spills. This patch simply performs code motion on the statements involved in reassociation, pushing them upwards in the BB as far as possible without violating dependences; a hypothetical example of the problem is sketched below. Bootstraps with no test regressions on an x86_64 machine running Linux. Ok for trunk?
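To make the live-range problem concrete, here is a hypothetical illustration (the function and its names are invented for this mail; it is not taken from the patch or the testsuite):

/* Built with -O2 -ffast-math, the summation chain feeding s is
   linearized and rewritten in rank order by reassociation.  To keep
   dependences valid once the operands are reordered, the rewritten
   adds may be sunk towards the bottom of the loop body, so t0..t3
   stay live across more statements than before; in a tight loop the
   extra register pressure can cause spills.  Hoisting each rewritten
   add back up, just below the last definition it depends on, shrinks
   those live ranges again.  */
double
accumulate (const double *a, const double *b, int n)
{
  double s = 0.0;
  int i;
  for (i = 0; i < n; i++)
    {
      double t0 = a[i] * b[i];
      double t1 = a[i] * 2.0;
      double t2 = b[i] * 3.0;
      double t3 = a[i] - b[i];
      s += t0 + t1 + t2 + t3;  /* Reassociation candidate chain.  */
    }
  return s;
}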
- Easwaran

-----------

2012-10-10  Easwaran Raman  <era...@google.com>

	* tree-ssa-reassoc.c (move_stmt_upwards): New function.
	(rewrite_expr_tree): Record statements to be moved.
	(reassociate_bb): Move statements affected by reassociation
	as early as possible.

Index: gcc/tree-ssa-reassoc.c
===================================================================
--- gcc/tree-ssa-reassoc.c	(revision 191879)
+++ gcc/tree-ssa-reassoc.c	(working copy)
@@ -2250,13 +2250,51 @@ swap_ops_for_binary_stmt (VEC(operand_entry_t, hea
     }
 }
 
+/* Move STMT up within its BB until it cannot be moved any further.  */
+
+static void move_stmt_upwards (gimple stmt)
+{
+  gimple_stmt_iterator gsi, gsistmt;
+  tree rhs1, rhs2;
+  gimple rhs1def = NULL, rhs2def = NULL;
+  rhs1 = gimple_assign_rhs1 (stmt);
+  rhs2 = gimple_assign_rhs2 (stmt);
+  gcc_assert (rhs1);
+  if (TREE_CODE (rhs1) == SSA_NAME)
+    rhs1def = SSA_NAME_DEF_STMT (rhs1);
+  else if (TREE_CODE (rhs1) != REAL_CST
+	   && TREE_CODE (rhs1) != INTEGER_CST)
+    return;
+  if (rhs2)
+    {
+      if (TREE_CODE (rhs2) == SSA_NAME)
+	rhs2def = SSA_NAME_DEF_STMT (rhs2);
+      else if (TREE_CODE (rhs2) != REAL_CST
+	       && TREE_CODE (rhs2) != INTEGER_CST)
+	return;
+    }
+  gsi = gsi_for_stmt (stmt);
+  gsistmt = gsi;
+  gsi_prev (&gsi);
+  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
+    {
+      gimple curr_stmt = gsi_stmt (gsi);
+      if (curr_stmt == rhs1def || curr_stmt == rhs2def)
+	{
+	  gsi_move_after (&gsistmt, &gsi);
+	  return;
+	}
+    }
+}
+
 /* Recursively rewrite our linearized statements so that the operators
    match those in OPS[OPINDEX], putting the computation in rank
    order.  */
 
 static void
 rewrite_expr_tree (gimple stmt, unsigned int opindex,
-		   VEC(operand_entry_t, heap) * ops, bool moved)
+		   VEC(operand_entry_t, heap) * ops, bool moved,
+		   VEC(gimple, heap) **stmts_to_move)
 {
   tree rhs1 = gimple_assign_rhs1 (stmt);
   tree rhs2 = gimple_assign_rhs2 (stmt);
@@ -2299,6 +2337,7 @@ rewrite_expr_tree (gimple stmt, unsigned int opind
 	      print_gimple_stmt (dump_file, stmt, 0, 0);
 	    }
 	}
+      VEC_safe_push (gimple, heap, *stmts_to_move, stmt);
       return;
     }
 
@@ -2346,7 +2385,9 @@ rewrite_expr_tree (gimple stmt, unsigned int opind
     }
   /* Recurse on the LHS of the binary operator, which is guaranteed to
      be the non-leaf side.  */
-  rewrite_expr_tree (SSA_NAME_DEF_STMT (rhs1), opindex + 1, ops, moved);
+  rewrite_expr_tree (SSA_NAME_DEF_STMT (rhs1), opindex + 1, ops, moved,
+		     stmts_to_move);
+  VEC_safe_push (gimple, heap, *stmts_to_move, stmt);
 }
 
 /* Find out how many cycles we need to compute statements chain.
@@ -3427,6 +3468,9 @@ reassociate_bb (basic_block bb)
 {
   gimple_stmt_iterator gsi;
   basic_block son;
+  VEC(gimple, heap) *stmts_to_move = NULL;
+  gimple stmt;
+  int i;
 
   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
     {
@@ -3542,7 +3586,7 @@ reassociate_bb (basic_block bb)
 		   && VEC_length (operand_entry_t, ops) > 3)
 	    rewrite_expr_tree_parallel (stmt, width, ops);
 	  else
-	    rewrite_expr_tree (stmt, 0, ops, false);
+	    rewrite_expr_tree (stmt, 0, ops, false, &stmts_to_move);
 
 	  /* If we combined some repeated factors into a
 	     __builtin_powi call, multiply that result by the
@@ -3560,6 +3604,7 @@ reassociate_bb (basic_block bb)
 						 target_ssa);
 		  gimple_set_location (mul_stmt, gimple_location (stmt));
 		  gsi_insert_after (&gsi, mul_stmt, GSI_NEW_STMT);
+		  VEC_safe_push (gimple, heap, stmts_to_move, mul_stmt);
 		}
 	    }
 
@@ -3567,6 +3612,11 @@ reassociate_bb (basic_block bb)
 	    }
 	}
     }
+
+  FOR_EACH_VEC_ELT (gimple, stmts_to_move, i, stmt)
+    move_stmt_upwards (stmt);
+  VEC_free (gimple, heap, stmts_to_move);
+
   for (son = first_dom_son (CDI_POST_DOMINATORS, bb);
        son;
        son = next_dom_son (CDI_POST_DOMINATORS, son))