Hi, On Fri, 30 Jan 2015, Tom de Vries wrote:
> > Maybe you want to pick up the work? > > In principle yes, depending on the amount of work (at this point I have no > idea what remains to be done and how long that would take me). > > Michael, are your patches posted somewhere? I don't think I ever sent them. Pasted below, from sometime in October last year. This essentially moves expanding va_arg to pass_stdarg. But it does not yet make use of the possibilities this would bring, namely throwing away a whole lot of fragile code in pass_stdarg that tries to recover from expanding va_arg too early. To avoid having to touch each backend it retains expanding va_arg as a tree expression that needs to go through the gimplifier, which can create new basic blocks that need to be discovered after the fact, so there's some shuffling of code in tree-cfg as well. I also seem to remember that there was a problem with my using temporaries of the LHS for the new va_arg internal call, some types can't be copied and hence no temporaries can be created. I can't seem to trigger this right now, but this needs to be dealt with somehow I think (but that requires the final lvalue be available when lowering the VA_ARG_EXPR). I think that's about it, hence, updating to current compiler, fixing the above problem (if it's still one), and then cleaning up pass_stdarg to make use of the availability of IFN_VA_ARG. Ciao, Michael. ---------------- Index: gimplify.c =================================================================== --- gimplify.c (revision 216512) +++ gimplify.c (working copy) @@ -9001,6 +9001,39 @@ dummy_object (tree type) return build2 (MEM_REF, type, t, t); } +/* Call the target expander for evaluating a va_arg call of VALIST + and TYPE. 
*/ + +tree +gimplify_va_arg_internal (tree valist, tree type, location_t loc, + gimple_seq *pre_p, gimple_seq *post_p) +{ + tree have_va_type = TREE_TYPE (valist); + have_va_type = targetm.canonical_va_list_type (have_va_type); + + /* Make it easier for the backends by protecting the valist argument + from multiple evaluations. */ + if (TREE_CODE (have_va_type) == ARRAY_TYPE) + { + /* For this case, the backends will be expecting a pointer to + TREE_TYPE (abi), but it's possible we've + actually been given an array (an actual TARGET_FN_ABI_VA_LIST). + So fix it. */ + if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE) + { + tree p1 = build_pointer_type (TREE_TYPE (have_va_type)); + valist = fold_convert_loc (loc, p1, + build_fold_addr_expr_loc (loc, valist)); + } + + gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue); + } + else + gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue); + + return targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p); +} + /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a builtin function, but a very special sort of operator. */ @@ -9027,8 +9060,7 @@ gimplify_va_arg_expr (tree *expr_p, gimp /* Generate a diagnostic for requesting data of a type that cannot be passed through `...' due to type promotion at the call site. */ - if ((promoted_type = lang_hooks.types.type_promotes_to (type)) - != type) + if ((promoted_type = lang_hooks.types.type_promotes_to (type)) != type) { static bool gave_help; bool warned; @@ -9062,36 +9094,28 @@ gimplify_va_arg_expr (tree *expr_p, gimp *expr_p = dummy_object (type); return GS_ALL_DONE; } - else + else if (optimize && !optimize_debug) { - /* Make it easier for the backends by protecting the valist argument - from multiple evaluations. 
*/ - if (TREE_CODE (have_va_type) == ARRAY_TYPE) + tree tmp, tag; + gimple call; + tmp = build_fold_addr_expr_loc (loc, valist); + if (gimplify_arg (&tmp, pre_p, loc) == GS_ERROR) + return GS_ERROR; + tag = build_int_cst (build_pointer_type (type), 0); + call = gimple_build_call_internal (IFN_VA_ARG, 2, tmp, tag); + gimple_seq_add_stmt (pre_p, call); + if (VOID_TYPE_P (type)) { - /* For this case, the backends will be expecting a pointer to - TREE_TYPE (abi), but it's possible we've - actually been given an array (an actual TARGET_FN_ABI_VA_LIST). - So fix it. */ - if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE) - { - tree p1 = build_pointer_type (TREE_TYPE (have_va_type)); - valist = fold_convert_loc (loc, p1, - build_fold_addr_expr_loc (loc, valist)); - } - - gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue); + *expr_p = NULL; + return GS_ALL_DONE; } - else - gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue); - - if (!targetm.gimplify_va_arg_expr) - /* FIXME: Once most targets are converted we should merely - assert this is non-null. */ - return GS_ALL_DONE; - - *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p); - return GS_OK; + tmp = create_tmp_var (type, NULL); + gimple_set_lhs (call, tmp); + *expr_p = tmp; } + else + *expr_p = gimplify_va_arg_internal (valist, type, loc, pre_p, post_p); + return GS_OK; } /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P. 
Index: gimplify.h =================================================================== --- gimplify.h (revision 216512) +++ gimplify.h (working copy) @@ -79,6 +79,8 @@ extern void gimplify_one_sizepos (tree * extern gimple gimplify_body (tree, bool); extern enum gimplify_status gimplify_arg (tree *, gimple_seq *, location_t); extern void gimplify_function_tree (tree); +extern tree gimplify_va_arg_internal (tree, tree, location_t, gimple_seq *, + gimple_seq *); extern enum gimplify_status gimplify_va_arg_expr (tree *, gimple_seq *, gimple_seq *); gimple gimplify_assign (tree, tree, gimple_seq *); Index: internal-fn.c =================================================================== --- internal-fn.c (revision 216512) +++ internal-fn.c (working copy) @@ -923,6 +923,11 @@ expand_BUILTIN_EXPECT (gimple stmt) emit_move_insn (target, val); } +static void +expand_VA_ARG (gimple stmt ATTRIBUTE_UNUSED) +{ +} + /* Routines to expand each internal function, indexed by function number. Each routine has the prototype: Index: internal-fn.def =================================================================== --- internal-fn.def (revision 216512) +++ internal-fn.def (working copy) @@ -57,3 +57,4 @@ DEF_INTERNAL_FN (UBSAN_OBJECT_SIZE, ECF_ DEF_INTERNAL_FN (ABNORMAL_DISPATCHER, ECF_NORETURN, NULL) DEF_INTERNAL_FN (BUILTIN_EXPECT, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL) DEF_INTERNAL_FN (ASAN_CHECK, ECF_TM_PURE | ECF_LEAF | ECF_NOTHROW, ".W...") +DEF_INTERNAL_FN (VA_ARG, 0, NULL) Index: targhooks.c =================================================================== --- targhooks.c (revision 216512) +++ targhooks.c (working copy) @@ -1714,6 +1714,12 @@ std_gimplify_va_arg_expr (tree valist, t if (boundary > align && !integer_zerop (TYPE_SIZE (type))) { + t = fold_build_pointer_plus_hwi (valist_tmp, boundary - 1); + t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist), t, + build_int_cst (TREE_TYPE (valist), -boundary)); + gimplify_expr (&t, pre_p, post_p, is_gimple_val, fb_rvalue); + 
valist_tmp = t; + /*t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp, t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp, fold_build_pointer_plus_hwi (valist_tmp, boundary - 1)); gimplify_and_add (t, pre_p); @@ -1722,7 +1728,7 @@ std_gimplify_va_arg_expr (tree valist, t fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist), valist_tmp, build_int_cst (TREE_TYPE (valist), -boundary))); - gimplify_and_add (t, pre_p); + gimplify_and_add (t, pre_p);*/ } else boundary = align; Index: tree-cfg.c =================================================================== --- tree-cfg.c (revision 216512) +++ tree-cfg.c (working copy) @@ -472,14 +472,13 @@ gimple_call_initialize_ctrl_altering (gi /* Build a flowgraph for the sequence of stmts SEQ. */ -static void -make_blocks (gimple_seq seq) +static basic_block +make_blocks_1 (gimple_seq seq, basic_block bb) { gimple_stmt_iterator i = gsi_start (seq); gimple stmt = NULL; bool start_new_block = true; bool first_stmt_of_seq = true; - basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun); while (!gsi_end_p (i)) { @@ -536,8 +535,16 @@ make_blocks (gimple_seq seq) gsi_next (&i); first_stmt_of_seq = false; } + return bb; } +/* Build a flowgraph for the sequence of stmts SEQ. */ + +static void +make_blocks (gimple_seq seq) +{ + make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun)); +} /* Create and return a new empty basic block after bb AFTER. */ @@ -761,6 +768,113 @@ handle_abnormal_edges (basic_block *disp make_edge (*dispatcher, for_bb, EDGE_ABNORMAL); } +/* Creates outgoing edges for BB. Returns 1 when it ends with a + computed goto, returns 2 when it ends with a statement that + might return to this function via a nonlocal goto, otherwise + return 0. Updates *PCUR_REGION with the OMP region this BB is in. 
*/ + +static int +make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index) +{ + gimple last = last_stmt (bb); + bool fallthru; + int ret = 0; + + if (last) + { + enum gimple_code code = gimple_code (last); + switch (code) + { + case GIMPLE_GOTO: + if (make_goto_expr_edges (bb)) + ret = 1; + fallthru = false; + break; + case GIMPLE_RETURN: + { + edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0); + e->goto_locus = gimple_location (last); + fallthru = false; + } + break; + case GIMPLE_COND: + make_cond_expr_edges (bb); + fallthru = false; + break; + case GIMPLE_SWITCH: + make_gimple_switch_edges (bb); + fallthru = false; + break; + case GIMPLE_RESX: + make_eh_edges (last); + fallthru = false; + break; + case GIMPLE_EH_DISPATCH: + fallthru = make_eh_dispatch_edges (last); + break; + + case GIMPLE_CALL: + /* If this function receives a nonlocal goto, then we need to + make edges from this call site to all the nonlocal goto + handlers. */ + if (stmt_can_make_abnormal_goto (last)) + ret = 2; + + /* If this statement has reachable exception handlers, then + create abnormal edges to them. */ + make_eh_edges (last); + + /* BUILTIN_RETURN is really a return statement. */ + if (gimple_call_builtin_p (last, BUILT_IN_RETURN)) + { + make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0); + fallthru = false; + } + /* Some calls are known not to return. */ + else + fallthru = !(gimple_call_flags (last) & ECF_NORETURN); + break; + + case GIMPLE_ASSIGN: + /* A GIMPLE_ASSIGN may throw internally and thus be considered + control-altering. 
*/ + if (is_ctrl_altering_stmt (last)) + make_eh_edges (last); + fallthru = true; + break; + + case GIMPLE_ASM: + make_gimple_asm_edges (bb); + fallthru = true; + break; + + CASE_GIMPLE_OMP: + fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index); + break; + + case GIMPLE_TRANSACTION: + { + tree abort_label = gimple_transaction_label (last); + if (abort_label) + make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT); + fallthru = true; + } + break; + + default: + gcc_assert (!stmt_ends_bb_p (last)); + fallthru = true; + } + } + else + fallthru = true; + + if (fallthru) + make_edge (bb, bb->next_bb, EDGE_FALLTHRU); + + return ret; +} + /* Join all the blocks in the flowgraph. */ static void @@ -782,106 +896,19 @@ make_edges (void) /* Traverse the basic block array placing edges. */ FOR_EACH_BB_FN (bb, cfun) { - gimple last = last_stmt (bb); - bool fallthru; + int mer; if (bb_to_omp_idx) bb_to_omp_idx[bb->index] = cur_omp_region_idx; - if (last) - { - enum gimple_code code = gimple_code (last); - switch (code) - { - case GIMPLE_GOTO: - if (make_goto_expr_edges (bb)) - ab_edge_goto.safe_push (bb); - fallthru = false; - break; - case GIMPLE_RETURN: - { - edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0); - e->goto_locus = gimple_location (last); - fallthru = false; - } - break; - case GIMPLE_COND: - make_cond_expr_edges (bb); - fallthru = false; - break; - case GIMPLE_SWITCH: - make_gimple_switch_edges (bb); - fallthru = false; - break; - case GIMPLE_RESX: - make_eh_edges (last); - fallthru = false; - break; - case GIMPLE_EH_DISPATCH: - fallthru = make_eh_dispatch_edges (last); - break; - - case GIMPLE_CALL: - /* If this function receives a nonlocal goto, then we need to - make edges from this call site to all the nonlocal goto - handlers. */ - if (stmt_can_make_abnormal_goto (last)) - ab_edge_call.safe_push (bb); - - /* If this statement has reachable exception handlers, then - create abnormal edges to them. 
*/ - make_eh_edges (last); - - /* BUILTIN_RETURN is really a return statement. */ - if (gimple_call_builtin_p (last, BUILT_IN_RETURN)) - { - make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0); - fallthru = false; - } - /* Some calls are known not to return. */ - else - fallthru = !(gimple_call_flags (last) & ECF_NORETURN); - break; - - case GIMPLE_ASSIGN: - /* A GIMPLE_ASSIGN may throw internally and thus be considered - control-altering. */ - if (is_ctrl_altering_stmt (last)) - make_eh_edges (last); - fallthru = true; - break; + mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx); + if (mer == 1) + ab_edge_goto.safe_push (bb); + else if (mer == 2) + ab_edge_call.safe_push (bb); - case GIMPLE_ASM: - make_gimple_asm_edges (bb); - fallthru = true; - break; - - CASE_GIMPLE_OMP: - fallthru = make_gimple_omp_edges (bb, &cur_region, - &cur_omp_region_idx); - if (cur_region && bb_to_omp_idx == NULL) - bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun)); - break; - - case GIMPLE_TRANSACTION: - { - tree abort_label = gimple_transaction_label (last); - if (abort_label) - make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT); - fallthru = true; - } - break; - - default: - gcc_assert (!stmt_ends_bb_p (last)); - fallthru = true; - } - } - else - fallthru = true; - - if (fallthru) - make_edge (bb, bb->next_bb, EDGE_FALLTHRU); + if (cur_region && bb_to_omp_idx == NULL) + bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun)); } /* Computed gotos are hell to deal with, especially if there are @@ -961,6 +988,37 @@ make_edges (void) fold_cond_expr_cond (); } +extern bool gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi); +bool +gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi) +{ + gimple stmt = gsi_stmt (*gsi); + basic_block bb = gimple_bb (stmt); + basic_block lastbb, afterbb; + int old_num_bbs = n_basic_blocks_for_fn (cfun); + edge e; + lastbb = make_blocks_1 (seq, bb); + if (old_num_bbs == n_basic_blocks_for_fn (cfun)) + 
return false; + e = split_block (bb, stmt); + /* Move e->dest to come after the new basic blocks. */ + afterbb = e->dest; + unlink_block (afterbb); + link_block (afterbb, lastbb); + redirect_edge_succ (e, bb->next_bb); + bb = bb->next_bb; + while (bb != afterbb) + { + struct omp_region *cur_region = NULL; + int cur_omp_region_idx = 0; + int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx); + gcc_assert (!mer && !cur_region); + add_bb_to_loop (bb, afterbb->loop_father); + bb = bb->next_bb; + } + return true; +} + /* Find the next available discriminator value for LOCUS. The discriminator distinguishes among several basic blocks that share a common locus, allowing for more accurate sample-based Index: tree-stdarg.c =================================================================== --- tree-stdarg.c (revision 216512) +++ tree-stdarg.c (working copy) @@ -43,10 +43,12 @@ along with GCC; see the file COPYING3. #include "gimple-iterator.h" #include "gimple-walk.h" #include "gimple-ssa.h" +#include "gimplify.h" #include "tree-phinodes.h" #include "ssa-iterators.h" #include "stringpool.h" #include "tree-ssanames.h" +#include "tree-into-ssa.h" #include "sbitmap.h" #include "tree-pass.h" #include "tree-stdarg.h" @@ -670,6 +672,9 @@ check_all_va_list_escapes (struct stdarg } +extern bool gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi); +extern void update_modified_stmts (gimple_seq seq); + namespace { const pass_data pass_data_stdarg = @@ -693,10 +698,13 @@ public: {} /* opt_pass methods: */ - virtual bool gate (function *fun) + virtual bool gate (function *) { /* This optimization is only for stdarg functions. */ - return fun->stdarg != 0; + /*return cfun->stdarg != 0;*/ + /* ??? cfun->stdarg is only set when the signature has "...". + But other functions can use va_arg too. 
*/ + return true; } virtual unsigned int execute (function *); @@ -713,6 +721,7 @@ pass_stdarg::execute (function *fun) struct walk_stmt_info wi; const char *funcname = NULL; tree cfun_va_list; + unsigned int retflags = 0; fun->va_list_gpr_size = 0; fun->va_list_fpr_size = 0; @@ -729,6 +738,65 @@ pass_stdarg::execute (function *fun) || TREE_TYPE (cfun_va_list) == char_type_node); gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr); + + FOR_EACH_BB_FN (bb, fun) + { + gimple_stmt_iterator i; + + for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i)) + { + gimple stmt = gsi_stmt (i); + + if (is_gimple_call (stmt) + && gimple_call_internal_p (stmt) + && gimple_call_internal_fn (stmt) == IFN_VA_ARG) + { + gimple_seq pre = NULL, post = NULL; + tree type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1))); + tree ap; + tree expr; + gimple g; + + ap = gimple_call_arg (stmt, 0); + ap = build_fold_indirect_ref (ap); + + push_gimplify_context (false); + expr = gimplify_va_arg_internal (ap, type, gimple_location (stmt), + &pre, &post); + + if (gimple_call_lhs (stmt)) + { + gcc_assert (useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)), type)); + gimplify_expr (&expr, &pre, &post, is_gimple_val, fb_rvalue); + g = gimple_build_assign (gimple_call_lhs (stmt), expr); + gimple_set_location (g, gimple_location (stmt)); + gimple_seq_add_stmt (&pre, g); + } + + pop_gimplify_context (NULL); + + gimple_seq_add_seq (&pre, post); + + update_modified_stmts (pre); + gimple_find_sub_bbs (pre, &i); + /*gsi_insert_seq_after (&i, pre, GSI_SAME_STMT);*/ + gsi_remove (&i, true); + retflags = TODO_update_ssa; + if (gsi_end_p (i)) + break; + } + } + } + + if (retflags) + { + free_dominance_info (CDI_DOMINATORS); + update_ssa (retflags); + } + + if (!cfun->stdarg) + goto finish; + FOR_EACH_BB_FN (bb, fun) { gimple_stmt_iterator i;