On Tue, Jul 24, 2018 at 12:01 PM Richard Sandiford
<richard.sandif...@arm.com> wrote:
>
> This patch changes {REDUC,DR}_GROUP_{FIRST,NEXT}_ELEMENT from a
> gimple stmt to a stmt_vec_info.
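The practical effect, for anyone skimming the diff below: the group links
used to be gimple stmts, so every DR_GROUP_*/REDUC_GROUP_* hop needed a
vinfo_for_stmt lookup, while with stmt_vec_info links the chains can be
walked directly.  A rough standalone sketch of the resulting traversal
pattern (simplified stand-in types, not the real GCC declarations):

  #include <cstdio>

  struct gimple { int uid; };

  /* Simplified stand-in for GCC's _stmt_vec_info: after the patch the
     group links are stmt_vec_infos rather than gimple stmts.  */
  struct _stmt_vec_info
  {
    gimple *stmt;
    _stmt_vec_info *first_element;  /* DR_GROUP_FIRST_ELEMENT */
    _stmt_vec_info *next_element;   /* DR_GROUP_NEXT_ELEMENT */
  };
  typedef _stmt_vec_info *stmt_vec_info;

  /* With stmt_vec_info links the chain is walked directly, with no
     per-hop lookup; this is the loop shape the diff repeats.  */
  static unsigned
  count_group_members (stmt_vec_info first)
  {
    unsigned n = 0;
    for (stmt_vec_info info = first; info; info = info->next_element)
      n++;
    return n;
  }

  int
  main ()
  {
    gimple g1 = { 1 }, g2 = { 2 };
    _stmt_vec_info b = { &g2, nullptr, nullptr };
    _stmt_vec_info a = { &g1, &a, &b };
    b.first_element = &a;
    printf ("group size: %u\n", count_group_members (&a));
    return 0;
  }

The same loop shape recurs in vect_analyze_group_access_1,
vect_record_grouped_load_vectors and the other functions touched below.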
OK.

> 2018-07-24  Richard Sandiford  <richard.sandif...@arm.com>
>
> gcc/
>         * tree-vectorizer.h (_stmt_vec_info::first_element): Change from
>         a gimple stmt to a stmt_vec_info.
>         (_stmt_vec_info::next_element): Likewise.
>         * tree-vect-data-refs.c (vect_update_misalignment_for_peel)
>         (vect_slp_analyze_and_verify_node_alignment)
>         (vect_analyze_group_access_1, vect_analyze_group_access)
>         (vect_small_gap_p, vect_prune_runtime_alias_test_list)
>         (vect_create_data_ref_ptr, vect_record_grouped_load_vectors)
>         (vect_supportable_dr_alignment): Update accordingly.
>         * tree-vect-loop.c (vect_fixup_reduc_chain): Likewise.
>         (vect_fixup_scalar_cycles_with_patterns, vect_is_slp_reduction)
>         (vect_is_simple_reduction, vectorizable_reduction): Likewise.
>         * tree-vect-patterns.c (vect_reassociating_reduction_p): Likewise.
>         * tree-vect-slp.c (vect_build_slp_tree_1)
>         (vect_attempt_slp_rearrange_stmts, vect_supported_load_permutation_p)
>         (vect_split_slp_store_group, vect_analyze_slp_instance)
>         (vect_analyze_slp, vect_transform_slp_perm_load): Likewise.
>         * tree-vect-stmts.c (vect_model_store_cost, vect_model_load_cost)
>         (get_group_load_store_type, get_load_store_type)
>         (get_group_alias_ptr_type, vectorizable_store, vectorizable_load)
>         (vect_transform_stmt, vect_remove_stores): Likewise.
>
> Index: gcc/tree-vectorizer.h
> ===================================================================
> --- gcc/tree-vectorizer.h	2018-07-24 10:23:04.033010396 +0100
> +++ gcc/tree-vectorizer.h	2018-07-24 10:23:08.536970400 +0100
> @@ -871,9 +871,9 @@ struct _stmt_vec_info {
>
>    /* Interleaving and reduction chains info.  */
>    /* First element in the group.  */
> -  gimple *first_element;
> +  stmt_vec_info first_element;
>    /* Pointer to the next element in the group.  */
> -  gimple *next_element;
> +  stmt_vec_info next_element;
>    /* For data-refs, in case that two or more stmts share data-ref, this is
>       the pointer to the previously detected stmt with the same dr.  */
>    gimple *same_dr_stmt;
> Index: gcc/tree-vect-data-refs.c
> ===================================================================
> --- gcc/tree-vect-data-refs.c	2018-07-24 10:23:04.029010432 +0100
> +++ gcc/tree-vect-data-refs.c	2018-07-24 10:23:08.532970436 +0100
> @@ -1077,7 +1077,7 @@ vect_update_misalignment_for_peel (struc
>    /* For interleaved data accesses the step in the loop must be multiplied by
>       the size of the interleaving group.  */
>    if (STMT_VINFO_GROUPED_ACCESS (stmt_info))
> -    dr_size *= DR_GROUP_SIZE (vinfo_for_stmt (DR_GROUP_FIRST_ELEMENT (stmt_info)));
> +    dr_size *= DR_GROUP_SIZE (DR_GROUP_FIRST_ELEMENT (stmt_info));
>    if (STMT_VINFO_GROUPED_ACCESS (peel_stmt_info))
>      dr_peel_size *= DR_GROUP_SIZE (peel_stmt_info);
>
> @@ -2370,12 +2370,11 @@ vect_slp_analyze_and_verify_node_alignme
>       the node is permuted in which case we start from the first
>       element in the group.  */
>    stmt_vec_info first_stmt_info = SLP_TREE_SCALAR_STMTS (node)[0];
> -  gimple *first_stmt = first_stmt_info->stmt;
>    data_reference_p first_dr = STMT_VINFO_DATA_REF (first_stmt_info);
>    if (SLP_TREE_LOAD_PERMUTATION (node).exists ())
> -    first_stmt = DR_GROUP_FIRST_ELEMENT (first_stmt_info);
> +    first_stmt_info = DR_GROUP_FIRST_ELEMENT (first_stmt_info);
>
> -  data_reference_p dr = STMT_VINFO_DATA_REF (vinfo_for_stmt (first_stmt));
> +  data_reference_p dr = STMT_VINFO_DATA_REF (first_stmt_info);
>    vect_compute_data_ref_alignment (dr);
>    /* For creating the data-ref pointer we need alignment of the
>       first element anyway.  */
> @@ -2520,11 +2519,11 @@ vect_analyze_group_access_1 (struct data
>    if (DR_GROUP_FIRST_ELEMENT (stmt_info) == stmt_info)
>      {
>        /* First stmt in the interleaving chain. Check the chain.  */
> -      gimple *next = DR_GROUP_NEXT_ELEMENT (stmt_info);
> +      stmt_vec_info next = DR_GROUP_NEXT_ELEMENT (stmt_info);
>        struct data_reference *data_ref = dr;
>        unsigned int count = 1;
>        tree prev_init = DR_INIT (data_ref);
> -      gimple *prev = stmt_info;
> +      stmt_vec_info prev = stmt_info;
>        HOST_WIDE_INT diff, gaps = 0;
>
>        /* By construction, all group members have INTEGER_CST DR_INITs.  */
> @@ -2535,8 +2534,7 @@ vect_analyze_group_access_1 (struct data
>  	     stmt, and the rest get their vectorized loads from the first
>  	     one.  */
>  	  if (!tree_int_cst_compare (DR_INIT (data_ref),
> -				     DR_INIT (STMT_VINFO_DATA_REF (
> -						vinfo_for_stmt (next)))))
> +				     DR_INIT (STMT_VINFO_DATA_REF (next))))
>  	    {
>  	      if (DR_IS_WRITE (data_ref))
>  		{
> @@ -2550,16 +2548,16 @@ vect_analyze_group_access_1 (struct data
>  		    dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
>  				     "Two or more load stmts share the same dr.\n");
>
> -	      /* For load use the same data-ref load.  */
> -	      DR_GROUP_SAME_DR_STMT (vinfo_for_stmt (next)) = prev;
> +	      /* For load use the same data-ref load.  */
> +	      DR_GROUP_SAME_DR_STMT (next) = prev;
>
> -	      prev = next;
> -	      next = DR_GROUP_NEXT_ELEMENT (vinfo_for_stmt (next));
> -	      continue;
> +	      prev = next;
> +	      next = DR_GROUP_NEXT_ELEMENT (next);
> +	      continue;
>  	    }
>
> -	  prev = next;
> -	  data_ref = STMT_VINFO_DATA_REF (vinfo_for_stmt (next));
> +	  prev = next;
> +	  data_ref = STMT_VINFO_DATA_REF (next);
>
>  	  /* All group members have the same STEP by construction.  */
>  	  gcc_checking_assert (operand_equal_p (DR_STEP (data_ref), step, 0));
> @@ -2587,12 +2585,12 @@ vect_analyze_group_access_1 (struct data
>
>  	  /* Store the gap from the previous member of the group. If there is no
>  	     gap in the access, DR_GROUP_GAP is always 1.  */
> -	  DR_GROUP_GAP (vinfo_for_stmt (next)) = diff;
> +	  DR_GROUP_GAP (next) = diff;
>
> -	  prev_init = DR_INIT (data_ref);
> -	  next = DR_GROUP_NEXT_ELEMENT (vinfo_for_stmt (next));
> -	  /* Count the number of data-refs in the chain.  */
> -	  count++;
> +	  prev_init = DR_INIT (data_ref);
> +	  next = DR_GROUP_NEXT_ELEMENT (next);
> +	  /* Count the number of data-refs in the chain.  */
> +	  count++;
>  	}
>
>        if (groupsize == 0)
> @@ -2668,15 +2666,13 @@ vect_analyze_group_access (struct data_r
>    if (!vect_analyze_group_access_1 (dr))
>      {
>        /* Dissolve the group if present.  */
> -      gimple *next;
> -      gimple *stmt = DR_GROUP_FIRST_ELEMENT (vect_dr_stmt (dr));
> -      while (stmt)
> -	{
> -	  stmt_vec_info vinfo = vinfo_for_stmt (stmt);
> -	  next = DR_GROUP_NEXT_ELEMENT (vinfo);
> -	  DR_GROUP_FIRST_ELEMENT (vinfo) = NULL;
> -	  DR_GROUP_NEXT_ELEMENT (vinfo) = NULL;
> -	  stmt = next;
> +      stmt_vec_info stmt_info = DR_GROUP_FIRST_ELEMENT (vect_dr_stmt (dr));
> +      while (stmt_info)
> +	{
> +	  stmt_vec_info next = DR_GROUP_NEXT_ELEMENT (stmt_info);
> +	  DR_GROUP_FIRST_ELEMENT (stmt_info) = NULL;
> +	  DR_GROUP_NEXT_ELEMENT (stmt_info) = NULL;
> +	  stmt_info = next;
>  	}
>        return false;
>      }
> @@ -3281,7 +3277,7 @@ vect_small_gap_p (loop_vec_info loop_vin
>    HOST_WIDE_INT count
>      = estimated_poly_value (LOOP_VINFO_VECT_FACTOR (loop_vinfo));
>    if (DR_GROUP_FIRST_ELEMENT (stmt_info))
> -    count *= DR_GROUP_SIZE (vinfo_for_stmt (DR_GROUP_FIRST_ELEMENT (stmt_info)));
> +    count *= DR_GROUP_SIZE (DR_GROUP_FIRST_ELEMENT (stmt_info));
>    return estimated_poly_value (gap) <= count * vect_get_scalar_dr_size (dr);
>  }
>
> @@ -3379,11 +3375,9 @@ vect_prune_runtime_alias_test_list (loop
>        int comp_res;
>        poly_uint64 lower_bound;
>        struct data_reference *dr_a, *dr_b;
> -      gimple *dr_group_first_a, *dr_group_first_b;
>        tree segment_length_a, segment_length_b;
>        unsigned HOST_WIDE_INT access_size_a, access_size_b;
>        unsigned int align_a, align_b;
> -      gimple *stmt_a, *stmt_b;
>
>        /* Ignore the alias if the VF we chose ended up being no greater
>  	 than the dependence distance.  */
> @@ -3409,15 +3403,15 @@ vect_prune_runtime_alias_test_list (loop
>  	}
>
>        dr_a = DDR_A (ddr);
> -      stmt_a = vect_dr_stmt (DDR_A (ddr));
> +      stmt_vec_info stmt_info_a = vect_dr_stmt (DDR_A (ddr));
>
>        dr_b = DDR_B (ddr);
> -      stmt_b = vect_dr_stmt (DDR_B (ddr));
> +      stmt_vec_info stmt_info_b = vect_dr_stmt (DDR_B (ddr));
>
>        /* Skip the pair if inter-iteration dependencies are irrelevant
>  	 and intra-iteration dependencies are guaranteed to be honored.  */
>        if (ignore_step_p
> -	  && (vect_preserves_scalar_order_p (stmt_a, stmt_b)
> +	  && (vect_preserves_scalar_order_p (stmt_info_a, stmt_info_b)
>  	      || vectorizable_with_step_bound_p (dr_a, dr_b, &lower_bound)))
>  	{
>  	  if (dump_enabled_p ())
> @@ -3468,18 +3462,18 @@ vect_prune_runtime_alias_test_list (loop
>  	  continue;
>  	}
>
> -      dr_group_first_a = DR_GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt_a));
> +      stmt_vec_info dr_group_first_a = DR_GROUP_FIRST_ELEMENT (stmt_info_a);
>        if (dr_group_first_a)
>  	{
> -	  stmt_a = dr_group_first_a;
> -	  dr_a = STMT_VINFO_DATA_REF (vinfo_for_stmt (stmt_a));
> +	  stmt_info_a = dr_group_first_a;
> +	  dr_a = STMT_VINFO_DATA_REF (stmt_info_a);
>  	}
>
> -      dr_group_first_b = DR_GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt_b));
> +      stmt_vec_info dr_group_first_b = DR_GROUP_FIRST_ELEMENT (stmt_info_b);
>        if (dr_group_first_b)
>  	{
> -	  stmt_b = dr_group_first_b;
> -	  dr_b = STMT_VINFO_DATA_REF (vinfo_for_stmt (stmt_b));
> +	  stmt_info_b = dr_group_first_b;
> +	  dr_b = STMT_VINFO_DATA_REF (stmt_info_b);
>  	}
>
>        if (ignore_step_p)
> @@ -4734,10 +4728,9 @@ vect_create_data_ref_ptr (gimple *stmt,
>    /* Likewise for any of the data references in the stmt group.  */
>    else if (DR_GROUP_SIZE (stmt_info) > 1)
>      {
> -      gimple *orig_stmt = DR_GROUP_FIRST_ELEMENT (stmt_info);
> +      stmt_vec_info sinfo = DR_GROUP_FIRST_ELEMENT (stmt_info);
>        do
>  	{
> -	  stmt_vec_info sinfo = vinfo_for_stmt (orig_stmt);
>  	  struct data_reference *sdr = STMT_VINFO_DATA_REF (sinfo);
>  	  if (!alias_sets_conflict_p (get_alias_set (aggr_type),
>  				      get_alias_set (DR_REF (sdr))))
>  	    {
>  	      need_ref_all = true;
>  	      break;
>  	    }
> -	  orig_stmt = DR_GROUP_NEXT_ELEMENT (sinfo);
> +	  sinfo = DR_GROUP_NEXT_ELEMENT (sinfo);
>  	}
> -      while (orig_stmt);
> +      while (sinfo);
>      }
>    aggr_ptr_type = build_pointer_type_for_mode (aggr_type, ptr_mode,
>  					       need_ref_all);
> @@ -6345,19 +6338,18 @@ vect_record_grouped_load_vectors (gimple
>  {
>    stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
>    vec_info *vinfo = stmt_info->vinfo;
> -  gimple *first_stmt = DR_GROUP_FIRST_ELEMENT (stmt_info);
> -  gimple *next_stmt;
> +  stmt_vec_info first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info);
>    unsigned int i, gap_count;
>    tree tmp_data_ref;
>
>    /* Put a permuted data-ref in the VECTORIZED_STMT field.
>       Since we scan the chain starting from it's first node, their order
>       corresponds the order of data-refs in RESULT_CHAIN.  */
> -  next_stmt = first_stmt;
> +  stmt_vec_info next_stmt_info = first_stmt_info;
>    gap_count = 1;
>    FOR_EACH_VEC_ELT (result_chain, i, tmp_data_ref)
>      {
> -      if (!next_stmt)
> +      if (!next_stmt_info)
>  	break;
>
>        /* Skip the gaps. Loads created for the gaps will be removed by dead
> @@ -6366,27 +6358,27 @@ vect_record_grouped_load_vectors (gimple
>  	 DR_GROUP_GAP is the number of steps in elements from the previous
>  	 access (if there is no gap DR_GROUP_GAP is 1). We skip loads that
>  	 correspond to the gaps.  */
> -      if (next_stmt != first_stmt
> -	  && gap_count < DR_GROUP_GAP (vinfo_for_stmt (next_stmt)))
> +      if (next_stmt_info != first_stmt_info
> +	  && gap_count < DR_GROUP_GAP (next_stmt_info))
>  	{
>  	  gap_count++;
>  	  continue;
>  	}
>
> -      while (next_stmt)
> +      while (next_stmt_info)
>  	{
>  	  stmt_vec_info new_stmt_info = vinfo->lookup_def (tmp_data_ref);
>  	  /* We assume that if VEC_STMT is not NULL, this is a case of multiple
>  	     copies, and we put the new vector statement in the first available
>  	     RELATED_STMT.  */
> -	  if (!STMT_VINFO_VEC_STMT (vinfo_for_stmt (next_stmt)))
> -	    STMT_VINFO_VEC_STMT (vinfo_for_stmt (next_stmt)) = new_stmt_info;
> +	  if (!STMT_VINFO_VEC_STMT (next_stmt_info))
> +	    STMT_VINFO_VEC_STMT (next_stmt_info) = new_stmt_info;
>  	  else
>  	    {
> -	      if (!DR_GROUP_SAME_DR_STMT (vinfo_for_stmt (next_stmt)))
> +	      if (!DR_GROUP_SAME_DR_STMT (next_stmt_info))
>  		{
>  		  stmt_vec_info prev_stmt_info
> -		    = STMT_VINFO_VEC_STMT (vinfo_for_stmt (next_stmt));
> +		    = STMT_VINFO_VEC_STMT (next_stmt_info);
>  		  stmt_vec_info rel_stmt_info
>  		    = STMT_VINFO_RELATED_STMT (prev_stmt_info);
>  		  while (rel_stmt_info)
> @@ -6399,12 +6391,12 @@ vect_record_grouped_load_vectors (gimple
>  	    }
>  	}
>
> -	  next_stmt = DR_GROUP_NEXT_ELEMENT (vinfo_for_stmt (next_stmt));
> +	  next_stmt_info = DR_GROUP_NEXT_ELEMENT (next_stmt_info);
>  	  gap_count = 1;
> -	  /* If NEXT_STMT accesses the same DR as the previous statement,
> +	  /* If NEXT_STMT_INFO accesses the same DR as the previous statement,
>  	     put the same TMP_DATA_REF as its vectorized statement; otherwise
>  	     get the next data-ref from RESULT_CHAIN.  */
> -	  if (!next_stmt || !DR_GROUP_SAME_DR_STMT (vinfo_for_stmt (next_stmt)))
> +	  if (!next_stmt_info || !DR_GROUP_SAME_DR_STMT (next_stmt_info))
>  	    break;
>  	}
>      }
> @@ -6545,8 +6537,8 @@ vect_supportable_dr_alignment (struct da
>        if (loop_vinfo
>  	  && STMT_SLP_TYPE (stmt_info)
>  	  && !multiple_p (LOOP_VINFO_VECT_FACTOR (loop_vinfo)
> -			  * DR_GROUP_SIZE (vinfo_for_stmt
> -					   (DR_GROUP_FIRST_ELEMENT (stmt_info))),
> +			  * (DR_GROUP_SIZE
> +			     (DR_GROUP_FIRST_ELEMENT (stmt_info))),
>  			  TYPE_VECTOR_SUBPARTS (vectype)))
>  	;
>        else if (!loop_vinfo
> Index: gcc/tree-vect-loop.c
> ===================================================================
> --- gcc/tree-vect-loop.c	2018-07-24 10:23:04.033010396 +0100
> +++ gcc/tree-vect-loop.c	2018-07-24 10:23:08.532970436 +0100
> @@ -661,14 +661,14 @@ vect_fixup_reduc_chain (gimple *stmt)
>    REDUC_GROUP_SIZE (firstp) = REDUC_GROUP_SIZE (stmt_info);
>    do
>      {
> -      stmtp = STMT_VINFO_RELATED_STMT (vinfo_for_stmt (stmt));
> +      stmtp = STMT_VINFO_RELATED_STMT (stmt_info);
>        REDUC_GROUP_FIRST_ELEMENT (stmtp) = firstp;
> -      stmt = REDUC_GROUP_NEXT_ELEMENT (vinfo_for_stmt (stmt));
> -      if (stmt)
> +      stmt_info = REDUC_GROUP_NEXT_ELEMENT (stmt_info);
> +      if (stmt_info)
>  	REDUC_GROUP_NEXT_ELEMENT (stmtp)
> -	  = STMT_VINFO_RELATED_STMT (vinfo_for_stmt (stmt));
> +	  = STMT_VINFO_RELATED_STMT (stmt_info);
>      }
> -  while (stmt);
> +  while (stmt_info);
>    STMT_VINFO_DEF_TYPE (stmtp) = vect_reduction_def;
>  }
>
> @@ -683,12 +683,12 @@ vect_fixup_scalar_cycles_with_patterns (
>    FOR_EACH_VEC_ELT (LOOP_VINFO_REDUCTION_CHAINS (loop_vinfo), i, first)
>      if (STMT_VINFO_IN_PATTERN_P (vinfo_for_stmt (first)))
>        {
> -	gimple *next = REDUC_GROUP_NEXT_ELEMENT (vinfo_for_stmt (first));
> +	stmt_vec_info next = REDUC_GROUP_NEXT_ELEMENT (vinfo_for_stmt (first));
>  	while (next)
>  	  {
> -	    if (! STMT_VINFO_IN_PATTERN_P (vinfo_for_stmt (next)))
> +	    if (! STMT_VINFO_IN_PATTERN_P (next))
>  	      break;
> -	    next = REDUC_GROUP_NEXT_ELEMENT (vinfo_for_stmt (next));
> +	    next = REDUC_GROUP_NEXT_ELEMENT (next);
>  	  }
>  	/* If not all stmt in the chain are patterns try to handle
>  	   the chain without patterns.  */
> @@ -2188,7 +2188,7 @@ vect_analyze_loop_2 (loop_vec_info loop_
>  	  vinfo = SLP_TREE_SCALAR_STMTS (SLP_INSTANCE_TREE (instance))[0];
>  	  if (! STMT_VINFO_GROUPED_ACCESS (vinfo))
>  	    continue;
> -	  vinfo = vinfo_for_stmt (DR_GROUP_FIRST_ELEMENT (vinfo));
> +	  vinfo = DR_GROUP_FIRST_ELEMENT (vinfo);
>  	  unsigned int size = DR_GROUP_SIZE (vinfo);
>  	  tree vectype = STMT_VINFO_VECTYPE (vinfo);
>  	  if (! vect_store_lanes_supported (vectype, size, false)
> @@ -2198,7 +2198,7 @@
>  	  FOR_EACH_VEC_ELT (SLP_INSTANCE_LOADS (instance), j, node)
>  	    {
>  	      vinfo = SLP_TREE_SCALAR_STMTS (node)[0];
> -	      vinfo = vinfo_for_stmt (DR_GROUP_FIRST_ELEMENT (vinfo));
> +	      vinfo = DR_GROUP_FIRST_ELEMENT (vinfo);
>  	      bool single_element_p = !DR_GROUP_NEXT_ELEMENT (vinfo);
>  	      size = DR_GROUP_SIZE (vinfo);
>  	      vectype = STMT_VINFO_VECTYPE (vinfo);
> @@ -2527,7 +2527,7 @@ vect_is_slp_reduction (loop_vec_info loo
>    struct loop *loop = (gimple_bb (phi))->loop_father;
>    struct loop *vect_loop = LOOP_VINFO_LOOP (loop_info);
>    enum tree_code code;
> -  gimple *loop_use_stmt = NULL, *first, *next_stmt;
> +  gimple *loop_use_stmt = NULL;
>    stmt_vec_info use_stmt_info, current_stmt_info = NULL;
>    tree lhs;
>    imm_use_iterator imm_iter;
> @@ -2592,12 +2592,12 @@ vect_is_slp_reduction (loop_vec_info loo
>        use_stmt_info = loop_info->lookup_stmt (loop_use_stmt);
>        if (current_stmt_info)
>  	{
> -	  REDUC_GROUP_NEXT_ELEMENT (current_stmt_info) = loop_use_stmt;
> +	  REDUC_GROUP_NEXT_ELEMENT (current_stmt_info) = use_stmt_info;
>  	  REDUC_GROUP_FIRST_ELEMENT (use_stmt_info)
>  	    = REDUC_GROUP_FIRST_ELEMENT (current_stmt_info);
>  	}
>        else
> -	REDUC_GROUP_FIRST_ELEMENT (use_stmt_info) = loop_use_stmt;
> +	REDUC_GROUP_FIRST_ELEMENT (use_stmt_info) = use_stmt_info;
>
>        lhs = gimple_assign_lhs (loop_use_stmt);
>        current_stmt_info = use_stmt_info;
> @@ -2610,9 +2610,10 @@ vect_is_slp_reduction (loop_vec_info loo
>    /* Swap the operands, if needed, to make the reduction operand be the second
>       operand.  */
>    lhs = PHI_RESULT (phi);
> -  next_stmt = REDUC_GROUP_FIRST_ELEMENT (current_stmt_info);
> -  while (next_stmt)
> +  stmt_vec_info next_stmt_info = REDUC_GROUP_FIRST_ELEMENT (current_stmt_info);
> +  while (next_stmt_info)
>      {
> +      gassign *next_stmt = as_a <gassign *> (next_stmt_info->stmt);
>        if (gimple_assign_rhs2 (next_stmt) == lhs)
>  	{
>  	  tree op = gimple_assign_rhs1 (next_stmt);
> @@ -2626,7 +2627,7 @@ vect_is_slp_reduction (loop_vec_info loo
>  	      && vect_valid_reduction_input_p (def_stmt_info))
>  	    {
>  	      lhs = gimple_assign_lhs (next_stmt);
> -	      next_stmt = REDUC_GROUP_NEXT_ELEMENT (vinfo_for_stmt (next_stmt));
> +	      next_stmt_info = REDUC_GROUP_NEXT_ELEMENT (next_stmt_info);
>  	      continue;
>  	    }
>
> @@ -2663,13 +2664,14 @@ vect_is_slp_reduction (loop_vec_info loo
>  	}
>
>        lhs = gimple_assign_lhs (next_stmt);
> -      next_stmt = REDUC_GROUP_NEXT_ELEMENT (vinfo_for_stmt (next_stmt));
> +      next_stmt_info = REDUC_GROUP_NEXT_ELEMENT (next_stmt_info);
>      }
>
>    /* Save the chain for further analysis in SLP detection.  */
> -  first = REDUC_GROUP_FIRST_ELEMENT (current_stmt_info);
> -  LOOP_VINFO_REDUCTION_CHAINS (loop_info).safe_push (first);
> -  REDUC_GROUP_SIZE (vinfo_for_stmt (first)) = size;
> +  stmt_vec_info first_stmt_info
> +    = REDUC_GROUP_FIRST_ELEMENT (current_stmt_info);
> +  LOOP_VINFO_REDUCTION_CHAINS (loop_info).safe_push (first_stmt_info);
> +  REDUC_GROUP_SIZE (first_stmt_info) = size;
>
>    return true;
>  }
> @@ -3254,12 +3256,12 @@ vect_is_simple_reduction (loop_vec_info
>  	}
>
>        /* Dissolve group eventually half-built by vect_is_slp_reduction.  */
> -      gimple *first = REDUC_GROUP_FIRST_ELEMENT (vinfo_for_stmt (def_stmt));
> +      stmt_vec_info first = REDUC_GROUP_FIRST_ELEMENT (vinfo_for_stmt (def_stmt));
>        while (first)
>  	{
> -	  gimple *next = REDUC_GROUP_NEXT_ELEMENT (vinfo_for_stmt (first));
> -	  REDUC_GROUP_FIRST_ELEMENT (vinfo_for_stmt (first)) = NULL;
> -	  REDUC_GROUP_NEXT_ELEMENT (vinfo_for_stmt (first)) = NULL;
> +	  stmt_vec_info next = REDUC_GROUP_NEXT_ELEMENT (first);
> +	  REDUC_GROUP_FIRST_ELEMENT (first) = NULL;
> +	  REDUC_GROUP_NEXT_ELEMENT (first) = NULL;
>  	  first = next;
>  	}
>
> @@ -6130,7 +6132,8 @@ vectorizable_reduction (gimple *stmt, gi
>      }
>
>    if (REDUC_GROUP_FIRST_ELEMENT (stmt_info))
> -    gcc_assert (slp_node && REDUC_GROUP_FIRST_ELEMENT (stmt_info) == stmt);
> +    gcc_assert (slp_node
> +		&& REDUC_GROUP_FIRST_ELEMENT (stmt_info) == stmt_info);
>
>    if (gimple_code (stmt) == GIMPLE_PHI)
>      {
> @@ -6784,8 +6787,8 @@ vectorizable_reduction (gimple *stmt, gi
>    tree neutral_op = NULL_TREE;
>    if (slp_node)
>      neutral_op = neutral_op_for_slp_reduction
> -      (slp_node_instance->reduc_phis, code,
> -       REDUC_GROUP_FIRST_ELEMENT (stmt_info) != NULL);
> +      (slp_node_instance->reduc_phis, code,
> +       REDUC_GROUP_FIRST_ELEMENT (stmt_info) != NULL_STMT_VEC_INFO);
>
>    if (double_reduc && reduction_type == FOLD_LEFT_REDUCTION)
>      {
> Index: gcc/tree-vect-patterns.c
> ===================================================================
> --- gcc/tree-vect-patterns.c	2018-07-24 10:22:57.277070390 +0100
> +++ gcc/tree-vect-patterns.c	2018-07-24 10:23:08.536970400 +0100
> @@ -820,7 +820,7 @@ vect_reassociating_reduction_p (stmt_vec
>  {
>    return (STMT_VINFO_DEF_TYPE (stmt_vinfo) == vect_reduction_def
>  	  ? STMT_VINFO_REDUC_TYPE (stmt_vinfo) != FOLD_LEFT_REDUCTION
> -	  : REDUC_GROUP_FIRST_ELEMENT (stmt_vinfo) != NULL);
> +	  : REDUC_GROUP_FIRST_ELEMENT (stmt_vinfo) != NULL_STMT_VEC_INFO);
>  }
>
>  /* As above, but also require it to have code CODE and to be a reduction
> Index: gcc/tree-vect-slp.c
> ===================================================================
> --- gcc/tree-vect-slp.c	2018-07-24 10:23:00.401042649 +0100
> +++ gcc/tree-vect-slp.c	2018-07-24 10:23:08.536970400 +0100
> @@ -712,7 +712,7 @@ vect_build_slp_tree_1 (vec_info *vinfo,
>    int icode;
>    machine_mode optab_op2_mode;
>    machine_mode vec_mode;
> -  gimple *first_load = NULL, *prev_first_load = NULL;
> +  stmt_vec_info first_load = NULL, prev_first_load = NULL;
>
>    /* For every stmt in NODE find its def stmt/s.  */
>    stmt_vec_info stmt_info;
> @@ -1692,8 +1692,7 @@ vect_attempt_slp_rearrange_stmts (slp_in
>    FOR_EACH_VEC_ELT (SLP_INSTANCE_LOADS (slp_instn), i, node)
>      {
>        stmt_vec_info first_stmt_info = SLP_TREE_SCALAR_STMTS (node)[0];
> -      first_stmt_info
> -	= vinfo_for_stmt (DR_GROUP_FIRST_ELEMENT (first_stmt_info));
> +      first_stmt_info = DR_GROUP_FIRST_ELEMENT (first_stmt_info);
>        /* But we have to keep those permutations that are required because
>  	 of handling of gaps.  */
>        if (known_eq (unrolling_factor, 1U)
> @@ -1717,7 +1716,6 @@ vect_supported_load_permutation_p (slp_i
>    unsigned int group_size = SLP_INSTANCE_GROUP_SIZE (slp_instn);
>    unsigned int i, j, k, next;
>    slp_tree node;
> -  gimple *next_load;
>
>    if (dump_enabled_p ())
>      {
> @@ -1766,26 +1764,25 @@ vect_supported_load_permutation_p (slp_i
>        if (!SLP_TREE_LOAD_PERMUTATION (node).exists ())
>  	continue;
>        bool subchain_p = true;
> -      next_load = NULL;
> +      stmt_vec_info next_load_info = NULL;
>        stmt_vec_info load_info;
>        FOR_EACH_VEC_ELT (SLP_TREE_SCALAR_STMTS (node), j, load_info)
>  	{
>  	  if (j != 0
> -	      && (next_load != load_info
> +	      && (next_load_info != load_info
>  		  || DR_GROUP_GAP (load_info) != 1))
>  	    {
>  	      subchain_p = false;
>  	      break;
>  	    }
> -	  next_load = DR_GROUP_NEXT_ELEMENT (load_info);
> +	  next_load_info = DR_GROUP_NEXT_ELEMENT (load_info);
>  	}
>        if (subchain_p)
>  	SLP_TREE_LOAD_PERMUTATION (node).release ();
>        else
>  	{
>  	  stmt_vec_info group_info = SLP_TREE_SCALAR_STMTS (node)[0];
> -	  group_info = vinfo_for_stmt (DR_GROUP_FIRST_ELEMENT (group_info));
> +	  group_info = DR_GROUP_FIRST_ELEMENT (group_info);
>  	  unsigned HOST_WIDE_INT nunits;
>  	  unsigned k, maxk = 0;
>  	  FOR_EACH_VEC_ELT (SLP_TREE_LOAD_PERMUTATION (node), j, k)
> @@ -1868,33 +1865,33 @@ vect_find_last_scalar_stmt_in_slp (slp_t
>  vect_split_slp_store_group (gimple *first_stmt, unsigned group1_size)
>  {
>    stmt_vec_info first_vinfo = vinfo_for_stmt (first_stmt);
> -  gcc_assert (DR_GROUP_FIRST_ELEMENT (first_vinfo) == first_stmt);
> +  gcc_assert (DR_GROUP_FIRST_ELEMENT (first_vinfo) == first_vinfo);
>    gcc_assert (group1_size > 0);
>    int group2_size = DR_GROUP_SIZE (first_vinfo) - group1_size;
>    gcc_assert (group2_size > 0);
>    DR_GROUP_SIZE (first_vinfo) = group1_size;
>
> -  gimple *stmt = first_stmt;
> +  stmt_vec_info stmt_info = first_vinfo;
>    for (unsigned i = group1_size; i > 1; i--)
>      {
> -      stmt = DR_GROUP_NEXT_ELEMENT (vinfo_for_stmt (stmt));
> -      gcc_assert (DR_GROUP_GAP (vinfo_for_stmt (stmt)) == 1);
> +      stmt_info = DR_GROUP_NEXT_ELEMENT (stmt_info);
> +      gcc_assert (DR_GROUP_GAP (stmt_info) == 1);
>      }
>    /* STMT is now the last element of the first group.  */
> -  gimple *group2 = DR_GROUP_NEXT_ELEMENT (vinfo_for_stmt (stmt));
> -  DR_GROUP_NEXT_ELEMENT (vinfo_for_stmt (stmt)) = 0;
> +  stmt_vec_info group2 = DR_GROUP_NEXT_ELEMENT (stmt_info);
> +  DR_GROUP_NEXT_ELEMENT (stmt_info) = 0;
>
> -  DR_GROUP_SIZE (vinfo_for_stmt (group2)) = group2_size;
> -  for (stmt = group2; stmt; stmt = DR_GROUP_NEXT_ELEMENT (vinfo_for_stmt (stmt)))
> +  DR_GROUP_SIZE (group2) = group2_size;
> +  for (stmt_info = group2; stmt_info;
> +       stmt_info = DR_GROUP_NEXT_ELEMENT (stmt_info))
>      {
> -      DR_GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)) = group2;
> -      gcc_assert (DR_GROUP_GAP (vinfo_for_stmt (stmt)) == 1);
> +      DR_GROUP_FIRST_ELEMENT (stmt_info) = group2;
> +      gcc_assert (DR_GROUP_GAP (stmt_info) == 1);
>      }
>
>    /* For the second group, the DR_GROUP_GAP is that before the original group,
>       plus skipping over the first vector.  */
> -  DR_GROUP_GAP (vinfo_for_stmt (group2))
> -    = DR_GROUP_GAP (first_vinfo) + group1_size;
> +  DR_GROUP_GAP (group2) = DR_GROUP_GAP (first_vinfo) + group1_size;
>
>    /* DR_GROUP_GAP of the first group now has to skip over the second group
>       too.  */
>    DR_GROUP_GAP (first_vinfo) += group2_size;
> @@ -1928,8 +1925,6 @@ vect_analyze_slp_instance (vec_info *vin
>    slp_tree node;
>    unsigned int group_size;
>    tree vectype, scalar_type = NULL_TREE;
> -  gimple *next;
> -  stmt_vec_info next_info;
>    unsigned int i;
>    vec<slp_tree> loads;
>    struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
> @@ -1970,34 +1965,32 @@ vect_analyze_slp_instance (vec_info *vin
>
>    /* Create a node (a root of the SLP tree) for the packed grouped stores.  */
>    scalar_stmts.create (group_size);
> -  next = stmt;
> +  stmt_vec_info next_info = stmt_info;
>    if (STMT_VINFO_GROUPED_ACCESS (stmt_info))
>      {
>        /* Collect the stores and store them in SLP_TREE_SCALAR_STMTS.  */
> -      while (next)
> +      while (next_info)
>  	{
> -	  next_info = vinfo_for_stmt (next);
>  	  if (STMT_VINFO_IN_PATTERN_P (next_info)
>  	      && STMT_VINFO_RELATED_STMT (next_info))
>  	    scalar_stmts.safe_push (STMT_VINFO_RELATED_STMT (next_info));
>  	  else
>  	    scalar_stmts.safe_push (next_info);
> -	  next = DR_GROUP_NEXT_ELEMENT (vinfo_for_stmt (next));
> +	  next_info = DR_GROUP_NEXT_ELEMENT (next_info);
>  	}
>      }
>    else if (!dr && REDUC_GROUP_FIRST_ELEMENT (stmt_info))
>      {
>        /* Collect the reduction stmts and store them in SLP_TREE_SCALAR_STMTS.  */
> -      while (next)
> +      while (next_info)
>  	{
> -	  next_info = vinfo_for_stmt (next);
>  	  if (STMT_VINFO_IN_PATTERN_P (next_info)
>  	      && STMT_VINFO_RELATED_STMT (next_info))
>  	    scalar_stmts.safe_push (STMT_VINFO_RELATED_STMT (next_info));
>  	  else
>  	    scalar_stmts.safe_push (next_info);
> -	  next = REDUC_GROUP_NEXT_ELEMENT (vinfo_for_stmt (next));
> +	  next_info = REDUC_GROUP_NEXT_ELEMENT (next_info);
>  	}
>        /* Mark the first element of the reduction chain as reduction to properly
>  	 transform the node. In the reduction analysis phase only the last
> @@ -2067,15 +2060,14 @@ vect_analyze_slp_instance (vec_info *vin
>  	  vec<unsigned> load_permutation;
>  	  int j;
>  	  stmt_vec_info load_info;
> -	  gimple *first_stmt;
>  	  bool this_load_permuted = false;
>  	  load_permutation.create (group_size);
> -	  first_stmt = DR_GROUP_FIRST_ELEMENT
> +	  stmt_vec_info first_stmt_info = DR_GROUP_FIRST_ELEMENT
>  	    (SLP_TREE_SCALAR_STMTS (load_node)[0]);
>  	  FOR_EACH_VEC_ELT (SLP_TREE_SCALAR_STMTS (load_node), j, load_info)
>  	    {
>  	      int load_place = vect_get_place_in_interleaving_chain
> -		(load_info, first_stmt);
> +		(load_info, first_stmt_info);
>  	      gcc_assert (load_place != -1);
>  	      if (load_place != j)
>  		this_load_permuted = true;
> @@ -2086,8 +2078,8 @@ vect_analyze_slp_instance (vec_info *vin
>  	      a gap either because the group is larger than the SLP
>  	      group-size or because there is a gap between the groups.  */
>  	      && (known_eq (unrolling_factor, 1U)
> -		  || (group_size == DR_GROUP_SIZE (vinfo_for_stmt (first_stmt))
> -		      && DR_GROUP_GAP (vinfo_for_stmt (first_stmt)) == 0)))
> +		  || (group_size == DR_GROUP_SIZE (first_stmt_info)
> +		      && DR_GROUP_GAP (first_stmt_info) == 0)))
>  	    {
>  	      load_permutation.release ();
>  	      continue;
>  	    }
> @@ -2122,11 +2114,9 @@ vect_analyze_slp_instance (vec_info *vin
>        slp_tree load_node;
>        FOR_EACH_VEC_ELT (loads, i, load_node)
>  	{
> -	  gimple *first_stmt = DR_GROUP_FIRST_ELEMENT
> +	  stmt_vec_info stmt_vinfo = DR_GROUP_FIRST_ELEMENT
>  	    (SLP_TREE_SCALAR_STMTS (load_node)[0]);
> -	  stmt_vec_info stmt_vinfo = vinfo_for_stmt (first_stmt);
> -	  /* Use SLP for strided accesses (or if we
> -	     can't load-lanes).  */
> +	  /* Use SLP for strided accesses (or if we can't load-lanes).  */
>  	  if (STMT_VINFO_STRIDED_P (stmt_vinfo)
>  	      || ! vect_load_lanes_supported (STMT_VINFO_VECTYPE (stmt_vinfo),
> @@ -2230,11 +2220,11 @@ vect_analyze_slp (vec_info *vinfo, unsig
>  				     max_tree_size))
>  	    {
>  	      /* Dissolve reduction chain group.  */
> -	      gimple *next, *stmt = first_element;
> +	      gimple *stmt = first_element;
>  	      while (stmt)
>  		{
>  		  stmt_vec_info vinfo = vinfo_for_stmt (stmt);
> -		  next = REDUC_GROUP_NEXT_ELEMENT (vinfo);
> +		  stmt_vec_info next = REDUC_GROUP_NEXT_ELEMENT (vinfo);
>  		  REDUC_GROUP_FIRST_ELEMENT (vinfo) = NULL;
>  		  REDUC_GROUP_NEXT_ELEMENT (vinfo) = NULL;
>  		  stmt = next;
> @@ -3698,7 +3688,7 @@ vect_transform_slp_perm_load (slp_tree n
>    if (!STMT_VINFO_GROUPED_ACCESS (stmt_info))
>      return false;
>
> -  stmt_info = vinfo_for_stmt (DR_GROUP_FIRST_ELEMENT (stmt_info));
> +  stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info);
>
>    mode = TYPE_MODE (vectype);
>
> Index: gcc/tree-vect-stmts.c
> ===================================================================
> --- gcc/tree-vect-stmts.c	2018-07-24 10:23:00.401042649 +0100
> +++ gcc/tree-vect-stmts.c	2018-07-24 10:23:08.536970400 +0100
> @@ -978,7 +978,7 @@ vect_model_store_cost (stmt_vec_info stm
>  		       stmt_vector_for_cost *cost_vec)
>  {
>    unsigned int inside_cost = 0, prologue_cost = 0;
> -  gimple *first_stmt = STMT_VINFO_STMT (stmt_info);
> +  stmt_vec_info first_stmt_info = stmt_info;
>    bool grouped_access_p = STMT_VINFO_GROUPED_ACCESS (stmt_info);
>
>    /* ??? Somehow we need to fix this at the callers.  */
> @@ -998,12 +998,12 @@ vect_model_store_cost (stmt_vec_info stm
>    /* Grouped stores update all elements in the group at once,
>       so we want the DR for the first statement.  */
>    if (!slp_node && grouped_access_p)
> -    first_stmt = DR_GROUP_FIRST_ELEMENT (stmt_info);
> +    first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info);
>
>    /* True if we should include any once-per-group costs as well as
>       the cost of the statement itself.  For SLP we only get called
>       once per group anyhow.  */
> -  bool first_stmt_p = (first_stmt == STMT_VINFO_STMT (stmt_info));
> +  bool first_stmt_p = (first_stmt_info == stmt_info);
>
>    /* We assume that the cost of a single store-lanes instruction is
>       equivalent to the cost of DR_GROUP_SIZE separate stores.  If a grouped
> @@ -1014,7 +1014,7 @@ vect_model_store_cost (stmt_vec_info stm
>      {
>        /* Uses a high and low interleave or shuffle operations for each
>  	 needed permute.  */
> -      int group_size = DR_GROUP_SIZE (vinfo_for_stmt (first_stmt));
> +      int group_size = DR_GROUP_SIZE (first_stmt_info);
>        int nstmts = ncopies * ceil_log2 (group_size) * group_size;
>        inside_cost = record_stmt_cost (cost_vec, nstmts, vec_perm,
>  				      stmt_info, 0, vect_body);
> @@ -1122,7 +1122,6 @@ vect_model_load_cost (stmt_vec_info stmt
>  		      slp_tree slp_node,
>  		      stmt_vector_for_cost *cost_vec)
>  {
> -  gimple *first_stmt = STMT_VINFO_STMT (stmt_info);
>    unsigned int inside_cost = 0, prologue_cost = 0;
>    bool grouped_access_p = STMT_VINFO_GROUPED_ACCESS (stmt_info);
>
> @@ -1136,28 +1135,27 @@ vect_model_load_cost (stmt_vec_info stmt
>      {
>        /* If the load is permuted then the alignment is determined by
>  	 the first group element not by the first scalar stmt DR.  */
> -      gimple *stmt = DR_GROUP_FIRST_ELEMENT (stmt_info);
> -      stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
> +      stmt_vec_info first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info);
>        /* Record the cost for the permutation.  */
>        unsigned n_perms;
>        unsigned assumed_nunits
> -	= vect_nunits_for_cost (STMT_VINFO_VECTYPE (stmt_info));
> +	= vect_nunits_for_cost (STMT_VINFO_VECTYPE (first_stmt_info));
>        unsigned slp_vf = (ncopies * assumed_nunits) / instance->group_size;
>        vect_transform_slp_perm_load (slp_node, vNULL, NULL,
>  				    slp_vf, instance, true,
>  				    &n_perms);
>        inside_cost += record_stmt_cost (cost_vec, n_perms, vec_perm,
> -				       stmt_info, 0, vect_body);
> +				       first_stmt_info, 0, vect_body);
>        /* And adjust the number of loads performed.  This handles
>  	 redundancies as well as loads that are later dead.  */
> -      auto_sbitmap perm (DR_GROUP_SIZE (stmt_info));
> +      auto_sbitmap perm (DR_GROUP_SIZE (first_stmt_info));
>        bitmap_clear (perm);
>        for (unsigned i = 0;
>  	   i < SLP_TREE_LOAD_PERMUTATION (slp_node).length (); ++i)
>  	bitmap_set_bit (perm, SLP_TREE_LOAD_PERMUTATION (slp_node)[i]);
>        ncopies = 0;
>        bool load_seen = false;
> -      for (unsigned i = 0; i < DR_GROUP_SIZE (stmt_info); ++i)
> +      for (unsigned i = 0; i < DR_GROUP_SIZE (first_stmt_info); ++i)
>  	{
>  	  if (i % assumed_nunits == 0)
>  	    {
> @@ -1171,19 +1169,21 @@ vect_model_load_cost (stmt_vec_info stmt
>        if (load_seen)
>  	ncopies++;
>        gcc_assert (ncopies
> -		  <= (DR_GROUP_SIZE (stmt_info) - DR_GROUP_GAP (stmt_info)
> +		  <= (DR_GROUP_SIZE (first_stmt_info)
> +		      - DR_GROUP_GAP (first_stmt_info)
>  		      + assumed_nunits - 1) / assumed_nunits);
>      }
>
>    /* Grouped loads read all elements in the group at once,
>       so we want the DR for the first statement.  */
> +  stmt_vec_info first_stmt_info = stmt_info;
>    if (!slp_node && grouped_access_p)
> -    first_stmt = DR_GROUP_FIRST_ELEMENT (stmt_info);
> +    first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info);
>
>    /* True if we should include any once-per-group costs as well as
>       the cost of the statement itself.  For SLP we only get called
>       once per group anyhow.  */
> -  bool first_stmt_p = (first_stmt == STMT_VINFO_STMT (stmt_info));
> +  bool first_stmt_p = (first_stmt_info == stmt_info);
>
>    /* We assume that the cost of a single load-lanes instruction is
>       equivalent to the cost of DR_GROUP_SIZE separate loads.  If a grouped
> @@ -1194,7 +1194,7 @@ vect_model_load_cost (stmt_vec_info stmt
>      {
>        /* Uses an even and odd extract operations or shuffle operations
>  	 for each needed permute.  */
> -      int group_size = DR_GROUP_SIZE (vinfo_for_stmt (first_stmt));
> +      int group_size = DR_GROUP_SIZE (first_stmt_info);
>        int nstmts = ncopies * ceil_log2 (group_size) * group_size;
>        inside_cost += record_stmt_cost (cost_vec, nstmts, vec_perm,
>  				       stmt_info, 0, vect_body);
> @@ -2183,12 +2183,12 @@ get_group_load_store_type (gimple *stmt,
>    vec_info *vinfo = stmt_info->vinfo;
>    loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
>    struct loop *loop = loop_vinfo ? LOOP_VINFO_LOOP (loop_vinfo) : NULL;
> -  gimple *first_stmt = DR_GROUP_FIRST_ELEMENT (stmt_info);
> -  data_reference *first_dr = STMT_VINFO_DATA_REF (vinfo_for_stmt (first_stmt));
> -  unsigned int group_size = DR_GROUP_SIZE (vinfo_for_stmt (first_stmt));
> -  bool single_element_p = (stmt == first_stmt
> +  stmt_vec_info first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info);
> +  data_reference *first_dr = STMT_VINFO_DATA_REF (first_stmt_info);
> +  unsigned int group_size = DR_GROUP_SIZE (first_stmt_info);
> +  bool single_element_p = (stmt_info == first_stmt_info
>  			   && !DR_GROUP_NEXT_ELEMENT (stmt_info));
> -  unsigned HOST_WIDE_INT gap = DR_GROUP_GAP (vinfo_for_stmt (first_stmt));
> +  unsigned HOST_WIDE_INT gap = DR_GROUP_GAP (first_stmt_info);
>    poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
>
>    /* True if the vectorized statements would access beyond the last
> @@ -2315,14 +2315,14 @@ get_group_load_store_type (gimple *stmt,
>  	*memory_access_type = VMAT_GATHER_SCATTER;
>      }
>
> -  if (vls_type != VLS_LOAD && first_stmt == stmt)
> +  if (vls_type != VLS_LOAD && first_stmt_info == stmt_info)
>      {
>        /* STMT is the leader of the group. Check the operands of all the
>  	 stmts of the group.  */
> -      gimple *next_stmt = DR_GROUP_NEXT_ELEMENT (stmt_info);
> -      while (next_stmt)
> +      stmt_vec_info next_stmt_info = DR_GROUP_NEXT_ELEMENT (stmt_info);
> +      while (next_stmt_info)
>  	{
> -	  tree op = vect_get_store_rhs (next_stmt);
> +	  tree op = vect_get_store_rhs (next_stmt_info);
>  	  enum vect_def_type dt;
>  	  if (!vect_is_simple_use (op, vinfo, &dt))
>  	    {
> @@ -2331,7 +2331,7 @@ get_group_load_store_type,
>  				 "use not simple.\n");
>  	      return false;
>  	    }
> -	  next_stmt = DR_GROUP_NEXT_ELEMENT (vinfo_for_stmt (next_stmt));
> +	  next_stmt_info = DR_GROUP_NEXT_ELEMENT (next_stmt_info);
>  	}
>      }
>
> @@ -2482,7 +2482,7 @@ get_load_store_type (gimple *stmt, tree
>       traditional behavior until that can be fixed.  */
>    if (*memory_access_type == VMAT_ELEMENTWISE
>        && !STMT_VINFO_STRIDED_P (stmt_info)
> -      && !(stmt == DR_GROUP_FIRST_ELEMENT (stmt_info)
> +      && !(stmt_info == DR_GROUP_FIRST_ELEMENT (stmt_info)
>  	   && !DR_GROUP_NEXT_ELEMENT (stmt_info)
>  	   && !pow2p_hwi (DR_GROUP_SIZE (stmt_info))))
>      {
> @@ -6195,13 +6195,13 @@ ensure_base_align (struct data_reference
>  get_group_alias_ptr_type (gimple *first_stmt)
>  {
>    struct data_reference *first_dr, *next_dr;
> -  gimple *next_stmt;
>
>    first_dr = STMT_VINFO_DATA_REF (vinfo_for_stmt (first_stmt));
> -  next_stmt = DR_GROUP_NEXT_ELEMENT (vinfo_for_stmt (first_stmt));
> -  while (next_stmt)
> +  stmt_vec_info next_stmt_info
> +    = DR_GROUP_NEXT_ELEMENT (vinfo_for_stmt (first_stmt));
> +  while (next_stmt_info)
>      {
> -      next_dr = STMT_VINFO_DATA_REF (vinfo_for_stmt (next_stmt));
> +      next_dr = STMT_VINFO_DATA_REF (next_stmt_info);
>        if (get_alias_set (DR_REF (first_dr))
>  	  != get_alias_set (DR_REF (next_dr)))
>  	{
> @@ -6210,7 +6210,7 @@ get_group_alias_ptr_type (gimple *first_
>  			     "conflicting alias set types.\n");
>  	  return ptr_type_node;
>  	}
> -      next_stmt = DR_GROUP_NEXT_ELEMENT (vinfo_for_stmt (next_stmt));
> +      next_stmt_info = DR_GROUP_NEXT_ELEMENT (next_stmt_info);
>      }
>    return reference_alias_ptr_type (DR_REF (first_dr));
>  }
> @@ -6248,7 +6248,7 @@ vectorizable_store (gimple *stmt, gimple
>    gimple *ptr_incr = NULL;
>    int ncopies;
>    int j;
> -  gimple *next_stmt, *first_stmt;
> +  stmt_vec_info first_stmt_info;
>    bool grouped_store;
>    unsigned int group_size, i;
>    vec<tree> oprnds = vNULL;
> @@ -6400,13 +6400,13 @@ vectorizable_store (gimple *stmt, gimple
>  	     && (slp || memory_access_type != VMAT_CONTIGUOUS));
>    if (grouped_store)
>      {
> -      first_stmt = DR_GROUP_FIRST_ELEMENT (stmt_info);
> -      first_dr = STMT_VINFO_DATA_REF (vinfo_for_stmt (first_stmt));
> -      group_size = DR_GROUP_SIZE (vinfo_for_stmt (first_stmt));
> +      first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info);
> +      first_dr = STMT_VINFO_DATA_REF (first_stmt_info);
> +      group_size = DR_GROUP_SIZE (first_stmt_info);
>      }
>    else
>      {
> -      first_stmt = stmt;
> +      first_stmt_info = stmt_info;
>        first_dr = dr;
>        group_size = vec_num = 1;
>      }
> @@ -6584,10 +6584,7 @@ vectorizable_store (gimple *stmt, gimple
>      }
>
>    if (STMT_VINFO_GROUPED_ACCESS (stmt_info))
> -    {
> -      gimple *group_stmt = DR_GROUP_FIRST_ELEMENT (stmt_info);
> -      DR_GROUP_STORE_COUNT (vinfo_for_stmt (group_stmt))++;
> -    }
> +    DR_GROUP_STORE_COUNT (DR_GROUP_FIRST_ELEMENT (stmt_info))++;
>
>    if (grouped_store)
>      {
> @@ -6596,8 +6593,8 @@ vectorizable_store (gimple *stmt, gimple
>
>        /* We vectorize all the stmts of the interleaving group when we
>  	 reach the last stmt in the group.  */
> -      if (DR_GROUP_STORE_COUNT (vinfo_for_stmt (first_stmt))
> -	  < DR_GROUP_SIZE (vinfo_for_stmt (first_stmt))
> +      if (DR_GROUP_STORE_COUNT (first_stmt_info)
> +	  < DR_GROUP_SIZE (first_stmt_info)
>  	  && !slp)
>  	{
>  	  *vec_stmt = NULL;
> @@ -6610,17 +6607,18 @@ vectorizable_store (gimple *stmt, gimple
>  	  /* VEC_NUM is the number of vect stmts to be created for this
>  	     group.  */
>  	  vec_num = SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node);
> -	  first_stmt = SLP_TREE_SCALAR_STMTS (slp_node)[0];
> -	  gcc_assert (DR_GROUP_FIRST_ELEMENT (vinfo_for_stmt (first_stmt)) == first_stmt);
> -	  first_dr = STMT_VINFO_DATA_REF (vinfo_for_stmt (first_stmt));
> -	  op = vect_get_store_rhs (first_stmt);
> +	  first_stmt_info = SLP_TREE_SCALAR_STMTS (slp_node)[0];
> +	  gcc_assert (DR_GROUP_FIRST_ELEMENT (first_stmt_info)
> +		      == first_stmt_info);
> +	  first_dr = STMT_VINFO_DATA_REF (first_stmt_info);
> +	  op = vect_get_store_rhs (first_stmt_info);
>  	}
>        else
>  	/* VEC_NUM is the number of vect stmts to be created for this
>  	   group.  */
>  	vec_num = group_size;
>
> -      ref_type = get_group_alias_ptr_type (first_stmt);
> +      ref_type = get_group_alias_ptr_type (first_stmt_info);
>      }
>    else
>      ref_type = reference_alias_ptr_type (DR_REF (first_dr));
> @@ -6759,7 +6757,7 @@ vectorizable_store (gimple *stmt, gimple
>
>        prev_stmt_info = NULL;
>        alias_off = build_int_cst (ref_type, 0);
> -      next_stmt = first_stmt;
> +      stmt_vec_info next_stmt_info = first_stmt_info;
>        for (g = 0; g < group_size; g++)
>  	{
>  	  running_off = offvar;
> @@ -6780,7 +6778,7 @@ vectorizable_store (gimple *stmt, gimple
>  	  for (j = 0; j < ncopies; j++)
>  	    {
>  	      /* We've set op and dt above, from vect_get_store_rhs,
> -		 and first_stmt == stmt.  */
> +		 and first_stmt_info == stmt_info.  */
>  	      if (j == 0)
>  		{
>  		  if (slp)
>  		    {
> @@ -6791,8 +6789,9 @@ vectorizable_store (gimple *stmt, gimple
>  		    }
>  		  else
>  		    {
> -		      op = vect_get_store_rhs (next_stmt);
> -		      vec_oprnd = vect_get_vec_def_for_operand (op, next_stmt);
> +		      op = vect_get_store_rhs (next_stmt_info);
> +		      vec_oprnd = vect_get_vec_def_for_operand
> +			(op, next_stmt_info);
>  		    }
>  		}
>  	      else
> @@ -6866,7 +6865,7 @@ vectorizable_store (gimple *stmt, gimple
>  		}
>  	    }
>  	}
> -      next_stmt = DR_GROUP_NEXT_ELEMENT (vinfo_for_stmt (next_stmt));
> +      next_stmt_info = DR_GROUP_NEXT_ELEMENT (next_stmt_info);
>        if (slp)
>  	break;
>      }
> @@ -6985,19 +6984,20 @@ vectorizable_store (gimple *stmt, gimple
>
>  		 If the store is not grouped, DR_GROUP_SIZE is 1, and DR_CHAIN and
>  		 OPRNDS are of size 1.  */
> -	      next_stmt = first_stmt;
> +	      stmt_vec_info next_stmt_info = first_stmt_info;
>  	      for (i = 0; i < group_size; i++)
>  		{
>  		  /* Since gaps are not supported for interleaved stores,
>  		     DR_GROUP_SIZE is the exact number of stmts in the chain.
> -		     Therefore, NEXT_STMT can't be NULL_TREE. In case that
> -		     there is no interleaving, DR_GROUP_SIZE is 1, and only one
> -		     iteration of the loop will be executed.  */
> -		  op = vect_get_store_rhs (next_stmt);
> -		  vec_oprnd = vect_get_vec_def_for_operand (op, next_stmt);
> +		     Therefore, NEXT_STMT_INFO can't be NULL_TREE. In case
> +		     that there is no interleaving, DR_GROUP_SIZE is 1,
> +		     and only one iteration of the loop will be executed.  */
> +		  op = vect_get_store_rhs (next_stmt_info);
> +		  vec_oprnd = vect_get_vec_def_for_operand
> +		    (op, next_stmt_info);
>  		  dr_chain.quick_push (vec_oprnd);
>  		  oprnds.quick_push (vec_oprnd);
> -		  next_stmt = DR_GROUP_NEXT_ELEMENT (vinfo_for_stmt (next_stmt));
> +		  next_stmt_info = DR_GROUP_NEXT_ELEMENT (next_stmt_info);
>  		}
>  	      if (mask)
>  		vec_mask = vect_get_vec_def_for_operand (mask, stmt,
> @@ -7029,7 +7029,7 @@ vectorizable_store (gimple *stmt, gimple
>  	    }
>  	  else
>  	    dataref_ptr
> -	      = vect_create_data_ref_ptr (first_stmt, aggr_type,
> +	      = vect_create_data_ref_ptr (first_stmt_info, aggr_type,
>  					  simd_lane_access_p ? loop : NULL,
>  					  offset, &dummy, gsi, &ptr_incr,
>  					  simd_lane_access_p, &inv_p,
> @@ -7132,7 +7132,7 @@ vectorizable_store (gimple *stmt, gimple
>  				     &result_chain);
>  	}
>
> -      next_stmt = first_stmt;
> +      stmt_vec_info next_stmt_info = first_stmt_info;
>        for (i = 0; i < vec_num; i++)
>  	{
>  	  unsigned align, misalign;
> @@ -7249,8 +7249,8 @@ vectorizable_store (gimple *stmt, gimple
>  	  if (slp)
>  	    continue;
>
> -	  next_stmt = DR_GROUP_NEXT_ELEMENT (vinfo_for_stmt (next_stmt));
> -	  if (!next_stmt)
> +	  next_stmt_info = DR_GROUP_NEXT_ELEMENT (next_stmt_info);
> +	  if (!next_stmt_info)
>  	    break;
>  	}
>      }
> @@ -7423,7 +7423,7 @@ vectorizable_load (gimple *stmt, gimple_
>    gphi *phi = NULL;
>    vec<tree> dr_chain = vNULL;
>    bool grouped_load = false;
> -  gimple *first_stmt;
> +  stmt_vec_info first_stmt_info;
>    stmt_vec_info first_stmt_info_for_drptr = NULL;
>    bool inv_p;
>    bool compute_in_loop = false;
> @@ -7565,8 +7565,8 @@ vectorizable_load (gimple *stmt, gimple_
>    gcc_assert (!nested_in_vect_loop);
>    gcc_assert (!STMT_VINFO_GATHER_SCATTER_P (stmt_info));
>
> -  first_stmt = DR_GROUP_FIRST_ELEMENT (stmt_info);
> -  group_size = DR_GROUP_SIZE (vinfo_for_stmt (first_stmt));
> +  first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info);
> +  group_size = DR_GROUP_SIZE (first_stmt_info);
>
>    if (slp && SLP_TREE_LOAD_PERMUTATION (slp_node).exists ())
>      slp_perm = true;
> @@ -7696,25 +7696,26 @@ vectorizable_load (gimple *stmt, gimple_
>
>    if (grouped_load)
>      {
> -      first_stmt = DR_GROUP_FIRST_ELEMENT (stmt_info);
> -      first_dr = STMT_VINFO_DATA_REF (vinfo_for_stmt (first_stmt));
> +      first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info);
> +      first_dr = STMT_VINFO_DATA_REF (first_stmt_info);
>      }
>    else
>      {
> -      first_stmt = stmt;
> +      first_stmt_info = stmt_info;
>        first_dr = dr;
>      }
>    if (slp && grouped_load)
>      {
> -      group_size = DR_GROUP_SIZE (vinfo_for_stmt (first_stmt));
> -      ref_type = get_group_alias_ptr_type (first_stmt);
> +      group_size = DR_GROUP_SIZE (first_stmt_info);
> +      ref_type = get_group_alias_ptr_type (first_stmt_info);
>      }
>    else
>      {
>        if (grouped_load)
>  	cst_offset
>  	  = (tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (vectype)))
> -	     * vect_get_place_in_interleaving_chain (stmt, first_stmt));
> +	     * vect_get_place_in_interleaving_chain (stmt,
> +						     first_stmt_info));
>        group_size = 1;
>        ref_type = reference_alias_ptr_type (DR_REF (dr));
>      }
> @@ -7924,19 +7925,19 @@ vectorizable_load (gimple *stmt, gimple_
>
>    if (grouped_load)
>      {
> -      first_stmt = DR_GROUP_FIRST_ELEMENT (stmt_info);
> -      group_size = DR_GROUP_SIZE (vinfo_for_stmt (first_stmt));
> +      first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info);
> +      group_size = DR_GROUP_SIZE (first_stmt_info);
>        /* For SLP vectorization we directly vectorize a subchain
>  	 without permutation.  */
>        if (slp && ! SLP_TREE_LOAD_PERMUTATION (slp_node).exists ())
> -	first_stmt = SLP_TREE_SCALAR_STMTS (slp_node)[0];
> +	first_stmt_info = SLP_TREE_SCALAR_STMTS (slp_node)[0];
>        /* For BB vectorization always use the first stmt to base
>  	 the data ref pointer on.  */
>        if (bb_vinfo)
>  	first_stmt_info_for_drptr = SLP_TREE_SCALAR_STMTS (slp_node)[0];
>
>        /* Check if the chain of loads is already vectorized.  */
> -      if (STMT_VINFO_VEC_STMT (vinfo_for_stmt (first_stmt))
> +      if (STMT_VINFO_VEC_STMT (first_stmt_info)
>  	  /* For SLP we would need to copy over SLP_TREE_VEC_STMTS.
>  	     ??? But we can only do so if there is exactly one
>  	     as we have no way to get at the rest.  Leave the CSE
> @@ -7950,7 +7951,7 @@ vectorizable_load (gimple *stmt, gimple_
>  	  *vec_stmt = STMT_VINFO_VEC_STMT (stmt_info);
>  	  return true;
>  	}
> -      first_dr = STMT_VINFO_DATA_REF (vinfo_for_stmt (first_stmt));
> +      first_dr = STMT_VINFO_DATA_REF (first_stmt_info);
>        group_gap_adj = 0;
>
>        /* VEC_NUM is the number of vect stmts to be created for this group.  */
> @@ -7979,11 +7980,11 @@ vectorizable_load (gimple *stmt, gimple_
>        else
>  	vec_num = group_size;
>
> -      ref_type = get_group_alias_ptr_type (first_stmt);
> +      ref_type = get_group_alias_ptr_type (first_stmt_info);
>      }
>    else
>      {
> -      first_stmt = stmt;
> +      first_stmt_info = stmt_info;
>        first_dr = dr;
>        group_size = vec_num = 1;
>        group_gap_adj = 0;
> @@ -8120,7 +8121,7 @@ vectorizable_load (gimple *stmt, gimple_
>        || alignment_support_scheme == dr_explicit_realign)
>        && !compute_in_loop)
>      {
> -      msq = vect_setup_realignment (first_stmt, gsi, &realignment_token,
> +      msq = vect_setup_realignment (first_stmt_info, gsi, &realignment_token,
>  				    alignment_support_scheme, NULL_TREE,
>  				    &at_loop);
>        if (alignment_support_scheme == dr_explicit_realign_optimized)
> @@ -8184,7 +8185,7 @@ vectorizable_load (gimple *stmt, gimple_
>  	    inv_p = false;
>  	  }
>  	else if (first_stmt_info_for_drptr
> -		 && first_stmt != first_stmt_info_for_drptr)
> +		 && first_stmt_info != first_stmt_info_for_drptr)
>  	  {
>  	    dataref_ptr
>  	      = vect_create_data_ref_ptr (first_stmt_info_for_drptr,
> @@ -8209,7 +8210,7 @@ vectorizable_load (gimple *stmt, gimple_
>  	  }
>  	else
>  	  dataref_ptr
> -	    = vect_create_data_ref_ptr (first_stmt, aggr_type, at_loop,
> +	    = vect_create_data_ref_ptr (first_stmt_info, aggr_type, at_loop,
>  					offset, &dummy, gsi, &ptr_incr,
>  					simd_lane_access_p, &inv_p,
>  					byte_offset, bump);
> @@ -8388,7 +8389,7 @@ vectorizable_load (gimple *stmt, gimple_
>  	      tree vs = size_int (TYPE_VECTOR_SUBPARTS (vectype));
>
>  	      if (compute_in_loop)
> -		msq = vect_setup_realignment (first_stmt, gsi,
> +		msq = vect_setup_realignment (first_stmt_info, gsi,
>  					      &realignment_token,
>  					      dr_explicit_realign,
>  					      dataref_ptr, NULL);
> @@ -9708,8 +9709,7 @@ vect_transform_stmt (gimple *stmt, gimpl
>  	   one are skipped, and there vec_stmt_info shouldn't be freed
>  	   meanwhile.  */
>  	*grouped_store = true;
> -	stmt_vec_info group_info
> -	  = vinfo_for_stmt (DR_GROUP_FIRST_ELEMENT (stmt_info));
> +	stmt_vec_info group_info = DR_GROUP_FIRST_ELEMENT (stmt_info);
>  	if (DR_GROUP_STORE_COUNT (group_info) == DR_GROUP_SIZE (group_info))
>  	  is_store = true;
>      }
> @@ -9817,14 +9817,13 @@ vect_transform_stmt (gimple *stmt, gimpl
>  vect_remove_stores (gimple *first_stmt)
>  {
>    gimple *next = first_stmt;
> -  gimple *tmp;
>    gimple_stmt_iterator next_si;
>
>    while (next)
>      {
>        stmt_vec_info stmt_info = vinfo_for_stmt (next);
>
> -      tmp = DR_GROUP_NEXT_ELEMENT (stmt_info);
> +      stmt_vec_info tmp = DR_GROUP_NEXT_ELEMENT (stmt_info);
>        if (is_pattern_stmt_p (stmt_info))
>  	next = STMT_VINFO_RELATED_STMT (stmt_info);
>        /* Free the attached stmt_vec_info and remove the stmt.  */