On Fri, 29 May 2015, Richard Biener wrote:

> In addition to the required backport of PR66251 (testing right now)
> we need to fix hybrid stmt detection for trunk and for a related
> testcase also on the branch.
But it needed even more surgery, so the following is what I have
applied after bootstrapping and testing on x86_64-unknown-linux-gnu.

Richard.

2015-06-01  Richard Biener  <rguent...@suse.de>

	PR tree-optimization/66280
	* tree-vect-slp.c (vect_detect_hybrid_slp_stmts): Fix pattern
	def-use walking.

	* g++.dg/torture/pr66280.C: New testcase.
	* g++.dg/torture/pr66280-2.C: Likewise.

Index: gcc/tree-vect-slp.c
===================================================================
*** gcc/tree-vect-slp.c	(revision 223860)
--- gcc/tree-vect-slp.c	(working copy)
*************** vect_detect_hybrid_slp_stmts (slp_tree n
*** 2031,2051 ****
      {
        /* Check if a pure SLP stmt has uses in non-SLP stmts.  */
        gcc_checking_assert (PURE_SLP_STMT (stmt_vinfo));
        if (TREE_CODE (gimple_op (stmt, 0)) == SSA_NAME)
  	FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, gimple_op (stmt, 0))
! 	  if (gimple_bb (use_stmt)
! 	      && flow_bb_inside_loop_p (loop, gimple_bb (use_stmt))
! 	      && (use_vinfo = vinfo_for_stmt (use_stmt))
! 	      && !STMT_SLP_TYPE (use_vinfo)
! 	      && (STMT_VINFO_RELEVANT (use_vinfo)
! 		  || VECTORIZABLE_CYCLE_DEF (STMT_VINFO_DEF_TYPE (use_vinfo))
! 		  || (STMT_VINFO_IN_PATTERN_P (use_vinfo)
! 		      && STMT_VINFO_RELATED_STMT (use_vinfo)
! 		      && !STMT_SLP_TYPE (vinfo_for_stmt
! 			    (STMT_VINFO_RELATED_STMT (use_vinfo)))))
! 	      && !(gimple_code (use_stmt) == GIMPLE_PHI
! 		   && STMT_VINFO_DEF_TYPE (use_vinfo) == vect_reduction_def))
! 	    stype = hybrid;
      }

    if (stype == hybrid)
--- 2031,2057 ----
      {
        /* Check if a pure SLP stmt has uses in non-SLP stmts.  */
        gcc_checking_assert (PURE_SLP_STMT (stmt_vinfo));
+       /* We always get the pattern stmt here, but for immediate
+ 	 uses we have to use the LHS of the original stmt.  */
+       gcc_checking_assert (!STMT_VINFO_IN_PATTERN_P (stmt_vinfo));
+       if (STMT_VINFO_RELATED_STMT (stmt_vinfo))
+ 	stmt = STMT_VINFO_RELATED_STMT (stmt_vinfo);
        if (TREE_CODE (gimple_op (stmt, 0)) == SSA_NAME)
  	FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, gimple_op (stmt, 0))
! 	  {
! 	    if (!flow_bb_inside_loop_p (loop, gimple_bb (use_stmt)))
! 	      continue;
! 	    use_vinfo = vinfo_for_stmt (use_stmt);
! 	    if (STMT_VINFO_IN_PATTERN_P (use_vinfo)
! 		&& STMT_VINFO_RELATED_STMT (use_vinfo))
! 	      use_vinfo = vinfo_for_stmt (STMT_VINFO_RELATED_STMT (use_vinfo));
! 	    if (!STMT_SLP_TYPE (use_vinfo)
! 		&& (STMT_VINFO_RELEVANT (use_vinfo)
! 		    || VECTORIZABLE_CYCLE_DEF (STMT_VINFO_DEF_TYPE (use_vinfo)))
! 		&& !(gimple_code (use_stmt) == GIMPLE_PHI
! 		     && STMT_VINFO_DEF_TYPE (use_vinfo) == vect_reduction_def))
! 	      stype = hybrid;
! 	  }
      }

    if (stype == hybrid)
Index: gcc/testsuite/g++.dg/torture/pr66280.C
===================================================================
*** gcc/testsuite/g++.dg/torture/pr66280.C	(revision 0)
--- gcc/testsuite/g++.dg/torture/pr66280.C	(working copy)
***************
*** 0 ****
--- 1,25 ----
+ // { dg-do compile }
+ 
+ typedef struct
+ {
+   short re;
+   short im;
+ } cint16_T;
+ typedef struct
+ {
+   int re;
+   int im;
+ } cint32_T;
+ int a;
+ short b;
+ cint16_T *c;
+ cint32_T *d, *e;
+ void
+ fn1 ()
+ {
+   for (; a; a++)
+     {
+       d[a].re = d[a].im = e[a].re = c[a].re * b;
+       e[a].im = c[a].im * b;
+     }
+ }
Index: gcc/testsuite/g++.dg/torture/pr66280-2.C
===================================================================
*** gcc/testsuite/g++.dg/torture/pr66280-2.C	(revision 0)
--- gcc/testsuite/g++.dg/torture/pr66280-2.C	(working copy)
***************
*** 0 ****
--- 1,25 ----
+ // { dg-do compile }
+ 
+ typedef struct
+ {
+   short re;
+   short im;
+ } cint16_T;
+ typedef struct
+ {
+   int re;
+   int im;
+ } cint32_T;
+ int a;
+ short b;
+ cint16_T *c;
+ cint32_T *d, *e;
+ void
+ fn1 ()
+ {
+   for (; a; a++)
+     {
+       d[a].re = d[a].im = e[a].im = c[a].im * b;
+       e[a].re = c[a].re * b;
+     }
+ }
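
For readers who are not deep in the vectorizer internals, here is a
standalone, simplified sketch of the def-use walk the patch implements.
This is not GCC code: stmt_info, uses_force_hybrid and the data in main
are invented for illustration only, and the relevance and reduction-PHI
checks of the real code are omitted.  It only models the two fixes:
walk the immediate uses of the original stmt's LHS (not the pattern
stmt's), and look through uses that were themselves replaced by a
pattern stmt.

#include <vector>
#include <cstdio>

enum slp_type { loop_vect, pure_slp };

struct stmt_info
{
  const char *name;              /* human-readable label */
  slp_type type;                 /* SLP classification of this stmt */
  bool in_pattern_p;             /* stmt was replaced by a pattern stmt */
  stmt_info *related_stmt;       /* original <-> pattern stmt link */
  std::vector<stmt_info *> uses; /* immediate uses of the defined value */
};

/* Decide whether a pure-SLP stmt has a use outside the SLP instance.
   Mirrors the fixed walk: map the pattern stmt back to the original
   stmt before looking at immediate uses, and check uses that sit
   inside a pattern via their pattern stmt instead.  */
static bool
uses_force_hybrid (stmt_info *stmt)
{
  if (stmt->related_stmt)        /* we were handed the pattern stmt */
    stmt = stmt->related_stmt;   /* uses are on the original stmt's LHS */

  for (stmt_info *use_vinfo : stmt->uses)
    {
      if (use_vinfo->in_pattern_p && use_vinfo->related_stmt)
	use_vinfo = use_vinfo->related_stmt;
      if (use_vinfo->type != pure_slp)
	return true;             /* a non-SLP consumer -> hybrid SLP */
    }
  return false;
}

int
main ()
{
  /* Original widening multiply and the pattern stmt that replaced it.  */
  stmt_info orig = { "c[a].re * b", loop_vect, true, nullptr, {} };
  stmt_info pattern = { "WIDEN_MULT pattern", pure_slp, false, &orig, {} };
  orig.related_stmt = &pattern;

  /* A consumer that is not part of the SLP instance.  */
  stmt_info other = { "e[a].im = ...", loop_vect, false, nullptr, {} };
  orig.uses.push_back (&other);

  printf ("hybrid: %s\n", uses_force_hybrid (&pattern) ? "yes" : "no");
  return 0;
}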