> This is the rest of the implementation.
...with the patch this time...
--
Eric Botcazou
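For context: the branch implements the scalar_storage_order type
attribute, which fixes the byte order of the scalar fields of a struct
or union independently of the target's native order.  A minimal
user-level sketch (the type name is made up for illustration):

  /* Scalar fields of this structure are stored in big-endian order,
     even on a little-endian target; every read and write of a field
     byte-swaps as needed.  Taking the address of a scalar field is
     not allowed, which is why several passes below must now detect
     reverse storage order accesses and punt on them.  */
  struct __attribute__ ((scalar_storage_order ("big-endian"))) be_header
  {
    unsigned int magic;
    unsigned short version;
    unsigned short flags;
  };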
Index: ipa-polymorphic-call.c
===================================================================
--- ipa-polymorphic-call.c (.../trunk/gcc) (revision 228112)
+++ ipa-polymorphic-call.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -761,6 +761,7 @@ ipa_polymorphic_call_context::set_by_inv
HOST_WIDE_INT off)
{
HOST_WIDE_INT offset2, size, max_size;
+ bool reverse;
tree base;
invalid = false;
@@ -771,7 +772,7 @@ ipa_polymorphic_call_context::set_by_inv
return false;
cst = TREE_OPERAND (cst, 0);
- base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
+ base = get_ref_base_and_extent (cst, &offset2, &size, &max_size, &reverse);
if (!DECL_P (base) || max_size == -1 || max_size != size)
return false;
@@ -901,8 +902,10 @@ ipa_polymorphic_call_context::ipa_polymo
{
HOST_WIDE_INT size, max_size;
HOST_WIDE_INT offset2;
- tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
- &offset2, &size, &max_size);
+ bool reverse;
+ tree base
+ = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
+ &offset2, &size, &max_size, &reverse);
if (max_size != -1 && max_size == size)
combine_speculation_with (TYPE_MAIN_VARIANT (TREE_TYPE (base)),
@@ -1170,6 +1173,7 @@ extr_type_from_vtbl_ptr_store (gimple *s
{
HOST_WIDE_INT offset, size, max_size;
tree lhs, rhs, base;
+ bool reverse;
if (!gimple_assign_single_p (stmt))
return NULL_TREE;
@@ -1188,7 +1192,7 @@ extr_type_from_vtbl_ptr_store (gimple *s
;
else
{
- base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
+ base = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
if (DECL_P (tci->instance))
{
if (base != tci->instance)
@@ -1377,6 +1381,7 @@ check_stmt_for_type_change (ao_ref *ao A
tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
tree type = TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
HOST_WIDE_INT offset = 0, size, max_size;
+ bool reverse;
if (dump_file)
{
@@ -1387,8 +1392,8 @@ check_stmt_for_type_change (ao_ref *ao A
/* See if THIS parameter seems like instance pointer. */
if (TREE_CODE (op) == ADDR_EXPR)
{
- op = get_ref_base_and_extent (TREE_OPERAND (op, 0),
- &offset, &size, &max_size);
+ op = get_ref_base_and_extent (TREE_OPERAND (op, 0), &offset,
+ &size, &max_size, &reverse);
if (size != max_size || max_size == -1)
{
tci->speculative = true;
@@ -1531,6 +1536,7 @@ ipa_polymorphic_call_context::get_dynami
{
tree ref = gimple_call_fn (call);
HOST_WIDE_INT offset2, size, max_size;
+ bool reverse;
if (TREE_CODE (ref) == OBJ_TYPE_REF)
{
@@ -1560,8 +1566,9 @@ ipa_polymorphic_call_context::get_dynami
&& gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
{
tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
- tree base_ref = get_ref_base_and_extent
- (ref_exp, &offset2, &size, &max_size);
+ tree base_ref
+ = get_ref_base_and_extent (ref_exp, &offset2, &size,
+ &max_size, &reverse);
/* Finally verify that what we found looks like read from
OTR_OBJECT or from INSTANCE with offset OFFSET. */
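All callers of get_ref_base_and_extent are adjusted for its new trailing
output parameter, which reports whether the innermost reference is made
in reverse storage order.  A minimal sketch of the recurring pattern for
callers that cannot handle such accesses (names are illustrative):

  HOST_WIDE_INT offset, size, max_size;
  bool reverse;
  tree base = get_ref_base_and_extent (ref, &offset, &size, &max_size,
                                       &reverse);
  if (reverse)
    return false;  /* Punt on reverse storage order.  */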
Index: ipa-cp.c
===================================================================
--- ipa-cp.c (.../trunk/gcc) (revision 228112)
+++ ipa-cp.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -939,7 +939,7 @@ ipa_get_jf_ancestor_result (struct ipa_j
{
tree t = TREE_OPERAND (input, 0);
t = build_ref_for_offset (EXPR_LOCATION (t), t,
- ipa_get_jf_ancestor_offset (jfunc),
+ ipa_get_jf_ancestor_offset (jfunc), false,
ptr_type_node, NULL, false);
return build_fold_addr_expr (t);
}
Index: tree-scalar-evolution.c
===================================================================
--- tree-scalar-evolution.c (.../trunk/gcc) (revision 228112)
+++ tree-scalar-evolution.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -1728,15 +1728,16 @@ interpret_rhs_expr (struct loop *loop, g
{
machine_mode mode;
HOST_WIDE_INT bitsize, bitpos;
- int unsignedp;
+ int unsignedp, reversep;
int volatilep = 0;
tree base, offset;
tree chrec3;
tree unitpos;
base = get_inner_reference (TREE_OPERAND (rhs1, 0),
- &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep, false);
+ &bitsize, &bitpos, &offset, &mode,
+ &unsignedp, &reversep, &volatilep,
+ false);
if (TREE_CODE (base) == MEM_REF)
{
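get_inner_reference likewise grows a reversep output parameter, inserted
just before volatilep.  A sketch of the updated calling convention (EXP
is an illustrative placeholder):

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                                   &unsignedp, &reversep, &volatilep, false);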
Index: builtins.c
===================================================================
--- builtins.c (.../trunk/gcc) (revision 228112)
+++ builtins.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -286,14 +286,14 @@ get_object_alignment_2 (tree exp, unsign
HOST_WIDE_INT bitsize, bitpos;
tree offset;
machine_mode mode;
- int unsignedp, volatilep;
+ int unsignedp, reversep, volatilep;
unsigned int align = BITS_PER_UNIT;
bool known_alignment = false;
/* Get the innermost object and the constant (bitpos) and possibly
variable (offset) offset of the access. */
- exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep, true);
+ exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
+ &unsignedp, &reversep, &volatilep, true);
/* Extract alignment information from the innermost object and
possibly adjust bitpos and offset. */
Index: tree-ssa-sccvn.c
===================================================================
--- tree-ssa-sccvn.c (.../trunk/gcc) (revision 228112)
+++ tree-ssa-sccvn.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -709,6 +709,9 @@ vn_reference_eq (const_vn_reference_t co
{
if (vro1->opcode == MEM_REF)
deref1 = true;
+ /* Do not look through a storage order barrier. */
+ else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
+ return false;
if (vro1->off == -1)
break;
off1 += vro1->off;
@@ -717,6 +720,9 @@ vn_reference_eq (const_vn_reference_t co
{
if (vro2->opcode == MEM_REF)
deref2 = true;
+ /* Do not look through a storage order barrier. */
+ else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
+ return false;
if (vro2->off == -1)
break;
off2 += vro2->off;
@@ -820,9 +826,10 @@ copy_reference_ops_from_ref (tree ref, v
temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
temp.clique = MR_DEPENDENCE_CLIQUE (ref);
temp.base = MR_DEPENDENCE_BASE (ref);
+ temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
break;
case BIT_FIELD_REF:
- /* Record bits and position. */
+ /* Record bits, position and storage order. */
temp.op0 = TREE_OPERAND (ref, 1);
temp.op1 = TREE_OPERAND (ref, 2);
if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
@@ -831,6 +838,7 @@ copy_reference_ops_from_ref (tree ref, v
if (off % BITS_PER_UNIT == 0)
temp.off = off / BITS_PER_UNIT;
}
+ temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
break;
case COMPONENT_REF:
/* The field decl is enough to unambiguously specify the field,
@@ -927,8 +935,11 @@ copy_reference_ops_from_ref (tree ref, v
operand), so we don't have to put anything
for op* as it will be handled by the iteration */
case REALPART_EXPR:
+ temp.off = 0;
+ break;
case VIEW_CONVERT_EXPR:
temp.off = 0;
+ temp.reverse = storage_order_barrier_p (ref);
break;
case IMAGPART_EXPR:
/* This is only interesting for its constant offset. */
@@ -1437,6 +1448,21 @@ fully_constant_vn_reference_p (vn_refere
return NULL_TREE;
}
+/* Return true if OPS contain a storage order barrier. */
+
+static bool
+contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
+{
+ vn_reference_op_t op;
+ unsigned i;
+
+ FOR_EACH_VEC_ELT (ops, i, op)
+ if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
+ return true;
+
+ return false;
+}
+
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
structures into their value numbers. This is done in-place, and
the vector passed in is returned. *VALUEIZED_ANYTHING will specify
@@ -1755,7 +1781,9 @@ vn_reference_lookup_3 (ao_ref *ref, tree
tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
tree base2;
HOST_WIDE_INT offset2, size2, maxsize2;
- base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
+ bool reverse;
+ base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
+ &reverse);
size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
if ((unsigned HOST_WIDE_INT)size2 / 8
== tree_to_uhwi (gimple_call_arg (def_stmt, 2))
@@ -1778,8 +1806,9 @@ vn_reference_lookup_3 (ao_ref *ref, tree
{
tree base2;
HOST_WIDE_INT offset2, size2, maxsize2;
+ bool reverse;
base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
- &offset2, &size2, &maxsize2);
+ &offset2, &size2, &maxsize2, &reverse);
if (maxsize2 != -1
&& operand_equal_p (base, base2, 0)
&& offset2 <= offset
@@ -1799,14 +1828,17 @@ vn_reference_lookup_3 (ao_ref *ref, tree
&& maxsize % BITS_PER_UNIT == 0
&& offset % BITS_PER_UNIT == 0
&& is_gimple_reg_type (vr->type)
+ && !contains_storage_order_barrier_p (vr->operands)
&& gimple_assign_single_p (def_stmt)
&& is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
{
tree base2;
HOST_WIDE_INT offset2, size2, maxsize2;
+ bool reverse;
base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
- &offset2, &size2, &maxsize2);
- if (maxsize2 != -1
+ &offset2, &size2, &maxsize2, &reverse);
+ if (!reverse
+ && maxsize2 != -1
&& maxsize2 == size2
&& size2 % BITS_PER_UNIT == 0
&& offset2 % BITS_PER_UNIT == 0
@@ -1838,6 +1870,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree
to access pieces from. */
else if (ref->size == maxsize
&& is_gimple_reg_type (vr->type)
+ && !contains_storage_order_barrier_p (vr->operands)
&& gimple_assign_single_p (def_stmt)
&& TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
{
@@ -1850,10 +1883,13 @@ vn_reference_lookup_3 (ao_ref *ref, tree
{
tree base2;
HOST_WIDE_INT offset2, size2, maxsize2, off;
+ bool reverse;
base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
- &offset2, &size2, &maxsize2);
+ &offset2, &size2, &maxsize2,
+ &reverse);
off = offset - offset2;
- if (maxsize2 != -1
+ if (!reverse
+ && maxsize2 != -1
&& maxsize2 == size2
&& operand_equal_p (base, base2, 0)
&& offset2 <= offset
@@ -1902,7 +1938,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree
{
tree base2;
HOST_WIDE_INT maxsize2;
- int i, j;
+ int i, j, k;
auto_vec<vn_reference_op_s> rhs;
vn_reference_op_t vro;
ao_ref r;
@@ -1962,6 +1998,14 @@ vn_reference_lookup_3 (ao_ref *ref, tree
if (j != -1)
return (void *)-1;
+ /* Punt if the additional ops contain a storage order barrier. */
+ for (k = i; k >= 0; k--)
+ {
+ vro = &vr->operands[k];
+ if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
+ return (void *)-1;
+ }
+
/* Now re-write REF to be based on the rhs of the assignment. */
copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
@@ -2036,7 +2080,6 @@ vn_reference_lookup_3 (ao_ref *ref, tree
vn_reference_op_s op;
HOST_WIDE_INT at;
-
/* Only handle non-variable, addressable refs. */
if (ref->size != maxsize
|| offset % BITS_PER_UNIT != 0
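The value numbering changes hinge on the notion of a storage order
barrier: a VIEW_CONVERT_EXPR that may change how the underlying bytes
are to be interpreted, so it must not be looked through.  A rough sketch
of the storage_order_barrier_p predicate the patch relies on (defined
elsewhere on the branch):

  static inline bool
  storage_order_barrier_p (const_tree t)
  {
    if (TREE_CODE (t) != VIEW_CONVERT_EXPR)
      return false;
    if (AGGREGATE_TYPE_P (TREE_TYPE (t))
        && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t)))
      return true;
    tree op = TREE_OPERAND (t, 0);
    return AGGREGATE_TYPE_P (TREE_TYPE (op))
           && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (op));
  }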
Index: tree-ssa-sccvn.h
===================================================================
--- tree-ssa-sccvn.h (.../trunk/gcc) (revision 228112)
+++ tree-ssa-sccvn.h (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -92,6 +92,7 @@ typedef struct vn_reference_op_struct
tree op0;
tree op1;
tree op2;
+ bool reverse;
} vn_reference_op_s;
typedef vn_reference_op_s *vn_reference_op_t;
typedef const vn_reference_op_s *const_vn_reference_op_t;
Index: dbxout.c
===================================================================
--- dbxout.c (.../trunk/gcc) (revision 228112)
+++ dbxout.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -2489,11 +2489,11 @@ dbxout_expand_expr (tree expr)
machine_mode mode;
HOST_WIDE_INT bitsize, bitpos;
tree offset, tem;
- int volatilep = 0, unsignedp = 0;
+ int unsignedp, reversep, volatilep = 0;
rtx x;
- tem = get_inner_reference (expr, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep, true);
+ tem = get_inner_reference (expr, &bitsize, &bitpos, &offset, &mode,
+ &unsignedp, &reversep, &volatilep, true);
x = dbxout_expand_expr (tem);
if (x == NULL || !MEM_P (x))
Index: tree-ssa-loop-ivopts.c
===================================================================
--- tree-ssa-loop-ivopts.c (.../trunk/gcc) (revision 228112)
+++ tree-ssa-loop-ivopts.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -1997,10 +1997,27 @@ may_be_nonaddressable_p (tree expr)
target, thus they are always addressable. */
return false;
+ case MEM_REF:
+ /* Likewise for MEM_REFs, modulo the storage order. */
+ return REF_REVERSE_STORAGE_ORDER (expr);
+
+ case BIT_FIELD_REF:
+ if (REF_REVERSE_STORAGE_ORDER (expr))
+ return true;
+ return may_be_nonaddressable_p (TREE_OPERAND (expr, 0));
+
case COMPONENT_REF:
+ if (TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_OPERAND (expr, 0))))
+ return true;
return DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1))
|| may_be_nonaddressable_p (TREE_OPERAND (expr, 0));
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
+ if (TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_OPERAND (expr, 0))))
+ return true;
+ return may_be_nonaddressable_p (TREE_OPERAND (expr, 0));
+
case VIEW_CONVERT_EXPR:
/* This kind of view-conversions may wrap non-addressable objects
and make them look addressable. After some processing the
@@ -2009,11 +2026,6 @@ may_be_nonaddressable_p (tree expr)
if (is_gimple_reg (TREE_OPERAND (expr, 0))
|| !is_gimple_addressable (TREE_OPERAND (expr, 0)))
return true;
-
- /* ... fall through ... */
-
- case ARRAY_REF:
- case ARRAY_RANGE_REF:
return may_be_nonaddressable_p (TREE_OPERAND (expr, 0));
CASE_CONVERT:
@@ -4320,13 +4332,14 @@ split_address_cost (struct ivopts_data *
HOST_WIDE_INT bitpos;
tree toffset;
machine_mode mode;
- int unsignedp, volatilep;
+ int unsignedp, reversep, volatilep;
core = get_inner_reference (addr, &bitsize, &bitpos, &toffset, &mode,
- &unsignedp, &volatilep, false);
+ &unsignedp, &reversep, &volatilep, false);
if (toffset != 0
|| bitpos % BITS_PER_UNIT != 0
+ || reversep
|| TREE_CODE (core) != VAR_DECL)
{
*symbol_present = false;
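The rationale for treating reverse storage order accesses as
non-addressable in IVOPTs: rewriting them into accesses through a plain
pointer would read or write the bytes in native order and silently drop
the byte swap.  The front end enforces the same restriction at the
source level; a hypothetical example:

  struct __attribute__ ((scalar_storage_order ("big-endian"))) s
  {
    int x;
  };

  int
  f (struct s *p)
  {
    int *q = &p->x;  /* Rejected: a load through Q would not swap.  */
    return *q;
  }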
Index: tree-ssa-math-opts.c
===================================================================
--- tree-ssa-math-opts.c (.../trunk/gcc) (revision 228112)
+++ tree-ssa-math-opts.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -2030,7 +2030,7 @@ find_bswap_or_nop_load (gimple *stmt, tr
offset from base to compare to other such leaf node. */
HOST_WIDE_INT bitsize, bitpos;
machine_mode mode;
- int unsignedp, volatilep;
+ int unsignedp, reversep, volatilep;
tree offset, base_addr;
/* Not prepared to handle PDP endian. */
@@ -2041,7 +2041,7 @@ find_bswap_or_nop_load (gimple *stmt, tr
return false;
base_addr = get_inner_reference (ref, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &volatilep, false);
+ &unsignedp, &reversep, &volatilep, false);
if (TREE_CODE (base_addr) == MEM_REF)
{
@@ -2080,6 +2080,8 @@ find_bswap_or_nop_load (gimple *stmt, tr
return false;
if (bitsize % BITS_PER_UNIT)
return false;
+ if (reversep)
+ return false;
if (!init_symbolic_number (n, ref))
return false;
@@ -2528,11 +2530,11 @@ bswap_replace (gimple *cur_stmt, gimple
{
HOST_WIDE_INT bitsize, bitpos;
machine_mode mode;
- int unsignedp, volatilep;
+ int unsignedp, reversep, volatilep;
tree offset;
get_inner_reference (src, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &volatilep, false);
+ &unsignedp, &reversep, &volatilep, false);
if (n->range < (unsigned HOST_WIDE_INT) bitsize)
{
load_offset = (bitsize - n->range) / BITS_PER_UNIT;
Index: tree-ssa-alias.c
===================================================================
--- tree-ssa-alias.c (.../trunk/gcc) (revision 228112)
+++ tree-ssa-alias.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -557,10 +557,12 @@ ao_ref_init (ao_ref *r, tree ref)
tree
ao_ref_base (ao_ref *ref)
{
+ bool reverse;
+
if (ref->base)
return ref->base;
ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
- &ref->max_size);
+ &ref->max_size, &reverse);
return ref->base;
}
@@ -741,9 +743,10 @@ aliasing_component_refs_p (tree ref1,
else if (same_p == 1)
{
HOST_WIDE_INT offadj, sztmp, msztmp;
- get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
+ bool reverse;
+ get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
offset2 -= offadj;
- get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
+ get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse);
offset1 -= offadj;
return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
}
@@ -759,9 +762,10 @@ aliasing_component_refs_p (tree ref1,
else if (same_p == 1)
{
HOST_WIDE_INT offadj, sztmp, msztmp;
- get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
+ bool reverse;
+ get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
offset1 -= offadj;
- get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
+ get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse);
offset2 -= offadj;
return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
}
@@ -2302,7 +2306,9 @@ stmt_kills_ref_p (gimple *stmt, ao_ref *
if (ref->max_size == -1)
return false;
HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
- tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
+ bool reverse;
+ tree base
+ = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
/* We can get MEM[symbol: sZ, index: D.8862_1] here,
so base == ref->base does not always hold. */
if (base != ref->base)
Index: ifcvt.c
===================================================================
--- ifcvt.c (.../trunk/gcc) (revision 228112)
+++ ifcvt.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -953,7 +953,7 @@ noce_emit_move_insn (rtx x, rtx y)
}
gcc_assert (start < (MEM_P (op) ? BITS_PER_UNIT : BITS_PER_WORD));
- store_bit_field (op, size, start, 0, 0, GET_MODE (x), y);
+ store_bit_field (op, size, start, 0, 0, GET_MODE (x), y, false);
return;
}
@@ -1008,7 +1008,7 @@ noce_emit_move_insn (rtx x, rtx y)
outmode = GET_MODE (outer);
bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos,
- 0, 0, outmode, y);
+ 0, 0, outmode, y, false);
}
/* Return the CC reg if it is used in COND. */
Index: dwarf2out.c
===================================================================
--- dwarf2out.c (.../trunk/gcc) (revision 228112)
+++ dwarf2out.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -5267,9 +5267,10 @@ add_var_loc_to_decl (tree decl, rtx loc_
&& TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
{
HOST_WIDE_INT maxsize;
- tree innerdecl;
- innerdecl
- = get_ref_base_and_extent (realdecl, &bitpos, &bitsize, &maxsize);
+ bool reverse;
+ tree innerdecl
+ = get_ref_base_and_extent (realdecl, &bitpos, &bitsize, &maxsize,
+ &reverse);
if (!DECL_P (innerdecl)
|| DECL_IGNORED_P (innerdecl)
|| TREE_STATIC (innerdecl)
@@ -14443,12 +14444,12 @@ loc_list_for_address_of_addr_expr_of_ind
tree obj, offset;
HOST_WIDE_INT bitsize, bitpos, bytepos;
machine_mode mode;
- int unsignedp, volatilep = 0;
+ int unsignedp, reversep, volatilep = 0;
dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
obj = get_inner_reference (TREE_OPERAND (loc, 0),
&bitsize, &bitpos, &offset, &mode,
- &unsignedp, &volatilep, false);
+ &unsignedp, &reversep, &volatilep, false);
STRIP_NOPS (obj);
if (bitpos % BITS_PER_UNIT)
{
@@ -14777,10 +14778,10 @@ loc_list_from_tree (tree loc, int want_a
tree obj, offset;
HOST_WIDE_INT bitsize, bitpos, bytepos;
machine_mode mode;
- int unsignedp, volatilep = 0;
+ int unsignedp, reversep, volatilep = 0;
obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &volatilep, false);
+ &unsignedp, &reversep, &volatilep, false);
gcc_assert (obj != loc);
@@ -16080,7 +16081,7 @@ fortran_common (tree decl, HOST_WIDE_INT
machine_mode mode;
HOST_WIDE_INT bitsize, bitpos;
tree offset;
- int unsignedp, volatilep = 0;
+ int unsignedp, reversep, volatilep = 0;
/* If the decl isn't a VAR_DECL, or if it isn't static, or if
it does not have a value (the offset into the common area), or if it
@@ -16096,8 +16097,8 @@ fortran_common (tree decl, HOST_WIDE_INT
if (TREE_CODE (val_expr) != COMPONENT_REF)
return NULL_TREE;
- cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep, true);
+ cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
+ &unsignedp, &reversep, &volatilep, true);
if (cvar == NULL_TREE
|| TREE_CODE (cvar) != VAR_DECL
Index: tsan.c
===================================================================
--- tsan.c (.../trunk/gcc) (revision 228112)
+++ tsan.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -126,9 +126,9 @@ instrument_expr (gimple_stmt_iterator gs
HOST_WIDE_INT bitsize, bitpos;
tree offset;
machine_mode mode;
- int volatilep = 0, unsignedp = 0;
- base = get_inner_reference (expr, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep, false);
+ int unsignedp, reversep, volatilep = 0;
+ base = get_inner_reference (expr, &bitsize, &bitpos, &offset, &mode,
+ &unsignedp, &reversep, &volatilep, false);
/* No need to instrument accesses to decls that don't escape,
they can't escape to other threads then. */
Index: asan.c
===================================================================
--- asan.c (.../trunk/gcc) (revision 228112)
+++ asan.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -1783,9 +1783,9 @@ instrument_derefs (gimple_stmt_iterator
HOST_WIDE_INT bitsize, bitpos;
tree offset;
machine_mode mode;
- int volatilep = 0, unsignedp = 0;
- tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep, false);
+ int unsignedp, reversep, volatilep = 0;
+ tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
+ &unsignedp, &reversep, &volatilep, false);
if (TREE_CODE (t) == COMPONENT_REF
&& DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
Index: gimple-ssa-strength-reduction.c
===================================================================
--- gimple-ssa-strength-reduction.c (.../trunk/gcc) (revision 228112)
+++ gimple-ssa-strength-reduction.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -985,7 +985,7 @@ slsr_process_ref (gimple *gs)
tree ref_expr, base, offset, type;
HOST_WIDE_INT bitsize, bitpos;
machine_mode mode;
- int unsignedp, volatilep;
+ int unsignedp, reversep, volatilep;
slsr_cand_t c;
if (gimple_vdef (gs))
@@ -1000,7 +1000,9 @@ slsr_process_ref (gimple *gs)
return;
base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &volatilep, false);
+ &unsignedp, &reversep, &volatilep, false);
+ if (reversep)
+ return;
widest_int index = bitpos;
if (!restructure_reference (&base, &offset, &index, &type))
Index: tree-data-ref.c
===================================================================
--- tree-data-ref.c (.../trunk/gcc) (revision 228112)
+++ tree-data-ref.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -625,11 +625,12 @@ split_constant_offset_1 (tree type, tree
tree base, poffset;
HOST_WIDE_INT pbitsize, pbitpos;
machine_mode pmode;
- int punsignedp, pvolatilep;
+ int punsignedp, preversep, pvolatilep;
op0 = TREE_OPERAND (op0, 0);
- base = get_inner_reference (op0, &pbitsize, &pbitpos, &poffset,
- &pmode, &punsignedp, &pvolatilep, false);
+ base
+ = get_inner_reference (op0, &pbitsize, &pbitpos, &poffset, &pmode,
+ &punsignedp, &preversep, &pvolatilep, false);
if (pbitpos % BITS_PER_UNIT != 0)
return false;
@@ -773,7 +774,7 @@ dr_analyze_innermost (struct data_refere
HOST_WIDE_INT pbitsize, pbitpos;
tree base, poffset;
machine_mode pmode;
- int punsignedp, pvolatilep;
+ int punsignedp, preversep, pvolatilep;
affine_iv base_iv, offset_iv;
tree init, dinit, step;
bool in_loop = (loop && loop->num);
@@ -781,8 +782,8 @@ dr_analyze_innermost (struct data_refere
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "analyze_innermost: ");
- base = get_inner_reference (ref, &pbitsize, &pbitpos, &poffset,
- &pmode, &punsignedp, &pvolatilep, false);
+ base = get_inner_reference (ref, &pbitsize, &pbitpos, &poffset, &pmode,
+ &punsignedp, &preversep, &pvolatilep, false);
gcc_assert (base != NULL_TREE);
if (pbitpos % BITS_PER_UNIT != 0)
@@ -792,6 +793,13 @@ dr_analyze_innermost (struct data_refere
return false;
}
+ if (preversep)
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "failed: reverse storage order.\n");
+ return false;
+ }
+
if (TREE_CODE (base) == MEM_REF)
{
if (!integer_zerop (TREE_OPERAND (base, 1)))
Index: tree-affine.c
===================================================================
--- tree-affine.c (.../trunk/gcc) (revision 228112)
+++ tree-affine.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -274,7 +274,7 @@ tree_to_aff_combination (tree expr, tree
tree cst, core, toffset;
HOST_WIDE_INT bitpos, bitsize;
machine_mode mode;
- int unsignedp, volatilep;
+ int unsignedp, reversep, volatilep;
STRIP_NOPS (expr);
@@ -331,8 +331,8 @@ tree_to_aff_combination (tree expr, tree
return;
}
core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
- &toffset, &mode, &unsignedp, &volatilep,
- false);
+ &toffset, &mode, &unsignedp, &reversep,
+ &volatilep, false);
if (bitpos % BITS_PER_UNIT != 0)
break;
aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
@@ -899,10 +899,10 @@ get_inner_reference_aff (tree ref, aff_t
HOST_WIDE_INT bitsize, bitpos;
tree toff;
machine_mode mode;
- int uns, vol;
+ int uns, rev, vol;
aff_tree tmp;
tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
- &uns, &vol, false);
+ &uns, &rev, &vol, false);
tree base_addr = build_fold_addr_expr (base);
/* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT. */
Index: tree-vect-data-refs.c
===================================================================
--- tree-vect-data-refs.c (.../trunk/gcc) (revision 228112)
+++ tree-vect-data-refs.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -2999,7 +2999,7 @@ vect_check_gather_scatter (gimple *stmt,
tree offtype = NULL_TREE;
tree decl, base, off;
machine_mode pmode;
- int punsignedp, pvolatilep;
+ int punsignedp, reversep, pvolatilep = 0;
base = DR_REF (dr);
/* For masked loads/stores, DR_REF (dr) is an artificial MEM_REF,
@@ -3031,9 +3031,9 @@ vect_check_gather_scatter (gimple *stmt,
vectorized. The following code attempts to find such a preexistng
SSA_NAME OFF and put the loop invariants into a tree BASE
that can be gimplified before the loop. */
- base = get_inner_reference (base, &pbitsize, &pbitpos, &off,
- &pmode, &punsignedp, &pvolatilep, false);
- gcc_assert (base != NULL_TREE && (pbitpos % BITS_PER_UNIT) == 0);
+ base = get_inner_reference (base, &pbitsize, &pbitpos, &off, &pmode,
+ &punsignedp, &reversep, &pvolatilep, false);
+ gcc_assert (base && (pbitpos % BITS_PER_UNIT) == 0 && !reversep);
if (TREE_CODE (base) == MEM_REF)
{
@@ -3584,7 +3584,7 @@ again:
HOST_WIDE_INT pbitsize, pbitpos;
tree poffset;
machine_mode pmode;
- int punsignedp, pvolatilep;
+ int punsignedp, preversep, pvolatilep;
affine_iv base_iv, offset_iv;
tree dinit;
@@ -3603,7 +3603,8 @@ again:
}
outer_base = get_inner_reference (inner_base, &pbitsize, &pbitpos,
- &poffset, &pmode, &punsignedp, &pvolatilep, false);
+ &poffset, &pmode, &punsignedp,
+ &preversep, &pvolatilep, false);
gcc_assert (outer_base != NULL_TREE);
if (pbitpos % BITS_PER_UNIT != 0)
@@ -3614,6 +3615,14 @@ again:
return false;
}
+ if (preversep)
+ {
+ if (dump_enabled_p ())
+ dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
+ "failed: reverse storage order.\n");
+ return false;
+ }
+
outer_base = build_fold_addr_expr (outer_base);
if (!simple_iv (loop, loop_containing_stmt (stmt), outer_base,
&base_iv, false))
Index: gimple-fold.c
===================================================================
--- gimple-fold.c (.../trunk/gcc) (revision 228112)
+++ gimple-fold.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -792,13 +792,14 @@ gimple_fold_builtin_memory_op (gimple_st
HOST_WIDE_INT src_offset = 0, dest_offset = 0;
HOST_WIDE_INT size = -1;
HOST_WIDE_INT maxsize = -1;
+ bool reverse;
srcvar = TREE_OPERAND (src, 0);
src_base = get_ref_base_and_extent (srcvar, &src_offset,
- &size, &maxsize);
+ &size, &maxsize, &reverse);
destvar = TREE_OPERAND (dest, 0);
dest_base = get_ref_base_and_extent (destvar, &dest_offset,
- &size, &maxsize);
+ &size, &maxsize, &reverse);
if (tree_fits_uhwi_p (len))
maxsize = tree_to_uhwi (len);
else
@@ -5184,6 +5185,8 @@ get_base_constructor (tree base, HOST_WI
tree (*valueize)(tree))
{
HOST_WIDE_INT bit_offset2, size, max_size;
+ bool reverse;
+
if (TREE_CODE (base) == MEM_REF)
{
if (!integer_zerop (TREE_OPERAND (base, 1)))
@@ -5224,7 +5227,8 @@ get_base_constructor (tree base, HOST_WI
case ARRAY_REF:
case COMPONENT_REF:
- base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
+ base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
+ &reverse);
if (max_size == -1 || size != max_size)
return NULL_TREE;
*bit_offset += bit_offset2;
@@ -5468,6 +5472,7 @@ fold_const_aggregate_ref_1 (tree t, tree
tree ctor, idx, base;
HOST_WIDE_INT offset, size, max_size;
tree tem;
+ bool reverse;
if (TREE_THIS_VOLATILE (t))
return NULL_TREE;
@@ -5538,7 +5543,7 @@ fold_const_aggregate_ref_1 (tree t, tree
case BIT_FIELD_REF:
case TARGET_MEM_REF:
case MEM_REF:
- base = get_ref_base_and_extent (t, &offset, &size, &max_size);
+ base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
ctor = get_base_constructor (base, &offset, valueize);
/* Empty constructor. Always fold to 0. */
Index: cfgexpand.c
===================================================================
--- cfgexpand.c (.../trunk/gcc) (revision 228112)
+++ cfgexpand.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -4398,9 +4398,10 @@ expand_debug_expr (tree exp)
machine_mode mode1;
HOST_WIDE_INT bitsize, bitpos;
tree offset;
- int volatilep = 0;
- tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
- &mode1, &unsignedp, &volatilep, false);
+ int reversep, volatilep = 0;
+ tree tem
+ = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
+ &unsignedp, &reversep, &volatilep, false);
rtx orig_op0;
if (bitsize == 0)
@@ -4823,9 +4824,10 @@ expand_debug_expr (tree exp)
if (handled_component_p (TREE_OPERAND (exp, 0)))
{
HOST_WIDE_INT bitoffset, bitsize, maxsize;
+ bool reverse;
tree decl
- = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
- &bitoffset, &bitsize, &maxsize);
+ = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
+ &bitsize, &maxsize, &reverse);
if ((TREE_CODE (decl) == VAR_DECL
|| TREE_CODE (decl) == PARM_DECL
|| TREE_CODE (decl) == RESULT_DECL)
Index: simplify-rtx.c
===================================================================
--- simplify-rtx.c (.../trunk/gcc) (revision 228112)
+++ simplify-rtx.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -317,10 +317,11 @@ delegitimize_mem_from_attrs (rtx x)
{
HOST_WIDE_INT bitsize, bitpos;
tree toffset;
- int unsignedp, volatilep = 0;
+ int unsignedp, reversep, volatilep = 0;
- decl = get_inner_reference (decl, &bitsize, &bitpos, &toffset,
- &mode, &unsignedp, &volatilep, false);
+ decl
+ = get_inner_reference (decl, &bitsize, &bitpos, &toffset, &mode,
+ &unsignedp, &reversep, &volatilep, false);
if (bitsize != GET_MODE_BITSIZE (mode)
|| (bitpos % BITS_PER_UNIT)
|| (toffset && !tree_fits_shwi_p (toffset)))
Index: tree-ssa-pre.c
===================================================================
--- tree-ssa-pre.c (.../trunk/gcc) (revision 228112)
+++ tree-ssa-pre.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -2534,6 +2534,7 @@ create_component_ref_by_pieces_1 (basic_
genop = build2 (MEM_REF, currop->type, baseop, offset);
MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
MR_DEPENDENCE_BASE (genop) = currop->base;
+ REF_REVERSE_STORAGE_ORDER (genop) = currop->reverse;
return genop;
}
@@ -2603,7 +2604,9 @@ create_component_ref_by_pieces_1 (basic_
return NULL_TREE;
tree op1 = currop->op0;
tree op2 = currop->op1;
- return fold_build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
+ tree t = build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
+ REF_REVERSE_STORAGE_ORDER (t) = currop->reverse;
+ return fold (t);
}
/* For array ref vn_reference_op's, operand 1 of the array ref
Index: tree-sra.c
===================================================================
--- tree-sra.c (.../trunk/gcc) (revision 228112)
+++ tree-sra.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -190,12 +190,15 @@ struct access
when grp_to_be_replaced flag is set. */
tree replacement_decl;
- /* Is this particular access write access? */
- unsigned write : 1;
-
/* Is this access an access to a non-addressable field? */
unsigned non_addressable : 1;
+ /* Is this access made in reverse storage order? */
+ unsigned reverse : 1;
+
+ /* Is this particular access write access? */
+ unsigned write : 1;
+
/* Is this access currently in the work queue? */
unsigned grp_queued : 1;
@@ -434,6 +437,8 @@ dump_access (FILE *f, struct access *acc
print_generic_expr (f, access->expr, 0);
fprintf (f, ", type = ");
print_generic_expr (f, access->type, 0);
+ fprintf (f, ", non_addressable = %d, reverse = %d",
+ access->non_addressable, access->reverse);
if (grp)
fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
"grp_assignment_write = %d, grp_scalar_read = %d, "
@@ -850,9 +855,9 @@ create_access (tree expr, gimple *stmt,
struct access *access;
HOST_WIDE_INT offset, size, max_size;
tree base = expr;
- bool ptr, unscalarizable_region = false;
+ bool reverse, ptr, unscalarizable_region = false;
- base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
+ base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
if (sra_mode == SRA_MODE_EARLY_IPA
&& TREE_CODE (base) == MEM_REF)
@@ -906,6 +911,7 @@ create_access (tree expr, gimple *stmt,
access->write = write;
access->grp_unscalarizable_region = unscalarizable_region;
access->stmt = stmt;
+ access->reverse = reverse;
if (TREE_CODE (expr) == COMPONENT_REF
&& DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
@@ -969,7 +975,7 @@ scalarizable_type_p (tree type)
}
}
-static void scalarize_elem (tree, HOST_WIDE_INT, HOST_WIDE_INT, tree, tree);
+static void scalarize_elem (tree, HOST_WIDE_INT, HOST_WIDE_INT, bool, tree, tree);
/* Create total_scalarization accesses for all scalar fields of a member
of type DECL_TYPE conforming to scalarizable_type_p. BASE
@@ -990,8 +996,9 @@ completely_scalarize (tree base, tree de
tree ft = TREE_TYPE (fld);
tree nref = build3 (COMPONENT_REF, ft, ref, fld, NULL_TREE);
- scalarize_elem (base, pos, tree_to_uhwi (DECL_SIZE (fld)), nref,
- ft);
+ scalarize_elem (base, pos, tree_to_uhwi (DECL_SIZE (fld)),
+ TYPE_REVERSE_STORAGE_ORDER (decl_type),
+ nref, ft);
}
break;
case ARRAY_TYPE:
@@ -1017,7 +1024,9 @@ completely_scalarize (tree base, tree de
tree nref = build4 (ARRAY_REF, elemtype, ref, size_int (idx),
NULL_TREE, NULL_TREE);
int el_off = offset + idx * el_size;
- scalarize_elem (base, el_off, el_size, nref, elemtype);
+ scalarize_elem (base, el_off, el_size,
+ TYPE_REVERSE_STORAGE_ORDER (decl_type),
+ nref, elemtype);
}
while (++idx <= lenp1);
}
@@ -1031,11 +1040,12 @@ completely_scalarize (tree base, tree de
/* Create total_scalarization accesses for a member of type TYPE, which must
satisfy either is_gimple_reg_type or scalarizable_type_p. BASE must be the
top-most VAR_DECL representing the variable; within that, POS and SIZE locate
- the member and REF must be the reference expression for it. */
+ the member, REVERSE gives its storage order, and REF must be the reference
+ expression for it. */
static void
-scalarize_elem (tree base, HOST_WIDE_INT pos, HOST_WIDE_INT size,
- tree ref, tree type)
+scalarize_elem (tree base, HOST_WIDE_INT pos, HOST_WIDE_INT size, bool reverse,
+ tree ref, tree type)
{
if (is_gimple_reg_type (type))
{
@@ -1043,6 +1053,7 @@ scalarize_elem (tree base, HOST_WIDE_INT
access->expr = ref;
access->type = type;
access->grp_total_scalarization = 1;
+ access->reverse = reverse;
/* Accesses for intraprocedural SRA can have their stmt NULL. */
}
else
@@ -1118,7 +1129,7 @@ build_access_from_expr_1 (tree expr, gim
and not the result type. Ada produces such statements. We are also
capable of handling the topmost V_C_E but not any of those buried in other
handled components. */
- if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
+ if (TREE_CODE (expr) == VIEW_CONVERT_EXPR && !storage_order_barrier_p (expr))
expr = TREE_OPERAND (expr, 0);
if (contains_view_convert_expr_p (expr))
@@ -1251,7 +1262,11 @@ build_accesses_from_assign (gimple *stmt
lacc = build_access_from_expr_1 (lhs, stmt, true);
if (lacc)
- lacc->grp_assignment_write = 1;
+ {
+ lacc->grp_assignment_write = 1;
+ if (storage_order_barrier_p (rhs))
+ lacc->grp_unscalarizable_region = 1;
+ }
if (racc)
{
@@ -1259,6 +1274,8 @@ build_accesses_from_assign (gimple *stmt
if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
&& !is_gimple_reg_type (racc->type))
bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
+ if (storage_order_barrier_p (lhs))
+ racc->grp_unscalarizable_region = 1;
}
if (lacc && racc
@@ -1566,17 +1583,15 @@ make_fancy_name (tree expr)
}
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
- EXP_TYPE at the given OFFSET. If BASE is something for which
- get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
- to insert new statements either before or below the current one as specified
- by INSERT_AFTER. This function is not capable of handling bitfields.
-
- BASE must be either a declaration or a memory reference that has correct
- alignment ifformation embeded in it (e.g. a pre-existing one in SRA). */
+ EXP_TYPE at the given OFFSET and with storage order REVERSE. If BASE is
+ something for which get_addr_base_and_unit_offset returns NULL, gsi must
+ be non-NULL and is used to insert new statements either before or below
+ the current one as specified by INSERT_AFTER. This function is not capable
+ of handling bitfields. */
tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
- tree exp_type, gimple_stmt_iterator *gsi,
+ bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
bool insert_after)
{
tree prev_base = base;
@@ -1633,6 +1648,7 @@ build_ref_for_offset (location_t loc, tr
exp_type = build_aligned_type (exp_type, align);
mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
+ REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
if (TREE_THIS_VOLATILE (prev_base))
TREE_THIS_VOLATILE (mem_ref) = 1;
if (TREE_SIDE_EFFECTS (prev_base))
@@ -1659,13 +1675,17 @@ build_ref_for_model (location_t loc, tre
offset -= int_bit_position (fld);
exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
- t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
+ t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
+ gsi, insert_after);
+ /* The flag will be set on the record type. */
+ REF_REVERSE_STORAGE_ORDER (t) = 0;
return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
NULL_TREE);
}
else
- return build_ref_for_offset (loc, base, offset, model->type,
- gsi, insert_after);
+ return
+ build_ref_for_offset (loc, base, offset, model->reverse, model->type,
+ gsi, insert_after);
}
/* Attempt to build a memory reference that we could but into a gimple
@@ -2322,8 +2342,8 @@ analyze_access_subtree (struct access *r
&& (root->size % BITS_PER_UNIT) == 0);
root->type = build_nonstandard_integer_type (root->size,
TYPE_UNSIGNED (rt));
- root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
- root->base, root->offset,
+ root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
+ root->offset, root->reverse,
root->type, NULL, false);
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -2447,6 +2467,7 @@ create_artificial_child_access (struct a
access->type = model->type;
access->grp_write = true;
access->grp_read = false;
+ access->reverse = model->reverse;
child = &parent->first_child;
while (*child && (*child)->offset < new_offset)
@@ -2831,6 +2852,7 @@ get_access_for_expr (tree expr)
{
HOST_WIDE_INT offset, size, max_size;
tree base;
+ bool reverse;
/* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
a different size than the size of its argument and we need the latter
@@ -2838,7 +2860,7 @@ get_access_for_expr (tree expr)
if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
expr = TREE_OPERAND (expr, 0);
- base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
+ base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
if (max_size == -1 || !DECL_P (base))
return NULL;
@@ -4471,6 +4493,7 @@ turn_representatives_into_adjustments (v
adj.type = repr->type;
adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
adj.offset = repr->offset;
+ adj.reverse = repr->reverse;
adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
&& (repr->grp_maybe_modified
|| repr->grp_not_necessarilly_dereferenced));
@@ -5097,9 +5120,9 @@ ipa_sra_check_caller (struct cgraph_node
tree offset;
HOST_WIDE_INT bitsize, bitpos;
machine_mode mode;
- int unsignedp, volatilep = 0;
+ int unsignedp, reversep, volatilep = 0;
get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &volatilep, false);
+ &unsignedp, &reversep, &volatilep, false);
if (bitpos % BITS_PER_UNIT)
{
iscc->bad_arg_alignment = true;
Index: ubsan.c
===================================================================
--- ubsan.c (.../trunk/gcc) (revision 228112)
+++ ubsan.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -1378,9 +1378,9 @@ instrument_bool_enum_load (gimple_stmt_i
HOST_WIDE_INT bitsize, bitpos;
tree offset;
machine_mode mode;
- int volatilep = 0, unsignedp = 0;
+ int volatilep = 0, reversep, unsignedp = 0;
tree base = get_inner_reference (rhs, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &volatilep, false);
+ &unsignedp, &reversep, &volatilep, false);
tree utype = build_nonstandard_integer_type (modebitsize, 1);
if ((TREE_CODE (base) == VAR_DECL && DECL_HARD_REGISTER (base))
@@ -1763,9 +1763,9 @@ instrument_object_size (gimple_stmt_iter
HOST_WIDE_INT bitsize, bitpos;
tree offset;
machine_mode mode;
- int volatilep = 0, unsignedp = 0;
+ int volatilep = 0, reversep, unsignedp = 0;
tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &volatilep, false);
+ &unsignedp, &reversep, &volatilep, false);
if (bitpos % BITS_PER_UNIT != 0
|| bitsize != size_in_bytes * BITS_PER_UNIT)
Index: ipa-prop.c
===================================================================
--- ipa-prop.c (.../trunk/gcc) (revision 228112)
+++ ipa-prop.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -967,7 +967,9 @@ ipa_load_from_parm_agg (struct ipa_func_
{
int index;
HOST_WIDE_INT size, max_size;
- tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
+ bool reverse;
+ tree base
+ = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);
if (max_size == -1 || max_size != size || *offset_p < 0)
return false;
@@ -1092,6 +1094,7 @@ compute_complex_assign_jump_func (struct
{
HOST_WIDE_INT offset, size, max_size;
tree op1, tc_ssa, base, ssa;
+ bool reverse;
int index;
op1 = gimple_assign_rhs1 (stmt);
@@ -1139,7 +1142,7 @@ compute_complex_assign_jump_func (struct
op1 = TREE_OPERAND (op1, 0);
if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
return;
- base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
+ base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
if (TREE_CODE (base) != MEM_REF
/* If this is a varying address, punt. */
|| max_size == -1
@@ -1175,6 +1178,7 @@ get_ancestor_addr_info (gimple *assign,
{
HOST_WIDE_INT size, max_size;
tree expr, parm, obj;
+ bool reverse;
if (!gimple_assign_single_p (assign))
return NULL_TREE;
@@ -1184,7 +1188,7 @@ get_ancestor_addr_info (gimple *assign,
return NULL_TREE;
expr = TREE_OPERAND (expr, 0);
obj = expr;
- expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
+ expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);
if (TREE_CODE (expr) != MEM_REF
/* If this is a varying address, punt. */
@@ -1450,10 +1454,11 @@ determine_locally_known_aggregate_parts
else if (TREE_CODE (arg) == ADDR_EXPR)
{
HOST_WIDE_INT arg_max_size;
+ bool reverse;
arg = TREE_OPERAND (arg, 0);
arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
- &arg_max_size);
+ &arg_max_size, &reverse);
if (arg_max_size == -1
|| arg_max_size != arg_size
|| arg_offset < 0)
@@ -1472,13 +1477,14 @@ determine_locally_known_aggregate_parts
else
{
HOST_WIDE_INT arg_max_size;
+ bool reverse;
gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
by_ref = false;
check_ref = false;
arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
- &arg_max_size);
+ &arg_max_size, &reverse);
if (arg_max_size == -1
|| arg_max_size != arg_size
|| arg_offset < 0)
@@ -1499,6 +1505,7 @@ determine_locally_known_aggregate_parts
gimple *stmt = gsi_stmt (gsi);
HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
tree lhs, rhs, lhs_base;
+ bool reverse;
if (!stmt_may_clobber_ref_p_1 (stmt, &r))
continue;
@@ -1513,7 +1520,7 @@ determine_locally_known_aggregate_parts
break;
lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
- &lhs_max_size);
+ &lhs_max_size, &reverse);
if (lhs_max_size == -1
|| lhs_max_size != lhs_size)
break;
@@ -3982,6 +3989,7 @@ ipa_modify_call_arguments (struct cgraph
base = force_gimple_operand_gsi (&gsi, base,
true, NULL, true, GSI_SAME_STMT);
expr = fold_build2_loc (loc, MEM_REF, type, base, off);
+ REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
/* If expr is not a valid gimple call argument emit
a load into a temporary. */
if (is_gimple_reg_type (TREE_TYPE (expr)))
@@ -4001,6 +4009,7 @@ ipa_modify_call_arguments (struct cgraph
else
{
expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
+ REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
expr = build_fold_addr_expr (expr);
expr = force_gimple_operand_gsi (&gsi, expr,
true, NULL, true, GSI_SAME_STMT);
@@ -4105,7 +4114,10 @@ ipa_modify_expr (tree *expr, bool conver
tree src;
if (cand->by_ref)
- src = build_simple_mem_ref (cand->new_decl);
+ {
+ src = build_simple_mem_ref (cand->new_decl);
+ REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
+ }
else
src = cand->new_decl;
@@ -4172,7 +4184,9 @@ ipa_get_adjustment_candidate (tree **exp
}
HOST_WIDE_INT offset, size, max_size;
- tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
+ bool reverse;
+ tree base
+ = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
if (!base || size == -1 || max_size == -1)
return NULL;
Index: ipa-prop.h
===================================================================
--- ipa-prop.h (.../trunk/gcc) (revision 228112)
+++ ipa-prop.h (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -735,6 +735,10 @@ struct ipa_parm_adjustment
or one about to be removed. */
enum ipa_parm_op op;
+ /* Storage order of the original parameter (for the cases when the new
+ parameter is a component of an original one). */
+ unsigned reverse : 1;
+
/* The parameter is to be passed by reference. */
unsigned by_ref : 1;
};
@@ -772,7 +776,7 @@ ipa_parm_adjustment *ipa_get_adjustment_
/* From tree-sra.c: */
-tree build_ref_for_offset (location_t, tree, HOST_WIDE_INT, tree,
+tree build_ref_for_offset (location_t, tree, HOST_WIDE_INT, bool, tree,
gimple_stmt_iterator *, bool);
/* In ipa-cp.c */
Index: tree-ssa-dce.c
===================================================================
--- tree-ssa-dce.c (.../trunk/gcc) (revision 228112)
+++ tree-ssa-dce.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -485,8 +485,10 @@ mark_aliased_reaching_defs_necessary_1 (
{
tree base, lhs = gimple_get_lhs (def_stmt);
HOST_WIDE_INT size, offset, max_size;
+ bool reverse;
ao_ref_base (ref);
- base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
+ base
+ = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
/* We can get MEM[symbol: sZ, index: D.8862_1] here,
so base == refd->base does not always hold. */
if (base == ref->base)
Index: var-tracking.c
===================================================================
--- var-tracking.c (.../trunk/gcc) (revision 228112)
+++ var-tracking.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -5105,9 +5105,10 @@ track_expr_p (tree expr, bool need_rtl)
&& TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
{
HOST_WIDE_INT bitsize, bitpos, maxsize;
+ bool reverse;
tree innerdecl
= get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
- &maxsize);
+ &maxsize, &reverse);
if (!DECL_P (innerdecl)
|| DECL_IGNORED_P (innerdecl)
/* Do not track declarations for parts of tracked parameters
Index: tree-ssa-structalias.c
===================================================================
--- tree-ssa-structalias.c (.../trunk/gcc) (revision 228112)
+++ tree-ssa-structalias.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -3170,6 +3170,7 @@ get_constraint_for_component_ref (tree t
HOST_WIDE_INT bitsize = -1;
HOST_WIDE_INT bitmaxsize = -1;
HOST_WIDE_INT bitpos;
+ bool reverse;
tree forzero;
/* Some people like to do cute things like take the address of
@@ -3191,7 +3192,7 @@ get_constraint_for_component_ref (tree t
return;
}
- t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
+ t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);
/* Pretend to take the address of the base, we'll take care of
adding the required subset of sub-fields below. */
@@ -3617,9 +3618,12 @@ do_structure_copy (tree lhsop, tree rhso
{
HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
+ bool reverse;
unsigned k = 0;
- get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
- get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
+ get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize,
+ &reverse);
+ get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize,
+ &reverse);
for (j = 0; lhsc.iterate (j, &lhsp);)
{
varinfo_t lhsv, rhsv;
Index: gimple-laddress.c
===================================================================
--- gimple-laddress.c (.../trunk/gcc) (revision 228112)
+++ gimple-laddress.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -102,10 +102,10 @@ pass_laddress::execute (function *fun)
HOST_WIDE_INT bitsize, bitpos;
tree base, offset;
machine_mode mode;
- int volatilep = 0, unsignedp = 0;
+ int volatilep = 0, reversep, unsignedp = 0;
base = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize,
&bitpos, &offset, &mode, &unsignedp,
- &volatilep, false);
+ &reversep, &volatilep, false);
gcc_assert (base != NULL_TREE && (bitpos % BITS_PER_UNIT) == 0);
if (offset != NULL_TREE)
{
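The remaining hunks update the back ends for the new trailing argument
of assemble_real (and of store_bit_field in ifcvt.c above), which says
whether the value is to be emitted in reverse storage order.  Constant
pool entries are always in native order, so these callers uniformly
pass false:

  /* Illustrative: emit R in MODE at ALIGN bits, in native order.  */
  assemble_real (r, mode, align, false);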
Index: config/s390/s390.c
===================================================================
--- config/s390/s390.c (.../trunk/gcc) (revision 228112)
+++ config/s390/s390.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -8728,7 +8728,7 @@ s390_output_pool_entry (rtx exp, machine
gcc_assert (GET_CODE (exp) == CONST_DOUBLE);
REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
- assemble_real (r, mode, align);
+ assemble_real (r, mode, align, false);
break;
case MODE_INT:
Index: config/sh/sh.md
===================================================================
--- config/sh/sh.md (.../trunk/gcc) (revision 228112)
+++ config/sh/sh.md (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -12392,7 +12392,7 @@ (define_insn "consttable_sf"
{
REAL_VALUE_TYPE d;
REAL_VALUE_FROM_CONST_DOUBLE (d, operands[0]);
- assemble_real (d, SFmode, GET_MODE_ALIGNMENT (SFmode));
+ assemble_real (d, SFmode, GET_MODE_ALIGNMENT (SFmode), false);
}
return "";
}
@@ -12410,7 +12410,7 @@ (define_insn "consttable_df"
{
REAL_VALUE_TYPE d;
REAL_VALUE_FROM_CONST_DOUBLE (d, operands[0]);
- assemble_real (d, DFmode, GET_MODE_ALIGNMENT (DFmode));
+ assemble_real (d, DFmode, GET_MODE_ALIGNMENT (DFmode), false);
}
return "";
}
Index: config/arm/arm.c
===================================================================
--- config/arm/arm.c (.../trunk/gcc) (revision 228112)
+++ config/arm/arm.c (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -22644,7 +22644,7 @@ arm_assemble_integer (rtx x, unsigned in
assemble_real
(rval, GET_MODE_INNER (mode),
- i == 0 ? BIGGEST_ALIGNMENT : size * BITS_PER_UNIT);
+ i == 0 ? BIGGEST_ALIGNMENT : size * BITS_PER_UNIT, false);
}
return true;
Index: config/arm/arm.md
===================================================================
--- config/arm/arm.md (.../trunk/gcc) (revision 228112)
+++ config/arm/arm.md (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -10857,7 +10857,7 @@ (define_insn "consttable_4"
{
REAL_VALUE_TYPE r;
REAL_VALUE_FROM_CONST_DOUBLE (r, x);
- assemble_real (r, GET_MODE (x), BITS_PER_WORD);
+ assemble_real (r, GET_MODE (x), BITS_PER_WORD, false);
break;
}
default:
@@ -10890,7 +10890,7 @@ (define_insn "consttable_8"
{
REAL_VALUE_TYPE r;
REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
- assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
+ assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD, false);
break;
}
default:
@@ -10915,7 +10915,7 @@ (define_insn "consttable_16"
{
REAL_VALUE_TYPE r;
REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
- assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
+ assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD, false);
break;
}
default:
Index: config/mips/mips.md
===================================================================
--- config/mips/mips.md (.../trunk/gcc) (revision 228112)
+++ config/mips/mips.md (.../branches/scalar-storage-order/gcc) (revision 228133)
@@ -7309,7 +7309,7 @@ (define_insn "consttable_float"
gcc_assert (GET_CODE (operands[0]) == CONST_DOUBLE);
REAL_VALUE_FROM_CONST_DOUBLE (d, operands[0]);
assemble_real (d, GET_MODE (operands[0]),
- GET_MODE_BITSIZE (GET_MODE (operands[0])));
+ GET_MODE_BITSIZE (GET_MODE (operands[0])), false);
return "";
}
[(set (attr "length")