Hi,

This addresses the issue raised by Andrew a few weeks ago about the use of 
memory copy functions to toggle the scalar storage order.  Recall that you 
cannot take the address of a scalar that is stored in reverse order (the 
compiler errors out), but you can take the address of the enclosing aggregate, 
which means that you can also pass it to the memory copy functions.  In that 
case, the optimizer may rewrite the copy into a scalar copy, which is a no-no.
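
For the record, here is the gist of the problematic pattern, distilled from 
the attached sso-1.c test (a sketch only; the names u32be and load_be are 
mine, and the comments assume a little-endian host):

/* Union whose scalars are stored in big-endian order.  */
typedef union
{
  unsigned int val;
  unsigned char v[4];
} __attribute__((scalar_storage_order("big-endian"))) u32be;

unsigned int load_be (unsigned int u)
{
  u32be t;
  /* Taking &t.val is rejected, but taking &t is allowed, so the raw byte
     copy below is legal.  Rewriting it into a scalar copy would apply the
     storage-order byte swap where none is wanted.  */
  __builtin_memcpy (&t, &u, sizeof (u));
  return t.val;  /* Swaps the bytes on a little-endian host.  */
}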

The patch also contains an unrelated hunk for the tree pretty printer.

Tested on x86-64/Linux, OK for the mainline?


2020-06-01  Eric Botcazou  <ebotca...@adacore.com>

        * gimple-fold.c (gimple_fold_builtin_memory_op): Do not replace with a
        scalar copy if either type has reverse scalar storage order.
        * tree-ssa-sccvn.c (vn_reference_lookup_3): Do not propagate through a
        memory copy if either type has reverse scalar storage order.

        * tree-pretty-print.c (dump_generic_node) <ARRAY_TYPE>: Print quals.


2020-06-01  Eric Botcazou  <ebotca...@adacore.com>

        * gcc.c-torture/execute/sso-1.c: New test.

-- 
Eric Botcazou
diff --git a/gcc/gimple-fold.c b/gcc/gimple-fold.c
index 4e3de95d2d2..64a9221f8cf 100644
--- a/gcc/gimple-fold.c
+++ b/gcc/gimple-fold.c
@@ -741,7 +741,8 @@ gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
     }
   else
     {
-      tree srctype, desttype;
+      tree srctype = TREE_TYPE (TREE_TYPE (src));
+      tree desttype = TREE_TYPE (TREE_TYPE (dest));
       unsigned int src_align, dest_align;
       tree off0;
       const char *tmp_str;
@@ -767,7 +768,11 @@ gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
 	     hack can be removed.  */
 	  && !c_strlen (src, 1)
 	  && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
-	       && memchr (tmp_str, 0, tmp_len) == NULL))
+	       && memchr (tmp_str, 0, tmp_len) == NULL)
+	  && !(AGGREGATE_TYPE_P (srctype)
+	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
+	  && !(AGGREGATE_TYPE_P (desttype)
+	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
 	{
 	  unsigned ilen = tree_to_uhwi (len);
 	  if (pow2p_hwi (ilen))
@@ -957,10 +962,15 @@ gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
 	 but that only gains us that the destination and source possibly
 	 no longer will have their address taken.  */
       srctype = TREE_TYPE (TREE_TYPE (src));
+      desttype = TREE_TYPE (TREE_TYPE (dest));
+      if ((AGGREGATE_TYPE_P (srctype)
+	   && TYPE_REVERSE_STORAGE_ORDER (srctype))
+	  || (AGGREGATE_TYPE_P (desttype)
+	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
+	return false;
       if (TREE_CODE (srctype) == ARRAY_TYPE
 	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
 	srctype = TREE_TYPE (srctype);
-      desttype = TREE_TYPE (TREE_TYPE (dest));
       if (TREE_CODE (desttype) == ARRAY_TYPE
 	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
 	desttype = TREE_TYPE (desttype);
diff --git a/gcc/tree-pretty-print.c b/gcc/tree-pretty-print.c
index f04fd65091a..7d581214022 100644
--- a/gcc/tree-pretty-print.c
+++ b/gcc/tree-pretty-print.c
@@ -1899,8 +1899,16 @@ dump_generic_node (pretty_printer *pp, tree node, int spc, dump_flags_t flags,
 
     case ARRAY_TYPE:
       {
+	unsigned int quals = TYPE_QUALS (node);
 	tree tmp;
 
+	if (quals & TYPE_QUAL_ATOMIC)
+	  pp_string (pp, "atomic ");
+	if (quals & TYPE_QUAL_CONST)
+	  pp_string (pp, "const ");
+	if (quals & TYPE_QUAL_VOLATILE)
+	  pp_string (pp, "volatile ");
+
 	/* Print the innermost component type.  */
 	for (tmp = TREE_TYPE (node); TREE_CODE (tmp) == ARRAY_TYPE;
 	     tmp = TREE_TYPE (tmp))
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index 4b3f31c12cb..17867b65ecb 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -3275,6 +3275,9 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
 	}
       if (TREE_CODE (lhs) == ADDR_EXPR)
 	{
+	  if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs)))
+	      && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs))))
+	    return (void *)-1;
 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
 						    &lhs_offset);
 	  if (!tem)
@@ -3303,6 +3306,9 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
 	rhs = vn_valueize (rhs);
       if (TREE_CODE (rhs) == ADDR_EXPR)
 	{
+	  if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs)))
+	      && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs))))
+	    return (void *)-1;
 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
 						    &rhs_offset);
 	  if (!tem)
/* gcc.c-torture/execute/sso-1.c: the new test.  */

typedef unsigned char uint8_t;
typedef unsigned int uint32_t;

#define __big_endian__ scalar_storage_order("big-endian")
#define __little_endian__ scalar_storage_order("little-endian")

typedef union
{
  uint32_t val;
  uint8_t v[4];
} __attribute__((__big_endian__)) upal_u32be_t;

typedef union
{
  uint32_t val;
  uint8_t v[4];
} __attribute__((__little_endian__)) upal_u32le_t;

static inline uint32_t native_to_big_endian(uint32_t t)
{
#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
  return t;
#else
  return __builtin_bswap32(t);
#endif
}
static inline uint32_t native_to_little_endian(uint32_t t)
{
#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
  return __builtin_bswap32(t);
#else
  return t;
#endif
}
#define test(p, p1, i) do { if (p[i] != p1[i]) __builtin_abort (); } while (0)

#define tests(p, p1) do { test(p, p1, 0); test(p, p1, 1); \
                          test(p, p1, 2); test(p, p1, 3); } while (0)

int main(void)
{
  uint8_t *p, *p1;
  uint32_t u = 0x12345678;
  upal_u32be_t tempb;
  __builtin_memcpy (&tempb, &u, sizeof(uint32_t));
  uint32_t bu = tempb.val;
  uint32_t b1u = native_to_big_endian(u);
  p = (uint8_t*)&bu;
  p1 = (uint8_t*)&b1u;
  tests(p, p1);

  u = 0x12345678;
  upal_u32le_t templ;
  __builtin_memcpy (&templ, &u, sizeof(uint32_t));
  uint32_t lu = templ.val;
  uint32_t l1u = native_to_little_endian(u);
  p = (uint8_t*)&lu;
  p1 = (uint8_t*)&l1u;
  tests(p, p1);

  return 0;
}
