Change the AARCH64_EXPAND_ALIGNMENT macro into proper function calls to make
future changes easier.  Use the existing alignment settings, however avoid
overaligning small arrays or structs to 64 bits when there is no benefit.
This gives a small reduction in data and stack size.

Passes regress & bootstrap, OK for commit?

gcc/ChangeLog:

        * config/aarch64/aarch64.h (AARCH64_EXPAND_ALIGNMENT): Remove.
        (DATA_ALIGNMENT): Use aarch64_data_alignment.
        (LOCAL_ALIGNMENT): Use aarch64_stack_alignment.
        * config/aarch64/aarch64.cc (aarch64_data_alignment): New function.
        (aarch64_stack_alignment): Likewise.
        * config/aarch64/aarch64-protos.h (aarch64_data_alignment): New prototype.
        (aarch64_stack_alignment): Likewise.

---

diff --git a/gcc/config/aarch64/aarch64-protos.h 
b/gcc/config/aarch64/aarch64-protos.h
index 
6da81556110c978a9de6f6fad5775c9d77771b10..4133a47693b24abca071a7f77fcdbb91d3dc261a
 100644
--- a/gcc/config/aarch64/aarch64-protos.h
+++ b/gcc/config/aarch64/aarch64-protos.h
@@ -1207,4 +1207,7 @@ extern void aarch64_adjust_reg_alloc_order ();
 bool aarch64_optimize_mode_switching (aarch64_mode_entity);
 void aarch64_restore_za (rtx);
 
+extern unsigned aarch64_data_alignment (const_tree exp, unsigned align);
+extern unsigned aarch64_stack_alignment (const_tree exp, unsigned align);
+
 #endif /* GCC_AARCH64_PROTOS_H */
diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h
index 
f07b2c49f0d9abd3309afb98499ab7eebcff05bd..64f55f6f94e37bffa6b1e7403274ec5f5d906095
 100644
--- a/gcc/config/aarch64/aarch64.h
+++ b/gcc/config/aarch64/aarch64.h
@@ -121,24 +121,11 @@
    of LSE instructions.  */
 #define TARGET_OUTLINE_ATOMICS (aarch64_flag_outline_atomics)
 
-/* Align definitions of arrays, unions and structures so that
-   initializations and copies can be made more efficient.  This is not
-   ABI-changing, so it only affects places where we can see the
-   definition.  Increasing the alignment tends to introduce padding,
-   so don't do this when optimizing for size/conserving stack space.  */
-#define AARCH64_EXPAND_ALIGNMENT(COND, EXP, ALIGN)                     \
-  (((COND) && ((ALIGN) < BITS_PER_WORD)                                        
\
-    && (TREE_CODE (EXP) == ARRAY_TYPE                                  \
-       || TREE_CODE (EXP) == UNION_TYPE                                \
-       || TREE_CODE (EXP) == RECORD_TYPE)) ? BITS_PER_WORD : (ALIGN))
-
-/* Align global data.  */
-#define DATA_ALIGNMENT(EXP, ALIGN)                     \
-  AARCH64_EXPAND_ALIGNMENT (!optimize_size, EXP, ALIGN)
-
-/* Similarly, make sure that objects on the stack are sensibly aligned.  */
-#define LOCAL_ALIGNMENT(EXP, ALIGN)                            \
-  AARCH64_EXPAND_ALIGNMENT (!flag_conserve_stack, EXP, ALIGN)
+/* Align global data as an optimization.  */
+#define DATA_ALIGNMENT(EXP, ALIGN) aarch64_data_alignment (EXP, ALIGN)
+
+/* Align stack data as an optimization.  */
+#define LOCAL_ALIGNMENT(EXP, ALIGN) aarch64_stack_alignment (EXP, ALIGN)
 
 #define STRUCTURE_SIZE_BOUNDARY                8
 
diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
index 
c78845fc27e6d6a8a1631b487b19fb3143a231ac..5369129d4a405afe5a760081149da1347e7b8842
 100644
--- a/gcc/config/aarch64/aarch64.cc
+++ b/gcc/config/aarch64/aarch64.cc
@@ -2651,6 +2651,60 @@ aarch64_constant_alignment (const_tree exp, 
HOST_WIDE_INT align)
   return align;
 }
 
+/* Align definitions of arrays, unions and structures so that
+   initializations and copies can be made more efficient.  This is not
+   ABI-changing, so it only affects places where we can see the
+   definition.  Increasing the alignment tends to introduce padding,
+   so don't do this when optimizing for size/conserving stack space.
+   TYPE is the type of the object and ALIGN its natural alignment in
+   bits; return the (possibly increased) alignment to use.  */
+
+unsigned
+aarch64_data_alignment (const_tree type, unsigned align)
+{
+  if (optimize_size)
+    return align;
+
+  if (AGGREGATE_TYPE_P (type))
+    {
+      unsigned HOST_WIDE_INT size = 0;
+
+      if (TYPE_SIZE (type) && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
+         && tree_fits_uhwi_p (TYPE_SIZE (type)))
+       size = tree_to_uhwi (TYPE_SIZE (type));
+
+      /* Align small structs/arrays to 32 bits, or 64 bits if larger.
+        Branch on the size first: the previous form bumped a small
+        object that is already 32-bit aligned up to 64 bits, which is
+        exactly the overalignment this patch is meant to avoid.  A
+        variable-sized aggregate (SIZE left at 0) is treated as small.  */
+      if (size <= 32)
+       align = MAX (align, 32);
+      else
+       align = MAX (align, 64);
+    }
+
+  return align;
+}
+
+/* Similar to aarch64_data_alignment, but for objects on the stack:
+   increase the alignment of aggregates when it makes initialization
+   and copies cheaper, except when conserving stack space.  TYPE is
+   the type of the object and ALIGN its natural alignment in bits;
+   return the (possibly increased) alignment to use.  */
+
+unsigned
+aarch64_stack_alignment (const_tree type, unsigned align)
+{
+  if (flag_conserve_stack)
+    return align;
+
+  if (AGGREGATE_TYPE_P (type))
+    {
+      unsigned HOST_WIDE_INT size = 0;
+
+      if (TYPE_SIZE (type) && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
+         && tree_fits_uhwi_p (TYPE_SIZE (type)))
+       size = tree_to_uhwi (TYPE_SIZE (type));
+
+      /* Align small structs/arrays to 32 bits, or 64 bits if larger.
+        Branch on the size first: the previous form bumped a small
+        object that is already 32-bit aligned up to 64 bits, which is
+        exactly the overalignment this patch is meant to avoid.  A
+        variable-sized aggregate (SIZE left at 0) is treated as small.  */
+      if (size <= 32)
+       align = MAX (align, 32);
+      else
+       align = MAX (align, 64);
+    }
+
+  return align;
+}
+
 /* Return true if calls to DECL should be treated as
    long-calls (ie called via a register).  */
 static bool

Reply via email to