Hello!

The patch changes the move insn patterns to select their output insn
template based on their type attribute instead of which_alternative.
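
For reference, the idiom change looks roughly like this (a simplified
sketch, not copied verbatim from any single pattern -- the real cases
also handle misaligned operands and per-mode templates):

  /* Before: the output template is picked by alternative number.  */
  switch (which_alternative)
    {
    case 0:
      return standard_sse_constant_opcode (insn, operands[1]);
    case 1:
    case 2:
      return "%vmovdqa\t{%1, %0|%0, %1}";
    default:
      gcc_unreachable ();
    }

  /* After: the output template is picked by the insn type attribute,
     which the pattern already declares via (set_attr "type" ...).  */
  switch (get_attr_type (insn))
    {
    case TYPE_SSELOG1:
      return standard_sse_constant_opcode (insn, operands[1]);
    case TYPE_SSEMOV:
      return "%vmovdqa\t{%1, %0|%0, %1}";
    default:
      gcc_unreachable ();
    }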

2013-03-20  Uros Bizjak  <ubiz...@gmail.com>

        * config/i386/i386.md (*movoi_internal_avx): Emit insn template
        depending on type attribute.
        (*movti_internal): Ditto.
        (*movtf_internal): Ditto.
        (*movxf_internal): Ditto.
        (*movdf_internal): Ditto.
        (*movsf_internal): Ditto.

Tested on x86_64-pc-linux-gnu {,-m32}, committed to mainline.

Uros.
Index: i386.md
===================================================================
--- i386.md     (revision 196837)
+++ i386.md     (working copy)
@@ -1758,12 +1758,12 @@
        (match_operand:OI 1 "vector_move_operand"  "C ,xm,x"))]
   "TARGET_AVX && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
 {
-  switch (which_alternative)
+  switch (get_attr_type (insn))
     {
-    case 0:
+    case TYPE_SSELOG1:
       return standard_sse_constant_opcode (insn, operands[1]);
-    case 1:
-    case 2:
+
+    case TYPE_SSEMOV:
       if (misaligned_operand (operands[0], OImode)
          || misaligned_operand (operands[1], OImode))
        {
@@ -1779,6 +1779,7 @@
          else
            return "vmovdqa\t{%1, %0|%0, %1}";
        }
+
     default:
       gcc_unreachable ();
     }
@@ -1800,15 +1801,15 @@
   "(TARGET_64BIT || TARGET_SSE)
    && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
 {
-  switch (which_alternative)
+  switch (get_attr_type (insn))
     {
-    case 0:
-    case 1:
+    case TYPE_MULTI:
       return "#";
-    case 2:
+
+    case TYPE_SSELOG1:
       return standard_sse_constant_opcode (insn, operands[1]);
-    case 3:
-    case 4:
+
+    case TYPE_SSEMOV:
       /* TDmode values are passed as TImode on the stack.  Moving them
         to stack may result in unaligned memory access.  */
       if (misaligned_operand (operands[0], TImode)
@@ -1826,12 +1827,13 @@
          else
            return "%vmovdqa\t{%1, %0|%0, %1}";
        }
+
     default:
       gcc_unreachable ();
     }
 }
   [(set_attr "isa" "x64,x64,*,*,*")
-   (set_attr "type" "*,*,sselog1,ssemov,ssemov")
+   (set_attr "type" "multi,multi,sselog1,ssemov,ssemov")
    (set (attr "prefix")
      (if_then_else (eq_attr "type" "sselog1,ssemov")
        (const_string "maybe_vex")
@@ -1914,7 +1916,7 @@
     case TYPE_LEA:
       return "lea{q}\t{%E1, %0|%0, %E1}";
 
-    default:
+    case TYPE_IMOV:
       gcc_assert (!flag_pic || LEGITIMATE_PIC_OPERAND_P (operands[1]));
       if (get_attr_mode (insn) == MODE_SI)
        return "mov{l}\t{%k1, %k0|%k0, %k1}";
@@ -1924,6 +1926,9 @@
        return "lea{q}\t{%E1, %0|%0, %E1}";
       else
        return "mov{q}\t{%1, %0|%0, %1}";
+
+    default:
+      gcc_unreachable ();
     }
 }
   [(set (attr "isa")
@@ -2018,14 +2023,18 @@
     case TYPE_SSEMOV:
       switch (get_attr_mode (insn))
        {
+       case MODE_SI:
+          return "%vmovd\t{%1, %0|%0, %1}";
        case MODE_TI:
          return "%vmovdqa\t{%1, %0|%0, %1}";
+
        case MODE_V4SF:
          return "%vmovaps\t{%1, %0|%0, %1}";
-       case MODE_SI:
-          return "%vmovd\t{%1, %0|%0, %1}";
+
        case MODE_SF:
-          return "%vmovss\t{%1, %0|%0, %1}";
+         gcc_assert (!TARGET_AVX);
+          return "movss\t{%1, %0|%0, %1}";
+
        default:
          gcc_unreachable ();
        }
@@ -2041,12 +2050,15 @@
     case TYPE_LEA:
       return "lea{l}\t{%E1, %0|%0, %E1}";
 
-    default:
+    case TYPE_IMOV:
       gcc_assert (!flag_pic || LEGITIMATE_PIC_OPERAND_P (operands[1]));
       if (ix86_use_lea_for_mov (insn, operands))
        return "lea{l}\t{%E1, %0|%0, %E1}";
       else
        return "mov{l}\t{%1, %0|%0, %1}";
+
+    default:
+      gcc_unreachable ();
     }
 }
   [(set (attr "type")
@@ -2631,12 +2643,12 @@
        || (!TARGET_MEMORY_MISMATCH_STALL
           && memory_operand (operands[0], TFmode)))"
 {
-  switch (which_alternative)
+  switch (get_attr_type (insn))
     {
-    case 0:
+    case TYPE_SSELOG1:
       return standard_sse_constant_opcode (insn, operands[1]);
-    case 1:
-    case 2:
+
+    case TYPE_SSEMOV:
       /* Handle misaligned load/store since we
          don't have movmisaligntf pattern. */
       if (misaligned_operand (operands[0], TFmode)
@@ -2655,8 +2667,7 @@
            return "%vmovdqa\t{%1, %0|%0, %1}";
        }
 
-    case 3:
-    case 4:
+    case TYPE_MULTI:
        return "#";
 
     default:
@@ -2664,7 +2675,7 @@
     }
 }
   [(set_attr "isa" "*,*,*,x64,x64")
-   (set_attr "type" "sselog1,ssemov,ssemov,*,*")
+   (set_attr "type" "sselog1,ssemov,ssemov,multi,multi")
    (set (attr "prefix")
      (if_then_else (eq_attr "type" "sselog1,ssemov")
        (const_string "maybe_vex")
@@ -2701,18 +2712,14 @@
        || (!TARGET_MEMORY_MISMATCH_STALL
           && memory_operand (operands[0], XFmode)))"
 {
-  switch (which_alternative)
+  switch (get_attr_type (insn))
     {
-    case 0:
-    case 1:
+    case TYPE_FMOV:
+      if (which_alternative == 2)
+        return standard_80387_constant_opcode (operands[1]);
       return output_387_reg_move (insn, operands);
 
-    case 2:
-      return standard_80387_constant_opcode (operands[1]);
-
-    case 3:
-    case 4:
-    case 5:
+    case TYPE_MULTI:
       return "#";
 
     default:
@@ -2748,41 +2755,28 @@
        || ((TARGET_64BIT || !TARGET_MEMORY_MISMATCH_STALL)
           && memory_operand (operands[0], DFmode)))"
 {
-  switch (which_alternative)
+  switch (get_attr_type (insn))
     {
-    case 0:
-    case 1:
+    case TYPE_FMOV:
+      if (which_alternative == 2)
+        return standard_80387_constant_opcode (operands[1]);
       return output_387_reg_move (insn, operands);
 
-    case 2:
-      return standard_80387_constant_opcode (operands[1]);
-
-    case 3:
-    case 4:
+    case TYPE_MULTI:
       return "#";
 
-    case 5:
-    case 6:
-      return "mov{q}\t{%1, %0|%0, %1}";
+    case TYPE_IMOV:
+      if (get_attr_mode (insn) == MODE_SI)
+       return "mov{l}\t{%1, %k0|%k0, %1}";
+      else if (which_alternative == 8)
+       return "movabs{q}\t{%1, %0|%0, %1}";
+      else
+       return "mov{q}\t{%1, %0|%0, %1}";
 
-    case 7:
-      return "mov{l}\t{%1, %k0|%k0, %1}";
-
-    case 8:
-      return "movabs{q}\t{%1, %0|%0, %1}";
-
-    case 9:
-    case 13:
+    case TYPE_SSELOG1:
       return standard_sse_constant_opcode (insn, operands[1]);
 
-    case 10:
-    case 11:
-    case 12:
-    case 14:
-    case 15:
-    case 16:
-    case 17:
-    case 18:
+    case TYPE_SSEMOV:
       switch (get_attr_mode (insn))
        {
        case MODE_DF:
@@ -2790,15 +2784,17 @@
            return "vmovsd\t{%1, %0, %0|%0, %0, %1}";
          return "%vmovsd\t{%1, %0|%0, %1}";
 
-       case MODE_V1DF:
-         return "%vmovlpd\t{%1, %d0|%d0, %1}";
+       case MODE_V4SF:
+         return "%vmovaps\t{%1, %0|%0, %1}";
        case MODE_V2DF:
          return "%vmovapd\t{%1, %0|%0, %1}";
+
        case MODE_V2SF:
          gcc_assert (!TARGET_AVX);
          return "movlps\t{%1, %0|%0, %1}";
-       case MODE_V4SF:
-         return "%vmovaps\t{%1, %0|%0, %1}";
+       case MODE_V1DF:
+         gcc_assert (!TARGET_AVX);
+         return "movlpd\t{%1, %0|%0, %1}";
 
        case MODE_DI:
          /* Handle broken assemblers that require movd instead of movq.  */
@@ -2858,7 +2854,7 @@
               (eq_attr "alternative" "5,6,8,17,18")
                 (const_string "DI")
 
-              /* xorps is one byte shorter for !TARGET_AVX.  */
+              /* xorps is one byte shorter for non-AVX targets.  */
               (eq_attr "alternative" "9,13")
                 (cond [(not (match_test "TARGET_SSE2"))
                          (const_string "V4SF")
@@ -2872,10 +2868,10 @@
                       (const_string "V2DF"))
 
               /* For architectures resolving dependencies on
-                 whole SSE registers use APD move to break dependency
-                 chains, otherwise use short move to avoid extra work.
+                 whole SSE registers use movapd to break dependency
+                 chains, otherwise use short move to avoid extra work.  */
 
-                 movaps encodes one byte shorter for !TARGET_AVX.  */
+              /* movaps is one byte shorter for non-AVX targets.  */
               (eq_attr "alternative" "10,14")
                 (cond [(ior (not (match_test "TARGET_SSE2"))
                             (match_test "TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL"))
@@ -2895,6 +2891,8 @@
               (eq_attr "alternative" "11,15")
                 (cond [(not (match_test "TARGET_SSE2"))
                          (const_string "V2SF")
+                       (match_test "TARGET_AVX")
+                         (const_string "DF")
                        (match_test "TARGET_SSE_SPLIT_REGS")
                          (const_string "V1DF")
                       ]
@@ -2922,46 +2920,38 @@
                   && standard_sse_constant_p (operands[1]))))
        || memory_operand (operands[0], SFmode))"
 {
-  switch (which_alternative)
+  switch (get_attr_type (insn))
     {
-    case 0:
-    case 1:
+    case TYPE_FMOV:
+      if (which_alternative == 2)
+        return standard_80387_constant_opcode (operands[1]);
       return output_387_reg_move (insn, operands);
 
-    case 2:
-      return standard_80387_constant_opcode (operands[1]);
-
-    case 3:
-    case 4:
+    case TYPE_IMOV:
       return "mov{l}\t{%1, %0|%0, %1}";
 
-    case 5:
+    case TYPE_SSELOG1:
       return standard_sse_constant_opcode (insn, operands[1]);
 
-    case 6:
-    case 7:
-    case 8:
+    case TYPE_SSEMOV:
       switch (get_attr_mode (insn))
        {
-       case MODE_V4SF:
-         return "%vmovaps\t{%1, %0|%0, %1}";
        case MODE_SF:
          if (TARGET_AVX && REG_P (operands[0]) && REG_P (operands[1]))
            return "vmovss\t{%1, %0, %0|%0, %0, %1}";
          return "%vmovss\t{%1, %0|%0, %1}";
+
+       case MODE_V4SF:
+         return "%vmovaps\t{%1, %0|%0, %1}";
+
+       case MODE_SI:
+         return "%vmovd\t{%1, %0|%0, %1}";
+
        default:
          gcc_unreachable ();
        }
 
-    case 9:
-    case 10:
-      return "%vmovd\t{%1, %0|%0, %1}";
-
-    case 11:
-    case 12:
-    case 13:
-    case 14:
-    case 15:
+    case TYPE_MMXMOV:
       if (get_attr_mode (insn) == MODE_DI)
        return "movq\t{%1, %0|%0, %1}";
       return "movd\t{%1, %0|%0, %1}";
