Hello!
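
The %v prefix in the insn templates below is expanded while the opcode
is being output: it becomes a "v" mnemonic prefix for TARGET_AVX and is
dropped otherwise (the ASM_OUTPUT_AVX_PREFIX machinery in i386.h).  The
"maybe_vex" value of the "prefix" attribute is the counterpart on the
encoding-attribute side: it counts as "vex" for TARGET_AVX and as "orig"
otherwise.  A simplified, self-contained sketch of the %v expansion
(illustrative only, not the actual hook):

  #include <stdio.h>

  /* Model of %v expansion: keep the 'v' for AVX targets, skip the
     whole "%v" marker for non-AVX targets.  The real macro adjusts
     the template pointer in place during opcode output.  */
  static void
  emit_mnemonic (const char *templ, int target_avx)
  {
    if (templ[0] == '%' && templ[1] == 'v')
      templ += target_avx ? 1 : 2;
    puts (templ);
  }

  int
  main (void)
  {
    emit_mnemonic ("%vmovdqa", 0);  /* prints "movdqa" */
    emit_mnemonic ("%vmovdqa", 1);  /* prints "vmovdqa" */
    return 0;
  }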

2011-05-04  Uros Bizjak  <ubiz...@gmail.com>

        * config/i386/i386.md (*movdi_internal_rex64) <TYPE_SSEMOV>:
        Use %v prefix in insn mnemonic to handle TARGET_AVX.
        (*movdi_internal): Use "maybe_vex" instead of "vex" in "prefix"
        attribute calculation.
        (*movdf_internal): Output AVX mnemonics.  Add "prefix" attribute.
        * config/i386/sse.md (*sse2_storeq_rex64): Do not emit %v prefix
        for mov{q} mnemonic.
        (*vec_extractv2di_1_rex64_avx): Ditto.

Tested on x86_64-pc-linux-gnu {,-m32} and committed to the 4.6 and 4.5 branches.

Uros.
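
For reference, the "Handle broken assemblers" comments in the patch
refer to assemblers that reject the movq mnemonic for 64-bit moves
between general and SSE registers and only accept the movd spelling
there; both spellings assemble to the same REX.W-prefixed instruction,
so the generated code does not change.  The selection logic, distilled
into a standalone sketch (the helper and its flags are illustrative,
standing in for the GENERAL_REG_P tests on operands[0] and operands[1]):

  /* Pick the DImode move mnemonic the way the patched TYPE_SSEMOV
     case does: movd whenever a general register is involved.  */
  static const char *
  di_move_mnemonic (int dst_is_gpr, int src_is_gpr, int target_avx)
  {
    /* Handle broken assemblers that require movd instead of movq.  */
    if (dst_is_gpr || src_is_gpr)
      return target_avx ? "vmovd" : "movd";
    return target_avx ? "vmovq" : "movq";
  }
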
Index: i386.md
===================================================================
--- i386.md     (revision 173377)
+++ i386.md     (working copy)
@@ -1974,21 +1974,15 @@
        return "movdq2q\t{%1, %0|%0, %1}";
 
     case TYPE_SSEMOV:
-      if (TARGET_AVX)
-       {
-         if (get_attr_mode (insn) == MODE_TI)
-           return "vmovdqa\t{%1, %0|%0, %1}";
-         else
-           return "vmovq\t{%1, %0|%0, %1}";
-       }
-
       if (get_attr_mode (insn) == MODE_TI)
-       return "movdqa\t{%1, %0|%0, %1}";
-      /* FALLTHRU */
+       return "%vmovdqa\t{%1, %0|%0, %1}";
+      /* Handle broken assemblers that require movd instead of movq.  */
+      if (GENERAL_REG_P (operands[0]) || GENERAL_REG_P (operands[1]))
+       return "%vmovd\t{%1, %0|%0, %1}";
+      return "%vmovq\t{%1, %0|%0, %1}";
 
     case TYPE_MMXMOV:
-      /* Moves from and into integer register is done using movd
-        opcode with REX prefix.  */
+      /* Handle broken assemblers that require movd instead of movq.  */
       if (GENERAL_REG_P (operands[0]) || GENERAL_REG_P (operands[1]))
        return "movd\t{%1, %0|%0, %1}";
       return "movq\t{%1, %0|%0, %1}";
@@ -2108,7 +2102,7 @@
   [(set_attr "type"
      "*,*,mmx,mmxmov,mmxmov,sselog1,ssemov,ssemov,ssemov,sselog1,ssemov,ssemov,ssemov")
    (set (attr "prefix")
      (if_then_else (eq_attr "alternative" "5,6,7,8")
-       (const_string "vex")
+       (const_string "maybe_vex")
        (const_string "orig")))
    (set_attr "mode" "DI,DI,DI,DI,DI,TI,DI,TI,DI,V4SF,V2SF,V4SF,V2SF")])
 
@@ -2997,7 +2991,8 @@
 
     case 11:
     case 12:
-    return "%vmovd\t{%1, %0|%0, %1}";
+      /* Handle broken assemblers that require movd instead of movq.  */
+      return "%vmovd\t{%1, %0|%0, %1}";
 
     default:
       gcc_unreachable();
@@ -3106,17 +3101,17 @@
       switch (get_attr_mode (insn))
        {
        case MODE_V4SF:
-         return "xorps\t%0, %0";
+         return "%vxorps\t%0, %d0";
        case MODE_V2DF:
          if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
-           return "xorps\t%0, %0";
+           return "%vxorps\t%0, %d0";
          else
-           return "xorpd\t%0, %0";
+           return "%vxorpd\t%0, %d0";
        case MODE_TI:
          if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
-           return "xorps\t%0, %0";
+           return "%vxorps\t%0, %d0";
          else
-           return "pxor\t%0, %0";
+           return "%vpxor\t%0, %d0";
        default:
          gcc_unreachable ();
        }
@@ -3126,34 +3121,62 @@
       switch (get_attr_mode (insn))
        {
        case MODE_V4SF:
-         return "movaps\t{%1, %0|%0, %1}";
+         return "%vmovaps\t{%1, %0|%0, %1}";
        case MODE_V2DF:
          if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
-           return "movaps\t{%1, %0|%0, %1}";
+           return "%vmovaps\t{%1, %0|%0, %1}";
          else
-           return "movapd\t{%1, %0|%0, %1}";
+           return "%vmovapd\t{%1, %0|%0, %1}";
        case MODE_TI:
          if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
-           return "movaps\t{%1, %0|%0, %1}";
+           return "%vmovaps\t{%1, %0|%0, %1}";
          else
-           return "movdqa\t{%1, %0|%0, %1}";
+           return "%vmovdqa\t{%1, %0|%0, %1}";
        case MODE_DI:
-         return "movq\t{%1, %0|%0, %1}";
+         return "%vmovq\t{%1, %0|%0, %1}";
        case MODE_DF:
-         return "movsd\t{%1, %0|%0, %1}";
+         if (TARGET_AVX)
+           {
+             if (REG_P (operands[0]) && REG_P (operands[1]))
+               return "vmovsd\t{%1, %0, %0|%0, %0, %1}";
+             else
+               return "vmovsd\t{%1, %0|%0, %1}";
+           }
+         else
+           return "movsd\t{%1, %0|%0, %1}";
        case MODE_V1DF:
-         return "movlpd\t{%1, %0|%0, %1}";
+         if (TARGET_AVX)
+           {
+             if (REG_P (operands[0]))
+               return "vmovlpd\t{%1, %0, %0|%0, %0, %1}";
+             else
+               return "vmovlpd\t{%1, %0|%0, %1}";
+           }
+         else
+           return "movlpd\t{%1, %0|%0, %1}";
        case MODE_V2SF:
-         return "movlps\t{%1, %0|%0, %1}";
+         if (TARGET_AVX)
+           {
+             if (REG_P (operands[0]))
+               return "vmovlps\t{%1, %0, %0|%0, %0, %1}";
+             else
+               return "vmovlps\t{%1, %0|%0, %1}";
+           }
+         else
+           return "movlps\t{%1, %0|%0, %1}";
        default:
          gcc_unreachable ();
        }
 
     default:
-      gcc_unreachable();
+      gcc_unreachable ();
     }
 }
   [(set_attr "type" "fmov,fmov,fmov,multi,multi,sselog1,ssemov,ssemov,ssemov")
+   (set (attr "prefix")
+     (if_then_else (eq_attr "alternative" "0,1,2,3,4")
+       (const_string "orig")
+       (const_string "maybe_vex")))
    (set (attr "prefix_data16")
      (if_then_else (eq_attr "mode" "V1DF")
        (const_string "1")
@@ -3441,12 +3464,13 @@
 
     case 9: case 10: case 14: case 15:
       return "movd\t{%1, %0|%0, %1}";
-    case 12: case 13:
-      return "%vmovd\t{%1, %0|%0, %1}";
 
     case 11:
       return "movq\t{%1, %0|%0, %1}";
 
+    case 12: case 13:
+      return "%vmovd\t{%1, %0|%0, %1}";
+
     default:
       gcc_unreachable ();
     }
Index: mmx.md
===================================================================
--- mmx.md      (revision 173377)
+++ mmx.md      (working copy)
@@ -63,6 +63,7 @@
   DONE;
 })
 
+;; movd instead of movq is required to handle broken assemblers.
 (define_insn "*mov<mode>_internal_rex64"
   [(set (match_operand:MMXMODEI8 0 "nonimmediate_operand"
         "=rm,r,!?y,!y,!?y,m  ,!y ,*Y2,x,x ,m,r ,Yi")
@@ -196,6 +197,7 @@
        (const_string "orig")))
    (set_attr "mode" "DI,DI,DI,DI,DI,DI,DI,DI,V4SF,V4SF,V2SF,V2SF,DI,DI")])
 
+;; movd instead of movq is required to handle broken assemblers.
 (define_insn "*movv2sf_internal_rex64"
   [(set (match_operand:V2SF 0 "nonimmediate_operand"
         "=rm,r,!?y,!y,!?y,m  ,!y ,*Y2,x,x,x,m,r ,Yi")
Index: sse.md
===================================================================
--- sse.md      (revision 173377)
+++ sse.md      (working copy)
@@ -7434,9 +7434,8 @@
   "@
    #
    #
-   %vmov{q}\t{%1, %0|%0, %1}"
+   mov{q}\t{%1, %0|%0, %1}"
   [(set_attr "type" "*,*,imov")
-   (set_attr "prefix" "*,*,maybe_vex")
    (set_attr "mode" "*,*,DI")])
 
 (define_insn "*sse2_storeq"
@@ -7472,11 +7471,11 @@
    vmovhps\t{%1, %0|%0, %1}
    vpsrldq\t{$8, %1, %0|%0, %1, 8}
    vmovq\t{%H1, %0|%0, %H1}
-   vmov{q}\t{%H1, %0|%0, %H1}"
+   mov{q}\t{%H1, %0|%0, %H1}"
   [(set_attr "type" "ssemov,sseishft1,ssemov,imov")
    (set_attr "length_immediate" "*,1,*,*")
    (set_attr "memory" "*,none,*,*")
-   (set_attr "prefix" "vex")
+   (set_attr "prefix" "vex,vex,vex,orig")
    (set_attr "mode" "V2SF,TI,TI,DI")])
 
 (define_insn "*vec_extractv2di_1_rex64"
@@ -7754,6 +7753,7 @@
        (const_string "vex")))
    (set_attr "mode" "TI,TI,TI,TI,TI,V2SF")])
 
+;; movd instead of movq is required to handle broken assemblers.
 (define_insn "*vec_concatv2di_rex64_sse4_1"
   [(set (match_operand:V2DI 0 "register_operand"     "=x ,x ,Yi,!x,x,x,x")
        (vec_concat:V2DI
@@ -7774,6 +7774,7 @@
    (set_attr "length_immediate" "1,*,*,*,*,*,*")
    (set_attr "mode" "TI,TI,TI,TI,TI,V4SF,V2SF")])
 
+;; movd instead of movq is required to handle broken assemblers.
 (define_insn "*vec_concatv2di_rex64_sse"
   [(set (match_operand:V2DI 0 "register_operand"     "=Y2 ,Yi,!Y2,Y2,x,x")
        (vec_concat:V2DI

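P.S.: A note on the *movdf_internal changes: in their merging
(register-destination) forms the VEX-encoded vmovsd/vmovlpd/vmovlps
take an extra source operand that supplies the bits the move does not
write.  The new templates pass the destination itself there
("{%1, %0, %0|%0, %0, %1}"), so the untouched bits are preserved
exactly as with the non-AVX mnemonics.  Distilled into a sketch (the
helper is illustrative, not GCC code):

  /* Mirror of the MODE_DF case: for AVX, the register-to-register
     form needs %0 twice; the load/store forms stay two-operand.  */
  static const char *
  df_move_template (int target_avx, int dst_is_reg, int src_is_reg)
  {
    if (!target_avx)
      return "movsd\t{%1, %0|%0, %1}";
    return (dst_is_reg && src_is_reg)
           ? "vmovsd\t{%1, %0, %0|%0, %0, %1}"
           : "vmovsd\t{%1, %0|%0, %1}";
  }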