Hi,

Addresses in x32 are 32-bit, and x32 doesn't support indirect branches via memory.  This patch properly handles address output and call patterns for x32.  OK for trunk?
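For reference, here is a small made-up example (not part of the patch; the names are only for illustration) of the two situations the patch is about: a jump table, whose entries are emitted by ix86_output_addr_vec_elt/ix86_output_addr_diff_elt, and an indirect call through a function pointer kept in memory, which on x32 has to go through a register because there is no memory-indirect call or jump:

/* Hypothetical example, not part of the patch.  */

extern void (*handler[8]) (int);	/* Function pointers stored in memory.  */

int
dispatch (int op, int x)
{
  /* A dense switch like this can be compiled to a jump table (e.g. at
     -O2); with this patch its entries on x32 are emitted with ASM_LONG
     (padded with a zero word in the absolute case) rather than ASM_QUAD.  */
  switch (op)
    {
    case 0: return x + 1;
    case 1: return x - 1;
    case 2: return x * 2;
    case 3: return x / 2;
    case 4: return x << 1;
    case 5: return x >> 1;
    default:
      /* Indirect call: on x32 the pointer is loaded from memory into a
	 register before the call, since the memory alternative of the
	 call/jump patterns is disabled for TARGET_X32.  */
      handler[op & 7] (x);
      return 0;
    }
}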
Thanks.

H.J.
---
2011-07-18  H.J. Lu  <hongjiu...@intel.com>

	* config/i386/i386.c (ix86_output_addr_vec_elt): Check TARGET_LP64
	instead of TARGET_64BIT for ASM_QUAD.
	(ix86_output_addr_diff_elt): Likewise.

	* config/i386/i386.md (*indirect_jump): Disable for TARGET_X32.
	(*call_vzeroupper): Likewise.
	(*call): Likewise.
	(*call_value_vzeroupper): Likewise.
	(*call_value): Likewise.
	(*call_rex64_ms_sysv_vzeroupper): Check TARGET_LP64 instead of
	TARGET_64BIT.
	(*call_rex64_ms_sysv): Likewise.
	(*call_value_rex64_ms_sysv_vzeroupper): Likewise.
	(*call_value_rex64_ms_sysv): Likewise.
	(set_got_offset_rex64): Likewise.
	(*indirect_jump_x32): New.
	(*call_vzeroupper_x32): Likewise.
	(*call_x32): Likewise.
	(*call_rex64_ms_sysv_vzeroupper_x32): Likewise.
	(*call_rex64_ms_sysv_x32): Likewise.
	(*call_value_vzeroupper_x32): Likewise.
	(*call_value_x32): Likewise.
	(*call_value_rex64_ms_sysv_vzeroupper_x32): Likewise.
	(*call_value_rex64_ms_sysv_x32): Likewise.

diff --git a/gcc/config/i386/i386.c b/gcc/config/i386/i386.c
index c268899..1ed451b 100644
--- a/gcc/config/i386/i386.c
+++ b/gcc/config/i386/i386.c
@@ -14861,14 +14919,22 @@ ix86_output_addr_vec_elt (FILE *file, int value)
 {
   const char *directive = ASM_LONG;
 
+  if (TARGET_X32)
+    {
+      fprintf (file, "%s%s%d\n", directive, LPREFIX, value);
+      fprintf (file, "%s0\n", directive);
+    }
+  else
+    {
 #ifdef ASM_QUAD
-  if (TARGET_64BIT)
-    directive = ASM_QUAD;
+      if (TARGET_LP64)
+	directive = ASM_QUAD;
 #else
-  gcc_assert (!TARGET_64BIT);
+      gcc_assert (!TARGET_64BIT);
 #endif
 
-  fprintf (file, "%s%s%d\n", directive, LPREFIX, value);
+      fprintf (file, "%s%s%d\n", directive, LPREFIX, value);
+    }
 }
 
 void
@@ -14877,7 +14943,7 @@ ix86_output_addr_diff_elt (FILE *file, int value, int rel)
   const char *directive = ASM_LONG;
 
 #ifdef ASM_QUAD
-  if (TARGET_64BIT && CASE_VECTOR_MODE == DImode)
+  if (TARGET_LP64 && CASE_VECTOR_MODE == DImode)
     directive = ASM_QUAD;
 #else
   gcc_assert (!TARGET_64BIT);
diff --git a/gcc/config/i386/i386.md b/gcc/config/i386/i386.md
index cf0fdf4..36cdec1 100644
--- a/gcc/config/i386/i386.md
+++ b/gcc/config/i386/i386.md
@@ -10967,7 +10986,14 @@
 (define_insn "*indirect_jump"
   [(set (pc) (match_operand:P 0 "nonimmediate_operand" "rm"))]
-  ""
+  "!TARGET_X32"
+  "jmp\t%A0"
+  [(set_attr "type" "ibr")
+   (set_attr "length_immediate" "0")])
+
+(define_insn "*indirect_jump_x32"
+  [(set (pc) (match_operand:P 0 "register_operand" "r"))]
+  "TARGET_X32"
   "jmp\t%A0"
   [(set_attr "type" "ibr")
    (set_attr "length_immediate" "0")])
@@ -11103,7 +11129,7 @@
 	 (match_operand 1 "" ""))
    (unspec [(match_operand 2 "const_int_operand" "")]
 	   UNSPEC_CALL_NEEDS_VZEROUPPER)]
-  "TARGET_VZEROUPPER && !SIBLING_CALL_P (insn)"
+  "TARGET_VZEROUPPER && !TARGET_X32 && !SIBLING_CALL_P (insn)"
   "#"
   "&& reload_completed"
   [(const_int 0)]
@@ -11113,7 +11139,7 @@
 (define_insn "*call"
   [(call (mem:QI (match_operand:P 0 "call_insn_operand" "<c>zm"))
 	 (match_operand 1 "" ""))]
-  "!SIBLING_CALL_P (insn)"
+  "!TARGET_X32 && !SIBLING_CALL_P (insn)"
   "* return ix86_output_call_insn (insn, operands[0]);"
   [(set_attr "type" "call")])
@@ -11136,7 +11162,7 @@
      (clobber (reg:DI DI_REG))])
    (unspec [(match_operand 2 "const_int_operand" "")]
 	   UNSPEC_CALL_NEEDS_VZEROUPPER)]
-  "TARGET_VZEROUPPER && TARGET_64BIT && !SIBLING_CALL_P (insn)"
+  "TARGET_VZEROUPPER && TARGET_LP64 && !SIBLING_CALL_P (insn)"
   "#"
   "&& reload_completed"
   [(const_int 0)]
@@ -11159,7 +11185,72 @@
    (clobber (reg:TI XMM15_REG))
    (clobber (reg:DI SI_REG))
    (clobber (reg:DI DI_REG))]
-  "TARGET_64BIT && !SIBLING_CALL_P (insn)"
+  "TARGET_LP64 && !SIBLING_CALL_P (insn)"
+  "* return ix86_output_call_insn (insn, operands[0]);"
+  [(set_attr "type" "call")])
+
+(define_insn_and_split "*call_vzeroupper_x32"
+  [(call (mem:QI (match_operand:P 0 "call_insn_operand" "<c>z"))
+	 (match_operand 1 "" ""))
+   (unspec [(match_operand 2 "const_int_operand" "")]
+	   UNSPEC_CALL_NEEDS_VZEROUPPER)]
+  "TARGET_VZEROUPPER && TARGET_X32 && !SIBLING_CALL_P (insn)"
+  "#"
+  "&& reload_completed"
+  [(const_int 0)]
+  "ix86_split_call_vzeroupper (curr_insn, operands[2]); DONE;"
+  [(set_attr "type" "call")])
+
+(define_insn "*call_x32"
+  [(call (mem:QI (match_operand:P 0 "call_insn_operand" "<c>z"))
+	 (match_operand 1 "" ""))]
+  "TARGET_X32 && !SIBLING_CALL_P (insn)"
+  "* return ix86_output_call_insn (insn, operands[0]);"
+  [(set_attr "type" "call")])
+
+(define_insn_and_split "*call_rex64_ms_sysv_vzeroupper_x32"
+  [(parallel
+    [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rz"))
+	   (match_operand 1 "" ""))
+     (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
+     (clobber (reg:TI XMM6_REG))
+     (clobber (reg:TI XMM7_REG))
+     (clobber (reg:TI XMM8_REG))
+     (clobber (reg:TI XMM9_REG))
+     (clobber (reg:TI XMM10_REG))
+     (clobber (reg:TI XMM11_REG))
+     (clobber (reg:TI XMM12_REG))
+     (clobber (reg:TI XMM13_REG))
+     (clobber (reg:TI XMM14_REG))
+     (clobber (reg:TI XMM15_REG))
+     (clobber (reg:DI SI_REG))
+     (clobber (reg:DI DI_REG))])
+   (unspec [(match_operand 2 "const_int_operand" "")]
+	   UNSPEC_CALL_NEEDS_VZEROUPPER)]
+  "TARGET_VZEROUPPER && TARGET_X32 && !SIBLING_CALL_P (insn)"
+  "#"
+  "&& reload_completed"
+  [(const_int 0)]
+  "ix86_split_call_vzeroupper (curr_insn, operands[2]); DONE;"
+  [(set_attr "type" "call")])
+
+(define_insn "*call_rex64_ms_sysv_x32"
+  [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rz"))
+	 (match_operand 1 "" ""))
+   (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
+   (clobber (reg:TI XMM6_REG))
+   (clobber (reg:TI XMM7_REG))
+   (clobber (reg:TI XMM8_REG))
+   (clobber (reg:TI XMM9_REG))
+   (clobber (reg:TI XMM10_REG))
+   (clobber (reg:TI XMM11_REG))
+   (clobber (reg:TI XMM12_REG))
+   (clobber (reg:TI XMM13_REG))
+   (clobber (reg:TI XMM14_REG))
+   (clobber (reg:TI XMM15_REG))
+   (clobber (reg:DI SI_REG))
+   (clobber (reg:DI DI_REG))]
+  "TARGET_X32 && !SIBLING_CALL_P (insn)"
   "* return ix86_output_call_insn (insn, operands[0]);"
   [(set_attr "type" "call")])
@@ -11279,7 +11370,7 @@
 	      (match_operand 2 "" "")))
    (unspec [(match_operand 3 "const_int_operand" "")]
 	   UNSPEC_CALL_NEEDS_VZEROUPPER)]
-  "TARGET_VZEROUPPER && !SIBLING_CALL_P (insn)"
+  "TARGET_VZEROUPPER && !TARGET_X32 && !SIBLING_CALL_P (insn)"
   "#"
   "&& reload_completed"
   [(const_int 0)]
@@ -11290,7 +11381,28 @@
   [(set (match_operand 0 "" "")
 	(call (mem:QI (match_operand:P 1 "call_insn_operand" "<c>zm"))
 	      (match_operand 2 "" "")))]
-  "!SIBLING_CALL_P (insn)"
+  "!TARGET_X32 && !SIBLING_CALL_P (insn)"
+  "* return ix86_output_call_insn (insn, operands[1]);"
+  [(set_attr "type" "callv")])
+
+(define_insn_and_split "*call_value_vzeroupper_x32"
+  [(set (match_operand 0 "" "")
+	(call (mem:QI (match_operand:P 1 "call_insn_operand" "<c>z"))
+	      (match_operand 2 "" "")))
+   (unspec [(match_operand 3 "const_int_operand" "")]
+	   UNSPEC_CALL_NEEDS_VZEROUPPER)]
+  "TARGET_VZEROUPPER && TARGET_X32 && !SIBLING_CALL_P (insn)"
+  "#"
+  "&& reload_completed"
+  [(const_int 0)]
+  "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
+  [(set_attr "type" "callv")])
+
+(define_insn "*call_value_x32"
+  [(set (match_operand 0 "" "")
+	(call (mem:QI (match_operand:P 1 "call_insn_operand" "<c>z"))
+	      (match_operand 2 "" "")))]
+  "TARGET_X32 && !SIBLING_CALL_P (insn)"
   "* return ix86_output_call_insn (insn, operands[1]);"
   [(set_attr "type" "callv")])
@@ -11335,7 +11447,7 @@
      (clobber (reg:DI DI_REG))])
    (unspec [(match_operand 3 "const_int_operand" "")]
 	   UNSPEC_CALL_NEEDS_VZEROUPPER)]
-  "TARGET_VZEROUPPER && TARGET_64BIT && !SIBLING_CALL_P (insn)"
+  "TARGET_VZEROUPPER && TARGET_LP64 && !SIBLING_CALL_P (insn)"
  "#"
  "&& reload_completed"
  [(const_int 0)]
@@ -11359,7 +11471,55 @@
    (clobber (reg:TI XMM15_REG))
    (clobber (reg:DI SI_REG))
    (clobber (reg:DI DI_REG))]
-  "TARGET_64BIT && !SIBLING_CALL_P (insn)"
+  "TARGET_LP64 && !SIBLING_CALL_P (insn)"
+  "* return ix86_output_call_insn (insn, operands[1]);"
+  [(set_attr "type" "callv")])
+
+(define_insn_and_split "*call_value_rex64_ms_sysv_vzeroupper_x32"
+  [(parallel
+    [(set (match_operand 0 "" "")
+	  (call (mem:QI (match_operand:DI 1 "call_insn_operand" "rz"))
+		(match_operand 2 "" "")))
+     (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
+     (clobber (reg:TI XMM6_REG))
+     (clobber (reg:TI XMM7_REG))
+     (clobber (reg:TI XMM8_REG))
+     (clobber (reg:TI XMM9_REG))
+     (clobber (reg:TI XMM10_REG))
+     (clobber (reg:TI XMM11_REG))
+     (clobber (reg:TI XMM12_REG))
+     (clobber (reg:TI XMM13_REG))
+     (clobber (reg:TI XMM14_REG))
+     (clobber (reg:TI XMM15_REG))
+     (clobber (reg:DI SI_REG))
+     (clobber (reg:DI DI_REG))])
+   (unspec [(match_operand 3 "const_int_operand" "")]
+	   UNSPEC_CALL_NEEDS_VZEROUPPER)]
+  "TARGET_VZEROUPPER && TARGET_X32 && !SIBLING_CALL_P (insn)"
+  "#"
+  "&& reload_completed"
+  [(const_int 0)]
+  "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
+  [(set_attr "type" "callv")])
+
+(define_insn "*call_value_rex64_ms_sysv_x32"
+  [(set (match_operand 0 "" "")
+	(call (mem:QI (match_operand:DI 1 "call_insn_operand" "rz"))
+	      (match_operand 2 "" "")))
+   (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
+   (clobber (reg:TI XMM6_REG))
+   (clobber (reg:TI XMM7_REG))
+   (clobber (reg:TI XMM8_REG))
+   (clobber (reg:TI XMM9_REG))
+   (clobber (reg:TI XMM10_REG))
+   (clobber (reg:TI XMM11_REG))
+   (clobber (reg:TI XMM12_REG))
+   (clobber (reg:TI XMM13_REG))
+   (clobber (reg:TI XMM14_REG))
+   (clobber (reg:TI XMM15_REG))
+   (clobber (reg:DI SI_REG))
+   (clobber (reg:DI DI_REG))]
+  "TARGET_X32 && !SIBLING_CALL_P (insn)"
   "* return ix86_output_call_insn (insn, operands[1]);"
   [(set_attr "type" "callv")])
@@ -11666,7 +11826,7 @@
 	(unspec:DI
 	 [(label_ref (match_operand 1 "" ""))]
 	 UNSPEC_SET_GOT_OFFSET))]
-  "TARGET_64BIT"
+  "TARGET_LP64"
   "movabs{q}\t{$_GLOBAL_OFFSET_TABLE_-%l1, %0|%0, OFFSET FLAT:_GLOBAL_OFFSET_TABLE_-%l1}"
   [(set_attr "type" "imov")
    (set_attr "length_immediate" "0")