On Wed, Jul 20, 2011 at 9:42 AM, Richard Henderson <r...@redhat.com> wrote:
>> -  ix86_print_operand (file, x, 0);
>> +  /* Always use 64bit register for indirect branch.  */
>> +  if (REG_P (x) && TARGET_64BIT)
>> +    print_reg (x, 'q', file);
>> +  else
>> +    ix86_print_operand (file, x, 0);
>>      return;
>
> This bit is because of ...
>
>> +(define_insn "*tablejump_1_x32"
>> +  [(set (pc) (zero_extend:DI
>> +               (match_operand:SI 0 "register_operand" "r")))
>> +   (use (label_ref (match_operand 1 "" "")))]
>> +  "TARGET_X32"
>> +  "jmp\t%A0"
>> +  [(set_attr "type" "ibr")
>> +   (set_attr "length_immediate" "0")])
>
> ... this bit, am I right?
>
> Well, clearly the representation here is incorrect, or
> you wouldn't have needed the hack in print_operand.
>
> The jmp instruction does not zero_extend, does it?  It
> really looks at the entire 64-bit register.  Now, while
> generally we've just loaded the register from memory,
> and while generally it ought to be zero_extended in the
> register because of that, this is still an incorrect
> representation.
>
> You should not require a different tablejump_1.  The
> existing Pmode one is sufficient.
>
> What needs changing is the tablejump expander.
> Something like
>
>   if (TARGET_X32)
>     operands[0] = convert_memory_address (Pmode, operands[0]);
>   if (flag_pic)
>     {
>       ...
>     }
>
>
> r~
>
I am testing this patch.  OK for trunk if it works?

Thanks.

-- 
H.J.
----
2011-07-20  H.J. Lu  <hongjiu...@intel.com>
	    Uros Bizjak  <ubiz...@gmail.com>
	    Richard Henderson  <r...@redhat.com>

	* config/i386/constraints.md (w): New.

	* config/i386/i386.c (ix86_output_addr_vec_elt): Check TARGET_LP64
	instead of TARGET_64BIT for ASM_QUAD.

	* config/i386/i386.h (CASE_VECTOR_MODE): Check TARGET_LP64 instead
	of TARGET_64BIT.

	* config/i386/i386.md (indirect_jump): Replace nonimmediate_operand
	with indirect_branch_operand.
	(*indirect_jump): Likewise.  Replace constraint "m" with "w".
	(tablejump): Replace nonimmediate_operand with
	indirect_branch_operand.  Convert operand 0 to Pmode for x32.
	(*tablejump_1): Replace nonimmediate_operand with
	indirect_branch_operand.  Replace constraint "m" with "w".
	(*call_vzeroupper): Replace constraint "m" with "w".
	(*call): Likewise.
	(*call_rex64_ms_sysv_vzeroupper): Likewise.
	(*call_rex64_ms_sysv): Likewise.
	(*call_value_vzeroupper): Likewise.
	(*call_value): Likewise.
	(*call_value_rex64_ms_sysv_vzeroupper): Likewise.
	(*call_value_rex64_ms_sysv): Likewise.
	(set_got_offset_rex64): Check TARGET_LP64 instead of TARGET_64BIT.

	* config/i386/predicates.md (indirect_branch_operand): New.
	(call_insn_operand): Support x32.
diff --git a/gcc/config/i386/constraints.md b/gcc/config/i386/constraints.md
index 099c2e1..0b53c76 100644
--- a/gcc/config/i386/constraints.md
+++ b/gcc/config/i386/constraints.md
@@ -19,7 +19,7 @@
 ;;; Unused letters:
 ;;;     B     H           T  W
-;;;           h jk          vw
+;;;           h jk          v
 
 ;; Integer register constraints.
 ;; It is not necessary to define 'r' here.
@@ -127,6 +127,11 @@
   "@internal Constant call address operand."
   (match_operand 0 "constant_call_address_operand"))
 
+(define_constraint "w"
+  "@internal Call memory operand."
+  (and (match_test "!TARGET_X32")
+       (match_operand 0 "memory_operand")))
+
 ;; Integer constant constraints.
 (define_constraint "I"
   "Integer constant in the range 0 @dots{} 31, for 32-bit shifts."
diff --git a/gcc/config/i386/i386.c b/gcc/config/i386/i386.c
index 814250f..931b066 100644
--- a/gcc/config/i386/i386.c
+++ b/gcc/config/i386/i386.c
@@ -14852,7 +14908,7 @@ ix86_output_addr_vec_elt (FILE *file, int value)
   const char *directive = ASM_LONG;
 
 #ifdef ASM_QUAD
-  if (TARGET_64BIT)
+  if (TARGET_LP64)
     directive = ASM_QUAD;
 #else
   gcc_assert (!TARGET_64BIT);
diff --git a/gcc/config/i386/i386.h b/gcc/config/i386/i386.h
index 47c1388..20c9a8f 100644
--- a/gcc/config/i386/i386.h
+++ b/gcc/config/i386/i386.h
@@ -1674,7 +1674,7 @@ typedef struct ix86_args {
 /* Specify the machine mode that this machine uses
    for the index in the tablejump instruction.  */
 #define CASE_VECTOR_MODE \
- (!TARGET_64BIT || (flag_pic && ix86_cmodel != CM_LARGE_PIC) ? SImode : DImode)
+ (!TARGET_LP64 || (flag_pic && ix86_cmodel != CM_LARGE_PIC) ? SImode : DImode)
 
 /* Define this as 1 if `char' should by default be signed; else as 0.  */
 #define DEFAULT_SIGNED_CHAR 1
 
diff --git a/gcc/config/i386/i386.md b/gcc/config/i386/i386.md
index cf0fdf4..6e1dbb8 100644
--- a/gcc/config/i386/i386.md
+++ b/gcc/config/i386/i386.md
@@ -10963,20 +10982,23 @@
    (set_attr "modrm" "0")])
 
 (define_expand "indirect_jump"
-  [(set (pc) (match_operand 0 "nonimmediate_operand" ""))])
+  [(set (pc) (match_operand 0 "indirect_branch_operand" ""))])
 
 (define_insn "*indirect_jump"
-  [(set (pc) (match_operand:P 0 "nonimmediate_operand" "rm"))]
+  [(set (pc) (match_operand:P 0 "indirect_branch_operand" "rw"))]
   ""
   "jmp\t%A0"
   [(set_attr "type" "ibr")
    (set_attr "length_immediate" "0")])
 
 (define_expand "tablejump"
-  [(parallel [(set (pc) (match_operand 0 "nonimmediate_operand" ""))
+  [(parallel [(set (pc) (match_operand 0 "indirect_branch_operand" ""))
              (use (label_ref (match_operand 1 "" "")))])]
   ""
 {
+  if (TARGET_X32)
+    operands[0] = convert_memory_address (Pmode, operands[0]);
+
   /* In PIC mode, the table entries are stored GOT (32-bit) or PC (64-bit)
      relative.  Convert the relative address to an absolute address.  */
   if (flag_pic)
@@ -11011,7 +11033,7 @@
 })
 
 (define_insn "*tablejump_1"
-  [(set (pc) (match_operand:P 0 "nonimmediate_operand" "rm"))
+  [(set (pc) (match_operand:P 0 "indirect_branch_operand" "rw"))
    (use (label_ref (match_operand 1 "" "")))]
   ""
   "jmp\t%A0"
@@ -11099,7 +11126,7 @@
 })
 
 (define_insn_and_split "*call_vzeroupper"
-  [(call (mem:QI (match_operand:P 0 "call_insn_operand" "<c>zm"))
+  [(call (mem:QI (match_operand:P 0 "call_insn_operand" "<c>zw"))
         (match_operand 1 "" ""))
    (unspec [(match_operand 2 "const_int_operand" "")]
            UNSPEC_CALL_NEEDS_VZEROUPPER)]
@@ -11111,7 +11138,7 @@
   [(set_attr "type" "call")])
 
 (define_insn "*call"
-  [(call (mem:QI (match_operand:P 0 "call_insn_operand" "<c>zm"))
+  [(call (mem:QI (match_operand:P 0 "call_insn_operand" "<c>zw"))
         (match_operand 1 "" ""))]
   "!SIBLING_CALL_P (insn)"
   "* return ix86_output_call_insn (insn, operands[0]);"
@@ -11119,7 +11146,7 @@
 
 (define_insn_and_split "*call_rex64_ms_sysv_vzeroupper"
   [(parallel
-    [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rzm"))
+    [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rzw"))
           (match_operand 1 "" ""))
      (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
      (clobber (reg:TI XMM6_REG))
@@ -11144,7 +11171,7 @@
   [(set_attr "type" "call")])
 
 (define_insn "*call_rex64_ms_sysv"
-  [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rzm"))
+  [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rzw"))
         (match_operand 1 "" ""))
    (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
    (clobber (reg:TI XMM6_REG))
@@ -11275,7 +11302,7 @@
 
 (define_insn_and_split "*call_value_vzeroupper"
   [(set (match_operand 0 "" "")
-       (call (mem:QI (match_operand:P 1 "call_insn_operand" "<c>zm"))
+       (call (mem:QI (match_operand:P 1 "call_insn_operand" "<c>zw"))
             (match_operand 2 "" "")))
    (unspec [(match_operand 3 "const_int_operand" "")]
            UNSPEC_CALL_NEEDS_VZEROUPPER)]
@@ -11288,7 +11315,7 @@
 
 (define_insn "*call_value"
   [(set (match_operand 0 "" "")
-       (call (mem:QI (match_operand:P 1 "call_insn_operand" "<c>zm"))
+       (call (mem:QI (match_operand:P 1 "call_insn_operand" "<c>zw"))
             (match_operand 2 "" "")))]
   "!SIBLING_CALL_P (insn)"
   "* return ix86_output_call_insn (insn, operands[1]);"
@@ -11318,7 +11345,7 @@
 
 (define_insn_and_split "*call_value_rex64_ms_sysv_vzeroupper"
   [(parallel
    [(set (match_operand 0 "" "")
-        (call (mem:QI (match_operand:DI 1 "call_insn_operand" "rzm"))
+        (call (mem:QI (match_operand:DI 1 "call_insn_operand" "rzw"))
              (match_operand 2 "" "")))
     (unspec [(const_int 0)]
            UNSPEC_MS_TO_SYSV_CALL)
     (clobber (reg:TI XMM6_REG))
@@ -11344,7 +11371,7 @@
 
 (define_insn "*call_value_rex64_ms_sysv"
   [(set (match_operand 0 "" "")
-       (call (mem:QI (match_operand:DI 1 "call_insn_operand" "rzm"))
+       (call (mem:QI (match_operand:DI 1 "call_insn_operand" "rzw"))
             (match_operand 2 "" "")))
    (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
    (clobber (reg:TI XMM6_REG))
@@ -11666,7 +11693,7 @@
         (unspec:DI [(label_ref (match_operand 1 "" ""))]
                    UNSPEC_SET_GOT_OFFSET))]
-  "TARGET_64BIT"
+  "TARGET_LP64"
  "movabs{q}\t{$_GLOBAL_OFFSET_TABLE_-%l1, %0|%0, OFFSET FLAT:_GLOBAL_OFFSET_TABLE_-%l1}"
   [(set_attr "type" "imov")
    (set_attr "length_immediate" "0")
diff --git a/gcc/config/i386/predicates.md b/gcc/config/i386/predicates.md
index 2c75147..8b87753 100644
--- a/gcc/config/i386/predicates.md
+++ b/gcc/config/i386/predicates.md
@@ -563,11 +569,18 @@
   (ior (match_operand 0 "register_no_elim_operand")
        (match_operand 0 "immediate_operand")))
 
+;; Test for a valid operand for indirect branch.
+(define_predicate "indirect_branch_operand"
+  (if_then_else (match_test "TARGET_X32")
+    (match_operand 0 "register_operand")
+    (match_operand 0 "nonimmediate_operand")))
+
 ;; Test for a valid operand for a call instruction.
 (define_predicate "call_insn_operand"
   (ior (match_operand 0 "constant_call_address_operand")
        (match_operand 0 "call_register_no_elim_operand")
-       (match_operand 0 "memory_operand")))
+       (and (match_test "!TARGET_X32")
+            (match_operand 0 "memory_operand"))))
 
 ;; Similarly, but for tail calls, in which we cannot allow memory references.
 (define_predicate "sibcall_insn_operand"
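
For reference, a small C example along these lines (only a sketch, not a
testcase taken from this patch) exercises the affected patterns when built
with -mx32 -O2: a dense switch is normally compiled through a jump table,
i.e. the tablejump expander and *tablejump_1, and the computed goto expands
through indirect_jump, which on x32 must use the full 64-bit register.

/* Dense switch: with -O2 this is normally lowered to a jump table,
   going through the tablejump expander and *tablejump_1.  */
int
dispatch (int i)
{
  switch (i)
    {
    case 0: return 10;
    case 1: return 11;
    case 2: return 12;
    case 3: return 13;
    case 4: return 14;
    case 5: return 15;
    case 6: return 16;
    case 7: return 17;
    default: return -1;
    }
}

/* Computed goto (GNU C labels-as-values): "goto *p" expands through
   the indirect_jump pattern.  */
int
indirect (int i)
{
  static void *tab[] = { &&a, &&b };
  void *p = tab[i & 1];
  goto *p;
 a: return 1;
 b: return 2;
}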