This patch mirrors the RISC-V submission 7bbce9b50302959286381d9177818642bceaf301.
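For context, the shape of code this helps is a branch or switch on a QImode value
that GCC already knows is sign-extended (SUBREG_PROMOTED_SIGNED_P); forcing a
zero-extension there just emits a redundant bstrpick.  A minimal sketch follows,
essentially the new switch-qi.c test with made-up names (handle/dispatch are
illustrative and not part of the patch):

  extern void handle (int);	/* illustrative callee */

  void
  dispatch (signed char x)	/* same shape as foo () in switch-qi.c */
  {
    /* The comparisons feeding this switch no longer need to re-extend x;
       the new test checks that no bstrpick is emitted for this pattern.  */
    switch (x)
      {
      case 0: handle (10); break;
      case 1: handle (11); break;
      case 2: handle (12); break;
      }
  }

Whether a compare chain or a jump table is chosen, the point is only that the
already sign-extended QImode comparand is not zero-extended again.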
gcc/ChangeLog:

	* config/loongarch/loongarch.cc (loongarch_extend_comparands):
	In the unsigned QImode test, check for a sign-extended subreg as
	the first operand and a constant or sign-extended subreg as the
	second, and sign-extend instead in that case.
	* config/loongarch/loongarch.md (BR): New mode iterator that
	also accepts QImode on TARGET_64BIT.
	(cbranch<mode>4): Use BR instead of GPR, providing cbranchqi4.

gcc/testsuite/ChangeLog:

	* gcc.target/loongarch/switch-qi.c: New test.
---
 gcc/config/loongarch/loongarch.cc              | 14 ++++++++++++--
 gcc/config/loongarch/loongarch.md              |  8 ++++++--
 gcc/testsuite/gcc.target/loongarch/switch-qi.c | 16 ++++++++++++++++
 3 files changed, 34 insertions(+), 4 deletions(-)
 create mode 100644 gcc/testsuite/gcc.target/loongarch/switch-qi.c

diff --git a/gcc/config/loongarch/loongarch.cc b/gcc/config/loongarch/loongarch.cc
index c72229cad87..7e300c826cf 100644
--- a/gcc/config/loongarch/loongarch.cc
+++ b/gcc/config/loongarch/loongarch.cc
@@ -4228,8 +4228,18 @@ loongarch_extend_comparands (rtx_code code, rtx *op0, rtx *op1)
   /* Comparisons consider all XLEN bits, so extend sub-XLEN values.  */
   if (GET_MODE_SIZE (word_mode) > GET_MODE_SIZE (GET_MODE (*op0)))
     {
-      /* TODO: checkout It is more profitable to zero-extend QImode values.  */
-      if (unsigned_condition (code) == code && GET_MODE (*op0) == QImode)
+      /* It is more profitable to zero-extend QImode values.  But not if the
+	 first operand has already been sign-extended, and the second one
+	 is a constant or has already been sign-extended also.  */
+      if (unsigned_condition (code) == code
+	  && (GET_MODE (*op0) == QImode
+	      && ! (GET_CODE (*op0) == SUBREG
+		    && SUBREG_PROMOTED_VAR_P (*op0)
+		    && SUBREG_PROMOTED_SIGNED_P (*op0)
+		    && (CONST_INT_P (*op1)
+			|| (GET_CODE (*op1) == SUBREG
+			    && SUBREG_PROMOTED_VAR_P (*op1)
+			    && SUBREG_PROMOTED_SIGNED_P (*op1))))))
 	{
 	  *op0 = gen_rtx_ZERO_EXTEND (word_mode, *op0);
 	  if (CONST_INT_P (*op1))
diff --git a/gcc/config/loongarch/loongarch.md b/gcc/config/loongarch/loongarch.md
index b37e070660f..1bb4e461b38 100644
--- a/gcc/config/loongarch/loongarch.md
+++ b/gcc/config/loongarch/loongarch.md
@@ -2733,11 +2733,15 @@ (define_insn "*branch_equality<mode>_inverted"
   [(set_attr "type" "branch")])
 
+;; Branches operate on XLEN-sized quantities, but for LoongArch64 we accept
+;; QImode values so we can force zero-extension.
+(define_mode_iterator BR [(QI "TARGET_64BIT") SI (DI "TARGET_64BIT")])
+
 (define_expand "cbranch<mode>4"
   [(set (pc)
 	(if_then_else (match_operator 0 "comparison_operator"
-		      [(match_operand:GPR 1 "register_operand")
-		       (match_operand:GPR 2 "nonmemory_operand")])
+		      [(match_operand:BR 1 "register_operand")
+		       (match_operand:BR 2 "nonmemory_operand")])
 		      (label_ref (match_operand 3 ""))
 		      (pc)))]
   ""
diff --git a/gcc/testsuite/gcc.target/loongarch/switch-qi.c b/gcc/testsuite/gcc.target/loongarch/switch-qi.c
new file mode 100644
index 00000000000..dd192fd497f
--- /dev/null
+++ b/gcc/testsuite/gcc.target/loongarch/switch-qi.c
@@ -0,0 +1,16 @@
+/* { dg-do compile } */
+/* { dg-options "-march=loongarch64 -mabi=lp64d" } */
+/* { dg-final { scan-assembler-not "bstrpick" } } */
+
+/* Test for the loongarch_extend_comparands patch.  */
+extern void asdf (int);
+void
+foo (signed char x) {
+  switch (x) {
+  case 0: asdf (10); break;
+  case 1: asdf (11); break;
+  case 2: asdf (12); break;
+  case 3: asdf (13); break;
+  case 4: asdf (14); break;
+  }
+}
-- 
2.31.1