This patch removes some workarounds that were required for old versions
of the LLVM assembler. The minimum supported version is now LLVM 13.0.1,
so the workarounds are no longer needed.
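For anyone who hasn't looked at these patterns recently, here's roughly
what changes in the emitted assembly, using the jump pattern as an
example (a sketch, not taken from a compiler dump; if I've read the
SOP1 encoding right, 0xbe9600fd is the hand-encoded form of
"s_mov_b32 s22, scc", with SCC being source operand 253):

  ; before: hand-encoded because old assemblers rejected SCC as a source
  ; s_mov_b32 s22, scc is not supported by the assembler.
  .long 0xbe9600fd

  ; after: the assembler now accepts the mnemonic directly
  s_mov_b32 s22, scc

To sanity-check an assembler, something along these lines should
assemble cleanly with LLVM 13.0.1 or newer (adjust -mcpu for your
target):

  echo 's_mov_b32 s22, scc' | llvm-mc -triple=amdgcn-amd-amdhsa -mcpu=gfx908 -show-encoding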
Andrew
amdgcn: remove obsolete assembler workarounds
These workarounds are no longer required now that the minimum supported
assembler version is LLVM 13.0.1.
gcc/ChangeLog:
* config/gcn/gcn.md (*movbi): Remove assembler bug workarounds.
(jump): Likewise.
(cjump): Likewise.
(movdi_symbol_save_scc): Likewise.
diff --git a/gcc/config/gcn/gcn.md b/gcc/config/gcn/gcn.md
index 53e846e15d1..033c1708e88 100644
--- a/gcc/config/gcn/gcn.md
+++ b/gcc/config/gcn/gcn.md
@@ -481,14 +481,7 @@ (define_insn "*movbi"
we emit bytes directly as a workaround. */
switch (which_alternative) {
case 0:
- if (REG_P (operands[1]) && REGNO (operands[1]) == SCC_REG)
- return "; s_mov_b32\t%0,%1 is not supported by the assembler.\;"
- ".byte\t0xfd\;"
- ".byte\t0x0\;"
- ".byte\t0x80|%R0\;"
- ".byte\t0xbe";
- else
- return "s_mov_b32\t%0, %1";
+ return "s_mov_b32\t%0, %1";
case 1:
if (REG_P (operands[1]) && REGNO (operands[1]) == SCC_REG)
return "; v_mov_b32\t%0, %1\;"
@@ -505,16 +498,8 @@ (define_insn "*movbi"
case 4:
return "v_cmp_ne_u32\tvcc, 0, %1";
case 5:
- if (REGNO (operands[1]) == SCC_REG)
- return "; s_mov_b32\t%0, %1 is not supported by the assembler.\;"
- ".byte\t0xfd\;"
- ".byte\t0x0\;"
- ".byte\t0xea\;"
- ".byte\t0xbe\;"
- "s_mov_b32\tvcc_hi, 0";
- else
- return "s_mov_b32\tvcc_lo, %1\;"
- "s_mov_b32\tvcc_hi, 0";
+ return "s_mov_b32\tvcc_lo, %1\;"
+ "s_mov_b32\tvcc_hi, 0";
case 6:
return "s_load_dword\t%0, %A1\;s_waitcnt\tlgkmcnt(0)";
case 7:
@@ -739,8 +724,7 @@ (define_insn "jump"
return "s_branch\t%0";
else
/* !!! This sequence clobbers EXEC_SAVE_REG and CC_SAVE_REG. */
- return "; s_mov_b32\ts22, scc is not supported by the assembler.\;"
- ".long\t0xbe9600fd\;"
+ return "s_mov_b32\ts22, scc\;"
"s_getpc_b64\ts[20:21]\;"
"s_add_u32\ts20, s20, %0@rel32@lo+4\;"
"s_addc_u32\ts21, s21, %0@rel32@hi+4\;"
@@ -801,11 +785,7 @@ (define_insn "cjump"
}
else
return "s_cbranch%c1\t.Lskip%=\;"
- "; s_mov_b32\ts22, scc is not supported by the assembler.\;"
- ".byte\t0xfd\;"
- ".byte\t0x0\;"
- ".byte\t0x80|22\;"
- ".byte\t0xbe\;"
+ "s_mov_b32\ts22, scc\;"
"s_getpc_b64\ts[20:21]\;"
"s_add_u32\ts20, s20, %0@rel32@lo+4\;"
"s_addc_u32\ts21, s21, %0@rel32@hi+4\;"
@@ -890,8 +870,7 @@ (define_insn "movdi_symbol_save_scc"
if (SYMBOL_REF_P (operands[1])
&& SYMBOL_REF_WEAK (operands[1]))
- return "; s_mov_b32\ts22, scc is not supported by the assembler.\;"
- ".long\t0xbe9600fd\;"
+ return "s_mov_b32\ts22, scc\;"
"s_getpc_b64\t%0\;"
"s_add_u32\t%L0, %L0, %1@gotpcrel32@lo+4\;"
"s_addc_u32\t%H0, %H0, %1@gotpcrel32@hi+4\;"
@@ -899,8 +878,7 @@ (define_insn "movdi_symbol_save_scc"
"s_cmpk_lg_u32\ts22, 0\;"
"s_waitcnt\tlgkmcnt(0)";
- return "; s_mov_b32\ts22, scc is not supported by the assembler.\;"
- ".long\t0xbe9600fd\;"
+ return "s_mov_b32\ts22, scc\;"
"s_getpc_b64\t%0\;"
"s_add_u32\t%L0, %L0, %1@rel32@lo+4\;"
"s_addc_u32\t%H0, %H0, %1@rel32@hi+4\;"