Signed-off-by: WANG Xuerui <g...@xen0n.name>
---
 tcg/loongarch/tcg-target-con-set.h |  1 +
 tcg/loongarch/tcg-target.c.inc     | 91 ++++++++++++++++++++++++++++++
 2 files changed, 92 insertions(+)
diff --git a/tcg/loongarch/tcg-target-con-set.h b/tcg/loongarch/tcg-target-con-set.h
index 417c97549a..8630d1ee6e 100644
--- a/tcg/loongarch/tcg-target-con-set.h
+++ b/tcg/loongarch/tcg-target-con-set.h
@@ -17,6 +17,7 @@
 C_O0_I1(r)
 C_O1_I1(r, r)
 C_O1_I2(r, r, r)
+C_O1_I2(r, r, ri)
 C_O1_I2(r, r, rU)
 C_O1_I2(r, r, rZ)
 C_O1_I2(r, 0, rZ)
diff --git a/tcg/loongarch/tcg-target.c.inc b/tcg/loongarch/tcg-target.c.inc
index e817964a7e..acbd0e65ef 100644
--- a/tcg/loongarch/tcg-target.c.inc
+++ b/tcg/loongarch/tcg-target.c.inc
@@ -513,6 +513,85 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         tcg_out_clzctz(s, OPC_CTZ_D, a0, a1, a2);
         break;
 
+    case INDEX_op_shl_i32:
+        if (c2) {
+            tcg_out_opc_slli_w(s, a0, a1, a2 & 0x1f);
+        } else {
+            tcg_out_opc_sll_w(s, a0, a1, a2);
+        }
+        break;
+    case INDEX_op_shl_i64:
+        if (c2) {
+            tcg_out_opc_slli_d(s, a0, a1, a2 & 0x3f);
+        } else {
+            tcg_out_opc_sll_d(s, a0, a1, a2);
+        }
+        break;
+
+    case INDEX_op_shr_i32:
+        if (c2) {
+            tcg_out_opc_srli_w(s, a0, a1, a2 & 0x1f);
+        } else {
+            tcg_out_opc_srl_w(s, a0, a1, a2);
+        }
+        break;
+    case INDEX_op_shr_i64:
+        if (c2) {
+            tcg_out_opc_srli_d(s, a0, a1, a2 & 0x3f);
+        } else {
+            tcg_out_opc_srl_d(s, a0, a1, a2);
+        }
+        break;
+
+    case INDEX_op_sar_i32:
+        if (c2) {
+            tcg_out_opc_srai_w(s, a0, a1, a2 & 0x1f);
+        } else {
+            tcg_out_opc_sra_w(s, a0, a1, a2);
+        }
+        break;
+    case INDEX_op_sar_i64:
+        if (c2) {
+            tcg_out_opc_srai_d(s, a0, a1, a2 & 0x3f);
+        } else {
+            tcg_out_opc_sra_d(s, a0, a1, a2);
+        }
+        break;
+
+    case INDEX_op_rotl_i32:
+        /* transform into equivalent rotr_i32 */
+        if (c2) {
+            a2 = 32 - a2;
+        } else {
+            tcg_out_opc_sub_w(s, a2, TCG_REG_ZERO, a2);
+            tcg_out_opc_addi_w(s, a2, a2, 32);
+        }
+        /* fallthrough */
+    case INDEX_op_rotr_i32:
+        if (c2) {
+            tcg_out_opc_rotri_w(s, a0, a1, a2 & 0x1f);
+        } else {
+            tcg_out_opc_rotr_w(s, a0, a1, a2);
+        }
+        break;
+
+    case INDEX_op_rotl_i64:
+        /* transform into equivalent rotr_i64 */
+        if (c2) {
+            a2 = 64 - a2;
+        } else {
+            tcg_out_opc_sub_w(s, a2, TCG_REG_ZERO, a2);
+            tcg_out_opc_addi_w(s, a2, a2, 64);
+        }
+        /* fallthrough */
+    case INDEX_op_rotr_i64:
+        if (c2) {
+            tcg_out_opc_rotri_d(s, a0, a1, a2 & 0x3f);
+        } else {
+            tcg_out_opc_rotr_d(s, a0, a1, a2);
+        }
+        break;
+
     case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov.  */
     case INDEX_op_mov_i64:
     default:
@@ -557,6 +636,18 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
         /* LoongArch insns for these ops don't have reg-imm forms */
         return C_O1_I2(r, r, r);
 
+    case INDEX_op_shl_i32:
+    case INDEX_op_shl_i64:
+    case INDEX_op_shr_i32:
+    case INDEX_op_shr_i64:
+    case INDEX_op_sar_i32:
+    case INDEX_op_sar_i64:
+    case INDEX_op_rotl_i32:
+    case INDEX_op_rotl_i64:
+    case INDEX_op_rotr_i32:
+    case INDEX_op_rotr_i64:
+        return C_O1_I2(r, r, ri);
+
     case INDEX_op_and_i32:
     case INDEX_op_or_i32:
     case INDEX_op_xor_i32:
-- 
2.33.0
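
For reference, the rotate-left cases above rely on the identity
rotl(x, n) == rotr(x, (width - n) mod width); that is why the constant
paths rewrite a2 as 32 - a2 / 64 - a2 before falling through to the
rotate-right handling, and why the immediate is masked with 0x1f / 0x3f.
Below is a minimal standalone C sketch of that identity; it is not part
of the patch, and rotl32/rotr32 are helper names used only for
illustration:

#include <assert.h>
#include <stdint.h>

/* Illustration-only helpers, not backend code. */
static uint32_t rotr32(uint32_t x, unsigned n)
{
    n &= 31;                                /* same masking as "a2 & 0x1f" */
    return n ? (x >> n) | (x << (32 - n)) : x;
}

static uint32_t rotl32(uint32_t x, unsigned n)
{
    /* rotl by n == rotr by (32 - n) mod 32, mirroring the fallthrough */
    return rotr32(x, (32 - n) & 31);
}

int main(void)
{
    uint32_t x = 0x12345678u;

    for (unsigned n = 0; n < 32; n++) {
        uint32_t ref = n ? (x << n) | (x >> (32 - n)) : x;
        assert(rotl32(x, n) == ref);
    }
    return 0;
}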