Hi,

Now for the rotates and shifts.
Tested in series on aarch64-none-elf.

OK for stage 1?

Thanks,
James

---

2014-03-27  James Greenhalgh  <james.greenha...@arm.com>
	    Philipp Tomsich  <philipp.toms...@theobroma-systems.com>

	* config/aarch64/aarch64.c (aarch64_rtx_costs): Improve costs
	for rotates and shifts.
diff --git a/gcc/config/aarch64/aarch64.c b/gcc/config/aarch64/aarch64.c
index 3c3dd6d..a8de1e3 100644
--- a/gcc/config/aarch64/aarch64.c
+++ b/gcc/config/aarch64/aarch64.c
@@ -5286,21 +5286,59 @@ cost_minus:
 	*cost += extra_cost->alu.extend;
       return false;
 
+    case ASHIFT:
+      op0 = XEXP (x, 0);
+      op1 = XEXP (x, 1);
+
+      if (CONST_INT_P (op1))
+        {
+          /* LSL (immediate), UBFM, UBFIZ and friends.  These are all
+             aliases.  */
+          if (speed)
+            *cost += extra_cost->alu.shift;
+
+          /* We can incorporate zero/sign extend for free.  */
+          if (GET_CODE (op0) == ZERO_EXTEND
+              || GET_CODE (op0) == SIGN_EXTEND)
+            op0 = XEXP (op0, 0);
+
+          *cost += rtx_cost (op0, ASHIFT, 0, speed);
+          return true;
+        }
+      else
+        {
+          /* LSLV.  */
+          if (speed)
+            *cost += extra_cost->alu.shift_reg;
+
+          return false;  /* All arguments need to be in registers.  */
+        }
+
     case ROTATE:
-      if (!CONST_INT_P (XEXP (x, 1)))
-	*cost += COSTS_N_INSNS (2);
-      /* Fall through.  */
     case ROTATERT:
     case LSHIFTRT:
-    case ASHIFT:
     case ASHIFTRT:
+      op0 = XEXP (x, 0);
+      op1 = XEXP (x, 1);
 
-      /* Shifting by a register often takes an extra cycle.  */
-      if (speed && !CONST_INT_P (XEXP (x, 1)))
-	*cost += extra_cost->alu.arith_shift_reg;
+      if (CONST_INT_P (op1))
+        {
+          /* ASR (immediate) and friends.  */
+          if (speed)
+            *cost += extra_cost->alu.shift;
 
-      *cost += rtx_cost (XEXP (x, 0), ASHIFT, 0, speed);
-      return true;
+          *cost += rtx_cost (op0, (enum rtx_code) code, 0, speed);
+          return true;
+        }
+      else
+        {
+          /* ASR (register) and friends.  */
+          if (speed)
+            *cost += extra_cost->alu.shift_reg;
+
+          return false;  /* All arguments need to be in registers.  */
+        }
 
     case HIGH:
       if (!CONSTANT_P (XEXP (x, 0)))
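
For reviewers who want a concrete picture of the cases being costed, below is a
small, hypothetical C example (not part of the patch; the function names are
invented for illustration). At -O2 an aarch64 compiler would typically select
the instruction forms named in the new comments, though the exact selection
depends on compiler version and tuning.

#include <stdint.h>

/* Shift by an immediate: LSL (immediate), an alias of UBFM;
   costed with extra_cost->alu.shift.  */
uint64_t
shl_imm (uint64_t x)
{
  return x << 3;
}

/* Shift of a zero-extended value by an immediate: the extend can fold
   into a single UBFIZ, which is why the ASHIFT case strips a
   ZERO_EXTEND/SIGN_EXTEND before costing op0.  */
uint64_t
shl_zext_imm (uint32_t x)
{
  return (uint64_t) x << 3;
}

/* Shift by a register amount: LSLV; costed with
   extra_cost->alu.shift_reg.  */
uint64_t
shl_reg (uint64_t x, unsigned int n)
{
  return x << (n & 63);
}

/* Arithmetic right shift by an immediate: ASR (immediate).  */
int64_t
asr_imm (int64_t x)
{
  return x >> 7;
}

/* Rotate by a register amount: typically recognized as ROTATERT and
   emitted as RORV.  */
uint64_t
ror_reg (uint64_t x, unsigned int n)
{
  n &= 63;
  return (x >> n) | (x << ((64 - n) & 63));
}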