PPC __arch_swab32 and __arch_swab16 generate non-optimal code. They do not schedule very well: each needs to copy its input register, and swab16 needs an extra insn to clear its upper bits. Fix this with better inline asm.
Signed-off-by: Joakim Tjernlund <joakim.tjernl...@transmode.se> --- arch/powerpc/include/asm/swab.h | 28 ++++++++++++++-------------- 1 files changed, 14 insertions(+), 14 deletions(-) diff --git a/arch/powerpc/include/asm/swab.h b/arch/powerpc/include/asm/swab.h index c581e3e..3b9a200 100644 --- a/arch/powerpc/include/asm/swab.h +++ b/arch/powerpc/include/asm/swab.h @@ -61,25 +61,25 @@ static inline void __arch_swab32s(__u32 *addr) static inline __attribute_const__ __u16 __arch_swab16(__u16 value) { - __u16 result; - - __asm__("rlwimi %0,%1,8,16,23" - : "=r" (result) - : "r" (value), "0" (value >> 8)); - return result; + __asm__("rlwimi %0,%0,16,0x00ff0000\n\t" + "rlwinm %0,%0,24,0x0000ffff" + : "+r"(value)); + return value; } #define __arch_swab16 __arch_swab16 static inline __attribute_const__ __u32 __arch_swab32(__u32 value) { - __u32 result; - - __asm__("rlwimi %0,%1,24,16,23\n\t" - "rlwimi %0,%1,8,8,15\n\t" - "rlwimi %0,%1,24,0,7" - : "=r" (result) - : "r" (value), "0" (value >> 24)); - return result; + __u32 tmp; + + __asm__("rlwimi %0,%1,24,0xffffffff" + : "=r" (value) : "r" (value)); + tmp = value; + __asm__("rlwimi %0,%1,16,0x00ff0000" + : "+r" (value) : "r" (tmp)); + __asm__("rlwimi %0,%1,16,0x000000ff" + : "+r" (value) : "r" (tmp)); + return value; } #define __arch_swab32 __arch_swab32 -- 1.7.3.4 _______________________________________________ Linuxppc-dev mailing list Linuxppc-dev@lists.ozlabs.org https://lists.ozlabs.org/listinfo/linuxppc-dev