Author: andrew
Date: Sat Mar 16 02:48:49 2013
New Revision: 248361
URL: http://svnweb.freebsd.org/changeset/base/248361

Log:
  Add an END macro to ARM. This is mostly used to tell gas where the bounds
  of the functions are when creating the EABI unwind tables.

Modified:
  head/sys/arm/arm/bcopy_page.S
  head/sys/arm/arm/bcopyinout.S
  head/sys/arm/arm/bcopyinout_xscale.S
  head/sys/arm/arm/blockio.S
  head/sys/arm/arm/bus_space_asm_generic.S
  head/sys/arm/arm/copystr.S
  head/sys/arm/arm/cpufunc_asm.S
  head/sys/arm/arm/cpufunc_asm_arm10.S
  head/sys/arm/arm/cpufunc_asm_arm11.S
  head/sys/arm/arm/cpufunc_asm_arm11x6.S
  head/sys/arm/arm/cpufunc_asm_arm7tdmi.S
  head/sys/arm/arm/cpufunc_asm_arm8.S
  head/sys/arm/arm/cpufunc_asm_arm9.S
  head/sys/arm/arm/cpufunc_asm_armv4.S
  head/sys/arm/arm/cpufunc_asm_armv5.S
  head/sys/arm/arm/cpufunc_asm_armv5_ec.S
  head/sys/arm/arm/cpufunc_asm_armv6.S
  head/sys/arm/arm/cpufunc_asm_armv7.S
  head/sys/arm/arm/cpufunc_asm_fa526.S
  head/sys/arm/arm/cpufunc_asm_ixp12x0.S
  head/sys/arm/arm/cpufunc_asm_pj4b.S
  head/sys/arm/arm/cpufunc_asm_sa1.S
  head/sys/arm/arm/cpufunc_asm_sa11x0.S
  head/sys/arm/arm/cpufunc_asm_sheeva.S
  head/sys/arm/arm/cpufunc_asm_xscale.S
  head/sys/arm/arm/cpufunc_asm_xscale_c3.S
  head/sys/arm/arm/exception.S
  head/sys/arm/arm/fiq_subr.S
  head/sys/arm/arm/fusu.S
  head/sys/arm/arm/in_cksum_arm.S
  head/sys/arm/arm/irq_dispatch.S
  head/sys/arm/arm/locore.S
  head/sys/arm/arm/setcpsr.S
  head/sys/arm/arm/support.S
  head/sys/arm/arm/swtch.S
  head/sys/arm/include/asm.h

Modified: head/sys/arm/arm/bcopy_page.S
==============================================================================
--- head/sys/arm/arm/bcopy_page.S       Sat Mar 16 01:16:57 2013        
(r248360)
+++ head/sys/arm/arm/bcopy_page.S       Sat Mar 16 02:48:49 2013        
(r248361)
@@ -117,6 +117,7 @@ ENTRY(bcopy_page)
        bne     1b
 
        RESTORE_REGS            /* ...and return. */
+END(bcopy_page)
 
 /*
  * bzero_page(dest)
@@ -178,6 +179,7 @@ ENTRY(bzero_page)
        bne     1b
 
        ldmfd   sp!, {r4-r8, pc}
+END(bzero_page)
 
 #else  /* _ARM_ARCH_5E */
 
@@ -246,6 +248,7 @@ ENTRY(bcopy_page)
        bgt     1b
        ldmfd   sp!, {r4, r5}
        RET
+END(bcopy_page)
 
 /*
  * armv5e version of bzero_page
@@ -273,4 +276,5 @@ ENTRY(bzero_page)
        subs    r1, r1, #128
        bne     1b
        RET
+END(bzero_page)
 #endif /* _ARM_ARCH_5E */

Modified: head/sys/arm/arm/bcopyinout.S
==============================================================================
--- head/sys/arm/arm/bcopyinout.S       Sat Mar 16 01:16:57 2013        
(r248360)
+++ head/sys/arm/arm/bcopyinout.S       Sat Mar 16 02:48:49 2013        
(r248361)
@@ -312,6 +312,7 @@ ENTRY(copyin)
        RESTORE_REGS
 
        RET
+END(copyin)
 
 /*
  * r0 = kernel space address
@@ -538,6 +539,7 @@ ENTRY(copyout)
        RESTORE_REGS
 
        RET
+END(copyout)
 #endif
 
 /*
@@ -564,6 +566,7 @@ ENTRY(badaddr_read_1)
        mov     r0, #0          /* No fault */
 1:     str     ip, [r2, #PCB_ONFAULT]
        RET
+END(badaddr_read_1)
 
 /*
  * int badaddr_read_2(const uint16_t *src, uint16_t *dest)
@@ -589,6 +592,7 @@ ENTRY(badaddr_read_2)
        mov     r0, #0          /* No fault */
 1:     str     ip, [r2, #PCB_ONFAULT]
        RET
+END(badaddr_read_2)
 
 /*
  * int badaddr_read_4(const uint32_t *src, uint32_t *dest)
@@ -614,4 +618,5 @@ ENTRY(badaddr_read_4)
        mov     r0, #0          /* No fault */
 1:     str     ip, [r2, #PCB_ONFAULT]
        RET
+END(badaddr_read_4)
 

Modified: head/sys/arm/arm/bcopyinout_xscale.S
==============================================================================
--- head/sys/arm/arm/bcopyinout_xscale.S        Sat Mar 16 01:16:57 2013        
(r248360)
+++ head/sys/arm/arm/bcopyinout_xscale.S        Sat Mar 16 02:48:49 2013        
(r248361)
@@ -492,7 +492,7 @@ ENTRY(copyin)
        ldrbt   ip, [r0]
        strb    ip, [r1]
        RET
-
+END(copyin)
 
 /*
  * r0 = kernel space address
@@ -935,3 +935,5 @@ ENTRY(copyout)
        ldrb    ip, [r0]
        strbt   ip, [r1]
        RET
+END(copyout)
+

Modified: head/sys/arm/arm/blockio.S
==============================================================================
--- head/sys/arm/arm/blockio.S  Sat Mar 16 01:16:57 2013        (r248360)
+++ head/sys/arm/arm/blockio.S  Sat Mar 16 02:48:49 2013        (r248361)
@@ -101,6 +101,7 @@ ENTRY(read_multi_1)
        ldrgtb  r3, [r0]
        strgtb  r3, [r1], #1
        ldmdb   fp, {fp, sp, pc}
+END(read_multi_1)
 
 /*
  * Write bytes to an I/O address from a block of memory
@@ -152,6 +153,7 @@ ENTRY(write_multi_1)
        ldrgtb  r3, [r1], #1
        strgtb  r3, [r0]
        ldmdb   fp, {fp, sp, pc}
+END(write_multi_1)
 
 /*
  * Reads short ints (16 bits) from an I/O address into a block of memory
@@ -199,7 +201,7 @@ ENTRY(insw)
        bgt     .Lfastinswloop
 
        RET
-
+END(insw)
 
 /*
  * Writes short ints (16 bits) from a block of memory to an I/O address
@@ -260,6 +262,7 @@ ENTRY(outsw)
        bgt     .Lfastoutswloop
 
        RET
+END(outsw)
 
 /*
  * reads short ints (16 bits) from an I/O address into a block of memory
@@ -318,7 +321,7 @@ ENTRY(insw16)
        bgt     .Linsw16loop
 
        ldmfd   sp!, {r4,r5,pc}         /* Restore regs and go home */
-
+END(insw16)
 
 /*
  * Writes short ints (16 bits) from a block of memory to an I/O address
@@ -385,6 +388,7 @@ ENTRY(outsw16)
        bgt     .Loutsw16loop
 
        ldmfd   sp!, {r4,r5,pc}         /* and go home */
+END(outsw16)
 
 /*
  * reads short ints (16 bits) from an I/O address into a block of memory
@@ -481,6 +485,7 @@ ENTRY(inswm8)
 
 .Linswm8_l1:
        ldmfd   sp!, {r4-r9,pc}         /* And go home */
+END(inswm8)
 
 /*
  * write short ints (16 bits) to an I/O address from a block of memory
@@ -585,3 +590,5 @@ ENTRY(outswm8)
 
 .Loutswm8_l1:
        ldmfd   sp!, {r4-r8,pc}         /* And go home */
+END(outswm8)
+

Modified: head/sys/arm/arm/bus_space_asm_generic.S
==============================================================================
--- head/sys/arm/arm/bus_space_asm_generic.S    Sat Mar 16 01:16:57 2013        
(r248360)
+++ head/sys/arm/arm/bus_space_asm_generic.S    Sat Mar 16 02:48:49 2013        
(r248361)
@@ -50,14 +50,17 @@ __FBSDID("$FreeBSD$");
 ENTRY(generic_bs_r_1)
        ldrb    r0, [r1, r2]
        RET
+END(generic_bs_r_1)
 
 ENTRY(generic_armv4_bs_r_2)
        ldrh    r0, [r1, r2]
        RET
+END(generic_armv4_bs_r_2)
 
 ENTRY(generic_bs_r_4)
        ldr     r0, [r1, r2]
        RET
+END(generic_bs_r_4)
 
 /*
  * write single
@@ -66,14 +69,17 @@ ENTRY(generic_bs_r_4)
 ENTRY(generic_bs_w_1)
        strb    r3, [r1, r2]
        RET
+END(generic_bs_w_1)
 
 ENTRY(generic_armv4_bs_w_2)
        strh    r3, [r1, r2]
        RET
+END(generic_armv4_bs_w_2)
 
 ENTRY(generic_bs_w_4)
        str     r3, [r1, r2]
        RET
+END(generic_bs_w_4)
 
 /*
  * read multiple
@@ -92,6 +98,7 @@ ENTRY(generic_bs_rm_1)
        bne     1b
 
        RET
+END(generic_bs_rm_1)
 
 ENTRY(generic_armv4_bs_rm_2)
        add     r0, r1, r2
@@ -106,6 +113,7 @@ ENTRY(generic_armv4_bs_rm_2)
        bne     1b
 
        RET
+END(generic_armv4_bs_rm_2)
 
 ENTRY(generic_bs_rm_4)
        add     r0, r1, r2
@@ -120,6 +128,7 @@ ENTRY(generic_bs_rm_4)
        bne     1b
 
        RET
+END(generic_bs_rm_4)
 
 /*
  * write multiple
@@ -138,6 +147,7 @@ ENTRY(generic_bs_wm_1)
        bne     1b
 
        RET
+END(generic_bs_wm_1)
 
 ENTRY(generic_armv4_bs_wm_2)
        add     r0, r1, r2
@@ -152,6 +162,7 @@ ENTRY(generic_armv4_bs_wm_2)
        bne     1b
 
        RET
+END(generic_armv4_bs_wm_2)
 
 ENTRY(generic_bs_wm_4)
        add     r0, r1, r2
@@ -166,6 +177,7 @@ ENTRY(generic_bs_wm_4)
        bne     1b
 
        RET
+END(generic_bs_wm_4)
 
 /*
  * read region
@@ -184,6 +196,7 @@ ENTRY(generic_bs_rr_1)
        bne     1b
 
        RET
+END(generic_bs_rr_1)
 
 ENTRY(generic_armv4_bs_rr_2)
        add     r0, r1, r2
@@ -198,6 +211,7 @@ ENTRY(generic_armv4_bs_rr_2)
        bne     1b
 
        RET
+END(generic_armv4_bs_rr_2)
 
 ENTRY(generic_bs_rr_4)
        add     r0, r1, r2
@@ -212,6 +226,7 @@ ENTRY(generic_bs_rr_4)
        bne     1b
 
        RET
+END(generic_bs_rr_4)
 
 /*
  * write region.
@@ -230,6 +245,7 @@ ENTRY(generic_bs_wr_1)
        bne     1b
 
        RET
+END(generic_bs_wr_1)
 
 ENTRY(generic_armv4_bs_wr_2)
        add     r0, r1, r2
@@ -244,6 +260,7 @@ ENTRY(generic_armv4_bs_wr_2)
        bne     1b
 
        RET
+END(generic_armv4_bs_wr_2)
 
 ENTRY(generic_bs_wr_4)
        add     r0, r1, r2
@@ -258,6 +275,7 @@ ENTRY(generic_bs_wr_4)
        bne     1b
 
        RET
+END(generic_bs_wr_4)
 
 /*
  * set region
@@ -275,6 +293,7 @@ ENTRY(generic_bs_sr_1)
        bne     1b
 
        RET
+END(generic_bs_sr_1)
 
 ENTRY(generic_armv4_bs_sr_2)
        add     r0, r1, r2
@@ -288,6 +307,7 @@ ENTRY(generic_armv4_bs_sr_2)
        bne     1b
 
        RET
+END(generic_armv4_bs_sr_2)
 
 ENTRY(generic_bs_sr_4)
        add     r0, r1, r2
@@ -301,6 +321,7 @@ ENTRY(generic_bs_sr_4)
        bne     1b
 
        RET
+END(generic_bs_sr_4)
 
 /*
  * copy region
@@ -335,3 +356,5 @@ ENTRY(generic_armv4_bs_c_2)
        bne     3b
 
        RET
+END(generic_armv4_bs_c_2)
+

Modified: head/sys/arm/arm/copystr.S
==============================================================================
--- head/sys/arm/arm/copystr.S  Sat Mar 16 01:16:57 2013        (r248360)
+++ head/sys/arm/arm/copystr.S  Sat Mar 16 02:48:49 2013        (r248361)
@@ -93,6 +93,7 @@ ENTRY(copystr)
 
        ldmfd   sp!, {r4-r5}                    /* stack is 8 byte aligned */
        RET
+END(copystr)
 
 #define SAVE_REGS      stmfd   sp!, {r4-r6}
 #define RESTORE_REGS   ldmfd   sp!, {r4-r6}
@@ -143,6 +144,7 @@ ENTRY(copyinstr)
 
        RESTORE_REGS
        RET
+END(copyinstr)
 
 /*
  * r0 - kernel space address
@@ -190,6 +192,7 @@ ENTRY(copyoutstr)
 
        RESTORE_REGS
        RET
+END(copyoutstr)
 
 /* A fault occurred during the copy */
 .Lcopystrfault:

Modified: head/sys/arm/arm/cpufunc_asm.S
==============================================================================
--- head/sys/arm/arm/cpufunc_asm.S      Sat Mar 16 01:16:57 2013        
(r248360)
+++ head/sys/arm/arm/cpufunc_asm.S      Sat Mar 16 02:48:49 2013        
(r248361)
@@ -50,6 +50,7 @@ __FBSDID("$FreeBSD$");
 
 ENTRY(cpufunc_nullop)
        RET
+END(cpufunc_nullop)
 
 /*
  * Generic functions to read the internal coprocessor registers
@@ -64,27 +65,32 @@ ENTRY(cpufunc_nullop)
 ENTRY(cpufunc_id)
        mrc     p15, 0, r0, c0, c0, 0
        RET
+END(cpufunc_id)
 
 ENTRY(cpufunc_cpuid)
        mrc     p15, 0, r0, c0, c0, 0
        RET
+END(cpufunc_cpuid)
 
 ENTRY(cpu_get_control)
        mrc     p15, 0, r0, c1, c0, 0
        RET
+END(cpu_get_control)
 
 ENTRY(cpu_read_cache_config)
        mrc     p15, 0, r0, c0, c0, 1
        RET
+END(cpu_read_cache_config)
 
 ENTRY(cpufunc_faultstatus)
        mrc     p15, 0, r0, c5, c0, 0
        RET
+END(cpufunc_faultstatus)
 
 ENTRY(cpufunc_faultaddress)
        mrc     p15, 0, r0, c6, c0, 0
        RET
-
+END(cpufunc_faultaddress)
 
 /*
  * Generic functions to write the internal coprocessor registers
@@ -101,11 +107,13 @@ ENTRY(cpufunc_faultaddress)
 ENTRY(cpufunc_control)
        mcr     p15, 0, r0, c1, c0, 0
        RET
+END(cpufunc_control)
 #endif
 
 ENTRY(cpufunc_domains)
        mcr     p15, 0, r0, c3, c0, 0
        RET
+END(cpufunc_domains)
 
 /*
  * Generic functions to read/modify/write the internal coprocessor registers
@@ -131,6 +139,8 @@ ENTRY(cpufunc_control)
 .Lglou:
        .asciz "plop %p\n"
        .align 0
+END(cpufunc_control)
+
 /*
  * other potentially useful software functions are:
  *  clean D cache entry and flush I cache entry
@@ -157,6 +167,7 @@ ENTRY(get_pc_str_offset)
        ldr     r0, [sp]
        sub     r0, r0, r1
        ldmdb   fp, {fp, sp, pc}
+END(get_pc_str_offset)
 
 /* Allocate and lock a cacheline for the specified address. */
 
@@ -180,3 +191,5 @@ ENTRY(arm_lock_cache_line)
        mcr     p15, 0, r1, c9, c2, 0 /* Disable data cache lock mode */
        CPWAIT()
        RET
+END(arm_lock_cache_line)
+

Modified: head/sys/arm/arm/cpufunc_asm_arm10.S
==============================================================================
--- head/sys/arm/arm/cpufunc_asm_arm10.S        Sat Mar 16 01:16:57 2013        
(r248360)
+++ head/sys/arm/arm/cpufunc_asm_arm10.S        Sat Mar 16 02:48:49 2013        
(r248361)
@@ -50,6 +50,7 @@ ENTRY(arm10_setttb)
 
        mcr     p15, 0, r0, c8, c7, 0   /* invalidate I+D TLBs */
        bx      lr
+END(arm10_setttb)
 
 /*
  * TLB functions
@@ -58,11 +59,12 @@ ENTRY(arm10_tlb_flushID_SE)
        mcr     p15, 0, r0, c8, c6, 1   /* flush D tlb single entry */
        mcr     p15, 0, r0, c8, c5, 1   /* flush I tlb single entry */
        bx      lr
+END(arm10_tlb_flushID_SE)
 
 ENTRY(arm10_tlb_flushI_SE)
        mcr     p15, 0, r0, c8, c5, 1   /* flush I tlb single entry */
        bx      lr
-       
+END(arm10_tlb_flushI_SE)
 
 /*
  * Cache operations.  For the entire cache we use the set/index
@@ -90,6 +92,7 @@ ENTRY_NP(arm10_icache_sync_range)
        bhi     .Larm10_sync_next
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        bx      lr
+END(arm10_icache_sync_range)
 
 ENTRY_NP(arm10_icache_sync_all)
 .Larm10_icache_sync_all:
@@ -114,6 +117,7 @@ ENTRY_NP(arm10_icache_sync_all)
        bhs     .Lnext_set              /* Next set */
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        bx      lr
+END(arm10_icache_sync_all)
 
 .Larm10_line_size:
        .word   _C_LABEL(arm_pdcache_line_size)
@@ -134,6 +138,7 @@ ENTRY(arm10_dcache_wb_range)
        bhi     .Larm10_wb_next
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        bx      lr
+END(arm10_dcache_wb_range)
        
 ENTRY(arm10_dcache_wbinv_range)
        ldr     ip, .Larm10_line_size
@@ -151,6 +156,7 @@ ENTRY(arm10_dcache_wbinv_range)
        bhi     .Larm10_wbinv_next
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        bx      lr
+END(arm10_dcache_wbinv_range)
        
 /*
  * Note, we must not invalidate everything.  If the range is too big we
@@ -172,6 +178,7 @@ ENTRY(arm10_dcache_inv_range)
        bhi     .Larm10_inv_next
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        bx      lr
+END(arm10_dcache_inv_range)
 
 ENTRY(arm10_idcache_wbinv_range)
        ldr     ip, .Larm10_line_size
@@ -190,6 +197,7 @@ ENTRY(arm10_idcache_wbinv_range)
        bhi     .Larm10_id_wbinv_next
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        bx      lr
+END(arm10_idcache_wbinv_range)
 
 ENTRY_NP(arm10_idcache_wbinv_all)
 .Larm10_idcache_wbinv_all:
@@ -215,6 +223,8 @@ ENTRY(arm10_dcache_wbinv_all)
        bhs     .Lnext_set_inv          /* Next set */
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        bx      lr
+END(arm10_idcache_wbinv_all)
+END(arm10_dcache_wbinv_all)
 
 .Larm10_cache_data:
        .word   _C_LABEL(arm10_dcache_sets_max)
@@ -242,6 +252,7 @@ ENTRY(arm10_context_switch)
        nop
        nop
        bx      lr
+END(arm10_context_switch)
 
        .bss
 

Modified: head/sys/arm/arm/cpufunc_asm_arm11.S
==============================================================================
--- head/sys/arm/arm/cpufunc_asm_arm11.S        Sat Mar 16 01:16:57 2013        
(r248360)
+++ head/sys/arm/arm/cpufunc_asm_arm11.S        Sat Mar 16 02:48:49 2013        
(r248361)
@@ -55,6 +55,7 @@ ENTRY(arm11_setttb)
        mcr     p15, 0, r0, c8, c7, 0   /* invalidate I+D TLBs */
        mcr     p15, 0, r0, c7, c10, 4  /* drain write buffer */
        RET
+END(arm11_setttb)
 
 /*
  * TLB functions
@@ -64,12 +65,13 @@ ENTRY(arm11_tlb_flushID_SE)
        mcr     p15, 0, r0, c8, c5, 1   /* flush I tlb single entry */
        mcr     p15, 0, r0, c7, c10, 4  /* drain write buffer */
        RET
+END(arm11_tlb_flushID_SE)
 
 ENTRY(arm11_tlb_flushI_SE)
        mcr     p15, 0, r0, c8, c5, 1   /* flush I tlb single entry */
        mcr     p15, 0, r0, c7, c10, 4  /* drain write buffer */
        RET
-       
+END(arm11_tlb_flushI_SE)
 
 /*
  * Context switch.
@@ -94,6 +96,7 @@ ENTRY(arm11_context_switch)
        nop
        nop
        RET
+END(arm11_context_switch)
 
 /*
  * TLB functions
@@ -102,21 +105,25 @@ ENTRY(arm11_tlb_flushID)
        mcr     p15, 0, r0, c8, c7, 0   /* flush I+D tlb */
        mcr     p15, 0, r0, c7, c10, 4  /* drain write buffer */
        mov     pc, lr
+END(arm11_tlb_flushID)
 
 ENTRY(arm11_tlb_flushI)
        mcr     p15, 0, r0, c8, c5, 0   /* flush I tlb */
        mcr     p15, 0, r0, c7, c10, 4  /* drain write buffer */
        mov     pc, lr
+END(arm11_tlb_flushI)
 
 ENTRY(arm11_tlb_flushD)
        mcr     p15, 0, r0, c8, c6, 0   /* flush D tlb */
        mcr     p15, 0, r0, c7, c10, 4  /* drain write buffer */
        mov     pc, lr
+END(arm11_tlb_flushD)
 
 ENTRY(arm11_tlb_flushD_SE)
        mcr     p15, 0, r0, c8, c6, 1   /* flush D tlb single entry */
        mcr     p15, 0, r0, c7, c10, 4  /* drain write buffer */
        mov     pc, lr
+END(arm11_tlb_flushD_SE)
 
 /*
  * Other functions
@@ -124,8 +131,11 @@ ENTRY(arm11_tlb_flushD_SE)
 ENTRY(arm11_drain_writebuf)
        mcr     p15, 0, r0, c7, c10, 4  /* drain write buffer */
        mov     pc, lr
+END(arm11_drain_writebuf)
 
 ENTRY_NP(arm11_sleep)
        mov     r0, #0
        mcr     p15, 0, r0, c7, c0, 4   /* wait for interrupt */
        RET
+END(arm11_sleep)
+

Modified: head/sys/arm/arm/cpufunc_asm_arm11x6.S
==============================================================================
--- head/sys/arm/arm/cpufunc_asm_arm11x6.S      Sat Mar 16 01:16:57 2013        
(r248360)
+++ head/sys/arm/arm/cpufunc_asm_arm11x6.S      Sat Mar 16 02:48:49 2013        
(r248361)
@@ -124,24 +124,29 @@ ENTRY(arm11x6_setttb)
        mcr     p15, 0, r1, c8, c7, 0   /* invalidate I+D TLBs */
        mcr     p15, 0, r1, c7, c10, 4  /* drain write buffer */
        RET
+END(arm11x6_setttb)
 
 ENTRY_NP(arm11x6_idcache_wbinv_all)
        Flush_D_cache(r0)
        Invalidate_I_cache(r0, r1)
        RET
+END(arm11x6_idcache_wbinv_all)
 
 ENTRY_NP(arm11x6_dcache_wbinv_all)
        Flush_D_cache(r0)
        RET
+END(arm11x6_dcache_wbinv_all)
 
 ENTRY_NP(arm11x6_icache_sync_all)
        Flush_D_cache(r0)
        Invalidate_I_cache(r0, r1)
        RET
+END(arm11x6_icache_sync_all)
 
 ENTRY_NP(arm11x6_flush_prefetchbuf)
        mcr     p15, 0, r0, c7, c5, 4   /* Flush Prefetch Buffer */
        RET
+END(arm11x6_flush_prefetchbuf)
 
 ENTRY_NP(arm11x6_icache_sync_range)
        add     r1, r1, r0
@@ -168,6 +173,7 @@ ENTRY_NP(arm11x6_icache_sync_range)
        mcrr    p15, 0, r1, r0, c12     /* clean and invalidate D cache range 
*/ /* XXXNH */
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        RET
+END(arm11x6_icache_sync_range)
 
 ENTRY_NP(arm11x6_idcache_wbinv_range)
        add     r1, r1, r0
@@ -194,6 +200,7 @@ ENTRY_NP(arm11x6_idcache_wbinv_range)
        mcrr    p15, 0, r1, r0, c14     /* clean and invalidate D cache range */
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        RET
+END(arm11x6_idcache_wbinv_range)
 
 /*
  * Preload the cache before issuing the WFI by conditionally disabling the
@@ -216,3 +223,5 @@ ENTRY_NP(arm11x6_sleep)
        nop
        bne     1b
        RET
+END(arm11x6_sleep)
+

Modified: head/sys/arm/arm/cpufunc_asm_arm7tdmi.S
==============================================================================
--- head/sys/arm/arm/cpufunc_asm_arm7tdmi.S     Sat Mar 16 01:16:57 2013        
(r248360)
+++ head/sys/arm/arm/cpufunc_asm_arm7tdmi.S     Sat Mar 16 02:48:49 2013        
(r248361)
@@ -60,6 +60,7 @@ ENTRY(arm7tdmi_setttb)
        bl      _C_LABEL(arm7tdmi_cache_flushID)
 
        mov     pc, r2
+END(arm7tdmi_setttb)
 
 /*
  * TLB functions
@@ -68,10 +69,12 @@ ENTRY(arm7tdmi_tlb_flushID)
        mov     r0, #0
        mcr     p15, 0, r0, c8, c7, 0
        RET
+END(arm7tdmi_tlb_flushID)
 
 ENTRY(arm7tdmi_tlb_flushID_SE)
        mcr     p15, 0, r0, c8, c7, 1
        RET
+END(arm7tdmi_tlb_flushID_SE)
 
 /*
  * Cache functions
@@ -86,6 +89,7 @@ ENTRY(arm7tdmi_cache_flushID)
        mov     r0, r0
 
        RET
+END(arm7tdmi_cache_flushID)
 
 /*
  * Context switch.
@@ -98,3 +102,5 @@ ENTRY(arm7tdmi_cache_flushID)
  */
 ENTRY(arm7tdmi_context_switch)
        b       _C_LABEL(arm7tdmi_setttb)
+END(arm7tdmi_context_switch)
+

Modified: head/sys/arm/arm/cpufunc_asm_arm8.S
==============================================================================
--- head/sys/arm/arm/cpufunc_asm_arm8.S Sat Mar 16 01:16:57 2013        
(r248360)
+++ head/sys/arm/arm/cpufunc_asm_arm8.S Sat Mar 16 02:48:49 2013        
(r248361)
@@ -58,6 +58,7 @@ ENTRY(arm8_clock_config)
        mcr     p15, 0, r2, c15, c0, 0  /* Write clock register */
        mov     r0, r3                  /* Return old value */
        RET
+END(arm8_clock_config)
 
 /*
  * Functions to set the MMU Translation Table Base register
@@ -90,6 +91,7 @@ ENTRY(arm8_setttb)
        msr     cpsr_all, r3
 
        RET
+END(arm8_setttb)
 
 /*
  * TLB functions
@@ -97,10 +99,12 @@ ENTRY(arm8_setttb)
 ENTRY(arm8_tlb_flushID)
        mcr     p15, 0, r0, c8, c7, 0   /* flush I+D tlb */
        RET
+END(arm8_tlb_flushID)
 
 ENTRY(arm8_tlb_flushID_SE)
        mcr     p15, 0, r0, c8, c7, 1   /* flush I+D tlb single entry */
        RET
+END(arm8_tlb_flushID_SE)
 
 /*
  * Cache functions
@@ -108,10 +112,12 @@ ENTRY(arm8_tlb_flushID_SE)
 ENTRY(arm8_cache_flushID)
        mcr     p15, 0, r0, c7, c7, 0   /* flush I+D cache */
        RET
+END(arm8_cache_flushID)
 
 ENTRY(arm8_cache_flushID_E)
        mcr     p15, 0, r0, c7, c7, 1   /* flush I+D single entry */
        RET
+END(arm8_cache_flushID_E)
 
 ENTRY(arm8_cache_cleanID)
        mov     r0, #0x00000000
@@ -153,10 +159,12 @@ ENTRY(arm8_cache_cleanID)
        bne     1b
 
        RET
+END(arm8_cache_cleanID)
 
 ENTRY(arm8_cache_cleanID_E)
        mcr     p15, 0, r0, c7, c11, 1  /* clean I+D single entry */
        RET
+END(arm8_cache_cleanID_E)
 
 ENTRY(arm8_cache_purgeID)
        /*
@@ -232,6 +240,7 @@ ENTRY(arm8_cache_purgeID)
 
        msr     cpsr_all, r3
        RET
+END(arm8_cache_purgeID)
 
 ENTRY(arm8_cache_purgeID_E)
        /*
@@ -253,6 +262,7 @@ ENTRY(arm8_cache_purgeID_E)
        mcr     p15, 0, r0, c7, c7, 1   /* flush I+D single entry */
        msr     cpsr_all, r3
        RET
+END(arm8_cache_purgeID_E)
 
 /*
  * Context switch.
@@ -282,3 +292,5 @@ ENTRY(arm8_context_switch)
        mov     r0, r0
        mov     r0, r0
        RET
+END(arm8_context_switch)
+

Modified: head/sys/arm/arm/cpufunc_asm_arm9.S
==============================================================================
--- head/sys/arm/arm/cpufunc_asm_arm9.S Sat Mar 16 01:16:57 2013        
(r248360)
+++ head/sys/arm/arm/cpufunc_asm_arm9.S Sat Mar 16 02:48:49 2013        
(r248361)
@@ -49,6 +49,7 @@ ENTRY(arm9_setttb)
 
        mcr     p15, 0, r0, c8, c7, 0   /* invalidate I+D TLBs */
        mov     pc, lr
+END(arm9_setttb)
 
 /*
  * TLB functions
@@ -57,6 +58,7 @@ ENTRY(arm9_tlb_flushID_SE)
        mcr     p15, 0, r0, c8, c6, 1   /* flush D tlb single entry */
        mcr     p15, 0, r0, c8, c5, 1   /* flush I tlb single entry */
        mov     pc, lr
+END(arm9_tlb_flushID_SE)
 
 /*
  * Cache operations.  For the entire cache we use the set/index
@@ -83,6 +85,7 @@ ENTRY_NP(arm9_icache_sync_range)
        subs    r1, r1, ip
        bhi     .Larm9_sync_next
        mov     pc, lr
+END(arm9_icache_sync_range)
 
 ENTRY_NP(arm9_icache_sync_all)
 .Larm9_icache_sync_all:
@@ -106,6 +109,7 @@ ENTRY_NP(arm9_icache_sync_all)
        subs    s_max, s_max, s_inc
        bhs     .Lnext_set              /* Next set */
        mov     pc, lr
+END(arm9_icache_sync_all)
 
 .Larm9_line_size:
        .word   _C_LABEL(arm_pdcache_line_size)
@@ -125,6 +129,7 @@ ENTRY(arm9_dcache_wb_range)
        subs    r1, r1, ip
        bhi     .Larm9_wb_next
        mov     pc, lr
+END(arm9_dcache_wb_range)
        
 ENTRY(arm9_dcache_wbinv_range)
        ldr     ip, .Larm9_line_size
@@ -141,6 +146,7 @@ ENTRY(arm9_dcache_wbinv_range)
        subs    r1, r1, ip
        bhi     .Larm9_wbinv_next
        mov     pc, lr
+END(arm9_dcache_wbinv_range)
        
 /*
  * Note, we must not invalidate everything.  If the range is too big we
@@ -161,6 +167,7 @@ ENTRY(arm9_dcache_inv_range)
        subs    r1, r1, ip
        bhi     .Larm9_inv_next
        mov     pc, lr
+END(arm9_dcache_inv_range)
 
 ENTRY(arm9_idcache_wbinv_range)
        ldr     ip, .Larm9_line_size
@@ -178,6 +185,7 @@ ENTRY(arm9_idcache_wbinv_range)
        subs    r1, r1, ip
        bhi     .Larm9_id_wbinv_next
        mov     pc, lr
+END(arm9_idcache_wbinv_range)
 
 ENTRY_NP(arm9_idcache_wbinv_all)
 .Larm9_idcache_wbinv_all:
@@ -202,6 +210,8 @@ ENTRY(arm9_dcache_wbinv_all)
        subs    s_max, s_max, s_inc
        bhs     .Lnext_set_inv          /* Next set */
        mov     pc, lr
+END(arm9_idcache_wbinv_all)
+END(arm9_dcache_wbinv_all)
 
 .Larm9_cache_data:
        .word   _C_LABEL(arm9_dcache_sets_max)
@@ -229,6 +239,7 @@ ENTRY(arm9_context_switch)
        nop
        nop
        mov     pc, lr
+END(arm9_context_switch)
 
        .bss
 

Modified: head/sys/arm/arm/cpufunc_asm_armv4.S
==============================================================================
--- head/sys/arm/arm/cpufunc_asm_armv4.S        Sat Mar 16 01:16:57 2013        
(r248360)
+++ head/sys/arm/arm/cpufunc_asm_armv4.S        Sat Mar 16 02:48:49 2013        
(r248361)
@@ -46,18 +46,22 @@ __FBSDID("$FreeBSD$");
 ENTRY(armv4_tlb_flushID)
        mcr     p15, 0, r0, c8, c7, 0   /* flush I+D tlb */
        RET
+END(armv4_tlb_flushID)
 
 ENTRY(armv4_tlb_flushI)
        mcr     p15, 0, r0, c8, c5, 0   /* flush I tlb */
        RET
+END(armv4_tlb_flushI)
 
 ENTRY(armv4_tlb_flushD)
        mcr     p15, 0, r0, c8, c6, 0   /* flush D tlb */
        RET
+END(armv4_tlb_flushD)
 
 ENTRY(armv4_tlb_flushD_SE)
        mcr     p15, 0, r0, c8, c6, 1   /* flush D tlb single entry */
        RET
+END(armv4_tlb_flushD_SE)
 
 /*
  * Other functions
@@ -65,3 +69,5 @@ ENTRY(armv4_tlb_flushD_SE)
 ENTRY(armv4_drain_writebuf)
        mcr     p15, 0, r0, c7, c10, 4  /* drain write buffer */
        RET
+END(armv4_drain_writebuf)
+

Modified: head/sys/arm/arm/cpufunc_asm_armv5.S
==============================================================================
--- head/sys/arm/arm/cpufunc_asm_armv5.S        Sat Mar 16 01:16:57 2013        
(r248360)
+++ head/sys/arm/arm/cpufunc_asm_armv5.S        Sat Mar 16 02:48:49 2013        
(r248361)
@@ -51,6 +51,7 @@ ENTRY(armv5_setttb)
 
        mcr     p15, 0, r0, c8, c7, 0   /* invalidate I+D TLBs */
        RET
+END(armv5_setttb)
 
 /*
  * Cache operations.  For the entire cache we use the set/index
@@ -79,6 +80,7 @@ ENTRY_NP(armv5_icache_sync_range)
        bpl     1b
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        RET
+END(armv5_icache_sync_range)
 
 ENTRY_NP(armv5_icache_sync_all)
 .Larmv5_icache_sync_all:
@@ -105,6 +107,7 @@ ENTRY_NP(armv5_icache_sync_all)
        bpl     1b                      /* Next set */
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        RET
+END(armv5_icache_sync_all)
 
 .Larmv5_line_size:
        .word   _C_LABEL(arm_pdcache_line_size)
@@ -126,6 +129,7 @@ ENTRY(armv5_dcache_wb_range)
        bpl     1b
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        RET
+END(armv5_dcache_wb_range)
        
 ENTRY(armv5_dcache_wbinv_range)
        ldr     ip, .Larmv5_line_size
@@ -144,6 +148,7 @@ ENTRY(armv5_dcache_wbinv_range)
        bpl     1b
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        RET
+END(armv5_dcache_wbinv_range)
        
 /*
  * Note, we must not invalidate everything.  If the range is too big we
@@ -166,6 +171,7 @@ ENTRY(armv5_dcache_inv_range)
        bpl     1b
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        RET
+END(armv5_dcache_inv_range)
 
 ENTRY(armv5_idcache_wbinv_range)
        ldr     ip, .Larmv5_line_size
@@ -185,6 +191,7 @@ ENTRY(armv5_idcache_wbinv_range)
        bpl     1b
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        RET
+END(armv5_idcache_wbinv_range)
 
 ENTRY_NP(armv5_idcache_wbinv_all)
 .Larmv5_idcache_wbinv_all:
@@ -212,6 +219,8 @@ ENTRY(armv5_dcache_wbinv_all)
        bpl     1b                      /* Next set */
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        RET
+END(armv5_idcache_wbinv_all)
+END(armv5_dcache_wbinv_all)
 
 .Larmv5_cache_data:
        .word   _C_LABEL(armv5_dcache_sets_max)

Modified: head/sys/arm/arm/cpufunc_asm_armv5_ec.S
==============================================================================
--- head/sys/arm/arm/cpufunc_asm_armv5_ec.S     Sat Mar 16 01:16:57 2013        
(r248360)
+++ head/sys/arm/arm/cpufunc_asm_armv5_ec.S     Sat Mar 16 02:48:49 2013        
(r248361)
@@ -66,6 +66,7 @@ ENTRY(armv5_ec_setttb)
 
        mcr     p15, 0, r0, c8, c7, 0   /* invalidate I+D TLBs */
        RET
+END(armv5_ec_setttb)
 
 /*
  * Cache operations.  For the entire cache we use the enhanced cache
@@ -90,6 +91,7 @@ ENTRY_NP(armv5_ec_icache_sync_range)
        bpl     1b
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        RET
+END(armv5_ec_icache_sync_range)
 
 ENTRY_NP(armv5_ec_icache_sync_all)
 .Larmv5_ec_icache_sync_all:
@@ -107,6 +109,7 @@ ENTRY_NP(armv5_ec_icache_sync_all)
        bne     1b                      /* More to do? */
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        RET
+END(armv5_ec_icache_sync_all)
 
 .Larmv5_ec_line_size:
        .word   _C_LABEL(arm_pdcache_line_size)
@@ -128,6 +131,7 @@ ENTRY(armv5_ec_dcache_wb_range)
        bpl     1b
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        RET
+END(armv5_ec_dcache_wb_range)
 
 ENTRY(armv5_ec_dcache_wbinv_range)
        ldr     ip, .Larmv5_ec_line_size
@@ -146,6 +150,7 @@ ENTRY(armv5_ec_dcache_wbinv_range)
        bpl     1b
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        RET
+END(armv5_ec_dcache_wbinv_range)
 
 /*
  * Note, we must not invalidate everything.  If the range is too big we
@@ -168,6 +173,7 @@ ENTRY(armv5_ec_dcache_inv_range)
        bpl     1b
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        RET
+END(armv5_ec_dcache_inv_range)
 
 ENTRY(armv5_ec_idcache_wbinv_range)
        ldr     ip, .Larmv5_ec_line_size
@@ -187,6 +193,7 @@ ENTRY(armv5_ec_idcache_wbinv_range)
        bpl     1b
        mcr     p15, 0, r0, c7, c10, 4  /* drain the write buffer */
        RET
+END(armv5_ec_idcache_wbinv_range)
 
 ENTRY_NP(armv5_ec_idcache_wbinv_all)
 .Larmv5_ec_idcache_wbinv_all:
@@ -197,6 +204,7 @@ ENTRY_NP(armv5_ec_idcache_wbinv_all)
         */
        mcr     p15, 0, r0, c7, c5, 0   /* Invalidate ICache */
        /* Fall through to purge Dcache. */
+END(armv5_ec_idcache_wbinv_all)

*** DIFF OUTPUT TRUNCATED AT 1000 LINES ***
_______________________________________________
svn-src-all@freebsd.org mailing list
http://lists.freebsd.org/mailman/listinfo/svn-src-all
To unsubscribe, send any mail to "svn-src-all-unsubscribe@freebsd.org"

Reply via email to