* Correct comments in arm{32,64}/head.S
* Provide linker assertions to check the safety of the BSS-zeroing loops

Signed-off-by: Andrew Cooper <andrew.coop...@citrix.com>
---
CC: Stefano Stabellini <sstabell...@kernel.org>
CC: Julien Grall <jul...@xen.org>
CC: Volodymyr Babchuk <volodymyr_babc...@epam.com>
CC: Bertrand Marquis <bertrand.marq...@arm.com>
CC: Bob Eshleman <bobbyeshle...@gmail.com>
CC: Alistair Francis <alistair.fran...@wdc.com>
CC: Connor Davis <connojda...@gmail.com>
CC: Oleksii Kurochko <oleksii.kuroc...@gmail.com>

Pulled out of the very start of my work to try to unify the handling of
xen_phys_addr across architectures.
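
For reviewers, some illustrative background (not part of the patch): the
zero_bss loops in the diff below clear BSS a register-width at a time and only
compare the cursor against __bss_end after each store, so if either symbol
were not POINTER_ALIGN-aligned the final store could write past the end of the
section.  The new ASSERT()s let the linker catch that at build time.  A
minimal sketch of the kind of .bss layout the assertions expect (hypothetical,
not the actual xen.lds.S contents):

    .bss : {
        . = ALIGN(POINTER_ALIGN);
        __bss_start = .;          /* aligned start of the zeroing loop    */
        *(.bss .bss.*)
        . = ALIGN(POINTER_ALIGN); /* keep the end aligned too, so e.g.    */
        __bss_end = .;            /* "str xzr, [x0], #8" stops exactly at */
    }                             /* __bss_end without overrunning it     */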
---
 xen/arch/arm/arm32/head.S | 2 +-
 xen/arch/arm/arm64/head.S | 2 +-
 xen/arch/arm/xen.lds.S    | 2 ++
 xen/arch/riscv/xen.lds.S  | 4 ++++
 4 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/xen/arch/arm/arm32/head.S b/xen/arch/arm/arm32/head.S
index df51550baa8a..f9f7be9588b1 100644
--- a/xen/arch/arm/arm32/head.S
+++ b/xen/arch/arm/arm32/head.S
@@ -301,7 +301,7 @@ ENDPROC(check_cpu_mode)
 zero_bss:
         PRINT("- Zero BSS -\r\n")
         mov_w r0, __bss_start        /* r0 := vaddr(__bss_start) */
-        mov_w r1, __bss_end          /* r1 := vaddr(__bss_start) */
+        mov_w r1, __bss_end          /* r1 := vaddr(__bss_end)   */
 
         mov   r2, #0
 1:      str   r2, [r0], #4
diff --git a/xen/arch/arm/arm64/head.S b/xen/arch/arm/arm64/head.S
index 4a3f87117c83..8a4dd64c99ad 100644
--- a/xen/arch/arm/arm64/head.S
+++ b/xen/arch/arm/arm64/head.S
@@ -437,7 +437,7 @@ zero_bss:
 
         PRINT("- Zero BSS -\r\n")
         ldr   x0, =__bss_start       /* x0 := vaddr(__bss_start) */
-        ldr   x1, =__bss_end         /* x1 := vaddr(__bss_start) */
+        ldr   x1, =__bss_end         /* x1 := vaddr(__bss_end)   */
 
 1:      str   xzr, [x0], #8
         cmp   x0, x1
diff --git a/xen/arch/arm/xen.lds.S b/xen/arch/arm/xen.lds.S
index 1b392345bc3b..6ca3caefe607 100644
--- a/xen/arch/arm/xen.lds.S
+++ b/xen/arch/arm/xen.lds.S
@@ -240,3 +240,5 @@ ASSERT(_idmap_end - _idmap_start <= PAGE_SIZE, "Identity mapped code is larger than a page size")
  */
 ASSERT(IS_ALIGNED(__init_begin,     4), "__init_begin is misaligned")
 ASSERT(IS_ALIGNED(__init_end,       4), "__init_end is misaligned")
+ASSERT(IS_ALIGNED(__bss_start,      POINTER_ALIGN), "__bss_start is misaligned")
+ASSERT(IS_ALIGNED(__bss_end,        POINTER_ALIGN), "__bss_end is misaligned")
diff --git a/xen/arch/riscv/xen.lds.S b/xen/arch/riscv/xen.lds.S
index ca57cce75cba..2ed70eccc62a 100644
--- a/xen/arch/riscv/xen.lds.S
+++ b/xen/arch/riscv/xen.lds.S
@@ -1,3 +1,4 @@
+#include <xen/lib.h>
 #include <xen/xen.lds.h>
 
 #undef ENTRY
@@ -156,3 +157,6 @@ SECTIONS
 
     ELF_DETAILS_SECTIONS
 }
+
+ASSERT(IS_ALIGNED(__bss_start,      POINTER_ALIGN), "__bss_start is misaligned")
+ASSERT(IS_ALIGNED(__bss_end,        POINTER_ALIGN), "__bss_end is misaligned")
-- 
2.30.2

