On Sat, Jan 14, 2023 at 09:08:38PM +0100, Theo Buehler wrote:
> This moves constants from .text into .rodata.
>
> All tests pass, gnupg tests work, gpgme and gcr build.
OK
>
> Index: Makefile
> ===================================================================
> RCS file: /cvs/ports/security/libgcrypt/Makefile,v
> retrieving revision 1.81
> diff -u -p -r1.81 Makefile
> --- Makefile 9 Jan 2023 17:27:49 -0000 1.81
> +++ Makefile 14 Jan 2023 19:53:18 -0000
> @@ -1,7 +1,7 @@
> COMMENT= crypto library based on code used in GnuPG
>
> DISTNAME= libgcrypt-1.10.1
> -REVISION= 1
> +REVISION= 2
> CATEGORIES= security
>
> SHARED_LIBS += gcrypt 21.0 # 24.1
> @@ -23,7 +23,9 @@ CONFIGURE_STYLE= gnu
> CONFIGURE_ARGS= --enable-static \
> --disable-drng-support
>
> +.if ${MACHINE_ARCH} != aarch64
> USE_NOEXECONLY= Yes
> +.endif
>
> DEBUG_PACKAGES= ${BUILD_PACKAGES}
>
> Index: patches/patch-cipher_camellia-aarch64_S
> ===================================================================
> RCS file: patches/patch-cipher_camellia-aarch64_S
> diff -N patches/patch-cipher_camellia-aarch64_S
> --- /dev/null 1 Jan 1970 00:00:00 -0000
> +++ patches/patch-cipher_camellia-aarch64_S 14 Jan 2023 19:44:18 -0000
> @@ -0,0 +1,11 @@
> +Index: cipher/camellia-aarch64.S
> +--- cipher/camellia-aarch64.S.orig
> ++++ cipher/camellia-aarch64.S
> +@@ -313,6 +313,7 @@ _gcry_camellia_arm_decrypt_block:
> + .ltorg
> + ELF(.size _gcry_camellia_arm_decrypt_block,.-_gcry_camellia_arm_decrypt_block;)
> +
> ++.rodata
> + /* Encryption/Decryption tables */
> + ELF(.type _gcry_camellia_arm_tables,@object;)
> + .balign 32
> Index: patches/patch-cipher_chacha20-aarch64_S
> ===================================================================
> RCS file: patches/patch-cipher_chacha20-aarch64_S
> diff -N patches/patch-cipher_chacha20-aarch64_S
> --- /dev/null 1 Jan 1970 00:00:00 -0000
> +++ patches/patch-cipher_chacha20-aarch64_S 14 Jan 2023 19:44:25 -0000
> @@ -0,0 +1,21 @@
> +Index: cipher/chacha20-aarch64.S
> +--- cipher/chacha20-aarch64.S.orig
> ++++ cipher/chacha20-aarch64.S
> +@@ -36,7 +36,7 @@
> +
> + .cpu generic+simd
> +
> +-.text
> ++.rodata
> +
> + #include "asm-poly1305-aarch64.h"
> +
> +@@ -192,6 +192,8 @@ _gcry_chacha20_aarch64_blocks4_data_rot8:
> + .byte 7,4,5,6
> + .byte 11,8,9,10
> + .byte 15,12,13,14
> ++
> ++.text
> +
> + .align 3
> + .globl _gcry_chacha20_aarch64_blocks4
> Index: patches/patch-cipher_cipher-gcm-armv8-aarch64-ce_S
> ===================================================================
> RCS file: patches/patch-cipher_cipher-gcm-armv8-aarch64-ce_S
> diff -N patches/patch-cipher_cipher-gcm-armv8-aarch64-ce_S
> --- /dev/null 1 Jan 1970 00:00:00 -0000
> +++ patches/patch-cipher_cipher-gcm-armv8-aarch64-ce_S 14 Jan 2023 19:27:48 -0000
> @@ -0,0 +1,21 @@
> +Index: cipher/cipher-gcm-armv8-aarch64-ce.S
> +--- cipher/cipher-gcm-armv8-aarch64-ce.S.orig
> ++++ cipher/cipher-gcm-armv8-aarch64-ce.S
> +@@ -25,7 +25,7 @@
> +
> + .cpu generic+simd+crypto
> +
> +-.text
> ++.rodata
> +
> +
> + /* Constants */
> +@@ -170,6 +170,8 @@ gcry_gcm_reduction_constant:
> + CFI_ADJUST_CFA_OFFSET(-16); \
> + ldp d8, d9, [sp], #16; \
> + CFI_ADJUST_CFA_OFFSET(-16);
> ++
> ++.text
> +
> + /*
> + * unsigned int _gcry_ghash_armv8_ce_pmull (void *gcm_key, byte *result,
> Index: patches/patch-cipher_crc-armv8-aarch64-ce_S
> ===================================================================
> RCS file: patches/patch-cipher_crc-armv8-aarch64-ce_S
> diff -N patches/patch-cipher_crc-armv8-aarch64-ce_S
> --- /dev/null 1 Jan 1970 00:00:00 -0000
> +++ patches/patch-cipher_crc-armv8-aarch64-ce_S 14 Jan 2023 19:18:35 -0000
> @@ -0,0 +1,20 @@
> +Index: cipher/crc-armv8-aarch64-ce.S
> +--- cipher/crc-armv8-aarch64-ce.S.orig
> ++++ cipher/crc-armv8-aarch64-ce.S
> +@@ -25,7 +25,7 @@
> +
> + .cpu generic+simd+crypto
> +
> +-.text
> ++.rodata
> +
> +
> + /* Structure of crc32_consts_s */
> +@@ -54,6 +54,7 @@
> + .byte 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff
> + .byte 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff
> +
> ++.text
> +
> + /*
> + * void _gcry_crc32r_armv8_ce_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
> Index: patches/patch-cipher_sha1-armv8-aarch64-ce_S
> ===================================================================
> RCS file: patches/patch-cipher_sha1-armv8-aarch64-ce_S
> diff -N patches/patch-cipher_sha1-armv8-aarch64-ce_S
> --- /dev/null 1 Jan 1970 00:00:00 -0000
> +++ patches/patch-cipher_sha1-armv8-aarch64-ce_S 14 Jan 2023 19:25:39 -0000
> @@ -0,0 +1,20 @@
> +Index: cipher/sha1-armv8-aarch64-ce.S
> +--- cipher/sha1-armv8-aarch64-ce.S.orig
> ++++ cipher/sha1-armv8-aarch64-ce.S
> +@@ -25,7 +25,7 @@
> +
> + .cpu generic+simd+crypto
> +
> +-.text
> ++.rodata
> +
> +
> + /* Constants */
> +@@ -90,6 +90,7 @@ gcry_sha1_aarch64_ce_K_VEC:
> +
> + #define CLEAR_REG(reg) movi reg.16b, #0;
> +
> ++.text
> +
> + /*
> + * unsigned int
> Index: patches/patch-cipher_sha256-armv8-aarch64-ce_S
> ===================================================================
> RCS file: patches/patch-cipher_sha256-armv8-aarch64-ce_S
> diff -N patches/patch-cipher_sha256-armv8-aarch64-ce_S
> --- /dev/null 1 Jan 1970 00:00:00 -0000
> +++ patches/patch-cipher_sha256-armv8-aarch64-ce_S 14 Jan 2023 19:18:35 -0000
> @@ -0,0 +1,20 @@
> +Index: cipher/sha256-armv8-aarch64-ce.S
> +--- cipher/sha256-armv8-aarch64-ce.S.orig
> ++++ cipher/sha256-armv8-aarch64-ce.S
> +@@ -25,7 +25,7 @@
> +
> + .cpu generic+simd+crypto
> +
> +-.text
> ++.rodata
> +
> +
> + /* Constants */
> +@@ -100,6 +100,7 @@ gcry_sha256_aarch64_ce_K:
> +
> + #define CLEAR_REG(reg) movi reg.16b, #0;
> +
> ++.text
> +
> + /*
> + * unsigned int
> Index: patches/patch-cipher_sm3-aarch64_S
> ===================================================================
> RCS file: patches/patch-cipher_sm3-aarch64_S
> diff -N patches/patch-cipher_sm3-aarch64_S
> --- /dev/null 1 Jan 1970 00:00:00 -0000
> +++ patches/patch-cipher_sm3-aarch64_S 14 Jan 2023 19:18:35 -0000
> @@ -0,0 +1,20 @@
> +Index: cipher/sm3-aarch64.S
> +--- cipher/sm3-aarch64.S.orig
> ++++ cipher/sm3-aarch64.S
> +@@ -29,7 +29,7 @@
> +
> + /* Constants */
> +
> +-.text
> ++.rodata
> + .align 4
> + ELF(.type _gcry_sm3_aarch64_consts,@object)
> + _gcry_sm3_aarch64_consts:
> +@@ -383,6 +383,7 @@ ELF(.size _gcry_sm3_aarch64_consts,.-_gcry_sm3_aarch64
> + #define SCHED_W_W5W0W1W2W3W4_3(iop_num, round) \
> + SCHED_W_3_##iop_num(round, W5, W0, W1, W2, W3, W4)
> +
> ++.text
> + /*
> + * Transform nblks*64 bytes (nblks*16 32-bit words) at DATA.
> + *
--
Antoine