From 1840b7c35e822c20bacc562d580e716e9920c711 Mon Sep 17 00:00:00 2001 From: Islam Ragimov Date: Mon, 23 Jun 2025 18:03:28 +0100 Subject: [PATCH 1/2] lib: [sha1] secure zeroing of stack in safe data mode - issue was found in multi-buffer sha1 impl for aarch64 neon. - issue: Part of plain/ciphertext is present on the stack. - fix: Addition of clearing sensitive data from the stack. Signed-off-by: Islam Ragimov --- lib/aarch64/sha1_mb_neon_x4.S | 38 +++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/lib/aarch64/sha1_mb_neon_x4.S b/lib/aarch64/sha1_mb_neon_x4.S index edf47482..eecc430c 100644 --- a/lib/aarch64/sha1_mb_neon_x4.S +++ b/lib/aarch64/sha1_mb_neon_x4.S @@ -158,6 +158,44 @@ sha1_mb_neon_x4: stp data0, data1, [args, #320] stp data2, data3, [args, #336] +#ifdef SAFE_DATA + // Secure zeroing of temporary area: + mov x15, sp // Start zeroing from current sp (lower bound) + sub tmp, savedsp, x15 // Compute length to clear (in bytes) + + // Bulk zeroing in 8-byte chunks + cmp tmp, #8 // If less than 8, skip to tail loop + blt .zeroing_tail + + // Alignment check: sp must be 8-byte aligned + ands x16, x15, #7 // x16 = x15 % 8; NOTE(review): to align, head should be 8 - x16 bytes; benign in practice as sp is 16-byte aligned (AAPCS64) so x16 == 0 + cbz x16, .zeroing_loop_8byte // Start zeroing 8-byte chunks if aligned + + // Zeroing head byte by byte until aligned + sub tmp, tmp, x16 // Reducing length by unaligned head size +.zeroing_head: + strb wzr, [x15], #1 + subs x16, x16, #1 + b.ne .zeroing_head + +.zeroing_loop_8byte: + str xzr, [x15], #8 + subs tmp, tmp, #8 + cmp tmp, #8 + bge .zeroing_loop_8byte + +.zeroing_tail: + // Zero remaining 0-7 bytes, if any + cbz tmp, .zeroing_done + +.zeroing_loop_byte: + strb wzr, [x15], #1 + subs tmp, tmp, #1 + b.ne .zeroing_loop_byte + +.zeroing_done: +#endif /* SAFE_DATA */ + mov tmp, digest0 st4 {VA.s,VB.s,VC.s,VD.s}[0],[tmp],#16 st1 {VE.s}[0],[tmp] -- GitLab From 2738dbe2f19f44e08c44d6005dbeaa3de4c1fdee Mon Sep 17 00:00:00 2001 From: Islam Ragimov Date: Tue, 1 Apr 2025 15:33:07 +0100 Subject: [PATCH 2/2] lib: 
[aarch64] add safe_memcpy for aarch64 platforms - replace memcpy() with safe_memcpy() in imb_hmac_ipad_opad() Signed-off-by: Islam Ragimov --- lib/Makefile | 3 +- lib/aarch64/memcpy_aarch64.S | 63 ++++++++++++++++++++++++++++++++++++ lib/include/memcpy.h | 4 +++ lib/x86_64/hmac_ipad_opad.c | 6 ---- 4 files changed, 69 insertions(+), 7 deletions(-) create mode 100644 lib/aarch64/memcpy_aarch64.S diff --git a/lib/Makefile b/lib/Makefile index 55b35a70..4c5d09fd 100644 --- a/lib/Makefile +++ b/lib/Makefile @@ -388,7 +388,8 @@ asm_generic_lib_objs := \ sha1_mb_neon_x4.o \ sha1_sb_aarch64_x1.o \ zuc_simd.o \ - zuc_simd_no_aesni.o + zuc_simd_no_aesni.o\ + memcpy_aarch64.o else c_lib_objs := \ mb_mgr_avx.o \ diff --git a/lib/aarch64/memcpy_aarch64.S b/lib/aarch64/memcpy_aarch64.S new file mode 100644 index 00000000..3ac44b59 --- /dev/null +++ b/lib/aarch64/memcpy_aarch64.S @@ -0,0 +1,63 @@ +/********************************************************************** + SPDX-FileCopyrightText: Copyright 2025 Arm Limited and/or its + affiliates + SPDX-License-Identifier: BSD-3-Clause + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + * Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +**********************************************************************/ + + .arch armv8-a + +/* void safe_memcpy(void *dst, const void *src, const size_t size); + * function is declared in lib/include/memcpy.h */ + + .global safe_memcpy + .type safe_memcpy, %function + + dst .req x0 + src .req x1 + size .req x2 + +safe_memcpy: + cbz size, .return + +.cpy_loop: + ldrb w3, [src], #1 // load byte from source, increment src + strb w3, [dst], #1 // store byte to destination, increment dst + subs size, size, #1 // decrement size value + b.ne .cpy_loop + +.return: + ret + + .size safe_memcpy, .-safe_memcpy + + .unreq dst + .unreq src + .unreq size diff --git a/lib/include/memcpy.h b/lib/include/memcpy.h index d4fc39db..0aee72ee 100644 --- a/lib/include/memcpy.h +++ b/lib/include/memcpy.h @@ -28,6 +28,8 @@ #ifndef MEMCPY_H #define MEMCPY_H +#ifndef __aarch64__ + /* Memcpy up to 16 bytes with SSE instructions */ void memcpy_fn_sse_16(void *dst, const void *src, const size_t size); @@ -40,6 +42,8 @@ memcpy_fn_avx_16(void *dst, const void *src, const size_t size); void memcpy_fn_sse_128(void *dst, const void *src); +#endif /* __aarch64__ */ + /* Basic memcpy that doesn't use stack */ void safe_memcpy(void *dst, 
const void *src, const size_t size); diff --git a/lib/x86_64/hmac_ipad_opad.c b/lib/x86_64/hmac_ipad_opad.c index 4c4272f2..86498355 100644 --- a/lib/x86_64/hmac_ipad_opad.c +++ b/lib/x86_64/hmac_ipad_opad.c @@ -33,9 +33,7 @@ #include #include "include/error.h" -#ifndef __aarch64__ #include "include/memcpy.h" -#endif #include "include/arch_sse_type1.h" /* sm3_one_block_sse(), sm3_msg_sse() */ @@ -109,11 +107,7 @@ imb_hmac_ipad_opad(IMB_MGR *mb_mgr, const IMB_HASH_ALG sha_type, const void *pke /* prepare the key */ if (local_key_len == key_len) { -#ifndef __aarch64__ safe_memcpy(key, pkey, key_len); -#else /* __aarhc64 */ - memcpy(key, pkey, key_len); -#endif /* __aarch64__ */ } else switch (sha_type) { case IMB_AUTH_HMAC_SHA_1: -- GitLab