From 0ab09ab1470f3b96d7965afa728865485eb7666d Mon Sep 17 00:00:00 2001 From: Sean Parkinson Date: Wed, 10 Dec 2025 15:36:07 +1000 Subject: [PATCH 01/27] PPC32 SHA-256 ASM: support compiling for PIC When compiling for PIC, registers 30 and 31 are not always available. An alternative implementation has been added that does not use them and instead keeps the needed values on the stack. A small code size version has been implemented as well. --- .wolfssl_known_macro_extras | 1 + wolfcrypt/src/port/ppc32/ppc32-sha256-asm.S | 3239 ++++++++++++++++ wolfcrypt/src/port/ppc32/ppc32-sha256-asm_c.c | 3248 +++++++++++++++++ 3 files changed, 6488 insertions(+) diff --git a/.wolfssl_known_macro_extras b/.wolfssl_known_macro_extras index ca9dc8cce..64e9d0e7f 100644 --- a/.wolfssl_known_macro_extras +++ b/.wolfssl_known_macro_extras @@ -1025,6 +1025,7 @@ __MWERKS__ __NT__ __OS2__ __OpenBSD__ +__PIC__ __PIE__ __POWERPC__ __PPC__ diff --git a/wolfcrypt/src/port/ppc32/ppc32-sha256-asm.S b/wolfcrypt/src/port/ppc32/ppc32-sha256-asm.S index e2fdd6473..bd55952be 100644 --- a/wolfcrypt/src/port/ppc32/ppc32-sha256-asm.S +++ b/wolfcrypt/src/port/ppc32/ppc32-sha256-asm.S @@ -1287,6 +1287,7 @@ L_SHA256_transform_len_k: .long 0xa4506ceb .long 0xbef9a3f7 .long 0xc67178f2 +#ifndef __PIC__ .section ".text" .align 4 .globl Transform_Sha256_Len @@ -4503,6 +4504,3244 @@ L_SHA256_transform_len_after_blk_15: addi 1, 1, 0x4c blr .size Transform_Sha256_Len,.-Transform_Sha256_Len +#else +/* PIC version not using register 30 or 31 */ + .section ".text" + .align 4 + .globl Transform_Sha256_Len + .type Transform_Sha256_Len, @function +Transform_Sha256_Len: + stwu 1, -68(1) + mflr 0 + stw 0, 64(1) + stw 14, 0(1) + stw 15, 4(1) + stw 16, 8(1) + stw 17, 12(1) + stw 18, 16(1) + stw 19, 20(1) + stw 20, 24(1) + stw 21, 28(1) + stw 22, 32(1) + stw 23, 36(1) + stw 24, 40(1) + stw 25, 44(1) + stw 26, 48(1) + stw 27, 52(1) + stw 28, 56(1) + stw 29, 60(1) + srwi 5, 5, 6 + lis 6, L_SHA256_transform_len_k@ha + la 6, L_SHA256_transform_len_k@l(6) +#ifndef WOLFSSL_PPC32_ASM_SMALL + subi 1, 1, 8 + stw 3, 0(1) + stw 4, 4(1) + mtctr 5 + # Copy digest to add in at end + lwz 0, 0(3) + lwz 4, 4(3) + lwz 7, 8(3) + lwz 8, 12(3) + lwz 9, 16(3) + lwz 10, 20(3) + lwz 11, 24(3) + lwz 12, 28(3) + lwz 3, 4(1) + # Start of loop processing a block +L_SHA256_transform_len_begin: + # Load W - 64 bytes + lwz 14, 0(3) + lwz 15, 4(3) + lwz 16, 8(3) + lwz 17, 12(3) + lwz 18, 16(3) + lwz 19, 20(3) + lwz 20, 24(3) + lwz 21, 28(3) + lwz 22, 32(3) + lwz 23, 36(3) + lwz 24, 40(3) + lwz 25, 44(3) + lwz 26, 48(3) + lwz 27, 52(3) + lwz 28, 56(3) + lwz 29, 60(3) + # Start of 16 rounds + # Round 0 + rotlwi 3, 9, 26 + rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 0(6) + add 12, 12, 14 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + # Calc new W[0] + rotlwi 3, 15, 25 + rotlwi 5, 15, 14 + xor 3, 3, 5 + srwi 5, 15, 3 + xor 3, 3, 5 + add 14, 14, 3 + rotlwi 3, 28, 15 + rotlwi 5, 28, 13 + xor 3, 3, 5 + srwi 5, 28, 10 + xor 3, 3, 5 + add 14, 14, 3 + add 14, 14, 23 + # Round 1 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 4(6) + add 11, 11, 15 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3
+ xor 5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + # Calc new W[1] + rotlwi 3, 16, 25 + rotlwi 5, 16, 14 + xor 3, 3, 5 + srwi 5, 16, 3 + xor 3, 3, 5 + add 15, 15, 3 + rotlwi 3, 29, 15 + rotlwi 5, 29, 13 + xor 3, 3, 5 + srwi 5, 29, 10 + xor 3, 3, 5 + add 15, 15, 3 + add 15, 15, 24 + # Round 2 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 8(6) + add 10, 10, 16 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 + rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + # Calc new W[2] + rotlwi 3, 17, 25 + rotlwi 5, 17, 14 + xor 3, 3, 5 + srwi 5, 17, 3 + xor 3, 3, 5 + add 16, 16, 3 + rotlwi 3, 14, 15 + rotlwi 5, 14, 13 + xor 3, 3, 5 + srwi 5, 14, 10 + xor 3, 3, 5 + add 16, 16, 3 + add 16, 16, 25 + # Round 3 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 9, 3 + lwz 3, 12(6) + add 9, 9, 17 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + # Calc new W[3] + rotlwi 3, 18, 25 + rotlwi 5, 18, 14 + xor 3, 3, 5 + srwi 5, 18, 3 + xor 3, 3, 5 + add 17, 17, 3 + rotlwi 3, 15, 15 + rotlwi 5, 15, 13 + xor 3, 3, 5 + srwi 5, 15, 10 + xor 3, 3, 5 + add 17, 17, 3 + add 17, 17, 26 + # Round 4 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 16(6) + add 8, 8, 18 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + # Calc new W[4] + rotlwi 3, 19, 25 + rotlwi 5, 19, 14 + xor 3, 3, 5 + srwi 5, 19, 3 + xor 3, 3, 5 + add 18, 18, 3 + rotlwi 3, 16, 15 + rotlwi 5, 16, 13 + xor 3, 3, 5 + srwi 5, 16, 10 + xor 3, 3, 5 + add 18, 18, 3 + add 18, 18, 27 + # Round 5 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 20(6) + add 7, 7, 19 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + # Calc new W[5] + rotlwi 3, 20, 25 + rotlwi 5, 20, 14 + xor 3, 3, 5 + srwi 5, 20, 3 + xor 3, 3, 5 + add 19, 19, 3 + rotlwi 3, 17, 15 + rotlwi 5, 17, 13 + xor 3, 3, 5 + srwi 5, 17, 10 + xor 3, 3, 5 + add 19, 19, 3 + add 19, 19, 28 + # Round 6 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 24(6) + add 4, 4, 20 + add 4, 4, 3 + add 10, 10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + # Calc new W[6] + rotlwi 3, 21, 25 + rotlwi 5, 21, 14 + xor 3, 3, 5 + srwi 5, 21, 3 + xor 3, 3, 5 + add 20, 20, 3 + rotlwi 3, 18, 15 + rotlwi 5, 18, 13 + xor 3, 3, 5 + srwi 5, 18, 10 + xor 3, 3, 5 + add 20, 20, 3 + add 20, 20, 29 + # Round 7 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 
3 + xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 28(6) + add 0, 0, 21 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + # Calc new W[7] + rotlwi 3, 22, 25 + rotlwi 5, 22, 14 + xor 3, 3, 5 + srwi 5, 22, 3 + xor 3, 3, 5 + add 21, 21, 3 + rotlwi 3, 19, 15 + rotlwi 5, 19, 13 + xor 3, 3, 5 + srwi 5, 19, 10 + xor 3, 3, 5 + add 21, 21, 3 + add 21, 21, 14 + # Round 8 + rotlwi 3, 9, 26 + rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 32(6) + add 12, 12, 22 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + # Calc new W[8] + rotlwi 3, 23, 25 + rotlwi 5, 23, 14 + xor 3, 3, 5 + srwi 5, 23, 3 + xor 3, 3, 5 + add 22, 22, 3 + rotlwi 3, 20, 15 + rotlwi 5, 20, 13 + xor 3, 3, 5 + srwi 5, 20, 10 + xor 3, 3, 5 + add 22, 22, 3 + add 22, 22, 15 + # Round 9 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 36(6) + add 11, 11, 23 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3 + xor 5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + # Calc new W[9] + rotlwi 3, 24, 25 + rotlwi 5, 24, 14 + xor 3, 3, 5 + srwi 5, 24, 3 + xor 3, 3, 5 + add 23, 23, 3 + rotlwi 3, 21, 15 + rotlwi 5, 21, 13 + xor 3, 3, 5 + srwi 5, 21, 10 + xor 3, 3, 5 + add 23, 23, 3 + add 23, 23, 16 + # Round 10 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 40(6) + add 10, 10, 24 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 + rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + # Calc new W[10] + rotlwi 3, 25, 25 + rotlwi 5, 25, 14 + xor 3, 3, 5 + srwi 5, 25, 3 + xor 3, 3, 5 + add 24, 24, 3 + rotlwi 3, 22, 15 + rotlwi 5, 22, 13 + xor 3, 3, 5 + srwi 5, 22, 10 + xor 3, 3, 5 + add 24, 24, 3 + add 24, 24, 17 + # Round 11 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 9, 3 + lwz 3, 44(6) + add 9, 9, 25 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + # Calc new W[11] + rotlwi 3, 26, 25 + rotlwi 5, 26, 14 + xor 3, 3, 5 + srwi 5, 26, 3 + xor 3, 3, 5 + add 25, 25, 3 + rotlwi 3, 23, 15 + rotlwi 5, 23, 13 + xor 3, 3, 5 + srwi 5, 23, 10 + xor 3, 3, 5 + add 25, 25, 3 + add 25, 25, 18 + # Round 12 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 48(6) + add 8, 8, 26 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + # Calc new W[12] + rotlwi 3, 27, 25 + rotlwi 5, 27, 14 + xor 3, 3, 5 + srwi 5, 27, 3 + xor 3, 3, 
5 + add 26, 26, 3 + rotlwi 3, 24, 15 + rotlwi 5, 24, 13 + xor 3, 3, 5 + srwi 5, 24, 10 + xor 3, 3, 5 + add 26, 26, 3 + add 26, 26, 19 + # Round 13 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 52(6) + add 7, 7, 27 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + # Calc new W[13] + rotlwi 3, 28, 25 + rotlwi 5, 28, 14 + xor 3, 3, 5 + srwi 5, 28, 3 + xor 3, 3, 5 + add 27, 27, 3 + rotlwi 3, 25, 15 + rotlwi 5, 25, 13 + xor 3, 3, 5 + srwi 5, 25, 10 + xor 3, 3, 5 + add 27, 27, 3 + add 27, 27, 20 + # Round 14 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 56(6) + add 4, 4, 28 + add 4, 4, 3 + add 10, 10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + # Calc new W[14] + rotlwi 3, 29, 25 + rotlwi 5, 29, 14 + xor 3, 3, 5 + srwi 5, 29, 3 + xor 3, 3, 5 + add 28, 28, 3 + rotlwi 3, 26, 15 + rotlwi 5, 26, 13 + xor 3, 3, 5 + srwi 5, 26, 10 + xor 3, 3, 5 + add 28, 28, 3 + add 28, 28, 21 + # Round 15 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 3 + xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 60(6) + add 0, 0, 29 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + # Calc new W[15] + rotlwi 3, 14, 25 + rotlwi 5, 14, 14 + xor 3, 3, 5 + srwi 5, 14, 3 + xor 3, 3, 5 + add 29, 29, 3 + rotlwi 3, 27, 15 + rotlwi 5, 27, 13 + xor 3, 3, 5 + srwi 5, 27, 10 + xor 3, 3, 5 + add 29, 29, 3 + add 29, 29, 22 + addi 6, 6, 0x40 + # Round 0 + rotlwi 3, 9, 26 + rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 0(6) + add 12, 12, 14 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + # Calc new W[0] + rotlwi 3, 15, 25 + rotlwi 5, 15, 14 + xor 3, 3, 5 + srwi 5, 15, 3 + xor 3, 3, 5 + add 14, 14, 3 + rotlwi 3, 28, 15 + rotlwi 5, 28, 13 + xor 3, 3, 5 + srwi 5, 28, 10 + xor 3, 3, 5 + add 14, 14, 3 + add 14, 14, 23 + # Round 1 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 4(6) + add 11, 11, 15 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3 + xor 5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + # Calc new W[1] + rotlwi 3, 16, 25 + rotlwi 5, 16, 14 + xor 3, 3, 5 + srwi 5, 16, 3 + xor 3, 3, 5 + add 15, 15, 3 + rotlwi 3, 29, 15 + rotlwi 5, 29, 13 + xor 3, 3, 5 + srwi 5, 29, 10 + xor 3, 3, 5 + add 15, 15, 3 + add 15, 15, 24 + # Round 2 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 8(6) + add 10, 10, 16 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 
+ rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + # Calc new W[2] + rotlwi 3, 17, 25 + rotlwi 5, 17, 14 + xor 3, 3, 5 + srwi 5, 17, 3 + xor 3, 3, 5 + add 16, 16, 3 + rotlwi 3, 14, 15 + rotlwi 5, 14, 13 + xor 3, 3, 5 + srwi 5, 14, 10 + xor 3, 3, 5 + add 16, 16, 3 + add 16, 16, 25 + # Round 3 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 9, 3 + lwz 3, 12(6) + add 9, 9, 17 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + # Calc new W[3] + rotlwi 3, 18, 25 + rotlwi 5, 18, 14 + xor 3, 3, 5 + srwi 5, 18, 3 + xor 3, 3, 5 + add 17, 17, 3 + rotlwi 3, 15, 15 + rotlwi 5, 15, 13 + xor 3, 3, 5 + srwi 5, 15, 10 + xor 3, 3, 5 + add 17, 17, 3 + add 17, 17, 26 + # Round 4 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 16(6) + add 8, 8, 18 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + # Calc new W[4] + rotlwi 3, 19, 25 + rotlwi 5, 19, 14 + xor 3, 3, 5 + srwi 5, 19, 3 + xor 3, 3, 5 + add 18, 18, 3 + rotlwi 3, 16, 15 + rotlwi 5, 16, 13 + xor 3, 3, 5 + srwi 5, 16, 10 + xor 3, 3, 5 + add 18, 18, 3 + add 18, 18, 27 + # Round 5 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 20(6) + add 7, 7, 19 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + # Calc new W[5] + rotlwi 3, 20, 25 + rotlwi 5, 20, 14 + xor 3, 3, 5 + srwi 5, 20, 3 + xor 3, 3, 5 + add 19, 19, 3 + rotlwi 3, 17, 15 + rotlwi 5, 17, 13 + xor 3, 3, 5 + srwi 5, 17, 10 + xor 3, 3, 5 + add 19, 19, 3 + add 19, 19, 28 + # Round 6 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 24(6) + add 4, 4, 20 + add 4, 4, 3 + add 10, 10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + # Calc new W[6] + rotlwi 3, 21, 25 + rotlwi 5, 21, 14 + xor 3, 3, 5 + srwi 5, 21, 3 + xor 3, 3, 5 + add 20, 20, 3 + rotlwi 3, 18, 15 + rotlwi 5, 18, 13 + xor 3, 3, 5 + srwi 5, 18, 10 + xor 3, 3, 5 + add 20, 20, 3 + add 20, 20, 29 + # Round 7 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 3 + xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 28(6) + add 0, 0, 21 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + # Calc new W[7] + rotlwi 3, 22, 25 + rotlwi 5, 22, 14 + xor 3, 3, 5 + srwi 5, 22, 3 + xor 3, 3, 5 + add 21, 21, 3 + rotlwi 3, 19, 15 + rotlwi 5, 19, 13 + xor 3, 3, 5 + srwi 5, 19, 10 + xor 3, 3, 5 + add 21, 21, 3 + add 21, 21, 14 + # Round 8 + rotlwi 3, 9, 26 + 
rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 32(6) + add 12, 12, 22 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + # Calc new W[8] + rotlwi 3, 23, 25 + rotlwi 5, 23, 14 + xor 3, 3, 5 + srwi 5, 23, 3 + xor 3, 3, 5 + add 22, 22, 3 + rotlwi 3, 20, 15 + rotlwi 5, 20, 13 + xor 3, 3, 5 + srwi 5, 20, 10 + xor 3, 3, 5 + add 22, 22, 3 + add 22, 22, 15 + # Round 9 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 36(6) + add 11, 11, 23 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3 + xor 5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + # Calc new W[9] + rotlwi 3, 24, 25 + rotlwi 5, 24, 14 + xor 3, 3, 5 + srwi 5, 24, 3 + xor 3, 3, 5 + add 23, 23, 3 + rotlwi 3, 21, 15 + rotlwi 5, 21, 13 + xor 3, 3, 5 + srwi 5, 21, 10 + xor 3, 3, 5 + add 23, 23, 3 + add 23, 23, 16 + # Round 10 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 40(6) + add 10, 10, 24 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 + rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + # Calc new W[10] + rotlwi 3, 25, 25 + rotlwi 5, 25, 14 + xor 3, 3, 5 + srwi 5, 25, 3 + xor 3, 3, 5 + add 24, 24, 3 + rotlwi 3, 22, 15 + rotlwi 5, 22, 13 + xor 3, 3, 5 + srwi 5, 22, 10 + xor 3, 3, 5 + add 24, 24, 3 + add 24, 24, 17 + # Round 11 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 9, 3 + lwz 3, 44(6) + add 9, 9, 25 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + # Calc new W[11] + rotlwi 3, 26, 25 + rotlwi 5, 26, 14 + xor 3, 3, 5 + srwi 5, 26, 3 + xor 3, 3, 5 + add 25, 25, 3 + rotlwi 3, 23, 15 + rotlwi 5, 23, 13 + xor 3, 3, 5 + srwi 5, 23, 10 + xor 3, 3, 5 + add 25, 25, 3 + add 25, 25, 18 + # Round 12 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 48(6) + add 8, 8, 26 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + # Calc new W[12] + rotlwi 3, 27, 25 + rotlwi 5, 27, 14 + xor 3, 3, 5 + srwi 5, 27, 3 + xor 3, 3, 5 + add 26, 26, 3 + rotlwi 3, 24, 15 + rotlwi 5, 24, 13 + xor 3, 3, 5 + srwi 5, 24, 10 + xor 3, 3, 5 + add 26, 26, 3 + add 26, 26, 19 + # Round 13 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 52(6) + add 7, 7, 27 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + # Calc new W[13] + 
rotlwi 3, 28, 25 + rotlwi 5, 28, 14 + xor 3, 3, 5 + srwi 5, 28, 3 + xor 3, 3, 5 + add 27, 27, 3 + rotlwi 3, 25, 15 + rotlwi 5, 25, 13 + xor 3, 3, 5 + srwi 5, 25, 10 + xor 3, 3, 5 + add 27, 27, 3 + add 27, 27, 20 + # Round 14 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 56(6) + add 4, 4, 28 + add 4, 4, 3 + add 10, 10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + # Calc new W[14] + rotlwi 3, 29, 25 + rotlwi 5, 29, 14 + xor 3, 3, 5 + srwi 5, 29, 3 + xor 3, 3, 5 + add 28, 28, 3 + rotlwi 3, 26, 15 + rotlwi 5, 26, 13 + xor 3, 3, 5 + srwi 5, 26, 10 + xor 3, 3, 5 + add 28, 28, 3 + add 28, 28, 21 + # Round 15 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 3 + xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 60(6) + add 0, 0, 29 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + # Calc new W[15] + rotlwi 3, 14, 25 + rotlwi 5, 14, 14 + xor 3, 3, 5 + srwi 5, 14, 3 + xor 3, 3, 5 + add 29, 29, 3 + rotlwi 3, 27, 15 + rotlwi 5, 27, 13 + xor 3, 3, 5 + srwi 5, 27, 10 + xor 3, 3, 5 + add 29, 29, 3 + add 29, 29, 22 + addi 6, 6, 0x40 + # Round 0 + rotlwi 3, 9, 26 + rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 0(6) + add 12, 12, 14 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + # Calc new W[0] + rotlwi 3, 15, 25 + rotlwi 5, 15, 14 + xor 3, 3, 5 + srwi 5, 15, 3 + xor 3, 3, 5 + add 14, 14, 3 + rotlwi 3, 28, 15 + rotlwi 5, 28, 13 + xor 3, 3, 5 + srwi 5, 28, 10 + xor 3, 3, 5 + add 14, 14, 3 + add 14, 14, 23 + # Round 1 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 4(6) + add 11, 11, 15 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3 + xor 5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + # Calc new W[1] + rotlwi 3, 16, 25 + rotlwi 5, 16, 14 + xor 3, 3, 5 + srwi 5, 16, 3 + xor 3, 3, 5 + add 15, 15, 3 + rotlwi 3, 29, 15 + rotlwi 5, 29, 13 + xor 3, 3, 5 + srwi 5, 29, 10 + xor 3, 3, 5 + add 15, 15, 3 + add 15, 15, 24 + # Round 2 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 8(6) + add 10, 10, 16 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 + rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + # Calc new W[2] + rotlwi 3, 17, 25 + rotlwi 5, 17, 14 + xor 3, 3, 5 + srwi 5, 17, 3 + xor 3, 3, 5 + add 16, 16, 3 + rotlwi 3, 14, 15 + rotlwi 5, 14, 13 + xor 3, 3, 5 + srwi 5, 14, 10 + xor 3, 3, 5 + add 16, 16, 3 + add 16, 16, 25 + # Round 3 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 
9, 3 + lwz 3, 12(6) + add 9, 9, 17 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + # Calc new W[3] + rotlwi 3, 18, 25 + rotlwi 5, 18, 14 + xor 3, 3, 5 + srwi 5, 18, 3 + xor 3, 3, 5 + add 17, 17, 3 + rotlwi 3, 15, 15 + rotlwi 5, 15, 13 + xor 3, 3, 5 + srwi 5, 15, 10 + xor 3, 3, 5 + add 17, 17, 3 + add 17, 17, 26 + # Round 4 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 16(6) + add 8, 8, 18 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + # Calc new W[4] + rotlwi 3, 19, 25 + rotlwi 5, 19, 14 + xor 3, 3, 5 + srwi 5, 19, 3 + xor 3, 3, 5 + add 18, 18, 3 + rotlwi 3, 16, 15 + rotlwi 5, 16, 13 + xor 3, 3, 5 + srwi 5, 16, 10 + xor 3, 3, 5 + add 18, 18, 3 + add 18, 18, 27 + # Round 5 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 20(6) + add 7, 7, 19 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + # Calc new W[5] + rotlwi 3, 20, 25 + rotlwi 5, 20, 14 + xor 3, 3, 5 + srwi 5, 20, 3 + xor 3, 3, 5 + add 19, 19, 3 + rotlwi 3, 17, 15 + rotlwi 5, 17, 13 + xor 3, 3, 5 + srwi 5, 17, 10 + xor 3, 3, 5 + add 19, 19, 3 + add 19, 19, 28 + # Round 6 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 24(6) + add 4, 4, 20 + add 4, 4, 3 + add 10, 10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + # Calc new W[6] + rotlwi 3, 21, 25 + rotlwi 5, 21, 14 + xor 3, 3, 5 + srwi 5, 21, 3 + xor 3, 3, 5 + add 20, 20, 3 + rotlwi 3, 18, 15 + rotlwi 5, 18, 13 + xor 3, 3, 5 + srwi 5, 18, 10 + xor 3, 3, 5 + add 20, 20, 3 + add 20, 20, 29 + # Round 7 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 3 + xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 28(6) + add 0, 0, 21 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + # Calc new W[7] + rotlwi 3, 22, 25 + rotlwi 5, 22, 14 + xor 3, 3, 5 + srwi 5, 22, 3 + xor 3, 3, 5 + add 21, 21, 3 + rotlwi 3, 19, 15 + rotlwi 5, 19, 13 + xor 3, 3, 5 + srwi 5, 19, 10 + xor 3, 3, 5 + add 21, 21, 3 + add 21, 21, 14 + # Round 8 + rotlwi 3, 9, 26 + rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 32(6) + add 12, 12, 22 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + # Calc new W[8] + rotlwi 3, 23, 25 + rotlwi 5, 23, 14 + xor 3, 3, 5 + srwi 5, 23, 3 + xor 3, 3, 5 + add 22, 22, 3 + rotlwi 3, 20, 15 + rotlwi 5, 20, 13 + xor 3, 3, 5 + srwi 5, 
20, 10 + xor 3, 3, 5 + add 22, 22, 3 + add 22, 22, 15 + # Round 9 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 36(6) + add 11, 11, 23 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3 + xor 5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + # Calc new W[9] + rotlwi 3, 24, 25 + rotlwi 5, 24, 14 + xor 3, 3, 5 + srwi 5, 24, 3 + xor 3, 3, 5 + add 23, 23, 3 + rotlwi 3, 21, 15 + rotlwi 5, 21, 13 + xor 3, 3, 5 + srwi 5, 21, 10 + xor 3, 3, 5 + add 23, 23, 3 + add 23, 23, 16 + # Round 10 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 40(6) + add 10, 10, 24 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 + rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + # Calc new W[10] + rotlwi 3, 25, 25 + rotlwi 5, 25, 14 + xor 3, 3, 5 + srwi 5, 25, 3 + xor 3, 3, 5 + add 24, 24, 3 + rotlwi 3, 22, 15 + rotlwi 5, 22, 13 + xor 3, 3, 5 + srwi 5, 22, 10 + xor 3, 3, 5 + add 24, 24, 3 + add 24, 24, 17 + # Round 11 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 9, 3 + lwz 3, 44(6) + add 9, 9, 25 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + # Calc new W[11] + rotlwi 3, 26, 25 + rotlwi 5, 26, 14 + xor 3, 3, 5 + srwi 5, 26, 3 + xor 3, 3, 5 + add 25, 25, 3 + rotlwi 3, 23, 15 + rotlwi 5, 23, 13 + xor 3, 3, 5 + srwi 5, 23, 10 + xor 3, 3, 5 + add 25, 25, 3 + add 25, 25, 18 + # Round 12 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 48(6) + add 8, 8, 26 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + # Calc new W[12] + rotlwi 3, 27, 25 + rotlwi 5, 27, 14 + xor 3, 3, 5 + srwi 5, 27, 3 + xor 3, 3, 5 + add 26, 26, 3 + rotlwi 3, 24, 15 + rotlwi 5, 24, 13 + xor 3, 3, 5 + srwi 5, 24, 10 + xor 3, 3, 5 + add 26, 26, 3 + add 26, 26, 19 + # Round 13 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 52(6) + add 7, 7, 27 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + # Calc new W[13] + rotlwi 3, 28, 25 + rotlwi 5, 28, 14 + xor 3, 3, 5 + srwi 5, 28, 3 + xor 3, 3, 5 + add 27, 27, 3 + rotlwi 3, 25, 15 + rotlwi 5, 25, 13 + xor 3, 3, 5 + srwi 5, 25, 10 + xor 3, 3, 5 + add 27, 27, 3 + add 27, 27, 20 + # Round 14 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 56(6) + add 4, 4, 28 + add 4, 4, 3 + add 10, 10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 
8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + # Calc new W[14] + rotlwi 3, 29, 25 + rotlwi 5, 29, 14 + xor 3, 3, 5 + srwi 5, 29, 3 + xor 3, 3, 5 + add 28, 28, 3 + rotlwi 3, 26, 15 + rotlwi 5, 26, 13 + xor 3, 3, 5 + srwi 5, 26, 10 + xor 3, 3, 5 + add 28, 28, 3 + add 28, 28, 21 + # Round 15 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 3 + xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 60(6) + add 0, 0, 29 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + # Calc new W[15] + rotlwi 3, 14, 25 + rotlwi 5, 14, 14 + xor 3, 3, 5 + srwi 5, 14, 3 + xor 3, 3, 5 + add 29, 29, 3 + rotlwi 3, 27, 15 + rotlwi 5, 27, 13 + xor 3, 3, 5 + srwi 5, 27, 10 + xor 3, 3, 5 + add 29, 29, 3 + add 29, 29, 22 + addi 6, 6, 0x40 + # Round 0 + rotlwi 3, 9, 26 + rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 0(6) + add 12, 12, 14 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + # Round 1 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 4(6) + add 11, 11, 15 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3 + xor 5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + # Round 2 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 8(6) + add 10, 10, 16 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 + rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + # Round 3 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 9, 3 + lwz 3, 12(6) + add 9, 9, 17 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + # Round 4 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 16(6) + add 8, 8, 18 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + # Round 5 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 20(6) + add 7, 7, 19 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + # Round 6 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 24(6) + add 4, 4, 20 + add 4, 4, 3 + add 10, 
10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + # Round 7 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 3 + xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 28(6) + add 0, 0, 21 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + # Round 8 + rotlwi 3, 9, 26 + rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 32(6) + add 12, 12, 22 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + # Round 9 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 36(6) + add 11, 11, 23 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3 + xor 5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + # Round 10 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 40(6) + add 10, 10, 24 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 + rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + # Round 11 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 9, 3 + lwz 3, 44(6) + add 9, 9, 25 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + # Round 12 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 48(6) + add 8, 8, 26 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + # Round 13 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 52(6) + add 7, 7, 27 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + # Round 14 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 56(6) + add 4, 4, 28 + add 4, 4, 3 + add 10, 10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + # Round 15 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 3 + xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 60(6) + add 
0, 0, 29 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + subi 6, 6, 0xc0 + lwz 3, 0(1) + # Add in digest from start + lwz 5, 0(3) + add 0, 0, 5 + lwz 5, 4(3) + add 4, 4, 5 + lwz 5, 8(3) + add 7, 7, 5 + lwz 5, 12(3) + add 8, 8, 5 + lwz 5, 16(3) + add 9, 9, 5 + lwz 5, 20(3) + add 10, 10, 5 + lwz 5, 24(3) + add 11, 11, 5 + lwz 5, 28(3) + add 12, 12, 5 + stw 0, 0(3) + stw 4, 4(3) + stw 7, 8(3) + stw 8, 12(3) + stw 9, 16(3) + stw 10, 20(3) + stw 11, 24(3) + stw 12, 28(3) + lwz 3, 4(1) + addi 3, 3, 0x40 + stw 3, 4(1) + bdnz L_SHA256_transform_len_begin + addi 1, 1, 8 +#else + subi 1, 1, 12 + stw 3, 0(1) + stw 4, 4(1) + stw 5, 8(1) + # Copy digest to add in at end + lwz 0, 0(3) + lwz 4, 4(3) + lwz 7, 8(3) + lwz 8, 12(3) + lwz 9, 16(3) + lwz 10, 20(3) + lwz 11, 24(3) + lwz 12, 28(3) + lwz 3, 4(1) + # Start of loop processing a block +L_SHA256_transform_len_begin: + # Load W - 64 bytes + lwz 14, 0(3) + lwz 15, 4(3) + lwz 16, 8(3) + lwz 17, 12(3) + lwz 18, 16(3) + lwz 19, 20(3) + lwz 20, 24(3) + lwz 21, 28(3) + lwz 22, 32(3) + lwz 23, 36(3) + lwz 24, 40(3) + lwz 25, 44(3) + lwz 26, 48(3) + lwz 27, 52(3) + lwz 28, 56(3) + lwz 29, 60(3) + li 3, 4 + mtctr 3 + # Start of 16 rounds +L_SHA256_transform_len_start: + # Round 0 + rotlwi 3, 9, 26 + rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 0(6) + add 12, 12, 14 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_0 + # Calc new W[0] + rotlwi 3, 15, 25 + rotlwi 5, 15, 14 + xor 3, 3, 5 + srwi 5, 15, 3 + xor 3, 3, 5 + add 14, 14, 3 + rotlwi 3, 28, 15 + rotlwi 5, 28, 13 + xor 3, 3, 5 + srwi 5, 28, 10 + xor 3, 3, 5 + add 14, 14, 3 + add 14, 14, 23 +L_SHA256_transform_len_after_blk_0: + # Round 1 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 4(6) + add 11, 11, 15 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3 + xor 5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_1 + # Calc new W[1] + rotlwi 3, 16, 25 + rotlwi 5, 16, 14 + xor 3, 3, 5 + srwi 5, 16, 3 + xor 3, 3, 5 + add 15, 15, 3 + rotlwi 3, 29, 15 + rotlwi 5, 29, 13 + xor 3, 3, 5 + srwi 5, 29, 10 + xor 3, 3, 5 + add 15, 15, 3 + add 15, 15, 24 +L_SHA256_transform_len_after_blk_1: + # Round 2 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 8(6) + add 10, 10, 16 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 + rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_2 + # Calc new W[2] + rotlwi 3, 17, 25 + rotlwi 5, 17, 14 + xor 3, 3, 5 + srwi 5, 17, 3 + xor 3, 3, 5 + add 16, 16, 3 + rotlwi 3, 14, 15 + rotlwi 5, 14, 13 + xor 3, 3, 5 + srwi 5, 14, 10 + xor 3, 3, 5 + add 16, 16, 3 + add 16, 16, 
25 +L_SHA256_transform_len_after_blk_2: + # Round 3 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 9, 3 + lwz 3, 12(6) + add 9, 9, 17 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_3 + # Calc new W[3] + rotlwi 3, 18, 25 + rotlwi 5, 18, 14 + xor 3, 3, 5 + srwi 5, 18, 3 + xor 3, 3, 5 + add 17, 17, 3 + rotlwi 3, 15, 15 + rotlwi 5, 15, 13 + xor 3, 3, 5 + srwi 5, 15, 10 + xor 3, 3, 5 + add 17, 17, 3 + add 17, 17, 26 +L_SHA256_transform_len_after_blk_3: + # Round 4 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 16(6) + add 8, 8, 18 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_4 + # Calc new W[4] + rotlwi 3, 19, 25 + rotlwi 5, 19, 14 + xor 3, 3, 5 + srwi 5, 19, 3 + xor 3, 3, 5 + add 18, 18, 3 + rotlwi 3, 16, 15 + rotlwi 5, 16, 13 + xor 3, 3, 5 + srwi 5, 16, 10 + xor 3, 3, 5 + add 18, 18, 3 + add 18, 18, 27 +L_SHA256_transform_len_after_blk_4: + # Round 5 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 20(6) + add 7, 7, 19 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_5 + # Calc new W[5] + rotlwi 3, 20, 25 + rotlwi 5, 20, 14 + xor 3, 3, 5 + srwi 5, 20, 3 + xor 3, 3, 5 + add 19, 19, 3 + rotlwi 3, 17, 15 + rotlwi 5, 17, 13 + xor 3, 3, 5 + srwi 5, 17, 10 + xor 3, 3, 5 + add 19, 19, 3 + add 19, 19, 28 +L_SHA256_transform_len_after_blk_5: + # Round 6 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 24(6) + add 4, 4, 20 + add 4, 4, 3 + add 10, 10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_6 + # Calc new W[6] + rotlwi 3, 21, 25 + rotlwi 5, 21, 14 + xor 3, 3, 5 + srwi 5, 21, 3 + xor 3, 3, 5 + add 20, 20, 3 + rotlwi 3, 18, 15 + rotlwi 5, 18, 13 + xor 3, 3, 5 + srwi 5, 18, 10 + xor 3, 3, 5 + add 20, 20, 3 + add 20, 20, 29 +L_SHA256_transform_len_after_blk_6: + # Round 7 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 3 + xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 28(6) + add 0, 0, 21 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_7 + # Calc new W[7] + rotlwi 3, 22, 25 + rotlwi 5, 22, 14 + xor 3, 3, 5 + srwi 5, 22, 3 + xor 3, 3, 5 + add 21, 21, 3 + 
rotlwi 3, 19, 15 + rotlwi 5, 19, 13 + xor 3, 3, 5 + srwi 5, 19, 10 + xor 3, 3, 5 + add 21, 21, 3 + add 21, 21, 14 +L_SHA256_transform_len_after_blk_7: + # Round 8 + rotlwi 3, 9, 26 + rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 32(6) + add 12, 12, 22 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_8 + # Calc new W[8] + rotlwi 3, 23, 25 + rotlwi 5, 23, 14 + xor 3, 3, 5 + srwi 5, 23, 3 + xor 3, 3, 5 + add 22, 22, 3 + rotlwi 3, 20, 15 + rotlwi 5, 20, 13 + xor 3, 3, 5 + srwi 5, 20, 10 + xor 3, 3, 5 + add 22, 22, 3 + add 22, 22, 15 +L_SHA256_transform_len_after_blk_8: + # Round 9 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 36(6) + add 11, 11, 23 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3 + xor 5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_9 + # Calc new W[9] + rotlwi 3, 24, 25 + rotlwi 5, 24, 14 + xor 3, 3, 5 + srwi 5, 24, 3 + xor 3, 3, 5 + add 23, 23, 3 + rotlwi 3, 21, 15 + rotlwi 5, 21, 13 + xor 3, 3, 5 + srwi 5, 21, 10 + xor 3, 3, 5 + add 23, 23, 3 + add 23, 23, 16 +L_SHA256_transform_len_after_blk_9: + # Round 10 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 40(6) + add 10, 10, 24 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 + rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_10 + # Calc new W[10] + rotlwi 3, 25, 25 + rotlwi 5, 25, 14 + xor 3, 3, 5 + srwi 5, 25, 3 + xor 3, 3, 5 + add 24, 24, 3 + rotlwi 3, 22, 15 + rotlwi 5, 22, 13 + xor 3, 3, 5 + srwi 5, 22, 10 + xor 3, 3, 5 + add 24, 24, 3 + add 24, 24, 17 +L_SHA256_transform_len_after_blk_10: + # Round 11 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 9, 3 + lwz 3, 44(6) + add 9, 9, 25 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_11 + # Calc new W[11] + rotlwi 3, 26, 25 + rotlwi 5, 26, 14 + xor 3, 3, 5 + srwi 5, 26, 3 + xor 3, 3, 5 + add 25, 25, 3 + rotlwi 3, 23, 15 + rotlwi 5, 23, 13 + xor 3, 3, 5 + srwi 5, 23, 10 + xor 3, 3, 5 + add 25, 25, 3 + add 25, 25, 18 +L_SHA256_transform_len_after_blk_11: + # Round 12 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 48(6) + add 8, 8, 26 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, 
L_SHA256_transform_len_after_blk_12 + # Calc new W[12] + rotlwi 3, 27, 25 + rotlwi 5, 27, 14 + xor 3, 3, 5 + srwi 5, 27, 3 + xor 3, 3, 5 + add 26, 26, 3 + rotlwi 3, 24, 15 + rotlwi 5, 24, 13 + xor 3, 3, 5 + srwi 5, 24, 10 + xor 3, 3, 5 + add 26, 26, 3 + add 26, 26, 19 +L_SHA256_transform_len_after_blk_12: + # Round 13 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 52(6) + add 7, 7, 27 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_13 + # Calc new W[13] + rotlwi 3, 28, 25 + rotlwi 5, 28, 14 + xor 3, 3, 5 + srwi 5, 28, 3 + xor 3, 3, 5 + add 27, 27, 3 + rotlwi 3, 25, 15 + rotlwi 5, 25, 13 + xor 3, 3, 5 + srwi 5, 25, 10 + xor 3, 3, 5 + add 27, 27, 3 + add 27, 27, 20 +L_SHA256_transform_len_after_blk_13: + # Round 14 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 56(6) + add 4, 4, 28 + add 4, 4, 3 + add 10, 10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_14 + # Calc new W[14] + rotlwi 3, 29, 25 + rotlwi 5, 29, 14 + xor 3, 3, 5 + srwi 5, 29, 3 + xor 3, 3, 5 + add 28, 28, 3 + rotlwi 3, 26, 15 + rotlwi 5, 26, 13 + xor 3, 3, 5 + srwi 5, 26, 10 + xor 3, 3, 5 + add 28, 28, 3 + add 28, 28, 21 +L_SHA256_transform_len_after_blk_14: + # Round 15 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 3 + xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 60(6) + add 0, 0, 29 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_15 + # Calc new W[15] + rotlwi 3, 14, 25 + rotlwi 5, 14, 14 + xor 3, 3, 5 + srwi 5, 14, 3 + xor 3, 3, 5 + add 29, 29, 3 + rotlwi 3, 27, 15 + rotlwi 5, 27, 13 + xor 3, 3, 5 + srwi 5, 27, 10 + xor 3, 3, 5 + add 29, 29, 3 + add 29, 29, 22 +L_SHA256_transform_len_after_blk_15: + addi 6, 6, 0x40 + bdnz L_SHA256_transform_len_start + subi 6, 6, 0x100 + lwz 3, 0(1) + # Add in digest from start + lwz 5, 0(3) + add 0, 0, 5 + lwz 5, 4(3) + add 4, 4, 5 + lwz 5, 8(3) + add 7, 7, 5 + lwz 5, 12(3) + add 8, 8, 5 + lwz 5, 16(3) + add 9, 9, 5 + lwz 5, 20(3) + add 10, 10, 5 + lwz 5, 24(3) + add 11, 11, 5 + lwz 5, 28(3) + add 12, 12, 5 + stw 0, 0(3) + stw 4, 4(3) + stw 7, 8(3) + stw 8, 12(3) + stw 9, 16(3) + stw 10, 20(3) + stw 11, 24(3) + stw 12, 28(3) + lwz 3, 4(1) + lwz 5, 8(1) + mtctr 5 + subi 5, 5, 1 + addi 3, 3, 0x40 + stw 3, 4(1) + stw 5, 8(1) + bdnz L_SHA256_transform_len_begin + addi 1, 1, 12 +#endif /* WOLFSSL_PPC32_ASM_SMALL */ + lwz 0, 64(1) + mtlr 0 + lwz 14, 0(1) + lwz 15, 4(1) + lwz 16, 8(1) + lwz 17, 12(1) + lwz 18, 16(1) + lwz 19, 20(1) + lwz 20, 24(1) + lwz 21, 28(1) + lwz 22, 32(1) + lwz 23, 36(1) + lwz 24, 40(1) + lwz 25, 44(1) + lwz 26, 48(1) + lwz 27, 52(1) + lwz 28, 56(1) + lwz 29, 60(1) + addi 1, 1, 0x44 + blr + .size Transform_Sha256_Len,.-Transform_Sha256_Len +#endif /* __PIC__ */ 
#endif /* !WOLFSSL_PPC32_ASM_SPE */ #endif /* !NO_SHA256 */ diff --git a/wolfcrypt/src/port/ppc32/ppc32-sha256-asm_c.c b/wolfcrypt/src/port/ppc32/ppc32-sha256-asm_c.c index 343e5fa5a..37a8a7237 100644 --- a/wolfcrypt/src/port/ppc32/ppc32-sha256-asm_c.c +++ b/wolfcrypt/src/port/ppc32/ppc32-sha256-asm_c.c @@ -1197,6 +1197,7 @@ static const word32 L_SHA256_transform_len_k[] = { 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2, }; +#ifndef __PIC__ void Transform_Sha256_Len(wc_Sha256* sha256_p, const byte* data_p, word32 len_p); #ifndef WOLFSSL_NO_VAR_ASSIGN_REG @@ -4418,6 +4419,3253 @@ void Transform_Sha256_Len(wc_Sha256* sha256, const byte* data, word32 len) ); } +#else +/* PIC version not using register 30 or 31 */ +void Transform_Sha256_Len(wc_Sha256* sha256_p, const byte* data_p, + word32 len_p); +#ifndef WOLFSSL_NO_VAR_ASSIGN_REG +void Transform_Sha256_Len(wc_Sha256* sha256_p, const byte* data_p, word32 len_p) +#else +void Transform_Sha256_Len(wc_Sha256* sha256, const byte* data, word32 len) +#endif /* WOLFSSL_NO_VAR_ASSIGN_REG */ +{ +#ifndef WOLFSSL_NO_VAR_ASSIGN_REG + register wc_Sha256* sha256 asm ("3") = (wc_Sha256*)sha256_p; + register const byte* data asm ("4") = (const byte*)data_p; + register word32 len asm ("5") = (word32)len_p; + register word32* L_SHA256_transform_len_k_c asm ("6") = + (word32*)&L_SHA256_transform_len_k; +#else + register word32* L_SHA256_transform_len_k_c = + (word32*)&L_SHA256_transform_len_k; + +#endif /* !WOLFSSL_NO_VAR_ASSIGN_REG */ + + __asm__ __volatile__ ( + "srwi %[len], %[len], 6\n\t" + "mr 6, %[L_SHA256_transform_len_k]\n\t" +#ifndef WOLFSSL_PPC32_ASM_SMALL + "subi 1, 1, 8\n\t" + "stw %[sha256], 0(1)\n\t" + "stw %[data], 4(1)\n\t" + "mtctr %[len]\n\t" + /* Copy digest to add in at end */ + "lwz 0, 0(%[sha256])\n\t" + "lwz %[data], 4(%[sha256])\n\t" + "lwz 7, 8(%[sha256])\n\t" + "lwz 8, 12(%[sha256])\n\t" + "lwz 9, 16(%[sha256])\n\t" + "lwz 10, 20(%[sha256])\n\t" + "lwz 11, 24(%[sha256])\n\t" + "lwz 12, 28(%[sha256])\n\t" + "lwz %[sha256], 4(1)\n\t" + /* Start of loop processing a block */ + "\n" + "L_SHA256_transform_len_begin_%=: \n\t" + /* Load W - 64 bytes */ + "lwz 14, 0(%[sha256])\n\t" + "lwz 15, 4(%[sha256])\n\t" + "lwz 16, 8(%[sha256])\n\t" + "lwz 17, 12(%[sha256])\n\t" + "lwz 18, 16(%[sha256])\n\t" + "lwz 19, 20(%[sha256])\n\t" + "lwz 20, 24(%[sha256])\n\t" + "lwz 21, 28(%[sha256])\n\t" + "lwz 22, 32(%[sha256])\n\t" + "lwz 23, 36(%[sha256])\n\t" + "lwz 24, 40(%[sha256])\n\t" + "lwz 25, 44(%[sha256])\n\t" + "lwz 26, 48(%[sha256])\n\t" + "lwz 27, 52(%[sha256])\n\t" + "lwz 28, 56(%[sha256])\n\t" + "lwz 29, 60(%[sha256])\n\t" + /* Start of 16 rounds */ + /* Round 0 */ + "rotlwi %[sha256], 9, 26\n\t" + "rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 0(6)\n\t" + "add 12, 12, 14\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + /* Calc new W[0] */ + "rotlwi %[sha256], 15, 25\n\t" + "rotlwi %[len], 15, 
14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 15, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 14, 14, %[sha256]\n\t" + "rotlwi %[sha256], 28, 15\n\t" + "rotlwi %[len], 28, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 28, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 14, 14, %[sha256]\n\t" + "add 14, 14, 23\n\t" + /* Round 1 */ + "rotlwi %[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 4(6)\n\t" + "add 11, 11, 15\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + "rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + /* Calc new W[1] */ + "rotlwi %[sha256], 16, 25\n\t" + "rotlwi %[len], 16, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 16, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 15, 15, %[sha256]\n\t" + "rotlwi %[sha256], 29, 15\n\t" + "rotlwi %[len], 29, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 29, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 15, 15, %[sha256]\n\t" + "add 15, 15, 24\n\t" + /* Round 2 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 8(6)\n\t" + "add 10, 10, 16\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + /* Calc new W[2] */ + "rotlwi %[sha256], 17, 25\n\t" + "rotlwi %[len], 17, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 17, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 16, 16, %[sha256]\n\t" + "rotlwi %[sha256], 14, 15\n\t" + "rotlwi %[len], 14, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 14, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 16, 16, %[sha256]\n\t" + "add 16, 16, 25\n\t" + /* Round 3 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 12(6)\n\t" + "add 9, 9, 17\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], 
%[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + /* Calc new W[3] */ + "rotlwi %[sha256], 18, 25\n\t" + "rotlwi %[len], 18, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 18, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 17, 17, %[sha256]\n\t" + "rotlwi %[sha256], 15, 15\n\t" + "rotlwi %[len], 15, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 15, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 17, 17, %[sha256]\n\t" + "add 17, 17, 26\n\t" + /* Round 4 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 16(6)\n\t" + "add 8, 8, 18\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + "rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + /* Calc new W[4] */ + "rotlwi %[sha256], 19, 25\n\t" + "rotlwi %[len], 19, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 19, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 18, 18, %[sha256]\n\t" + "rotlwi %[sha256], 16, 15\n\t" + "rotlwi %[len], 16, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 16, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 18, 18, %[sha256]\n\t" + "add 18, 18, 27\n\t" + /* Round 5 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 20(6)\n\t" + "add 7, 7, 19\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + /* Calc new W[5] */ + "rotlwi %[sha256], 20, 25\n\t" + "rotlwi %[len], 20, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 20, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 19, 19, %[sha256]\n\t" + "rotlwi %[sha256], 17, 15\n\t" + "rotlwi %[len], 17, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 17, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 19, 19, %[sha256]\n\t" + "add 19, 19, 28\n\t" + /* Round 6 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" 
+ "lwz %[sha256], 24(6)\n\t" + "add %[data], %[data], 20\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Calc new W[6] */ + "rotlwi %[sha256], 21, 25\n\t" + "rotlwi %[len], 21, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 21, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 20, 20, %[sha256]\n\t" + "rotlwi %[sha256], 18, 15\n\t" + "rotlwi %[len], 18, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 18, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 20, 20, %[sha256]\n\t" + "add 20, 20, 29\n\t" + /* Round 7 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 28(6)\n\t" + "add 0, 0, 21\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + /* Calc new W[7] */ + "rotlwi %[sha256], 22, 25\n\t" + "rotlwi %[len], 22, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 22, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 21, 21, %[sha256]\n\t" + "rotlwi %[sha256], 19, 15\n\t" + "rotlwi %[len], 19, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 19, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 21, 21, %[sha256]\n\t" + "add 21, 21, 14\n\t" + /* Round 8 */ + "rotlwi %[sha256], 9, 26\n\t" + "rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 32(6)\n\t" + "add 12, 12, 22\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + /* Calc new W[8] */ + "rotlwi %[sha256], 23, 25\n\t" + "rotlwi %[len], 23, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 23, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 22, 22, %[sha256]\n\t" + "rotlwi %[sha256], 20, 15\n\t" + "rotlwi %[len], 20, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 20, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 22, 22, %[sha256]\n\t" + "add 22, 22, 15\n\t" + /* Round 9 */ + "rotlwi 
%[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 36(6)\n\t" + "add 11, 11, 23\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + "rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + /* Calc new W[9] */ + "rotlwi %[sha256], 24, 25\n\t" + "rotlwi %[len], 24, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 24, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 23, 23, %[sha256]\n\t" + "rotlwi %[sha256], 21, 15\n\t" + "rotlwi %[len], 21, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 21, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 23, 23, %[sha256]\n\t" + "add 23, 23, 16\n\t" + /* Round 10 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 40(6)\n\t" + "add 10, 10, 24\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + /* Calc new W[10] */ + "rotlwi %[sha256], 25, 25\n\t" + "rotlwi %[len], 25, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 25, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 24, 24, %[sha256]\n\t" + "rotlwi %[sha256], 22, 15\n\t" + "rotlwi %[len], 22, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 22, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 24, 24, %[sha256]\n\t" + "add 24, 24, 17\n\t" + /* Round 11 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 44(6)\n\t" + "add 9, 9, 25\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + /* Calc new W[11] */ + "rotlwi %[sha256], 26, 25\n\t" + "rotlwi %[len], 26, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 26, 3\n\t" + "xor %[sha256], %[sha256], 
%[len]\n\t" + "add 25, 25, %[sha256]\n\t" + "rotlwi %[sha256], 23, 15\n\t" + "rotlwi %[len], 23, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 23, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 25, 25, %[sha256]\n\t" + "add 25, 25, 18\n\t" + /* Round 12 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 48(6)\n\t" + "add 8, 8, 26\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + "rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + /* Calc new W[12] */ + "rotlwi %[sha256], 27, 25\n\t" + "rotlwi %[len], 27, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 27, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 26, 26, %[sha256]\n\t" + "rotlwi %[sha256], 24, 15\n\t" + "rotlwi %[len], 24, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 24, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 26, 26, %[sha256]\n\t" + "add 26, 26, 19\n\t" + /* Round 13 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 52(6)\n\t" + "add 7, 7, 27\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + /* Calc new W[13] */ + "rotlwi %[sha256], 28, 25\n\t" + "rotlwi %[len], 28, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 28, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 27, 27, %[sha256]\n\t" + "rotlwi %[sha256], 25, 15\n\t" + "rotlwi %[len], 25, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 25, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 27, 27, %[sha256]\n\t" + "add 27, 27, 20\n\t" + /* Round 14 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 56(6)\n\t" + "add %[data], %[data], 28\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor 
%[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Calc new W[14] */ + "rotlwi %[sha256], 29, 25\n\t" + "rotlwi %[len], 29, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 29, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 28, 28, %[sha256]\n\t" + "rotlwi %[sha256], 26, 15\n\t" + "rotlwi %[len], 26, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 26, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 28, 28, %[sha256]\n\t" + "add 28, 28, 21\n\t" + /* Round 15 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 60(6)\n\t" + "add 0, 0, 29\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + /* Calc new W[15] */ + "rotlwi %[sha256], 14, 25\n\t" + "rotlwi %[len], 14, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 14, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 29, 29, %[sha256]\n\t" + "rotlwi %[sha256], 27, 15\n\t" + "rotlwi %[len], 27, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 27, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 29, 29, %[sha256]\n\t" + "add 29, 29, 22\n\t" + "addi 6, 6, 0x40\n\t" + /* Round 0 */ + "rotlwi %[sha256], 9, 26\n\t" + "rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 0(6)\n\t" + "add 12, 12, 14\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + /* Calc new W[0] */ + "rotlwi %[sha256], 15, 25\n\t" + "rotlwi %[len], 15, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 15, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 14, 14, %[sha256]\n\t" + "rotlwi %[sha256], 28, 15\n\t" + "rotlwi %[len], 28, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 28, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 14, 14, %[sha256]\n\t" + "add 14, 14, 23\n\t" + /* Round 1 */ + "rotlwi %[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 4(6)\n\t" 
+ "add 11, 11, 15\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + "rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + /* Calc new W[1] */ + "rotlwi %[sha256], 16, 25\n\t" + "rotlwi %[len], 16, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 16, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 15, 15, %[sha256]\n\t" + "rotlwi %[sha256], 29, 15\n\t" + "rotlwi %[len], 29, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 29, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 15, 15, %[sha256]\n\t" + "add 15, 15, 24\n\t" + /* Round 2 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 8(6)\n\t" + "add 10, 10, 16\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + /* Calc new W[2] */ + "rotlwi %[sha256], 17, 25\n\t" + "rotlwi %[len], 17, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 17, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 16, 16, %[sha256]\n\t" + "rotlwi %[sha256], 14, 15\n\t" + "rotlwi %[len], 14, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 14, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 16, 16, %[sha256]\n\t" + "add 16, 16, 25\n\t" + /* Round 3 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 12(6)\n\t" + "add 9, 9, 17\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + /* Calc new W[3] */ + "rotlwi %[sha256], 18, 25\n\t" + "rotlwi %[len], 18, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 18, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 17, 17, %[sha256]\n\t" + "rotlwi %[sha256], 15, 15\n\t" + "rotlwi %[len], 15, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 15, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 17, 17, %[sha256]\n\t" + "add 17, 17, 26\n\t" + /* Round 4 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], 
%[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 16(6)\n\t" + "add 8, 8, 18\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + "rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + /* Calc new W[4] */ + "rotlwi %[sha256], 19, 25\n\t" + "rotlwi %[len], 19, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 19, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 18, 18, %[sha256]\n\t" + "rotlwi %[sha256], 16, 15\n\t" + "rotlwi %[len], 16, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 16, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 18, 18, %[sha256]\n\t" + "add 18, 18, 27\n\t" + /* Round 5 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 20(6)\n\t" + "add 7, 7, 19\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + /* Calc new W[5] */ + "rotlwi %[sha256], 20, 25\n\t" + "rotlwi %[len], 20, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 20, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 19, 19, %[sha256]\n\t" + "rotlwi %[sha256], 17, 15\n\t" + "rotlwi %[len], 17, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 17, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 19, 19, %[sha256]\n\t" + "add 19, 19, 28\n\t" + /* Round 6 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 24(6)\n\t" + "add %[data], %[data], 20\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Calc new W[6] */ + "rotlwi %[sha256], 21, 25\n\t" + "rotlwi %[len], 21, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 21, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 20, 20, %[sha256]\n\t" + "rotlwi 
%[sha256], 18, 15\n\t" + "rotlwi %[len], 18, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 18, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 20, 20, %[sha256]\n\t" + "add 20, 20, 29\n\t" + /* Round 7 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 28(6)\n\t" + "add 0, 0, 21\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + /* Calc new W[7] */ + "rotlwi %[sha256], 22, 25\n\t" + "rotlwi %[len], 22, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 22, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 21, 21, %[sha256]\n\t" + "rotlwi %[sha256], 19, 15\n\t" + "rotlwi %[len], 19, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 19, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 21, 21, %[sha256]\n\t" + "add 21, 21, 14\n\t" + /* Round 8 */ + "rotlwi %[sha256], 9, 26\n\t" + "rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 32(6)\n\t" + "add 12, 12, 22\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + /* Calc new W[8] */ + "rotlwi %[sha256], 23, 25\n\t" + "rotlwi %[len], 23, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 23, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 22, 22, %[sha256]\n\t" + "rotlwi %[sha256], 20, 15\n\t" + "rotlwi %[len], 20, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 20, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 22, 22, %[sha256]\n\t" + "add 22, 22, 15\n\t" + /* Round 9 */ + "rotlwi %[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 36(6)\n\t" + "add 11, 11, 23\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + "rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], 
%[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + /* Calc new W[9] */ + "rotlwi %[sha256], 24, 25\n\t" + "rotlwi %[len], 24, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 24, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 23, 23, %[sha256]\n\t" + "rotlwi %[sha256], 21, 15\n\t" + "rotlwi %[len], 21, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 21, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 23, 23, %[sha256]\n\t" + "add 23, 23, 16\n\t" + /* Round 10 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 40(6)\n\t" + "add 10, 10, 24\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + /* Calc new W[10] */ + "rotlwi %[sha256], 25, 25\n\t" + "rotlwi %[len], 25, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 25, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 24, 24, %[sha256]\n\t" + "rotlwi %[sha256], 22, 15\n\t" + "rotlwi %[len], 22, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 22, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 24, 24, %[sha256]\n\t" + "add 24, 24, 17\n\t" + /* Round 11 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 44(6)\n\t" + "add 9, 9, 25\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + /* Calc new W[11] */ + "rotlwi %[sha256], 26, 25\n\t" + "rotlwi %[len], 26, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 26, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 25, 25, %[sha256]\n\t" + "rotlwi %[sha256], 23, 15\n\t" + "rotlwi %[len], 23, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 23, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 25, 25, %[sha256]\n\t" + "add 25, 25, 18\n\t" + /* Round 12 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 48(6)\n\t" + "add 8, 8, 26\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + 
"rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + /* Calc new W[12] */ + "rotlwi %[sha256], 27, 25\n\t" + "rotlwi %[len], 27, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 27, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 26, 26, %[sha256]\n\t" + "rotlwi %[sha256], 24, 15\n\t" + "rotlwi %[len], 24, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 24, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 26, 26, %[sha256]\n\t" + "add 26, 26, 19\n\t" + /* Round 13 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 52(6)\n\t" + "add 7, 7, 27\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + /* Calc new W[13] */ + "rotlwi %[sha256], 28, 25\n\t" + "rotlwi %[len], 28, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 28, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 27, 27, %[sha256]\n\t" + "rotlwi %[sha256], 25, 15\n\t" + "rotlwi %[len], 25, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 25, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 27, 27, %[sha256]\n\t" + "add 27, 27, 20\n\t" + /* Round 14 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 56(6)\n\t" + "add %[data], %[data], 28\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Calc new W[14] */ + "rotlwi %[sha256], 29, 25\n\t" + "rotlwi %[len], 29, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 29, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 28, 28, %[sha256]\n\t" + "rotlwi %[sha256], 26, 15\n\t" + "rotlwi %[len], 26, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 26, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 28, 28, %[sha256]\n\t" + "add 28, 28, 21\n\t" + /* Round 15 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], 
%[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 60(6)\n\t" + "add 0, 0, 29\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + /* Calc new W[15] */ + "rotlwi %[sha256], 14, 25\n\t" + "rotlwi %[len], 14, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 14, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 29, 29, %[sha256]\n\t" + "rotlwi %[sha256], 27, 15\n\t" + "rotlwi %[len], 27, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 27, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 29, 29, %[sha256]\n\t" + "add 29, 29, 22\n\t" + "addi 6, 6, 0x40\n\t" + /* Round 0 */ + "rotlwi %[sha256], 9, 26\n\t" + "rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 0(6)\n\t" + "add 12, 12, 14\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + /* Calc new W[0] */ + "rotlwi %[sha256], 15, 25\n\t" + "rotlwi %[len], 15, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 15, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 14, 14, %[sha256]\n\t" + "rotlwi %[sha256], 28, 15\n\t" + "rotlwi %[len], 28, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 28, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 14, 14, %[sha256]\n\t" + "add 14, 14, 23\n\t" + /* Round 1 */ + "rotlwi %[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 4(6)\n\t" + "add 11, 11, 15\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + "rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + /* Calc new W[1] */ + "rotlwi %[sha256], 16, 25\n\t" + "rotlwi %[len], 16, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 16, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 15, 15, %[sha256]\n\t" + "rotlwi %[sha256], 29, 15\n\t" + "rotlwi %[len], 29, 13\n\t" + "xor %[sha256], 
%[sha256], %[len]\n\t" + "srwi %[len], 29, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 15, 15, %[sha256]\n\t" + "add 15, 15, 24\n\t" + /* Round 2 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 8(6)\n\t" + "add 10, 10, 16\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + /* Calc new W[2] */ + "rotlwi %[sha256], 17, 25\n\t" + "rotlwi %[len], 17, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 17, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 16, 16, %[sha256]\n\t" + "rotlwi %[sha256], 14, 15\n\t" + "rotlwi %[len], 14, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 14, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 16, 16, %[sha256]\n\t" + "add 16, 16, 25\n\t" + /* Round 3 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 12(6)\n\t" + "add 9, 9, 17\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + /* Calc new W[3] */ + "rotlwi %[sha256], 18, 25\n\t" + "rotlwi %[len], 18, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 18, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 17, 17, %[sha256]\n\t" + "rotlwi %[sha256], 15, 15\n\t" + "rotlwi %[len], 15, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 15, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 17, 17, %[sha256]\n\t" + "add 17, 17, 26\n\t" + /* Round 4 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 16(6)\n\t" + "add 8, 8, 18\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + "rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + /* Calc new W[4] */ + "rotlwi 
%[sha256], 19, 25\n\t" + "rotlwi %[len], 19, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 19, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 18, 18, %[sha256]\n\t" + "rotlwi %[sha256], 16, 15\n\t" + "rotlwi %[len], 16, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 16, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 18, 18, %[sha256]\n\t" + "add 18, 18, 27\n\t" + /* Round 5 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 20(6)\n\t" + "add 7, 7, 19\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + /* Calc new W[5] */ + "rotlwi %[sha256], 20, 25\n\t" + "rotlwi %[len], 20, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 20, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 19, 19, %[sha256]\n\t" + "rotlwi %[sha256], 17, 15\n\t" + "rotlwi %[len], 17, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 17, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 19, 19, %[sha256]\n\t" + "add 19, 19, 28\n\t" + /* Round 6 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 24(6)\n\t" + "add %[data], %[data], 20\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Calc new W[6] */ + "rotlwi %[sha256], 21, 25\n\t" + "rotlwi %[len], 21, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 21, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 20, 20, %[sha256]\n\t" + "rotlwi %[sha256], 18, 15\n\t" + "rotlwi %[len], 18, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 18, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 20, 20, %[sha256]\n\t" + "add 20, 20, 29\n\t" + /* Round 7 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 28(6)\n\t" + "add 0, 0, 21\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor 
%[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + /* Calc new W[7] */ + "rotlwi %[sha256], 22, 25\n\t" + "rotlwi %[len], 22, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 22, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 21, 21, %[sha256]\n\t" + "rotlwi %[sha256], 19, 15\n\t" + "rotlwi %[len], 19, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 19, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 21, 21, %[sha256]\n\t" + "add 21, 21, 14\n\t" + /* Round 8 */ + "rotlwi %[sha256], 9, 26\n\t" + "rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 32(6)\n\t" + "add 12, 12, 22\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + /* Calc new W[8] */ + "rotlwi %[sha256], 23, 25\n\t" + "rotlwi %[len], 23, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 23, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 22, 22, %[sha256]\n\t" + "rotlwi %[sha256], 20, 15\n\t" + "rotlwi %[len], 20, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 20, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 22, 22, %[sha256]\n\t" + "add 22, 22, 15\n\t" + /* Round 9 */ + "rotlwi %[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 36(6)\n\t" + "add 11, 11, 23\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + "rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + /* Calc new W[9] */ + "rotlwi %[sha256], 24, 25\n\t" + "rotlwi %[len], 24, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 24, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 23, 23, %[sha256]\n\t" + "rotlwi %[sha256], 21, 15\n\t" + "rotlwi %[len], 21, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 21, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 23, 23, %[sha256]\n\t" + "add 23, 23, 16\n\t" + /* Round 10 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" 
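The xor/and/xor triples are the choose and majority functions in their one-temporary form; both match the textbook definitions bit for bit. In C:

    #include <stdint.h>

    /* Ch(e,f,g): select f where e is 1, g where e is 0. */
    static uint32_t Ch(uint32_t e, uint32_t f, uint32_t g)
    {
        return ((f ^ g) & e) ^ g;          /* xor f,g; and e; xor g */
    }

    /* Maj(a,b,c): majority vote of the three inputs. */
    static uint32_t Maj(uint32_t a, uint32_t b, uint32_t c)
    {
        return ((a ^ b) & (b ^ c)) ^ b;    /* xor a,b; xor b,c; and; xor b */
    }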
+ "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 40(6)\n\t" + "add 10, 10, 24\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + /* Calc new W[10] */ + "rotlwi %[sha256], 25, 25\n\t" + "rotlwi %[len], 25, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 25, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 24, 24, %[sha256]\n\t" + "rotlwi %[sha256], 22, 15\n\t" + "rotlwi %[len], 22, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 22, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 24, 24, %[sha256]\n\t" + "add 24, 24, 17\n\t" + /* Round 11 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 44(6)\n\t" + "add 9, 9, 25\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + /* Calc new W[11] */ + "rotlwi %[sha256], 26, 25\n\t" + "rotlwi %[len], 26, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 26, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 25, 25, %[sha256]\n\t" + "rotlwi %[sha256], 23, 15\n\t" + "rotlwi %[len], 23, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 23, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 25, 25, %[sha256]\n\t" + "add 25, 25, 18\n\t" + /* Round 12 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 48(6)\n\t" + "add 8, 8, 26\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + "rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + /* Calc new W[12] */ + "rotlwi %[sha256], 27, 25\n\t" + "rotlwi %[len], 27, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 27, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 26, 26, %[sha256]\n\t" + "rotlwi %[sha256], 24, 15\n\t" + "rotlwi %[len], 24, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 24, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 26, 26, 
%[sha256]\n\t" + "add 26, 26, 19\n\t" + /* Round 13 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 52(6)\n\t" + "add 7, 7, 27\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + /* Calc new W[13] */ + "rotlwi %[sha256], 28, 25\n\t" + "rotlwi %[len], 28, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 28, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 27, 27, %[sha256]\n\t" + "rotlwi %[sha256], 25, 15\n\t" + "rotlwi %[len], 25, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 25, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 27, 27, %[sha256]\n\t" + "add 27, 27, 20\n\t" + /* Round 14 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 56(6)\n\t" + "add %[data], %[data], 28\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Calc new W[14] */ + "rotlwi %[sha256], 29, 25\n\t" + "rotlwi %[len], 29, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 29, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 28, 28, %[sha256]\n\t" + "rotlwi %[sha256], 26, 15\n\t" + "rotlwi %[len], 26, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 26, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 28, 28, %[sha256]\n\t" + "add 28, 28, 21\n\t" + /* Round 15 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 60(6)\n\t" + "add 0, 0, 29\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + /* Calc new W[15] */ + "rotlwi %[sha256], 14, 25\n\t" + "rotlwi %[len], 14, 
14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 14, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 29, 29, %[sha256]\n\t" + "rotlwi %[sha256], 27, 15\n\t" + "rotlwi %[len], 27, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 27, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 29, 29, %[sha256]\n\t" + "add 29, 29, 22\n\t" + "addi 6, 6, 0x40\n\t" + /* Round 0 */ + "rotlwi %[sha256], 9, 26\n\t" + "rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 0(6)\n\t" + "add 12, 12, 14\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + /* Round 1 */ + "rotlwi %[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 4(6)\n\t" + "add 11, 11, 15\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + "rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + /* Round 2 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 8(6)\n\t" + "add 10, 10, 16\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + /* Round 3 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 12(6)\n\t" + "add 9, 9, 17\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + 
"xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + /* Round 4 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 16(6)\n\t" + "add 8, 8, 18\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + "rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + /* Round 5 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 20(6)\n\t" + "add 7, 7, 19\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + /* Round 6 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 24(6)\n\t" + "add %[data], %[data], 20\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Round 7 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 28(6)\n\t" + "add 0, 0, 21\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + /* Round 8 */ + "rotlwi %[sha256], 9, 26\n\t" + 
"rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 32(6)\n\t" + "add 12, 12, 22\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + /* Round 9 */ + "rotlwi %[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 36(6)\n\t" + "add 11, 11, 23\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + "rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + /* Round 10 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 40(6)\n\t" + "add 10, 10, 24\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + /* Round 11 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 44(6)\n\t" + "add 9, 9, 25\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + /* Round 12 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 
0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 48(6)\n\t" + "add 8, 8, 26\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + "rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + /* Round 13 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 52(6)\n\t" + "add 7, 7, 27\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + /* Round 14 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 56(6)\n\t" + "add %[data], %[data], 28\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Round 15 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 60(6)\n\t" + "add 0, 0, 29\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + "subi 6, 6, 0xc0\n\t" + "lwz %[sha256], 0(1)\n\t" + /* Add in digest from start */ + "lwz %[len], 0(%[sha256])\n\t" + "add 0, 0, %[len]\n\t" + "lwz %[len], 4(%[sha256])\n\t" + "add %[data], %[data], %[len]\n\t" + "lwz %[len], 8(%[sha256])\n\t" + "add 7, 7, %[len]\n\t" + "lwz %[len], 12(%[sha256])\n\t" + "add 8, 8, %[len]\n\t" + "lwz %[len], 16(%[sha256])\n\t" + "add 9, 9, %[len]\n\t" + "lwz %[len], 20(%[sha256])\n\t" + "add 10, 10, %[len]\n\t" + "lwz %[len], 24(%[sha256])\n\t" + 
"add 11, 11, %[len]\n\t" + "lwz %[len], 28(%[sha256])\n\t" + "add 12, 12, %[len]\n\t" + "stw 0, 0(%[sha256])\n\t" + "stw %[data], 4(%[sha256])\n\t" + "stw 7, 8(%[sha256])\n\t" + "stw 8, 12(%[sha256])\n\t" + "stw 9, 16(%[sha256])\n\t" + "stw 10, 20(%[sha256])\n\t" + "stw 11, 24(%[sha256])\n\t" + "stw 12, 28(%[sha256])\n\t" + "lwz %[sha256], 4(1)\n\t" + "addi %[sha256], %[sha256], 0x40\n\t" + "stw %[sha256], 4(1)\n\t" + "bdnz L_SHA256_transform_len_begin_%=\n\t" + "addi 1, 1, 8\n\t" +#else + "subi 1, 1, 12\n\t" + "stw %[sha256], 0(1)\n\t" + "stw %[data], 4(1)\n\t" + "stw %[len], 8(1)\n\t" + /* Copy digest to add in at end */ + "lwz 0, 0(%[sha256])\n\t" + "lwz %[data], 4(%[sha256])\n\t" + "lwz 7, 8(%[sha256])\n\t" + "lwz 8, 12(%[sha256])\n\t" + "lwz 9, 16(%[sha256])\n\t" + "lwz 10, 20(%[sha256])\n\t" + "lwz 11, 24(%[sha256])\n\t" + "lwz 12, 28(%[sha256])\n\t" + "lwz %[sha256], 4(1)\n\t" + /* Start of loop processing a block */ + "\n" + "L_SHA256_transform_len_begin_%=: \n\t" + /* Load W - 64 bytes */ + "lwz 14, 0(%[sha256])\n\t" + "lwz 15, 4(%[sha256])\n\t" + "lwz 16, 8(%[sha256])\n\t" + "lwz 17, 12(%[sha256])\n\t" + "lwz 18, 16(%[sha256])\n\t" + "lwz 19, 20(%[sha256])\n\t" + "lwz 20, 24(%[sha256])\n\t" + "lwz 21, 28(%[sha256])\n\t" + "lwz 22, 32(%[sha256])\n\t" + "lwz 23, 36(%[sha256])\n\t" + "lwz 24, 40(%[sha256])\n\t" + "lwz 25, 44(%[sha256])\n\t" + "lwz 26, 48(%[sha256])\n\t" + "lwz 27, 52(%[sha256])\n\t" + "lwz 28, 56(%[sha256])\n\t" + "lwz 29, 60(%[sha256])\n\t" + "li %[sha256], 4\n\t" + "mtctr %[sha256]\n\t" + /* Start of 16 rounds */ + "\n" + "L_SHA256_transform_len_start_%=: \n\t" + /* Round 0 */ + "rotlwi %[sha256], 9, 26\n\t" + "rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 0(6)\n\t" + "add 12, 12, 14\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_0_%=\n\t" + /* Calc new W[0] */ + "rotlwi %[sha256], 15, 25\n\t" + "rotlwi %[len], 15, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 15, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 14, 14, %[sha256]\n\t" + "rotlwi %[sha256], 28, 15\n\t" + "rotlwi %[len], 28, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 28, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 14, 14, %[sha256]\n\t" + "add 14, 14, 23\n\t" + "\n" + "L_SHA256_transform_len_after_blk_0_%=: \n\t" + /* Round 1 */ + "rotlwi %[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 4(6)\n\t" + "add 11, 11, 15\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + 
"rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_1_%=\n\t" + /* Calc new W[1] */ + "rotlwi %[sha256], 16, 25\n\t" + "rotlwi %[len], 16, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 16, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 15, 15, %[sha256]\n\t" + "rotlwi %[sha256], 29, 15\n\t" + "rotlwi %[len], 29, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 29, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 15, 15, %[sha256]\n\t" + "add 15, 15, 24\n\t" + "\n" + "L_SHA256_transform_len_after_blk_1_%=: \n\t" + /* Round 2 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 8(6)\n\t" + "add 10, 10, 16\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_2_%=\n\t" + /* Calc new W[2] */ + "rotlwi %[sha256], 17, 25\n\t" + "rotlwi %[len], 17, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 17, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 16, 16, %[sha256]\n\t" + "rotlwi %[sha256], 14, 15\n\t" + "rotlwi %[len], 14, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 14, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 16, 16, %[sha256]\n\t" + "add 16, 16, 25\n\t" + "\n" + "L_SHA256_transform_len_after_blk_2_%=: \n\t" + /* Round 3 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 12(6)\n\t" + "add 9, 9, 17\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_3_%=\n\t" + /* Calc new W[3] */ + "rotlwi %[sha256], 18, 25\n\t" + "rotlwi %[len], 18, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 18, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + 
"add 17, 17, %[sha256]\n\t" + "rotlwi %[sha256], 15, 15\n\t" + "rotlwi %[len], 15, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 15, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 17, 17, %[sha256]\n\t" + "add 17, 17, 26\n\t" + "\n" + "L_SHA256_transform_len_after_blk_3_%=: \n\t" + /* Round 4 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 16(6)\n\t" + "add 8, 8, 18\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + "rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_4_%=\n\t" + /* Calc new W[4] */ + "rotlwi %[sha256], 19, 25\n\t" + "rotlwi %[len], 19, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 19, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 18, 18, %[sha256]\n\t" + "rotlwi %[sha256], 16, 15\n\t" + "rotlwi %[len], 16, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 16, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 18, 18, %[sha256]\n\t" + "add 18, 18, 27\n\t" + "\n" + "L_SHA256_transform_len_after_blk_4_%=: \n\t" + /* Round 5 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 20(6)\n\t" + "add 7, 7, 19\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_5_%=\n\t" + /* Calc new W[5] */ + "rotlwi %[sha256], 20, 25\n\t" + "rotlwi %[len], 20, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 20, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 19, 19, %[sha256]\n\t" + "rotlwi %[sha256], 17, 15\n\t" + "rotlwi %[len], 17, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 17, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 19, 19, %[sha256]\n\t" + "add 19, 19, 28\n\t" + "\n" + "L_SHA256_transform_len_after_blk_5_%=: \n\t" + /* Round 6 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" 
+ "lwz %[sha256], 24(6)\n\t" + "add %[data], %[data], 20\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_6_%=\n\t" + /* Calc new W[6] */ + "rotlwi %[sha256], 21, 25\n\t" + "rotlwi %[len], 21, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 21, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 20, 20, %[sha256]\n\t" + "rotlwi %[sha256], 18, 15\n\t" + "rotlwi %[len], 18, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 18, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 20, 20, %[sha256]\n\t" + "add 20, 20, 29\n\t" + "\n" + "L_SHA256_transform_len_after_blk_6_%=: \n\t" + /* Round 7 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 28(6)\n\t" + "add 0, 0, 21\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_7_%=\n\t" + /* Calc new W[7] */ + "rotlwi %[sha256], 22, 25\n\t" + "rotlwi %[len], 22, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 22, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 21, 21, %[sha256]\n\t" + "rotlwi %[sha256], 19, 15\n\t" + "rotlwi %[len], 19, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 19, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 21, 21, %[sha256]\n\t" + "add 21, 21, 14\n\t" + "\n" + "L_SHA256_transform_len_after_blk_7_%=: \n\t" + /* Round 8 */ + "rotlwi %[sha256], 9, 26\n\t" + "rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 32(6)\n\t" + "add 12, 12, 22\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_8_%=\n\t" + /* Calc new W[8] 
*/ + "rotlwi %[sha256], 23, 25\n\t" + "rotlwi %[len], 23, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 23, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 22, 22, %[sha256]\n\t" + "rotlwi %[sha256], 20, 15\n\t" + "rotlwi %[len], 20, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 20, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 22, 22, %[sha256]\n\t" + "add 22, 22, 15\n\t" + "\n" + "L_SHA256_transform_len_after_blk_8_%=: \n\t" + /* Round 9 */ + "rotlwi %[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 36(6)\n\t" + "add 11, 11, 23\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + "rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_9_%=\n\t" + /* Calc new W[9] */ + "rotlwi %[sha256], 24, 25\n\t" + "rotlwi %[len], 24, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 24, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 23, 23, %[sha256]\n\t" + "rotlwi %[sha256], 21, 15\n\t" + "rotlwi %[len], 21, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 21, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 23, 23, %[sha256]\n\t" + "add 23, 23, 16\n\t" + "\n" + "L_SHA256_transform_len_after_blk_9_%=: \n\t" + /* Round 10 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 40(6)\n\t" + "add 10, 10, 24\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_10_%=\n\t" + /* Calc new W[10] */ + "rotlwi %[sha256], 25, 25\n\t" + "rotlwi %[len], 25, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 25, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 24, 24, %[sha256]\n\t" + "rotlwi %[sha256], 22, 15\n\t" + "rotlwi %[len], 22, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 22, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 24, 24, %[sha256]\n\t" + "add 24, 24, 17\n\t" + "\n" + "L_SHA256_transform_len_after_blk_10_%=: \n\t" + /* Round 11 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" 
+ "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 44(6)\n\t" + "add 9, 9, 25\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_11_%=\n\t" + /* Calc new W[11] */ + "rotlwi %[sha256], 26, 25\n\t" + "rotlwi %[len], 26, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 26, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 25, 25, %[sha256]\n\t" + "rotlwi %[sha256], 23, 15\n\t" + "rotlwi %[len], 23, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 23, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 25, 25, %[sha256]\n\t" + "add 25, 25, 18\n\t" + "\n" + "L_SHA256_transform_len_after_blk_11_%=: \n\t" + /* Round 12 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 48(6)\n\t" + "add 8, 8, 26\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + "rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_12_%=\n\t" + /* Calc new W[12] */ + "rotlwi %[sha256], 27, 25\n\t" + "rotlwi %[len], 27, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 27, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 26, 26, %[sha256]\n\t" + "rotlwi %[sha256], 24, 15\n\t" + "rotlwi %[len], 24, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 24, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 26, 26, %[sha256]\n\t" + "add 26, 26, 19\n\t" + "\n" + "L_SHA256_transform_len_after_blk_12_%=: \n\t" + /* Round 13 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 52(6)\n\t" + "add 7, 7, 27\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + 
"mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_13_%=\n\t" + /* Calc new W[13] */ + "rotlwi %[sha256], 28, 25\n\t" + "rotlwi %[len], 28, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 28, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 27, 27, %[sha256]\n\t" + "rotlwi %[sha256], 25, 15\n\t" + "rotlwi %[len], 25, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 25, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 27, 27, %[sha256]\n\t" + "add 27, 27, 20\n\t" + "\n" + "L_SHA256_transform_len_after_blk_13_%=: \n\t" + /* Round 14 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 56(6)\n\t" + "add %[data], %[data], 28\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_14_%=\n\t" + /* Calc new W[14] */ + "rotlwi %[sha256], 29, 25\n\t" + "rotlwi %[len], 29, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 29, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 28, 28, %[sha256]\n\t" + "rotlwi %[sha256], 26, 15\n\t" + "rotlwi %[len], 26, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 26, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 28, 28, %[sha256]\n\t" + "add 28, 28, 21\n\t" + "\n" + "L_SHA256_transform_len_after_blk_14_%=: \n\t" + /* Round 15 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 60(6)\n\t" + "add 0, 0, 29\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_15_%=\n\t" + /* Calc new W[15] */ + "rotlwi %[sha256], 14, 25\n\t" + "rotlwi %[len], 14, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 14, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 29, 29, %[sha256]\n\t" + "rotlwi %[sha256], 27, 15\n\t" + "rotlwi %[len], 27, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 27, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 29, 29, %[sha256]\n\t" + "add 29, 29, 22\n\t" + "\n" + 
"L_SHA256_transform_len_after_blk_15_%=: \n\t" + "addi 6, 6, 0x40\n\t" + "bdnz L_SHA256_transform_len_start_%=\n\t" + "subi 6, 6, 0x100\n\t" + "lwz %[sha256], 0(1)\n\t" + /* Add in digest from start */ + "lwz %[len], 0(%[sha256])\n\t" + "add 0, 0, %[len]\n\t" + "lwz %[len], 4(%[sha256])\n\t" + "add %[data], %[data], %[len]\n\t" + "lwz %[len], 8(%[sha256])\n\t" + "add 7, 7, %[len]\n\t" + "lwz %[len], 12(%[sha256])\n\t" + "add 8, 8, %[len]\n\t" + "lwz %[len], 16(%[sha256])\n\t" + "add 9, 9, %[len]\n\t" + "lwz %[len], 20(%[sha256])\n\t" + "add 10, 10, %[len]\n\t" + "lwz %[len], 24(%[sha256])\n\t" + "add 11, 11, %[len]\n\t" + "lwz %[len], 28(%[sha256])\n\t" + "add 12, 12, %[len]\n\t" + "stw 0, 0(%[sha256])\n\t" + "stw %[data], 4(%[sha256])\n\t" + "stw 7, 8(%[sha256])\n\t" + "stw 8, 12(%[sha256])\n\t" + "stw 9, 16(%[sha256])\n\t" + "stw 10, 20(%[sha256])\n\t" + "stw 11, 24(%[sha256])\n\t" + "stw 12, 28(%[sha256])\n\t" + "lwz %[sha256], 4(1)\n\t" + "lwz %[len], 8(1)\n\t" + "mtctr %[len]\n\t" + "subi %[len], %[len], 1\n\t" + "addi %[sha256], %[sha256], 0x40\n\t" + "stw %[sha256], 4(1)\n\t" + "stw %[len], 8(1)\n\t" + "bdnz L_SHA256_transform_len_begin_%=\n\t" + "addi 1, 1, 12\n\t" +#endif /* WOLFSSL_PPC32_ASM_SMALL */ +#ifndef WOLFSSL_NO_VAR_ASSIGN_REG + : [sha256] "+r" (sha256), [data] "+r" (data), [len] "+r" (len), + [L_SHA256_transform_len_k] "+r" (L_SHA256_transform_len_k_c) + : +#else + : + : [sha256] "r" (sha256), [data] "r" (data), [len] "r" (len), + [L_SHA256_transform_len_k] "r" (L_SHA256_transform_len_k_c) +#endif /* !WOLFSSL_NO_VAR_ASSIGN_REG */ + : "memory", "cc", "0", "7", "8", "9", "10", "11", "12", "14", "15", + "16", "17", "18", "19", "20", "21", "22", "23", "24", "25", "26", + "27", "28", "29" + ); +} + +#endif /* __PIC__ */ #endif /* !WOLFSSL_PPC32_ASM_SPE */ #endif /* !NO_SHA256 */ From f61bfd7805ecd887f815d5ba26b50c473928fbbd Mon Sep 17 00:00:00 2001 From: Juliusz Sosinowicz Date: Tue, 16 Dec 2025 17:10:26 +0100 Subject: [PATCH 02/27] Check KeyShare after HRR --- src/tls.c | 14 ++++++++ src/tls13.c | 9 +++++ tests/api.c | 2 +- tests/api/test_tls13.c | 80 ++++++++++++++++++++++++++++++++++++++++++ tests/api/test_tls13.h | 4 ++- tests/utils.h | 5 +++ wolfssl/internal.h | 1 + 7 files changed, 113 insertions(+), 2 deletions(-) diff --git a/src/tls.c b/src/tls.c index 94bda4772..743fbf3e9 100644 --- a/src/tls.c +++ b/src/tls.c @@ -9979,6 +9979,20 @@ int TLSX_KeyShare_Parse_ClientHello(const WOLFSSL* ssl, offset += ret; } + if (ssl->hrr_keyshare_group != 0) { + /* + * https://datatracker.ietf.org/doc/html/rfc8446#section-4.2.8 + * when sending the new ClientHello, the client MUST + * replace the original "key_share" extension with one containing only a + * new KeyShareEntry for the group indicated in the selected_group field + * of the triggering HelloRetryRequest + */ + if (seenGroupsCnt != 1 || seenGroups[0] != ssl->hrr_keyshare_group) { + WOLFSSL_ERROR_VERBOSE(BAD_KEY_SHARE_DATA); + return BAD_KEY_SHARE_DATA; + } + } + return 0; } diff --git a/src/tls13.c b/src/tls13.c index 6eeabb4ee..e4026c852 100644 --- a/src/tls13.c +++ b/src/tls13.c @@ -7475,6 +7475,15 @@ int SendTls13ServerHello(WOLFSSL* ssl, byte extMsgType) if (ret != 0) return ret; + if (extMsgType == hello_retry_request) { + TLSX* ksExt = TLSX_Find(ssl->extensions, TLSX_KEY_SHARE); + if (ksExt != NULL) { + KeyShareEntry* kse = (KeyShareEntry*)ksExt->data; + if (kse != NULL) + ssl->hrr_keyshare_group = kse->group; + } + } + #ifdef WOLFSSL_SEND_HRR_COOKIE if (ssl->options.sendCookie && extMsgType == hello_retry_request) { 
/* Reset the hashes from here. We will be able to restart the hashes diff --git a/tests/api.c b/tests/api.c index 631675c24..59ee2f423 100644 --- a/tests/api.c +++ b/tests/api.c @@ -268,7 +268,7 @@ #endif #ifdef WOLFSSL_DUMP_MEMIO_STREAM -const char* currentTestName; +const char* currentTestName = NULL; char tmpDirName[16]; int tmpDirNameSet = 0; #endif diff --git a/tests/api/test_tls13.c b/tests/api/test_tls13.c index e79e11466..abf0e1e02 100644 --- a/tests/api/test_tls13.c +++ b/tests/api/test_tls13.c @@ -2572,3 +2572,83 @@ int test_tls13_duplicate_extension(void) } + +int test_key_share_mismatch(void) +{ + EXPECT_DECLS; +#if defined(HAVE_MANUAL_MEMIO_TESTS_DEPENDENCIES) && defined(WOLFSSL_TLS13) && \ + defined(HAVE_SUPPORTED_CURVES) && defined(HAVE_ECC) && \ + defined(BUILD_TLS_AES_128_GCM_SHA256) + /* Taken from payload in https://github.com/wolfSSL/wolfssl/issues/9362 */ + const byte ch1_bin[] = { + 0x16, 0x03, 0x03, 0x00, 0x96, 0x01, 0x00, 0x00, 0x92, 0x03, 0x03, 0x01, + 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x20, 0x03, 0x03, 0x03, 0x03, + 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, + 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, + 0x03, 0x03, 0x03, 0x03, 0x00, 0x02, 0x13, 0x01, 0x01, 0x00, 0x00, 0x47, + 0x00, 0x0a, 0x00, 0x08, 0x00, 0x06, 0x00, 0x18, 0x00, 0x17, 0x00, 0x1d, + 0x00, 0x0d, 0x00, 0x06, 0x00, 0x04, 0x04, 0x01, 0x08, 0x04, 0x00, 0x33, + 0x00, 0x26, 0x00, 0x24, 0x00, 0x1d, 0x00, 0x20, 0x07, 0xaa, 0xff, 0x3e, + 0x9f, 0xc1, 0x67, 0x27, 0x55, 0x44, 0xf4, 0xc3, 0xa6, 0xa1, 0x7c, 0xd8, + 0x37, 0xf2, 0xec, 0x6e, 0x78, 0xcd, 0x8a, 0x57, 0xb1, 0xe3, 0xdf, 0xb3, + 0xcc, 0x03, 0x5a, 0x76, 0x00, 0x2b, 0x00, 0x03, 0x02, 0x03, 0x04 + }; + const byte ch2_bin[] = { + 0x16, 0x03, 0x03, 0x00, 0xb7, 0x01, 0x00, 0x00, 0xb3, 0x03, 0x03, 0x01, + 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x20, 0x03, 0x03, 0x03, 0x03, + 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, + 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, + 0x03, 0x03, 0x03, 0x03, 0x00, 0x02, 0x13, 0x01, 0x01, 0x00, 0x00, 0x68, + 0x00, 0x0a, 0x00, 0x08, 0x00, 0x06, 0x00, 0x18, 0x00, 0x17, 0x00, 0x1d, + 0x00, 0x0d, 0x00, 0x06, 0x00, 0x04, 0x04, 0x01, 0x08, 0x04, 0x00, 0x33, + 0x00, 0x47, 0x00, 0x45, 0x00, 0x17, 0x00, 0x41, 0x04, 0x0c, 0x90, 0x1d, + 0x42, 0x3c, 0x83, 0x1c, 0xa8, 0x5e, 0x27, 0xc7, 0x3c, 0x26, 0x3b, 0xa1, + 0x32, 0x72, 0x1b, 0xb9, 0xd7, 0xa8, 0x4c, 0x4f, 0x03, 0x80, 0xb2, 0xa6, + 0x75, 0x6f, 0xd6, 0x01, 0x33, 0x1c, 0x88, 0x70, 0x23, 0x4d, 0xec, 0x87, + 0x85, 0x04, 0xc1, 0x74, 0x14, 0x4f, 0xa4, 0xb1, 0x4b, 0x66, 0xa6, 0x51, + 0x69, 0x16, 0x06, 0xd8, 0x17, 0x3e, 0x55, 0xbd, 0x37, 0xe3, 0x81, 0x56, + 0x9e, 0x00, 0x2b, 0x00, 0x03, 0x02, 0x03, 0x04 + }; + WOLFSSL_CTX *ctx_c = NULL, *ctx_s = NULL; + WOLFSSL *ssl_c = NULL, *ssl_s = NULL; + struct test_memio_ctx test_ctx; + int client_group[] = {WOLFSSL_ECC_SECP521R1}; + int server_group[] = {WOLFSSL_ECC_SECP384R1, WOLFSSL_ECC_SECP256R1}; + + XMEMSET(&test_ctx, 0, sizeof(test_ctx)); + ExpectIntEQ(test_memio_setup(&test_ctx, &ctx_c, &ctx_s, &ssl_c, &ssl_s, + wolfTLSv1_3_client_method, wolfTLSv1_3_server_method), 0); + ExpectIntEQ(wolfSSL_set_groups(ssl_c, + client_group, 
XELEM_CNT(client_group)), WOLFSSL_SUCCESS); + ExpectIntEQ(wolfSSL_set_groups(ssl_s, + server_group, XELEM_CNT(server_group)), WOLFSSL_SUCCESS); + ExpectIntEQ(test_memio_do_handshake(ssl_c, ssl_s, 10, NULL), -1); + ExpectIntEQ(wolfSSL_get_error(ssl_c, -1), BAD_KEY_SHARE_DATA); + + wolfSSL_free(ssl_s); + ssl_s = NULL; + XMEMSET(&test_ctx, 0, sizeof(test_ctx)); + ExpectIntEQ(test_memio_setup(&test_ctx, NULL, &ctx_s, NULL, &ssl_s, + NULL, wolfTLSv1_3_server_method), 0); + ExpectIntEQ(wolfSSL_set_groups(ssl_s, + server_group, XELEM_CNT(server_group)), WOLFSSL_SUCCESS); + ExpectIntEQ(test_memio_inject_message(&test_ctx, 0, (const char*)ch1_bin, + sizeof(ch1_bin)), 0); + ExpectIntEQ(wolfSSL_accept(ssl_s), -1); + ExpectIntEQ(wolfSSL_get_error(ssl_s, -1), WOLFSSL_ERROR_WANT_READ); + ExpectIntEQ(test_memio_inject_message(&test_ctx, 0, (const char*)ch2_bin, + sizeof(ch2_bin)), 0); + ExpectIntEQ(wolfSSL_accept(ssl_s), -1); + ExpectIntEQ(wolfSSL_get_error(ssl_s, -1), BAD_KEY_SHARE_DATA); + + wolfSSL_free(ssl_c); + wolfSSL_free(ssl_s); + wolfSSL_CTX_free(ctx_c); + wolfSSL_CTX_free(ctx_s); +#endif + return EXPECT_RESULT(); +} diff --git a/tests/api/test_tls13.h b/tests/api/test_tls13.h index 5364e53eb..85669d818 100644 --- a/tests/api/test_tls13.h +++ b/tests/api/test_tls13.h @@ -31,6 +31,7 @@ int test_tls13_rpk_handshake(void); int test_tls13_pq_groups(void); int test_tls13_early_data(void); int test_tls13_same_ch(void); +int test_key_share_mismatch(void); int test_tls13_hrr_different_cs(void); int test_tls13_sg_missing(void); int test_tls13_ks_missing(void); @@ -47,6 +48,7 @@ int test_tls13_duplicate_extension(void); TEST_DECL_GROUP("tls13", test_tls13_hrr_different_cs), \ TEST_DECL_GROUP("tls13", test_tls13_sg_missing), \ TEST_DECL_GROUP("tls13", test_tls13_ks_missing), \ - TEST_DECL_GROUP("tls13", test_tls13_duplicate_extension) + TEST_DECL_GROUP("tls13", test_tls13_duplicate_extension), \ + TEST_DECL_GROUP("tls13", test_key_share_mismatch) #endif /* WOLFCRYPT_TEST_TLS13_H */ diff --git a/tests/utils.h b/tests/utils.h index 34ba47d34..ecc800f82 100644 --- a/tests/utils.h +++ b/tests/utils.h @@ -27,6 +27,11 @@ #ifndef TESTS_UTILS_H #define TESTS_UTILS_H +#ifdef WOLFSSL_DUMP_MEMIO_STREAM +extern char tmpDirName[16]; +extern const char* currentTestName; +#endif + #if !defined(NO_FILESYSTEM) && !defined(NO_CERTS) && \ (!defined(NO_RSA) || defined(HAVE_RPK)) && \ !defined(NO_WOLFSSL_SERVER) && !defined(NO_WOLFSSL_CLIENT) && \ diff --git a/wolfssl/internal.h b/wolfssl/internal.h index 79182b9ea..c975865ca 100644 --- a/wolfssl/internal.h +++ b/wolfssl/internal.h @@ -6150,6 +6150,7 @@ struct WOLFSSL { void* session_ticket_ctx; byte expect_session_ticket; #endif + word16 hrr_keyshare_group; #endif /* HAVE_TLS_EXTENSIONS */ #ifdef HAVE_OCSP void* ocspIOCtx; From e93835acd92a6af4ad37a0f26429695a9e90927d Mon Sep 17 00:00:00 2001 From: JacobBarthelmeh Date: Wed, 17 Dec 2025 10:15:32 -0700 Subject: [PATCH 03/27] sanity checks on buffer size with AES and CAAM Integrity use --- wolfcrypt/src/port/caam/caam_integrity.c | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/wolfcrypt/src/port/caam/caam_integrity.c b/wolfcrypt/src/port/caam/caam_integrity.c index e7cfb5fe9..791fa1e80 100644 --- a/wolfcrypt/src/port/caam/caam_integrity.c +++ b/wolfcrypt/src/port/caam/caam_integrity.c @@ -759,6 +759,9 @@ static Error caamAes(struct DescStruct* desc) ctx[ctxIdx] = buf; sz += buf->dataSz; + if (ctx[ctxIdx]->dataSz + offset > (MAX_CTX * sizeof(UINT4))) { + return SizeIsTooLarge; + } memcpy((unsigned char*)&local[offset], 
                (unsigned char*)ctx[ctxIdx]->data, ctx[ctxIdx]->dataSz);
             offset += ctx[ctxIdx]->dataSz;
@@ -958,6 +961,9 @@ static Error caamAead(struct DescStruct* desc)
             ctx[ctxIdx] = buf;
             sz += buf->dataSz;
 
+            if (ctx[ctxIdx]->dataSz + offset > (MAX_CTX * sizeof(UINT4))) {
+                return SizeIsTooLarge;
+            }
             memcpy((unsigned char*)&local[offset],
                 (unsigned char*)ctx[ctxIdx]->data, ctx[ctxIdx]->dataSz);
             offset += ctx[ctxIdx]->dataSz;

From 1484fb506983c6db0f856dada4d51ebaf71e581b Mon Sep 17 00:00:00 2001
From: Joseph Chen
Date: Thu, 18 Dec 2025 15:46:35 +0800
Subject: [PATCH 04/27] Add IAR support to WC_OFFSETOF macro

---
 wolfssl/wolfcrypt/types.h | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/wolfssl/wolfcrypt/types.h b/wolfssl/wolfcrypt/types.h
index 19d8d8b86..feded367e 100644
--- a/wolfssl/wolfcrypt/types.h
+++ b/wolfssl/wolfcrypt/types.h
@@ -1233,7 +1233,7 @@ binding for XSNPRINTF
 #ifndef WC_OFFSETOF
     #if defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 4))
         #define WC_OFFSETOF(type, field) __builtin_offsetof(type, field)
-    #elif defined(__WATCOMC__)
+    #elif defined(__WATCOMC__) || defined(__IAR_SYSTEMS_ICC__)
        #include <stddef.h>
         #define WC_OFFSETOF offsetof
     #else

From d1a4677a8a6aa44acedddc84ae276630c18ff943 Mon Sep 17 00:00:00 2001
From: Eric Blankenhorn
Date: Thu, 18 Dec 2025 10:10:57 -0600
Subject: [PATCH 05/27] Null deref check in Pkcs11ECDH

---
 wolfcrypt/src/wc_pkcs11.c | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/wolfcrypt/src/wc_pkcs11.c b/wolfcrypt/src/wc_pkcs11.c
index d272a3158..3ac4911f2 100644
--- a/wolfcrypt/src/wc_pkcs11.c
+++ b/wolfcrypt/src/wc_pkcs11.c
@@ -2764,7 +2764,8 @@ static int Pkcs11ECDH(Pkcs11Session* session, wc_CryptoInfo* info)
 
     if (ret == 0) {
         secSz = *info->pk.ecdh.outlen;
-        if (secSz > (CK_ULONG)info->pk.ecdh.private_key->dp->size)
+        if (info->pk.ecdh.private_key->dp != NULL &&
+            secSz > (CK_ULONG)info->pk.ecdh.private_key->dp->size)
             secSz = info->pk.ecdh.private_key->dp->size;
 
         params.kdf = CKD_NULL;

From 59b3219c0f60cc75b96ee4892e18637d588aa546 Mon Sep 17 00:00:00 2001
From: Daniel Pouzzner
Date: Thu, 18 Dec 2025 10:47:21 -0600
Subject: [PATCH 06/27] wolfcrypt/test/test.c: fix memory leaks in Hmac tests.
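
For reference, the shape of this fix is the single-exit cleanup idiom: the early
returns inside the per-vector loops become ERROR_OUT(..., out), and one out: label
frees the Hmac context (and the heap-allocated hmac_copy, where built) on every
path. Below is a minimal standalone sketch of that idiom; the helper name
hmac_one_shot() and the choice of WC_SHA256 are illustrative only and are not part
of this patch.

    #include <wolfssl/wolfcrypt/settings.h>
    #include <wolfssl/wolfcrypt/hmac.h>

    /* Hypothetical helper showing the cleanup pattern: every error path
     * jumps to "out" so wc_HmacFree() always runs exactly once. */
    static int hmac_one_shot(const byte* key, word32 keySz,
                             const byte* msg, word32 msgSz, byte* digest)
    {
        Hmac hmac;
        int  ret;

        XMEMSET(&hmac, 0, sizeof(hmac)); /* makes the free at "out" harmless
                                          * even if init fails early */

        ret = wc_HmacInit(&hmac, NULL, INVALID_DEVID);
        if (ret != 0)
            goto out;              /* instead of returning and leaking state */
        ret = wc_HmacSetKey(&hmac, WC_SHA256, key, keySz);
        if (ret != 0)
            goto out;
        ret = wc_HmacUpdate(&hmac, msg, msgSz);
        if (ret != 0)
            goto out;
        ret = wc_HmacFinal(&hmac, digest);

    out:
        wc_HmacFree(&hmac);        /* single cleanup point */
        return ret;
    }
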
--- wolfcrypt/test/test.c | 212 ++++++++++++++++++++++++++---------------- 1 file changed, 131 insertions(+), 81 deletions(-) diff --git a/wolfcrypt/test/test.c b/wolfcrypt/test/test.c index f10242742..4afcc03ab 100644 --- a/wolfcrypt/test/test.c +++ b/wolfcrypt/test/test.c @@ -7440,6 +7440,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_md5_test(void) test_hmac[2] = c; test_hmac[3] = d; + XMEMSET(&hmac, 0, sizeof(hmac)); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) WC_ALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, return WC_TEST_RET_ENC_EC(MEMORY_E)); @@ -7454,29 +7455,29 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_md5_test(void) ret = wc_HmacInit(&hmac, HEAP_HINT, devId); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); ret = wc_HmacSetKey(&hmac, WC_MD5, (byte*)keys[i], (word32)XSTRLEN(keys[i])); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) ret = wc_HmacCopy(&hmac, hmac_copy); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); #endif ret = wc_HmacUpdate(&hmac, (byte*)test_hmac[i].input, (word32)test_hmac[i].inLen); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); ret = wc_HmacFinal(&hmac, hash); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); if (XMEMCMP(hash, test_hmac[i].output, WC_MD5_DIGEST_SIZE) != 0) - return WC_TEST_RET_ENC_I(i); + ERROR_OUT(WC_TEST_RET_ENC_I(i), out); wc_HmacFree(&hmac); @@ -7484,28 +7485,35 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_md5_test(void) ret = wc_HmacUpdate(hmac_copy, (byte*)test_hmac[i].input, (word32)test_hmac[i].inLen); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); ret = wc_HmacFinal(hmac_copy, hash); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); if (XMEMCMP(hash, test_hmac[i].output, WC_MD5_DIGEST_SIZE) != 0) - return WC_TEST_RET_ENC_I(i); + ERROR_OUT(WC_TEST_RET_ENC_I(i), out); wc_HmacFree(hmac_copy); #endif } +out: + + wc_HmacFree(&hmac); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) + wc_HmacFree(hmac_copy); WC_FREE_VAR_EX(hmac_copy, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); #endif #if !defined(HAVE_FIPS) || FIPS_VERSION3_GE(6,0,0) - if ((ret = wc_HmacSizeByType(WC_MD5)) != WC_MD5_DIGEST_SIZE) - return WC_TEST_RET_ENC_EC(ret); + if (ret == 0) { + if ((ret = wc_HmacSizeByType(WC_MD5)) != WC_MD5_DIGEST_SIZE) + return WC_TEST_RET_ENC_EC(ret); + ret = 0; + } #endif - return 0; + return ret; } #endif /* !NO_HMAC && !NO_MD5 && (!HAVE_FIPS || (HAVE_FIPS_VERSION < 5)) */ @@ -7577,6 +7585,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha_test(void) test_hmac[2] = c; test_hmac[3] = d; + XMEMSET(&hmac, 0, sizeof(hmac)); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) WC_ALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, return WC_TEST_RET_ENC_EC(MEMORY_E)); @@ -7589,38 +7598,38 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha_test(void) #endif if ((ret = wc_HmacInit(&hmac, HEAP_HINT, devId)) != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); ret = wc_HmacSetKey(&hmac, WC_SHA, (byte*)keys[i], (word32)XSTRLEN(keys[i])); #if FIPS_VERSION3_GE(6,0,0) if (i == 1) { if (ret != WC_NO_ERR_TRACE(HMAC_MIN_KEYLEN_E)) - 
return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); /* Now use the ex and allow short keys with FIPS option */ ret = wc_HmacSetKey_ex(&hmac, WC_SHA, (byte*) keys[i], (word32)XSTRLEN(keys[i]), allowShortKeyWithFips); } #endif if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) ret = wc_HmacCopy(&hmac, hmac_copy); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); #endif ret = wc_HmacUpdate(&hmac, (byte*)test_hmac[i].input, (word32)test_hmac[i].inLen); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); ret = wc_HmacFinal(&hmac, hash); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); if (XMEMCMP(hash, test_hmac[i].output, WC_SHA_DIGEST_SIZE) != 0) - return WC_TEST_RET_ENC_I(i); + ERROR_OUT(WC_TEST_RET_ENC_I(i), out); wc_HmacFree(&hmac); @@ -7628,28 +7637,35 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha_test(void) ret = wc_HmacUpdate(hmac_copy, (byte*)test_hmac[i].input, (word32)test_hmac[i].inLen); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); ret = wc_HmacFinal(hmac_copy, hash); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); if (XMEMCMP(hash, test_hmac[i].output, WC_SHA_DIGEST_SIZE) != 0) - return WC_TEST_RET_ENC_I(i); + ERROR_OUT(WC_TEST_RET_ENC_I(i), out); wc_HmacFree(hmac_copy); #endif } +out: + + wc_HmacFree(&hmac); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) + wc_HmacFree(hmac_copy); WC_FREE_VAR_EX(hmac_copy, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); #endif #if !defined(HAVE_FIPS) || FIPS_VERSION3_GE(6,0,0) - if ((ret = wc_HmacSizeByType(WC_SHA)) != WC_SHA_DIGEST_SIZE) - return WC_TEST_RET_ENC_EC(ret); + if (ret == 0) { + if ((ret = wc_HmacSizeByType(WC_SHA)) != WC_SHA_DIGEST_SIZE) + return WC_TEST_RET_ENC_EC(ret); + ret = 0; + } #endif - return 0; + return ret; } #endif @@ -7720,6 +7736,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha224_test(void) test_hmac[2] = c; test_hmac[3] = d; + XMEMSET(&hmac, 0, sizeof(hmac)); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) WC_ALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, return WC_TEST_RET_ENC_EC(MEMORY_E)); @@ -7732,29 +7749,29 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha224_test(void) #endif if ((ret = wc_HmacInit(&hmac, HEAP_HINT, devId)) != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); ret = wc_HmacSetKey(&hmac, WC_SHA224, (byte*)keys[i], (word32)XSTRLEN(keys[i])); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) ret = wc_HmacCopy(&hmac, hmac_copy); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); #endif ret = wc_HmacUpdate(&hmac, (byte*)test_hmac[i].input, (word32)test_hmac[i].inLen); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); ret = wc_HmacFinal(&hmac, hash); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); if (XMEMCMP(hash, test_hmac[i].output, WC_SHA224_DIGEST_SIZE) != 0) - return WC_TEST_RET_ENC_I(i); + ERROR_OUT(WC_TEST_RET_ENC_I(i), out); wc_HmacFree(&hmac); @@ -7762,28 +7779,35 @@ WOLFSSL_TEST_SUBROUTINE 
wc_test_ret_t hmac_sha224_test(void) ret = wc_HmacUpdate(hmac_copy, (byte*)test_hmac[i].input, (word32)test_hmac[i].inLen); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); ret = wc_HmacFinal(hmac_copy, hash); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); if (XMEMCMP(hash, test_hmac[i].output, WC_SHA224_DIGEST_SIZE) != 0) - return WC_TEST_RET_ENC_I(i); + ERROR_OUT(WC_TEST_RET_ENC_I(i), out); wc_HmacFree(hmac_copy); #endif } +out: + + wc_HmacFree(&hmac); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) + wc_HmacFree(hmac_copy); WC_FREE_VAR_EX(hmac_copy, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); #endif #if !defined(HAVE_FIPS) || FIPS_VERSION3_GE(6,0,0) - if ((ret = wc_HmacSizeByType(WC_SHA224)) != WC_SHA224_DIGEST_SIZE) - return WC_TEST_RET_ENC_EC(ret); + if (ret == 0) { + if ((ret = wc_HmacSizeByType(WC_SHA224)) != WC_SHA224_DIGEST_SIZE) + return WC_TEST_RET_ENC_EC(ret); + ret = 0; + } #endif - return 0; + return ret; } #endif @@ -7869,6 +7893,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha256_test(void) test_hmac[3] = d; test_hmac[4] = e; + XMEMSET(&hmac, 0, sizeof(hmac)); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) WC_ALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, return WC_TEST_RET_ENC_EC(MEMORY_E)); @@ -7885,31 +7910,31 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha256_test(void) #endif if (wc_HmacInit(&hmac, HEAP_HINT, devId) != 0) - return WC_TEST_RET_ENC_I(i); + ERROR_OUT(WC_TEST_RET_ENC_I(i), out); ret = wc_HmacSetKey(&hmac, WC_SHA256, (byte*)keys[i], (word32)XSTRLEN(keys[i])); if (ret != 0) - return WC_TEST_RET_ENC_I(i); + ERROR_OUT(WC_TEST_RET_ENC_I(i), out); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) ret = wc_HmacCopy(&hmac, hmac_copy); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); #endif if (test_hmac[i].input != NULL) { ret = wc_HmacUpdate(&hmac, (byte*)test_hmac[i].input, (word32)test_hmac[i].inLen); if (ret != 0) - return WC_TEST_RET_ENC_I(i); + ERROR_OUT(WC_TEST_RET_ENC_I(i), out); } ret = wc_HmacFinal(&hmac, hash); if (ret != 0) - return WC_TEST_RET_ENC_I(i); + ERROR_OUT(WC_TEST_RET_ENC_I(i), out); if (XMEMCMP(hash, test_hmac[i].output, WC_SHA256_DIGEST_SIZE) != 0) - return WC_TEST_RET_ENC_I(i); + ERROR_OUT(WC_TEST_RET_ENC_I(i), out); wc_HmacFree(&hmac); @@ -7918,20 +7943,24 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha256_test(void) ret = wc_HmacUpdate(hmac_copy, (byte*)test_hmac[i].input, (word32)test_hmac[i].inLen); if (ret != 0) - return WC_TEST_RET_ENC_I(i); + ERROR_OUT(WC_TEST_RET_ENC_I(i), out); } ret = wc_HmacFinal(hmac_copy, hash); if (ret != 0) - return WC_TEST_RET_ENC_I(i); + ERROR_OUT(WC_TEST_RET_ENC_I(i), out); if (XMEMCMP(hash, test_hmac[i].output, WC_SHA256_DIGEST_SIZE) != 0) - return WC_TEST_RET_ENC_I(i); + ERROR_OUT(WC_TEST_RET_ENC_I(i), out); wc_HmacFree(hmac_copy); #endif } +out: + + wc_HmacFree(&hmac); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) + wc_HmacFree(hmac_copy); WC_FREE_VAR_EX(hmac_copy, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); #endif @@ -8030,6 +8059,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha384_test(void) test_hmac[2] = c; test_hmac[3] = d; + XMEMSET(&hmac, 0, sizeof(hmac)); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) WC_ALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, return 
WC_TEST_RET_ENC_EC(MEMORY_E)); @@ -8042,29 +8072,29 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha384_test(void) #endif if ((ret = wc_HmacInit(&hmac, HEAP_HINT, devId)) != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); ret = wc_HmacSetKey(&hmac, WC_SHA384, (byte*)keys[i], (word32)XSTRLEN(keys[i])); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) ret = wc_HmacCopy(&hmac, hmac_copy); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); #endif ret = wc_HmacUpdate(&hmac, (byte*)test_hmac[i].input, (word32)test_hmac[i].inLen); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); ret = wc_HmacFinal(&hmac, hash); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); if (XMEMCMP(hash, test_hmac[i].output, WC_SHA384_DIGEST_SIZE) != 0) - return WC_TEST_RET_ENC_I(i); + ERROR_OUT(WC_TEST_RET_ENC_I(i), out); wc_HmacFree(&hmac); @@ -8072,28 +8102,35 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha384_test(void) ret = wc_HmacUpdate(hmac_copy, (byte*)test_hmac[i].input, (word32)test_hmac[i].inLen); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); ret = wc_HmacFinal(hmac_copy, hash); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); if (XMEMCMP(hash, test_hmac[i].output, WC_SHA384_DIGEST_SIZE) != 0) - return WC_TEST_RET_ENC_I(i); + ERROR_OUT(WC_TEST_RET_ENC_I(i), out); wc_HmacFree(hmac_copy); #endif } +out: + + wc_HmacFree(&hmac); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) + wc_HmacFree(hmac_copy); WC_FREE_VAR_EX(hmac_copy, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); #endif #if !defined(HAVE_FIPS) || FIPS_VERSION3_GE(6,0,0) - if ((ret = wc_HmacSizeByType(WC_SHA384)) != WC_SHA384_DIGEST_SIZE) - return WC_TEST_RET_ENC_EC(ret); + if (ret == 0) { + if ((ret = wc_HmacSizeByType(WC_SHA384)) != WC_SHA384_DIGEST_SIZE) + return WC_TEST_RET_ENC_EC(ret); + ret = 0; + } #endif - return 0; + return ret; } #endif @@ -8177,6 +8214,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha512_test(void) test_hmac[2] = c; test_hmac[3] = d; + XMEMSET(&hmac, 0, sizeof(hmac)); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) WC_ALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, return WC_TEST_RET_ENC_EC(MEMORY_E)); @@ -8189,29 +8227,29 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha512_test(void) #endif if ((ret = wc_HmacInit(&hmac, HEAP_HINT, devId)) != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); ret = wc_HmacSetKey(&hmac, WC_SHA512, (byte*)keys[i], (word32)XSTRLEN(keys[i])); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) ret = wc_HmacCopy(&hmac, hmac_copy); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); #endif ret = wc_HmacUpdate(&hmac, (byte*)test_hmac[i].input, (word32)test_hmac[i].inLen); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); ret = wc_HmacFinal(&hmac, hash); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); if (XMEMCMP(hash, test_hmac[i].output, WC_SHA512_DIGEST_SIZE) != 0) - return WC_TEST_RET_ENC_I(i); + 
ERROR_OUT(WC_TEST_RET_ENC_I(i), out); wc_HmacFree(&hmac); @@ -8219,28 +8257,35 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha512_test(void) ret = wc_HmacUpdate(hmac_copy, (byte*)test_hmac[i].input, (word32)test_hmac[i].inLen); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); ret = wc_HmacFinal(hmac_copy, hash); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); if (XMEMCMP(hash, test_hmac[i].output, WC_SHA512_DIGEST_SIZE) != 0) - return WC_TEST_RET_ENC_I(i); + ERROR_OUT(WC_TEST_RET_ENC_I(i), out); wc_HmacFree(hmac_copy); #endif } +out: + + wc_HmacFree(&hmac); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) + wc_HmacFree(hmac_copy); WC_FREE_VAR_EX(hmac_copy, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); #endif #if !defined(HAVE_FIPS) || FIPS_VERSION3_GE(6,0,0) - if ((ret = wc_HmacSizeByType(WC_SHA512)) != WC_SHA512_DIGEST_SIZE) - return WC_TEST_RET_ENC_EC(ret); + if (ret == 0) { + if ((ret = wc_HmacSizeByType(WC_SHA512)) != WC_SHA512_DIGEST_SIZE) + return WC_TEST_RET_ENC_EC(ret); + ret = 0; + } #endif - return 0; + return ret; } #endif @@ -8381,6 +8426,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha3_test(void) int ret; WOLFSSL_ENTER("hmac_sha3_test"); + XMEMSET(&hmac, 0, sizeof(hmac)); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) WC_ALLOC_VAR_EX(hmac_copy, Hmac, 1, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER, return WC_TEST_RET_ENC_EC(MEMORY_E)); @@ -8394,28 +8440,28 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha3_test(void) for (; i < iMax; i++) { for (j = 0; j < jMax; j++) { if ((ret = wc_HmacInit(&hmac, HEAP_HINT, devId)) != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); ret = wc_HmacSetKey(&hmac, hashType[j], (byte*)key[i], (word32)XSTRLEN(key[i])); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) ret = wc_HmacCopy(&hmac, hmac_copy); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); #endif ret = wc_HmacUpdate(&hmac, (byte*)input[i], (word32)XSTRLEN(input[i])); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); ret = wc_HmacFinal(&hmac, hash); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); if (XMEMCMP(hash, output[(i*jMax) + j], (size_t)hashSz[j]) != 0) - return WC_TEST_RET_ENC_NC; + ERROR_OUT(WC_TEST_RET_ENC_NC, out); wc_HmacFree(&hmac); @@ -8423,12 +8469,12 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha3_test(void) ret = wc_HmacUpdate(hmac_copy, (byte*)input[i], (word32)XSTRLEN(input[i])); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); ret = wc_HmacFinal(hmac_copy, hash); if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); if (XMEMCMP(hash, output[(i*jMax) + j], (size_t)hashSz[j]) != 0) - return WC_TEST_RET_ENC_NC; + ERROR_OUT(WC_TEST_RET_ENC_NC, out); wc_HmacFree(hmac_copy); #endif @@ -8439,16 +8485,20 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha3_test(void) #if !defined(HAVE_FIPS) || FIPS_VERSION3_GE(6,0,0) ret = wc_HmacSizeByType(hashType[j]); if (ret != hashSz[j]) - return WC_TEST_RET_ENC_EC(ret); + ERROR_OUT(WC_TEST_RET_ENC_EC(ret), out); #endif } } +out: + + wc_HmacFree(&hmac); #if !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GT(7,0,0)) + 
wc_HmacFree(hmac_copy); WC_FREE_VAR_EX(hmac_copy, HEAP_HINT, DYNAMIC_TYPE_TMP_BUFFER); #endif - return 0; + return ret; } #endif From 83e9a0780f6c1f8aca9179e634ca85978451c1cd Mon Sep 17 00:00:00 2001 From: Daniel Pouzzner Date: Thu, 18 Dec 2025 11:09:37 -0600 Subject: [PATCH 07/27] wolfcrypt/src/wc_lms.c: fix leak in wc_LmsKey_Reload(). --- wolfcrypt/src/wc_lms.c | 1 + 1 file changed, 1 insertion(+) diff --git a/wolfcrypt/src/wc_lms.c b/wolfcrypt/src/wc_lms.c index 94634140e..0f4688ea1 100644 --- a/wolfcrypt/src/wc_lms.c +++ b/wolfcrypt/src/wc_lms.c @@ -913,6 +913,7 @@ int wc_LmsKey_Reload(LmsKey* key) /* Reload the key ready for signing. */ ret = wc_hss_reload_key(state, key->priv_raw, &key->priv, key->priv_data, NULL); + wc_lmskey_state_free(state); } ForceZero(state, sizeof(LmsState)); WC_FREE_VAR_EX(state, NULL, DYNAMIC_TYPE_TMP_BUFFER); From 4e15ccec35102677fe7f176b3fa5ecf0dd16e2d1 Mon Sep 17 00:00:00 2001 From: Juliusz Sosinowicz Date: Thu, 18 Dec 2025 18:40:54 +0100 Subject: [PATCH 08/27] rng-tools: increase jitter timeout --- .github/workflows/rng-tools.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/rng-tools.yml b/.github/workflows/rng-tools.yml index 44d3a20e2..ea4b62840 100644 --- a/.github/workflows/rng-tools.yml +++ b/.github/workflows/rng-tools.yml @@ -101,7 +101,7 @@ jobs: # Retry up to five times for i in {1..5}; do TEST_RES=0 - LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$GITHUB_WORKSPACE/build-dir/lib make check || TEST_RES=$? + LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$GITHUB_WORKSPACE/build-dir/lib RNGD_JITTER_TIMEOUT=100 make check || TEST_RES=$? if [ "$TEST_RES" -eq "0" ]; then break fi From 8a8ef3512eb8ff843169b2d42d2bc76c19b6bfed Mon Sep 17 00:00:00 2001 From: Daniel Pouzzner Date: Thu, 18 Dec 2025 11:48:31 -0600 Subject: [PATCH 09/27] src/internal.c: in FreeSSL_Ctx(), use wolfSSL_RefWithMutexFree(&ctx->ref), matching refactor in #8187. --- src/internal.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/internal.c b/src/internal.c index da7557070..111eb9ffd 100644 --- a/src/internal.c +++ b/src/internal.c @@ -3038,7 +3038,7 @@ void FreeSSL_Ctx(WOLFSSL_CTX* ctx) !defined(WOLFSSL_NO_DEF_TICKET_ENC_CB) && !defined(NO_TLS) TicketEncCbCtx_Free(&ctx->ticketKeyCtx); #endif - wolfSSL_RefFree(&ctx->ref); + wolfSSL_RefWithMutexFree(&ctx->ref); XFREE(ctx, heap, DYNAMIC_TYPE_CTX); } else { From 81d32f4fe62d069e5828f608285bff24dd7dafb6 Mon Sep 17 00:00:00 2001 From: Kareem Date: Thu, 18 Dec 2025 14:37:59 -0700 Subject: [PATCH 10/27] Move Curve25519 public key check to make_pub/make_pub_blind to cover the case where they are called directly by an application. --- wolfcrypt/src/curve25519.c | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/wolfcrypt/src/curve25519.c b/wolfcrypt/src/curve25519.c index 21b43e699..040be8388 100644 --- a/wolfcrypt/src/curve25519.c +++ b/wolfcrypt/src/curve25519.c @@ -202,6 +202,15 @@ int wc_curve25519_make_pub(int public_size, byte* pub, int private_size, #endif /* !WOLFSSL_CURVE25519_BLINDING */ #endif /* FREESCALE_LTC_ECC */ +/* If WOLFSSL_CURVE25519_BLINDING is defined, this check is run in + * wc_curve25519_make_pub_blind since it could be called directly. 
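
As a caller-side illustration of the direct-call case this comment describes (not part of the patch; the helper name is hypothetical), an application deriving a public key without going through wc_curve25519_make_key() now gets the same validation:

    #include <wolfssl/wolfcrypt/curve25519.h>

    /* Hypothetical helper: derive a Curve25519 public key from a raw 32-byte
     * private scalar. With this change the resulting point is checked inside
     * wc_curve25519_make_pub() itself rather than only in
     * wc_curve25519_make_key(). */
    static int derive_curve25519_pub(const byte* priv, byte* pub)
    {
        return wc_curve25519_make_pub(CURVE25519_KEYSIZE, pub,
                                      CURVE25519_KEYSIZE, priv);
    }
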
*/ +#if !defined(WOLFSSL_CURVE25519_BLINDING) || defined(FREESCALE_LTC_ECC) + if (ret == 0) { + ret = wc_curve25519_check_public(pub, public_size, + EC25519_LITTLE_ENDIAN); + } +#endif + return ret; } @@ -297,6 +306,11 @@ int wc_curve25519_make_pub_blind(int public_size, byte* pub, int private_size, ret = curve25519_smul_blind(pub, priv, (byte*)kCurve25519BasePoint, rng); #endif + if (ret == 0) { + ret = wc_curve25519_check_public(pub, public_size, + EC25519_LITTLE_ENDIAN); + } + return ret; } #endif @@ -463,11 +477,6 @@ int wc_curve25519_make_key(WC_RNG* rng, int keysize, curve25519_key* key) ret = wc_curve25519_make_pub((int)sizeof(key->p.point), key->p.point, (int)sizeof(key->k), key->k); #endif - if (ret == 0) { - ret = wc_curve25519_check_public(key->p.point, - (word32)sizeof(key->p.point), - EC25519_LITTLE_ENDIAN); - } key->pubSet = (ret == 0); } #endif From 5eef52c6facde78993695714c1ead68993274258 Mon Sep 17 00:00:00 2001 From: Chris Conlon Date: Wed, 17 Dec 2025 15:52:10 -0700 Subject: [PATCH 11/27] Add test for PKCS#7 SignedData with non-OCTET_STRING content --- tests/api/test_pkcs7.c | 131 +++++++++++++++++++++++++++++++++++++++++ tests/api/test_pkcs7.h | 4 +- 2 files changed, 134 insertions(+), 1 deletion(-) diff --git a/tests/api/test_pkcs7.c b/tests/api/test_pkcs7.c index 6c5706f9d..d60e3abb6 100644 --- a/tests/api/test_pkcs7.c +++ b/tests/api/test_pkcs7.c @@ -4383,4 +4383,135 @@ int test_wc_PKCS7_DecodeCompressedData(void) return EXPECT_RESULT(); } +/* + * Test for PKCS#7 SignedData with non-OCTET_STRING content + * (PKCS#7 style vs CMS) + * + * Tests parsing PKCS#7 SignedData where the encapsulated content + * is a SEQUENCE (as allowed by original PKCS#7 spec "ANY DEFINED BY + * contentType") rather than an OCTET STRING (as mandated by CMS). This showed + * up in use case of Authenticode signatures. + */ +int test_wc_PKCS7_VerifySignedData_PKCS7ContentSeq(void) +{ + EXPECT_DECLS; +#if defined(HAVE_PKCS7) + PKCS7* pkcs7 = NULL; +#ifndef NO_PKCS7_STREAM + word32 idx; + int ret; +#endif + + /* + * Hand-crafted PKCS#7 SignedData (degenerate, no signers) with: + * - Content type OID (1.3.6.1.4.1.311.2.1.4 = SPC_INDIRECT_DATA) + * - Content is a SEQUENCE, NOT an OCTET STRING + * - eContent is encoded as "ANY" type per original PKCS#7 spec. + * + * This test ensures wolfSSL's PKCS7 streaming code can correctly + * parse SignedData types when the encapsulated content is not an OCTET + * STRING (as CMS requires) but rather a SEQUENCE or other type + * (as PKCS#7's "ANY" type allows). Microsoft Authenticode signatures + * use this format with SPC_INDIRECT_DATA content. 
+ * + * Structure: + * ContentInfo SEQUENCE + * contentType OID signedData + * [0] SignedData SEQUENCE + * version INTEGER 1 + * digestAlgorithms SET { sha256 } + * encapContentInfo SEQUENCE + * eContentType OID 1.3.6.1.4.1.311.2.1.4 + * [0] eContent + * SEQUENCE { OID, OCTET STRING } - SEQUENCE not OCTET STRING + * signerInfos SET {} (empty = degenerate) + */ + static const byte pkcs7Content[] = { + /* ContentInfo SEQUENCE */ + 0x30, 0x56, + /* contentType OID: 1.2.840.113549.1.7.2 (signedData) */ + 0x06, 0x09, 0x2A, 0x86, 0x48, 0x86, 0xF7, 0x0D, 0x01, 0x07, 0x02, + /* [0] EXPLICIT - content */ + 0xA0, 0x49, + /* SignedData SEQUENCE */ + 0x30, 0x47, + /* version INTEGER 1 */ + 0x02, 0x01, 0x01, + /* digestAlgorithms SET */ + 0x31, 0x0F, + /* AlgorithmIdentifier SEQUENCE */ + 0x30, 0x0D, + /* OID sha256: 2.16.840.1.101.3.4.2.1 */ + 0x06, 0x09, 0x60, 0x86, 0x48, 0x01, 0x65, 0x03, + 0x04, 0x02, 0x01, + /* NULL */ + 0x05, 0x00, + /* encapContentInfo SEQUENCE */ + 0x30, 0x2F, + /* eContentType OID: 1.3.6.1.4.1.311.2.1.4 (SPC_INDIRECT_DATA) */ + 0x06, 0x0A, 0x2B, 0x06, 0x01, 0x04, 0x01, 0x82, + 0x37, 0x02, 0x01, 0x04, + /* [0] EXPLICIT - eContent */ + 0xA0, 0x21, + /* Content SEQUENCE (0x30), not OCTET STRING (0x04) + * Following PKCS#7 "ANY" type, not CMS OCTET STRING */ + 0x30, 0x1F, + /* Content: SEQUENCE { OID, OCTET STRING with 24 bytes } */ + 0x06, 0x03, 0x55, 0x04, 0x03, /* OID 2.5.4.3 (5 bytes) */ + 0x04, 0x18, /* OCTET STRING length 24 */ + 0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, 0x20, /* "This is " */ + 0x74, 0x65, 0x73, 0x74, 0x20, 0x63, 0x6F, 0x6E, /* "test con" */ + 0x74, 0x65, 0x6E, 0x74, 0x20, 0x64, 0x61, 0x74, /* "tent dat" */ + /* signerInfos SET - empty for degenerate */ + 0x31, 0x00 + }; + + /* Test non-streaming verification */ + ExpectNotNull(pkcs7 = wc_PKCS7_New(HEAP_HINT, testDevId)); + ExpectIntEQ(wc_PKCS7_Init(pkcs7, HEAP_HINT, INVALID_DEVID), 0); + ExpectIntEQ(wc_PKCS7_InitWithCert(pkcs7, NULL, 0), 0); + ExpectIntEQ(wc_PKCS7_VerifySignedData(pkcs7, (byte*)pkcs7Content, + (word32)sizeof(pkcs7Content)), 0); + + /* Verify content was parsed correctly */ + if (pkcs7 != NULL) { + /* contentIsPkcs7Type should be set */ + ExpectIntEQ(pkcs7->contentIsPkcs7Type, 1); + /* Content should have been parsed (33 bytes) */ + ExpectIntEQ(pkcs7->contentSz, 33); + ExpectNotNull(pkcs7->content); + } + wc_PKCS7_Free(pkcs7); + pkcs7 = NULL; + +#ifndef NO_PKCS7_STREAM + /* Test streaming verification - feed data byte by byte */ + ExpectNotNull(pkcs7 = wc_PKCS7_New(HEAP_HINT, testDevId)); + ExpectIntEQ(wc_PKCS7_Init(pkcs7, HEAP_HINT, INVALID_DEVID), 0); + ExpectIntEQ(wc_PKCS7_InitWithCert(pkcs7, NULL, 0), 0); + + /* Feed data byte by byte to exercise streaming path */ + ret = WC_NO_ERR_TRACE(WC_PKCS7_WANT_READ_E); + for (idx = 0; idx < (word32)sizeof(pkcs7Content) && ret != 0; idx++) { + ret = wc_PKCS7_VerifySignedData(pkcs7, + (byte*)pkcs7Content + idx, 1); + if (ret < 0 && ret != WC_NO_ERR_TRACE(WC_PKCS7_WANT_READ_E)) { + /* Unexpected error */ + break; + } + } + + /* Expecting ret = 0, not ASN_PARSE_E or other negative error */ + ExpectIntEQ(ret, 0); + + if (pkcs7 != NULL) { + ExpectIntEQ(pkcs7->contentIsPkcs7Type, 1); + ExpectIntEQ(pkcs7->contentSz, 33); + ExpectNotNull(pkcs7->content); + } + wc_PKCS7_Free(pkcs7); +#endif /* !NO_PKCS7_STREAM */ +#endif /* HAVE_PKCS7 */ + return EXPECT_RESULT(); +} diff --git a/tests/api/test_pkcs7.h b/tests/api/test_pkcs7.h index 054eda248..accd348ce 100644 --- a/tests/api/test_pkcs7.h +++ b/tests/api/test_pkcs7.h @@ -48,6 +48,7 @@ int 
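
For reference on the values checked above: the encapsulated content begins with the two header bytes 0x30 0x1F, and its 0x1F (31) byte body is the 5-byte OID plus the 2-byte OCTET STRING header plus 24 bytes of data, so the parsed content spans 31 + 2 = 33 bytes, which is why the test expects pkcs7->contentSz to equal 33.
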
test_wc_PKCS7_SetOriEncryptCtx(void); int test_wc_PKCS7_SetOriDecryptCtx(void); int test_wc_PKCS7_DecodeCompressedData(void); int test_wc_PKCS7_DecodeEnvelopedData_multiple_recipients(void); +int test_wc_PKCS7_VerifySignedData_PKCS7ContentSeq(void); #define TEST_PKCS7_DECLS \ @@ -63,7 +64,8 @@ int test_wc_PKCS7_DecodeEnvelopedData_multiple_recipients(void); TEST_DECL_GROUP("pkcs7_sd", test_wc_PKCS7_VerifySignedData_ECC), \ TEST_DECL_GROUP("pkcs7_sd", test_wc_PKCS7_Degenerate), \ TEST_DECL_GROUP("pkcs7_sd", test_wc_PKCS7_BER), \ - TEST_DECL_GROUP("pkcs7_sd", test_wc_PKCS7_NoDefaultSignedAttribs) + TEST_DECL_GROUP("pkcs7_sd", test_wc_PKCS7_NoDefaultSignedAttribs), \ + TEST_DECL_GROUP("pkcs7_sd", test_wc_PKCS7_VerifySignedData_PKCS7ContentSeq) #define TEST_PKCS7_ENCRYPTED_DATA_DECLS \ TEST_DECL_GROUP("pkcs7_ed", test_wc_PKCS7_DecodeEnvelopedData_stream), \ From c238defe23cdd5be2b5ea26fc8533740b56ab9a8 Mon Sep 17 00:00:00 2001 From: Kareem Date: Thu, 18 Dec 2025 15:32:59 -0700 Subject: [PATCH 12/27] Add cast for public_size --- wolfcrypt/src/curve25519.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/wolfcrypt/src/curve25519.c b/wolfcrypt/src/curve25519.c index 040be8388..d0db86b25 100644 --- a/wolfcrypt/src/curve25519.c +++ b/wolfcrypt/src/curve25519.c @@ -206,7 +206,7 @@ int wc_curve25519_make_pub(int public_size, byte* pub, int private_size, * wc_curve25519_make_pub_blind since it could be called directly. */ #if !defined(WOLFSSL_CURVE25519_BLINDING) || defined(FREESCALE_LTC_ECC) if (ret == 0) { - ret = wc_curve25519_check_public(pub, public_size, + ret = wc_curve25519_check_public(pub, (word32)public_size, EC25519_LITTLE_ENDIAN); } #endif @@ -307,7 +307,7 @@ int wc_curve25519_make_pub_blind(int public_size, byte* pub, int private_size, #endif if (ret == 0) { - ret = wc_curve25519_check_public(pub, public_size, + ret = wc_curve25519_check_public(pub, (word32)public_size, EC25519_LITTLE_ENDIAN); } From d6dcd307366ddfac74a33e5417c847e53aa3e5c8 Mon Sep 17 00:00:00 2001 From: Chris Conlon Date: Thu, 18 Dec 2025 14:19:47 -0700 Subject: [PATCH 13/27] Fix PKCS#7 streaming for non OCTET STRING content types --- wolfcrypt/src/pkcs7.c | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/wolfcrypt/src/pkcs7.c b/wolfcrypt/src/pkcs7.c index a107d835a..45dbe70c0 100644 --- a/wolfcrypt/src/pkcs7.c +++ b/wolfcrypt/src/pkcs7.c @@ -5455,7 +5455,6 @@ static int PKCS7_VerifySignedData(wc_PKCS7* pkcs7, const byte* hashBuf, if (ret == 0 && GetMyVersion(pkiMsg, &idx, &version, pkiMsgSz) < 0) ret = ASN_PARSE_E; - /* version 1 follows RFC 2315 */ /* version 3 follows RFC 4108 */ if (ret == 0 && (version != 1 && version != 3)) { @@ -5673,6 +5672,15 @@ static int PKCS7_VerifySignedData(wc_PKCS7* pkcs7, const byte* hashBuf, * this as start of content. */ localIdx = start; pkcs7->contentIsPkcs7Type = 1; + + #ifndef NO_PKCS7_STREAM + /* Set streaming variables for PKCS#7 type content. 
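+              * (currContIdx marks where the content starts; currContSz and
+              * currContRmnSz track its total and remaining size so the
+              * streaming code can consume this SEQUENCE-typed content the
+              * same way as a CMS OCTET_STRING body.)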
+ * length contains the size from [0] EXPLICIT wrapper */ + pkcs7->stream->multi = 0; + pkcs7->stream->currContIdx = localIdx; + pkcs7->stream->currContSz = (word32)length; + pkcs7->stream->currContRmnSz = (word32)length; + #endif } else { /* CMS eContent OCTET_STRING */ @@ -5762,7 +5770,6 @@ static int PKCS7_VerifySignedData(wc_PKCS7* pkcs7, const byte* hashBuf, idx = localIdx; } else { - /* If either pkcs7->content and pkcs7->contentSz are set * (detached signature where user has set content explicitly * into pkcs7->content/contentSz) OR pkcs7->hashBuf and @@ -5862,7 +5869,7 @@ static int PKCS7_VerifySignedData(wc_PKCS7* pkcs7, const byte* hashBuf, /* copy content to pkcs7->contentDynamic */ if (keepContent && pkcs7->stream->content && - pkcs7->stream->contentSz >0) { + pkcs7->stream->contentSz > 0) { pkcs7->contentDynamic = (byte*)XMALLOC(pkcs7->stream->contentSz, pkcs7->heap, DYNAMIC_TYPE_PKCS7); if (pkcs7->contentDynamic == NULL) { From afe82b951282ac25ee19b5cd0288f49375fc7001 Mon Sep 17 00:00:00 2001 From: Chris Conlon Date: Thu, 18 Dec 2025 15:01:09 -0700 Subject: [PATCH 14/27] Fix PKCS#7 degenerate detection based on signerInfos length --- wolfcrypt/src/pkcs7.c | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/wolfcrypt/src/pkcs7.c b/wolfcrypt/src/pkcs7.c index 45dbe70c0..9c99f8cf5 100644 --- a/wolfcrypt/src/pkcs7.c +++ b/wolfcrypt/src/pkcs7.c @@ -6419,6 +6419,17 @@ static int PKCS7_VerifySignedData(wc_PKCS7* pkcs7, const byte* hashBuf, NO_USER_CHECK) < 0) ret = ASN_PARSE_E; + /* Update degenerate flag based on if signerInfos SET is empty. + * The earlier degenerate check at digestAlgorithms is an early + * optimization, but depending on degenerate case may not be + * detected until here. */ + if (ret == 0) { + degenerate = (length == 0) ? 
1 : 0; + #ifndef NO_PKCS7_STREAM + pkcs7->stream->degenerate = (degenerate != 0); + #endif + } + if (ret != 0) break; #ifndef NO_PKCS7_STREAM From a3072c7a8dc43bbf50792cc3d25a5c4d67fdea04 Mon Sep 17 00:00:00 2001 From: JacobBarthelmeh Date: Thu, 18 Dec 2025 17:18:39 -0700 Subject: [PATCH 15/27] fix for shadows global declaration warning --- src/internal.c | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/internal.c b/src/internal.c index 111eb9ffd..042245852 100644 --- a/src/internal.c +++ b/src/internal.c @@ -42234,11 +42234,11 @@ static int DisplaySecTrustError(CFErrorRef error, SecTrustRef trust) /* Description */ desc = CFErrorCopyDescription(error); if (desc) { - char buffer[256]; - if (CFStringGetCString(desc, buffer, sizeof(buffer), + char buf[256]; + if (CFStringGetCString(desc, buf, sizeof(buf), kCFStringEncodingUTF8)) { WOLFSSL_MSG_EX("SecTrustEvaluateWithError Error description: %s\n", - buffer); + buf); } CFRelease(desc); } From dd35f10b579e1f389bf76fea4d51171dc8fb1b7b Mon Sep 17 00:00:00 2001 From: Juliusz Sosinowicz Date: Thu, 18 Dec 2025 13:44:11 +0100 Subject: [PATCH 16/27] ed25519: validate presence of keys in export functions --- tests/api/test_ed25519.c | 2 +- wolfcrypt/src/ed25519.c | 7 ++++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/tests/api/test_ed25519.c b/tests/api/test_ed25519.c index 88d83d67a..6831f2162 100644 --- a/tests/api/test_ed25519.c +++ b/tests/api/test_ed25519.c @@ -490,7 +490,7 @@ int test_wc_Ed25519PublicKeyToDer(void) WC_NO_ERR_TRACE(BAD_FUNC_ARG)); ExpectIntEQ(wc_ed25519_init(&key), 0); ExpectIntEQ(wc_Ed25519PublicKeyToDer(&key, derBuf, 0, 0), - WC_NO_ERR_TRACE(BUFFER_E)); + WC_NO_ERR_TRACE(PUBLIC_KEY_E)); wc_ed25519_free(&key); /* Test good args */ diff --git a/wolfcrypt/src/ed25519.c b/wolfcrypt/src/ed25519.c index a03efb560..3744e1dfe 100644 --- a/wolfcrypt/src/ed25519.c +++ b/wolfcrypt/src/ed25519.c @@ -1127,6 +1127,9 @@ int wc_ed25519_export_public(const ed25519_key* key, byte* out, word32* outLen) return BUFFER_E; } + if (!key->pubKeySet) + return PUBLIC_KEY_E; + *outLen = ED25519_PUB_KEY_SIZE; XMEMCPY(out, key->p, ED25519_PUB_KEY_SIZE); @@ -1368,7 +1371,7 @@ int wc_ed25519_export_private_only(const ed25519_key* key, byte* out, word32* ou int wc_ed25519_export_private(const ed25519_key* key, byte* out, word32* outLen) { /* sanity checks on arguments */ - if (key == NULL || out == NULL || outLen == NULL) + if (key == NULL || !key->privKeySet || out == NULL || outLen == NULL) return BAD_FUNC_ARG; if (*outLen < ED25519_PRV_KEY_SIZE) { @@ -1398,6 +1401,8 @@ int wc_ed25519_export_key(const ed25519_key* key, /* export public part */ ret = wc_ed25519_export_public(key, pub, pubSz); + if (ret == WC_NO_ERR_TRACE(PUBLIC_KEY_E)) + ret = 0; /* ignore no public key */ return ret; } From 96c47cd18c33025b3027ef23d3c06a2f72e669c8 Mon Sep 17 00:00:00 2001 From: Daniel Pouzzner Date: Fri, 19 Dec 2025 08:55:35 -0600 Subject: [PATCH 17/27] wolfcrypt/test/test.c: in _rng_test(), inhibit the WC_RESEED_INTERVAL subtest if an rng callback is installed. --- wolfcrypt/test/test.c | 21 +++++++++++++++------ wolfssl/wolfcrypt/cryptocb.h | 2 +- 2 files changed, 16 insertions(+), 7 deletions(-) diff --git a/wolfcrypt/test/test.c b/wolfcrypt/test/test.c index 4afcc03ab..fac639f75 100644 --- a/wolfcrypt/test/test.c +++ b/wolfcrypt/test/test.c @@ -19748,14 +19748,23 @@ static wc_test_ret_t _rng_test(WC_RNG* rng) !defined(HAVE_SELFTEST) && (!defined(HAVE_FIPS) || FIPS_VERSION3_GE(5,0,0)) /* Test periodic reseed dynamics. 
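
A usage illustration of the stricter ed25519 export behavior introduced above (hypothetical helper, not from the patch):

    #include <wolfssl/wolfcrypt/ed25519.h>
    #include <wolfssl/wolfcrypt/error-crypt.h>

    /* Hypothetical helper: try to export the public half of a key. After this
     * change, a key holding only the private scalar (pubKeySet == 0) yields
     * PUBLIC_KEY_E here instead of copying unset bytes, while
     * wc_ed25519_export_key() still treats the missing public part as
     * non-fatal. */
    static int export_pub_or_skip(const ed25519_key* key,
                                  byte pub[ED25519_PUB_KEY_SIZE])
    {
        word32 pubSz = ED25519_PUB_KEY_SIZE;
        int ret = wc_ed25519_export_public(key, pub, &pubSz);

        if (ret == PUBLIC_KEY_E) {
            /* only the private scalar is loaded; nothing to export */
            return 0;
        }
        return ret;
    }
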
*/ - ((struct DRBG_internal *)rng->drbg)->reseedCtr = WC_RESEED_INTERVAL; +#ifdef WOLF_CRYPTO_CB + if (wc_CryptoCb_RandomBlock(rng, block, sizeof(block)) == + WC_NO_ERR_TRACE(CRYPTOCB_UNAVAILABLE)) + { +#endif + ((struct DRBG_internal *)rng->drbg)->reseedCtr = WC_RESEED_INTERVAL; - ret = wc_RNG_GenerateBlock(rng, block, sizeof(block)); - if (ret != 0) - return WC_TEST_RET_ENC_EC(ret); + ret = wc_RNG_GenerateBlock(rng, block, sizeof(block)); + if (ret != 0) + return WC_TEST_RET_ENC_EC(ret); + + if (((struct DRBG_internal *)rng->drbg)->reseedCtr == WC_RESEED_INTERVAL) + return WC_TEST_RET_ENC_NC; +#ifdef WOLF_CRYPTO_CB + } +#endif - if (((struct DRBG_internal *)rng->drbg)->reseedCtr == WC_RESEED_INTERVAL) - return WC_TEST_RET_ENC_NC; #endif /* HAVE_HASHDRBG && !CUSTOM_RAND_GENERATE_BLOCK && !HAVE_SELFTEST */ #if defined(WOLFSSL_TRACK_MEMORY) && defined(WOLFSSL_SMALL_STACK_CACHE) diff --git a/wolfssl/wolfcrypt/cryptocb.h b/wolfssl/wolfcrypt/cryptocb.h index c3e483222..455da94dd 100644 --- a/wolfssl/wolfcrypt/cryptocb.h +++ b/wolfssl/wolfcrypt/cryptocb.h @@ -737,7 +737,7 @@ WOLFSSL_LOCAL int wc_CryptoCb_Kdf_TwostepCmac(const byte * salt, word32 saltSz, #endif /* HAVE_CMAC_KDF */ #ifndef WC_NO_RNG -WOLFSSL_LOCAL int wc_CryptoCb_RandomBlock(WC_RNG* rng, byte* out, word32 sz); +WOLFSSL_TEST_VIS int wc_CryptoCb_RandomBlock(WC_RNG* rng, byte* out, word32 sz); WOLFSSL_LOCAL int wc_CryptoCb_RandomSeed(OS_Seed* os, byte* seed, word32 sz); #endif From fb26b2dfe11da733aaec3ab2d169088f467529e2 Mon Sep 17 00:00:00 2001 From: Daniel Pouzzner Date: Fri, 19 Dec 2025 09:07:14 -0600 Subject: [PATCH 18/27] wolfcrypt/test/test.c: in HMAC tests, initialize ret, to silence uninitvar from cppcheck-force-source. --- wolfcrypt/test/test.c | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/wolfcrypt/test/test.c b/wolfcrypt/test/test.c index fac639f75..0983262e7 100644 --- a/wolfcrypt/test/test.c +++ b/wolfcrypt/test/test.c @@ -7402,7 +7402,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_md5_test(void) testVector a, b, c, d; testVector test_hmac[4]; - wc_test_ret_t ret; + wc_test_ret_t ret = WC_TEST_RET_ENC_NC; int times = sizeof(test_hmac) / sizeof(testVector), i; WOLFSSL_ENTER("hmac_md5_test"); @@ -7543,7 +7543,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha_test(void) testVector a, b, c, d; testVector test_hmac[4]; - wc_test_ret_t ret; + wc_test_ret_t ret = WC_TEST_RET_ENC_NC; int times = sizeof(test_hmac) / sizeof(testVector), i; #if FIPS_VERSION3_GE(6,0,0) @@ -7700,7 +7700,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha224_test(void) testVector a, b, c, d; testVector test_hmac[4]; - wc_test_ret_t ret; + wc_test_ret_t ret = WC_TEST_RET_ENC_NC; int times = sizeof(test_hmac) / sizeof(testVector), i; WOLFSSL_ENTER("hmac_sha224_test"); @@ -7844,7 +7844,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha256_test(void) testVector a, b, c, d, e; testVector test_hmac[5]; - wc_test_ret_t ret; + wc_test_ret_t ret = WC_TEST_RET_ENC_NC; int times = sizeof(test_hmac) / sizeof(testVector), i; WOLFSSL_ENTER("hmac_sha256_test"); @@ -8014,7 +8014,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha384_test(void) testVector a, b, c, d; testVector test_hmac[4]; - wc_test_ret_t ret; + wc_test_ret_t ret = WC_TEST_RET_ENC_NC; int times = sizeof(test_hmac) / sizeof(testVector), i; WOLFSSL_ENTER("hmac_sha384_test"); @@ -8165,7 +8165,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha512_test(void) testVector a, b, c, d; testVector test_hmac[4]; - wc_test_ret_t ret; + wc_test_ret_t ret = 
WC_TEST_RET_ENC_NC; int times = sizeof(test_hmac) / sizeof(testVector), i; WOLFSSL_ENTER("hmac_sha512_test"); @@ -8423,7 +8423,7 @@ WOLFSSL_TEST_SUBROUTINE wc_test_ret_t hmac_sha3_test(void) int i = 0, iMax = sizeof(input) / sizeof(input[0]), j, jMax = sizeof(hashType) / sizeof(hashType[0]); - int ret; + wc_test_ret_t ret = WC_TEST_RET_ENC_NC; WOLFSSL_ENTER("hmac_sha3_test"); XMEMSET(&hmac, 0, sizeof(hmac)); From 6f95a9c58e440411f7e495582f405769369c6649 Mon Sep 17 00:00:00 2001 From: Daniel Pouzzner Date: Fri, 19 Dec 2025 10:30:14 -0600 Subject: [PATCH 19/27] wolfcrypt/src/random.c: in _InitRng(), remove "drbg_instantiated" conditional cleanup logic (Coverity true-benign-positive: DEADCODE because drbg_instantiated is always false when ret != DRBG_SUCCESS). --- wolfcrypt/src/random.c | 5 ----- 1 file changed, 5 deletions(-) diff --git a/wolfcrypt/src/random.c b/wolfcrypt/src/random.c index 2945a88b9..2b395c463 100644 --- a/wolfcrypt/src/random.c +++ b/wolfcrypt/src/random.c @@ -813,7 +813,6 @@ static int _InitRng(WC_RNG* rng, byte* nonce, word32 nonceSz, #ifdef HAVE_HASHDRBG word32 seedSz = SEED_SZ + SEED_BLOCK_SZ; WC_DECLARE_VAR(seed, byte, MAX_SEED_SZ, rng->heap); - int drbg_instantiated = 0; #ifdef WOLFSSL_SMALL_STACK_CACHE int drbg_scratch_instantiated = 0; #endif @@ -1020,8 +1019,6 @@ static int _InitRng(WC_RNG* rng, byte* nonce, word32 nonceSz, ret = Hash_DRBG_Instantiate((DRBG_internal *)rng->drbg, seed + SEED_BLOCK_SZ, seedSz - SEED_BLOCK_SZ, nonce, nonceSz, rng->heap, devId); - if (ret == 0) - drbg_instantiated = 1; } /* ret == 0 */ #ifdef WOLFSSL_SMALL_STACK @@ -1033,8 +1030,6 @@ static int _InitRng(WC_RNG* rng, byte* nonce, word32 nonceSz, WC_FREE_VAR_EX(seed, rng->heap, DYNAMIC_TYPE_SEED); if (ret != DRBG_SUCCESS) { - if (drbg_instantiated) - (void)Hash_DRBG_Uninstantiate((DRBG_internal *)rng->drbg); #if !defined(WOLFSSL_NO_MALLOC) || defined(WOLFSSL_STATIC_MEMORY) XFREE(rng->drbg, rng->heap, DYNAMIC_TYPE_RNG); #endif From d3f74557fe75d62f8e4bc8728e068ce7a4799780 Mon Sep 17 00:00:00 2001 From: Daniel Pouzzner Date: Fri, 19 Dec 2025 15:45:17 -0600 Subject: [PATCH 20/27] wolfcrypt/src/wolfentropy.c: add volatile attribute to entropy_memuse_initialized declaration; in wc_Entropy_Get(), if HAVE_FIPS, call Entropy_Init() if necessary, to accommodate FIPS KATs; in Entropy_Init(), add thread safety. --- wolfcrypt/src/wolfentropy.c | 34 +++++++++++++++++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/wolfcrypt/src/wolfentropy.c b/wolfcrypt/src/wolfentropy.c index 9fe47109d..eb25d4a53 100644 --- a/wolfcrypt/src/wolfentropy.c +++ b/wolfcrypt/src/wolfentropy.c @@ -58,7 +58,7 @@ data, use this implementation to seed and re-seed the DRBG. #define MAX_NOISE_CNT (MAX_ENTROPY_BITS * 8 + ENTROPY_EXTRA) /* MemUse entropy global state initialized. */ -static int entropy_memuse_initialized = 0; +static volatile int entropy_memuse_initialized = 0; /* Global SHA-3 object used for conditioning entropy and creating noise. */ static wc_Sha3 entropyHash; /* Reset the health tests. */ @@ -740,6 +740,21 @@ int wc_Entropy_Get(int bits, unsigned char* entropy, word32 len) int noise_len = (bits + ENTROPY_EXTRA) / ENTROPY_MIN; static byte noise[MAX_NOISE_CNT]; +#ifdef HAVE_FIPS + /* FIPS KATs, e.g. EccPrimitiveZ_KnownAnswerTest(), call wc_Entropy_Get() + * incidental to wc_InitRng(), without first calling Entropy_Init(), neither + * directly, nor indirectly via wolfCrypt_Init(). 
This matters, because + * KATs must be usable before wolfCrypt_Init() (indeed, in the library + * embodiment, the HMAC KAT always runs before wolfCrypt_Init(), incidental + * to fipsEntry()). Without the InitSha3() under Entropy_Init(), the + * SHA3_BLOCK function pointer is null when Sha3Update() is called by + * Entropy_MemUse(), which ends badly. + */ + if (!entropy_memuse_initialized) { + ret = Entropy_Init(); + } +#endif + /* Lock the mutex as collection uses globals. */ if ((ret == 0) && (wc_LockMutex(&entropy_mutex) != 0)) { ret = BAD_MUTEX_E; @@ -851,6 +866,19 @@ int Entropy_Init(void) #if !defined(SINGLE_THREADED) && !defined(WOLFSSL_MUTEX_INITIALIZER) ret = wc_InitMutex(&entropy_mutex); #endif + if (ret == 0) + ret = wc_LockMutex(&entropy_mutex); + + if (entropy_memuse_initialized) { + /* Short circuit return -- a competing thread initialized the state + * while we were waiting. Note, this is only threadsafe when + * WOLFSSL_MUTEX_INITIALIZER is defined. + */ + if (ret == 0) + wc_UnLockMutex(&entropy_mutex); + return 0; + } + if (ret == 0) { /* Initialize a SHA3-256 object for use in entropy operations. */ ret = wc_InitSha3_256(&entropyHash, NULL, INVALID_DEVID); @@ -872,6 +900,10 @@ int Entropy_Init(void) Entropy_StopThread(); #endif } + + if (ret != WC_NO_ERR_TRACE(BAD_MUTEX_E)) { + wc_UnLockMutex(&entropy_mutex); + } } return ret; From a7550346dd2549b52473ba0311b144531d580d00 Mon Sep 17 00:00:00 2001 From: Daniel Pouzzner Date: Fri, 19 Dec 2025 15:50:27 -0600 Subject: [PATCH 21/27] wolfcrypt/test/test.c: in rng_seed_test(), fix gates for FIPS 5.2.4. --- wolfcrypt/test/test.c | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/wolfcrypt/test/test.c b/wolfcrypt/test/test.c index 0983262e7..971502e29 100644 --- a/wolfcrypt/test/test.c +++ b/wolfcrypt/test/test.c @@ -19879,7 +19879,7 @@ static wc_test_ret_t rng_seed_test(void) * SEED_BLOCK_SZ, which depend on which seed back end is configured. */ #if defined(HAVE_ENTROPY_MEMUSE) && defined(HAVE_AMD_RDSEED) && \ - !(defined(HAVE_FIPS) && FIPS_VERSION_LT(6,0)) + !(defined(HAVE_FIPS) && FIPS_VERSION3_LT(6,0,0) && FIPS_VERSION3_NE(5,2,4)) #ifdef HAVE_FIPS WOLFSSL_SMALL_STACK_STATIC const byte check[] = { @@ -19917,7 +19917,7 @@ static wc_test_ret_t rng_seed_test(void) }; #endif #elif defined(HAVE_AMD_RDSEED) && \ - !(defined(HAVE_FIPS) && FIPS_VERSION_LT(6,0)) + !(defined(HAVE_FIPS) && FIPS_VERSION3_LT(6,0,0) && FIPS_VERSION3_NE(5,2,4)) WOLFSSL_SMALL_STACK_STATIC const byte check[] = { 0x2c, 0xd4, 0x9b, 0x1e, 0x1e, 0xe7, 0xb0, 0xb0, @@ -19926,7 +19926,7 @@ static wc_test_ret_t rng_seed_test(void) 0xa2, 0xe7, 0xe5, 0x90, 0x6d, 0x1f, 0x88, 0x98 }; #elif (defined(HAVE_INTEL_RDSEED) || defined(HAVE_INTEL_RDRAND)) && \ - !(defined(HAVE_FIPS) && FIPS_VERSION_LT(6,0)) + !(defined(HAVE_FIPS) && FIPS_VERSION3_LT(6,0,0) && FIPS_VERSION3_NE(5,2,4)) #ifdef HAVE_FIPS WOLFSSL_SMALL_STACK_STATIC const byte check[] = { @@ -19945,7 +19945,7 @@ static wc_test_ret_t rng_seed_test(void) }; #endif #elif defined(HAVE_INTEL_RDSEED) && \ - defined(HAVE_FIPS) && FIPS_VERSION_LT(6,0) + defined(HAVE_FIPS) && FIPS_VERSION3_LT(6,0,0) && FIPS_VERSION3_NE(5,2,4) WOLFSSL_SMALL_STACK_STATIC const byte check[] = { 0x27, 0xdd, 0xff, 0x5b, 0x21, 0x26, 0x0a, 0x48, From 1cb2231ff595c1922dd709b0bfff2f4abf8847ca Mon Sep 17 00:00:00 2001 From: David Garske Date: Fri, 19 Dec 2025 13:21:11 -0800 Subject: [PATCH 22/27] Added build option to allow certificate CA matching using AKID with signers SKID ( `WOLFSSL_ALLOW_AKID_SKID_MATCH`). 
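
The Entropy_Init() hunk above re-checks the initialized flag under the mutex so concurrent callers cannot both run the one-time setup. A simplified sketch of that init-once pattern (the names inited, init_lock and init_once are illustrative, and the real function additionally handles the WOLFSSL_MUTEX_INITIALIZER and BAD_MUTEX_E corner cases):

    #include <wolfssl/wolfcrypt/wc_port.h>
    #include <wolfssl/wolfcrypt/error-crypt.h>

    static volatile int inited = 0;
    static wolfSSL_Mutex init_lock;   /* assumed already initialized */

    static int init_once(void)
    {
        int ret = 0;

        if (wc_LockMutex(&init_lock) != 0)
            return BAD_MUTEX_E;
        if (!inited) {
            /* one-time setup, e.g. the SHA3-256 conditioning hash */
            inited = 1;
        }
        wc_UnLockMutex(&init_lock);
        return ret;
    }
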
Fixed issue with `cert->extAuthKeyIdSz` not being set with ASN template code. --- .wolfssl_known_macro_extras | 1 + wolfcrypt/src/asn.c | 61 +++++++++++++++++++------------------ wolfssl/wolfcrypt/asn.h | 32 +++++++++---------- 3 files changed, 48 insertions(+), 46 deletions(-) diff --git a/.wolfssl_known_macro_extras b/.wolfssl_known_macro_extras index b703f4db3..1b4255e69 100644 --- a/.wolfssl_known_macro_extras +++ b/.wolfssl_known_macro_extras @@ -653,6 +653,7 @@ WOLFSSL_AESNI_BY6 WOLFSSL_AES_CTR_EXAMPLE WOLFSSL_AFTER_DATE_CLOCK_SKEW WOLFSSL_ALGO_HW_MUTEX +WOLFSSL_ALLOW_AKID_SKID_MATCH WOLFSSL_ALLOW_BAD_TLS_LEGACY_VERSION WOLFSSL_ALLOW_CRIT_AIA WOLFSSL_ALLOW_CRIT_AKID diff --git a/wolfcrypt/src/asn.c b/wolfcrypt/src/asn.c index 8452f99c2..e64f27892 100644 --- a/wolfcrypt/src/asn.c +++ b/wolfcrypt/src/asn.c @@ -104,6 +104,9 @@ ASN Options: * DO NOT enable this unless required for interoperability. * WOLFSSL_ASN_EXTRA: Make more ASN.1 APIs available regardless of internal * usage. + * WOLFSSL_ALLOW_AKID_SKID_MATCH: By default cert issuer is found using hash + * of cert subject hash with signers subject hash. This option allows fallback + * to using AKID and SKID matching. */ #ifndef NO_RSA @@ -21339,42 +21342,25 @@ static int DecodeAuthKeyIdInternal(const byte* input, word32 sz, ret = DecodeAuthKeyId(input, sz, &extAuthKeyId, &extAuthKeyIdSz, &extAuthKeyIdIssuer, &extAuthKeyIdIssuerSz, &extAuthKeyIdIssuerSN, &extAuthKeyIdIssuerSNSz); - - if (ret != 0) - return ret; - -#ifndef WOLFSSL_ASN_TEMPLATE - - if (extAuthKeyIdSz == 0) - { + if (ret != 0) { cert->extAuthKeyIdSet = 0; - return 0; + return ret; } - cert->extAuthKeyIdSz = extAuthKeyIdSz; - -#if defined(OPENSSL_EXTRA) || defined(OPENSSL_EXTRA_X509_SMALL) -#ifdef WOLFSSL_AKID_NAME - cert->extRawAuthKeyIdSrc = input; - cert->extRawAuthKeyIdSz = sz; -#endif - cert->extAuthKeyIdSrc = extAuthKeyId; -#endif /* OPENSSL_EXTRA */ - - return GetHashId(extAuthKeyId, extAuthKeyIdSz, cert->extAuthKeyId, - HashIdAlg(cert->signatureOID)); -#else - /* Each field is optional */ if (extAuthKeyIdSz > 0) { -#ifdef OPENSSL_EXTRA - cert->extAuthKeyIdSrc = extAuthKeyId; + cert->extAuthKeyIdSet = 1; cert->extAuthKeyIdSz = extAuthKeyIdSz; -#endif /* OPENSSL_EXTRA */ + +#if defined(OPENSSL_EXTRA) || defined(OPENSSL_EXTRA_X509_SMALL) + cert->extAuthKeyIdSrc = extAuthKeyId; +#endif + /* Get the hash or hash of the hash if wrong size. */ ret = GetHashId(extAuthKeyId, (int)extAuthKeyIdSz, cert->extAuthKeyId, HashIdAlg(cert->signatureOID)); } + #ifdef WOLFSSL_AKID_NAME if (ret == 0 && extAuthKeyIdIssuerSz > 0) { cert->extAuthKeyIdIssuer = extAuthKeyIdIssuer; @@ -21386,15 +21372,15 @@ static int DecodeAuthKeyIdInternal(const byte* input, word32 sz, } #endif /* WOLFSSL_AKID_NAME */ if (ret == 0) { -#if defined(OPENSSL_EXTRA) && defined(WOLFSSL_AKID_NAME) +#if (defined(OPENSSL_EXTRA) || defined(OPENSSL_EXTRA_X509_SMALL)) && \ + defined(WOLFSSL_AKID_NAME) /* Store the raw authority key id. */ cert->extRawAuthKeyIdSrc = input; cert->extRawAuthKeyIdSz = sz; -#endif /* OPENSSL_EXTRA */ +#endif } return ret; -#endif /* WOLFSSL_ASN_TEMPLATE */ } /* Decode subject key id extension. 
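
With WOLFSSL_ALLOW_AKID_SKID_MATCH defined, the ParseCertRelative() hunk below keeps the candidate CA when the certificate's issuer-name hash does not match the signer's subject-name hash but the certificate's Authority Key Identifier hash equals that signer's Subject Key Identifier hash; without the macro the candidate is discarded exactly as before, so default chain matching is unchanged.
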
@@ -25723,7 +25709,22 @@ int ParseCertRelative(DecodedCert* cert, int type, int verify, void* cm, } if (cert->ca != NULL && XMEMCMP(cert->issuerHash, cert->ca->subjectNameHash, KEYID_SIZE) != 0) { - cert->ca = NULL; + #ifdef WOLFSSL_ALLOW_AKID_SKID_MATCH + /* if hash of cert subject does not match hash of issuer + * then try with AKID/SKID if available */ + if (cert->extAuthKeyIdSet && cert->extAuthKeyIdSz > 0 && + cert->extAuthKeyIdSz == + (word32)sizeof(cert->ca->subjectKeyIdHash) && + XMEMCMP(cert->extAuthKeyId, cert->ca->subjectKeyIdHash, + cert->extAuthKeyIdSz) == 0) { + WOLFSSL_MSG("Cert AKID matches CA SKID"); + } + else + #endif + { + WOLFSSL_MSG("Cert subject hash does not match issuer hash"); + cert->ca = NULL; + } } if (cert->ca == NULL) { cert->ca = GetCAByName(cm, cert->issuerHash); diff --git a/wolfssl/wolfcrypt/asn.h b/wolfssl/wolfcrypt/asn.h index 6e08f3596..84ad93f1b 100644 --- a/wolfssl/wolfcrypt/asn.h +++ b/wolfssl/wolfcrypt/asn.h @@ -1952,24 +1952,24 @@ struct Signer { int nameLen; char* name; /* common name */ #ifndef IGNORE_NAME_CONSTRAINTS - Base_entry* permittedNames; - Base_entry* excludedNames; -#endif /* !IGNORE_NAME_CONSTRAINTS */ + Base_entry* permittedNames; + Base_entry* excludedNames; +#endif byte subjectNameHash[SIGNER_DIGEST_SIZE]; /* sha hash of names in certificate */ - #if defined(HAVE_OCSP) || defined(HAVE_CRL) || defined(WOLFSSL_AKID_NAME) - byte issuerNameHash[SIGNER_DIGEST_SIZE]; - /* sha hash of issuer names in certificate. - * Used in OCSP to check for authorized - * responders. */ - #endif - #ifndef NO_SKID - byte subjectKeyIdHash[SIGNER_DIGEST_SIZE]; - /* sha hash of key in certificate */ - #endif - #ifdef HAVE_OCSP - byte subjectKeyHash[KEYID_SIZE]; - #endif +#if defined(HAVE_OCSP) || defined(HAVE_CRL) || defined(WOLFSSL_AKID_NAME) + byte issuerNameHash[SIGNER_DIGEST_SIZE]; + /* sha hash of issuer names in certificate. + * Used in OCSP to check for authorized + * responders. */ +#endif +#ifndef NO_SKID + byte subjectKeyIdHash[SIGNER_DIGEST_SIZE]; + /* sha hash of key in certificate */ +#endif +#ifdef HAVE_OCSP + byte subjectKeyHash[KEYID_SIZE]; +#endif #if defined(WOLFSSL_AKID_NAME) || defined(HAVE_CRL) byte serialHash[SIGNER_DIGEST_SIZE]; /* serial number hash */ #endif From a1999d29ed86986e1b83e15b7fd185f323d1b9f3 Mon Sep 17 00:00:00 2001 From: Kareem Date: Fri, 19 Dec 2025 16:37:50 -0700 Subject: [PATCH 23/27] Only enforce !NO_FILESYSTEM for WOLFSSL_SYS_CA_CERTS on non Windows/Mac systems. wolfSSL's support for WOLFSSL_SYS_CA_CERTS uses APIs which don't depend on !NO_FILESYSTEM on Windows/Mac. Fixes #8152. --- configure.ac | 15 +- src/ssl_load.c | 597 +++++++++++++++++++++++++------------------------ 2 files changed, 310 insertions(+), 302 deletions(-) diff --git a/configure.ac b/configure.ac index d6c44305d..a2e98dd35 100644 --- a/configure.ac +++ b/configure.ac @@ -10111,10 +10111,7 @@ fi if test "x$ENABLED_SYS_CA_CERTS" = "xyes" then - if test "x$ENABLED_FILESYSTEM" = "xno" - then - ENABLED_SYS_CA_CERTS="no" - elif test "x$ENABLED_CERTS" = "xno" + if test "x$ENABLED_CERTS" = "xno" then ENABLED_SYS_CA_CERTS="no" fi @@ -10146,6 +10143,16 @@ then AC_MSG_ERROR([Unable to find Apple Security.framework headers]) ]) ;; + mingw*) + ;; + *) + # Only disable on no filesystem non Mac/Windows, as Mac and Windows + # depend on APIs which don't need filesystem support enabled in wolfSSL. 
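+          # (On Windows the system store is read via CertOpenSystemStoreA()
+          # and CertEnumCertificatesInStore(), and on macOS via
+          # Security.framework, so no filesystem access is needed there.)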
+ if test "x$ENABLED_FILESYSTEM" = "xno" + then + ENABLED_SYS_CA_CERTS="no" + fi + ;; esac fi diff --git a/src/ssl_load.c b/src/ssl_load.c index 34f328d13..aedc629cd 100644 --- a/src/ssl_load.c +++ b/src/ssl_load.c @@ -28,7 +28,8 @@ */ #ifdef WOLFSSL_SYS_CA_CERTS -/* Will be turned off automatically when NO_FILESYSTEM is defined */ +/* Will be turned off automatically when NO_FILESYSTEM is defined + * for non Mac/Windows systems */ #ifdef _WIN32 #define _WINSOCKAPI_ /* block inclusion of winsock.h header file */ @@ -3041,303 +3042,6 @@ int wolfSSL_CTX_load_verify_locations_compat(WOLFSSL_CTX* ctx, const char* file, return WS_RETURN_CODE(ret, 0); } -#ifdef WOLFSSL_SYS_CA_CERTS - -#ifdef USE_WINDOWS_API - -/* Load CA certificate from Windows store. - * - * Assumes loaded is 0. - * - * @param [in, out] ctx SSL context object. - * @param [out] loaded Whether CA certificates were loaded. - * @return 1 on success. - * @return 0 on failure. - */ -static int LoadSystemCaCertsWindows(WOLFSSL_CTX* ctx, byte* loaded) -{ - int ret = 1; - word32 i; - HANDLE handle = NULL; - PCCERT_CONTEXT certCtx = NULL; - LPCSTR storeNames[2] = {"ROOT", "CA"}; - HCRYPTPROV_LEGACY hProv = (HCRYPTPROV_LEGACY)NULL; - - if ((ctx == NULL) || (loaded == NULL)) { - ret = 0; - } - - for (i = 0; (ret == 1) && (i < sizeof(storeNames)/sizeof(*storeNames)); - ++i) { - handle = CertOpenSystemStoreA(hProv, storeNames[i]); - if (handle != NULL) { - while ((certCtx = CertEnumCertificatesInStore(handle, certCtx)) - != NULL) { - if (certCtx->dwCertEncodingType == X509_ASN_ENCODING) { - if (ProcessBuffer(ctx, certCtx->pbCertEncoded, - certCtx->cbCertEncoded, WOLFSSL_FILETYPE_ASN1, - CA_TYPE, NULL, NULL, 0, - GET_VERIFY_SETTING_CTX(ctx), - storeNames[i]) == 1) { - /* - * Set "loaded" as long as we've loaded one CA - * cert. - */ - *loaded = 1; - } - } - } - } - else { - WOLFSSL_MSG_EX("Failed to open cert store %s.", storeNames[i]); - } - - if (handle != NULL && !CertCloseStore(handle, 0)) { - WOLFSSL_MSG_EX("Failed to close cert store %s.", storeNames[i]); - ret = 0; - } - } - - return ret; -} - -#elif defined(__APPLE__) - -#if defined(HAVE_SECURITY_SECTRUSTSETTINGS_H) \ - && !defined(WOLFSSL_APPLE_NATIVE_CERT_VALIDATION) -/* Manually obtains certificates from the system trust store and loads them - * directly into wolfSSL "the old way". - * - * As of MacOS 14.0 we are still able to use this method to access system - * certificates. Accessibility of this API is indicated by the presence of the - * Security/SecTrustSettings.h header. In the likely event that Apple removes - * access to this API on Macs, this function should be removed and the - * DoAppleNativeCertValidation() routine should be used for all devices. - * - * Assumes loaded is 0. - * - * @param [in, out] ctx SSL context object. - * @param [out] loaded Whether CA certificates were loaded. - * @return 1 on success. - * @return 0 on failure. 
- */ -static int LoadSystemCaCertsMac(WOLFSSL_CTX* ctx, byte* loaded) -{ - int ret = 1; - word32 i; - const unsigned int trustDomains[] = { - kSecTrustSettingsDomainUser, - kSecTrustSettingsDomainAdmin, - kSecTrustSettingsDomainSystem - }; - CFArrayRef certs; - OSStatus stat; - CFIndex numCerts; - CFDataRef der; - CFIndex j; - - if ((ctx == NULL) || (loaded == NULL)) { - ret = 0; - } - - for (i = 0; (ret == 1) && (i < sizeof(trustDomains)/sizeof(*trustDomains)); - ++i) { - stat = SecTrustSettingsCopyCertificates( - (SecTrustSettingsDomain)trustDomains[i], &certs); - if (stat == errSecSuccess) { - numCerts = CFArrayGetCount(certs); - for (j = 0; j < numCerts; ++j) { - der = SecCertificateCopyData((SecCertificateRef) - CFArrayGetValueAtIndex(certs, j)); - if (der != NULL) { - if (ProcessBuffer(ctx, CFDataGetBytePtr(der), - CFDataGetLength(der), WOLFSSL_FILETYPE_ASN1, - CA_TYPE, NULL, NULL, 0, - GET_VERIFY_SETTING_CTX(ctx), - "MacOSX trustDomains") == 1) { - /* - * Set "loaded" as long as we've loaded one CA - * cert. - */ - *loaded = 1; - } - - CFRelease(der); - } - } - - CFRelease(certs); - } - else if (stat == errSecNoTrustSettings) { - WOLFSSL_MSG_EX("No trust settings for domain %d, moving to next " - "domain.", trustDomains[i]); - } - else { - WOLFSSL_MSG_EX("SecTrustSettingsCopyCertificates failed with" - " status %d.", stat); - ret = 0; - break; - } - } - - return ret; -} -#endif /* defined(HAVE_SECURITY_SECTRUSTSETTINGS_H) */ - -#else - -/* Potential system CA certs directories on Linux/Unix distros. */ -static const char* systemCaDirs[] = { -#if defined(__ANDROID__) || defined(ANDROID) - "/system/etc/security/cacerts" /* Android */ -#else - "/etc/ssl/certs", /* Debian, Ubuntu, Gentoo, others */ - "/etc/pki/ca-trust/source/anchors", /* Fedora, RHEL */ - "/etc/pki/tls/certs" /* Older RHEL */ -#endif -}; - -/* Get CA directory list. - * - * @param [out] num Number of CA directories. - * @return CA directory list. - * @return NULL when num is NULL. - */ -const char** wolfSSL_get_system_CA_dirs(word32* num) -{ - const char** ret; - - /* Validate parameters. */ - if (num == NULL) { - ret = NULL; - } - else { - ret = systemCaDirs; - *num = sizeof(systemCaDirs)/sizeof(*systemCaDirs); - } - - return ret; -} - -/* Load CA certificate from default system directories. - * - * Assumes loaded is 0. - * - * @param [in, out] ctx SSL context object. - * @param [out] loaded Whether CA certificates were loaded. - * @return 1 on success. - * @return 0 on failure. - */ -static int LoadSystemCaCertsNix(WOLFSSL_CTX* ctx, byte* loaded) { - int ret = 1; - word32 i; - - if ((ctx == NULL) || (loaded == NULL)) { - ret = 0; - } - - for (i = 0; (ret == 1) && (i < sizeof(systemCaDirs)/sizeof(*systemCaDirs)); - ++i) { - WOLFSSL_MSG_EX("Attempting to load system CA certs from %s.", - systemCaDirs[i]); - /* - * We want to keep trying to load more CA certs even if one cert in - * the directory is bad and can't be used (e.g. if one is expired), - * so we use WOLFSSL_LOAD_FLAG_IGNORE_ERR. - */ - if (wolfSSL_CTX_load_verify_locations_ex(ctx, NULL, systemCaDirs[i], - WOLFSSL_LOAD_FLAG_IGNORE_ERR) != 1) { - WOLFSSL_MSG_EX("Failed to load CA certs from %s, trying " - "next possible location.", systemCaDirs[i]); - } - else { - WOLFSSL_MSG_EX("Loaded CA certs from %s.", - systemCaDirs[i]); - *loaded = 1; - /* Stop searching after we've loaded one directory. */ - break; - } - } - - return ret; -} - -#endif - -/* Load CA certificates from system defined locations. - * - * @param [in, out] ctx SSL context object. 
- * @return 1 on success. - * @return 0 on failure. - * @return WOLFSSL_BAD_PATH when no error but no certificates loaded. - */ -int wolfSSL_CTX_load_system_CA_certs(WOLFSSL_CTX* ctx) -{ - int ret; - byte loaded = 0; - - WOLFSSL_ENTER("wolfSSL_CTX_load_system_CA_certs"); - -#ifdef USE_WINDOWS_API - - ret = LoadSystemCaCertsWindows(ctx, &loaded); - -#elif defined(__APPLE__) - -#if defined(HAVE_SECURITY_SECTRUSTSETTINGS_H) \ - && !defined(WOLFSSL_APPLE_NATIVE_CERT_VALIDATION) - /* As of MacOS 14.0 we are still able to access system certificates and - * load them manually into wolfSSL "the old way". Accessibility of this API - * is indicated by the presence of the Security/SecTrustSettings.h header */ - ret = LoadSystemCaCertsMac(ctx, &loaded); -#elif defined(WOLFSSL_APPLE_NATIVE_CERT_VALIDATION) - /* For other Apple devices, Apple has removed the ability to obtain - * certificates from the trust store, so we can't use wolfSSL's built-in - * certificate validation mechanisms anymore. We instead must call into the - * Security Framework APIs to authenticate peer certificates when received. - * (see src/internal.c:DoAppleNativeCertValidation()). - * Thus, there is no CA "loading" required, but to keep behavior consistent - * with the current API (not using system CA certs unless this function has - * been called), we simply set a flag indicating that the new apple trust - * verification routine should be used later */ - ctx->doAppleNativeCertValidationFlag = 1; - ret = 1; - loaded = 1; - -#if FIPS_VERSION_GE(2,0) /* Gate back to cert 3389 FIPS modules */ -#warning "Cryptographic operations may occur outside the FIPS module boundary" \ - "Please review FIPS claims for cryptography on this Apple device" -#endif /* FIPS_VERSION_GE(2,0) */ - -#else -/* HAVE_SECURITY_SECXXX_H macros are set by autotools or CMake when searching - * system for the required SDK headers. If building with user_settings.h, you - * will need to manually define WOLFSSL_APPLE_NATIVE_CERT_VALIDATION - * and ensure the appropriate Security.framework headers and libraries are - * visible to your compiler */ -#error "WOLFSSL_SYS_CA_CERTS on Apple devices requires Security.framework" \ - " header files to be detected, or a manual override with" \ - " WOLFSSL_APPLE_NATIVE_CERT_VALIDATION" -#endif - -#else - - ret = LoadSystemCaCertsNix(ctx, &loaded); - -#endif - - /* If we didn't fail but didn't load then we error out. */ - if ((ret == 1) && (!loaded)) { - ret = WOLFSSL_BAD_PATH; - } - - WOLFSSL_LEAVE("wolfSSL_CTX_load_system_CA_certs", ret); - - return ret; -} - -#endif /* WOLFSSL_SYS_CA_CERTS */ - #ifdef WOLFSSL_TRUST_PEER_CERT /* Load a trusted peer certificate into SSL context. * @@ -3563,6 +3267,303 @@ int wolfSSL_CTX_use_certificate_chain_file_format(WOLFSSL_CTX* ctx, #endif /* NO_FILESYSTEM */ +#ifdef WOLFSSL_SYS_CA_CERTS + +#ifdef USE_WINDOWS_API + +/* Load CA certificate from Windows store. + * + * Assumes loaded is 0. + * + * @param [in, out] ctx SSL context object. + * @param [out] loaded Whether CA certificates were loaded. + * @return 1 on success. + * @return 0 on failure. 
+ */ +static int LoadSystemCaCertsWindows(WOLFSSL_CTX* ctx, byte* loaded) +{ + int ret = 1; + word32 i; + HANDLE handle = NULL; + PCCERT_CONTEXT certCtx = NULL; + LPCSTR storeNames[2] = {"ROOT", "CA"}; + HCRYPTPROV_LEGACY hProv = (HCRYPTPROV_LEGACY)NULL; + + if ((ctx == NULL) || (loaded == NULL)) { + ret = 0; + } + + for (i = 0; (ret == 1) && (i < sizeof(storeNames)/sizeof(*storeNames)); + ++i) { + handle = CertOpenSystemStoreA(hProv, storeNames[i]); + if (handle != NULL) { + while ((certCtx = CertEnumCertificatesInStore(handle, certCtx)) + != NULL) { + if (certCtx->dwCertEncodingType == X509_ASN_ENCODING) { + if (ProcessBuffer(ctx, certCtx->pbCertEncoded, + certCtx->cbCertEncoded, WOLFSSL_FILETYPE_ASN1, + CA_TYPE, NULL, NULL, 0, + GET_VERIFY_SETTING_CTX(ctx), + storeNames[i]) == 1) { + /* + * Set "loaded" as long as we've loaded one CA + * cert. + */ + *loaded = 1; + } + } + } + } + else { + WOLFSSL_MSG_EX("Failed to open cert store %s.", storeNames[i]); + } + + if (handle != NULL && !CertCloseStore(handle, 0)) { + WOLFSSL_MSG_EX("Failed to close cert store %s.", storeNames[i]); + ret = 0; + } + } + + return ret; +} + +#elif defined(__APPLE__) + +#if defined(HAVE_SECURITY_SECTRUSTSETTINGS_H) \ + && !defined(WOLFSSL_APPLE_NATIVE_CERT_VALIDATION) +/* Manually obtains certificates from the system trust store and loads them + * directly into wolfSSL "the old way". + * + * As of MacOS 14.0 we are still able to use this method to access system + * certificates. Accessibility of this API is indicated by the presence of the + * Security/SecTrustSettings.h header. In the likely event that Apple removes + * access to this API on Macs, this function should be removed and the + * DoAppleNativeCertValidation() routine should be used for all devices. + * + * Assumes loaded is 0. + * + * @param [in, out] ctx SSL context object. + * @param [out] loaded Whether CA certificates were loaded. + * @return 1 on success. + * @return 0 on failure. + */ +static int LoadSystemCaCertsMac(WOLFSSL_CTX* ctx, byte* loaded) +{ + int ret = 1; + word32 i; + const unsigned int trustDomains[] = { + kSecTrustSettingsDomainUser, + kSecTrustSettingsDomainAdmin, + kSecTrustSettingsDomainSystem + }; + CFArrayRef certs; + OSStatus stat; + CFIndex numCerts; + CFDataRef der; + CFIndex j; + + if ((ctx == NULL) || (loaded == NULL)) { + ret = 0; + } + + for (i = 0; (ret == 1) && (i < sizeof(trustDomains)/sizeof(*trustDomains)); + ++i) { + stat = SecTrustSettingsCopyCertificates( + (SecTrustSettingsDomain)trustDomains[i], &certs); + if (stat == errSecSuccess) { + numCerts = CFArrayGetCount(certs); + for (j = 0; j < numCerts; ++j) { + der = SecCertificateCopyData((SecCertificateRef) + CFArrayGetValueAtIndex(certs, j)); + if (der != NULL) { + if (ProcessBuffer(ctx, CFDataGetBytePtr(der), + CFDataGetLength(der), WOLFSSL_FILETYPE_ASN1, + CA_TYPE, NULL, NULL, 0, + GET_VERIFY_SETTING_CTX(ctx), + "MacOSX trustDomains") == 1) { + /* + * Set "loaded" as long as we've loaded one CA + * cert. + */ + *loaded = 1; + } + + CFRelease(der); + } + } + + CFRelease(certs); + } + else if (stat == errSecNoTrustSettings) { + WOLFSSL_MSG_EX("No trust settings for domain %d, moving to next " + "domain.", trustDomains[i]); + } + else { + WOLFSSL_MSG_EX("SecTrustSettingsCopyCertificates failed with" + " status %d.", stat); + ret = 0; + break; + } + } + + return ret; +} +#endif /* defined(HAVE_SECURITY_SECTRUSTSETTINGS_H) */ + +#elif !defined(NO_FILESYSTEM) + +/* Potential system CA certs directories on Linux/Unix distros. 
*/ +static const char* systemCaDirs[] = { +#if defined(__ANDROID__) || defined(ANDROID) + "/system/etc/security/cacerts" /* Android */ +#else + "/etc/ssl/certs", /* Debian, Ubuntu, Gentoo, others */ + "/etc/pki/ca-trust/source/anchors", /* Fedora, RHEL */ + "/etc/pki/tls/certs" /* Older RHEL */ +#endif +}; + +/* Get CA directory list. + * + * @param [out] num Number of CA directories. + * @return CA directory list. + * @return NULL when num is NULL. + */ +const char** wolfSSL_get_system_CA_dirs(word32* num) +{ + const char** ret; + + /* Validate parameters. */ + if (num == NULL) { + ret = NULL; + } + else { + ret = systemCaDirs; + *num = sizeof(systemCaDirs)/sizeof(*systemCaDirs); + } + + return ret; +} + +/* Load CA certificate from default system directories. + * + * Assumes loaded is 0. + * + * @param [in, out] ctx SSL context object. + * @param [out] loaded Whether CA certificates were loaded. + * @return 1 on success. + * @return 0 on failure. + */ +static int LoadSystemCaCertsNix(WOLFSSL_CTX* ctx, byte* loaded) { + int ret = 1; + word32 i; + + if ((ctx == NULL) || (loaded == NULL)) { + ret = 0; + } + + for (i = 0; (ret == 1) && (i < sizeof(systemCaDirs)/sizeof(*systemCaDirs)); + ++i) { + WOLFSSL_MSG_EX("Attempting to load system CA certs from %s.", + systemCaDirs[i]); + /* + * We want to keep trying to load more CA certs even if one cert in + * the directory is bad and can't be used (e.g. if one is expired), + * so we use WOLFSSL_LOAD_FLAG_IGNORE_ERR. + */ + if (wolfSSL_CTX_load_verify_locations_ex(ctx, NULL, systemCaDirs[i], + WOLFSSL_LOAD_FLAG_IGNORE_ERR) != 1) { + WOLFSSL_MSG_EX("Failed to load CA certs from %s, trying " + "next possible location.", systemCaDirs[i]); + } + else { + WOLFSSL_MSG_EX("Loaded CA certs from %s.", + systemCaDirs[i]); + *loaded = 1; + /* Stop searching after we've loaded one directory. */ + break; + } + } + + return ret; +} + +#endif + +/* Load CA certificates from system defined locations. + * + * @param [in, out] ctx SSL context object. + * @return 1 on success. + * @return 0 on failure. + * @return WOLFSSL_BAD_PATH when no error but no certificates loaded. + */ +int wolfSSL_CTX_load_system_CA_certs(WOLFSSL_CTX* ctx) +{ + int ret; + byte loaded = 0; + + WOLFSSL_ENTER("wolfSSL_CTX_load_system_CA_certs"); + +#ifdef USE_WINDOWS_API + + ret = LoadSystemCaCertsWindows(ctx, &loaded); + +#elif defined(__APPLE__) + +#if defined(HAVE_SECURITY_SECTRUSTSETTINGS_H) \ + && !defined(WOLFSSL_APPLE_NATIVE_CERT_VALIDATION) + /* As of MacOS 14.0 we are still able to access system certificates and + * load them manually into wolfSSL "the old way". Accessibility of this API + * is indicated by the presence of the Security/SecTrustSettings.h header */ + ret = LoadSystemCaCertsMac(ctx, &loaded); +#elif defined(WOLFSSL_APPLE_NATIVE_CERT_VALIDATION) + /* For other Apple devices, Apple has removed the ability to obtain + * certificates from the trust store, so we can't use wolfSSL's built-in + * certificate validation mechanisms anymore. We instead must call into the + * Security Framework APIs to authenticate peer certificates when received. + * (see src/internal.c:DoAppleNativeCertValidation()). 
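
A hypothetical caller-side sketch (not from the patch; the helper name and bundle path are illustrative, and a WOLFSSL_SYS_CA_CERTS build is assumed) showing how an application might combine this entry point with a packaged fallback:

    #include <wolfssl/ssl.h>

    /* Prefer the OS trust store; fall back to a bundled PEM file when the
     * call succeeds but finds nothing to load (WOLFSSL_BAD_PATH). */
    static int load_trust_anchors(WOLFSSL_CTX* ctx)
    {
        int ret = wolfSSL_CTX_load_system_CA_certs(ctx);

        if (ret == WOLFSSL_BAD_PATH)
            ret = wolfSSL_CTX_load_verify_locations(ctx, "ca-bundle.pem", NULL);
        return ret;
    }
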
+ * Thus, there is no CA "loading" required, but to keep behavior consistent + * with the current API (not using system CA certs unless this function has + * been called), we simply set a flag indicating that the new apple trust + * verification routine should be used later */ + ctx->doAppleNativeCertValidationFlag = 1; + ret = 1; + loaded = 1; + +#if FIPS_VERSION_GE(2,0) /* Gate back to cert 3389 FIPS modules */ +#warning "Cryptographic operations may occur outside the FIPS module boundary" \ + "Please review FIPS claims for cryptography on this Apple device" +#endif /* FIPS_VERSION_GE(2,0) */ + +#else +/* HAVE_SECURITY_SECXXX_H macros are set by autotools or CMake when searching + * system for the required SDK headers. If building with user_settings.h, you + * will need to manually define WOLFSSL_APPLE_NATIVE_CERT_VALIDATION + * and ensure the appropriate Security.framework headers and libraries are + * visible to your compiler */ +#error "WOLFSSL_SYS_CA_CERTS on Apple devices requires Security.framework" \ + " header files to be detected, or a manual override with" \ + " WOLFSSL_APPLE_NATIVE_CERT_VALIDATION" +#endif + +#else + + ret = LoadSystemCaCertsNix(ctx, &loaded); + +#endif + + /* If we didn't fail but didn't load then we error out. */ + if ((ret == 1) && (!loaded)) { + ret = WOLFSSL_BAD_PATH; + } + + WOLFSSL_LEAVE("wolfSSL_CTX_load_system_CA_certs", ret); + + return ret; +} + +#endif /* WOLFSSL_SYS_CA_CERTS */ + #ifdef OPENSSL_EXTRA /* Load a private key into SSL. From ca7899429844e8bd3824fe92a709978b51f750c4 Mon Sep 17 00:00:00 2001 From: Mattia Moffa Date: Mon, 22 Dec 2025 16:13:27 +0100 Subject: [PATCH 24/27] Add missing length check in sniffer for AES-GCM/AES-CCM/ARIA-GCM --- src/sniffer.c | 49 +++++++++++++++++++++++++++++++------------------ 1 file changed, 31 insertions(+), 18 deletions(-) diff --git a/src/sniffer.c b/src/sniffer.c index f65455b66..128c0dc40 100644 --- a/src/sniffer.c +++ b/src/sniffer.c @@ -4847,18 +4847,25 @@ static int DecryptDo(WOLFSSL* ssl, byte* plain, const byte* input, XMEMCPY(ssl->decrypt.nonce, ssl->keys.aead_dec_imp_IV, AESGCM_IMP_IV_SZ); XMEMCPY(ssl->decrypt.nonce + AESGCM_IMP_IV_SZ, input, AESGCM_EXP_IV_SZ); - if ((ret = aes_auth_fn(ssl->decrypt.aes, - plain, - input + AESGCM_EXP_IV_SZ, - sz - AESGCM_EXP_IV_SZ - ssl->specs.aead_mac_size, - ssl->decrypt.nonce, AESGCM_NONCE_SZ, - ssl->decrypt.additional, AEAD_AUTH_DATA_SZ, - NULL, 0)) < 0) { - #ifdef WOLFSSL_ASYNC_CRYPT - if (ret == WC_NO_ERR_TRACE(WC_PENDING_E)) { - ret = wolfSSL_AsyncPush(ssl, &ssl->decrypt.aes->asyncDev); + if (sz < AESGCM_EXP_IV_SZ + ssl->specs.aead_mac_size) { + ret = BUFFER_ERROR; + } + + if (ret == 0) { + ret = aes_auth_fn(ssl->decrypt.aes, + plain, + input + AESGCM_EXP_IV_SZ, + sz - AESGCM_EXP_IV_SZ - ssl->specs.aead_mac_size, + ssl->decrypt.nonce, AESGCM_NONCE_SZ, + ssl->decrypt.additional, AEAD_AUTH_DATA_SZ, + NULL, 0); + if (ret < 0) { + #ifdef WOLFSSL_ASYNC_CRYPT + if (ret == WC_NO_ERR_TRACE(WC_PENDING_E)) { + ret = wolfSSL_AsyncPush(ssl, &ssl->decrypt.aes->asyncDev); + } + #endif } - #endif } } break; @@ -4866,13 +4873,19 @@ static int DecryptDo(WOLFSSL* ssl, byte* plain, const byte* input, #ifdef HAVE_ARIA case wolfssl_aria_gcm: - ret = wc_AriaDecrypt(ssl->decrypt.aria, - plain, - (byte *)input + AESGCM_EXP_IV_SZ, - sz - AESGCM_EXP_IV_SZ - ssl->specs.aead_mac_size, - ssl->decrypt.nonce, AESGCM_NONCE_SZ, - ssl->decrypt.additional, ssl->specs.aead_mac_size, - NULL, 0); + if (sz < AESGCM_EXP_IV_SZ + ssl->specs.aead_mac_size) { + ret = BUFFER_ERROR; + } + + if (ret == 
0) { + ret = wc_AriaDecrypt(ssl->decrypt.aria, + plain, + (byte *)input + AESGCM_EXP_IV_SZ, + sz - AESGCM_EXP_IV_SZ - ssl->specs.aead_mac_size, + ssl->decrypt.nonce, AESGCM_NONCE_SZ, + ssl->decrypt.additional, ssl->specs.aead_mac_size, + NULL, 0); + } break; #endif From afbc65a6c37331ab70333a7f132458f3cc360faa Mon Sep 17 00:00:00 2001 From: night1rider Date: Mon, 22 Dec 2025 10:48:49 -0700 Subject: [PATCH 25/27] Aes Free callback support --- tests/api.c | 17 +++++++++++++++++ wolfcrypt/src/aes.c | 24 ++++++++++++++++++++++++ wolfcrypt/test/test.c | 17 +++++++++++++++++ 3 files changed, 58 insertions(+) diff --git a/tests/api.c b/tests/api.c index 1a3162b24..bdfa09f26 100644 --- a/tests/api.c +++ b/tests/api.c @@ -35747,6 +35747,23 @@ static int test_CryptoCb_Func(int thisDevId, wc_CryptoInfo* info, void* ctx) ret = 0; break; } + #endif + default: + ret = WC_NO_ERR_TRACE(NOT_COMPILED_IN); + break; + } + } + else if (info->free.algo == WC_ALGO_TYPE_CIPHER) { + switch (info->free.type) { + #ifndef NO_AES + case WC_CIPHER_AES: + { + Aes* aes = (Aes*)info->free.obj; + aes->devId = INVALID_DEVID; + wc_AesFree(aes); + ret = 0; + break; + } #endif default: ret = WC_NO_ERR_TRACE(NOT_COMPILED_IN); diff --git a/wolfcrypt/src/aes.c b/wolfcrypt/src/aes.c index cdc32c739..ddb7e31db 100644 --- a/wolfcrypt/src/aes.c +++ b/wolfcrypt/src/aes.c @@ -13402,10 +13402,34 @@ int wc_AesInit_Label(Aes* aes, const char* label, void* heap, int devId) /* Free Aes resources */ void wc_AesFree(Aes* aes) { +#if defined(WOLF_CRYPTO_CB) && defined(WOLF_CRYPTO_CB_FREE) + int ret = 0; +#endif + if (aes == NULL) { return; } +#if defined(WOLF_CRYPTO_CB) && defined(WOLF_CRYPTO_CB_FREE) + #ifndef WOLF_CRYPTO_CB_FIND + if (aes->devId != INVALID_DEVID) + #endif + { + ret = wc_CryptoCb_Free(aes->devId, WC_ALGO_TYPE_CIPHER, + WC_CIPHER_AES, (void*)aes); + /* If they want the standard free, they can call it themselves */ + /* via their callback setting devId to INVALID_DEVID */ + /* otherwise assume the callback handled it */ + if (ret != WC_NO_ERR_TRACE(CRYPTOCB_UNAVAILABLE)) + return; + /* fall-through when unavailable */ + } + + /* silence compiler warning */ + (void)ret; + +#endif /* WOLF_CRYPTO_CB && WOLF_CRYPTO_CB_FREE */ + #ifdef WC_DEBUG_CIPHER_LIFECYCLE (void)wc_debug_CipherLifecycleFree(&aes->CipherLifecycleTag, aes->heap, 1); #endif diff --git a/wolfcrypt/test/test.c b/wolfcrypt/test/test.c index 971502e29..c87ab9c53 100644 --- a/wolfcrypt/test/test.c +++ b/wolfcrypt/test/test.c @@ -62522,6 +62522,23 @@ static int myCryptoDevCb(int devIdArg, wc_CryptoInfo* info, void* ctx) ret = 0; break; } +#endif + default: + ret = WC_NO_ERR_TRACE(NOT_COMPILED_IN); + break; + } + } + else if (info->free.algo == WC_ALGO_TYPE_CIPHER) { + switch (info->free.type) { +#ifndef NO_AES + case WC_CIPHER_AES: + { + Aes* aes = (Aes*)info->free.obj; + aes->devId = INVALID_DEVID; + wc_AesFree(aes); + ret = 0; + break; + } #endif default: ret = WC_NO_ERR_TRACE(NOT_COMPILED_IN); From b766f11e7b352ae648dc193802b61bed8eee8922 Mon Sep 17 00:00:00 2001 From: Sean Parkinson Date: Mon, 24 Nov 2025 12:47:40 +1000 Subject: [PATCH 26/27] TLS 1.3, plaintext alert: ignore when expecting encrypted In TLS 1.3, ignore valid unencrypted alerts that appear after encryption has started. Only ignore WOLFSSL_ALERT_COUNT_MAX-1 alerts. 
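A condensed sketch of the new behaviour, distilled from the DoAlert() hunk
below (names are the existing internal.c helpers; this is illustrative, not
the literal diff). The macro is opt-in at build time, e.g.
CPPFLAGS=-DWOLFSSL_TLS13_IGNORE_PT_ALERT_ON_ENC, as exercised in the
os-check.yml change:

    /* Plaintext alert received on a TLS 1.3 connection after record-layer
     * encryption has started. */
    if (IsAtLeastTLSv1_3(ssl->version) && IsEncryptionOn(ssl, 0) &&
            !ssl->keys.decryptedCur) {
    #ifdef WOLFSSL_TLS13_IGNORE_PT_ALERT_ON_ENC
        /* Discard the alert. The alert counter still increments, so at most
         * WOLFSSL_ALERT_COUNT_MAX - 1 such alerts are tolerated before
         * ALERT_COUNT_E is returned. */
        *type = invalid_alert;
        level = alert_none;
    #else
        /* Default: reject the plaintext alert as an unexpected message. */
        SendAlert(ssl, alert_fatal, unexpected_message);
        return PARSE_ERROR;
    #endif
    }
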
--- .github/workflows/os-check.yml | 2 + .wolfssl_known_macro_extras | 1 + src/internal.c | 93 ++++++++------ tests/api/test_tls13.c | 223 ++++++++++++++++++++++++++++++++- tests/api/test_tls13.h | 30 ++--- 5 files changed, 298 insertions(+), 51 deletions(-) diff --git a/.github/workflows/os-check.yml b/.github/workflows/os-check.yml index 33f5255a0..e043a1980 100644 --- a/.github/workflows/os-check.yml +++ b/.github/workflows/os-check.yml @@ -63,6 +63,8 @@ jobs: '--enable-coding=no', '--enable-dtls --enable-dtls13 --enable-ocspstapling --enable-ocspstapling2 --enable-cert-setup-cb --enable-sessioncerts', + '--enable-dtls --enable-dtls13 --enable-tls13 + CPPFLAGS=-DWOLFSSL_TLS13_IGNORE_PT_ALERT_ON_ENC', '--disable-sni --disable-ecc --disable-tls13 --disable-secure-renegotiation-info', 'CPPFLAGS=-DWOLFSSL_BLIND_PRIVATE_KEY', '--enable-all --enable-certgencache', diff --git a/.wolfssl_known_macro_extras b/.wolfssl_known_macro_extras index 1b4255e69..253822bf4 100644 --- a/.wolfssl_known_macro_extras +++ b/.wolfssl_known_macro_extras @@ -903,6 +903,7 @@ WOLFSSL_TICKET_ENC_HMAC_SHA512 WOLFSSL_TI_CURRTIME WOLFSSL_TLS13_DRAFT WOLFSSL_TLS13_IGNORE_AEAD_LIMITS +WOLFSSL_TLS13_IGNORE_PT_ALERT_ON_ENC WOLFSSL_TLS13_SHA512 WOLFSSL_TLS13_TICKET_BEFORE_FINISHED WOLFSSL_TLSX_PQC_MLKEM_STORE_PRIV_KEY diff --git a/src/internal.c b/src/internal.c index 111eb9ffd..2a3cb1ab8 100644 --- a/src/internal.c +++ b/src/internal.c @@ -21697,20 +21697,20 @@ static int DoAlert(WOLFSSL* ssl, byte* input, word32* inOutIdx, int* type) byte code; word32 dataSz = (word32)ssl->curSize; - #if defined(WOLFSSL_CALLBACKS) || defined(OPENSSL_EXTRA) - if (ssl->hsInfoOn) - AddPacketName(ssl, "Alert"); - if (ssl->toInfoOn) { - /* add record header back on to info + alert bytes level/code */ - int ret = AddPacketInfo(ssl, "Alert", alert, input + *inOutIdx, - ALERT_SIZE, READ_PROTO, RECORD_HEADER_SZ, ssl->heap); - if (ret != 0) - return ret; - #ifdef WOLFSSL_CALLBACKS - AddLateRecordHeader(&ssl->curRL, &ssl->timeoutInfo); - #endif - } - #endif +#if defined(WOLFSSL_CALLBACKS) || defined(OPENSSL_EXTRA) + if (ssl->hsInfoOn) + AddPacketName(ssl, "Alert"); + if (ssl->toInfoOn) { + /* add record header back on to info + alert bytes level/code */ + int ret = AddPacketInfo(ssl, "Alert", alert, input + *inOutIdx, + ALERT_SIZE, READ_PROTO, RECORD_HEADER_SZ, ssl->heap); + if (ret != 0) + return ret; + #ifdef WOLFSSL_CALLBACKS + AddLateRecordHeader(&ssl->curRL, &ssl->timeoutInfo); + #endif + } +#endif if (IsEncryptionOn(ssl, 0)) dataSz -= ssl->keys.padSz; @@ -21725,11 +21725,18 @@ static int DoAlert(WOLFSSL* ssl, byte* input, word32* inOutIdx, int* type) level = input[(*inOutIdx)++]; code = input[(*inOutIdx)++]; - ssl->alert_history.last_rx.code = code; - ssl->alert_history.last_rx.level = level; *type = code; - if (level == alert_fatal) { - ssl->options.isClosed = 1; /* Don't send close_notify */ +#ifdef WOLFSSL_TLS13_IGNORE_PT_ALERT_ON_ENC + /* Don't process alert when TLS 1.3 and encrypting but plaintext alert. 
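+     * In that case the alert history and close state below are left
+     * untouched; the plaintext alert itself is discarded further down
+     * (see the invalid_alert handling).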
*/ + if (!IsAtLeastTLSv1_3(ssl->version) || !IsEncryptionOn(ssl, 0) || + ssl->keys.decryptedCur) +#endif + { + ssl->alert_history.last_rx.code = code; + ssl->alert_history.last_rx.level = level; + if (level == alert_fatal) { + ssl->options.isClosed = 1; /* Don't send close_notify */ + } } if (++ssl->options.alertCount >= WOLFSSL_ALERT_COUNT_MAX) { @@ -21743,20 +21750,35 @@ static int DoAlert(WOLFSSL* ssl, byte* input, word32* inOutIdx, int* type) } LogAlert(*type); - if (*type == close_notify) { - ssl->options.closeNotify = 1; + if (IsAtLeastTLSv1_3(ssl->version) && IsEncryptionOn(ssl, 0) && + !ssl->keys.decryptedCur) + { +#ifdef WOLFSSL_TLS13_IGNORE_PT_ALERT_ON_ENC + /* Ignore alert if TLS 1.3 and encrypting but was plaintext alert. */ + *type = invalid_alert; + level = alert_none; + +#else + /* Unexpected message when encryption is on and alert not encrypted. */ + SendAlert(ssl, alert_fatal, unexpected_message); + WOLFSSL_ERROR_VERBOSE(PARSE_ERROR); + return PARSE_ERROR; +#endif } else { - /* - * A close_notify alert doesn't mean there's been an error, so we only - * add other types of alerts to the error queue - */ - WOLFSSL_ERROR(*type); + if (*type == close_notify) { + ssl->options.closeNotify = 1; + } + else { + /* + * A close_notify alert doesn't mean there's been an error, so we + * only add other types of alerts to the error queue + */ + WOLFSSL_ERROR(*type); + } } - - if (IsEncryptionOn(ssl, 0)) { + if (IsEncryptionOn(ssl, 0)) *inOutIdx += ssl->keys.padSz; - } return level; } @@ -22507,7 +22529,8 @@ default: #ifdef WOLFSSL_TLS13 if (IsAtLeastTLSv1_3(ssl->version) && IsEncryptionOn(ssl, 0) && ssl->curRL.type != application_data && - ssl->curRL.type != change_cipher_spec) { + ssl->curRL.type != change_cipher_spec && + ssl->curRL.type != alert) { SendAlert(ssl, alert_fatal, unexpected_message); WOLFSSL_ERROR_VERBOSE(PARSE_ERROR); return PARSE_ERROR; @@ -22615,9 +22638,9 @@ default: case decryptMessage: if (IsEncryptionOn(ssl, 0) && ssl->keys.decryptedCur == 0 && - (!IsAtLeastTLSv1_3(ssl->version) || - ssl->curRL.type != change_cipher_spec)) - { + (!IsAtLeastTLSv1_3(ssl->version) || + (ssl->curRL.type != change_cipher_spec && + ssl->curRL.type != alert))) { ret = DoDecrypt(ssl); #ifdef WOLFSSL_ASYNC_CRYPT if (ret == WC_NO_ERR_TRACE(WC_PENDING_E)) @@ -22694,9 +22717,9 @@ default: case verifyMessage: if (IsEncryptionOn(ssl, 0) && ssl->keys.decryptedCur == 0 && - (!IsAtLeastTLSv1_3(ssl->version) || - ssl->curRL.type != change_cipher_spec)) - { + (!IsAtLeastTLSv1_3(ssl->version) || + (ssl->curRL.type != change_cipher_spec && + ssl->curRL.type != alert))) { if (!atomicUser #if defined(HAVE_ENCRYPT_THEN_MAC) && !defined(WOLFSSL_AEAD_ONLY) && !ssl->options.startedETMRead diff --git a/tests/api/test_tls13.c b/tests/api/test_tls13.c index abf0e1e02..3901c7bb7 100644 --- a/tests/api/test_tls13.c +++ b/tests/api/test_tls13.c @@ -2339,7 +2339,6 @@ static int MERecv(WOLFSSL* ssl, char* buf, int sz, void* ctx) int len = (int)msg->length; (void)ssl; - (void)sz; /* Pass back as much of message as will fit in buffer. */ if (len > sz) @@ -2572,7 +2571,6 @@ int test_tls13_duplicate_extension(void) } - int test_key_share_mismatch(void) { EXPECT_DECLS; @@ -2652,3 +2650,224 @@ int test_key_share_mismatch(void) #endif return EXPECT_RESULT(); } + + +#if defined(WOLFSSL_TLS13) && !defined(NO_RSA) && defined(HAVE_ECC) && \ + defined(HAVE_AESGCM) && !defined(NO_WOLFSSL_SERVER) +/* Called when writing. 
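+ * The test has nowhere to send the server's output, so the callback simply
+ * discards the data and reports it as fully written.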
*/ +static int Tls13PTASend(WOLFSSL* ssl, char* buf, int sz, void* ctx) +{ + (void)ssl; + (void)buf; + (void)ctx; + + return sz; +} +static int Tls13PTARecv(WOLFSSL* ssl, char* buf, int sz, void* ctx) +{ + WOLFSSL_BUFFER_INFO* msg = (WOLFSSL_BUFFER_INFO*)ctx; + int len; + + (void)ssl; + + if (msg->length == 0) { + /* Only do as many alerts as required to get to max alert count. */ + msg->buffer[0]--; + if (msg->buffer[0] > 0) { + msg->buffer -= 7; + msg->length += 7; + } + else { + return -1; + } + } + + len = (int)msg->length; + /* Pass back as much of message as will fit in buffer. */ + if (len > sz) + len = sz; + XMEMCPY(buf, msg->buffer, len); + /* Move over returned data. */ + msg->buffer += len; + msg->length -= len; + + /* Amount actually copied. */ + return len; +} +#endif + +int test_tls13_plaintext_alert(void) +{ + EXPECT_DECLS; + +#if defined(WOLFSSL_TLS13) && !defined(NO_RSA) && defined(HAVE_ECC) && \ + defined(HAVE_AESGCM) && !defined(NO_WOLFSSL_SERVER) + byte clientMsgs[] = { + /* Client Hello */ + 0x16, 0x03, 0x03, 0x01, 0x9b, 0x01, 0x00, 0x01, + 0x97, 0x03, 0x03, 0xf4, 0x65, 0xbd, 0x22, 0xfe, + 0x6e, 0xab, 0x66, 0xdd, 0xcf, 0xe9, 0x65, 0x55, + 0xe8, 0xdf, 0xc3, 0x8e, 0x4b, 0x00, 0xbc, 0xf8, + 0x23, 0x57, 0x1b, 0xa0, 0xc8, 0xa9, 0xe2, 0x8c, + 0x91, 0x6e, 0xf9, 0x20, 0xf7, 0x5c, 0xc5, 0x5b, + 0x75, 0x8c, 0x47, 0x0a, 0x0e, 0xc4, 0x1a, 0xda, + 0xef, 0x75, 0xe5, 0x21, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x13, 0x01, + 0x13, 0x02, 0x01, 0x00, 0x01, 0x4a, 0x00, 0x2d, + 0x00, 0x03, 0x02, 0x00, 0x01, 0x00, 0x33, 0x00, + 0x47, 0x00, 0x45, 0x00, 0x17, 0x00, 0x41, 0x04, + 0x90, 0xfc, 0xe2, 0x97, 0x05, 0x7c, 0xb5, 0x23, + 0x5d, 0x5f, 0x5b, 0xcd, 0x0c, 0x1e, 0xe0, 0xe9, + 0xab, 0x38, 0x6b, 0x1e, 0x20, 0x5c, 0x1c, 0x90, + 0x2a, 0x9e, 0x68, 0x8e, 0x70, 0x05, 0x10, 0xa8, + 0x02, 0x1b, 0xf9, 0x5c, 0xef, 0xc9, 0xaf, 0xca, + 0x1a, 0x3b, 0x16, 0x8b, 0xe4, 0x1b, 0x3c, 0x15, + 0xb8, 0x0d, 0xbd, 0xaf, 0x62, 0x8d, 0xa7, 0x13, + 0xa0, 0x7c, 0xe0, 0x59, 0x0c, 0x4f, 0x8a, 0x6d, + 0x00, 0x2b, 0x00, 0x03, 0x02, 0x03, 0x04, 0x00, + 0x0d, 0x00, 0x20, 0x00, 0x1e, 0x06, 0x03, 0x05, + 0x03, 0x04, 0x03, 0x02, 0x03, 0x08, 0x06, 0x08, + 0x0b, 0x08, 0x05, 0x08, 0x0a, 0x08, 0x04, 0x08, + 0x09, 0x06, 0x01, 0x05, 0x01, 0x04, 0x01, 0x03, + 0x01, 0x02, 0x01, 0x00, 0x0a, 0x00, 0x04, 0x00, + 0x02, 0x00, 0x17, 0x00, 0x16, 0x00, 0x00, 0x00, + 0x23, 0x00, 0x00, 0x00, 0x29, 0x00, 0xb9, 0x00, + 0x94, 0x00, 0x8e, 0x0f, 0x12, 0xfa, 0x84, 0x1f, + 0x76, 0x94, 0xd7, 0x09, 0x5e, 0xad, 0x08, 0x51, + 0xb6, 0x80, 0x28, 0x31, 0x8b, 0xfd, 0xc6, 0xbd, + 0x9e, 0xf5, 0x3b, 0x4d, 0x02, 0xbe, 0x1d, 0x73, + 0xea, 0x13, 0x68, 0x00, 0x4c, 0xfd, 0x3d, 0x48, + 0x51, 0xf9, 0x06, 0xbb, 0x92, 0xed, 0x42, 0x9f, + 0x7f, 0x2c, 0x73, 0x9f, 0xd9, 0xb4, 0xef, 0x05, + 0x26, 0x5b, 0x60, 0x5c, 0x0a, 0xfc, 0xa3, 0xbd, + 0x2d, 0x2d, 0x8b, 0xf9, 0xaa, 0x5c, 0x96, 0x3a, + 0xf2, 0xec, 0xfa, 0xe5, 0x57, 0x2e, 0x87, 0xbe, + 0x27, 0xc5, 0x3d, 0x4f, 0x5d, 0xdd, 0xde, 0x1c, + 0x1b, 0xb3, 0xcc, 0x27, 0x27, 0x57, 0x5a, 0xd9, + 0xea, 0x99, 0x27, 0x23, 0xa6, 0x0e, 0xea, 0x9c, + 0x0d, 0x85, 0xcb, 0x72, 0xeb, 0xd7, 0x93, 0xe3, + 0xfe, 0xf7, 0x5c, 0xc5, 0x5b, 0x75, 0x8c, 0x47, + 0x0a, 0x0e, 0xc4, 0x1a, 0xda, 0xef, 0x75, 0xe5, + 0x21, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0xfb, 0x92, 0xce, 0xaa, 0x00, 0x21, 0x20, + 0xcb, 0x73, 0x25, 0x80, 0x46, 0x78, 0x4f, 0xe5, + 0x34, 0xf6, 0x91, 0x13, 0x7f, 0xc8, 0x8d, 0xdc, + 0x81, 0x04, 0xb7, 0x0d, 
0x49, 0x85, 0x2e, 0x12, + 0x7a, 0x07, 0x23, 0xe9, 0x13, 0xa4, 0x6d, 0x8c, + 0x15, 0x03, 0x03, 0x00, 0x02, 0x01, 0x00, 0x00 + }; + + WOLFSSL_CTX* ctx = NULL; + WOLFSSL* ssl = NULL; + WOLFSSL_BUFFER_INFO msg; + +#ifdef WOLFSSL_TLS13_IGNORE_PT_ALERT_ON_ENC + /* We fail on WOLFSSL_ALERT_COUNT_MAX alerts. */ + + /* Set up wolfSSL context. */ + ExpectNotNull(ctx = wolfSSL_CTX_new(wolfTLSv1_3_server_method())); + ExpectTrue(wolfSSL_CTX_use_certificate_file(ctx, svrCertFile, + CERT_FILETYPE)); + ExpectTrue(wolfSSL_CTX_use_PrivateKey_file(ctx, svrKeyFile, + CERT_FILETYPE)); + if (EXPECT_SUCCESS()) { + wolfSSL_CTX_set_verify(ctx, WOLFSSL_VERIFY_NONE, NULL); + } + /* Read from 'msg'. */ + wolfSSL_SetIORecv(ctx, Tls13PTARecv); + /* No where to send to - dummy sender. */ + wolfSSL_SetIOSend(ctx, Tls13PTASend); + + ExpectNotNull(ssl = wolfSSL_new(ctx)); + msg.buffer = clientMsgs; + msg.length = (unsigned int)sizeof(clientMsgs) - 1; + clientMsgs[sizeof(clientMsgs) - 1] = WOLFSSL_ALERT_COUNT_MAX; + if (EXPECT_SUCCESS()) { + wolfSSL_SetIOReadCtx(ssl, &msg); + } + /* Alert will be ignored until too many. */ + /* Read all message include CertificateVerify with invalid signature + * algorithm. */ + ExpectIntEQ(wolfSSL_accept(ssl), WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); + /* Expect an invalid parameter error. */ + ExpectIntEQ(wolfSSL_get_error(ssl, WOLFSSL_FATAL_ERROR), + WC_NO_ERR_TRACE(ALERT_COUNT_E)); + + wolfSSL_free(ssl); + ssl = NULL; + wolfSSL_CTX_free(ctx); + ctx = NULL; + + /* Set up wolfSSL context. */ + ExpectNotNull(ctx = wolfSSL_CTX_new(wolfTLSv1_3_server_method())); + ExpectTrue(wolfSSL_CTX_use_certificate_file(ctx, svrCertFile, + CERT_FILETYPE)); + ExpectTrue(wolfSSL_CTX_use_PrivateKey_file(ctx, svrKeyFile, + CERT_FILETYPE)); + if (EXPECT_SUCCESS()) { + wolfSSL_CTX_set_verify(ctx, WOLFSSL_VERIFY_NONE, NULL); + } + /* Read from 'msg'. */ + wolfSSL_SetIORecv(ctx, Tls13PTARecv); + /* No where to send to - dummy sender. */ + wolfSSL_SetIOSend(ctx, Tls13PTASend); + + ExpectNotNull(ssl = wolfSSL_new(ctx)); + msg.buffer = clientMsgs; + msg.length = (unsigned int)sizeof(clientMsgs) - 1; + clientMsgs[sizeof(clientMsgs) - 1] = WOLFSSL_ALERT_COUNT_MAX - 1; + if (EXPECT_SUCCESS()) { + wolfSSL_SetIOReadCtx(ssl, &msg); + } + /* Alert will be ignored until too many. */ + /* Read all message include CertificateVerify with invalid signature + * algorithm. */ + ExpectIntEQ(wolfSSL_accept(ssl), WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); + /* Expect an invalid parameter error. */ + ExpectIntEQ(wolfSSL_get_error(ssl, WOLFSSL_FATAL_ERROR), + WC_NO_ERR_TRACE(SOCKET_ERROR_E)); + + wolfSSL_free(ssl); + wolfSSL_CTX_free(ctx); +#else + /* Fail on plaintext alert when encryption keys on. */ + + /* Set up wolfSSL context. */ + ExpectNotNull(ctx = wolfSSL_CTX_new(wolfTLSv1_3_server_method())); + ExpectTrue(wolfSSL_CTX_use_certificate_file(ctx, svrCertFile, + CERT_FILETYPE)); + ExpectTrue(wolfSSL_CTX_use_PrivateKey_file(ctx, svrKeyFile, + CERT_FILETYPE)); + if (EXPECT_SUCCESS()) { + wolfSSL_CTX_set_verify(ctx, WOLFSSL_VERIFY_NONE, NULL); + } + /* Read from 'msg'. */ + wolfSSL_SetIORecv(ctx, Tls13PTARecv); + /* No where to send to - dummy sender. */ + wolfSSL_SetIOSend(ctx, Tls13PTASend); + + ExpectNotNull(ssl = wolfSSL_new(ctx)); + msg.buffer = clientMsgs; + msg.length = (unsigned int)sizeof(clientMsgs) - 1; + clientMsgs[sizeof(clientMsgs) - 1] = 1; + if (EXPECT_SUCCESS()) { + wolfSSL_SetIOReadCtx(ssl, &msg); + } + /* Alert will be ignored until too many. 
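+     * Note: without WOLFSSL_TLS13_IGNORE_PT_ALERT_ON_ENC the very first
+     * plaintext alert seen after encryption starts is rejected with
+     * PARSE_ERROR, as checked below.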
*/ + /* Read all message include CertificateVerify with invalid signature + * algorithm. */ + ExpectIntEQ(wolfSSL_accept(ssl), WC_NO_ERR_TRACE(WOLFSSL_FATAL_ERROR)); + /* Expect an invalid parameter error. */ + ExpectIntEQ(wolfSSL_get_error(ssl, WOLFSSL_FATAL_ERROR), + WC_NO_ERR_TRACE(PARSE_ERROR)); + + wolfSSL_free(ssl); + wolfSSL_CTX_free(ctx); +#endif +#endif + + return EXPECT_RESULT(); +} + diff --git a/tests/api/test_tls13.h b/tests/api/test_tls13.h index 85669d818..8bee0bc74 100644 --- a/tests/api/test_tls13.h +++ b/tests/api/test_tls13.h @@ -31,24 +31,26 @@ int test_tls13_rpk_handshake(void); int test_tls13_pq_groups(void); int test_tls13_early_data(void); int test_tls13_same_ch(void); -int test_key_share_mismatch(void); int test_tls13_hrr_different_cs(void); int test_tls13_sg_missing(void); int test_tls13_ks_missing(void); int test_tls13_duplicate_extension(void); +int test_key_share_mismatch(void); +int test_tls13_plaintext_alert(void); -#define TEST_TLS13_DECLS \ - TEST_DECL_GROUP("tls13", test_tls13_apis), \ - TEST_DECL_GROUP("tls13", test_tls13_cipher_suites), \ - TEST_DECL_GROUP("tls13", test_tls13_bad_psk_binder), \ - TEST_DECL_GROUP("tls13", test_tls13_rpk_handshake), \ - TEST_DECL_GROUP("tls13", test_tls13_pq_groups), \ - TEST_DECL_GROUP("tls13", test_tls13_early_data), \ - TEST_DECL_GROUP("tls13", test_tls13_same_ch), \ - TEST_DECL_GROUP("tls13", test_tls13_hrr_different_cs), \ - TEST_DECL_GROUP("tls13", test_tls13_sg_missing), \ - TEST_DECL_GROUP("tls13", test_tls13_ks_missing), \ - TEST_DECL_GROUP("tls13", test_tls13_duplicate_extension), \ - TEST_DECL_GROUP("tls13", test_key_share_mismatch) +#define TEST_TLS13_DECLS \ + TEST_DECL_GROUP("tls13", test_tls13_apis), \ + TEST_DECL_GROUP("tls13", test_tls13_cipher_suites), \ + TEST_DECL_GROUP("tls13", test_tls13_bad_psk_binder), \ + TEST_DECL_GROUP("tls13", test_tls13_rpk_handshake), \ + TEST_DECL_GROUP("tls13", test_tls13_pq_groups), \ + TEST_DECL_GROUP("tls13", test_tls13_early_data), \ + TEST_DECL_GROUP("tls13", test_tls13_same_ch), \ + TEST_DECL_GROUP("tls13", test_tls13_hrr_different_cs), \ + TEST_DECL_GROUP("tls13", test_tls13_sg_missing), \ + TEST_DECL_GROUP("tls13", test_tls13_ks_missing), \ + TEST_DECL_GROUP("tls13", test_tls13_duplicate_extension), \ + TEST_DECL_GROUP("tls13", test_key_share_mismatch), \ + TEST_DECL_GROUP("tls13", test_tls13_plaintext_alert) #endif /* WOLFCRYPT_TEST_TLS13_H */ From 0b5e9c76edcad7dc18c0eeaa1873157a2a49bc2c Mon Sep 17 00:00:00 2001 From: Anthony Hu Date: Tue, 23 Dec 2025 10:08:02 -0500 Subject: [PATCH 27/27] Correct the API docs for wolfSSL_write_early_data() --- doc/dox_comments/header_files/ssl.h | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/doc/dox_comments/header_files/ssl.h b/doc/dox_comments/header_files/ssl.h index 45aa18777..80d7ad75d 100644 --- a/doc/dox_comments/header_files/ssl.h +++ b/doc/dox_comments/header_files/ssl.h @@ -14418,8 +14418,7 @@ int wolfSSL_set_max_early_data(WOLFSSL* ssl, unsigned int sz); \ingroup IO \brief This function writes early data to the server on resumption. - Call this function instead of wolfSSL_connect() or wolfSSL_connect_TLSv13() - to connect to the server and send the data in the handshake. + Call this function before wolfSSL_connect() or wolfSSL_connect_TLSv13(). This function is only used with clients. \param [in,out] ssl a pointer to a WOLFSSL structure, created using wolfSSL_new(). @@ -14431,7 +14430,7 @@ int wolfSSL_set_max_early_data(WOLFSSL* ssl, unsigned int sz); not using TLSv1.3. 
\return SIDE_ERROR if called with a server. \return WOLFSSL_FATAL_ERROR if the connection is not made. - \return WOLFSSL_SUCCESS if successful. + \return the amount of early data written in bytes if successful. _Example_ \code @@ -14444,7 +14443,7 @@ int wolfSSL_set_max_early_data(WOLFSSL* ssl, unsigned int sz); ... ret = wolfSSL_write_early_data(ssl, earlyData, sizeof(earlyData), &outSz); - if (ret != WOLFSSL_SUCCESS) { + if (ret < 0) { err = wolfSSL_get_error(ssl, ret); printf(“error = %d, %s\n”, err, wolfSSL_ERR_error_string(err, buffer)); goto err_label;