From 0ab09ab1470f3b96d7965afa728865485eb7666d Mon Sep 17 00:00:00 2001 From: Sean Parkinson Date: Wed, 10 Dec 2025 15:36:07 +1000 Subject: [PATCH] PPC32 SHA-256 ASM: support compiling for PIC When compiling for PIC, registers 30 and 31 are not always available. An alternative implementation that does not use them, keeping values on the stack instead, has been added. A small code size version is implemented as well. --- .wolfssl_known_macro_extras | 1 + wolfcrypt/src/port/ppc32/ppc32-sha256-asm.S | 3239 ++++++++++++++++ wolfcrypt/src/port/ppc32/ppc32-sha256-asm_c.c | 3248 +++++++++++++++++ 3 files changed, 6488 insertions(+) diff --git a/.wolfssl_known_macro_extras b/.wolfssl_known_macro_extras index ca9dc8cce..64e9d0e7f 100644 --- a/.wolfssl_known_macro_extras +++ b/.wolfssl_known_macro_extras @@ -1025,6 +1025,7 @@ __MWERKS__ __NT__ __OS2__ __OpenBSD__ +__PIC__ __PIE__ __POWERPC__ __PPC__ diff --git a/wolfcrypt/src/port/ppc32/ppc32-sha256-asm.S b/wolfcrypt/src/port/ppc32/ppc32-sha256-asm.S index e2fdd6473..bd55952be 100644 --- a/wolfcrypt/src/port/ppc32/ppc32-sha256-asm.S +++ b/wolfcrypt/src/port/ppc32/ppc32-sha256-asm.S @@ -1287,6 +1287,7 @@ L_SHA256_transform_len_k: .long 0xa4506ceb .long 0xbef9a3f7 .long 0xc67178f2 +#ifndef __PIC__ .section ".text" .align 4 .globl Transform_Sha256_Len @@ -4503,6 +4504,3244 @@ L_SHA256_transform_len_after_blk_15: addi 1, 1, 0x4c blr .size Transform_Sha256_Len,.-Transform_Sha256_Len +#else +/* PIC version not using register 30 or 31 */ + .section ".text" + .align 4 + .globl Transform_Sha256_Len + .type Transform_Sha256_Len, @function +Transform_Sha256_Len: + stwu 1, -68(1) + mflr 0 + stw 0, 64(1) + stw 14, 0(1) + stw 15, 4(1) + stw 16, 8(1) + stw 17, 12(1) + stw 18, 16(1) + stw 19, 20(1) + stw 20, 24(1) + stw 21, 28(1) + stw 22, 32(1) + stw 23, 36(1) + stw 24, 40(1) + stw 25, 44(1) + stw 26, 48(1) + stw 27, 52(1) + stw 28, 56(1) + stw 29, 60(1) + srwi 5, 5, 6 + lis 6, L_SHA256_transform_len_k@ha + la 6, L_SHA256_transform_len_k@l(6) +#ifndef WOLFSSL_PPC32_ASM_SMALL + subi 1, 1, 8 + stw 3, 0(1) + stw 4, 4(1) + mtctr 5 + # Copy digest to add in at end + lwz 0, 0(3) + lwz 4, 4(3) + lwz 7, 8(3) + lwz 8, 12(3) + lwz 9, 16(3) + lwz 10, 20(3) + lwz 11, 24(3) + lwz 12, 28(3) + lwz 3, 4(1) + # Start of loop processing a block +L_SHA256_transform_len_begin: + # Load W - 64 bytes + lwz 14, 0(3) + lwz 15, 4(3) + lwz 16, 8(3) + lwz 17, 12(3) + lwz 18, 16(3) + lwz 19, 20(3) + lwz 20, 24(3) + lwz 21, 28(3) + lwz 22, 32(3) + lwz 23, 36(3) + lwz 24, 40(3) + lwz 25, 44(3) + lwz 26, 48(3) + lwz 27, 52(3) + lwz 28, 56(3) + lwz 29, 60(3) + # Start of 16 rounds + # Round 0 + rotlwi 3, 9, 26 + rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 0(6) + add 12, 12, 14 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + # Calc new W[0] + rotlwi 3, 15, 25 + rotlwi 5, 15, 14 + xor 3, 3, 5 + srwi 5, 15, 3 + xor 3, 3, 5 + add 14, 14, 3 + rotlwi 3, 28, 15 + rotlwi 5, 28, 13 + xor 3, 3, 5 + srwi 5, 28, 10 + xor 3, 3, 5 + add 14, 14, 3 + add 14, 14, 23 + # Round 1 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 4(6) + add 11, 11, 15 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3 + xor 
5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + # Calc new W[1] + rotlwi 3, 16, 25 + rotlwi 5, 16, 14 + xor 3, 3, 5 + srwi 5, 16, 3 + xor 3, 3, 5 + add 15, 15, 3 + rotlwi 3, 29, 15 + rotlwi 5, 29, 13 + xor 3, 3, 5 + srwi 5, 29, 10 + xor 3, 3, 5 + add 15, 15, 3 + add 15, 15, 24 + # Round 2 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 8(6) + add 10, 10, 16 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 + rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + # Calc new W[2] + rotlwi 3, 17, 25 + rotlwi 5, 17, 14 + xor 3, 3, 5 + srwi 5, 17, 3 + xor 3, 3, 5 + add 16, 16, 3 + rotlwi 3, 14, 15 + rotlwi 5, 14, 13 + xor 3, 3, 5 + srwi 5, 14, 10 + xor 3, 3, 5 + add 16, 16, 3 + add 16, 16, 25 + # Round 3 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 9, 3 + lwz 3, 12(6) + add 9, 9, 17 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + # Calc new W[3] + rotlwi 3, 18, 25 + rotlwi 5, 18, 14 + xor 3, 3, 5 + srwi 5, 18, 3 + xor 3, 3, 5 + add 17, 17, 3 + rotlwi 3, 15, 15 + rotlwi 5, 15, 13 + xor 3, 3, 5 + srwi 5, 15, 10 + xor 3, 3, 5 + add 17, 17, 3 + add 17, 17, 26 + # Round 4 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 16(6) + add 8, 8, 18 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + # Calc new W[4] + rotlwi 3, 19, 25 + rotlwi 5, 19, 14 + xor 3, 3, 5 + srwi 5, 19, 3 + xor 3, 3, 5 + add 18, 18, 3 + rotlwi 3, 16, 15 + rotlwi 5, 16, 13 + xor 3, 3, 5 + srwi 5, 16, 10 + xor 3, 3, 5 + add 18, 18, 3 + add 18, 18, 27 + # Round 5 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 20(6) + add 7, 7, 19 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + # Calc new W[5] + rotlwi 3, 20, 25 + rotlwi 5, 20, 14 + xor 3, 3, 5 + srwi 5, 20, 3 + xor 3, 3, 5 + add 19, 19, 3 + rotlwi 3, 17, 15 + rotlwi 5, 17, 13 + xor 3, 3, 5 + srwi 5, 17, 10 + xor 3, 3, 5 + add 19, 19, 3 + add 19, 19, 28 + # Round 6 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 24(6) + add 4, 4, 20 + add 4, 4, 3 + add 10, 10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + # Calc new W[6] + rotlwi 3, 21, 25 + rotlwi 5, 21, 14 + xor 3, 3, 5 + srwi 5, 21, 3 + xor 3, 3, 5 + add 20, 20, 3 + rotlwi 3, 18, 15 + rotlwi 5, 18, 13 + xor 3, 3, 5 + srwi 5, 18, 10 + xor 3, 3, 5 + add 20, 20, 3 + add 20, 20, 29 + # Round 7 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 3 + 
xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 28(6) + add 0, 0, 21 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + # Calc new W[7] + rotlwi 3, 22, 25 + rotlwi 5, 22, 14 + xor 3, 3, 5 + srwi 5, 22, 3 + xor 3, 3, 5 + add 21, 21, 3 + rotlwi 3, 19, 15 + rotlwi 5, 19, 13 + xor 3, 3, 5 + srwi 5, 19, 10 + xor 3, 3, 5 + add 21, 21, 3 + add 21, 21, 14 + # Round 8 + rotlwi 3, 9, 26 + rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 32(6) + add 12, 12, 22 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + # Calc new W[8] + rotlwi 3, 23, 25 + rotlwi 5, 23, 14 + xor 3, 3, 5 + srwi 5, 23, 3 + xor 3, 3, 5 + add 22, 22, 3 + rotlwi 3, 20, 15 + rotlwi 5, 20, 13 + xor 3, 3, 5 + srwi 5, 20, 10 + xor 3, 3, 5 + add 22, 22, 3 + add 22, 22, 15 + # Round 9 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 36(6) + add 11, 11, 23 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3 + xor 5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + # Calc new W[9] + rotlwi 3, 24, 25 + rotlwi 5, 24, 14 + xor 3, 3, 5 + srwi 5, 24, 3 + xor 3, 3, 5 + add 23, 23, 3 + rotlwi 3, 21, 15 + rotlwi 5, 21, 13 + xor 3, 3, 5 + srwi 5, 21, 10 + xor 3, 3, 5 + add 23, 23, 3 + add 23, 23, 16 + # Round 10 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 40(6) + add 10, 10, 24 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 + rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + # Calc new W[10] + rotlwi 3, 25, 25 + rotlwi 5, 25, 14 + xor 3, 3, 5 + srwi 5, 25, 3 + xor 3, 3, 5 + add 24, 24, 3 + rotlwi 3, 22, 15 + rotlwi 5, 22, 13 + xor 3, 3, 5 + srwi 5, 22, 10 + xor 3, 3, 5 + add 24, 24, 3 + add 24, 24, 17 + # Round 11 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 9, 3 + lwz 3, 44(6) + add 9, 9, 25 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + # Calc new W[11] + rotlwi 3, 26, 25 + rotlwi 5, 26, 14 + xor 3, 3, 5 + srwi 5, 26, 3 + xor 3, 3, 5 + add 25, 25, 3 + rotlwi 3, 23, 15 + rotlwi 5, 23, 13 + xor 3, 3, 5 + srwi 5, 23, 10 + xor 3, 3, 5 + add 25, 25, 3 + add 25, 25, 18 + # Round 12 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 48(6) + add 8, 8, 26 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + # Calc new W[12] + rotlwi 3, 27, 25 + rotlwi 5, 27, 14 + xor 3, 3, 5 + srwi 5, 27, 3 + xor 3, 3, 5 + 
add 26, 26, 3 + rotlwi 3, 24, 15 + rotlwi 5, 24, 13 + xor 3, 3, 5 + srwi 5, 24, 10 + xor 3, 3, 5 + add 26, 26, 3 + add 26, 26, 19 + # Round 13 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 52(6) + add 7, 7, 27 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + # Calc new W[13] + rotlwi 3, 28, 25 + rotlwi 5, 28, 14 + xor 3, 3, 5 + srwi 5, 28, 3 + xor 3, 3, 5 + add 27, 27, 3 + rotlwi 3, 25, 15 + rotlwi 5, 25, 13 + xor 3, 3, 5 + srwi 5, 25, 10 + xor 3, 3, 5 + add 27, 27, 3 + add 27, 27, 20 + # Round 14 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 56(6) + add 4, 4, 28 + add 4, 4, 3 + add 10, 10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + # Calc new W[14] + rotlwi 3, 29, 25 + rotlwi 5, 29, 14 + xor 3, 3, 5 + srwi 5, 29, 3 + xor 3, 3, 5 + add 28, 28, 3 + rotlwi 3, 26, 15 + rotlwi 5, 26, 13 + xor 3, 3, 5 + srwi 5, 26, 10 + xor 3, 3, 5 + add 28, 28, 3 + add 28, 28, 21 + # Round 15 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 3 + xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 60(6) + add 0, 0, 29 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + # Calc new W[15] + rotlwi 3, 14, 25 + rotlwi 5, 14, 14 + xor 3, 3, 5 + srwi 5, 14, 3 + xor 3, 3, 5 + add 29, 29, 3 + rotlwi 3, 27, 15 + rotlwi 5, 27, 13 + xor 3, 3, 5 + srwi 5, 27, 10 + xor 3, 3, 5 + add 29, 29, 3 + add 29, 29, 22 + addi 6, 6, 0x40 + # Round 0 + rotlwi 3, 9, 26 + rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 0(6) + add 12, 12, 14 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + # Calc new W[0] + rotlwi 3, 15, 25 + rotlwi 5, 15, 14 + xor 3, 3, 5 + srwi 5, 15, 3 + xor 3, 3, 5 + add 14, 14, 3 + rotlwi 3, 28, 15 + rotlwi 5, 28, 13 + xor 3, 3, 5 + srwi 5, 28, 10 + xor 3, 3, 5 + add 14, 14, 3 + add 14, 14, 23 + # Round 1 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 4(6) + add 11, 11, 15 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3 + xor 5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + # Calc new W[1] + rotlwi 3, 16, 25 + rotlwi 5, 16, 14 + xor 3, 3, 5 + srwi 5, 16, 3 + xor 3, 3, 5 + add 15, 15, 3 + rotlwi 3, 29, 15 + rotlwi 5, 29, 13 + xor 3, 3, 5 + srwi 5, 29, 10 + xor 3, 3, 5 + add 15, 15, 3 + add 15, 15, 24 + # Round 2 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 8(6) + add 10, 10, 16 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 + 
rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + # Calc new W[2] + rotlwi 3, 17, 25 + rotlwi 5, 17, 14 + xor 3, 3, 5 + srwi 5, 17, 3 + xor 3, 3, 5 + add 16, 16, 3 + rotlwi 3, 14, 15 + rotlwi 5, 14, 13 + xor 3, 3, 5 + srwi 5, 14, 10 + xor 3, 3, 5 + add 16, 16, 3 + add 16, 16, 25 + # Round 3 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 9, 3 + lwz 3, 12(6) + add 9, 9, 17 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + # Calc new W[3] + rotlwi 3, 18, 25 + rotlwi 5, 18, 14 + xor 3, 3, 5 + srwi 5, 18, 3 + xor 3, 3, 5 + add 17, 17, 3 + rotlwi 3, 15, 15 + rotlwi 5, 15, 13 + xor 3, 3, 5 + srwi 5, 15, 10 + xor 3, 3, 5 + add 17, 17, 3 + add 17, 17, 26 + # Round 4 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 16(6) + add 8, 8, 18 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + # Calc new W[4] + rotlwi 3, 19, 25 + rotlwi 5, 19, 14 + xor 3, 3, 5 + srwi 5, 19, 3 + xor 3, 3, 5 + add 18, 18, 3 + rotlwi 3, 16, 15 + rotlwi 5, 16, 13 + xor 3, 3, 5 + srwi 5, 16, 10 + xor 3, 3, 5 + add 18, 18, 3 + add 18, 18, 27 + # Round 5 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 20(6) + add 7, 7, 19 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + # Calc new W[5] + rotlwi 3, 20, 25 + rotlwi 5, 20, 14 + xor 3, 3, 5 + srwi 5, 20, 3 + xor 3, 3, 5 + add 19, 19, 3 + rotlwi 3, 17, 15 + rotlwi 5, 17, 13 + xor 3, 3, 5 + srwi 5, 17, 10 + xor 3, 3, 5 + add 19, 19, 3 + add 19, 19, 28 + # Round 6 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 24(6) + add 4, 4, 20 + add 4, 4, 3 + add 10, 10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + # Calc new W[6] + rotlwi 3, 21, 25 + rotlwi 5, 21, 14 + xor 3, 3, 5 + srwi 5, 21, 3 + xor 3, 3, 5 + add 20, 20, 3 + rotlwi 3, 18, 15 + rotlwi 5, 18, 13 + xor 3, 3, 5 + srwi 5, 18, 10 + xor 3, 3, 5 + add 20, 20, 3 + add 20, 20, 29 + # Round 7 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 3 + xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 28(6) + add 0, 0, 21 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + # Calc new W[7] + rotlwi 3, 22, 25 + rotlwi 5, 22, 14 + xor 3, 3, 5 + srwi 5, 22, 3 + xor 3, 3, 5 + add 21, 21, 3 + rotlwi 3, 19, 15 + rotlwi 5, 19, 13 + xor 3, 3, 5 + srwi 5, 19, 10 + xor 3, 3, 5 + add 21, 21, 3 + add 21, 21, 14 + # Round 8 + rotlwi 3, 9, 26 + 
rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 32(6) + add 12, 12, 22 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + # Calc new W[8] + rotlwi 3, 23, 25 + rotlwi 5, 23, 14 + xor 3, 3, 5 + srwi 5, 23, 3 + xor 3, 3, 5 + add 22, 22, 3 + rotlwi 3, 20, 15 + rotlwi 5, 20, 13 + xor 3, 3, 5 + srwi 5, 20, 10 + xor 3, 3, 5 + add 22, 22, 3 + add 22, 22, 15 + # Round 9 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 36(6) + add 11, 11, 23 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3 + xor 5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + # Calc new W[9] + rotlwi 3, 24, 25 + rotlwi 5, 24, 14 + xor 3, 3, 5 + srwi 5, 24, 3 + xor 3, 3, 5 + add 23, 23, 3 + rotlwi 3, 21, 15 + rotlwi 5, 21, 13 + xor 3, 3, 5 + srwi 5, 21, 10 + xor 3, 3, 5 + add 23, 23, 3 + add 23, 23, 16 + # Round 10 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 40(6) + add 10, 10, 24 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 + rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + # Calc new W[10] + rotlwi 3, 25, 25 + rotlwi 5, 25, 14 + xor 3, 3, 5 + srwi 5, 25, 3 + xor 3, 3, 5 + add 24, 24, 3 + rotlwi 3, 22, 15 + rotlwi 5, 22, 13 + xor 3, 3, 5 + srwi 5, 22, 10 + xor 3, 3, 5 + add 24, 24, 3 + add 24, 24, 17 + # Round 11 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 9, 3 + lwz 3, 44(6) + add 9, 9, 25 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + # Calc new W[11] + rotlwi 3, 26, 25 + rotlwi 5, 26, 14 + xor 3, 3, 5 + srwi 5, 26, 3 + xor 3, 3, 5 + add 25, 25, 3 + rotlwi 3, 23, 15 + rotlwi 5, 23, 13 + xor 3, 3, 5 + srwi 5, 23, 10 + xor 3, 3, 5 + add 25, 25, 3 + add 25, 25, 18 + # Round 12 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 48(6) + add 8, 8, 26 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + # Calc new W[12] + rotlwi 3, 27, 25 + rotlwi 5, 27, 14 + xor 3, 3, 5 + srwi 5, 27, 3 + xor 3, 3, 5 + add 26, 26, 3 + rotlwi 3, 24, 15 + rotlwi 5, 24, 13 + xor 3, 3, 5 + srwi 5, 24, 10 + xor 3, 3, 5 + add 26, 26, 3 + add 26, 26, 19 + # Round 13 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 52(6) + add 7, 7, 27 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + # Calc new W[13] + 
rotlwi 3, 28, 25 + rotlwi 5, 28, 14 + xor 3, 3, 5 + srwi 5, 28, 3 + xor 3, 3, 5 + add 27, 27, 3 + rotlwi 3, 25, 15 + rotlwi 5, 25, 13 + xor 3, 3, 5 + srwi 5, 25, 10 + xor 3, 3, 5 + add 27, 27, 3 + add 27, 27, 20 + # Round 14 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 56(6) + add 4, 4, 28 + add 4, 4, 3 + add 10, 10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + # Calc new W[14] + rotlwi 3, 29, 25 + rotlwi 5, 29, 14 + xor 3, 3, 5 + srwi 5, 29, 3 + xor 3, 3, 5 + add 28, 28, 3 + rotlwi 3, 26, 15 + rotlwi 5, 26, 13 + xor 3, 3, 5 + srwi 5, 26, 10 + xor 3, 3, 5 + add 28, 28, 3 + add 28, 28, 21 + # Round 15 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 3 + xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 60(6) + add 0, 0, 29 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + # Calc new W[15] + rotlwi 3, 14, 25 + rotlwi 5, 14, 14 + xor 3, 3, 5 + srwi 5, 14, 3 + xor 3, 3, 5 + add 29, 29, 3 + rotlwi 3, 27, 15 + rotlwi 5, 27, 13 + xor 3, 3, 5 + srwi 5, 27, 10 + xor 3, 3, 5 + add 29, 29, 3 + add 29, 29, 22 + addi 6, 6, 0x40 + # Round 0 + rotlwi 3, 9, 26 + rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 0(6) + add 12, 12, 14 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + # Calc new W[0] + rotlwi 3, 15, 25 + rotlwi 5, 15, 14 + xor 3, 3, 5 + srwi 5, 15, 3 + xor 3, 3, 5 + add 14, 14, 3 + rotlwi 3, 28, 15 + rotlwi 5, 28, 13 + xor 3, 3, 5 + srwi 5, 28, 10 + xor 3, 3, 5 + add 14, 14, 3 + add 14, 14, 23 + # Round 1 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 4(6) + add 11, 11, 15 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3 + xor 5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + # Calc new W[1] + rotlwi 3, 16, 25 + rotlwi 5, 16, 14 + xor 3, 3, 5 + srwi 5, 16, 3 + xor 3, 3, 5 + add 15, 15, 3 + rotlwi 3, 29, 15 + rotlwi 5, 29, 13 + xor 3, 3, 5 + srwi 5, 29, 10 + xor 3, 3, 5 + add 15, 15, 3 + add 15, 15, 24 + # Round 2 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 8(6) + add 10, 10, 16 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 + rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + # Calc new W[2] + rotlwi 3, 17, 25 + rotlwi 5, 17, 14 + xor 3, 3, 5 + srwi 5, 17, 3 + xor 3, 3, 5 + add 16, 16, 3 + rotlwi 3, 14, 15 + rotlwi 5, 14, 13 + xor 3, 3, 5 + srwi 5, 14, 10 + xor 3, 3, 5 + add 16, 16, 3 + add 16, 16, 25 + # Round 3 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 
9, 3 + lwz 3, 12(6) + add 9, 9, 17 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + # Calc new W[3] + rotlwi 3, 18, 25 + rotlwi 5, 18, 14 + xor 3, 3, 5 + srwi 5, 18, 3 + xor 3, 3, 5 + add 17, 17, 3 + rotlwi 3, 15, 15 + rotlwi 5, 15, 13 + xor 3, 3, 5 + srwi 5, 15, 10 + xor 3, 3, 5 + add 17, 17, 3 + add 17, 17, 26 + # Round 4 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 16(6) + add 8, 8, 18 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + # Calc new W[4] + rotlwi 3, 19, 25 + rotlwi 5, 19, 14 + xor 3, 3, 5 + srwi 5, 19, 3 + xor 3, 3, 5 + add 18, 18, 3 + rotlwi 3, 16, 15 + rotlwi 5, 16, 13 + xor 3, 3, 5 + srwi 5, 16, 10 + xor 3, 3, 5 + add 18, 18, 3 + add 18, 18, 27 + # Round 5 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 20(6) + add 7, 7, 19 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + # Calc new W[5] + rotlwi 3, 20, 25 + rotlwi 5, 20, 14 + xor 3, 3, 5 + srwi 5, 20, 3 + xor 3, 3, 5 + add 19, 19, 3 + rotlwi 3, 17, 15 + rotlwi 5, 17, 13 + xor 3, 3, 5 + srwi 5, 17, 10 + xor 3, 3, 5 + add 19, 19, 3 + add 19, 19, 28 + # Round 6 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 24(6) + add 4, 4, 20 + add 4, 4, 3 + add 10, 10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + # Calc new W[6] + rotlwi 3, 21, 25 + rotlwi 5, 21, 14 + xor 3, 3, 5 + srwi 5, 21, 3 + xor 3, 3, 5 + add 20, 20, 3 + rotlwi 3, 18, 15 + rotlwi 5, 18, 13 + xor 3, 3, 5 + srwi 5, 18, 10 + xor 3, 3, 5 + add 20, 20, 3 + add 20, 20, 29 + # Round 7 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 3 + xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 28(6) + add 0, 0, 21 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + # Calc new W[7] + rotlwi 3, 22, 25 + rotlwi 5, 22, 14 + xor 3, 3, 5 + srwi 5, 22, 3 + xor 3, 3, 5 + add 21, 21, 3 + rotlwi 3, 19, 15 + rotlwi 5, 19, 13 + xor 3, 3, 5 + srwi 5, 19, 10 + xor 3, 3, 5 + add 21, 21, 3 + add 21, 21, 14 + # Round 8 + rotlwi 3, 9, 26 + rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 32(6) + add 12, 12, 22 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + # Calc new W[8] + rotlwi 3, 23, 25 + rotlwi 5, 23, 14 + xor 3, 3, 5 + srwi 5, 23, 3 + xor 3, 3, 5 + add 22, 22, 3 + rotlwi 3, 20, 15 + rotlwi 5, 20, 13 + xor 3, 3, 5 + srwi 5, 
20, 10 + xor 3, 3, 5 + add 22, 22, 3 + add 22, 22, 15 + # Round 9 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 36(6) + add 11, 11, 23 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3 + xor 5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + # Calc new W[9] + rotlwi 3, 24, 25 + rotlwi 5, 24, 14 + xor 3, 3, 5 + srwi 5, 24, 3 + xor 3, 3, 5 + add 23, 23, 3 + rotlwi 3, 21, 15 + rotlwi 5, 21, 13 + xor 3, 3, 5 + srwi 5, 21, 10 + xor 3, 3, 5 + add 23, 23, 3 + add 23, 23, 16 + # Round 10 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 40(6) + add 10, 10, 24 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 + rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + # Calc new W[10] + rotlwi 3, 25, 25 + rotlwi 5, 25, 14 + xor 3, 3, 5 + srwi 5, 25, 3 + xor 3, 3, 5 + add 24, 24, 3 + rotlwi 3, 22, 15 + rotlwi 5, 22, 13 + xor 3, 3, 5 + srwi 5, 22, 10 + xor 3, 3, 5 + add 24, 24, 3 + add 24, 24, 17 + # Round 11 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 9, 3 + lwz 3, 44(6) + add 9, 9, 25 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + # Calc new W[11] + rotlwi 3, 26, 25 + rotlwi 5, 26, 14 + xor 3, 3, 5 + srwi 5, 26, 3 + xor 3, 3, 5 + add 25, 25, 3 + rotlwi 3, 23, 15 + rotlwi 5, 23, 13 + xor 3, 3, 5 + srwi 5, 23, 10 + xor 3, 3, 5 + add 25, 25, 3 + add 25, 25, 18 + # Round 12 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 48(6) + add 8, 8, 26 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + # Calc new W[12] + rotlwi 3, 27, 25 + rotlwi 5, 27, 14 + xor 3, 3, 5 + srwi 5, 27, 3 + xor 3, 3, 5 + add 26, 26, 3 + rotlwi 3, 24, 15 + rotlwi 5, 24, 13 + xor 3, 3, 5 + srwi 5, 24, 10 + xor 3, 3, 5 + add 26, 26, 3 + add 26, 26, 19 + # Round 13 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 52(6) + add 7, 7, 27 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + # Calc new W[13] + rotlwi 3, 28, 25 + rotlwi 5, 28, 14 + xor 3, 3, 5 + srwi 5, 28, 3 + xor 3, 3, 5 + add 27, 27, 3 + rotlwi 3, 25, 15 + rotlwi 5, 25, 13 + xor 3, 3, 5 + srwi 5, 25, 10 + xor 3, 3, 5 + add 27, 27, 3 + add 27, 27, 20 + # Round 14 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 56(6) + add 4, 4, 28 + add 4, 4, 3 + add 10, 10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 
8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + # Calc new W[14] + rotlwi 3, 29, 25 + rotlwi 5, 29, 14 + xor 3, 3, 5 + srwi 5, 29, 3 + xor 3, 3, 5 + add 28, 28, 3 + rotlwi 3, 26, 15 + rotlwi 5, 26, 13 + xor 3, 3, 5 + srwi 5, 26, 10 + xor 3, 3, 5 + add 28, 28, 3 + add 28, 28, 21 + # Round 15 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 3 + xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 60(6) + add 0, 0, 29 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + # Calc new W[15] + rotlwi 3, 14, 25 + rotlwi 5, 14, 14 + xor 3, 3, 5 + srwi 5, 14, 3 + xor 3, 3, 5 + add 29, 29, 3 + rotlwi 3, 27, 15 + rotlwi 5, 27, 13 + xor 3, 3, 5 + srwi 5, 27, 10 + xor 3, 3, 5 + add 29, 29, 3 + add 29, 29, 22 + addi 6, 6, 0x40 + # Round 0 + rotlwi 3, 9, 26 + rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 0(6) + add 12, 12, 14 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + # Round 1 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 4(6) + add 11, 11, 15 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3 + xor 5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + # Round 2 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 8(6) + add 10, 10, 16 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 + rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + # Round 3 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 9, 3 + lwz 3, 12(6) + add 9, 9, 17 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + # Round 4 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 16(6) + add 8, 8, 18 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + # Round 5 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 20(6) + add 7, 7, 19 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + # Round 6 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 24(6) + add 4, 4, 20 + add 4, 4, 3 + add 10, 
10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + # Round 7 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 3 + xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 28(6) + add 0, 0, 21 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + # Round 8 + rotlwi 3, 9, 26 + rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 32(6) + add 12, 12, 22 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + # Round 9 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 36(6) + add 11, 11, 23 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3 + xor 5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + # Round 10 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 40(6) + add 10, 10, 24 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 + rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + # Round 11 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 9, 3 + lwz 3, 44(6) + add 9, 9, 25 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + # Round 12 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 48(6) + add 8, 8, 26 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + # Round 13 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 52(6) + add 7, 7, 27 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + # Round 14 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 56(6) + add 4, 4, 28 + add 4, 4, 3 + add 10, 10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + # Round 15 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 3 + xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 60(6) + add 
0, 0, 29 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + subi 6, 6, 0xc0 + lwz 3, 0(1) + # Add in digest from start + lwz 5, 0(3) + add 0, 0, 5 + lwz 5, 4(3) + add 4, 4, 5 + lwz 5, 8(3) + add 7, 7, 5 + lwz 5, 12(3) + add 8, 8, 5 + lwz 5, 16(3) + add 9, 9, 5 + lwz 5, 20(3) + add 10, 10, 5 + lwz 5, 24(3) + add 11, 11, 5 + lwz 5, 28(3) + add 12, 12, 5 + stw 0, 0(3) + stw 4, 4(3) + stw 7, 8(3) + stw 8, 12(3) + stw 9, 16(3) + stw 10, 20(3) + stw 11, 24(3) + stw 12, 28(3) + lwz 3, 4(1) + addi 3, 3, 0x40 + stw 3, 4(1) + bdnz L_SHA256_transform_len_begin + addi 1, 1, 8 +#else + subi 1, 1, 12 + stw 3, 0(1) + stw 4, 4(1) + stw 5, 8(1) + # Copy digest to add in at end + lwz 0, 0(3) + lwz 4, 4(3) + lwz 7, 8(3) + lwz 8, 12(3) + lwz 9, 16(3) + lwz 10, 20(3) + lwz 11, 24(3) + lwz 12, 28(3) + lwz 3, 4(1) + # Start of loop processing a block +L_SHA256_transform_len_begin: + # Load W - 64 bytes + lwz 14, 0(3) + lwz 15, 4(3) + lwz 16, 8(3) + lwz 17, 12(3) + lwz 18, 16(3) + lwz 19, 20(3) + lwz 20, 24(3) + lwz 21, 28(3) + lwz 22, 32(3) + lwz 23, 36(3) + lwz 24, 40(3) + lwz 25, 44(3) + lwz 26, 48(3) + lwz 27, 52(3) + lwz 28, 56(3) + lwz 29, 60(3) + li 3, 4 + mtctr 3 + # Start of 16 rounds +L_SHA256_transform_len_start: + # Round 0 + rotlwi 3, 9, 26 + rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 0(6) + add 12, 12, 14 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_0 + # Calc new W[0] + rotlwi 3, 15, 25 + rotlwi 5, 15, 14 + xor 3, 3, 5 + srwi 5, 15, 3 + xor 3, 3, 5 + add 14, 14, 3 + rotlwi 3, 28, 15 + rotlwi 5, 28, 13 + xor 3, 3, 5 + srwi 5, 28, 10 + xor 3, 3, 5 + add 14, 14, 3 + add 14, 14, 23 +L_SHA256_transform_len_after_blk_0: + # Round 1 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 4(6) + add 11, 11, 15 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3 + xor 5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_1 + # Calc new W[1] + rotlwi 3, 16, 25 + rotlwi 5, 16, 14 + xor 3, 3, 5 + srwi 5, 16, 3 + xor 3, 3, 5 + add 15, 15, 3 + rotlwi 3, 29, 15 + rotlwi 5, 29, 13 + xor 3, 3, 5 + srwi 5, 29, 10 + xor 3, 3, 5 + add 15, 15, 3 + add 15, 15, 24 +L_SHA256_transform_len_after_blk_1: + # Round 2 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 8(6) + add 10, 10, 16 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 + rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_2 + # Calc new W[2] + rotlwi 3, 17, 25 + rotlwi 5, 17, 14 + xor 3, 3, 5 + srwi 5, 17, 3 + xor 3, 3, 5 + add 16, 16, 3 + rotlwi 3, 14, 15 + rotlwi 5, 14, 13 + xor 3, 3, 5 + srwi 5, 14, 10 + xor 3, 3, 5 + add 16, 16, 3 + add 16, 16, 
25 +L_SHA256_transform_len_after_blk_2: + # Round 3 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 9, 3 + lwz 3, 12(6) + add 9, 9, 17 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_3 + # Calc new W[3] + rotlwi 3, 18, 25 + rotlwi 5, 18, 14 + xor 3, 3, 5 + srwi 5, 18, 3 + xor 3, 3, 5 + add 17, 17, 3 + rotlwi 3, 15, 15 + rotlwi 5, 15, 13 + xor 3, 3, 5 + srwi 5, 15, 10 + xor 3, 3, 5 + add 17, 17, 3 + add 17, 17, 26 +L_SHA256_transform_len_after_blk_3: + # Round 4 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 16(6) + add 8, 8, 18 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_4 + # Calc new W[4] + rotlwi 3, 19, 25 + rotlwi 5, 19, 14 + xor 3, 3, 5 + srwi 5, 19, 3 + xor 3, 3, 5 + add 18, 18, 3 + rotlwi 3, 16, 15 + rotlwi 5, 16, 13 + xor 3, 3, 5 + srwi 5, 16, 10 + xor 3, 3, 5 + add 18, 18, 3 + add 18, 18, 27 +L_SHA256_transform_len_after_blk_4: + # Round 5 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 20(6) + add 7, 7, 19 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_5 + # Calc new W[5] + rotlwi 3, 20, 25 + rotlwi 5, 20, 14 + xor 3, 3, 5 + srwi 5, 20, 3 + xor 3, 3, 5 + add 19, 19, 3 + rotlwi 3, 17, 15 + rotlwi 5, 17, 13 + xor 3, 3, 5 + srwi 5, 17, 10 + xor 3, 3, 5 + add 19, 19, 3 + add 19, 19, 28 +L_SHA256_transform_len_after_blk_5: + # Round 6 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 24(6) + add 4, 4, 20 + add 4, 4, 3 + add 10, 10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_6 + # Calc new W[6] + rotlwi 3, 21, 25 + rotlwi 5, 21, 14 + xor 3, 3, 5 + srwi 5, 21, 3 + xor 3, 3, 5 + add 20, 20, 3 + rotlwi 3, 18, 15 + rotlwi 5, 18, 13 + xor 3, 3, 5 + srwi 5, 18, 10 + xor 3, 3, 5 + add 20, 20, 3 + add 20, 20, 29 +L_SHA256_transform_len_after_blk_6: + # Round 7 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 3 + xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 28(6) + add 0, 0, 21 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_7 + # Calc new W[7] + rotlwi 3, 22, 25 + rotlwi 5, 22, 14 + xor 3, 3, 5 + srwi 5, 22, 3 + xor 3, 3, 5 + add 21, 21, 3 + 
rotlwi 3, 19, 15 + rotlwi 5, 19, 13 + xor 3, 3, 5 + srwi 5, 19, 10 + xor 3, 3, 5 + add 21, 21, 3 + add 21, 21, 14 +L_SHA256_transform_len_after_blk_7: + # Round 8 + rotlwi 3, 9, 26 + rotlwi 5, 9, 21 + xor 3, 3, 5 + rotlwi 5, 9, 7 + xor 3, 3, 5 + add 12, 12, 3 + xor 3, 10, 11 + and 3, 3, 9 + xor 3, 3, 11 + add 12, 12, 3 + lwz 3, 32(6) + add 12, 12, 22 + add 12, 12, 3 + add 8, 8, 12 + rotlwi 3, 0, 30 + rotlwi 5, 0, 19 + xor 3, 3, 5 + rotlwi 5, 0, 10 + xor 3, 3, 5 + add 12, 12, 3 + xor 5, 0, 4 + xor 3, 4, 7 + and 3, 3, 5 + xor 3, 3, 4 + add 12, 12, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_8 + # Calc new W[8] + rotlwi 3, 23, 25 + rotlwi 5, 23, 14 + xor 3, 3, 5 + srwi 5, 23, 3 + xor 3, 3, 5 + add 22, 22, 3 + rotlwi 3, 20, 15 + rotlwi 5, 20, 13 + xor 3, 3, 5 + srwi 5, 20, 10 + xor 3, 3, 5 + add 22, 22, 3 + add 22, 22, 15 +L_SHA256_transform_len_after_blk_8: + # Round 9 + rotlwi 3, 8, 26 + rotlwi 5, 8, 21 + xor 3, 3, 5 + rotlwi 5, 8, 7 + xor 3, 3, 5 + add 11, 11, 3 + xor 3, 9, 10 + and 3, 3, 8 + xor 3, 3, 10 + add 11, 11, 3 + lwz 3, 36(6) + add 11, 11, 23 + add 11, 11, 3 + add 7, 7, 11 + rotlwi 3, 12, 30 + rotlwi 5, 12, 19 + xor 3, 3, 5 + rotlwi 5, 12, 10 + xor 3, 3, 5 + add 11, 11, 3 + xor 5, 12, 0 + xor 3, 0, 4 + and 3, 3, 5 + xor 3, 3, 0 + add 11, 11, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_9 + # Calc new W[9] + rotlwi 3, 24, 25 + rotlwi 5, 24, 14 + xor 3, 3, 5 + srwi 5, 24, 3 + xor 3, 3, 5 + add 23, 23, 3 + rotlwi 3, 21, 15 + rotlwi 5, 21, 13 + xor 3, 3, 5 + srwi 5, 21, 10 + xor 3, 3, 5 + add 23, 23, 3 + add 23, 23, 16 +L_SHA256_transform_len_after_blk_9: + # Round 10 + rotlwi 3, 7, 26 + rotlwi 5, 7, 21 + xor 3, 3, 5 + rotlwi 5, 7, 7 + xor 3, 3, 5 + add 10, 10, 3 + xor 3, 8, 9 + and 3, 3, 7 + xor 3, 3, 9 + add 10, 10, 3 + lwz 3, 40(6) + add 10, 10, 24 + add 10, 10, 3 + add 4, 4, 10 + rotlwi 3, 11, 30 + rotlwi 5, 11, 19 + xor 3, 3, 5 + rotlwi 5, 11, 10 + xor 3, 3, 5 + add 10, 10, 3 + xor 5, 11, 12 + xor 3, 12, 0 + and 3, 3, 5 + xor 3, 3, 12 + add 10, 10, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_10 + # Calc new W[10] + rotlwi 3, 25, 25 + rotlwi 5, 25, 14 + xor 3, 3, 5 + srwi 5, 25, 3 + xor 3, 3, 5 + add 24, 24, 3 + rotlwi 3, 22, 15 + rotlwi 5, 22, 13 + xor 3, 3, 5 + srwi 5, 22, 10 + xor 3, 3, 5 + add 24, 24, 3 + add 24, 24, 17 +L_SHA256_transform_len_after_blk_10: + # Round 11 + rotlwi 3, 4, 26 + rotlwi 5, 4, 21 + xor 3, 3, 5 + rotlwi 5, 4, 7 + xor 3, 3, 5 + add 9, 9, 3 + xor 3, 7, 8 + and 3, 3, 4 + xor 3, 3, 8 + add 9, 9, 3 + lwz 3, 44(6) + add 9, 9, 25 + add 9, 9, 3 + add 0, 0, 9 + rotlwi 3, 10, 30 + rotlwi 5, 10, 19 + xor 3, 3, 5 + rotlwi 5, 10, 10 + xor 3, 3, 5 + add 9, 9, 3 + xor 5, 10, 11 + xor 3, 11, 12 + and 3, 3, 5 + xor 3, 3, 11 + add 9, 9, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_11 + # Calc new W[11] + rotlwi 3, 26, 25 + rotlwi 5, 26, 14 + xor 3, 3, 5 + srwi 5, 26, 3 + xor 3, 3, 5 + add 25, 25, 3 + rotlwi 3, 23, 15 + rotlwi 5, 23, 13 + xor 3, 3, 5 + srwi 5, 23, 10 + xor 3, 3, 5 + add 25, 25, 3 + add 25, 25, 18 +L_SHA256_transform_len_after_blk_11: + # Round 12 + rotlwi 3, 0, 26 + rotlwi 5, 0, 21 + xor 3, 3, 5 + rotlwi 5, 0, 7 + xor 3, 3, 5 + add 8, 8, 3 + xor 3, 4, 7 + and 3, 3, 0 + xor 3, 3, 7 + add 8, 8, 3 + lwz 3, 48(6) + add 8, 8, 26 + add 8, 8, 3 + add 12, 12, 8 + rotlwi 3, 9, 30 + rotlwi 5, 9, 19 + xor 3, 3, 5 + rotlwi 5, 9, 10 + xor 3, 3, 5 + add 8, 8, 3 + xor 5, 9, 10 + xor 3, 10, 11 + and 3, 3, 5 + xor 3, 3, 10 + add 8, 8, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, 
L_SHA256_transform_len_after_blk_12 + # Calc new W[12] + rotlwi 3, 27, 25 + rotlwi 5, 27, 14 + xor 3, 3, 5 + srwi 5, 27, 3 + xor 3, 3, 5 + add 26, 26, 3 + rotlwi 3, 24, 15 + rotlwi 5, 24, 13 + xor 3, 3, 5 + srwi 5, 24, 10 + xor 3, 3, 5 + add 26, 26, 3 + add 26, 26, 19 +L_SHA256_transform_len_after_blk_12: + # Round 13 + rotlwi 3, 12, 26 + rotlwi 5, 12, 21 + xor 3, 3, 5 + rotlwi 5, 12, 7 + xor 3, 3, 5 + add 7, 7, 3 + xor 3, 0, 4 + and 3, 3, 12 + xor 3, 3, 4 + add 7, 7, 3 + lwz 3, 52(6) + add 7, 7, 27 + add 7, 7, 3 + add 11, 11, 7 + rotlwi 3, 8, 30 + rotlwi 5, 8, 19 + xor 3, 3, 5 + rotlwi 5, 8, 10 + xor 3, 3, 5 + add 7, 7, 3 + xor 5, 8, 9 + xor 3, 9, 10 + and 3, 3, 5 + xor 3, 3, 9 + add 7, 7, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_13 + # Calc new W[13] + rotlwi 3, 28, 25 + rotlwi 5, 28, 14 + xor 3, 3, 5 + srwi 5, 28, 3 + xor 3, 3, 5 + add 27, 27, 3 + rotlwi 3, 25, 15 + rotlwi 5, 25, 13 + xor 3, 3, 5 + srwi 5, 25, 10 + xor 3, 3, 5 + add 27, 27, 3 + add 27, 27, 20 +L_SHA256_transform_len_after_blk_13: + # Round 14 + rotlwi 3, 11, 26 + rotlwi 5, 11, 21 + xor 3, 3, 5 + rotlwi 5, 11, 7 + xor 3, 3, 5 + add 4, 4, 3 + xor 3, 12, 0 + and 3, 3, 11 + xor 3, 3, 0 + add 4, 4, 3 + lwz 3, 56(6) + add 4, 4, 28 + add 4, 4, 3 + add 10, 10, 4 + rotlwi 3, 7, 30 + rotlwi 5, 7, 19 + xor 3, 3, 5 + rotlwi 5, 7, 10 + xor 3, 3, 5 + add 4, 4, 3 + xor 5, 7, 8 + xor 3, 8, 9 + and 3, 3, 5 + xor 3, 3, 8 + add 4, 4, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_14 + # Calc new W[14] + rotlwi 3, 29, 25 + rotlwi 5, 29, 14 + xor 3, 3, 5 + srwi 5, 29, 3 + xor 3, 3, 5 + add 28, 28, 3 + rotlwi 3, 26, 15 + rotlwi 5, 26, 13 + xor 3, 3, 5 + srwi 5, 26, 10 + xor 3, 3, 5 + add 28, 28, 3 + add 28, 28, 21 +L_SHA256_transform_len_after_blk_14: + # Round 15 + rotlwi 3, 10, 26 + rotlwi 5, 10, 21 + xor 3, 3, 5 + rotlwi 5, 10, 7 + xor 3, 3, 5 + add 0, 0, 3 + xor 3, 11, 12 + and 3, 3, 10 + xor 3, 3, 12 + add 0, 0, 3 + lwz 3, 60(6) + add 0, 0, 29 + add 0, 0, 3 + add 9, 9, 0 + rotlwi 3, 4, 30 + rotlwi 5, 4, 19 + xor 3, 3, 5 + rotlwi 5, 4, 10 + xor 3, 3, 5 + add 0, 0, 3 + xor 5, 4, 7 + xor 3, 7, 8 + and 3, 3, 5 + xor 3, 3, 7 + add 0, 0, 3 + mfctr 5 + cmpwi 3, 5, 1 + beq 3, L_SHA256_transform_len_after_blk_15 + # Calc new W[15] + rotlwi 3, 14, 25 + rotlwi 5, 14, 14 + xor 3, 3, 5 + srwi 5, 14, 3 + xor 3, 3, 5 + add 29, 29, 3 + rotlwi 3, 27, 15 + rotlwi 5, 27, 13 + xor 3, 3, 5 + srwi 5, 27, 10 + xor 3, 3, 5 + add 29, 29, 3 + add 29, 29, 22 +L_SHA256_transform_len_after_blk_15: + addi 6, 6, 0x40 + bdnz L_SHA256_transform_len_start + subi 6, 6, 0x100 + lwz 3, 0(1) + # Add in digest from start + lwz 5, 0(3) + add 0, 0, 5 + lwz 5, 4(3) + add 4, 4, 5 + lwz 5, 8(3) + add 7, 7, 5 + lwz 5, 12(3) + add 8, 8, 5 + lwz 5, 16(3) + add 9, 9, 5 + lwz 5, 20(3) + add 10, 10, 5 + lwz 5, 24(3) + add 11, 11, 5 + lwz 5, 28(3) + add 12, 12, 5 + stw 0, 0(3) + stw 4, 4(3) + stw 7, 8(3) + stw 8, 12(3) + stw 9, 16(3) + stw 10, 20(3) + stw 11, 24(3) + stw 12, 28(3) + lwz 3, 4(1) + lwz 5, 8(1) + mtctr 5 + subi 5, 5, 1 + addi 3, 3, 0x40 + stw 3, 4(1) + stw 5, 8(1) + bdnz L_SHA256_transform_len_begin + addi 1, 1, 12 +#endif /* WOLFSSL_PPC32_ASM_SMALL */ + lwz 0, 64(1) + mtlr 0 + lwz 14, 0(1) + lwz 15, 4(1) + lwz 16, 8(1) + lwz 17, 12(1) + lwz 18, 16(1) + lwz 19, 20(1) + lwz 20, 24(1) + lwz 21, 28(1) + lwz 22, 32(1) + lwz 23, 36(1) + lwz 24, 40(1) + lwz 25, 44(1) + lwz 26, 48(1) + lwz 27, 52(1) + lwz 28, 56(1) + lwz 29, 60(1) + addi 1, 1, 0x44 + blr + .size Transform_Sha256_Len,.-Transform_Sha256_Len +#endif /* __PIC__ */ 
#endif /* !WOLFSSL_PPC32_ASM_SPE */ #endif /* !NO_SHA256 */ diff --git a/wolfcrypt/src/port/ppc32/ppc32-sha256-asm_c.c b/wolfcrypt/src/port/ppc32/ppc32-sha256-asm_c.c index 343e5fa5a..37a8a7237 100644 --- a/wolfcrypt/src/port/ppc32/ppc32-sha256-asm_c.c +++ b/wolfcrypt/src/port/ppc32/ppc32-sha256-asm_c.c @@ -1197,6 +1197,7 @@ static const word32 L_SHA256_transform_len_k[] = { 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2, }; +#ifndef __PIC__ void Transform_Sha256_Len(wc_Sha256* sha256_p, const byte* data_p, word32 len_p); #ifndef WOLFSSL_NO_VAR_ASSIGN_REG @@ -4418,6 +4419,3253 @@ void Transform_Sha256_Len(wc_Sha256* sha256, const byte* data, word32 len) ); } +#else +/* PIC version not using register 30 or 31 */ +void Transform_Sha256_Len(wc_Sha256* sha256_p, const byte* data_p, + word32 len_p); +#ifndef WOLFSSL_NO_VAR_ASSIGN_REG +void Transform_Sha256_Len(wc_Sha256* sha256_p, const byte* data_p, word32 len_p) +#else +void Transform_Sha256_Len(wc_Sha256* sha256, const byte* data, word32 len) +#endif /* WOLFSSL_NO_VAR_ASSIGN_REG */ +{ +#ifndef WOLFSSL_NO_VAR_ASSIGN_REG + register wc_Sha256* sha256 asm ("3") = (wc_Sha256*)sha256_p; + register const byte* data asm ("4") = (const byte*)data_p; + register word32 len asm ("5") = (word32)len_p; + register word32* L_SHA256_transform_len_k_c asm ("6") = + (word32*)&L_SHA256_transform_len_k; +#else + register word32* L_SHA256_transform_len_k_c = + (word32*)&L_SHA256_transform_len_k; + +#endif /* !WOLFSSL_NO_VAR_ASSIGN_REG */ + + __asm__ __volatile__ ( + "srwi %[len], %[len], 6\n\t" + "mr 6, %[L_SHA256_transform_len_k]\n\t" +#ifndef WOLFSSL_PPC32_ASM_SMALL + "subi 1, 1, 8\n\t" + "stw %[sha256], 0(1)\n\t" + "stw %[data], 4(1)\n\t" + "mtctr %[len]\n\t" + /* Copy digest to add in at end */ + "lwz 0, 0(%[sha256])\n\t" + "lwz %[data], 4(%[sha256])\n\t" + "lwz 7, 8(%[sha256])\n\t" + "lwz 8, 12(%[sha256])\n\t" + "lwz 9, 16(%[sha256])\n\t" + "lwz 10, 20(%[sha256])\n\t" + "lwz 11, 24(%[sha256])\n\t" + "lwz 12, 28(%[sha256])\n\t" + "lwz %[sha256], 4(1)\n\t" + /* Start of loop processing a block */ + "\n" + "L_SHA256_transform_len_begin_%=: \n\t" + /* Load W - 64 bytes */ + "lwz 14, 0(%[sha256])\n\t" + "lwz 15, 4(%[sha256])\n\t" + "lwz 16, 8(%[sha256])\n\t" + "lwz 17, 12(%[sha256])\n\t" + "lwz 18, 16(%[sha256])\n\t" + "lwz 19, 20(%[sha256])\n\t" + "lwz 20, 24(%[sha256])\n\t" + "lwz 21, 28(%[sha256])\n\t" + "lwz 22, 32(%[sha256])\n\t" + "lwz 23, 36(%[sha256])\n\t" + "lwz 24, 40(%[sha256])\n\t" + "lwz 25, 44(%[sha256])\n\t" + "lwz 26, 48(%[sha256])\n\t" + "lwz 27, 52(%[sha256])\n\t" + "lwz 28, 56(%[sha256])\n\t" + "lwz 29, 60(%[sha256])\n\t" + /* Start of 16 rounds */ + /* Round 0 */ + "rotlwi %[sha256], 9, 26\n\t" + "rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 0(6)\n\t" + "add 12, 12, 14\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + /* Calc new W[0] */ + "rotlwi %[sha256], 15, 25\n\t" + "rotlwi %[len], 15, 
14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 15, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 14, 14, %[sha256]\n\t" + "rotlwi %[sha256], 28, 15\n\t" + "rotlwi %[len], 28, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 28, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 14, 14, %[sha256]\n\t" + "add 14, 14, 23\n\t" + /* Round 1 */ + "rotlwi %[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 4(6)\n\t" + "add 11, 11, 15\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + "rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + /* Calc new W[1] */ + "rotlwi %[sha256], 16, 25\n\t" + "rotlwi %[len], 16, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 16, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 15, 15, %[sha256]\n\t" + "rotlwi %[sha256], 29, 15\n\t" + "rotlwi %[len], 29, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 29, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 15, 15, %[sha256]\n\t" + "add 15, 15, 24\n\t" + /* Round 2 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 8(6)\n\t" + "add 10, 10, 16\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + /* Calc new W[2] */ + "rotlwi %[sha256], 17, 25\n\t" + "rotlwi %[len], 17, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 17, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 16, 16, %[sha256]\n\t" + "rotlwi %[sha256], 14, 15\n\t" + "rotlwi %[len], 14, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 14, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 16, 16, %[sha256]\n\t" + "add 16, 16, 25\n\t" + /* Round 3 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 12(6)\n\t" + "add 9, 9, 17\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], 
%[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + /* Calc new W[3] */ + "rotlwi %[sha256], 18, 25\n\t" + "rotlwi %[len], 18, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 18, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 17, 17, %[sha256]\n\t" + "rotlwi %[sha256], 15, 15\n\t" + "rotlwi %[len], 15, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 15, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 17, 17, %[sha256]\n\t" + "add 17, 17, 26\n\t" + /* Round 4 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 16(6)\n\t" + "add 8, 8, 18\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + "rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + /* Calc new W[4] */ + "rotlwi %[sha256], 19, 25\n\t" + "rotlwi %[len], 19, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 19, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 18, 18, %[sha256]\n\t" + "rotlwi %[sha256], 16, 15\n\t" + "rotlwi %[len], 16, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 16, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 18, 18, %[sha256]\n\t" + "add 18, 18, 27\n\t" + /* Round 5 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 20(6)\n\t" + "add 7, 7, 19\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + /* Calc new W[5] */ + "rotlwi %[sha256], 20, 25\n\t" + "rotlwi %[len], 20, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 20, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 19, 19, %[sha256]\n\t" + "rotlwi %[sha256], 17, 15\n\t" + "rotlwi %[len], 17, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 17, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 19, 19, %[sha256]\n\t" + "add 19, 19, 28\n\t" + /* Round 6 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" 
+ "lwz %[sha256], 24(6)\n\t" + "add %[data], %[data], 20\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Calc new W[6] */ + "rotlwi %[sha256], 21, 25\n\t" + "rotlwi %[len], 21, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 21, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 20, 20, %[sha256]\n\t" + "rotlwi %[sha256], 18, 15\n\t" + "rotlwi %[len], 18, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 18, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 20, 20, %[sha256]\n\t" + "add 20, 20, 29\n\t" + /* Round 7 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 28(6)\n\t" + "add 0, 0, 21\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + /* Calc new W[7] */ + "rotlwi %[sha256], 22, 25\n\t" + "rotlwi %[len], 22, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 22, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 21, 21, %[sha256]\n\t" + "rotlwi %[sha256], 19, 15\n\t" + "rotlwi %[len], 19, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 19, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 21, 21, %[sha256]\n\t" + "add 21, 21, 14\n\t" + /* Round 8 */ + "rotlwi %[sha256], 9, 26\n\t" + "rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 32(6)\n\t" + "add 12, 12, 22\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + /* Calc new W[8] */ + "rotlwi %[sha256], 23, 25\n\t" + "rotlwi %[len], 23, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 23, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 22, 22, %[sha256]\n\t" + "rotlwi %[sha256], 20, 15\n\t" + "rotlwi %[len], 20, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 20, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 22, 22, %[sha256]\n\t" + "add 22, 22, 15\n\t" + /* Round 9 */ + "rotlwi 
%[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 36(6)\n\t" + "add 11, 11, 23\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + "rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + /* Calc new W[9] */ + "rotlwi %[sha256], 24, 25\n\t" + "rotlwi %[len], 24, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 24, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 23, 23, %[sha256]\n\t" + "rotlwi %[sha256], 21, 15\n\t" + "rotlwi %[len], 21, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 21, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 23, 23, %[sha256]\n\t" + "add 23, 23, 16\n\t" + /* Round 10 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 40(6)\n\t" + "add 10, 10, 24\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + /* Calc new W[10] */ + "rotlwi %[sha256], 25, 25\n\t" + "rotlwi %[len], 25, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 25, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 24, 24, %[sha256]\n\t" + "rotlwi %[sha256], 22, 15\n\t" + "rotlwi %[len], 22, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 22, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 24, 24, %[sha256]\n\t" + "add 24, 24, 17\n\t" + /* Round 11 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 44(6)\n\t" + "add 9, 9, 25\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + /* Calc new W[11] */ + "rotlwi %[sha256], 26, 25\n\t" + "rotlwi %[len], 26, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 26, 3\n\t" + "xor %[sha256], %[sha256], 
%[len]\n\t" + "add 25, 25, %[sha256]\n\t" + "rotlwi %[sha256], 23, 15\n\t" + "rotlwi %[len], 23, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 23, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 25, 25, %[sha256]\n\t" + "add 25, 25, 18\n\t" + /* Round 12 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 48(6)\n\t" + "add 8, 8, 26\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + "rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + /* Calc new W[12] */ + "rotlwi %[sha256], 27, 25\n\t" + "rotlwi %[len], 27, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 27, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 26, 26, %[sha256]\n\t" + "rotlwi %[sha256], 24, 15\n\t" + "rotlwi %[len], 24, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 24, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 26, 26, %[sha256]\n\t" + "add 26, 26, 19\n\t" + /* Round 13 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 52(6)\n\t" + "add 7, 7, 27\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + /* Calc new W[13] */ + "rotlwi %[sha256], 28, 25\n\t" + "rotlwi %[len], 28, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 28, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 27, 27, %[sha256]\n\t" + "rotlwi %[sha256], 25, 15\n\t" + "rotlwi %[len], 25, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 25, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 27, 27, %[sha256]\n\t" + "add 27, 27, 20\n\t" + /* Round 14 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 56(6)\n\t" + "add %[data], %[data], 28\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor 
%[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Calc new W[14] */ + "rotlwi %[sha256], 29, 25\n\t" + "rotlwi %[len], 29, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 29, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 28, 28, %[sha256]\n\t" + "rotlwi %[sha256], 26, 15\n\t" + "rotlwi %[len], 26, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 26, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 28, 28, %[sha256]\n\t" + "add 28, 28, 21\n\t" + /* Round 15 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 60(6)\n\t" + "add 0, 0, 29\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + /* Calc new W[15] */ + "rotlwi %[sha256], 14, 25\n\t" + "rotlwi %[len], 14, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 14, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 29, 29, %[sha256]\n\t" + "rotlwi %[sha256], 27, 15\n\t" + "rotlwi %[len], 27, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 27, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 29, 29, %[sha256]\n\t" + "add 29, 29, 22\n\t" + "addi 6, 6, 0x40\n\t" + /* Round 0 */ + "rotlwi %[sha256], 9, 26\n\t" + "rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 0(6)\n\t" + "add 12, 12, 14\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + /* Calc new W[0] */ + "rotlwi %[sha256], 15, 25\n\t" + "rotlwi %[len], 15, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 15, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 14, 14, %[sha256]\n\t" + "rotlwi %[sha256], 28, 15\n\t" + "rotlwi %[len], 28, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 28, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 14, 14, %[sha256]\n\t" + "add 14, 14, 23\n\t" + /* Round 1 */ + "rotlwi %[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 4(6)\n\t" 
+ "add 11, 11, 15\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + "rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + /* Calc new W[1] */ + "rotlwi %[sha256], 16, 25\n\t" + "rotlwi %[len], 16, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 16, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 15, 15, %[sha256]\n\t" + "rotlwi %[sha256], 29, 15\n\t" + "rotlwi %[len], 29, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 29, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 15, 15, %[sha256]\n\t" + "add 15, 15, 24\n\t" + /* Round 2 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 8(6)\n\t" + "add 10, 10, 16\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + /* Calc new W[2] */ + "rotlwi %[sha256], 17, 25\n\t" + "rotlwi %[len], 17, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 17, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 16, 16, %[sha256]\n\t" + "rotlwi %[sha256], 14, 15\n\t" + "rotlwi %[len], 14, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 14, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 16, 16, %[sha256]\n\t" + "add 16, 16, 25\n\t" + /* Round 3 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 12(6)\n\t" + "add 9, 9, 17\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + /* Calc new W[3] */ + "rotlwi %[sha256], 18, 25\n\t" + "rotlwi %[len], 18, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 18, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 17, 17, %[sha256]\n\t" + "rotlwi %[sha256], 15, 15\n\t" + "rotlwi %[len], 15, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 15, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 17, 17, %[sha256]\n\t" + "add 17, 17, 26\n\t" + /* Round 4 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], 
%[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 16(6)\n\t" + "add 8, 8, 18\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + "rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + /* Calc new W[4] */ + "rotlwi %[sha256], 19, 25\n\t" + "rotlwi %[len], 19, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 19, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 18, 18, %[sha256]\n\t" + "rotlwi %[sha256], 16, 15\n\t" + "rotlwi %[len], 16, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 16, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 18, 18, %[sha256]\n\t" + "add 18, 18, 27\n\t" + /* Round 5 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 20(6)\n\t" + "add 7, 7, 19\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + /* Calc new W[5] */ + "rotlwi %[sha256], 20, 25\n\t" + "rotlwi %[len], 20, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 20, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 19, 19, %[sha256]\n\t" + "rotlwi %[sha256], 17, 15\n\t" + "rotlwi %[len], 17, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 17, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 19, 19, %[sha256]\n\t" + "add 19, 19, 28\n\t" + /* Round 6 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 24(6)\n\t" + "add %[data], %[data], 20\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Calc new W[6] */ + "rotlwi %[sha256], 21, 25\n\t" + "rotlwi %[len], 21, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 21, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 20, 20, %[sha256]\n\t" + "rotlwi 
%[sha256], 18, 15\n\t" + "rotlwi %[len], 18, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 18, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 20, 20, %[sha256]\n\t" + "add 20, 20, 29\n\t" + /* Round 7 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 28(6)\n\t" + "add 0, 0, 21\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + /* Calc new W[7] */ + "rotlwi %[sha256], 22, 25\n\t" + "rotlwi %[len], 22, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 22, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 21, 21, %[sha256]\n\t" + "rotlwi %[sha256], 19, 15\n\t" + "rotlwi %[len], 19, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 19, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 21, 21, %[sha256]\n\t" + "add 21, 21, 14\n\t" + /* Round 8 */ + "rotlwi %[sha256], 9, 26\n\t" + "rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 32(6)\n\t" + "add 12, 12, 22\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + /* Calc new W[8] */ + "rotlwi %[sha256], 23, 25\n\t" + "rotlwi %[len], 23, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 23, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 22, 22, %[sha256]\n\t" + "rotlwi %[sha256], 20, 15\n\t" + "rotlwi %[len], 20, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 20, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 22, 22, %[sha256]\n\t" + "add 22, 22, 15\n\t" + /* Round 9 */ + "rotlwi %[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 36(6)\n\t" + "add 11, 11, 23\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + "rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], 
%[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + /* Calc new W[9] */ + "rotlwi %[sha256], 24, 25\n\t" + "rotlwi %[len], 24, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 24, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 23, 23, %[sha256]\n\t" + "rotlwi %[sha256], 21, 15\n\t" + "rotlwi %[len], 21, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 21, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 23, 23, %[sha256]\n\t" + "add 23, 23, 16\n\t" + /* Round 10 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 40(6)\n\t" + "add 10, 10, 24\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + /* Calc new W[10] */ + "rotlwi %[sha256], 25, 25\n\t" + "rotlwi %[len], 25, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 25, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 24, 24, %[sha256]\n\t" + "rotlwi %[sha256], 22, 15\n\t" + "rotlwi %[len], 22, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 22, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 24, 24, %[sha256]\n\t" + "add 24, 24, 17\n\t" + /* Round 11 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 44(6)\n\t" + "add 9, 9, 25\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + /* Calc new W[11] */ + "rotlwi %[sha256], 26, 25\n\t" + "rotlwi %[len], 26, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 26, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 25, 25, %[sha256]\n\t" + "rotlwi %[sha256], 23, 15\n\t" + "rotlwi %[len], 23, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 23, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 25, 25, %[sha256]\n\t" + "add 25, 25, 18\n\t" + /* Round 12 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 48(6)\n\t" + "add 8, 8, 26\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + 
"rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + /* Calc new W[12] */ + "rotlwi %[sha256], 27, 25\n\t" + "rotlwi %[len], 27, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 27, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 26, 26, %[sha256]\n\t" + "rotlwi %[sha256], 24, 15\n\t" + "rotlwi %[len], 24, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 24, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 26, 26, %[sha256]\n\t" + "add 26, 26, 19\n\t" + /* Round 13 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 52(6)\n\t" + "add 7, 7, 27\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + /* Calc new W[13] */ + "rotlwi %[sha256], 28, 25\n\t" + "rotlwi %[len], 28, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 28, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 27, 27, %[sha256]\n\t" + "rotlwi %[sha256], 25, 15\n\t" + "rotlwi %[len], 25, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 25, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 27, 27, %[sha256]\n\t" + "add 27, 27, 20\n\t" + /* Round 14 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 56(6)\n\t" + "add %[data], %[data], 28\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Calc new W[14] */ + "rotlwi %[sha256], 29, 25\n\t" + "rotlwi %[len], 29, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 29, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 28, 28, %[sha256]\n\t" + "rotlwi %[sha256], 26, 15\n\t" + "rotlwi %[len], 26, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 26, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 28, 28, %[sha256]\n\t" + "add 28, 28, 21\n\t" + /* Round 15 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], 
%[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 60(6)\n\t" + "add 0, 0, 29\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + /* Calc new W[15] */ + "rotlwi %[sha256], 14, 25\n\t" + "rotlwi %[len], 14, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 14, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 29, 29, %[sha256]\n\t" + "rotlwi %[sha256], 27, 15\n\t" + "rotlwi %[len], 27, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 27, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 29, 29, %[sha256]\n\t" + "add 29, 29, 22\n\t" + "addi 6, 6, 0x40\n\t" + /* Round 0 */ + "rotlwi %[sha256], 9, 26\n\t" + "rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 0(6)\n\t" + "add 12, 12, 14\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + /* Calc new W[0] */ + "rotlwi %[sha256], 15, 25\n\t" + "rotlwi %[len], 15, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 15, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 14, 14, %[sha256]\n\t" + "rotlwi %[sha256], 28, 15\n\t" + "rotlwi %[len], 28, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 28, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 14, 14, %[sha256]\n\t" + "add 14, 14, 23\n\t" + /* Round 1 */ + "rotlwi %[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 4(6)\n\t" + "add 11, 11, 15\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + "rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + /* Calc new W[1] */ + "rotlwi %[sha256], 16, 25\n\t" + "rotlwi %[len], 16, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 16, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 15, 15, %[sha256]\n\t" + "rotlwi %[sha256], 29, 15\n\t" + "rotlwi %[len], 29, 13\n\t" + "xor %[sha256], 
%[sha256], %[len]\n\t" + "srwi %[len], 29, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 15, 15, %[sha256]\n\t" + "add 15, 15, 24\n\t" + /* Round 2 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 8(6)\n\t" + "add 10, 10, 16\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + /* Calc new W[2] */ + "rotlwi %[sha256], 17, 25\n\t" + "rotlwi %[len], 17, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 17, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 16, 16, %[sha256]\n\t" + "rotlwi %[sha256], 14, 15\n\t" + "rotlwi %[len], 14, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 14, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 16, 16, %[sha256]\n\t" + "add 16, 16, 25\n\t" + /* Round 3 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 12(6)\n\t" + "add 9, 9, 17\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + /* Calc new W[3] */ + "rotlwi %[sha256], 18, 25\n\t" + "rotlwi %[len], 18, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 18, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 17, 17, %[sha256]\n\t" + "rotlwi %[sha256], 15, 15\n\t" + "rotlwi %[len], 15, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 15, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 17, 17, %[sha256]\n\t" + "add 17, 17, 26\n\t" + /* Round 4 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 16(6)\n\t" + "add 8, 8, 18\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + "rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + /* Calc new W[4] */ + "rotlwi 
%[sha256], 19, 25\n\t" + "rotlwi %[len], 19, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 19, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 18, 18, %[sha256]\n\t" + "rotlwi %[sha256], 16, 15\n\t" + "rotlwi %[len], 16, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 16, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 18, 18, %[sha256]\n\t" + "add 18, 18, 27\n\t" + /* Round 5 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 20(6)\n\t" + "add 7, 7, 19\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + /* Calc new W[5] */ + "rotlwi %[sha256], 20, 25\n\t" + "rotlwi %[len], 20, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 20, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 19, 19, %[sha256]\n\t" + "rotlwi %[sha256], 17, 15\n\t" + "rotlwi %[len], 17, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 17, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 19, 19, %[sha256]\n\t" + "add 19, 19, 28\n\t" + /* Round 6 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 24(6)\n\t" + "add %[data], %[data], 20\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Calc new W[6] */ + "rotlwi %[sha256], 21, 25\n\t" + "rotlwi %[len], 21, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 21, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 20, 20, %[sha256]\n\t" + "rotlwi %[sha256], 18, 15\n\t" + "rotlwi %[len], 18, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 18, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 20, 20, %[sha256]\n\t" + "add 20, 20, 29\n\t" + /* Round 7 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 28(6)\n\t" + "add 0, 0, 21\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor 
%[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + /* Calc new W[7] */ + "rotlwi %[sha256], 22, 25\n\t" + "rotlwi %[len], 22, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 22, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 21, 21, %[sha256]\n\t" + "rotlwi %[sha256], 19, 15\n\t" + "rotlwi %[len], 19, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 19, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 21, 21, %[sha256]\n\t" + "add 21, 21, 14\n\t" + /* Round 8 */ + "rotlwi %[sha256], 9, 26\n\t" + "rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 32(6)\n\t" + "add 12, 12, 22\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + /* Calc new W[8] */ + "rotlwi %[sha256], 23, 25\n\t" + "rotlwi %[len], 23, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 23, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 22, 22, %[sha256]\n\t" + "rotlwi %[sha256], 20, 15\n\t" + "rotlwi %[len], 20, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 20, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 22, 22, %[sha256]\n\t" + "add 22, 22, 15\n\t" + /* Round 9 */ + "rotlwi %[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 36(6)\n\t" + "add 11, 11, 23\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + "rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + /* Calc new W[9] */ + "rotlwi %[sha256], 24, 25\n\t" + "rotlwi %[len], 24, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 24, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 23, 23, %[sha256]\n\t" + "rotlwi %[sha256], 21, 15\n\t" + "rotlwi %[len], 21, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 21, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 23, 23, %[sha256]\n\t" + "add 23, 23, 16\n\t" + /* Round 10 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" 
+ "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 40(6)\n\t" + "add 10, 10, 24\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + /* Calc new W[10] */ + "rotlwi %[sha256], 25, 25\n\t" + "rotlwi %[len], 25, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 25, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 24, 24, %[sha256]\n\t" + "rotlwi %[sha256], 22, 15\n\t" + "rotlwi %[len], 22, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 22, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 24, 24, %[sha256]\n\t" + "add 24, 24, 17\n\t" + /* Round 11 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 44(6)\n\t" + "add 9, 9, 25\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + /* Calc new W[11] */ + "rotlwi %[sha256], 26, 25\n\t" + "rotlwi %[len], 26, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 26, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 25, 25, %[sha256]\n\t" + "rotlwi %[sha256], 23, 15\n\t" + "rotlwi %[len], 23, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 23, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 25, 25, %[sha256]\n\t" + "add 25, 25, 18\n\t" + /* Round 12 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 48(6)\n\t" + "add 8, 8, 26\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + "rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + /* Calc new W[12] */ + "rotlwi %[sha256], 27, 25\n\t" + "rotlwi %[len], 27, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 27, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 26, 26, %[sha256]\n\t" + "rotlwi %[sha256], 24, 15\n\t" + "rotlwi %[len], 24, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 24, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 26, 26, 
%[sha256]\n\t" + "add 26, 26, 19\n\t" + /* Round 13 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 52(6)\n\t" + "add 7, 7, 27\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + /* Calc new W[13] */ + "rotlwi %[sha256], 28, 25\n\t" + "rotlwi %[len], 28, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 28, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 27, 27, %[sha256]\n\t" + "rotlwi %[sha256], 25, 15\n\t" + "rotlwi %[len], 25, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 25, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 27, 27, %[sha256]\n\t" + "add 27, 27, 20\n\t" + /* Round 14 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 56(6)\n\t" + "add %[data], %[data], 28\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Calc new W[14] */ + "rotlwi %[sha256], 29, 25\n\t" + "rotlwi %[len], 29, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 29, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 28, 28, %[sha256]\n\t" + "rotlwi %[sha256], 26, 15\n\t" + "rotlwi %[len], 26, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 26, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 28, 28, %[sha256]\n\t" + "add 28, 28, 21\n\t" + /* Round 15 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 60(6)\n\t" + "add 0, 0, 29\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + /* Calc new W[15] */ + "rotlwi %[sha256], 14, 25\n\t" + "rotlwi %[len], 14, 
14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 14, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 29, 29, %[sha256]\n\t" + "rotlwi %[sha256], 27, 15\n\t" + "rotlwi %[len], 27, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 27, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 29, 29, %[sha256]\n\t" + "add 29, 29, 22\n\t" + "addi 6, 6, 0x40\n\t" + /* Round 0 */ + "rotlwi %[sha256], 9, 26\n\t" + "rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 0(6)\n\t" + "add 12, 12, 14\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + /* Round 1 */ + "rotlwi %[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 4(6)\n\t" + "add 11, 11, 15\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + "rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + /* Round 2 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 8(6)\n\t" + "add 10, 10, 16\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + /* Round 3 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 12(6)\n\t" + "add 9, 9, 17\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + 
"xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + /* Round 4 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 16(6)\n\t" + "add 8, 8, 18\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + "rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + /* Round 5 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 20(6)\n\t" + "add 7, 7, 19\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + /* Round 6 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 24(6)\n\t" + "add %[data], %[data], 20\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Round 7 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 28(6)\n\t" + "add 0, 0, 21\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + /* Round 8 */ + "rotlwi %[sha256], 9, 26\n\t" + 
"rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 32(6)\n\t" + "add 12, 12, 22\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + /* Round 9 */ + "rotlwi %[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 36(6)\n\t" + "add 11, 11, 23\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + "rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + /* Round 10 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 40(6)\n\t" + "add 10, 10, 24\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + /* Round 11 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 44(6)\n\t" + "add 9, 9, 25\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + /* Round 12 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 
0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 48(6)\n\t" + "add 8, 8, 26\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + "rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + /* Round 13 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 52(6)\n\t" + "add 7, 7, 27\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + /* Round 14 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 56(6)\n\t" + "add %[data], %[data], 28\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + /* Round 15 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 60(6)\n\t" + "add 0, 0, 29\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + "subi 6, 6, 0xc0\n\t" + "lwz %[sha256], 0(1)\n\t" + /* Add in digest from start */ + "lwz %[len], 0(%[sha256])\n\t" + "add 0, 0, %[len]\n\t" + "lwz %[len], 4(%[sha256])\n\t" + "add %[data], %[data], %[len]\n\t" + "lwz %[len], 8(%[sha256])\n\t" + "add 7, 7, %[len]\n\t" + "lwz %[len], 12(%[sha256])\n\t" + "add 8, 8, %[len]\n\t" + "lwz %[len], 16(%[sha256])\n\t" + "add 9, 9, %[len]\n\t" + "lwz %[len], 20(%[sha256])\n\t" + "add 10, 10, %[len]\n\t" + "lwz %[len], 24(%[sha256])\n\t" + 
"add 11, 11, %[len]\n\t" + "lwz %[len], 28(%[sha256])\n\t" + "add 12, 12, %[len]\n\t" + "stw 0, 0(%[sha256])\n\t" + "stw %[data], 4(%[sha256])\n\t" + "stw 7, 8(%[sha256])\n\t" + "stw 8, 12(%[sha256])\n\t" + "stw 9, 16(%[sha256])\n\t" + "stw 10, 20(%[sha256])\n\t" + "stw 11, 24(%[sha256])\n\t" + "stw 12, 28(%[sha256])\n\t" + "lwz %[sha256], 4(1)\n\t" + "addi %[sha256], %[sha256], 0x40\n\t" + "stw %[sha256], 4(1)\n\t" + "bdnz L_SHA256_transform_len_begin_%=\n\t" + "addi 1, 1, 8\n\t" +#else + "subi 1, 1, 12\n\t" + "stw %[sha256], 0(1)\n\t" + "stw %[data], 4(1)\n\t" + "stw %[len], 8(1)\n\t" + /* Copy digest to add in at end */ + "lwz 0, 0(%[sha256])\n\t" + "lwz %[data], 4(%[sha256])\n\t" + "lwz 7, 8(%[sha256])\n\t" + "lwz 8, 12(%[sha256])\n\t" + "lwz 9, 16(%[sha256])\n\t" + "lwz 10, 20(%[sha256])\n\t" + "lwz 11, 24(%[sha256])\n\t" + "lwz 12, 28(%[sha256])\n\t" + "lwz %[sha256], 4(1)\n\t" + /* Start of loop processing a block */ + "\n" + "L_SHA256_transform_len_begin_%=: \n\t" + /* Load W - 64 bytes */ + "lwz 14, 0(%[sha256])\n\t" + "lwz 15, 4(%[sha256])\n\t" + "lwz 16, 8(%[sha256])\n\t" + "lwz 17, 12(%[sha256])\n\t" + "lwz 18, 16(%[sha256])\n\t" + "lwz 19, 20(%[sha256])\n\t" + "lwz 20, 24(%[sha256])\n\t" + "lwz 21, 28(%[sha256])\n\t" + "lwz 22, 32(%[sha256])\n\t" + "lwz 23, 36(%[sha256])\n\t" + "lwz 24, 40(%[sha256])\n\t" + "lwz 25, 44(%[sha256])\n\t" + "lwz 26, 48(%[sha256])\n\t" + "lwz 27, 52(%[sha256])\n\t" + "lwz 28, 56(%[sha256])\n\t" + "lwz 29, 60(%[sha256])\n\t" + "li %[sha256], 4\n\t" + "mtctr %[sha256]\n\t" + /* Start of 16 rounds */ + "\n" + "L_SHA256_transform_len_start_%=: \n\t" + /* Round 0 */ + "rotlwi %[sha256], 9, 26\n\t" + "rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 0(6)\n\t" + "add 12, 12, 14\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_0_%=\n\t" + /* Calc new W[0] */ + "rotlwi %[sha256], 15, 25\n\t" + "rotlwi %[len], 15, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 15, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 14, 14, %[sha256]\n\t" + "rotlwi %[sha256], 28, 15\n\t" + "rotlwi %[len], 28, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 28, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 14, 14, %[sha256]\n\t" + "add 14, 14, 23\n\t" + "\n" + "L_SHA256_transform_len_after_blk_0_%=: \n\t" + /* Round 1 */ + "rotlwi %[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 4(6)\n\t" + "add 11, 11, 15\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + 
"rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_1_%=\n\t" + /* Calc new W[1] */ + "rotlwi %[sha256], 16, 25\n\t" + "rotlwi %[len], 16, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 16, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 15, 15, %[sha256]\n\t" + "rotlwi %[sha256], 29, 15\n\t" + "rotlwi %[len], 29, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 29, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 15, 15, %[sha256]\n\t" + "add 15, 15, 24\n\t" + "\n" + "L_SHA256_transform_len_after_blk_1_%=: \n\t" + /* Round 2 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 8(6)\n\t" + "add 10, 10, 16\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_2_%=\n\t" + /* Calc new W[2] */ + "rotlwi %[sha256], 17, 25\n\t" + "rotlwi %[len], 17, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 17, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 16, 16, %[sha256]\n\t" + "rotlwi %[sha256], 14, 15\n\t" + "rotlwi %[len], 14, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 14, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 16, 16, %[sha256]\n\t" + "add 16, 16, 25\n\t" + "\n" + "L_SHA256_transform_len_after_blk_2_%=: \n\t" + /* Round 3 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 12(6)\n\t" + "add 9, 9, 17\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_3_%=\n\t" + /* Calc new W[3] */ + "rotlwi %[sha256], 18, 25\n\t" + "rotlwi %[len], 18, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 18, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + 
"add 17, 17, %[sha256]\n\t" + "rotlwi %[sha256], 15, 15\n\t" + "rotlwi %[len], 15, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 15, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 17, 17, %[sha256]\n\t" + "add 17, 17, 26\n\t" + "\n" + "L_SHA256_transform_len_after_blk_3_%=: \n\t" + /* Round 4 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 16(6)\n\t" + "add 8, 8, 18\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + "rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_4_%=\n\t" + /* Calc new W[4] */ + "rotlwi %[sha256], 19, 25\n\t" + "rotlwi %[len], 19, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 19, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 18, 18, %[sha256]\n\t" + "rotlwi %[sha256], 16, 15\n\t" + "rotlwi %[len], 16, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 16, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 18, 18, %[sha256]\n\t" + "add 18, 18, 27\n\t" + "\n" + "L_SHA256_transform_len_after_blk_4_%=: \n\t" + /* Round 5 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 20(6)\n\t" + "add 7, 7, 19\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_5_%=\n\t" + /* Calc new W[5] */ + "rotlwi %[sha256], 20, 25\n\t" + "rotlwi %[len], 20, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 20, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 19, 19, %[sha256]\n\t" + "rotlwi %[sha256], 17, 15\n\t" + "rotlwi %[len], 17, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 17, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 19, 19, %[sha256]\n\t" + "add 19, 19, 28\n\t" + "\n" + "L_SHA256_transform_len_after_blk_5_%=: \n\t" + /* Round 6 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" 
+ "lwz %[sha256], 24(6)\n\t" + "add %[data], %[data], 20\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_6_%=\n\t" + /* Calc new W[6] */ + "rotlwi %[sha256], 21, 25\n\t" + "rotlwi %[len], 21, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 21, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 20, 20, %[sha256]\n\t" + "rotlwi %[sha256], 18, 15\n\t" + "rotlwi %[len], 18, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 18, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 20, 20, %[sha256]\n\t" + "add 20, 20, 29\n\t" + "\n" + "L_SHA256_transform_len_after_blk_6_%=: \n\t" + /* Round 7 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 28(6)\n\t" + "add 0, 0, 21\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_7_%=\n\t" + /* Calc new W[7] */ + "rotlwi %[sha256], 22, 25\n\t" + "rotlwi %[len], 22, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 22, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 21, 21, %[sha256]\n\t" + "rotlwi %[sha256], 19, 15\n\t" + "rotlwi %[len], 19, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 19, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 21, 21, %[sha256]\n\t" + "add 21, 21, 14\n\t" + "\n" + "L_SHA256_transform_len_after_blk_7_%=: \n\t" + /* Round 8 */ + "rotlwi %[sha256], 9, 26\n\t" + "rotlwi %[len], 9, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], 9\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 12, 12, %[sha256]\n\t" + "lwz %[sha256], 32(6)\n\t" + "add 12, 12, 22\n\t" + "add 12, 12, %[sha256]\n\t" + "add 8, 8, 12\n\t" + "rotlwi %[sha256], 0, 30\n\t" + "rotlwi %[len], 0, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 12, 12, %[sha256]\n\t" + "xor %[len], 0, %[data]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 12, 12, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_8_%=\n\t" + /* Calc new W[8] 
*/ + "rotlwi %[sha256], 23, 25\n\t" + "rotlwi %[len], 23, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 23, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 22, 22, %[sha256]\n\t" + "rotlwi %[sha256], 20, 15\n\t" + "rotlwi %[len], 20, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 20, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 22, 22, %[sha256]\n\t" + "add 22, 22, 15\n\t" + "\n" + "L_SHA256_transform_len_after_blk_8_%=: \n\t" + /* Round 9 */ + "rotlwi %[sha256], 8, 26\n\t" + "rotlwi %[len], 8, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], 8\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 11, 11, %[sha256]\n\t" + "lwz %[sha256], 36(6)\n\t" + "add 11, 11, 23\n\t" + "add 11, 11, %[sha256]\n\t" + "add 7, 7, 11\n\t" + "rotlwi %[sha256], 12, 30\n\t" + "rotlwi %[len], 12, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 11, 11, %[sha256]\n\t" + "xor %[len], 12, 0\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add 11, 11, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_9_%=\n\t" + /* Calc new W[9] */ + "rotlwi %[sha256], 24, 25\n\t" + "rotlwi %[len], 24, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 24, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 23, 23, %[sha256]\n\t" + "rotlwi %[sha256], 21, 15\n\t" + "rotlwi %[len], 21, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 21, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 23, 23, %[sha256]\n\t" + "add 23, 23, 16\n\t" + "\n" + "L_SHA256_transform_len_after_blk_9_%=: \n\t" + /* Round 10 */ + "rotlwi %[sha256], 7, 26\n\t" + "rotlwi %[len], 7, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], 7\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 10, 10, %[sha256]\n\t" + "lwz %[sha256], 40(6)\n\t" + "add 10, 10, 24\n\t" + "add 10, 10, %[sha256]\n\t" + "add %[data], %[data], 10\n\t" + "rotlwi %[sha256], 11, 30\n\t" + "rotlwi %[len], 11, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 10, 10, %[sha256]\n\t" + "xor %[len], 11, 12\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 10, 10, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_10_%=\n\t" + /* Calc new W[10] */ + "rotlwi %[sha256], 25, 25\n\t" + "rotlwi %[len], 25, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 25, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 24, 24, %[sha256]\n\t" + "rotlwi %[sha256], 22, 15\n\t" + "rotlwi %[len], 22, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 22, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 24, 24, %[sha256]\n\t" + "add 24, 24, 17\n\t" + "\n" + "L_SHA256_transform_len_after_blk_10_%=: \n\t" + /* Round 11 */ + "rotlwi %[sha256], %[data], 26\n\t" + "rotlwi %[len], %[data], 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 7\n\t" 
+ "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[data]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add 9, 9, %[sha256]\n\t" + "lwz %[sha256], 44(6)\n\t" + "add 9, 9, 25\n\t" + "add 9, 9, %[sha256]\n\t" + "add 0, 0, 9\n\t" + "rotlwi %[sha256], 10, 30\n\t" + "rotlwi %[len], 10, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 9, 9, %[sha256]\n\t" + "xor %[len], 10, 11\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 11\n\t" + "add 9, 9, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_11_%=\n\t" + /* Calc new W[11] */ + "rotlwi %[sha256], 26, 25\n\t" + "rotlwi %[len], 26, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 26, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 25, 25, %[sha256]\n\t" + "rotlwi %[sha256], 23, 15\n\t" + "rotlwi %[len], 23, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 23, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 25, 25, %[sha256]\n\t" + "add 25, 25, 18\n\t" + "\n" + "L_SHA256_transform_len_after_blk_11_%=: \n\t" + /* Round 12 */ + "rotlwi %[sha256], 0, 26\n\t" + "rotlwi %[len], 0, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 0, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[sha256], %[data], 7\n\t" + "and %[sha256], %[sha256], 0\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 8, 8, %[sha256]\n\t" + "lwz %[sha256], 48(6)\n\t" + "add 8, 8, 26\n\t" + "add 8, 8, %[sha256]\n\t" + "add 12, 12, 8\n\t" + "rotlwi %[sha256], 9, 30\n\t" + "rotlwi %[len], 9, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 9, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 8, 8, %[sha256]\n\t" + "xor %[len], 9, 10\n\t" + "xor %[sha256], 10, 11\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 10\n\t" + "add 8, 8, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_12_%=\n\t" + /* Calc new W[12] */ + "rotlwi %[sha256], 27, 25\n\t" + "rotlwi %[len], 27, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 27, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 26, 26, %[sha256]\n\t" + "rotlwi %[sha256], 24, 15\n\t" + "rotlwi %[len], 24, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 24, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 26, 26, %[sha256]\n\t" + "add 26, 26, 19\n\t" + "\n" + "L_SHA256_transform_len_after_blk_12_%=: \n\t" + /* Round 13 */ + "rotlwi %[sha256], 12, 26\n\t" + "rotlwi %[len], 12, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 12, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[sha256], 0, %[data]\n\t" + "and %[sha256], %[sha256], 12\n\t" + "xor %[sha256], %[sha256], %[data]\n\t" + "add 7, 7, %[sha256]\n\t" + "lwz %[sha256], 52(6)\n\t" + "add 7, 7, 27\n\t" + "add 7, 7, %[sha256]\n\t" + "add 11, 11, 7\n\t" + "rotlwi %[sha256], 8, 30\n\t" + "rotlwi %[len], 8, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 8, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 7, 7, %[sha256]\n\t" + "xor %[len], 8, 9\n\t" + "xor %[sha256], 9, 10\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 9\n\t" + "add 7, 7, %[sha256]\n\t" + 
"mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_13_%=\n\t" + /* Calc new W[13] */ + "rotlwi %[sha256], 28, 25\n\t" + "rotlwi %[len], 28, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 28, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 27, 27, %[sha256]\n\t" + "rotlwi %[sha256], 25, 15\n\t" + "rotlwi %[len], 25, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 25, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 27, 27, %[sha256]\n\t" + "add 27, 27, 20\n\t" + "\n" + "L_SHA256_transform_len_after_blk_13_%=: \n\t" + /* Round 14 */ + "rotlwi %[sha256], 11, 26\n\t" + "rotlwi %[len], 11, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 11, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[sha256], 12, 0\n\t" + "and %[sha256], %[sha256], 11\n\t" + "xor %[sha256], %[sha256], 0\n\t" + "add %[data], %[data], %[sha256]\n\t" + "lwz %[sha256], 56(6)\n\t" + "add %[data], %[data], 28\n\t" + "add %[data], %[data], %[sha256]\n\t" + "add 10, 10, %[data]\n\t" + "rotlwi %[sha256], 7, 30\n\t" + "rotlwi %[len], 7, 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 7, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add %[data], %[data], %[sha256]\n\t" + "xor %[len], 7, 8\n\t" + "xor %[sha256], 8, 9\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 8\n\t" + "add %[data], %[data], %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_14_%=\n\t" + /* Calc new W[14] */ + "rotlwi %[sha256], 29, 25\n\t" + "rotlwi %[len], 29, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 29, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 28, 28, %[sha256]\n\t" + "rotlwi %[sha256], 26, 15\n\t" + "rotlwi %[len], 26, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 26, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 28, 28, %[sha256]\n\t" + "add 28, 28, 21\n\t" + "\n" + "L_SHA256_transform_len_after_blk_14_%=: \n\t" + /* Round 15 */ + "rotlwi %[sha256], 10, 26\n\t" + "rotlwi %[len], 10, 21\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], 10, 7\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[sha256], 11, 12\n\t" + "and %[sha256], %[sha256], 10\n\t" + "xor %[sha256], %[sha256], 12\n\t" + "add 0, 0, %[sha256]\n\t" + "lwz %[sha256], 60(6)\n\t" + "add 0, 0, 29\n\t" + "add 0, 0, %[sha256]\n\t" + "add 9, 9, 0\n\t" + "rotlwi %[sha256], %[data], 30\n\t" + "rotlwi %[len], %[data], 19\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "rotlwi %[len], %[data], 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 0, 0, %[sha256]\n\t" + "xor %[len], %[data], 7\n\t" + "xor %[sha256], 7, 8\n\t" + "and %[sha256], %[sha256], %[len]\n\t" + "xor %[sha256], %[sha256], 7\n\t" + "add 0, 0, %[sha256]\n\t" + "mfctr %[len]\n\t" + "cmpwi %[sha256], %[len], 1\n\t" + "beq %[sha256], L_SHA256_transform_len_after_blk_15_%=\n\t" + /* Calc new W[15] */ + "rotlwi %[sha256], 14, 25\n\t" + "rotlwi %[len], 14, 14\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 14, 3\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 29, 29, %[sha256]\n\t" + "rotlwi %[sha256], 27, 15\n\t" + "rotlwi %[len], 27, 13\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "srwi %[len], 27, 10\n\t" + "xor %[sha256], %[sha256], %[len]\n\t" + "add 29, 29, %[sha256]\n\t" + "add 29, 29, 22\n\t" + "\n" + 
"L_SHA256_transform_len_after_blk_15_%=: \n\t" + "addi 6, 6, 0x40\n\t" + "bdnz L_SHA256_transform_len_start_%=\n\t" + "subi 6, 6, 0x100\n\t" + "lwz %[sha256], 0(1)\n\t" + /* Add in digest from start */ + "lwz %[len], 0(%[sha256])\n\t" + "add 0, 0, %[len]\n\t" + "lwz %[len], 4(%[sha256])\n\t" + "add %[data], %[data], %[len]\n\t" + "lwz %[len], 8(%[sha256])\n\t" + "add 7, 7, %[len]\n\t" + "lwz %[len], 12(%[sha256])\n\t" + "add 8, 8, %[len]\n\t" + "lwz %[len], 16(%[sha256])\n\t" + "add 9, 9, %[len]\n\t" + "lwz %[len], 20(%[sha256])\n\t" + "add 10, 10, %[len]\n\t" + "lwz %[len], 24(%[sha256])\n\t" + "add 11, 11, %[len]\n\t" + "lwz %[len], 28(%[sha256])\n\t" + "add 12, 12, %[len]\n\t" + "stw 0, 0(%[sha256])\n\t" + "stw %[data], 4(%[sha256])\n\t" + "stw 7, 8(%[sha256])\n\t" + "stw 8, 12(%[sha256])\n\t" + "stw 9, 16(%[sha256])\n\t" + "stw 10, 20(%[sha256])\n\t" + "stw 11, 24(%[sha256])\n\t" + "stw 12, 28(%[sha256])\n\t" + "lwz %[sha256], 4(1)\n\t" + "lwz %[len], 8(1)\n\t" + "mtctr %[len]\n\t" + "subi %[len], %[len], 1\n\t" + "addi %[sha256], %[sha256], 0x40\n\t" + "stw %[sha256], 4(1)\n\t" + "stw %[len], 8(1)\n\t" + "bdnz L_SHA256_transform_len_begin_%=\n\t" + "addi 1, 1, 12\n\t" +#endif /* WOLFSSL_PPC32_ASM_SMALL */ +#ifndef WOLFSSL_NO_VAR_ASSIGN_REG + : [sha256] "+r" (sha256), [data] "+r" (data), [len] "+r" (len), + [L_SHA256_transform_len_k] "+r" (L_SHA256_transform_len_k_c) + : +#else + : + : [sha256] "r" (sha256), [data] "r" (data), [len] "r" (len), + [L_SHA256_transform_len_k] "r" (L_SHA256_transform_len_k_c) +#endif /* !WOLFSSL_NO_VAR_ASSIGN_REG */ + : "memory", "cc", "0", "7", "8", "9", "10", "11", "12", "14", "15", + "16", "17", "18", "19", "20", "21", "22", "23", "24", "25", "26", + "27", "28", "29" + ); +} + +#endif /* __PIC__ */ #endif /* !WOLFSSL_PPC32_ASM_SPE */ #endif /* !NO_SHA256 */