diff --git a/src/include.am b/src/include.am index ed640e63e..efa15c0f1 100644 --- a/src/include.am +++ b/src/include.am @@ -233,12 +233,17 @@ endif if !BUILD_FIPS_V2 if BUILD_SHA512 +if BUILD_ARMASM +src_libwolfssl_la_SOURCES += wolfcrypt/src/port/arm/armv8-sha512.c +src_libwolfssl_la_SOURCES += wolfcrypt/src/port/arm/armv8-sha512-asm.S +else src_libwolfssl_la_SOURCES += wolfcrypt/src/sha512.c if BUILD_INTELASM src_libwolfssl_la_SOURCES += wolfcrypt/src/sha512_asm.S endif endif endif +endif if !BUILD_FIPS_V2 if BUILD_SHA3 diff --git a/wolfcrypt/src/port/arm/armv8-sha512-asm.S b/wolfcrypt/src/port/arm/armv8-sha512-asm.S new file mode 100644 index 000000000..471267509 --- /dev/null +++ b/wolfcrypt/src/port/arm/armv8-sha512-asm.S @@ -0,0 +1,1173 @@ +/* armv8-sha512-asm + * + * Copyright (C) 2006-2019 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +#ifdef __aarch64__ + .text + .section .rodata + .type L_SHA512_transform_neon_len_k, %object + .size L_SHA512_transform_neon_len_k, 640 + .align 3 +L_SHA512_transform_neon_len_k: + .xword 0x428a2f98d728ae22 + .xword 0x7137449123ef65cd + .xword 0xb5c0fbcfec4d3b2f + .xword 0xe9b5dba58189dbbc + .xword 0x3956c25bf348b538 + .xword 0x59f111f1b605d019 + .xword 0x923f82a4af194f9b + .xword 0xab1c5ed5da6d8118 + .xword 0xd807aa98a3030242 + .xword 0x12835b0145706fbe + .xword 0x243185be4ee4b28c + .xword 0x550c7dc3d5ffb4e2 + .xword 0x72be5d74f27b896f + .xword 0x80deb1fe3b1696b1 + .xword 0x9bdc06a725c71235 + .xword 0xc19bf174cf692694 + .xword 0xe49b69c19ef14ad2 + .xword 0xefbe4786384f25e3 + .xword 0xfc19dc68b8cd5b5 + .xword 0x240ca1cc77ac9c65 + .xword 0x2de92c6f592b0275 + .xword 0x4a7484aa6ea6e483 + .xword 0x5cb0a9dcbd41fbd4 + .xword 0x76f988da831153b5 + .xword 0x983e5152ee66dfab + .xword 0xa831c66d2db43210 + .xword 0xb00327c898fb213f + .xword 0xbf597fc7beef0ee4 + .xword 0xc6e00bf33da88fc2 + .xword 0xd5a79147930aa725 + .xword 0x6ca6351e003826f + .xword 0x142929670a0e6e70 + .xword 0x27b70a8546d22ffc + .xword 0x2e1b21385c26c926 + .xword 0x4d2c6dfc5ac42aed + .xword 0x53380d139d95b3df + .xword 0x650a73548baf63de + .xword 0x766a0abb3c77b2a8 + .xword 0x81c2c92e47edaee6 + .xword 0x92722c851482353b + .xword 0xa2bfe8a14cf10364 + .xword 0xa81a664bbc423001 + .xword 0xc24b8b70d0f89791 + .xword 0xc76c51a30654be30 + .xword 0xd192e819d6ef5218 + .xword 0xd69906245565a910 + .xword 0xf40e35855771202a + .xword 0x106aa07032bbd1b8 + .xword 0x19a4c116b8d2d0c8 + .xword 0x1e376c085141ab53 + .xword 0x2748774cdf8eeb99 + .xword 0x34b0bcb5e19b48a8 + .xword 0x391c0cb3c5c95a63 + .xword 0x4ed8aa4ae3418acb + .xword 0x5b9cca4f7763e373 + .xword 0x682e6ff3d6b2b8a3 + .xword 
0x748f82ee5defb2fc + .xword 0x78a5636f43172f60 + .xword 0x84c87814a1f0ab72 + .xword 0x8cc702081a6439ec + .xword 0x90befffa23631e28 + .xword 0xa4506cebde82bde9 + .xword 0xbef9a3f7b2c67915 + .xword 0xc67178f2e372532b + .xword 0xca273eceea26619c + .xword 0xd186b8c721c0c207 + .xword 0xeada7dd6cde0eb1e + .xword 0xf57d4f7fee6ed178 + .xword 0x6f067aa72176fba + .xword 0xa637dc5a2c898a6 + .xword 0x113f9804bef90dae + .xword 0x1b710b35131c471b + .xword 0x28db77f523047d84 + .xword 0x32caab7b40c72493 + .xword 0x3c9ebe0a15c9bebc + .xword 0x431d67c49c100d4c + .xword 0x4cc5d4becb3e42b6 + .xword 0x597f299cfc657e2a + .xword 0x5fcb6fab3ad6faec + .xword 0x6c44198c4a475817 + .text + .section .rodata + .type L_SHA512_transform_neon_len_ror8, %object + .align 4 + .size L_SHA512_transform_neon_len_ror8, 16 +L_SHA512_transform_neon_len_ror8: + .xword 0x7060504030201, 0x80f0e0d0c0b0a09 + .text + .align 2 + .globl Transform_Sha512_Len + .type Transform_Sha512_Len, %function +Transform_Sha512_Len: + stp x29, x30, [sp, #-144]! 
+ add x29, sp, #0 + str x17, [x29, #16] + stp x18, x19, [x29, #24] + stp x20, x21, [x29, #40] + stp x22, x23, [x29, #56] + stp x24, x25, [x29, #72] + stp x26, x27, [x29, #88] + str x28, [x29, #104] + stp d8, d9, [x29, #112] + stp d10, d11, [x29, #128] + adr x3, L_SHA512_transform_neon_len_k + adr x28, L_SHA512_transform_neon_len_ror8 + ld1 {v11.16b}, [x28] + # Load digest into working vars + ldp x4, x5, [x0] + ldp x6, x7, [x0, #16] + ldp x8, x9, [x0, #32] + ldp x10, x11, [x0, #48] + # Start of loop processing a block +L_sha512_len_neon_begin: + # Load W + # Copy digest to add in at end + ld1 {v0.2d, v1.2d, v2.2d, v3.2d}, [x1], #0x40 + mov x20, x4 + ld1 {v4.2d, v5.2d, v6.2d, v7.2d}, [x1], #0x40 + mov x21, x5 + rev64 v0.16b, v0.16b + mov x22, x6 + rev64 v1.16b, v1.16b + mov x23, x7 + rev64 v2.16b, v2.16b + mov x24, x8 + rev64 v3.16b, v3.16b + mov x25, x9 + rev64 v4.16b, v4.16b + mov x26, x10 + rev64 v5.16b, v5.16b + mov x27, x11 + rev64 v6.16b, v6.16b + rev64 v7.16b, v7.16b + # Pre-calc: b ^ c + eor x16, x5, x6 + mov x28, #4 + # Start of 16 rounds +L_sha512_len_neon_start: + # Round 0 + mov x18, v0.d[0] + ldr x19, [x3], #8 + ror x12, x8, #14 + ror x14, x4, #28 + ror x13, x8, #18 + ror x15, x4, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x8, #41 + ror x15, x4, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x11, x11, x12 + eor x17, x4, x5 + eor x12, x9, x10 + and x16, x17, x16 + and x12, x12, x8 + add x11, x11, x18 + eor x12, x12, x10 + add x11, x11, x19 + eor x16, x16, x5 + add x11, x11, x12 + add x15, x15, x16 + add x7, x7, x11 + add x11, x11, x15 + # Round 1 + mov x18, v0.d[1] + ldr x19, [x3], #8 + ext v10.16b, v0.16b, v1.16b, #8 + ror x12, x7, #14 + ext v9.16b, v4.16b, v5.16b, #8 + ror x14, x11, #28 + add v0.2d, v0.2d, v9.2d + ror x13, x7, #18 + shl v8.2d, v7.2d, #45 + ror x15, x11, #34 + sri v8.2d, v7.2d, #19 + eor x12, x13, x12 + shl v9.2d, v7.2d, #3 + eor x14, x15, x14 + sri v9.2d, v7.2d, #61 + ror x13, x7, #41 + eor v9.16b, v9.16b, v8.16b + ror 
x15, x11, #39 + ushr v8.2d, v7.2d, #6 + eor x12, x13, x12 + eor v9.16b, v9.16b, v8.16b + eor x15, x15, x14 + add v0.2d, v0.2d, v9.2d + add x10, x10, x12 + shl v8.2d, v10.2d, #63 + eor x16, x11, x4 + sri v8.2d, v10.2d, #1 + eor x12, x8, x9 + tbl v9.16b, { v10.16b }, v11.16b + and x17, x16, x17 + eor v9.16b, v9.16b, v8.16b + and x12, x12, x7 + ushr v10.2d, v10.2d, #7 + add x10, x10, x18 + eor v9.16b, v9.16b, v10.16b + eor x12, x12, x9 + add v0.2d, v0.2d, v9.2d + add x10, x10, x19 + eor x17, x17, x4 + add x10, x10, x12 + add x15, x15, x17 + add x6, x6, x10 + add x10, x10, x15 + # Round 2 + mov x18, v1.d[0] + ldr x19, [x3], #8 + ror x12, x6, #14 + ror x14, x10, #28 + ror x13, x6, #18 + ror x15, x10, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x6, #41 + ror x15, x10, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x9, x9, x12 + eor x17, x10, x11 + eor x12, x7, x8 + and x16, x17, x16 + and x12, x12, x6 + add x9, x9, x18 + eor x12, x12, x8 + add x9, x9, x19 + eor x16, x16, x11 + add x9, x9, x12 + add x15, x15, x16 + add x5, x5, x9 + add x9, x9, x15 + # Round 3 + mov x18, v1.d[1] + ldr x19, [x3], #8 + ext v10.16b, v1.16b, v2.16b, #8 + ror x12, x5, #14 + ext v9.16b, v5.16b, v6.16b, #8 + ror x14, x9, #28 + add v1.2d, v1.2d, v9.2d + ror x13, x5, #18 + shl v8.2d, v0.2d, #45 + ror x15, x9, #34 + sri v8.2d, v0.2d, #19 + eor x12, x13, x12 + shl v9.2d, v0.2d, #3 + eor x14, x15, x14 + sri v9.2d, v0.2d, #61 + ror x13, x5, #41 + eor v9.16b, v9.16b, v8.16b + ror x15, x9, #39 + ushr v8.2d, v0.2d, #6 + eor x12, x13, x12 + eor v9.16b, v9.16b, v8.16b + eor x15, x15, x14 + add v1.2d, v1.2d, v9.2d + add x8, x8, x12 + shl v8.2d, v10.2d, #63 + eor x16, x9, x10 + sri v8.2d, v10.2d, #1 + eor x12, x6, x7 + tbl v9.16b, { v10.16b }, v11.16b + and x17, x16, x17 + eor v9.16b, v9.16b, v8.16b + and x12, x12, x5 + ushr v10.2d, v10.2d, #7 + add x8, x8, x18 + eor v9.16b, v9.16b, v10.16b + eor x12, x12, x7 + add v1.2d, v1.2d, v9.2d + add x8, x8, x19 + eor x17, x17, x10 + add x8, x8, x12 + add 
x15, x15, x17 + add x4, x4, x8 + add x8, x8, x15 + # Round 4 + mov x18, v2.d[0] + ldr x19, [x3], #8 + ror x12, x4, #14 + ror x14, x8, #28 + ror x13, x4, #18 + ror x15, x8, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x4, #41 + ror x15, x8, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x7, x7, x12 + eor x17, x8, x9 + eor x12, x5, x6 + and x16, x17, x16 + and x12, x12, x4 + add x7, x7, x18 + eor x12, x12, x6 + add x7, x7, x19 + eor x16, x16, x9 + add x7, x7, x12 + add x15, x15, x16 + add x11, x11, x7 + add x7, x7, x15 + # Round 5 + mov x18, v2.d[1] + ldr x19, [x3], #8 + ext v10.16b, v2.16b, v3.16b, #8 + ror x12, x11, #14 + ext v9.16b, v6.16b, v7.16b, #8 + ror x14, x7, #28 + add v2.2d, v2.2d, v9.2d + ror x13, x11, #18 + shl v8.2d, v1.2d, #45 + ror x15, x7, #34 + sri v8.2d, v1.2d, #19 + eor x12, x13, x12 + shl v9.2d, v1.2d, #3 + eor x14, x15, x14 + sri v9.2d, v1.2d, #61 + ror x13, x11, #41 + eor v9.16b, v9.16b, v8.16b + ror x15, x7, #39 + ushr v8.2d, v1.2d, #6 + eor x12, x13, x12 + eor v9.16b, v9.16b, v8.16b + eor x15, x15, x14 + add v2.2d, v2.2d, v9.2d + add x6, x6, x12 + shl v8.2d, v10.2d, #63 + eor x16, x7, x8 + sri v8.2d, v10.2d, #1 + eor x12, x4, x5 + tbl v9.16b, { v10.16b }, v11.16b + and x17, x16, x17 + eor v9.16b, v9.16b, v8.16b + and x12, x12, x11 + ushr v10.2d, v10.2d, #7 + add x6, x6, x18 + eor v9.16b, v9.16b, v10.16b + eor x12, x12, x5 + add v2.2d, v2.2d, v9.2d + add x6, x6, x19 + eor x17, x17, x8 + add x6, x6, x12 + add x15, x15, x17 + add x10, x10, x6 + add x6, x6, x15 + # Round 6 + mov x18, v3.d[0] + ldr x19, [x3], #8 + ror x12, x10, #14 + ror x14, x6, #28 + ror x13, x10, #18 + ror x15, x6, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x10, #41 + ror x15, x6, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x5, x5, x12 + eor x17, x6, x7 + eor x12, x11, x4 + and x16, x17, x16 + and x12, x12, x10 + add x5, x5, x18 + eor x12, x12, x4 + add x5, x5, x19 + eor x16, x16, x7 + add x5, x5, x12 + add x15, x15, x16 + add x9, x9, x5 + add x5, 
x5, x15 + # Round 7 + mov x18, v3.d[1] + ldr x19, [x3], #8 + ext v10.16b, v3.16b, v4.16b, #8 + ror x12, x9, #14 + ext v9.16b, v7.16b, v0.16b, #8 + ror x14, x5, #28 + add v3.2d, v3.2d, v9.2d + ror x13, x9, #18 + shl v8.2d, v2.2d, #45 + ror x15, x5, #34 + sri v8.2d, v2.2d, #19 + eor x12, x13, x12 + shl v9.2d, v2.2d, #3 + eor x14, x15, x14 + sri v9.2d, v2.2d, #61 + ror x13, x9, #41 + eor v9.16b, v9.16b, v8.16b + ror x15, x5, #39 + ushr v8.2d, v2.2d, #6 + eor x12, x13, x12 + eor v9.16b, v9.16b, v8.16b + eor x15, x15, x14 + add v3.2d, v3.2d, v9.2d + add x4, x4, x12 + shl v8.2d, v10.2d, #63 + eor x16, x5, x6 + sri v8.2d, v10.2d, #1 + eor x12, x10, x11 + tbl v9.16b, { v10.16b }, v11.16b + and x17, x16, x17 + eor v9.16b, v9.16b, v8.16b + and x12, x12, x9 + ushr v10.2d, v10.2d, #7 + add x4, x4, x18 + eor v9.16b, v9.16b, v10.16b + eor x12, x12, x11 + add v3.2d, v3.2d, v9.2d + add x4, x4, x19 + eor x17, x17, x6 + add x4, x4, x12 + add x15, x15, x17 + add x8, x8, x4 + add x4, x4, x15 + # Round 8 + mov x18, v4.d[0] + ldr x19, [x3], #8 + ror x12, x8, #14 + ror x14, x4, #28 + ror x13, x8, #18 + ror x15, x4, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x8, #41 + ror x15, x4, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x11, x11, x12 + eor x17, x4, x5 + eor x12, x9, x10 + and x16, x17, x16 + and x12, x12, x8 + add x11, x11, x18 + eor x12, x12, x10 + add x11, x11, x19 + eor x16, x16, x5 + add x11, x11, x12 + add x15, x15, x16 + add x7, x7, x11 + add x11, x11, x15 + # Round 9 + mov x18, v4.d[1] + ldr x19, [x3], #8 + ext v10.16b, v4.16b, v5.16b, #8 + ror x12, x7, #14 + ext v9.16b, v0.16b, v1.16b, #8 + ror x14, x11, #28 + add v4.2d, v4.2d, v9.2d + ror x13, x7, #18 + shl v8.2d, v3.2d, #45 + ror x15, x11, #34 + sri v8.2d, v3.2d, #19 + eor x12, x13, x12 + shl v9.2d, v3.2d, #3 + eor x14, x15, x14 + sri v9.2d, v3.2d, #61 + ror x13, x7, #41 + eor v9.16b, v9.16b, v8.16b + ror x15, x11, #39 + ushr v8.2d, v3.2d, #6 + eor x12, x13, x12 + eor v9.16b, v9.16b, v8.16b + eor x15, x15, 
x14 + add v4.2d, v4.2d, v9.2d + add x10, x10, x12 + shl v8.2d, v10.2d, #63 + eor x16, x11, x4 + sri v8.2d, v10.2d, #1 + eor x12, x8, x9 + tbl v9.16b, { v10.16b }, v11.16b + and x17, x16, x17 + eor v9.16b, v9.16b, v8.16b + and x12, x12, x7 + ushr v10.2d, v10.2d, #7 + add x10, x10, x18 + eor v9.16b, v9.16b, v10.16b + eor x12, x12, x9 + add v4.2d, v4.2d, v9.2d + add x10, x10, x19 + eor x17, x17, x4 + add x10, x10, x12 + add x15, x15, x17 + add x6, x6, x10 + add x10, x10, x15 + # Round 10 + mov x18, v5.d[0] + ldr x19, [x3], #8 + ror x12, x6, #14 + ror x14, x10, #28 + ror x13, x6, #18 + ror x15, x10, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x6, #41 + ror x15, x10, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x9, x9, x12 + eor x17, x10, x11 + eor x12, x7, x8 + and x16, x17, x16 + and x12, x12, x6 + add x9, x9, x18 + eor x12, x12, x8 + add x9, x9, x19 + eor x16, x16, x11 + add x9, x9, x12 + add x15, x15, x16 + add x5, x5, x9 + add x9, x9, x15 + # Round 11 + mov x18, v5.d[1] + ldr x19, [x3], #8 + ext v10.16b, v5.16b, v6.16b, #8 + ror x12, x5, #14 + ext v9.16b, v1.16b, v2.16b, #8 + ror x14, x9, #28 + add v5.2d, v5.2d, v9.2d + ror x13, x5, #18 + shl v8.2d, v4.2d, #45 + ror x15, x9, #34 + sri v8.2d, v4.2d, #19 + eor x12, x13, x12 + shl v9.2d, v4.2d, #3 + eor x14, x15, x14 + sri v9.2d, v4.2d, #61 + ror x13, x5, #41 + eor v9.16b, v9.16b, v8.16b + ror x15, x9, #39 + ushr v8.2d, v4.2d, #6 + eor x12, x13, x12 + eor v9.16b, v9.16b, v8.16b + eor x15, x15, x14 + add v5.2d, v5.2d, v9.2d + add x8, x8, x12 + shl v8.2d, v10.2d, #63 + eor x16, x9, x10 + sri v8.2d, v10.2d, #1 + eor x12, x6, x7 + tbl v9.16b, { v10.16b }, v11.16b + and x17, x16, x17 + eor v9.16b, v9.16b, v8.16b + and x12, x12, x5 + ushr v10.2d, v10.2d, #7 + add x8, x8, x18 + eor v9.16b, v9.16b, v10.16b + eor x12, x12, x7 + add v5.2d, v5.2d, v9.2d + add x8, x8, x19 + eor x17, x17, x10 + add x8, x8, x12 + add x15, x15, x17 + add x4, x4, x8 + add x8, x8, x15 + # Round 12 + mov x18, v6.d[0] + ldr x19, [x3], #8 
+ ror x12, x4, #14 + ror x14, x8, #28 + ror x13, x4, #18 + ror x15, x8, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x4, #41 + ror x15, x8, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x7, x7, x12 + eor x17, x8, x9 + eor x12, x5, x6 + and x16, x17, x16 + and x12, x12, x4 + add x7, x7, x18 + eor x12, x12, x6 + add x7, x7, x19 + eor x16, x16, x9 + add x7, x7, x12 + add x15, x15, x16 + add x11, x11, x7 + add x7, x7, x15 + # Round 13 + mov x18, v6.d[1] + ldr x19, [x3], #8 + ext v10.16b, v6.16b, v7.16b, #8 + ror x12, x11, #14 + ext v9.16b, v2.16b, v3.16b, #8 + ror x14, x7, #28 + add v6.2d, v6.2d, v9.2d + ror x13, x11, #18 + shl v8.2d, v5.2d, #45 + ror x15, x7, #34 + sri v8.2d, v5.2d, #19 + eor x12, x13, x12 + shl v9.2d, v5.2d, #3 + eor x14, x15, x14 + sri v9.2d, v5.2d, #61 + ror x13, x11, #41 + eor v9.16b, v9.16b, v8.16b + ror x15, x7, #39 + ushr v8.2d, v5.2d, #6 + eor x12, x13, x12 + eor v9.16b, v9.16b, v8.16b + eor x15, x15, x14 + add v6.2d, v6.2d, v9.2d + add x6, x6, x12 + shl v8.2d, v10.2d, #63 + eor x16, x7, x8 + sri v8.2d, v10.2d, #1 + eor x12, x4, x5 + tbl v9.16b, { v10.16b }, v11.16b + and x17, x16, x17 + eor v9.16b, v9.16b, v8.16b + and x12, x12, x11 + ushr v10.2d, v10.2d, #7 + add x6, x6, x18 + eor v9.16b, v9.16b, v10.16b + eor x12, x12, x5 + add v6.2d, v6.2d, v9.2d + add x6, x6, x19 + eor x17, x17, x8 + add x6, x6, x12 + add x15, x15, x17 + add x10, x10, x6 + add x6, x6, x15 + # Round 14 + mov x18, v7.d[0] + ldr x19, [x3], #8 + ror x12, x10, #14 + ror x14, x6, #28 + ror x13, x10, #18 + ror x15, x6, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x10, #41 + ror x15, x6, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x5, x5, x12 + eor x17, x6, x7 + eor x12, x11, x4 + and x16, x17, x16 + and x12, x12, x10 + add x5, x5, x18 + eor x12, x12, x4 + add x5, x5, x19 + eor x16, x16, x7 + add x5, x5, x12 + add x15, x15, x16 + add x9, x9, x5 + add x5, x5, x15 + # Round 15 + mov x18, v7.d[1] + ldr x19, [x3], #8 + ext v10.16b, v7.16b, v0.16b, #8 + 
ror x12, x9, #14 + ext v9.16b, v3.16b, v4.16b, #8 + ror x14, x5, #28 + add v7.2d, v7.2d, v9.2d + ror x13, x9, #18 + shl v8.2d, v6.2d, #45 + ror x15, x5, #34 + sri v8.2d, v6.2d, #19 + eor x12, x13, x12 + shl v9.2d, v6.2d, #3 + eor x14, x15, x14 + sri v9.2d, v6.2d, #61 + ror x13, x9, #41 + eor v9.16b, v9.16b, v8.16b + ror x15, x5, #39 + ushr v8.2d, v6.2d, #6 + eor x12, x13, x12 + eor v9.16b, v9.16b, v8.16b + eor x15, x15, x14 + add v7.2d, v7.2d, v9.2d + add x4, x4, x12 + shl v8.2d, v10.2d, #63 + eor x16, x5, x6 + sri v8.2d, v10.2d, #1 + eor x12, x10, x11 + tbl v9.16b, { v10.16b }, v11.16b + and x17, x16, x17 + eor v9.16b, v9.16b, v8.16b + and x12, x12, x9 + ushr v10.2d, v10.2d, #7 + add x4, x4, x18 + eor v9.16b, v9.16b, v10.16b + eor x12, x12, x11 + add v7.2d, v7.2d, v9.2d + add x4, x4, x19 + eor x17, x17, x6 + add x4, x4, x12 + add x15, x15, x17 + add x8, x8, x4 + add x4, x4, x15 + subs x28, x28, #1 + bne L_sha512_len_neon_start + # Round 0 + mov x18, v0.d[0] + ldr x19, [x3], #8 + ror x12, x8, #14 + ror x14, x4, #28 + ror x13, x8, #18 + ror x15, x4, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x8, #41 + ror x15, x4, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x11, x11, x12 + eor x17, x4, x5 + eor x12, x9, x10 + and x16, x17, x16 + and x12, x12, x8 + add x11, x11, x18 + eor x12, x12, x10 + add x11, x11, x19 + eor x16, x16, x5 + add x11, x11, x12 + add x15, x15, x16 + add x7, x7, x11 + add x11, x11, x15 + # Round 1 + mov x18, v0.d[1] + ldr x19, [x3], #8 + ror x12, x7, #14 + ror x14, x11, #28 + ror x13, x7, #18 + ror x15, x11, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x7, #41 + ror x15, x11, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x10, x10, x12 + eor x16, x11, x4 + eor x12, x8, x9 + and x17, x16, x17 + and x12, x12, x7 + add x10, x10, x18 + eor x12, x12, x9 + add x10, x10, x19 + eor x17, x17, x4 + add x10, x10, x12 + add x15, x15, x17 + add x6, x6, x10 + add x10, x10, x15 + # Round 2 + mov x18, v1.d[0] + ldr x19, [x3], #8 + ror 
x12, x6, #14 + ror x14, x10, #28 + ror x13, x6, #18 + ror x15, x10, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x6, #41 + ror x15, x10, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x9, x9, x12 + eor x17, x10, x11 + eor x12, x7, x8 + and x16, x17, x16 + and x12, x12, x6 + add x9, x9, x18 + eor x12, x12, x8 + add x9, x9, x19 + eor x16, x16, x11 + add x9, x9, x12 + add x15, x15, x16 + add x5, x5, x9 + add x9, x9, x15 + # Round 3 + mov x18, v1.d[1] + ldr x19, [x3], #8 + ror x12, x5, #14 + ror x14, x9, #28 + ror x13, x5, #18 + ror x15, x9, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x5, #41 + ror x15, x9, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x8, x8, x12 + eor x16, x9, x10 + eor x12, x6, x7 + and x17, x16, x17 + and x12, x12, x5 + add x8, x8, x18 + eor x12, x12, x7 + add x8, x8, x19 + eor x17, x17, x10 + add x8, x8, x12 + add x15, x15, x17 + add x4, x4, x8 + add x8, x8, x15 + # Round 4 + mov x18, v2.d[0] + ldr x19, [x3], #8 + ror x12, x4, #14 + ror x14, x8, #28 + ror x13, x4, #18 + ror x15, x8, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x4, #41 + ror x15, x8, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x7, x7, x12 + eor x17, x8, x9 + eor x12, x5, x6 + and x16, x17, x16 + and x12, x12, x4 + add x7, x7, x18 + eor x12, x12, x6 + add x7, x7, x19 + eor x16, x16, x9 + add x7, x7, x12 + add x15, x15, x16 + add x11, x11, x7 + add x7, x7, x15 + # Round 5 + mov x18, v2.d[1] + ldr x19, [x3], #8 + ror x12, x11, #14 + ror x14, x7, #28 + ror x13, x11, #18 + ror x15, x7, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x11, #41 + ror x15, x7, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x6, x6, x12 + eor x16, x7, x8 + eor x12, x4, x5 + and x17, x16, x17 + and x12, x12, x11 + add x6, x6, x18 + eor x12, x12, x5 + add x6, x6, x19 + eor x17, x17, x8 + add x6, x6, x12 + add x15, x15, x17 + add x10, x10, x6 + add x6, x6, x15 + # Round 6 + mov x18, v3.d[0] + ldr x19, [x3], #8 + ror x12, x10, #14 + ror x14, x6, #28 + ror x13, 
x10, #18 + ror x15, x6, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x10, #41 + ror x15, x6, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x5, x5, x12 + eor x17, x6, x7 + eor x12, x11, x4 + and x16, x17, x16 + and x12, x12, x10 + add x5, x5, x18 + eor x12, x12, x4 + add x5, x5, x19 + eor x16, x16, x7 + add x5, x5, x12 + add x15, x15, x16 + add x9, x9, x5 + add x5, x5, x15 + # Round 7 + mov x18, v3.d[1] + ldr x19, [x3], #8 + ror x12, x9, #14 + ror x14, x5, #28 + ror x13, x9, #18 + ror x15, x5, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x9, #41 + ror x15, x5, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x4, x4, x12 + eor x16, x5, x6 + eor x12, x10, x11 + and x17, x16, x17 + and x12, x12, x9 + add x4, x4, x18 + eor x12, x12, x11 + add x4, x4, x19 + eor x17, x17, x6 + add x4, x4, x12 + add x15, x15, x17 + add x8, x8, x4 + add x4, x4, x15 + # Round 8 + mov x18, v4.d[0] + ldr x19, [x3], #8 + ror x12, x8, #14 + ror x14, x4, #28 + ror x13, x8, #18 + ror x15, x4, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x8, #41 + ror x15, x4, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x11, x11, x12 + eor x17, x4, x5 + eor x12, x9, x10 + and x16, x17, x16 + and x12, x12, x8 + add x11, x11, x18 + eor x12, x12, x10 + add x11, x11, x19 + eor x16, x16, x5 + add x11, x11, x12 + add x15, x15, x16 + add x7, x7, x11 + add x11, x11, x15 + # Round 9 + mov x18, v4.d[1] + ldr x19, [x3], #8 + ror x12, x7, #14 + ror x14, x11, #28 + ror x13, x7, #18 + ror x15, x11, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x7, #41 + ror x15, x11, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x10, x10, x12 + eor x16, x11, x4 + eor x12, x8, x9 + and x17, x16, x17 + and x12, x12, x7 + add x10, x10, x18 + eor x12, x12, x9 + add x10, x10, x19 + eor x17, x17, x4 + add x10, x10, x12 + add x15, x15, x17 + add x6, x6, x10 + add x10, x10, x15 + # Round 10 + mov x18, v5.d[0] + ldr x19, [x3], #8 + ror x12, x6, #14 + ror x14, x10, #28 + ror x13, x6, #18 + ror x15, 
x10, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x6, #41 + ror x15, x10, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x9, x9, x12 + eor x17, x10, x11 + eor x12, x7, x8 + and x16, x17, x16 + and x12, x12, x6 + add x9, x9, x18 + eor x12, x12, x8 + add x9, x9, x19 + eor x16, x16, x11 + add x9, x9, x12 + add x15, x15, x16 + add x5, x5, x9 + add x9, x9, x15 + # Round 11 + mov x18, v5.d[1] + ldr x19, [x3], #8 + ror x12, x5, #14 + ror x14, x9, #28 + ror x13, x5, #18 + ror x15, x9, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x5, #41 + ror x15, x9, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x8, x8, x12 + eor x16, x9, x10 + eor x12, x6, x7 + and x17, x16, x17 + and x12, x12, x5 + add x8, x8, x18 + eor x12, x12, x7 + add x8, x8, x19 + eor x17, x17, x10 + add x8, x8, x12 + add x15, x15, x17 + add x4, x4, x8 + add x8, x8, x15 + # Round 12 + mov x18, v6.d[0] + ldr x19, [x3], #8 + ror x12, x4, #14 + ror x14, x8, #28 + ror x13, x4, #18 + ror x15, x8, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x4, #41 + ror x15, x8, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x7, x7, x12 + eor x17, x8, x9 + eor x12, x5, x6 + and x16, x17, x16 + and x12, x12, x4 + add x7, x7, x18 + eor x12, x12, x6 + add x7, x7, x19 + eor x16, x16, x9 + add x7, x7, x12 + add x15, x15, x16 + add x11, x11, x7 + add x7, x7, x15 + # Round 13 + mov x18, v6.d[1] + ldr x19, [x3], #8 + ror x12, x11, #14 + ror x14, x7, #28 + ror x13, x11, #18 + ror x15, x7, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x11, #41 + ror x15, x7, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x6, x6, x12 + eor x16, x7, x8 + eor x12, x4, x5 + and x17, x16, x17 + and x12, x12, x11 + add x6, x6, x18 + eor x12, x12, x5 + add x6, x6, x19 + eor x17, x17, x8 + add x6, x6, x12 + add x15, x15, x17 + add x10, x10, x6 + add x6, x6, x15 + # Round 14 + mov x18, v7.d[0] + ldr x19, [x3], #8 + ror x12, x10, #14 + ror x14, x6, #28 + ror x13, x10, #18 + ror x15, x6, #34 + eor x12, x13, x12 + eor x14, 
x15, x14 + ror x13, x10, #41 + ror x15, x6, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x5, x5, x12 + eor x17, x6, x7 + eor x12, x11, x4 + and x16, x17, x16 + and x12, x12, x10 + add x5, x5, x18 + eor x12, x12, x4 + add x5, x5, x19 + eor x16, x16, x7 + add x5, x5, x12 + add x15, x15, x16 + add x9, x9, x5 + add x5, x5, x15 + # Round 15 + mov x18, v7.d[1] + ldr x19, [x3], #8 + ror x12, x9, #14 + ror x14, x5, #28 + ror x13, x9, #18 + ror x15, x5, #34 + eor x12, x13, x12 + eor x14, x15, x14 + ror x13, x9, #41 + ror x15, x5, #39 + eor x12, x13, x12 + eor x15, x15, x14 + add x4, x4, x12 + eor x16, x5, x6 + eor x12, x10, x11 + and x17, x16, x17 + and x12, x12, x9 + add x4, x4, x18 + eor x12, x12, x11 + add x4, x4, x19 + eor x17, x17, x6 + add x4, x4, x12 + add x15, x15, x17 + add x8, x8, x4 + add x4, x4, x15 + add x11, x11, x27 + add x10, x10, x26 + add x9, x9, x25 + add x8, x8, x24 + add x7, x7, x23 + add x6, x6, x22 + add x5, x5, x21 + add x4, x4, x20 + adr x3, L_SHA512_transform_neon_len_k + subs w2, w2, #0x80 + bne L_sha512_len_neon_begin + stp x4, x5, [x0] + stp x6, x7, [x0, #16] + stp x8, x9, [x0, #32] + stp x10, x11, [x0, #48] + eor x0, x0, x0 + ldr x17, [x29, #16] + ldp x18, x19, [x29, #24] + ldp x20, x21, [x29, #40] + ldp x22, x23, [x29, #56] + ldp x24, x25, [x29, #72] + ldp x26, x27, [x29, #88] + ldr x28, [x29, #104] + ldp d8, d9, [sp, #112] + ldp d10, d11, [sp, #128] + ldp x29, x30, [sp], #0x90 + ret + .size Transform_Sha512_Len,.-Transform_Sha512_Len +#endif /* __aarch64__ */ diff --git a/wolfcrypt/src/port/arm/armv8-sha512-asm.c b/wolfcrypt/src/port/arm/armv8-sha512-asm.c new file mode 100644 index 000000000..a4350d7f8 --- /dev/null +++ b/wolfcrypt/src/port/arm/armv8-sha512-asm.c @@ -0,0 +1,1158 @@ +/* armv8-sha512-asm + * + * Copyright (C) 2006-2019 wolfSSL Inc. + * + * This file is part of wolfSSL. 
+ * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +#ifdef __aarch64__ +#include +#include + +static const uint64_t L_SHA512_transform_neon_len_k[] = { + 0x428a2f98d728ae22UL, + 0x7137449123ef65cdUL, + 0xb5c0fbcfec4d3b2fUL, + 0xe9b5dba58189dbbcUL, + 0x3956c25bf348b538UL, + 0x59f111f1b605d019UL, + 0x923f82a4af194f9bUL, + 0xab1c5ed5da6d8118UL, + 0xd807aa98a3030242UL, + 0x12835b0145706fbeUL, + 0x243185be4ee4b28cUL, + 0x550c7dc3d5ffb4e2UL, + 0x72be5d74f27b896fUL, + 0x80deb1fe3b1696b1UL, + 0x9bdc06a725c71235UL, + 0xc19bf174cf692694UL, + 0xe49b69c19ef14ad2UL, + 0xefbe4786384f25e3UL, + 0xfc19dc68b8cd5b5UL, + 0x240ca1cc77ac9c65UL, + 0x2de92c6f592b0275UL, + 0x4a7484aa6ea6e483UL, + 0x5cb0a9dcbd41fbd4UL, + 0x76f988da831153b5UL, + 0x983e5152ee66dfabUL, + 0xa831c66d2db43210UL, + 0xb00327c898fb213fUL, + 0xbf597fc7beef0ee4UL, + 0xc6e00bf33da88fc2UL, + 0xd5a79147930aa725UL, + 0x6ca6351e003826fUL, + 0x142929670a0e6e70UL, + 0x27b70a8546d22ffcUL, + 0x2e1b21385c26c926UL, + 0x4d2c6dfc5ac42aedUL, + 0x53380d139d95b3dfUL, + 0x650a73548baf63deUL, + 0x766a0abb3c77b2a8UL, + 0x81c2c92e47edaee6UL, + 0x92722c851482353bUL, + 0xa2bfe8a14cf10364UL, + 0xa81a664bbc423001UL, + 0xc24b8b70d0f89791UL, + 0xc76c51a30654be30UL, + 0xd192e819d6ef5218UL, + 0xd69906245565a910UL, + 0xf40e35855771202aUL, + 0x106aa07032bbd1b8UL, + 
0x19a4c116b8d2d0c8UL, + 0x1e376c085141ab53UL, + 0x2748774cdf8eeb99UL, + 0x34b0bcb5e19b48a8UL, + 0x391c0cb3c5c95a63UL, + 0x4ed8aa4ae3418acbUL, + 0x5b9cca4f7763e373UL, + 0x682e6ff3d6b2b8a3UL, + 0x748f82ee5defb2fcUL, + 0x78a5636f43172f60UL, + 0x84c87814a1f0ab72UL, + 0x8cc702081a6439ecUL, + 0x90befffa23631e28UL, + 0xa4506cebde82bde9UL, + 0xbef9a3f7b2c67915UL, + 0xc67178f2e372532bUL, + 0xca273eceea26619cUL, + 0xd186b8c721c0c207UL, + 0xeada7dd6cde0eb1eUL, + 0xf57d4f7fee6ed178UL, + 0x6f067aa72176fbaUL, + 0xa637dc5a2c898a6UL, + 0x113f9804bef90daeUL, + 0x1b710b35131c471bUL, + 0x28db77f523047d84UL, + 0x32caab7b40c72493UL, + 0x3c9ebe0a15c9bebcUL, + 0x431d67c49c100d4cUL, + 0x4cc5d4becb3e42b6UL, + 0x597f299cfc657e2aUL, + 0x5fcb6fab3ad6faecUL, + 0x6c44198c4a475817UL, +}; + +static const uint64_t L_SHA512_transform_neon_len_ror8[] = { + 0x7060504030201UL, + 0x80f0e0d0c0b0a09UL, +}; + +int Transform_Sha512_Len(wc_Sha512* sha512, const byte* data, word32 len) +{ + __asm__ __volatile__ ( + "stp x29, x30, [sp, #-16]!\n\t" + "add x29, sp, #0\n\t" + "adr x3, %[L_SHA512_transform_neon_len_k]\n\t" + "adr x28, %[L_SHA512_transform_neon_len_ror8]\n\t" + "ld1 {v11.16b}, [x28]\n\t" + /* Load digest into working vars */ + "ldp x4, x5, [%[sha512]]\n\t" + "ldp x6, x7, [%[sha512], #16]\n\t" + "ldp x8, x9, [%[sha512], #32]\n\t" + "ldp x10, x11, [%[sha512], #48]\n\t" + /* Start of loop processing a block */ + "\n" + "L_sha512_len_neon_begin_%=: \n\t" + /* Load W */ + /* Copy digest to add in at end */ + "ld1 {v0.2d, v1.2d, v2.2d, v3.2d}, [%[data]], #0x40\n\t" + "mov x20, x4\n\t" + "ld1 {v4.2d, v5.2d, v6.2d, v7.2d}, [%[data]], #0x40\n\t" + "mov x21, x5\n\t" + "rev64 v0.16b, v0.16b\n\t" + "mov x22, x6\n\t" + "rev64 v1.16b, v1.16b\n\t" + "mov x23, x7\n\t" + "rev64 v2.16b, v2.16b\n\t" + "mov x24, x8\n\t" + "rev64 v3.16b, v3.16b\n\t" + "mov x25, x9\n\t" + "rev64 v4.16b, v4.16b\n\t" + "mov x26, x10\n\t" + "rev64 v5.16b, v5.16b\n\t" + "mov x27, x11\n\t" + "rev64 v6.16b, v6.16b\n\t" + "rev64 v7.16b, 
v7.16b\n\t" + /* Pre-calc: b ^ c */ + "eor x16, x5, x6\n\t" + "mov x28, #4\n\t" + /* Start of 16 rounds */ + "\n" + "L_sha512_len_neon_start_%=: \n\t" + /* Round 0 */ + "mov x18, v0.d[0]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x8, #14\n\t" + "ror x14, x4, #28\n\t" + "ror x13, x8, #18\n\t" + "ror x15, x4, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x8, #41\n\t" + "ror x15, x4, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x11, x11, x12\n\t" + "eor x17, x4, x5\n\t" + "eor x12, x9, x10\n\t" + "and x16, x17, x16\n\t" + "and x12, x12, x8\n\t" + "add x11, x11, x18\n\t" + "eor x12, x12, x10\n\t" + "add x11, x11, x19\n\t" + "eor x16, x16, x5\n\t" + "add x11, x11, x12\n\t" + "add x15, x15, x16\n\t" + "add x7, x7, x11\n\t" + "add x11, x11, x15\n\t" + /* Round 1 */ + "mov x18, v0.d[1]\n\t" + "ldr x19, [x3], #8\n\t" + "ext v10.16b, v0.16b, v1.16b, #8\n\t" + "ror x12, x7, #14\n\t" + "ext v9.16b, v4.16b, v5.16b, #8\n\t" + "ror x14, x11, #28\n\t" + "add v0.2d, v0.2d, v9.2d\n\t" + "ror x13, x7, #18\n\t" + "shl v8.2d, v7.2d, #45\n\t" + "ror x15, x11, #34\n\t" + "sri v8.2d, v7.2d, #19\n\t" + "eor x12, x13, x12\n\t" + "shl v9.2d, v7.2d, #3\n\t" + "eor x14, x15, x14\n\t" + "sri v9.2d, v7.2d, #61\n\t" + "ror x13, x7, #41\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "ror x15, x11, #39\n\t" + "ushr v8.2d, v7.2d, #6\n\t" + "eor x12, x13, x12\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "eor x15, x15, x14\n\t" + "add v0.2d, v0.2d, v9.2d\n\t" + "add x10, x10, x12\n\t" + "shl v8.2d, v10.2d, #63\n\t" + "eor x16, x11, x4\n\t" + "sri v8.2d, v10.2d, #1\n\t" + "eor x12, x8, x9\n\t" + "tbl v9.16b, { v10.16b }, v11.16b\n\t" + "and x17, x16, x17\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "and x12, x12, x7\n\t" + "ushr v10.2d, v10.2d, #7\n\t" + "add x10, x10, x18\n\t" + "eor v9.16b, v9.16b, v10.16b\n\t" + "eor x12, x12, x9\n\t" + "add v0.2d, v0.2d, v9.2d\n\t" + "add x10, x10, x19\n\t" + "eor x17, x17, x4\n\t" + "add x10, x10, x12\n\t" + "add x15, x15, 
x17\n\t" + "add x6, x6, x10\n\t" + "add x10, x10, x15\n\t" + /* Round 2 */ + "mov x18, v1.d[0]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x6, #14\n\t" + "ror x14, x10, #28\n\t" + "ror x13, x6, #18\n\t" + "ror x15, x10, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x6, #41\n\t" + "ror x15, x10, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x9, x9, x12\n\t" + "eor x17, x10, x11\n\t" + "eor x12, x7, x8\n\t" + "and x16, x17, x16\n\t" + "and x12, x12, x6\n\t" + "add x9, x9, x18\n\t" + "eor x12, x12, x8\n\t" + "add x9, x9, x19\n\t" + "eor x16, x16, x11\n\t" + "add x9, x9, x12\n\t" + "add x15, x15, x16\n\t" + "add x5, x5, x9\n\t" + "add x9, x9, x15\n\t" + /* Round 3 */ + "mov x18, v1.d[1]\n\t" + "ldr x19, [x3], #8\n\t" + "ext v10.16b, v1.16b, v2.16b, #8\n\t" + "ror x12, x5, #14\n\t" + "ext v9.16b, v5.16b, v6.16b, #8\n\t" + "ror x14, x9, #28\n\t" + "add v1.2d, v1.2d, v9.2d\n\t" + "ror x13, x5, #18\n\t" + "shl v8.2d, v0.2d, #45\n\t" + "ror x15, x9, #34\n\t" + "sri v8.2d, v0.2d, #19\n\t" + "eor x12, x13, x12\n\t" + "shl v9.2d, v0.2d, #3\n\t" + "eor x14, x15, x14\n\t" + "sri v9.2d, v0.2d, #61\n\t" + "ror x13, x5, #41\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "ror x15, x9, #39\n\t" + "ushr v8.2d, v0.2d, #6\n\t" + "eor x12, x13, x12\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "eor x15, x15, x14\n\t" + "add v1.2d, v1.2d, v9.2d\n\t" + "add x8, x8, x12\n\t" + "shl v8.2d, v10.2d, #63\n\t" + "eor x16, x9, x10\n\t" + "sri v8.2d, v10.2d, #1\n\t" + "eor x12, x6, x7\n\t" + "tbl v9.16b, { v10.16b }, v11.16b\n\t" + "and x17, x16, x17\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "and x12, x12, x5\n\t" + "ushr v10.2d, v10.2d, #7\n\t" + "add x8, x8, x18\n\t" + "eor v9.16b, v9.16b, v10.16b\n\t" + "eor x12, x12, x7\n\t" + "add v1.2d, v1.2d, v9.2d\n\t" + "add x8, x8, x19\n\t" + "eor x17, x17, x10\n\t" + "add x8, x8, x12\n\t" + "add x15, x15, x17\n\t" + "add x4, x4, x8\n\t" + "add x8, x8, x15\n\t" + /* Round 4 */ + "mov x18, v2.d[0]\n\t" + "ldr x19, 
[x3], #8\n\t" + "ror x12, x4, #14\n\t" + "ror x14, x8, #28\n\t" + "ror x13, x4, #18\n\t" + "ror x15, x8, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x4, #41\n\t" + "ror x15, x8, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x7, x7, x12\n\t" + "eor x17, x8, x9\n\t" + "eor x12, x5, x6\n\t" + "and x16, x17, x16\n\t" + "and x12, x12, x4\n\t" + "add x7, x7, x18\n\t" + "eor x12, x12, x6\n\t" + "add x7, x7, x19\n\t" + "eor x16, x16, x9\n\t" + "add x7, x7, x12\n\t" + "add x15, x15, x16\n\t" + "add x11, x11, x7\n\t" + "add x7, x7, x15\n\t" + /* Round 5 */ + "mov x18, v2.d[1]\n\t" + "ldr x19, [x3], #8\n\t" + "ext v10.16b, v2.16b, v3.16b, #8\n\t" + "ror x12, x11, #14\n\t" + "ext v9.16b, v6.16b, v7.16b, #8\n\t" + "ror x14, x7, #28\n\t" + "add v2.2d, v2.2d, v9.2d\n\t" + "ror x13, x11, #18\n\t" + "shl v8.2d, v1.2d, #45\n\t" + "ror x15, x7, #34\n\t" + "sri v8.2d, v1.2d, #19\n\t" + "eor x12, x13, x12\n\t" + "shl v9.2d, v1.2d, #3\n\t" + "eor x14, x15, x14\n\t" + "sri v9.2d, v1.2d, #61\n\t" + "ror x13, x11, #41\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "ror x15, x7, #39\n\t" + "ushr v8.2d, v1.2d, #6\n\t" + "eor x12, x13, x12\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "eor x15, x15, x14\n\t" + "add v2.2d, v2.2d, v9.2d\n\t" + "add x6, x6, x12\n\t" + "shl v8.2d, v10.2d, #63\n\t" + "eor x16, x7, x8\n\t" + "sri v8.2d, v10.2d, #1\n\t" + "eor x12, x4, x5\n\t" + "tbl v9.16b, { v10.16b }, v11.16b\n\t" + "and x17, x16, x17\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "and x12, x12, x11\n\t" + "ushr v10.2d, v10.2d, #7\n\t" + "add x6, x6, x18\n\t" + "eor v9.16b, v9.16b, v10.16b\n\t" + "eor x12, x12, x5\n\t" + "add v2.2d, v2.2d, v9.2d\n\t" + "add x6, x6, x19\n\t" + "eor x17, x17, x8\n\t" + "add x6, x6, x12\n\t" + "add x15, x15, x17\n\t" + "add x10, x10, x6\n\t" + "add x6, x6, x15\n\t" + /* Round 6 */ + "mov x18, v3.d[0]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x10, #14\n\t" + "ror x14, x6, #28\n\t" + "ror x13, x10, #18\n\t" + "ror x15, x6, 
#34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x10, #41\n\t" + "ror x15, x6, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x5, x5, x12\n\t" + "eor x17, x6, x7\n\t" + "eor x12, x11, x4\n\t" + "and x16, x17, x16\n\t" + "and x12, x12, x10\n\t" + "add x5, x5, x18\n\t" + "eor x12, x12, x4\n\t" + "add x5, x5, x19\n\t" + "eor x16, x16, x7\n\t" + "add x5, x5, x12\n\t" + "add x15, x15, x16\n\t" + "add x9, x9, x5\n\t" + "add x5, x5, x15\n\t" + /* Round 7 */ + "mov x18, v3.d[1]\n\t" + "ldr x19, [x3], #8\n\t" + "ext v10.16b, v3.16b, v4.16b, #8\n\t" + "ror x12, x9, #14\n\t" + "ext v9.16b, v7.16b, v0.16b, #8\n\t" + "ror x14, x5, #28\n\t" + "add v3.2d, v3.2d, v9.2d\n\t" + "ror x13, x9, #18\n\t" + "shl v8.2d, v2.2d, #45\n\t" + "ror x15, x5, #34\n\t" + "sri v8.2d, v2.2d, #19\n\t" + "eor x12, x13, x12\n\t" + "shl v9.2d, v2.2d, #3\n\t" + "eor x14, x15, x14\n\t" + "sri v9.2d, v2.2d, #61\n\t" + "ror x13, x9, #41\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "ror x15, x5, #39\n\t" + "ushr v8.2d, v2.2d, #6\n\t" + "eor x12, x13, x12\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "eor x15, x15, x14\n\t" + "add v3.2d, v3.2d, v9.2d\n\t" + "add x4, x4, x12\n\t" + "shl v8.2d, v10.2d, #63\n\t" + "eor x16, x5, x6\n\t" + "sri v8.2d, v10.2d, #1\n\t" + "eor x12, x10, x11\n\t" + "tbl v9.16b, { v10.16b }, v11.16b\n\t" + "and x17, x16, x17\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "and x12, x12, x9\n\t" + "ushr v10.2d, v10.2d, #7\n\t" + "add x4, x4, x18\n\t" + "eor v9.16b, v9.16b, v10.16b\n\t" + "eor x12, x12, x11\n\t" + "add v3.2d, v3.2d, v9.2d\n\t" + "add x4, x4, x19\n\t" + "eor x17, x17, x6\n\t" + "add x4, x4, x12\n\t" + "add x15, x15, x17\n\t" + "add x8, x8, x4\n\t" + "add x4, x4, x15\n\t" + /* Round 8 */ + "mov x18, v4.d[0]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x8, #14\n\t" + "ror x14, x4, #28\n\t" + "ror x13, x8, #18\n\t" + "ror x15, x4, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x8, #41\n\t" + "ror x15, x4, #39\n\t" + 
"eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x11, x11, x12\n\t" + "eor x17, x4, x5\n\t" + "eor x12, x9, x10\n\t" + "and x16, x17, x16\n\t" + "and x12, x12, x8\n\t" + "add x11, x11, x18\n\t" + "eor x12, x12, x10\n\t" + "add x11, x11, x19\n\t" + "eor x16, x16, x5\n\t" + "add x11, x11, x12\n\t" + "add x15, x15, x16\n\t" + "add x7, x7, x11\n\t" + "add x11, x11, x15\n\t" + /* Round 9 */ + "mov x18, v4.d[1]\n\t" + "ldr x19, [x3], #8\n\t" + "ext v10.16b, v4.16b, v5.16b, #8\n\t" + "ror x12, x7, #14\n\t" + "ext v9.16b, v0.16b, v1.16b, #8\n\t" + "ror x14, x11, #28\n\t" + "add v4.2d, v4.2d, v9.2d\n\t" + "ror x13, x7, #18\n\t" + "shl v8.2d, v3.2d, #45\n\t" + "ror x15, x11, #34\n\t" + "sri v8.2d, v3.2d, #19\n\t" + "eor x12, x13, x12\n\t" + "shl v9.2d, v3.2d, #3\n\t" + "eor x14, x15, x14\n\t" + "sri v9.2d, v3.2d, #61\n\t" + "ror x13, x7, #41\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "ror x15, x11, #39\n\t" + "ushr v8.2d, v3.2d, #6\n\t" + "eor x12, x13, x12\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "eor x15, x15, x14\n\t" + "add v4.2d, v4.2d, v9.2d\n\t" + "add x10, x10, x12\n\t" + "shl v8.2d, v10.2d, #63\n\t" + "eor x16, x11, x4\n\t" + "sri v8.2d, v10.2d, #1\n\t" + "eor x12, x8, x9\n\t" + "tbl v9.16b, { v10.16b }, v11.16b\n\t" + "and x17, x16, x17\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "and x12, x12, x7\n\t" + "ushr v10.2d, v10.2d, #7\n\t" + "add x10, x10, x18\n\t" + "eor v9.16b, v9.16b, v10.16b\n\t" + "eor x12, x12, x9\n\t" + "add v4.2d, v4.2d, v9.2d\n\t" + "add x10, x10, x19\n\t" + "eor x17, x17, x4\n\t" + "add x10, x10, x12\n\t" + "add x15, x15, x17\n\t" + "add x6, x6, x10\n\t" + "add x10, x10, x15\n\t" + /* Round 10 */ + "mov x18, v5.d[0]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x6, #14\n\t" + "ror x14, x10, #28\n\t" + "ror x13, x6, #18\n\t" + "ror x15, x10, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x6, #41\n\t" + "ror x15, x10, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x9, x9, x12\n\t" + "eor x17, x10, 
x11\n\t" + "eor x12, x7, x8\n\t" + "and x16, x17, x16\n\t" + "and x12, x12, x6\n\t" + "add x9, x9, x18\n\t" + "eor x12, x12, x8\n\t" + "add x9, x9, x19\n\t" + "eor x16, x16, x11\n\t" + "add x9, x9, x12\n\t" + "add x15, x15, x16\n\t" + "add x5, x5, x9\n\t" + "add x9, x9, x15\n\t" + /* Round 11 */ + "mov x18, v5.d[1]\n\t" + "ldr x19, [x3], #8\n\t" + "ext v10.16b, v5.16b, v6.16b, #8\n\t" + "ror x12, x5, #14\n\t" + "ext v9.16b, v1.16b, v2.16b, #8\n\t" + "ror x14, x9, #28\n\t" + "add v5.2d, v5.2d, v9.2d\n\t" + "ror x13, x5, #18\n\t" + "shl v8.2d, v4.2d, #45\n\t" + "ror x15, x9, #34\n\t" + "sri v8.2d, v4.2d, #19\n\t" + "eor x12, x13, x12\n\t" + "shl v9.2d, v4.2d, #3\n\t" + "eor x14, x15, x14\n\t" + "sri v9.2d, v4.2d, #61\n\t" + "ror x13, x5, #41\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "ror x15, x9, #39\n\t" + "ushr v8.2d, v4.2d, #6\n\t" + "eor x12, x13, x12\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "eor x15, x15, x14\n\t" + "add v5.2d, v5.2d, v9.2d\n\t" + "add x8, x8, x12\n\t" + "shl v8.2d, v10.2d, #63\n\t" + "eor x16, x9, x10\n\t" + "sri v8.2d, v10.2d, #1\n\t" + "eor x12, x6, x7\n\t" + "tbl v9.16b, { v10.16b }, v11.16b\n\t" + "and x17, x16, x17\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "and x12, x12, x5\n\t" + "ushr v10.2d, v10.2d, #7\n\t" + "add x8, x8, x18\n\t" + "eor v9.16b, v9.16b, v10.16b\n\t" + "eor x12, x12, x7\n\t" + "add v5.2d, v5.2d, v9.2d\n\t" + "add x8, x8, x19\n\t" + "eor x17, x17, x10\n\t" + "add x8, x8, x12\n\t" + "add x15, x15, x17\n\t" + "add x4, x4, x8\n\t" + "add x8, x8, x15\n\t" + /* Round 12 */ + "mov x18, v6.d[0]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x4, #14\n\t" + "ror x14, x8, #28\n\t" + "ror x13, x4, #18\n\t" + "ror x15, x8, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x4, #41\n\t" + "ror x15, x8, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x7, x7, x12\n\t" + "eor x17, x8, x9\n\t" + "eor x12, x5, x6\n\t" + "and x16, x17, x16\n\t" + "and x12, x12, x4\n\t" + "add x7, x7, x18\n\t" + "eor 
x12, x12, x6\n\t" + "add x7, x7, x19\n\t" + "eor x16, x16, x9\n\t" + "add x7, x7, x12\n\t" + "add x15, x15, x16\n\t" + "add x11, x11, x7\n\t" + "add x7, x7, x15\n\t" + /* Round 13 */ + "mov x18, v6.d[1]\n\t" + "ldr x19, [x3], #8\n\t" + "ext v10.16b, v6.16b, v7.16b, #8\n\t" + "ror x12, x11, #14\n\t" + "ext v9.16b, v2.16b, v3.16b, #8\n\t" + "ror x14, x7, #28\n\t" + "add v6.2d, v6.2d, v9.2d\n\t" + "ror x13, x11, #18\n\t" + "shl v8.2d, v5.2d, #45\n\t" + "ror x15, x7, #34\n\t" + "sri v8.2d, v5.2d, #19\n\t" + "eor x12, x13, x12\n\t" + "shl v9.2d, v5.2d, #3\n\t" + "eor x14, x15, x14\n\t" + "sri v9.2d, v5.2d, #61\n\t" + "ror x13, x11, #41\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "ror x15, x7, #39\n\t" + "ushr v8.2d, v5.2d, #6\n\t" + "eor x12, x13, x12\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "eor x15, x15, x14\n\t" + "add v6.2d, v6.2d, v9.2d\n\t" + "add x6, x6, x12\n\t" + "shl v8.2d, v10.2d, #63\n\t" + "eor x16, x7, x8\n\t" + "sri v8.2d, v10.2d, #1\n\t" + "eor x12, x4, x5\n\t" + "tbl v9.16b, { v10.16b }, v11.16b\n\t" + "and x17, x16, x17\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "and x12, x12, x11\n\t" + "ushr v10.2d, v10.2d, #7\n\t" + "add x6, x6, x18\n\t" + "eor v9.16b, v9.16b, v10.16b\n\t" + "eor x12, x12, x5\n\t" + "add v6.2d, v6.2d, v9.2d\n\t" + "add x6, x6, x19\n\t" + "eor x17, x17, x8\n\t" + "add x6, x6, x12\n\t" + "add x15, x15, x17\n\t" + "add x10, x10, x6\n\t" + "add x6, x6, x15\n\t" + /* Round 14 */ + "mov x18, v7.d[0]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x10, #14\n\t" + "ror x14, x6, #28\n\t" + "ror x13, x10, #18\n\t" + "ror x15, x6, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x10, #41\n\t" + "ror x15, x6, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x5, x5, x12\n\t" + "eor x17, x6, x7\n\t" + "eor x12, x11, x4\n\t" + "and x16, x17, x16\n\t" + "and x12, x12, x10\n\t" + "add x5, x5, x18\n\t" + "eor x12, x12, x4\n\t" + "add x5, x5, x19\n\t" + "eor x16, x16, x7\n\t" + "add x5, x5, x12\n\t" + "add x15, x15, 
x16\n\t" + "add x9, x9, x5\n\t" + "add x5, x5, x15\n\t" + /* Round 15 */ + "mov x18, v7.d[1]\n\t" + "ldr x19, [x3], #8\n\t" + "ext v10.16b, v7.16b, v0.16b, #8\n\t" + "ror x12, x9, #14\n\t" + "ext v9.16b, v3.16b, v4.16b, #8\n\t" + "ror x14, x5, #28\n\t" + "add v7.2d, v7.2d, v9.2d\n\t" + "ror x13, x9, #18\n\t" + "shl v8.2d, v6.2d, #45\n\t" + "ror x15, x5, #34\n\t" + "sri v8.2d, v6.2d, #19\n\t" + "eor x12, x13, x12\n\t" + "shl v9.2d, v6.2d, #3\n\t" + "eor x14, x15, x14\n\t" + "sri v9.2d, v6.2d, #61\n\t" + "ror x13, x9, #41\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "ror x15, x5, #39\n\t" + "ushr v8.2d, v6.2d, #6\n\t" + "eor x12, x13, x12\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "eor x15, x15, x14\n\t" + "add v7.2d, v7.2d, v9.2d\n\t" + "add x4, x4, x12\n\t" + "shl v8.2d, v10.2d, #63\n\t" + "eor x16, x5, x6\n\t" + "sri v8.2d, v10.2d, #1\n\t" + "eor x12, x10, x11\n\t" + "tbl v9.16b, { v10.16b }, v11.16b\n\t" + "and x17, x16, x17\n\t" + "eor v9.16b, v9.16b, v8.16b\n\t" + "and x12, x12, x9\n\t" + "ushr v10.2d, v10.2d, #7\n\t" + "add x4, x4, x18\n\t" + "eor v9.16b, v9.16b, v10.16b\n\t" + "eor x12, x12, x11\n\t" + "add v7.2d, v7.2d, v9.2d\n\t" + "add x4, x4, x19\n\t" + "eor x17, x17, x6\n\t" + "add x4, x4, x12\n\t" + "add x15, x15, x17\n\t" + "add x8, x8, x4\n\t" + "add x4, x4, x15\n\t" + "subs x28, x28, #1\n\t" + "bne L_sha512_len_neon_start_%=\n\t" + /* Round 0 */ + "mov x18, v0.d[0]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x8, #14\n\t" + "ror x14, x4, #28\n\t" + "ror x13, x8, #18\n\t" + "ror x15, x4, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x8, #41\n\t" + "ror x15, x4, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x11, x11, x12\n\t" + "eor x17, x4, x5\n\t" + "eor x12, x9, x10\n\t" + "and x16, x17, x16\n\t" + "and x12, x12, x8\n\t" + "add x11, x11, x18\n\t" + "eor x12, x12, x10\n\t" + "add x11, x11, x19\n\t" + "eor x16, x16, x5\n\t" + "add x11, x11, x12\n\t" + "add x15, x15, x16\n\t" + "add x7, x7, x11\n\t" + "add 
x11, x11, x15\n\t" + /* Round 1 */ + "mov x18, v0.d[1]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x7, #14\n\t" + "ror x14, x11, #28\n\t" + "ror x13, x7, #18\n\t" + "ror x15, x11, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x7, #41\n\t" + "ror x15, x11, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x10, x10, x12\n\t" + "eor x16, x11, x4\n\t" + "eor x12, x8, x9\n\t" + "and x17, x16, x17\n\t" + "and x12, x12, x7\n\t" + "add x10, x10, x18\n\t" + "eor x12, x12, x9\n\t" + "add x10, x10, x19\n\t" + "eor x17, x17, x4\n\t" + "add x10, x10, x12\n\t" + "add x15, x15, x17\n\t" + "add x6, x6, x10\n\t" + "add x10, x10, x15\n\t" + /* Round 2 */ + "mov x18, v1.d[0]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x6, #14\n\t" + "ror x14, x10, #28\n\t" + "ror x13, x6, #18\n\t" + "ror x15, x10, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x6, #41\n\t" + "ror x15, x10, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x9, x9, x12\n\t" + "eor x17, x10, x11\n\t" + "eor x12, x7, x8\n\t" + "and x16, x17, x16\n\t" + "and x12, x12, x6\n\t" + "add x9, x9, x18\n\t" + "eor x12, x12, x8\n\t" + "add x9, x9, x19\n\t" + "eor x16, x16, x11\n\t" + "add x9, x9, x12\n\t" + "add x15, x15, x16\n\t" + "add x5, x5, x9\n\t" + "add x9, x9, x15\n\t" + /* Round 3 */ + "mov x18, v1.d[1]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x5, #14\n\t" + "ror x14, x9, #28\n\t" + "ror x13, x5, #18\n\t" + "ror x15, x9, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x5, #41\n\t" + "ror x15, x9, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x8, x8, x12\n\t" + "eor x16, x9, x10\n\t" + "eor x12, x6, x7\n\t" + "and x17, x16, x17\n\t" + "and x12, x12, x5\n\t" + "add x8, x8, x18\n\t" + "eor x12, x12, x7\n\t" + "add x8, x8, x19\n\t" + "eor x17, x17, x10\n\t" + "add x8, x8, x12\n\t" + "add x15, x15, x17\n\t" + "add x4, x4, x8\n\t" + "add x8, x8, x15\n\t" + /* Round 4 */ + "mov x18, v2.d[0]\n\t" 
+ "ldr x19, [x3], #8\n\t" + "ror x12, x4, #14\n\t" + "ror x14, x8, #28\n\t" + "ror x13, x4, #18\n\t" + "ror x15, x8, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x4, #41\n\t" + "ror x15, x8, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x7, x7, x12\n\t" + "eor x17, x8, x9\n\t" + "eor x12, x5, x6\n\t" + "and x16, x17, x16\n\t" + "and x12, x12, x4\n\t" + "add x7, x7, x18\n\t" + "eor x12, x12, x6\n\t" + "add x7, x7, x19\n\t" + "eor x16, x16, x9\n\t" + "add x7, x7, x12\n\t" + "add x15, x15, x16\n\t" + "add x11, x11, x7\n\t" + "add x7, x7, x15\n\t" + /* Round 5 */ + "mov x18, v2.d[1]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x11, #14\n\t" + "ror x14, x7, #28\n\t" + "ror x13, x11, #18\n\t" + "ror x15, x7, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x11, #41\n\t" + "ror x15, x7, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x6, x6, x12\n\t" + "eor x16, x7, x8\n\t" + "eor x12, x4, x5\n\t" + "and x17, x16, x17\n\t" + "and x12, x12, x11\n\t" + "add x6, x6, x18\n\t" + "eor x12, x12, x5\n\t" + "add x6, x6, x19\n\t" + "eor x17, x17, x8\n\t" + "add x6, x6, x12\n\t" + "add x15, x15, x17\n\t" + "add x10, x10, x6\n\t" + "add x6, x6, x15\n\t" + /* Round 6 */ + "mov x18, v3.d[0]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x10, #14\n\t" + "ror x14, x6, #28\n\t" + "ror x13, x10, #18\n\t" + "ror x15, x6, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x10, #41\n\t" + "ror x15, x6, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x5, x5, x12\n\t" + "eor x17, x6, x7\n\t" + "eor x12, x11, x4\n\t" + "and x16, x17, x16\n\t" + "and x12, x12, x10\n\t" + "add x5, x5, x18\n\t" + "eor x12, x12, x4\n\t" + "add x5, x5, x19\n\t" + "eor x16, x16, x7\n\t" + "add x5, x5, x12\n\t" + "add x15, x15, x16\n\t" + "add x9, x9, x5\n\t" + "add x5, x5, x15\n\t" + /* Round 7 */ + "mov x18, v3.d[1]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x9, #14\n\t" + "ror x14, x5, 
#28\n\t" + "ror x13, x9, #18\n\t" + "ror x15, x5, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x9, #41\n\t" + "ror x15, x5, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x4, x4, x12\n\t" + "eor x16, x5, x6\n\t" + "eor x12, x10, x11\n\t" + "and x17, x16, x17\n\t" + "and x12, x12, x9\n\t" + "add x4, x4, x18\n\t" + "eor x12, x12, x11\n\t" + "add x4, x4, x19\n\t" + "eor x17, x17, x6\n\t" + "add x4, x4, x12\n\t" + "add x15, x15, x17\n\t" + "add x8, x8, x4\n\t" + "add x4, x4, x15\n\t" + /* Round 8 */ + "mov x18, v4.d[0]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x8, #14\n\t" + "ror x14, x4, #28\n\t" + "ror x13, x8, #18\n\t" + "ror x15, x4, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x8, #41\n\t" + "ror x15, x4, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x11, x11, x12\n\t" + "eor x17, x4, x5\n\t" + "eor x12, x9, x10\n\t" + "and x16, x17, x16\n\t" + "and x12, x12, x8\n\t" + "add x11, x11, x18\n\t" + "eor x12, x12, x10\n\t" + "add x11, x11, x19\n\t" + "eor x16, x16, x5\n\t" + "add x11, x11, x12\n\t" + "add x15, x15, x16\n\t" + "add x7, x7, x11\n\t" + "add x11, x11, x15\n\t" + /* Round 9 */ + "mov x18, v4.d[1]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x7, #14\n\t" + "ror x14, x11, #28\n\t" + "ror x13, x7, #18\n\t" + "ror x15, x11, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x7, #41\n\t" + "ror x15, x11, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x10, x10, x12\n\t" + "eor x16, x11, x4\n\t" + "eor x12, x8, x9\n\t" + "and x17, x16, x17\n\t" + "and x12, x12, x7\n\t" + "add x10, x10, x18\n\t" + "eor x12, x12, x9\n\t" + "add x10, x10, x19\n\t" + "eor x17, x17, x4\n\t" + "add x10, x10, x12\n\t" + "add x15, x15, x17\n\t" + "add x6, x6, x10\n\t" + "add x10, x10, x15\n\t" + /* Round 10 */ + "mov x18, v5.d[0]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x6, #14\n\t" + "ror x14, x10, #28\n\t" + "ror x13, x6, #18\n\t" + "ror x15, 
x10, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x6, #41\n\t" + "ror x15, x10, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x9, x9, x12\n\t" + "eor x17, x10, x11\n\t" + "eor x12, x7, x8\n\t" + "and x16, x17, x16\n\t" + "and x12, x12, x6\n\t" + "add x9, x9, x18\n\t" + "eor x12, x12, x8\n\t" + "add x9, x9, x19\n\t" + "eor x16, x16, x11\n\t" + "add x9, x9, x12\n\t" + "add x15, x15, x16\n\t" + "add x5, x5, x9\n\t" + "add x9, x9, x15\n\t" + /* Round 11 */ + "mov x18, v5.d[1]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x5, #14\n\t" + "ror x14, x9, #28\n\t" + "ror x13, x5, #18\n\t" + "ror x15, x9, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x5, #41\n\t" + "ror x15, x9, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x8, x8, x12\n\t" + "eor x16, x9, x10\n\t" + "eor x12, x6, x7\n\t" + "and x17, x16, x17\n\t" + "and x12, x12, x5\n\t" + "add x8, x8, x18\n\t" + "eor x12, x12, x7\n\t" + "add x8, x8, x19\n\t" + "eor x17, x17, x10\n\t" + "add x8, x8, x12\n\t" + "add x15, x15, x17\n\t" + "add x4, x4, x8\n\t" + "add x8, x8, x15\n\t" + /* Round 12 */ + "mov x18, v6.d[0]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x4, #14\n\t" + "ror x14, x8, #28\n\t" + "ror x13, x4, #18\n\t" + "ror x15, x8, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x4, #41\n\t" + "ror x15, x8, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x7, x7, x12\n\t" + "eor x17, x8, x9\n\t" + "eor x12, x5, x6\n\t" + "and x16, x17, x16\n\t" + "and x12, x12, x4\n\t" + "add x7, x7, x18\n\t" + "eor x12, x12, x6\n\t" + "add x7, x7, x19\n\t" + "eor x16, x16, x9\n\t" + "add x7, x7, x12\n\t" + "add x15, x15, x16\n\t" + "add x11, x11, x7\n\t" + "add x7, x7, x15\n\t" + /* Round 13 */ + "mov x18, v6.d[1]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x11, #14\n\t" + "ror x14, x7, #28\n\t" + "ror x13, x11, #18\n\t" + "ror x15, x7, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + 
"ror x13, x11, #41\n\t" + "ror x15, x7, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x6, x6, x12\n\t" + "eor x16, x7, x8\n\t" + "eor x12, x4, x5\n\t" + "and x17, x16, x17\n\t" + "and x12, x12, x11\n\t" + "add x6, x6, x18\n\t" + "eor x12, x12, x5\n\t" + "add x6, x6, x19\n\t" + "eor x17, x17, x8\n\t" + "add x6, x6, x12\n\t" + "add x15, x15, x17\n\t" + "add x10, x10, x6\n\t" + "add x6, x6, x15\n\t" + /* Round 14 */ + "mov x18, v7.d[0]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x10, #14\n\t" + "ror x14, x6, #28\n\t" + "ror x13, x10, #18\n\t" + "ror x15, x6, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x10, #41\n\t" + "ror x15, x6, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x5, x5, x12\n\t" + "eor x17, x6, x7\n\t" + "eor x12, x11, x4\n\t" + "and x16, x17, x16\n\t" + "and x12, x12, x10\n\t" + "add x5, x5, x18\n\t" + "eor x12, x12, x4\n\t" + "add x5, x5, x19\n\t" + "eor x16, x16, x7\n\t" + "add x5, x5, x12\n\t" + "add x15, x15, x16\n\t" + "add x9, x9, x5\n\t" + "add x5, x5, x15\n\t" + /* Round 15 */ + "mov x18, v7.d[1]\n\t" + "ldr x19, [x3], #8\n\t" + "ror x12, x9, #14\n\t" + "ror x14, x5, #28\n\t" + "ror x13, x9, #18\n\t" + "ror x15, x5, #34\n\t" + "eor x12, x13, x12\n\t" + "eor x14, x15, x14\n\t" + "ror x13, x9, #41\n\t" + "ror x15, x5, #39\n\t" + "eor x12, x13, x12\n\t" + "eor x15, x15, x14\n\t" + "add x4, x4, x12\n\t" + "eor x16, x5, x6\n\t" + "eor x12, x10, x11\n\t" + "and x17, x16, x17\n\t" + "and x12, x12, x9\n\t" + "add x4, x4, x18\n\t" + "eor x12, x12, x11\n\t" + "add x4, x4, x19\n\t" + "eor x17, x17, x6\n\t" + "add x4, x4, x12\n\t" + "add x15, x15, x17\n\t" + "add x8, x8, x4\n\t" + "add x4, x4, x15\n\t" + "add x11, x11, x27\n\t" + "add x10, x10, x26\n\t" + "add x9, x9, x25\n\t" + "add x8, x8, x24\n\t" + "add x7, x7, x23\n\t" + "add x6, x6, x22\n\t" + "add x5, x5, x21\n\t" + "add x4, x4, x20\n\t" + "adr x3, %[L_SHA512_transform_neon_len_k]\n\t" + "subs %[len], %[len], #0x80\n\t" + "bne 
L_sha512_len_neon_begin_%=\n\t" + "stp x4, x5, [%[sha512]]\n\t" + "stp x6, x7, [%[sha512], #16]\n\t" + "stp x8, x9, [%[sha512], #32]\n\t" + "stp x10, x11, [%[sha512], #48]\n\t" + "eor x0, x0, x0\n\t" + "ldp x29, x30, [sp], #16\n\t" + : [sha512] "+r" (sha512), [data] "+r" (data), [len] "+r" (len) + : [L_SHA512_transform_neon_len_k] "S" (L_SHA512_transform_neon_len_k), [L_SHA512_transform_neon_len_ror8] "S" (L_SHA512_transform_neon_len_ror8) + : "memory", "x3", "x4", "x5", "x6", "x7", "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", "v10" + ); + return (uint32_t)(size_t)sha512; +} + +#endif /* __aarch64__ */ diff --git a/wolfcrypt/src/port/arm/armv8-sha512.c b/wolfcrypt/src/port/arm/armv8-sha512.c new file mode 100644 index 000000000..4ae10dc66 --- /dev/null +++ b/wolfcrypt/src/port/arm/armv8-sha512.c @@ -0,0 +1,740 @@ +/* sha512.c + * + * Copyright (C) 2006-2019 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + + +#ifdef HAVE_CONFIG_H + #include +#endif + +#include + +#if defined(WOLFSSL_SHA512) || defined(WOLFSSL_SHA384) + +#include +#include +#include +#include + +#include + +#ifdef NO_INLINE + #include +#else + #define WOLFSSL_MISC_INCLUDED + #include +#endif + +#ifdef WOLFSSL_SHA512 + +static int InitSha512(wc_Sha512* sha512) +{ + if (sha512 == NULL) + return BAD_FUNC_ARG; + + sha512->digest[0] = W64LIT(0x6a09e667f3bcc908); + sha512->digest[1] = W64LIT(0xbb67ae8584caa73b); + sha512->digest[2] = W64LIT(0x3c6ef372fe94f82b); + sha512->digest[3] = W64LIT(0xa54ff53a5f1d36f1); + sha512->digest[4] = W64LIT(0x510e527fade682d1); + sha512->digest[5] = W64LIT(0x9b05688c2b3e6c1f); + sha512->digest[6] = W64LIT(0x1f83d9abfb41bd6b); + sha512->digest[7] = W64LIT(0x5be0cd19137e2179); + + sha512->buffLen = 0; + sha512->loLen = 0; + sha512->hiLen = 0; + +#if defined(WOLFSSL_ESP32WROOM32_CRYPT) && \ + !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_HASH) + + sha512->ctx.sha_type = SHA2_512; + /* always start firstblock = 1 when using hw engine */ + sha512->ctx.isfirstblock = 1; + if(sha512->ctx.mode == ESP32_SHA_HW) { + /* release hw */ + esp_sha_hw_unlock(); + } + /* always set mode as INIT + * whether using HW or SW is detemined at first call of update() + */ + sha512->ctx.mode = ESP32_SHA_INIT; +#endif + return 0; +} + +#endif /* WOLFSSL_SHA512 */ + +#ifdef WOLFSSL_SHA512 + +int wc_InitSha512_ex(wc_Sha512* sha512, void* heap, int devId) +{ + int ret = 0; + + if (sha512 == NULL) + return BAD_FUNC_ARG; + + sha512->heap = heap; + + ret = InitSha512(sha512); + if (ret != 0) + return ret; + +#if defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2) + Sha512_SetTransform(); +#endif + +#ifdef WOLFSSL_SMALL_STACK_CACHE + sha512->W = NULL; +#endif + +#if 
defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA512) + ret = wolfAsync_DevCtxInit(&sha512->asyncDev, + WOLFSSL_ASYNC_MARKER_SHA512, sha512->heap, devId); +#else + (void)devId; +#endif /* WOLFSSL_ASYNC_CRYPT */ + + return ret; +} + +#endif /* WOLFSSL_SHA512 */ + +#ifndef WOLFSSL_ARMASM +static const word64 K512[80] = { + W64LIT(0x428a2f98d728ae22), W64LIT(0x7137449123ef65cd), + W64LIT(0xb5c0fbcfec4d3b2f), W64LIT(0xe9b5dba58189dbbc), + W64LIT(0x3956c25bf348b538), W64LIT(0x59f111f1b605d019), + W64LIT(0x923f82a4af194f9b), W64LIT(0xab1c5ed5da6d8118), + W64LIT(0xd807aa98a3030242), W64LIT(0x12835b0145706fbe), + W64LIT(0x243185be4ee4b28c), W64LIT(0x550c7dc3d5ffb4e2), + W64LIT(0x72be5d74f27b896f), W64LIT(0x80deb1fe3b1696b1), + W64LIT(0x9bdc06a725c71235), W64LIT(0xc19bf174cf692694), + W64LIT(0xe49b69c19ef14ad2), W64LIT(0xefbe4786384f25e3), + W64LIT(0x0fc19dc68b8cd5b5), W64LIT(0x240ca1cc77ac9c65), + W64LIT(0x2de92c6f592b0275), W64LIT(0x4a7484aa6ea6e483), + W64LIT(0x5cb0a9dcbd41fbd4), W64LIT(0x76f988da831153b5), + W64LIT(0x983e5152ee66dfab), W64LIT(0xa831c66d2db43210), + W64LIT(0xb00327c898fb213f), W64LIT(0xbf597fc7beef0ee4), + W64LIT(0xc6e00bf33da88fc2), W64LIT(0xd5a79147930aa725), + W64LIT(0x06ca6351e003826f), W64LIT(0x142929670a0e6e70), + W64LIT(0x27b70a8546d22ffc), W64LIT(0x2e1b21385c26c926), + W64LIT(0x4d2c6dfc5ac42aed), W64LIT(0x53380d139d95b3df), + W64LIT(0x650a73548baf63de), W64LIT(0x766a0abb3c77b2a8), + W64LIT(0x81c2c92e47edaee6), W64LIT(0x92722c851482353b), + W64LIT(0xa2bfe8a14cf10364), W64LIT(0xa81a664bbc423001), + W64LIT(0xc24b8b70d0f89791), W64LIT(0xc76c51a30654be30), + W64LIT(0xd192e819d6ef5218), W64LIT(0xd69906245565a910), + W64LIT(0xf40e35855771202a), W64LIT(0x106aa07032bbd1b8), + W64LIT(0x19a4c116b8d2d0c8), W64LIT(0x1e376c085141ab53), + W64LIT(0x2748774cdf8eeb99), W64LIT(0x34b0bcb5e19b48a8), + W64LIT(0x391c0cb3c5c95a63), W64LIT(0x4ed8aa4ae3418acb), + W64LIT(0x5b9cca4f7763e373), W64LIT(0x682e6ff3d6b2b8a3), + W64LIT(0x748f82ee5defb2fc), 
W64LIT(0x78a5636f43172f60),
    W64LIT(0x84c87814a1f0ab72), W64LIT(0x8cc702081a6439ec),
    W64LIT(0x90befffa23631e28), W64LIT(0xa4506cebde82bde9),
    W64LIT(0xbef9a3f7b2c67915), W64LIT(0xc67178f2e372532b),
    W64LIT(0xca273eceea26619c), W64LIT(0xd186b8c721c0c207),
    W64LIT(0xeada7dd6cde0eb1e), W64LIT(0xf57d4f7fee6ed178),
    W64LIT(0x06f067aa72176fba), W64LIT(0x0a637dc5a2c898a6),
    W64LIT(0x113f9804bef90dae), W64LIT(0x1b710b35131c471b),
    W64LIT(0x28db77f523047d84), W64LIT(0x32caab7b40c72493),
    W64LIT(0x3c9ebe0a15c9bebc), W64LIT(0x431d67c49c100d4c),
    W64LIT(0x4cc5d4becb3e42b6), W64LIT(0x597f299cfc657e2a),
    W64LIT(0x5fcb6fab3ad6faec), W64LIT(0x6c44198c4a475817)
};
/* (above: tail of the 80 SHA-512 round constants K512, FIPS 180-4 sect 4.2.3;
 * the start of the array is earlier in the file) */

/* blk0(i): load message word i of the current block into the schedule W[];
 * byte-swapped on little-endian targets so W[] holds big-endian words as the
 * spec requires. DATA is #defined before each transform to name the source. */
#ifdef LITTLE_ENDIAN_ORDER
#define blk0(i) (W[i] = ByteReverseWord64(DATA[i]))
#else
#define blk0(i) (W[i] = DATA[i])
#endif

/* blk2(i): message-schedule expansion for rounds >= 16, performed in place
 * in the 16-word circular buffer W[]. */
#define blk2(i) ( \
        W[ i ] += \
            s1(W[(i- 2) & 15])+ \
               W[(i- 7) & 15] + \
            s0(W[(i-15) & 15]) \
    )

/* SHA-2 choice and majority functions in reduced-operation form. */
#define Ch(x,y,z) (z ^ ((z ^ y) & x))
#define Maj(x,y,z) (y ^ ((y ^ z) & (x ^ y)))

/* Rotating aliases for the working variables a..h: rather than shuffling
 * eight locals at the end of every round, the round index i rotates the
 * view into T[], so the "rename" is free. */
#define a(i) T[(0-i) & 7]
#define b(i) T[(1-i) & 7]
#define c(i) T[(2-i) & 7]
#define d(i) T[(3-i) & 7]
#define e(i) T[(4-i) & 7]
#define f(i) T[(5-i) & 7]
#define g(i) T[(6-i) & 7]
#define h(i) T[(7-i) & 7]

/* FIPS 180-4 Sigma0/Sigma1 (S0/S1) and sigma0/sigma1 (s0/s1) for SHA-512. */
#define S0(x) (rotrFixed64(x,28) ^ rotrFixed64(x,34) ^ rotrFixed64(x,39))
#define S1(x) (rotrFixed64(x,14) ^ rotrFixed64(x,18) ^ rotrFixed64(x,41))
#define s0(x) (rotrFixed64(x, 1) ^ rotrFixed64(x, 8) ^ (x>>7))
#define s1(x) (rotrFixed64(x,19) ^ rotrFixed64(x,61) ^ (x>>6))

/* One round for i = 0..15: consumes a fresh message word via blk0(). */
#define R0(i) \
    h(i) += S1(e(i)) + Ch(e(i),f(i),g(i)) + K[i+j] + blk0(i); \
    d(i) += h(i); \
    h(i) += S0(a(i)) + Maj(a(i),b(i),c(i))
/* One round for i >= 16: expands the schedule in place via blk2();
 * j carries the 16-round batch offset into K[]. */
#define R(i) \
    h(i) += S1(e(i)) + Ch(e(i),f(i),g(i)) + K[i+j] + blk2(i); \
    d(i) += h(i); \
    h(i) += S0(a(i)) + Maj(a(i),b(i),c(i))

#define DATA sha512->buffer
/* Compress the single 128-byte block in sha512->buffer into the digest.
 * Always returns 0. */
static int Transform_Sha512(wc_Sha512* sha512)
{
    const word64* K = K512;
    word32 j;
    word64 T[8];    /* working variables a..h, accessed via a(i)..h(i) */
    word64 W[16];   /* 16-word circular message schedule */

    /* Copy digest to working vars */
    T[0] = sha512->digest[0];
    T[1] = sha512->digest[1];
    T[2] = sha512->digest[2];
    T[3] = sha512->digest[3];
    T[4] = sha512->digest[4];
    T[5] = sha512->digest[5];
    T[6] = sha512->digest[6];
    T[7] = sha512->digest[7];

    /* 80 operations, partially loop unrolled */
    j = 0;
    R0( 0); R0( 1); R0( 2); R0( 3);
    R0( 4); R0( 5); R0( 6); R0( 7);
    R0( 8); R0( 9); R0(10); R0(11);
    R0(12); R0(13); R0(14); R0(15);
    for (j = 16; j < 80; j += 16) {
        R( 0); R( 1); R( 2); R( 3);
        R( 4); R( 5); R( 6); R( 7);
        R( 8); R( 9); R(10); R(11);
        R(12); R(13); R(14); R(15);
    }

    /* Add the working vars back into digest (Davies-Meyer feed-forward) */
    sha512->digest[0] += T[0];
    sha512->digest[1] += T[1];
    sha512->digest[2] += T[2];
    sha512->digest[3] += T[3];
    sha512->digest[4] += T[4];
    sha512->digest[5] += T[5];
    sha512->digest[6] += T[6];
    sha512->digest[7] += T[7];

    return 0;
}
#undef DATA

/* NOTE(review): casts a byte pointer to word64* for blk0() loads; assumes
 * the target tolerates this alignment/aliasing -- confirm for this port. */
#define DATA ((word64*)data)
/* Compress len bytes (caller guarantees a positive multiple of 128) from
 * data into the digest. TO[] snapshots the chaining value before each block
 * so the feed-forward addition can be folded into T[] itself, letting T[]
 * carry state across blocks without touching sha512->digest per block.
 * Always returns 0. */
static int Transform_Sha512_Len(wc_Sha512* sha512, const byte* data, word32 len)
{
    const word64* K = K512;
    word32 j;
    word64 T[8];
    word64 TO[8];   /* chaining value saved at the top of each block */
    word64 W[16];

    /* Copy digest to working vars */
    T[0] = sha512->digest[0];
    T[1] = sha512->digest[1];
    T[2] = sha512->digest[2];
    T[3] = sha512->digest[3];
    T[4] = sha512->digest[4];
    T[5] = sha512->digest[5];
    T[6] = sha512->digest[6];
    T[7] = sha512->digest[7];

    do {
        TO[0] = T[0];
        TO[1] = T[1];
        TO[2] = T[2];
        TO[3] = T[3];
        TO[4] = T[4];
        TO[5] = T[5];
        TO[6] = T[6];
        TO[7] = T[7];

        /* 80 operations, partially loop unrolled */
        j = 0;
        R0( 0); R0( 1); R0( 2); R0( 3);
        R0( 4); R0( 5); R0( 6); R0( 7);
        R0( 8); R0( 9); R0(10); R0(11);
        R0(12); R0(13); R0(14); R0(15);
        for (j = 16; j < 80; j += 16) {
            R( 0); R( 1); R( 2); R( 3);
            R( 4); R( 5); R( 6); R( 7);
            R( 8); R( 9); R(10); R(11);
            R(12); R(13); R(14); R(15);
        }

        /* feed-forward: fold the saved chaining value into the new state */
        T[0] += TO[0];
        T[1] += TO[1];
        T[2] += TO[2];
        T[3] += TO[3];
        T[4] += TO[4];
        T[5] += TO[5];
        T[6] += TO[6];
        T[7] += TO[7];

        data += 128;
        len -= 128;
    }
    while (len > 0);

    /* Store the final chaining value into digest (per-block additions were
     * already applied to T[] above, hence plain assignment, not +=). */
    sha512->digest[0] = T[0];
    sha512->digest[1] = T[1];
    sha512->digest[2] = T[2];
    sha512->digest[3] = T[3];
    sha512->digest[4] = T[4];
    sha512->digest[5] = T[5];
    sha512->digest[6] = T[6];
    sha512->digest[7] = T[7];

    return 0;
}
#undef DATA
#endif /* NOTE(review): closes a conditional opened before this excerpt --
        * confirm which #if it matches in the full file */


/* Accumulate len bytes into the 128-bit message length counter. */
static WC_INLINE void AddLength(wc_Sha512* sha512, word32 len)
{
    word64 tmp = sha512->loLen;
    if ( (sha512->loLen += len) < tmp)
        sha512->hiLen++;                       /* carry low to high */
}

/* Core update: buffer partial input and compress every complete 128-byte
 * block. Returns 0 on success, BUFFER_E if the internal buffer state is
 * invalid, or an error code propagated from the transform. */
static WC_INLINE int Sha512Update(wc_Sha512* sha512, const byte* data, word32 len)
{
    int ret = 0;
    /* do block size increments */
    byte* local = (byte*)sha512->buffer;
    word32 blocksLen;

    /* check that internal buffLen is valid */
    if (sha512->buffLen >= WC_SHA512_BLOCK_SIZE)
        return BUFFER_E;

    AddLength(sha512, len);

    /* first top up any previously buffered partial block */
    if (sha512->buffLen > 0) {
        word32 add = min(len, WC_SHA512_BLOCK_SIZE - sha512->buffLen);
        if (add > 0) {
            XMEMCPY(&local[sha512->buffLen], data, add);

            sha512->buffLen += add;
            data += add;
            len -= add;
        }

        if (sha512->buffLen == WC_SHA512_BLOCK_SIZE) {
#ifndef WOLFSSL_ARMASM
            ret = Transform_Sha512(sha512);
#else
            ret = Transform_Sha512_Len(sha512, (const byte*)sha512->buffer,
                WC_SHA512_BLOCK_SIZE);
#endif
            if (ret == 0)
                sha512->buffLen = 0;
            else
                len = 0;    /* on transform error, skip the remaining input */
        }
    }

    /* compress all complete blocks straight from the caller's buffer */
    blocksLen = len & ~(WC_SHA512_BLOCK_SIZE-1);
    if (blocksLen > 0) {
        /* Byte reversal performed in function if required. */
        /* NOTE(review): return value not captured here; the C implementation
         * above always returns 0 -- confirm the asm version does too. */
        Transform_Sha512_Len(sha512, data, blocksLen);
        data += blocksLen;
        len -= blocksLen;
    }

    /* stash any trailing partial block for the next call */
    if (len > 0) {
        XMEMCPY(local, data, len);
        sha512->buffLen = len;
    }

    return ret;
}

#ifdef WOLFSSL_SHA512

/* Public update: validate arguments, then feed data to the core update.
 * Returns 0 on success, BAD_FUNC_ARG on invalid arguments. */
int wc_Sha512Update(wc_Sha512* sha512, const byte* data, word32 len)
{
    if (sha512 == NULL || (data == NULL && len > 0)) {
        return BAD_FUNC_ARG;
    }

    return Sha512Update(sha512, data, len);
}

#endif /* WOLFSSL_SHA512 */

/* Pad the message per FIPS 180-4, append the 128-bit bit length, run the
 * final transform(s), and byte-swap the digest on little-endian targets.
 * Returns 0 on success or an error from the transform. */
static WC_INLINE int Sha512Final(wc_Sha512* sha512)
{
    byte* local = (byte*)sha512->buffer;
    int ret;

    if (sha512 == NULL) {
        return BAD_FUNC_ARG;
    }

    local[sha512->buffLen++] = 0x80;  /* add 1 */

    /* pad with zeros */
    if (sha512->buffLen > WC_SHA512_PAD_SIZE) {
        /* no room left for the length words: fill this block, compress it,
         * and continue padding in a fresh block */
        XMEMSET(&local[sha512->buffLen], 0, WC_SHA512_BLOCK_SIZE -
            sha512->buffLen);
        sha512->buffLen += WC_SHA512_BLOCK_SIZE - sha512->buffLen;
#ifndef WOLFSSL_ARMASM
        ret = Transform_Sha512(sha512);
#else
        ret = Transform_Sha512_Len(sha512, (const byte*)sha512->buffer,
            WC_SHA512_BLOCK_SIZE);
#endif
        if (ret != 0)
            return ret;

        sha512->buffLen = 0;
    }
    XMEMSET(&local[sha512->buffLen], 0, WC_SHA512_PAD_SIZE - sha512->buffLen);

    /* put lengths in bits: hiLen takes the bits shifted out of loLen */
    sha512->hiLen = (sha512->loLen >> (8 * sizeof(sha512->loLen) - 3)) +
        (sha512->hiLen << 3);
    sha512->loLen = sha512->loLen << 3;

    /* store lengths */
    /* ! length ordering dependent on digest endian type ! */
    sha512->buffer[WC_SHA512_BLOCK_SIZE / sizeof(word64) - 2] = sha512->hiLen;
    sha512->buffer[WC_SHA512_BLOCK_SIZE / sizeof(word64) - 1] = sha512->loLen;

    /* byte-swap only the two 64-bit length words (BLOCK - PAD = 16 bytes).
     * NOTE(review): this swap is unconditional (no endian guard) -- confirm
     * intent for big-endian builds. */
    ByteReverseWords64(
        &(sha512->buffer[WC_SHA512_BLOCK_SIZE / sizeof(word64) - 2]),
        &(sha512->buffer[WC_SHA512_BLOCK_SIZE / sizeof(word64) - 2]),
        WC_SHA512_BLOCK_SIZE - WC_SHA512_PAD_SIZE);
#ifndef WOLFSSL_ARMASM
    ret = Transform_Sha512(sha512);
#else
    ret = Transform_Sha512_Len(sha512, (const byte*)sha512->buffer,
        WC_SHA512_BLOCK_SIZE);
#endif
    if (ret != 0)
        return ret;

#ifdef LITTLE_ENDIAN_ORDER
    ByteReverseWords64(sha512->digest, sha512->digest, WC_SHA512_DIGEST_SIZE);
#endif

    return 0;
}

#ifdef WOLFSSL_SHA512

/* Copy the in-progress digest state to hash in big-endian byte order
 * WITHOUT padding or finalizing; the context keeps streaming. */
int wc_Sha512FinalRaw(wc_Sha512* sha512, byte* hash)
{
#ifdef LITTLE_ENDIAN_ORDER
    word64 digest[WC_SHA512_DIGEST_SIZE / sizeof(word64)];
#endif

    if (sha512 == NULL || hash == NULL) {
        return BAD_FUNC_ARG;
    }

#ifdef LITTLE_ENDIAN_ORDER
    ByteReverseWords64((word64*)digest, (word64*)sha512->digest,
        WC_SHA512_DIGEST_SIZE);
    XMEMCPY(hash, digest, WC_SHA512_DIGEST_SIZE);
#else
    XMEMCPY(hash, sha512->digest, WC_SHA512_DIGEST_SIZE);
#endif

    return 0;
}

/* Finalize: write the 64-byte digest to hash and re-initialize the context
 * for reuse. Returns 0 on success or an error code. */
int wc_Sha512Final(wc_Sha512* sha512, byte* hash)
{
    int ret;

    if (sha512 == NULL || hash == NULL) {
        return BAD_FUNC_ARG;
    }

    ret = Sha512Final(sha512);
    if (ret != 0)
        return ret;

    XMEMCPY(hash, sha512->digest, WC_SHA512_DIGEST_SIZE);

    return InitSha512(sha512);  /* reset state */
}

/* Default initialization: NULL heap, no device offload. */
int wc_InitSha512(wc_Sha512* sha512)
{
    return wc_InitSha512_ex(sha512, NULL, INVALID_DEVID);
}

/* Release resources held by the context (only the W cache when
 * WOLFSSL_SMALL_STACK_CACHE is enabled; otherwise a no-op). */
void wc_Sha512Free(wc_Sha512* sha512)
{
    if (sha512 == NULL)
        return;

#ifdef WOLFSSL_SMALL_STACK_CACHE
    if (sha512->W != NULL) {
        XFREE(sha512->W, NULL, DYNAMIC_TYPE_TMP_BUFFER);
        sha512->W = NULL;
    }
#endif
}

#endif /* WOLFSSL_SHA512 */

/* -------------------------------------------------------------------------- */
/* SHA384 */
/*
 -------------------------------------------------------------------------- */
#ifdef WOLFSSL_SHA384

/* Set the SHA-384 initial hash state (FIPS 180-4 sect 5.3.4 H(0) values)
 * and reset the buffer and length counters. */
static int InitSha384(wc_Sha384* sha384)
{
    if (sha384 == NULL) {
        return BAD_FUNC_ARG;
    }

    sha384->digest[0] = W64LIT(0xcbbb9d5dc1059ed8);
    sha384->digest[1] = W64LIT(0x629a292a367cd507);
    sha384->digest[2] = W64LIT(0x9159015a3070dd17);
    sha384->digest[3] = W64LIT(0x152fecd8f70e5939);
    sha384->digest[4] = W64LIT(0x67332667ffc00b31);
    sha384->digest[5] = W64LIT(0x8eb44a8768581511);
    sha384->digest[6] = W64LIT(0xdb0c2e0d64f98fa7);
    sha384->digest[7] = W64LIT(0x47b5481dbefa4fa4);

    sha384->buffLen = 0;
    sha384->loLen = 0;
    sha384->hiLen = 0;

    return 0;
}

/* SHA-384 reuses the SHA-512 compression; the cast assumes wc_Sha384 and
 * wc_Sha512 share the same layout -- TODO confirm against sha512.h. */
int wc_Sha384Update(wc_Sha384* sha384, const byte* data, word32 len)
{
    if (sha384 == NULL || (data == NULL && len > 0)) {
        return BAD_FUNC_ARG;
    }

    return Sha512Update((wc_Sha512*)sha384, data, len);
}


/* Copy the in-progress digest state to hash in big-endian byte order
 * WITHOUT padding or finalizing; the context keeps streaming. */
int wc_Sha384FinalRaw(wc_Sha384* sha384, byte* hash)
{
#ifdef LITTLE_ENDIAN_ORDER
    word64 digest[WC_SHA384_DIGEST_SIZE / sizeof(word64)];
#endif

    if (sha384 == NULL || hash == NULL) {
        return BAD_FUNC_ARG;
    }

#ifdef LITTLE_ENDIAN_ORDER
    ByteReverseWords64((word64*)digest, (word64*)sha384->digest,
        WC_SHA384_DIGEST_SIZE);
    XMEMCPY(hash, digest, WC_SHA384_DIGEST_SIZE);
#else
    XMEMCPY(hash, sha384->digest, WC_SHA384_DIGEST_SIZE);
#endif

    return 0;
}

/* Finalize: write the 48-byte digest to hash and re-initialize the context
 * for reuse. Returns 0 on success or an error code. */
int wc_Sha384Final(wc_Sha384* sha384, byte* hash)
{
    int ret;

    if (sha384 == NULL || hash == NULL) {
        return BAD_FUNC_ARG;
    }

    ret = Sha512Final((wc_Sha512*)sha384);
    if (ret != 0)
        return ret;

    XMEMCPY(hash, sha384->digest, WC_SHA384_DIGEST_SIZE);

    return InitSha384(sha384);  /* reset state */
}

/* Initialize with an explicit heap hint and device id (devId is accepted
 * for API compatibility but unused in this build). */
int wc_InitSha384_ex(wc_Sha384* sha384, void* heap, int devId)
{
    int ret;

    if (sha384 == NULL) {
        return BAD_FUNC_ARG;
    }

    sha384->heap = heap;
    ret = InitSha384(sha384);
    if (ret != 0)
        return ret;

#ifdef WOLFSSL_SMALL_STACK_CACHE
    sha384->W = NULL;
#endif

    (void)devId;

    return ret;
}

/* Default initialization: NULL heap, no device offload. */
int wc_InitSha384(wc_Sha384* sha384)
{
    return wc_InitSha384_ex(sha384, NULL, INVALID_DEVID);
}

/* Release resources held by the context (only the W cache when
 * WOLFSSL_SMALL_STACK_CACHE is enabled; otherwise a no-op). */
void wc_Sha384Free(wc_Sha384* sha384)
{
    if (sha384 == NULL)
        return;

#ifdef WOLFSSL_SMALL_STACK_CACHE
    if (sha384->W != NULL) {
        XFREE(sha384->W, NULL, DYNAMIC_TYPE_TMP_BUFFER);
        sha384->W = NULL;
    }
#endif
}

#endif /* WOLFSSL_SHA384 */

#ifdef WOLFSSL_SHA512

/* Snapshot the digest-so-far: finalize a temporary copy so the caller's
 * context can keep streaming. */
int wc_Sha512GetHash(wc_Sha512* sha512, byte* hash)
{
    int ret;
    wc_Sha512 tmpSha512;

    if (sha512 == NULL || hash == NULL)
        return BAD_FUNC_ARG;

    ret = wc_Sha512Copy(sha512, &tmpSha512);
    if (ret == 0) {
        ret = wc_Sha512Final(&tmpSha512, hash);
        wc_Sha512Free(&tmpSha512);
    }
    return ret;
}

/* Duplicate the context; the small-stack W cache is deliberately not
 * shared between copies, and the copy is flagged as such. */
int wc_Sha512Copy(wc_Sha512* src, wc_Sha512* dst)
{
    int ret = 0;

    if (src == NULL || dst == NULL)
        return BAD_FUNC_ARG;

    XMEMCPY(dst, src, sizeof(wc_Sha512));
#ifdef WOLFSSL_SMALL_STACK_CACHE
    dst->W = NULL;
#endif

#if defined(WOLFSSL_HASH_FLAGS) || defined(WOLF_CRYPTO_CB)
    dst->flags |= WC_HASH_FLAG_ISCOPY;
#endif

    return ret;
}

#if defined(WOLFSSL_HASH_FLAGS) || defined(WOLF_CRYPTO_CB)
/* Flag accessors tolerate NULL arguments and always return 0. */
int wc_Sha512SetFlags(wc_Sha512* sha512, word32 flags)
{
    if (sha512) {
        sha512->flags = flags;
    }
    return 0;
}
int wc_Sha512GetFlags(wc_Sha512* sha512, word32* flags)
{
    if (sha512 && flags) {
        *flags = sha512->flags;
    }
    return 0;
}
#endif

#endif /* WOLFSSL_SHA512 */

#ifdef WOLFSSL_SHA384

/* Snapshot the digest-so-far: finalize a temporary copy so the caller's
 * context can keep streaming. */
int wc_Sha384GetHash(wc_Sha384* sha384, byte* hash)
{
    int ret;
    wc_Sha384 tmpSha384;

    if (sha384 == NULL || hash == NULL)
        return BAD_FUNC_ARG;
    ret = wc_Sha384Copy(sha384, &tmpSha384);
    if (ret == 0) {
        ret = wc_Sha384Final(&tmpSha384, hash);
        wc_Sha384Free(&tmpSha384);
    }
    return ret;
}
/* Duplicate the context; the small-stack W cache is deliberately not
 * shared between copies, and the copy is flagged as such. */
int wc_Sha384Copy(wc_Sha384* src, wc_Sha384* dst)
{
    int ret = 0;

    if (src == NULL || dst == NULL)
        return BAD_FUNC_ARG;

    XMEMCPY(dst, src, sizeof(wc_Sha384));
#ifdef WOLFSSL_SMALL_STACK_CACHE
    dst->W = NULL;
#endif

#if defined(WOLFSSL_HASH_FLAGS) || defined(WOLF_CRYPTO_CB)
    dst->flags |= WC_HASH_FLAG_ISCOPY;
#endif

    return ret;
}

#if defined(WOLFSSL_HASH_FLAGS) || defined(WOLF_CRYPTO_CB)
/* Flag accessors tolerate NULL arguments and always return 0. */
int wc_Sha384SetFlags(wc_Sha384* sha384, word32 flags)
{
    if (sha384) {
        sha384->flags = flags;
    }
    return 0;
}
int wc_Sha384GetFlags(wc_Sha384* sha384, word32* flags)
{
    if (sha384 && flags) {
        *flags = sha384->flags;
    }
    return 0;
}
#endif

#endif /* WOLFSSL_SHA384 */

#endif /* WOLFSSL_SHA512 || WOLFSSL_SHA384 */
diff --git a/wolfssl/wolfcrypt/sha512.h b/wolfssl/wolfcrypt/sha512.h
index 83a96b53a..750ba26bb 100644
--- a/wolfssl/wolfcrypt/sha512.h
+++ b/wolfssl/wolfcrypt/sha512.h
@@ -141,6 +141,10 @@ typedef struct wc_Sha512 {

 #endif /* HAVE_FIPS */

+#ifdef WOLFSSL_ARMASM
+int Transform_Sha512_Len(wc_Sha512* sha512, const byte* data, word32 len);
+#endif
+
 #ifdef WOLFSSL_SHA512
 WOLFSSL_API int wc_InitSha512(wc_Sha512*);