From e5bf2ed1d12e2e1c750709b27f2f428eb262772a Mon Sep 17 00:00:00 2001 From: Sean Parkinson Date: Thu, 11 Apr 2019 16:17:48 +1000 Subject: [PATCH] Curve25519 converted from inline assembly to asm only Fixed large values (top bit set) in tables to be negative as the type is signed. Put C++ protection around function prototypes in fe_operations.h. --- src/include.am | 6 + wolfcrypt/src/fe_operations.c | 2 +- wolfcrypt/src/fe_x25519_asm.S | 16178 ++++++++++++++++++++++++++++ wolfcrypt/src/fe_x25519_x64.i | 2089 ---- wolfcrypt/src/ge_operations.c | 2766 ++--- wolfcrypt/src/include.am | 3 +- wolfssl/wolfcrypt/fe_operations.h | 9 + 7 files changed, 17578 insertions(+), 3475 deletions(-) create mode 100644 wolfcrypt/src/fe_x25519_asm.S delete mode 100644 wolfcrypt/src/fe_x25519_x64.i diff --git a/src/include.am b/src/include.am index 9105f913a..e49442210 100644 --- a/src/include.am +++ b/src/include.am @@ -372,6 +372,9 @@ if BUILD_CURVE25519_SMALL src_libwolfssl_la_SOURCES += wolfcrypt/src/fe_low_mem.c else src_libwolfssl_la_SOURCES += wolfcrypt/src/fe_operations.c +if BUILD_INTELASM +src_libwolfssl_la_SOURCES += wolfcrypt/src/fe_x25519_asm.S +endif endif endif @@ -382,6 +385,9 @@ else src_libwolfssl_la_SOURCES += wolfcrypt/src/ge_operations.c if !BUILD_FEMATH src_libwolfssl_la_SOURCES += wolfcrypt/src/fe_operations.c +if BUILD_INTELASM +src_libwolfssl_la_SOURCES += wolfcrypt/src/fe_x25519_asm.S +endif endif endif endif diff --git a/wolfcrypt/src/fe_operations.c b/wolfcrypt/src/fe_operations.c index 7c84da8e5..df74a350c 100644 --- a/wolfcrypt/src/fe_operations.c +++ b/wolfcrypt/src/fe_operations.c @@ -42,7 +42,7 @@ #endif #ifdef CURVED25519_X64 -#include "fe_x25519_x64.i" +/* Assumbly code in fe_x25519_asm.* */ #elif defined(CURVED25519_128BIT) #include "fe_x25519_128.i" #else diff --git a/wolfcrypt/src/fe_x25519_asm.S b/wolfcrypt/src/fe_x25519_asm.S new file mode 100644 index 000000000..b052136aa --- /dev/null +++ b/wolfcrypt/src/fe_x25519_asm.S @@ -0,0 +1,16178 @@ +/* 
fe_x25519_asm + * + * Copyright (C) 2006-2018 wolfSSL Inc. + * + * This file is part of wolfSSL. + * + * wolfSSL is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + * + * wolfSSL is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA + */ + +#ifndef HAVE_INTEL_AVX1 +#define HAVE_INTEL_AVX1 +#endif /* HAVE_INTEL_AVX1 */ +#ifndef NO_AVX2_SUPPORT +#define HAVE_INTEL_AVX2 +#endif /* NO_AVX2_SUPPORT */ + +#ifndef __APPLE__ +.text +.globl fe_init +.type fe_init,@function +.align 4 +fe_init: +#else +.section __TEXT,__text +.globl _fe_init +.p2align 2 +_fe_init: +#endif /* __APPLE__ */ +#ifdef HAVE_INTEL_AVX2 + movl cpuFlagsSet(%rip), %eax + testl %eax, %eax + je L_fe_init_get_flags + repz retq +L_fe_init_get_flags: +#ifndef __APPLE__ + callq cpuid_get_flags@plt +#else + callq _cpuid_get_flags +#endif /* __APPLE__ */ + movl %eax, intelFlags(%rip) + andl $0x50, %eax + cmpl $0x50, %eax + jne L_fe_init_flags_done +#ifndef __APPLE__ + movq fe_mul_avx2@GOTPCREL(%rip), %rax +#else + movq fe_mul_avx2(%rip), %rax +#endif /* __APPLE__ */ + movq %rax, fe_mul_p(%rip) +#ifndef __APPLE__ + movq fe_sq_avx2@GOTPCREL(%rip), %rax +#else + movq fe_sq_avx2(%rip), %rax +#endif /* __APPLE__ */ + movq %rax, fe_sq_p(%rip) +#ifndef __APPLE__ + movq fe_mul121666_avx2@GOTPCREL(%rip), %rax +#else + movq fe_mul121666_avx2(%rip), %rax +#endif /* __APPLE__ */ + movq %rax, fe_mul121666_p(%rip) +#ifndef __APPLE__ + movq 
fe_sq2_avx2@GOTPCREL(%rip), %rax +#else + movq fe_sq2_avx2(%rip), %rax +#endif /* __APPLE__ */ + movq %rax, fe_sq2_p(%rip) +#ifndef __APPLE__ + movq fe_invert_avx2@GOTPCREL(%rip), %rax +#else + movq fe_invert_avx2(%rip), %rax +#endif /* __APPLE__ */ + movq %rax, fe_invert_p(%rip) +#ifndef __APPLE__ + movq curve25519_avx2@GOTPCREL(%rip), %rax +#else + movq curve25519_avx2(%rip), %rax +#endif /* __APPLE__ */ + movq %rax, curve25519_p(%rip) +#ifndef __APPLE__ + movq fe_pow22523_avx2@GOTPCREL(%rip), %rax +#else + movq fe_pow22523_avx2(%rip), %rax +#endif /* __APPLE__ */ + movq %rax, fe_pow22523_p(%rip) +#ifndef __APPLE__ + movq fe_ge_to_p2_avx2@GOTPCREL(%rip), %rax +#else + movq fe_ge_to_p2_avx2(%rip), %rax +#endif /* __APPLE__ */ + movq %rax, fe_ge_to_p2_p(%rip) +#ifndef __APPLE__ + movq fe_ge_to_p3_avx2@GOTPCREL(%rip), %rax +#else + movq fe_ge_to_p3_avx2(%rip), %rax +#endif /* __APPLE__ */ + movq %rax, fe_ge_to_p3_p(%rip) +#ifndef __APPLE__ + movq fe_ge_dbl_avx2@GOTPCREL(%rip), %rax +#else + movq fe_ge_dbl_avx2(%rip), %rax +#endif /* __APPLE__ */ + movq %rax, fe_ge_dbl_p(%rip) +#ifndef __APPLE__ + movq fe_ge_madd_avx2@GOTPCREL(%rip), %rax +#else + movq fe_ge_madd_avx2(%rip), %rax +#endif /* __APPLE__ */ + movq %rax, fe_ge_madd_p(%rip) +#ifndef __APPLE__ + movq fe_ge_msub_avx2@GOTPCREL(%rip), %rax +#else + movq fe_ge_msub_avx2(%rip), %rax +#endif /* __APPLE__ */ + movq %rax, fe_ge_msub_p(%rip) +#ifndef __APPLE__ + movq fe_ge_add_avx2@GOTPCREL(%rip), %rax +#else + movq fe_ge_add_avx2(%rip), %rax +#endif /* __APPLE__ */ + movq %rax, fe_ge_add_p(%rip) +#ifndef __APPLE__ + movq fe_ge_sub_avx2@GOTPCREL(%rip), %rax +#else + movq fe_ge_sub_avx2(%rip), %rax +#endif /* __APPLE__ */ + movq %rax, fe_ge_sub_p(%rip) +L_fe_init_flags_done: + movl $0x01, cpuFlagsSet(%rip) +#endif /* HAVE_INTEL_AVX2 */ + repz retq +#ifndef __APPLE__ +.size fe_init,.-fe_init +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_frombytes +.type fe_frombytes,@function +.align 4 +fe_frombytes: 
+#else +.section __TEXT,__text +.globl _fe_frombytes +.p2align 2 +_fe_frombytes: +#endif /* __APPLE__ */ + movq $0x7fffffffffffffff, %r9 + # Copy + movq (%rsi), %rdx + movq 8(%rsi), %rax + movq 16(%rsi), %rcx + movq 24(%rsi), %r8 + movq %rdx, (%rdi) + movq %rax, 8(%rdi) + movq %rcx, 16(%rdi) + movq %r8, 24(%rdi) + andq %r9, 24(%rdi) + repz retq +#ifndef __APPLE__ +.size fe_frombytes,.-fe_frombytes +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_tobytes +.type fe_tobytes,@function +.align 4 +fe_tobytes: +#else +.section __TEXT,__text +.globl _fe_tobytes +.p2align 2 +_fe_tobytes: +#endif /* __APPLE__ */ + movq $0x7fffffffffffffff, %r10 + movq (%rsi), %rdx + movq 8(%rsi), %rax + movq 16(%rsi), %rcx + movq 24(%rsi), %r8 + addq $19, %rdx + adcq $0x00, %rax + adcq $0x00, %rcx + adcq $0x00, %r8 + shrq $63, %r8 + imulq $19, %r8, %r9 + movq (%rsi), %rdx + movq 8(%rsi), %rax + movq 16(%rsi), %rcx + movq 24(%rsi), %r8 + addq %r9, %rdx + adcq $0x00, %rax + adcq $0x00, %rcx + adcq $0x00, %r8 + andq %r10, %r8 + movq %rdx, (%rdi) + movq %rax, 8(%rdi) + movq %rcx, 16(%rdi) + movq %r8, 24(%rdi) + repz retq +#ifndef __APPLE__ +.size fe_tobytes,.-fe_tobytes +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_1 +.type fe_1,@function +.align 4 +fe_1: +#else +.section __TEXT,__text +.globl _fe_1 +.p2align 2 +_fe_1: +#endif /* __APPLE__ */ + # Set one + movq $0x01, (%rdi) + movq $0x00, 8(%rdi) + movq $0x00, 16(%rdi) + movq $0x00, 24(%rdi) + repz retq +#ifndef __APPLE__ +.size fe_1,.-fe_1 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_0 +.type fe_0,@function +.align 4 +fe_0: +#else +.section __TEXT,__text +.globl _fe_0 +.p2align 2 +_fe_0: +#endif /* __APPLE__ */ + # Set zero + movq $0x00, (%rdi) + movq $0x00, 8(%rdi) + movq $0x00, 16(%rdi) + movq $0x00, 24(%rdi) + repz retq +#ifndef __APPLE__ +.size fe_0,.-fe_0 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_copy +.type fe_copy,@function +.align 4 +fe_copy: +#else +.section __TEXT,__text 
+.globl _fe_copy +.p2align 2 +_fe_copy: +#endif /* __APPLE__ */ + # Copy + movq (%rsi), %rdx + movq 8(%rsi), %rax + movq 16(%rsi), %rcx + movq 24(%rsi), %r8 + movq %rdx, (%rdi) + movq %rax, 8(%rdi) + movq %rcx, 16(%rdi) + movq %r8, 24(%rdi) + repz retq +#ifndef __APPLE__ +.size fe_copy,.-fe_copy +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_cswap +.type fe_cswap,@function +.align 4 +fe_cswap: +#else +.section __TEXT,__text +.globl _fe_cswap +.p2align 2 +_fe_cswap: +#endif /* __APPLE__ */ + # Conditional Swap + movslq %edx, %rax + movq (%rdi), %rcx + movq 8(%rdi), %r8 + movq 16(%rdi), %r9 + movq 24(%rdi), %r10 + negq %rax + xorq (%rsi), %rcx + xorq 8(%rsi), %r8 + xorq 16(%rsi), %r9 + xorq 24(%rsi), %r10 + andq %rax, %rcx + andq %rax, %r8 + andq %rax, %r9 + andq %rax, %r10 + xorq %rcx, (%rdi) + xorq %r8, 8(%rdi) + xorq %r9, 16(%rdi) + xorq %r10, 24(%rdi) + xorq %rcx, (%rsi) + xorq %r8, 8(%rsi) + xorq %r9, 16(%rsi) + xorq %r10, 24(%rsi) + repz retq +#ifndef __APPLE__ +.size fe_cswap,.-fe_cswap +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_sub +.type fe_sub,@function +.align 4 +fe_sub: +#else +.section __TEXT,__text +.globl _fe_sub +.p2align 2 +_fe_sub: +#endif /* __APPLE__ */ + pushq %r12 + # Sub + movq (%rsi), %rax + movq 8(%rsi), %rcx + movq 16(%rsi), %r8 + movq 24(%rsi), %r9 + subq (%rdx), %rax + movq $0x00, %r10 + sbbq 8(%rdx), %rcx + movq $-19, %r11 + sbbq 16(%rdx), %r8 + movq $0x7fffffffffffffff, %r12 + sbbq 24(%rdx), %r9 + sbbq $0x00, %r10 + # Mask the modulus + andq %r10, %r11 + andq %r10, %r12 + # Add modulus (if underflow) + addq %r11, %rax + adcq %r10, %rcx + adcq %r10, %r8 + adcq %r12, %r9 + movq %rax, (%rdi) + movq %rcx, 8(%rdi) + movq %r8, 16(%rdi) + movq %r9, 24(%rdi) + popq %r12 + repz retq +#ifndef __APPLE__ +.size fe_sub,.-fe_sub +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_add +.type fe_add,@function +.align 4 +fe_add: +#else +.section __TEXT,__text +.globl _fe_add +.p2align 2 +_fe_add: +#endif /* 
__APPLE__ */ + pushq %r12 + # Add + movq (%rsi), %rax + movq 8(%rsi), %rcx + addq (%rdx), %rax + movq 16(%rsi), %r8 + adcq 8(%rdx), %rcx + movq 24(%rsi), %r10 + adcq 16(%rdx), %r8 + movq $-19, %r11 + adcq 24(%rdx), %r10 + movq $0x7fffffffffffffff, %r12 + movq %r10, %r9 + sarq $63, %r10 + # Mask the modulus + andq %r10, %r11 + andq %r10, %r12 + # Sub modulus (if overflow) + subq %r11, %rax + sbbq %r10, %rcx + sbbq %r10, %r8 + sbbq %r12, %r9 + movq %rax, (%rdi) + movq %rcx, 8(%rdi) + movq %r8, 16(%rdi) + movq %r9, 24(%rdi) + popq %r12 + repz retq +#ifndef __APPLE__ +.size fe_add,.-fe_add +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_neg +.type fe_neg,@function +.align 4 +fe_neg: +#else +.section __TEXT,__text +.globl _fe_neg +.p2align 2 +_fe_neg: +#endif /* __APPLE__ */ + movq $-19, %rdx + movq $-1, %rax + movq $-1, %rcx + movq $0x7fffffffffffffff, %r8 + subq (%rsi), %rdx + subq 8(%rsi), %rax + subq 16(%rsi), %rcx + subq 24(%rsi), %r8 + movq %rdx, (%rdi) + movq %rax, 8(%rdi) + movq %rcx, 16(%rdi) + movq %r8, 24(%rdi) + repz retq +#ifndef __APPLE__ +.size fe_neg,.-fe_neg +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_cmov +.type fe_cmov,@function +.align 4 +fe_cmov: +#else +.section __TEXT,__text +.globl _fe_cmov +.p2align 2 +_fe_cmov: +#endif /* __APPLE__ */ + cmpl $0x01, %edx + movq (%rdi), %rcx + movq 8(%rdi), %r8 + movq 16(%rdi), %r9 + movq 24(%rdi), %r10 + cmoveq (%rsi), %rcx + cmoveq 8(%rsi), %r8 + cmoveq 16(%rsi), %r9 + cmoveq 24(%rsi), %r10 + movq %rcx, (%rdi) + movq %r8, 8(%rdi) + movq %r9, 16(%rdi) + movq %r10, 24(%rdi) + repz retq +#ifndef __APPLE__ +.size fe_cmov,.-fe_cmov +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_isnonzero +.type fe_isnonzero,@function +.align 4 +fe_isnonzero: +#else +.section __TEXT,__text +.globl _fe_isnonzero +.p2align 2 +_fe_isnonzero: +#endif /* __APPLE__ */ + movq $0x7fffffffffffffff, %r10 + movq (%rdi), %rax + movq 8(%rdi), %rdx + movq 16(%rdi), %rcx + movq 24(%rdi), %r8 + addq $19, 
%rax + adcq $0x00, %rdx + adcq $0x00, %rcx + adcq $0x00, %r8 + shrq $63, %r8 + imulq $19, %r8, %r9 + movq (%rdi), %rax + movq 8(%rdi), %rdx + movq 16(%rdi), %rcx + movq 24(%rdi), %r8 + addq %r9, %rax + adcq $0x00, %rdx + adcq $0x00, %rcx + adcq $0x00, %r8 + andq %r10, %r8 + orq %rdx, %rax + orq %rcx, %rax + orq %r8, %rax + repz retq +#ifndef __APPLE__ +.size fe_isnonzero,.-fe_isnonzero +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_isnegative +.type fe_isnegative,@function +.align 4 +fe_isnegative: +#else +.section __TEXT,__text +.globl _fe_isnegative +.p2align 2 +_fe_isnegative: +#endif /* __APPLE__ */ + movq $0x7fffffffffffffff, %r10 + movq (%rdi), %rax + movq 8(%rdi), %rdx + movq 16(%rdi), %rcx + movq 24(%rdi), %r8 + addq $19, %rax + adcq $0x00, %rdx + adcq $0x00, %rcx + adcq $0x00, %r8 + shrq $63, %r8 + imulq $19, %r8, %r9 + movq (%rdi), %rax + addq %r9, %rax + andq $0x01, %rax + repz retq +#ifndef __APPLE__ +.size fe_isnegative,.-fe_isnegative +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_cmov_table +.type fe_cmov_table,@function +.align 4 +fe_cmov_table: +#else +.section __TEXT,__text +.globl _fe_cmov_table +.p2align 2 +_fe_cmov_table: +#endif /* __APPLE__ */ + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + movq %rdx, %rcx + movsbq %cl, %rax + cdq + xorb %dl, %al + subb %dl, %al + movb %al, %r15b + movq $0x01, %rax + xorq %rdx, %rdx + xorq %r8, %r8 + xorq %r9, %r9 + movq $0x01, %r10 + xorq %r11, %r11 + xorq %r12, %r12 + xorq %r13, %r13 + cmpb $0x01, %r15b + movq (%rsi), %r14 + cmoveq %r14, %rax + movq 8(%rsi), %r14 + cmoveq %r14, %rdx + movq 16(%rsi), %r14 + cmoveq %r14, %r8 + movq 24(%rsi), %r14 + cmoveq %r14, %r9 + movq 32(%rsi), %r14 + cmoveq %r14, %r10 + movq 40(%rsi), %r14 + cmoveq %r14, %r11 + movq 48(%rsi), %r14 + cmoveq %r14, %r12 + movq 56(%rsi), %r14 + cmoveq %r14, %r13 + cmpb $2, %r15b + movq 96(%rsi), %r14 + cmoveq %r14, %rax + movq 104(%rsi), %r14 + cmoveq %r14, %rdx + movq 112(%rsi), %r14 + cmoveq %r14, %r8 + 
movq 120(%rsi), %r14 + cmoveq %r14, %r9 + movq 128(%rsi), %r14 + cmoveq %r14, %r10 + movq 136(%rsi), %r14 + cmoveq %r14, %r11 + movq 144(%rsi), %r14 + cmoveq %r14, %r12 + movq 152(%rsi), %r14 + cmoveq %r14, %r13 + cmpb $3, %r15b + movq 192(%rsi), %r14 + cmoveq %r14, %rax + movq 200(%rsi), %r14 + cmoveq %r14, %rdx + movq 208(%rsi), %r14 + cmoveq %r14, %r8 + movq 216(%rsi), %r14 + cmoveq %r14, %r9 + movq 224(%rsi), %r14 + cmoveq %r14, %r10 + movq 232(%rsi), %r14 + cmoveq %r14, %r11 + movq 240(%rsi), %r14 + cmoveq %r14, %r12 + movq 248(%rsi), %r14 + cmoveq %r14, %r13 + cmpb $4, %r15b + movq 288(%rsi), %r14 + cmoveq %r14, %rax + movq 296(%rsi), %r14 + cmoveq %r14, %rdx + movq 304(%rsi), %r14 + cmoveq %r14, %r8 + movq 312(%rsi), %r14 + cmoveq %r14, %r9 + movq 320(%rsi), %r14 + cmoveq %r14, %r10 + movq 328(%rsi), %r14 + cmoveq %r14, %r11 + movq 336(%rsi), %r14 + cmoveq %r14, %r12 + movq 344(%rsi), %r14 + cmoveq %r14, %r13 + cmpb $5, %r15b + movq 384(%rsi), %r14 + cmoveq %r14, %rax + movq 392(%rsi), %r14 + cmoveq %r14, %rdx + movq 400(%rsi), %r14 + cmoveq %r14, %r8 + movq 408(%rsi), %r14 + cmoveq %r14, %r9 + movq 416(%rsi), %r14 + cmoveq %r14, %r10 + movq 424(%rsi), %r14 + cmoveq %r14, %r11 + movq 432(%rsi), %r14 + cmoveq %r14, %r12 + movq 440(%rsi), %r14 + cmoveq %r14, %r13 + cmpb $6, %r15b + movq 480(%rsi), %r14 + cmoveq %r14, %rax + movq 488(%rsi), %r14 + cmoveq %r14, %rdx + movq 496(%rsi), %r14 + cmoveq %r14, %r8 + movq 504(%rsi), %r14 + cmoveq %r14, %r9 + movq 512(%rsi), %r14 + cmoveq %r14, %r10 + movq 520(%rsi), %r14 + cmoveq %r14, %r11 + movq 528(%rsi), %r14 + cmoveq %r14, %r12 + movq 536(%rsi), %r14 + cmoveq %r14, %r13 + cmpb $7, %r15b + movq 576(%rsi), %r14 + cmoveq %r14, %rax + movq 584(%rsi), %r14 + cmoveq %r14, %rdx + movq 592(%rsi), %r14 + cmoveq %r14, %r8 + movq 600(%rsi), %r14 + cmoveq %r14, %r9 + movq 608(%rsi), %r14 + cmoveq %r14, %r10 + movq 616(%rsi), %r14 + cmoveq %r14, %r11 + movq 624(%rsi), %r14 + cmoveq %r14, %r12 + movq 632(%rsi), %r14 + cmoveq 
%r14, %r13 + cmpb $8, %r15b + movq 672(%rsi), %r14 + cmoveq %r14, %rax + movq 680(%rsi), %r14 + cmoveq %r14, %rdx + movq 688(%rsi), %r14 + cmoveq %r14, %r8 + movq 696(%rsi), %r14 + cmoveq %r14, %r9 + movq 704(%rsi), %r14 + cmoveq %r14, %r10 + movq 712(%rsi), %r14 + cmoveq %r14, %r11 + movq 720(%rsi), %r14 + cmoveq %r14, %r12 + movq 728(%rsi), %r14 + cmoveq %r14, %r13 + cmpb $0x00, %cl + movq %rax, %r14 + cmovlq %r10, %rax + cmovlq %r14, %r10 + movq %rdx, %r14 + cmovlq %r11, %rdx + cmovlq %r14, %r11 + movq %r8, %r14 + cmovlq %r12, %r8 + cmovlq %r14, %r12 + movq %r9, %r14 + cmovlq %r13, %r9 + cmovlq %r14, %r13 + movq %rax, (%rdi) + movq %rdx, 8(%rdi) + movq %r8, 16(%rdi) + movq %r9, 24(%rdi) + movq %r10, 32(%rdi) + movq %r11, 40(%rdi) + movq %r12, 48(%rdi) + movq %r13, 56(%rdi) + xorq %rax, %rax + xorq %rdx, %rdx + xorq %r8, %r8 + xorq %r9, %r9 + cmpb $0x01, %r15b + movq 64(%rsi), %r14 + cmoveq %r14, %rax + movq 72(%rsi), %r14 + cmoveq %r14, %rdx + movq 80(%rsi), %r14 + cmoveq %r14, %r8 + movq 88(%rsi), %r14 + cmoveq %r14, %r9 + cmpb $2, %r15b + movq 160(%rsi), %r14 + cmoveq %r14, %rax + movq 168(%rsi), %r14 + cmoveq %r14, %rdx + movq 176(%rsi), %r14 + cmoveq %r14, %r8 + movq 184(%rsi), %r14 + cmoveq %r14, %r9 + cmpb $3, %r15b + movq 256(%rsi), %r14 + cmoveq %r14, %rax + movq 264(%rsi), %r14 + cmoveq %r14, %rdx + movq 272(%rsi), %r14 + cmoveq %r14, %r8 + movq 280(%rsi), %r14 + cmoveq %r14, %r9 + cmpb $4, %r15b + movq 352(%rsi), %r14 + cmoveq %r14, %rax + movq 360(%rsi), %r14 + cmoveq %r14, %rdx + movq 368(%rsi), %r14 + cmoveq %r14, %r8 + movq 376(%rsi), %r14 + cmoveq %r14, %r9 + cmpb $5, %r15b + movq 448(%rsi), %r14 + cmoveq %r14, %rax + movq 456(%rsi), %r14 + cmoveq %r14, %rdx + movq 464(%rsi), %r14 + cmoveq %r14, %r8 + movq 472(%rsi), %r14 + cmoveq %r14, %r9 + cmpb $6, %r15b + movq 544(%rsi), %r14 + cmoveq %r14, %rax + movq 552(%rsi), %r14 + cmoveq %r14, %rdx + movq 560(%rsi), %r14 + cmoveq %r14, %r8 + movq 568(%rsi), %r14 + cmoveq %r14, %r9 + cmpb $7, %r15b + movq 
640(%rsi), %r14 + cmoveq %r14, %rax + movq 648(%rsi), %r14 + cmoveq %r14, %rdx + movq 656(%rsi), %r14 + cmoveq %r14, %r8 + movq 664(%rsi), %r14 + cmoveq %r14, %r9 + cmpb $8, %r15b + movq 736(%rsi), %r14 + cmoveq %r14, %rax + movq 744(%rsi), %r14 + cmoveq %r14, %rdx + movq 752(%rsi), %r14 + cmoveq %r14, %r8 + movq 760(%rsi), %r14 + cmoveq %r14, %r9 + movq $-19, %r10 + movq $-1, %r11 + movq $-1, %r12 + movq $0x7fffffffffffffff, %r13 + subq %rax, %r10 + sbbq %rdx, %r11 + sbbq %r8, %r12 + sbbq %r9, %r13 + cmpb $0x00, %cl + cmovlq %r10, %rax + cmovlq %r11, %rdx + cmovlq %r12, %r8 + cmovlq %r13, %r9 + movq %rax, 64(%rdi) + movq %rdx, 72(%rdi) + movq %r8, 80(%rdi) + movq %r9, 88(%rdi) + popq %r15 + popq %r14 + popq %r13 + popq %r12 + repz retq +#ifndef __APPLE__ +.size fe_cmov_table,.-fe_cmov_table +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_mul +.type fe_mul,@function +.align 4 +fe_mul: +#else +.section __TEXT,__text +.globl _fe_mul +.p2align 2 +_fe_mul: +#endif /* __APPLE__ */ + jmpq *fe_mul_p(%rip) +#ifndef __APPLE__ +.size fe_mul,.-fe_mul +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_sq +.type fe_sq,@function +.align 4 +fe_sq: +#else +.section __TEXT,__text +.globl _fe_sq +.p2align 2 +_fe_sq: +#endif /* __APPLE__ */ + jmpq *fe_sq_p(%rip) +#ifndef __APPLE__ +.size fe_sq,.-fe_sq +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_mul121666 +.type fe_mul121666,@function +.align 4 +fe_mul121666: +#else +.section __TEXT,__text +.globl _fe_mul121666 +.p2align 2 +_fe_mul121666: +#endif /* __APPLE__ */ + jmpq *fe_mul121666_p(%rip) +#ifndef __APPLE__ +.size fe_mul121666,.-fe_mul121666 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_sq2 +.type fe_sq2,@function +.align 4 +fe_sq2: +#else +.section __TEXT,__text +.globl _fe_sq2 +.p2align 2 +_fe_sq2: +#endif /* __APPLE__ */ + jmpq *fe_sq2_p(%rip) +#ifndef __APPLE__ +.size fe_sq2,.-fe_sq2 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_invert +.type 
fe_invert,@function +.align 4 +fe_invert: +#else +.section __TEXT,__text +.globl _fe_invert +.p2align 2 +_fe_invert: +#endif /* __APPLE__ */ + jmpq *fe_invert_p(%rip) +#ifndef __APPLE__ +.size fe_invert,.-fe_invert +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl curve25519 +.type curve25519,@function +.align 4 +curve25519: +#else +.section __TEXT,__text +.globl _curve25519 +.p2align 2 +_curve25519: +#endif /* __APPLE__ */ + jmpq *curve25519_p(%rip) +#ifndef __APPLE__ +.size curve25519,.-curve25519 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_pow22523 +.type fe_pow22523,@function +.align 4 +fe_pow22523: +#else +.section __TEXT,__text +.globl _fe_pow22523 +.p2align 2 +_fe_pow22523: +#endif /* __APPLE__ */ + jmpq *fe_pow22523_p(%rip) +#ifndef __APPLE__ +.size fe_pow22523,.-fe_pow22523 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_ge_to_p2 +.type fe_ge_to_p2,@function +.align 4 +fe_ge_to_p2: +#else +.section __TEXT,__text +.globl _fe_ge_to_p2 +.p2align 2 +_fe_ge_to_p2: +#endif /* __APPLE__ */ + jmpq *fe_ge_to_p2_p(%rip) +#ifndef __APPLE__ +.size fe_ge_to_p2,.-fe_ge_to_p2 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_ge_to_p3 +.type fe_ge_to_p3,@function +.align 4 +fe_ge_to_p3: +#else +.section __TEXT,__text +.globl _fe_ge_to_p3 +.p2align 2 +_fe_ge_to_p3: +#endif /* __APPLE__ */ + jmpq *fe_ge_to_p3_p(%rip) +#ifndef __APPLE__ +.size fe_ge_to_p3,.-fe_ge_to_p3 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_ge_dbl +.type fe_ge_dbl,@function +.align 4 +fe_ge_dbl: +#else +.section __TEXT,__text +.globl _fe_ge_dbl +.p2align 2 +_fe_ge_dbl: +#endif /* __APPLE__ */ + jmpq *fe_ge_dbl_p(%rip) +#ifndef __APPLE__ +.size fe_ge_dbl,.-fe_ge_dbl +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_ge_madd +.type fe_ge_madd,@function +.align 4 +fe_ge_madd: +#else +.section __TEXT,__text +.globl _fe_ge_madd +.p2align 2 +_fe_ge_madd: +#endif /* __APPLE__ */ + jmpq *fe_ge_madd_p(%rip) +#ifndef __APPLE__ +.size 
fe_ge_madd,.-fe_ge_madd +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_ge_msub +.type fe_ge_msub,@function +.align 4 +fe_ge_msub: +#else +.section __TEXT,__text +.globl _fe_ge_msub +.p2align 2 +_fe_ge_msub: +#endif /* __APPLE__ */ + jmpq *fe_ge_msub_p(%rip) +#ifndef __APPLE__ +.size fe_ge_msub,.-fe_ge_msub +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_ge_add +.type fe_ge_add,@function +.align 4 +fe_ge_add: +#else +.section __TEXT,__text +.globl _fe_ge_add +.p2align 2 +_fe_ge_add: +#endif /* __APPLE__ */ + jmpq *fe_ge_add_p(%rip) +#ifndef __APPLE__ +.size fe_ge_add,.-fe_ge_add +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_ge_sub +.type fe_ge_sub,@function +.align 4 +fe_ge_sub: +#else +.section __TEXT,__text +.globl _fe_ge_sub +.p2align 2 +_fe_ge_sub: +#endif /* __APPLE__ */ + jmpq *fe_ge_sub_p(%rip) +#ifndef __APPLE__ +.size fe_ge_sub,.-fe_ge_sub +#endif /* __APPLE__ */ + .data + .type cpuFlagsSet, @object + .size cpuFlagsSet,4 +cpuFlagsSet: + .long 0 + .data + .type intelFlags, @object + .size intelFlags,4 +intelFlags: + .long 0 + .data + .type fe_mul_p, @object + .size fe_mul_p,8 +fe_mul_p: + .quad fe_mul_x64 + .data + .type fe_sq_p, @object + .size fe_sq_p,8 +fe_sq_p: + .quad fe_sq_x64 + .data + .type fe_mul121666_p, @object + .size fe_mul121666_p,8 +fe_mul121666_p: + .quad fe_mul121666_x64 + .data + .type fe_sq2_p, @object + .size fe_sq2_p,8 +fe_sq2_p: + .quad fe_sq2_x64 + .data + .type fe_invert_p, @object + .size fe_invert_p,8 +fe_invert_p: + .quad fe_invert_x64 + .data + .type curve25519_p, @object + .size curve25519_p,8 +curve25519_p: + .quad curve25519_x64 + .data + .type fe_pow22523_p, @object + .size fe_pow22523_p,8 +fe_pow22523_p: + .quad fe_pow22523_x64 + .data + .type fe_ge_to_p2_p, @object + .size fe_ge_to_p2_p,8 +fe_ge_to_p2_p: + .quad fe_ge_to_p2_x64 + .data + .type fe_ge_to_p3_p, @object + .size fe_ge_to_p3_p,8 +fe_ge_to_p3_p: + .quad fe_ge_to_p3_x64 + .data + .type fe_ge_dbl_p, @object + .size 
fe_ge_dbl_p,8 +fe_ge_dbl_p: + .quad fe_ge_dbl_x64 + .data + .type fe_ge_madd_p, @object + .size fe_ge_madd_p,8 +fe_ge_madd_p: + .quad fe_ge_madd_x64 + .data + .type fe_ge_msub_p, @object + .size fe_ge_msub_p,8 +fe_ge_msub_p: + .quad fe_ge_msub_x64 + .data + .type fe_ge_add_p, @object + .size fe_ge_add_p,8 +fe_ge_add_p: + .quad fe_ge_add_x64 + .data + .type fe_ge_sub_p, @object + .size fe_ge_sub_p,8 +fe_ge_sub_p: + .quad fe_ge_sub_x64 +#ifndef __APPLE__ +.text +.globl fe_mul_x64 +.type fe_mul_x64,@function +.align 4 +fe_mul_x64: +#else +.section __TEXT,__text +.globl _fe_mul_x64 +.p2align 2 +_fe_mul_x64: +#endif /* __APPLE__ */ + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + pushq %rbx + movq %rdx, %rcx + # Multiply + # A[0] * B[0] + movq (%rcx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rcx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rcx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rcx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rcx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rcx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rcx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rcx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rcx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rcx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rcx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rcx), %rax + mulq 16(%rsi) + addq 
%rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rcx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rcx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rcx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rcx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rbx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rbx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rbx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rbx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + popq %rbx + popq %r15 + popq %r14 + popq %r13 + popq %r12 + repz retq +#ifndef __APPLE__ +.size fe_mul_x64,.-fe_mul_x64 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_sq_x64 +.type fe_sq_x64,@function +.align 4 +fe_sq_x64: +#else +.section __TEXT,__text +.globl _fe_sq_x64 +.p2align 2 +_fe_sq_x64: +#endif /* __APPLE__ */ + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + # Square + # A[0] * A[1] + movq (%rsi), %rax + 
mulq 8(%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * A[2] + movq (%rsi), %rax + mulq 16(%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[0] * A[3] + movq (%rsi), %rax + mulq 24(%rsi) + xorq %r11, %r11 + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * A[2] + movq 8(%rsi), %rax + mulq 16(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[1] * A[3] + movq 8(%rsi), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + # A[2] * A[3] + movq 16(%rsi), %rax + mulq 24(%rsi) + xorq %r13, %r13 + addq %rax, %r12 + adcq %rdx, %r13 + # Double + xorq %r14, %r14 + addq %r8, %r8 + adcq %r9, %r9 + adcq %r10, %r10 + adcq %r11, %r11 + adcq %r12, %r12 + adcq %r13, %r13 + adcq $0x00, %r14 + # A[0] * A[0] + movq (%rsi), %rax + mulq %rax + movq %rax, %rcx + movq %rdx, %r15 + # A[1] * A[1] + movq 8(%rsi), %rax + mulq %rax + addq %r15, %r8 + adcq %rax, %r9 + adcq $0x00, %rdx + movq %rdx, %r15 + # A[2] * A[2] + movq 16(%rsi), %rax + mulq %rax + addq %r15, %r10 + adcq %rax, %r11 + adcq $0x00, %rdx + movq %rdx, %r15 + # A[3] * A[3] + movq 24(%rsi), %rax + mulq %rax + addq %rax, %r13 + adcq %rdx, %r14 + addq %r15, %r12 + adcq $0x00, %r13 + adcq $0x00, %r14 + # Reduce + movq $0x7fffffffffffffff, %r15 + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + shldq $0x01, %r10, %r11 + andq %r15, %r10 + # Multiply top half by 19 + movq $19, %rax + mulq %r11 + xorq %r11, %r11 + addq %rax, %rcx + movq $19, %rax + adcq %rdx, %r11 + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + # Add remaining produce results in + addq %r11, %r8 + adcq %r12, %r9 + adcq %r13, %r10 + adcq %rax, %r10 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r10, %rdx + imulq $19, %rdx, %rax + andq %r15, %r10 + addq %rax, %rcx + adcq $0x00, %r8 + adcq $0x00, %r9 + adcq 
$0x00, %r10
+        # Reduce if top bit set
+        movq    %r10, %rdx
+        shrq    $63, %rdx
+        imulq   $19, %rdx, %rax
+        andq    %r15, %r10
+        addq    %rax, %rcx
+        adcq    $0x00, %r8
+        adcq    $0x00, %r9
+        adcq    $0x00, %r10
+        # Store
+        movq    %rcx, (%rdi)
+        movq    %r8, 8(%rdi)
+        movq    %r9, 16(%rdi)
+        movq    %r10, 24(%rdi)
+        popq    %r15
+        popq    %r14
+        popq    %r13
+        popq    %r12
+        repz retq
+#ifndef __APPLE__
+.size fe_sq_x64,.-fe_sq_x64
+#endif /* __APPLE__ */
+#ifndef __APPLE__
+.text
+.globl fe_mul121666_x64
+.type fe_mul121666_x64,@function
+.align 4
+fe_mul121666_x64:
+#else
+.section __TEXT,__text
+.globl _fe_mul121666_x64
+.p2align 2
+_fe_mul121666_x64:
+#endif /* __APPLE__ */
+        pushq   %r12
+        # Multiply by 121666 (0x1db42): r (%rdi) = a (%rsi) * 121666 mod 2^255-19
+        movq    $0x1db42, %rax
+        mulq    (%rsi)
+        xorq    %r10, %r10
+        movq    %rax, %r8
+        movq    %rdx, %r9
+        movq    $0x1db42, %rax
+        mulq    8(%rsi)
+        xorq    %r11, %r11
+        addq    %rax, %r9
+        adcq    %rdx, %r10
+        movq    $0x1db42, %rax
+        mulq    16(%rsi)
+        xorq    %r12, %r12
+        addq    %rax, %r10
+        adcq    %rdx, %r11
+        movq    $0x1db42, %rax
+        mulq    24(%rsi)
+        movq    $0x7fffffffffffffff, %rcx
+        addq    %rax, %r11
+        adcq    %rdx, %r12
+        shldq   $0x01, %r11, %r12
+        andq    %rcx, %r11
+        movq    $19, %rax
+        mulq    %r12
+        addq    %rax, %r8
+        adcq    $0x00, %r9
+        adcq    $0x00, %r10
+        adcq    $0x00, %r11
+        movq    %r8, (%rdi)
+        movq    %r9, 8(%rdi)
+        movq    %r10, 16(%rdi)
+        movq    %r11, 24(%rdi)
+        popq    %r12
+        repz retq
+#ifndef __APPLE__
+.size fe_mul121666_x64,.-fe_mul121666_x64
+#endif /* __APPLE__ */
+#ifndef __APPLE__
+.text
+.globl fe_sq2_x64
+.type fe_sq2_x64,@function
+.align 4
+fe_sq2_x64:
+#else
+.section __TEXT,__text
+.globl _fe_sq2_x64
+.p2align 2
+_fe_sq2_x64:
+#endif /* __APPLE__ */
+        pushq   %r12
+        pushq   %r13
+        pushq   %r14
+        pushq   %r15
+        pushq   %rbx
+        # Square * 2: r (%rdi) = 2 * a (%rsi)^2 mod 2^255-19
+        # A[0] * A[1]
+        movq    (%rsi), %rax
+        mulq    8(%rsi)
+        movq    %rax, %r8
+        movq    %rdx, %r9
+        # A[0] * A[2]
+        movq    (%rsi), %rax
+        mulq    16(%rsi)
+        xorq    %r10, %r10
+        addq    %rax, %r9
+        adcq    %rdx, %r10
+        # A[0] * A[3]
+        movq    (%rsi), %rax
+        mulq    24(%rsi)
+        xorq    %r11, %r11
+        addq    %rax, %r10
+        adcq    %rdx, %r11
+        # A[1] * A[2]
+        movq    8(%rsi), %rax
+        mulq    16(%rsi)
+        xorq    %r12, %r12
+        addq    %rax, %r10
+        adcq    %rdx, %r11
+        adcq    $0x00, %r12
+        # A[1] * A[3]
+        movq    8(%rsi), %rax
+        mulq    24(%rsi)
+        addq    %rax, %r11
+        adcq    %rdx, %r12
+        # A[2] * A[3]
+        movq    16(%rsi), %rax
+        mulq    24(%rsi)
+        xorq    %r13, %r13
+        addq    %rax, %r12
+        adcq    %rdx, %r13
+        # Double
+        xorq    %r14, %r14
+        addq    %r8, %r8
+        adcq    %r9, %r9
+        adcq    %r10, %r10
+        adcq    %r11, %r11
+        adcq    %r12, %r12
+        adcq    %r13, %r13
+        adcq    $0x00, %r14
+        # A[0] * A[0]
+        movq    (%rsi), %rax
+        mulq    %rax
+        movq    %rax, %rcx
+        movq    %rdx, %r15
+        # A[1] * A[1]
+        movq    8(%rsi), %rax
+        mulq    %rax
+        addq    %r15, %r8
+        adcq    %rax, %r9
+        adcq    $0x00, %rdx
+        movq    %rdx, %r15
+        # A[2] * A[2]
+        movq    16(%rsi), %rax
+        mulq    %rax
+        addq    %r15, %r10
+        adcq    %rax, %r11
+        adcq    $0x00, %rdx
+        movq    %rdx, %r15
+        # A[3] * A[3]
+        movq    24(%rsi), %rax
+        mulq    %rax
+        addq    %rax, %r13
+        adcq    %rdx, %r14
+        addq    %r15, %r12
+        adcq    $0x00, %r13
+        adcq    $0x00, %r14
+        # Reduce
+        movq    $0x7fffffffffffffff, %rbx
+        xorq    %rax, %rax
+        # Move top half into t4-t7 and remove top bit from t3
+        shldq   $3, %r14, %rax
+        shldq   $2, %r13, %r14
+        shldq   $2, %r12, %r13
+        shldq   $2, %r11, %r12
+        shldq   $2, %r10, %r11
+        shldq   $0x01, %r9, %r10
+        shldq   $0x01, %r8, %r9
+        shldq   $0x01, %rcx, %r8
+        shlq    $0x01, %rcx
+        andq    %rbx, %r10
+        # Two out left, one in right
+        andq    %rbx, %r14
+        # Multiply top bits by 19*19 (0x169 = 361)
+        imulq   $0x169, %rax, %r15
+        # Multiply top half by 19
+        movq    $19, %rax
+        mulq    %r11
+        xorq    %r11, %r11
+        addq    %rax, %rcx
+        movq    $19, %rax
+        adcq    %rdx, %r11
+        mulq    %r12
+        xorq    %r12, %r12
+        addq    %rax, %r8
+        movq    $19, %rax
+        adcq    %rdx, %r12
+        mulq    %r13
+        xorq    %r13, %r13
+        addq    %rax, %r9
+        movq    $19, %rax
+        adcq    %rdx, %r13
+        mulq    %r14
+        # Add remaining products back in
+        addq    %r15, %rcx
+        addq    %r11, %r8
+        adcq    %r12, %r9
+        adcq    %r13, %r10
+        adcq    %rax, %r10
+        adcq    $0x00, %rdx
+        # Overflow: fold bits above 255 back in by multiplying by 19
+        shldq   $0x01, %r10, %rdx
+        imulq   $19, %rdx, %rax
+        andq    %rbx, %r10
+        addq    %rax, %rcx
+        adcq    $0x00, %r8
+        adcq    $0x00, %r9
+        adcq    $0x00, %r10
+        # Reduce if top bit set
+        movq    %r10, %rdx
+        shrq    $63, %rdx
+        imulq   $19, %rdx, %rax
+        andq    %rbx, %r10
+        addq    %rax, %rcx
+        adcq    $0x00, %r8
+        adcq    $0x00, %r9
+        adcq    $0x00, %r10
+        # Store
+        movq    %rcx, (%rdi)
+        movq    %r8, 8(%rdi)
+        movq    %r9, 16(%rdi)
+        movq    %r10, 24(%rdi)
+        popq    %rbx
+        popq    %r15
+        popq    %r14
+        popq    %r13
+        popq    %r12
+        repz retq
+#ifndef __APPLE__
+.size fe_sq2_x64,.-fe_sq2_x64
+#endif /* __APPLE__ */
+#ifndef __APPLE__
+.text
+.globl fe_invert_x64
+.type fe_invert_x64,@function
+.align 4
+fe_invert_x64:
+#else
+.section __TEXT,__text
+.globl _fe_invert_x64
+.p2align 2
+_fe_invert_x64:
+#endif /* __APPLE__ */
+        subq    $0x98, %rsp
+        # Invert: fixed chain of fe_sq_x64/fe_mul_x64 calls; temps at 0/32/64/96(%rsp)
+        movq    %rdi, 128(%rsp)
+        movq    %rsi, 136(%rsp)
+        movq    %rsp, %rdi
+        movq    136(%rsp), %rsi
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        leaq    32(%rsp), %rdi
+        movq    %rsp, %rsi
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        leaq    32(%rsp), %rdi
+        leaq    32(%rsp), %rsi
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        leaq    32(%rsp), %rdi
+        movq    136(%rsp), %rsi
+        leaq    32(%rsp), %rdx
+#ifndef __APPLE__
+        callq   fe_mul_x64@plt
+#else
+        callq   _fe_mul_x64
+#endif /* __APPLE__ */
+        movq    %rsp, %rdi
+        movq    %rsp, %rsi
+        leaq    32(%rsp), %rdx
+#ifndef __APPLE__
+        callq   fe_mul_x64@plt
+#else
+        callq   _fe_mul_x64
+#endif /* __APPLE__ */
+        leaq    64(%rsp), %rdi
+        movq    %rsp, %rsi
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        leaq    32(%rsp), %rdi
+        leaq    32(%rsp), %rsi
+        leaq    64(%rsp), %rdx
+#ifndef __APPLE__
+        callq   fe_mul_x64@plt
+#else
+        callq   _fe_mul_x64
+#endif /* __APPLE__ */
+        leaq    64(%rsp), %rdi
+        leaq    32(%rsp), %rsi
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        movb    $4, 144(%rsp)
+        leaq    64(%rsp), %rdi
+        leaq    64(%rsp), %rsi
+L_fe_invert_x64_1:
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        decb    144(%rsp)
+        jnz     L_fe_invert_x64_1
+        leaq    32(%rsp), %rdi
+        leaq    64(%rsp), %rsi
+        leaq    32(%rsp), %rdx
+#ifndef __APPLE__
+        callq   fe_mul_x64@plt
+#else
+        callq   _fe_mul_x64
+#endif /* __APPLE__ */
+        leaq    64(%rsp), %rdi
+        leaq    32(%rsp), %rsi
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        movb    $9, 144(%rsp)
+        leaq    64(%rsp), %rdi
+        leaq    64(%rsp), %rsi
+L_fe_invert_x64_2:
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        decb    144(%rsp)
+        jnz     L_fe_invert_x64_2
+        leaq    64(%rsp), %rdi
+        leaq    64(%rsp), %rsi
+        leaq    32(%rsp), %rdx
+#ifndef __APPLE__
+        callq   fe_mul_x64@plt
+#else
+        callq   _fe_mul_x64
+#endif /* __APPLE__ */
+        leaq    96(%rsp), %rdi
+        leaq    64(%rsp), %rsi
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        movb    $19, 144(%rsp)
+        leaq    96(%rsp), %rdi
+        leaq    96(%rsp), %rsi
+L_fe_invert_x64_3:
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        decb    144(%rsp)
+        jnz     L_fe_invert_x64_3
+        leaq    64(%rsp), %rdi
+        leaq    96(%rsp), %rsi
+        leaq    64(%rsp), %rdx
+#ifndef __APPLE__
+        callq   fe_mul_x64@plt
+#else
+        callq   _fe_mul_x64
+#endif /* __APPLE__ */
+        leaq    64(%rsp), %rdi
+        leaq    64(%rsp), %rsi
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        movb    $9, 144(%rsp)
+        leaq    64(%rsp), %rdi
+        leaq    64(%rsp), %rsi
+L_fe_invert_x64_4:
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        decb    144(%rsp)
+        jnz     L_fe_invert_x64_4
+        leaq    32(%rsp), %rdi
+        leaq    64(%rsp), %rsi
+        leaq    32(%rsp), %rdx
+#ifndef __APPLE__
+        callq   fe_mul_x64@plt
+#else
+        callq   _fe_mul_x64
+#endif /* __APPLE__ */
+        leaq    64(%rsp), %rdi
+        leaq    32(%rsp), %rsi
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        movb    $49, 144(%rsp)
+        leaq    64(%rsp), %rdi
+        leaq    64(%rsp), %rsi
+L_fe_invert_x64_5:
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        decb    144(%rsp)
+        jnz     L_fe_invert_x64_5
+        leaq    64(%rsp), %rdi
+        leaq    64(%rsp), %rsi
+        leaq    32(%rsp), %rdx
+#ifndef __APPLE__
+        callq   fe_mul_x64@plt
+#else
+        callq   _fe_mul_x64
+#endif /* __APPLE__ */
+        leaq    96(%rsp), %rdi
+        leaq    64(%rsp), %rsi
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        movb    $0x63, 144(%rsp)
+        leaq    96(%rsp), %rdi
+        leaq    96(%rsp), %rsi
+L_fe_invert_x64_6:
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        decb    144(%rsp)
+        jnz     L_fe_invert_x64_6
+        leaq    64(%rsp), %rdi
+        leaq    96(%rsp), %rsi
+        leaq    64(%rsp), %rdx
+#ifndef __APPLE__
+        callq   fe_mul_x64@plt
+#else
+        callq   _fe_mul_x64
+#endif /* __APPLE__ */
+        leaq    64(%rsp), %rdi
+        leaq    64(%rsp), %rsi
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        movb    $49, 144(%rsp)
+        leaq    64(%rsp), %rdi
+        leaq    64(%rsp), %rsi
+L_fe_invert_x64_7:
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        decb    144(%rsp)
+        jnz     L_fe_invert_x64_7
+        leaq    32(%rsp), %rdi
+        leaq    64(%rsp), %rsi
+        leaq    32(%rsp), %rdx
+#ifndef __APPLE__
+        callq   fe_mul_x64@plt
+#else
+        callq   _fe_mul_x64
+#endif /* __APPLE__ */
+        leaq    32(%rsp), %rdi
+        leaq    32(%rsp), %rsi
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        movb    $4, 144(%rsp)
+        leaq    32(%rsp), %rdi
+        leaq    32(%rsp), %rsi
+L_fe_invert_x64_8:
+#ifndef __APPLE__
+        callq   fe_sq_x64@plt
+#else
+        callq   _fe_sq_x64
+#endif /* __APPLE__ */
+        decb    144(%rsp)
+        jnz     L_fe_invert_x64_8
+        movq    128(%rsp), %rdi
+        leaq    32(%rsp), %rsi
+        movq    %rsp, %rdx
+#ifndef __APPLE__
+        callq   fe_mul_x64@plt
+#else
+        callq   _fe_mul_x64
+#endif /* __APPLE__ */
+        movq    136(%rsp), %rsi
+        movq    128(%rsp), %rdi
+        addq    $0x98, %rsp
+        repz retq
+#ifndef __APPLE__
+.text
+.globl curve25519_x64
+.type curve25519_x64,@function
+.align 4
+curve25519_x64:
+#else
+.section __TEXT,__text
+.globl 
_curve25519_x64 +.p2align 2 +_curve25519_x64: +#endif /* __APPLE__ */ + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + pushq %rbx + pushq %rbp + movq %rdx, %r8 + subq $0xb8, %rsp + xorq %rbx, %rbx + movq %rdi, 176(%rsp) + # Set one + movq $0x01, (%rdi) + movq $0x00, 8(%rdi) + movq $0x00, 16(%rdi) + movq $0x00, 24(%rdi) + # Set zero + movq $0x00, (%rsp) + movq $0x00, 8(%rsp) + movq $0x00, 16(%rsp) + movq $0x00, 24(%rsp) + # Set one + movq $0x01, 32(%rsp) + movq $0x00, 40(%rsp) + movq $0x00, 48(%rsp) + movq $0x00, 56(%rsp) + # Copy + movq (%r8), %rcx + movq 8(%r8), %r9 + movq 16(%r8), %r10 + movq 24(%r8), %r11 + movq %rcx, 64(%rsp) + movq %r9, 72(%rsp) + movq %r10, 80(%rsp) + movq %r11, 88(%rsp) + movb $62, 168(%rsp) + movq $3, 160(%rsp) +L_curve25519_x64_words: +L_curve25519_x64_bits: + movq 160(%rsp), %r9 + movb 168(%rsp), %cl + movq (%rsi,%r9,8), %rbp + shrq %cl, %rbp + andq $0x01, %rbp + xorq %rbp, %rbx + negq %rbx + # Conditional Swap + movq (%rdi), %rcx + movq 8(%rdi), %r9 + movq 16(%rdi), %r10 + movq 24(%rdi), %r11 + xorq 64(%rsp), %rcx + xorq 72(%rsp), %r9 + xorq 80(%rsp), %r10 + xorq 88(%rsp), %r11 + andq %rbx, %rcx + andq %rbx, %r9 + andq %rbx, %r10 + andq %rbx, %r11 + xorq %rcx, (%rdi) + xorq %r9, 8(%rdi) + xorq %r10, 16(%rdi) + xorq %r11, 24(%rdi) + xorq %rcx, 64(%rsp) + xorq %r9, 72(%rsp) + xorq %r10, 80(%rsp) + xorq %r11, 88(%rsp) + # Conditional Swap + movq (%rsp), %rcx + movq 8(%rsp), %r9 + movq 16(%rsp), %r10 + movq 24(%rsp), %r11 + xorq 32(%rsp), %rcx + xorq 40(%rsp), %r9 + xorq 48(%rsp), %r10 + xorq 56(%rsp), %r11 + andq %rbx, %rcx + andq %rbx, %r9 + andq %rbx, %r10 + andq %rbx, %r11 + xorq %rcx, (%rsp) + xorq %r9, 8(%rsp) + xorq %r10, 16(%rsp) + xorq %r11, 24(%rsp) + xorq %rcx, 32(%rsp) + xorq %r9, 40(%rsp) + xorq %r10, 48(%rsp) + xorq %r11, 56(%rsp) + movq %rbp, %rbx + # Sub + movq 64(%rsp), %rcx + movq 72(%rsp), %r9 + movq 80(%rsp), %r10 + movq 88(%rsp), %r11 + subq 32(%rsp), %rcx + movq $0x00, %rbp + sbbq 40(%rsp), %r9 + movq $-19, %rax + 
sbbq 48(%rsp), %r10 + movq $0x7fffffffffffffff, %rdx + sbbq 56(%rsp), %r11 + sbbq $0x00, %rbp + # Mask the modulus + andq %rbp, %rax + andq %rbp, %rdx + # Add modulus (if underflow) + addq %rax, %rcx + adcq %rbp, %r9 + adcq %rbp, %r10 + adcq %rdx, %r11 + movq %rcx, 96(%rsp) + movq %r9, 104(%rsp) + movq %r10, 112(%rsp) + movq %r11, 120(%rsp) + # Sub + movq (%rdi), %rcx + movq 8(%rdi), %r9 + movq 16(%rdi), %r10 + movq 24(%rdi), %r11 + subq (%rsp), %rcx + movq $0x00, %rbp + sbbq 8(%rsp), %r9 + movq $-19, %rax + sbbq 16(%rsp), %r10 + movq $0x7fffffffffffffff, %rdx + sbbq 24(%rsp), %r11 + sbbq $0x00, %rbp + # Mask the modulus + andq %rbp, %rax + andq %rbp, %rdx + # Add modulus (if underflow) + addq %rax, %rcx + adcq %rbp, %r9 + adcq %rbp, %r10 + adcq %rdx, %r11 + movq %rcx, 128(%rsp) + movq %r9, 136(%rsp) + movq %r10, 144(%rsp) + movq %r11, 152(%rsp) + # Add + movq (%rdi), %rcx + movq 8(%rdi), %r9 + addq (%rsp), %rcx + movq 16(%rdi), %r10 + adcq 8(%rsp), %r9 + movq 24(%rdi), %rbp + adcq 16(%rsp), %r10 + movq $-19, %rax + adcq 24(%rsp), %rbp + movq $0x7fffffffffffffff, %rdx + movq %rbp, %r11 + sarq $63, %rbp + # Mask the modulus + andq %rbp, %rax + andq %rbp, %rdx + # Sub modulus (if overflow) + subq %rax, %rcx + sbbq %rbp, %r9 + sbbq %rbp, %r10 + sbbq %rdx, %r11 + movq %rcx, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + # Add + movq 64(%rsp), %rcx + movq 72(%rsp), %r9 + addq 32(%rsp), %rcx + movq 80(%rsp), %r10 + adcq 40(%rsp), %r9 + movq 88(%rsp), %rbp + adcq 48(%rsp), %r10 + movq $-19, %rax + adcq 56(%rsp), %rbp + movq $0x7fffffffffffffff, %rdx + movq %rbp, %r11 + sarq $63, %rbp + # Mask the modulus + andq %rbp, %rax + andq %rbp, %rdx + # Sub modulus (if overflow) + subq %rax, %rcx + sbbq %rbp, %r9 + sbbq %rbp, %r10 + sbbq %rdx, %r11 + movq %rcx, (%rsp) + movq %r9, 8(%rsp) + movq %r10, 16(%rsp) + movq %r11, 24(%rsp) + # Multiply + # A[0] * B[0] + movq (%rdi), %rax + mulq 96(%rsp) + movq %rax, %rcx + movq %rdx, %r9 + # A[0] * B[1] + movq 
8(%rdi), %rax + mulq 96(%rsp) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rdi), %rax + mulq 104(%rsp) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rdi), %rax + mulq 96(%rsp) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rdi), %rax + mulq 104(%rsp) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rdi), %rax + mulq 112(%rsp) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rdi), %rax + mulq 96(%rsp) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rdi), %rax + mulq 104(%rsp) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rdi), %rax + mulq 112(%rsp) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rdi), %rax + mulq 120(%rsp) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rdi), %rax + mulq 104(%rsp) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rdi), %rax + mulq 112(%rsp) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rdi), %rax + mulq 120(%rsp) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rdi), %rax + mulq 112(%rsp) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rdi), %rax + mulq 120(%rsp) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rdi), %rax + mulq 120(%rsp) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rbp + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rbp, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %rcx + movq $19, %rax + 
adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rbp, %r11 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rbp, %r11 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %rcx, 32(%rsp) + movq %r9, 40(%rsp) + movq %r10, 48(%rsp) + movq %r11, 56(%rsp) + # Multiply + # A[0] * B[0] + movq 128(%rsp), %rax + mulq (%rsp) + movq %rax, %rcx + movq %rdx, %r9 + # A[0] * B[1] + movq 136(%rsp), %rax + mulq (%rsp) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq 128(%rsp), %rax + mulq 8(%rsp) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 144(%rsp), %rax + mulq (%rsp) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 136(%rsp), %rax + mulq 8(%rsp) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq 128(%rsp), %rax + mulq 16(%rsp) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 152(%rsp), %rax + mulq (%rsp) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 144(%rsp), %rax + mulq 8(%rsp) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 136(%rsp), %rax + mulq 16(%rsp) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq 128(%rsp), %rax + mulq 24(%rsp) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 152(%rsp), %rax + mulq 8(%rsp) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] 
* B[2] + movq 144(%rsp), %rax + mulq 16(%rsp) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 136(%rsp), %rax + mulq 24(%rsp) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 152(%rsp), %rax + mulq 16(%rsp) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 144(%rsp), %rax + mulq 24(%rsp) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 152(%rsp), %rax + mulq 24(%rsp) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rbp + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rbp, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %rcx + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rbp, %r11 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rbp, %r11 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %rcx, (%rsp) + movq %r9, 8(%rsp) + movq %r10, 16(%rsp) + movq %r11, 24(%rsp) + # Square + # A[0] * A[1] + movq 128(%rsp), %rax + mulq 136(%rsp) + movq %rax, %r9 + movq %rdx, %r10 + # A[0] * A[2] + movq 128(%rsp), %rax + mulq 144(%rsp) + xorq %r11, %r11 + addq %rax, %r10 + adcq %rdx, %r11 + # A[0] * A[3] + movq 128(%rsp), %rax + mulq 152(%rsp) + xorq %r12, %r12 + addq %rax, %r11 + adcq %rdx, %r12 + # A[1] * A[2] + movq 136(%rsp), %rax + mulq 144(%rsp) + 
xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * A[3] + movq 136(%rsp), %rax + mulq 152(%rsp) + addq %rax, %r12 + adcq %rdx, %r13 + # A[2] * A[3] + movq 144(%rsp), %rax + mulq 152(%rsp) + xorq %r14, %r14 + addq %rax, %r13 + adcq %rdx, %r14 + # Double + xorq %r15, %r15 + addq %r9, %r9 + adcq %r10, %r10 + adcq %r11, %r11 + adcq %r12, %r12 + adcq %r13, %r13 + adcq %r14, %r14 + adcq $0x00, %r15 + # A[0] * A[0] + movq 128(%rsp), %rax + mulq %rax + movq %rax, %rcx + movq %rdx, %rbp + # A[1] * A[1] + movq 136(%rsp), %rax + mulq %rax + addq %rbp, %r9 + adcq %rax, %r10 + adcq $0x00, %rdx + movq %rdx, %rbp + # A[2] * A[2] + movq 144(%rsp), %rax + mulq %rax + addq %rbp, %r11 + adcq %rax, %r12 + adcq $0x00, %rdx + movq %rdx, %rbp + # A[3] * A[3] + movq 152(%rsp), %rax + mulq %rax + addq %rax, %r14 + adcq %rdx, %r15 + addq %rbp, %r13 + adcq $0x00, %r14 + adcq $0x00, %r15 + # Reduce + movq $0x7fffffffffffffff, %rbp + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rbp, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %rcx + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rbp, %r11 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rbp, %r11 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %rcx, 96(%rsp) + movq %r9, 104(%rsp) + movq %r10, 112(%rsp) + movq %r11, 120(%rsp) + # 
Square + # A[0] * A[1] + movq (%rdi), %rax + mulq 8(%rdi) + movq %rax, %r9 + movq %rdx, %r10 + # A[0] * A[2] + movq (%rdi), %rax + mulq 16(%rdi) + xorq %r11, %r11 + addq %rax, %r10 + adcq %rdx, %r11 + # A[0] * A[3] + movq (%rdi), %rax + mulq 24(%rdi) + xorq %r12, %r12 + addq %rax, %r11 + adcq %rdx, %r12 + # A[1] * A[2] + movq 8(%rdi), %rax + mulq 16(%rdi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * A[3] + movq 8(%rdi), %rax + mulq 24(%rdi) + addq %rax, %r12 + adcq %rdx, %r13 + # A[2] * A[3] + movq 16(%rdi), %rax + mulq 24(%rdi) + xorq %r14, %r14 + addq %rax, %r13 + adcq %rdx, %r14 + # Double + xorq %r15, %r15 + addq %r9, %r9 + adcq %r10, %r10 + adcq %r11, %r11 + adcq %r12, %r12 + adcq %r13, %r13 + adcq %r14, %r14 + adcq $0x00, %r15 + # A[0] * A[0] + movq (%rdi), %rax + mulq %rax + movq %rax, %rcx + movq %rdx, %rbp + # A[1] * A[1] + movq 8(%rdi), %rax + mulq %rax + addq %rbp, %r9 + adcq %rax, %r10 + adcq $0x00, %rdx + movq %rdx, %rbp + # A[2] * A[2] + movq 16(%rdi), %rax + mulq %rax + addq %rbp, %r11 + adcq %rax, %r12 + adcq $0x00, %rdx + movq %rdx, %rbp + # A[3] * A[3] + movq 24(%rdi), %rax + mulq %rax + addq %rax, %r14 + adcq %rdx, %r15 + addq %rbp, %r13 + adcq $0x00, %r14 + adcq $0x00, %r15 + # Reduce + movq $0x7fffffffffffffff, %rbp + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rbp, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %rcx + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rbp, %r11 + addq %rax, 
%rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rbp, %r11 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %rcx, 128(%rsp) + movq %r9, 136(%rsp) + movq %r10, 144(%rsp) + movq %r11, 152(%rsp) + # Add + movq 32(%rsp), %rcx + movq 40(%rsp), %r9 + addq (%rsp), %rcx + movq 48(%rsp), %r10 + adcq 8(%rsp), %r9 + movq 56(%rsp), %rbp + adcq 16(%rsp), %r10 + movq $-19, %rax + adcq 24(%rsp), %rbp + movq $0x7fffffffffffffff, %rdx + movq %rbp, %r11 + sarq $63, %rbp + # Mask the modulus + andq %rbp, %rax + andq %rbp, %rdx + # Sub modulus (if overflow) + subq %rax, %rcx + sbbq %rbp, %r9 + sbbq %rbp, %r10 + sbbq %rdx, %r11 + movq %rcx, 64(%rsp) + movq %r9, 72(%rsp) + movq %r10, 80(%rsp) + movq %r11, 88(%rsp) + # Sub + movq 32(%rsp), %rcx + movq 40(%rsp), %r9 + movq 48(%rsp), %r10 + movq 56(%rsp), %r11 + subq (%rsp), %rcx + movq $0x00, %rbp + sbbq 8(%rsp), %r9 + movq $-19, %rax + sbbq 16(%rsp), %r10 + movq $0x7fffffffffffffff, %rdx + sbbq 24(%rsp), %r11 + sbbq $0x00, %rbp + # Mask the modulus + andq %rbp, %rax + andq %rbp, %rdx + # Add modulus (if underflow) + addq %rax, %rcx + adcq %rbp, %r9 + adcq %rbp, %r10 + adcq %rdx, %r11 + movq %rcx, (%rsp) + movq %r9, 8(%rsp) + movq %r10, 16(%rsp) + movq %r11, 24(%rsp) + # Multiply + # A[0] * B[0] + movq 96(%rsp), %rax + mulq 128(%rsp) + movq %rax, %rcx + movq %rdx, %r9 + # A[0] * B[1] + movq 104(%rsp), %rax + mulq 128(%rsp) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq 96(%rsp), %rax + mulq 136(%rsp) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 112(%rsp), %rax + mulq 128(%rsp) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 104(%rsp), %rax + mulq 136(%rsp) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq 96(%rsp), %rax + mulq 144(%rsp) + addq %rax, %r10 + adcq 
%rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 120(%rsp), %rax + mulq 128(%rsp) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 112(%rsp), %rax + mulq 136(%rsp) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 104(%rsp), %rax + mulq 144(%rsp) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq 96(%rsp), %rax + mulq 152(%rsp) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 120(%rsp), %rax + mulq 136(%rsp) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 112(%rsp), %rax + mulq 144(%rsp) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 104(%rsp), %rax + mulq 152(%rsp) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 120(%rsp), %rax + mulq 144(%rsp) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 112(%rsp), %rax + mulq 152(%rsp) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 120(%rsp), %rax + mulq 152(%rsp) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rbp + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rbp, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %rcx + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rbp, %r11 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if 
top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rbp, %r11 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %rcx, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + # Sub + movq 128(%rsp), %rcx + movq 136(%rsp), %r9 + movq 144(%rsp), %r10 + movq 152(%rsp), %r11 + subq 96(%rsp), %rcx + movq $0x00, %rbp + sbbq 104(%rsp), %r9 + movq $-19, %rax + sbbq 112(%rsp), %r10 + movq $0x7fffffffffffffff, %rdx + sbbq 120(%rsp), %r11 + sbbq $0x00, %rbp + # Mask the modulus + andq %rbp, %rax + andq %rbp, %rdx + # Add modulus (if underflow) + addq %rax, %rcx + adcq %rbp, %r9 + adcq %rbp, %r10 + adcq %rdx, %r11 + movq %rcx, 128(%rsp) + movq %r9, 136(%rsp) + movq %r10, 144(%rsp) + movq %r11, 152(%rsp) + # Square + # A[0] * A[1] + movq (%rsp), %rax + mulq 8(%rsp) + movq %rax, %r9 + movq %rdx, %r10 + # A[0] * A[2] + movq (%rsp), %rax + mulq 16(%rsp) + xorq %r11, %r11 + addq %rax, %r10 + adcq %rdx, %r11 + # A[0] * A[3] + movq (%rsp), %rax + mulq 24(%rsp) + xorq %r12, %r12 + addq %rax, %r11 + adcq %rdx, %r12 + # A[1] * A[2] + movq 8(%rsp), %rax + mulq 16(%rsp) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * A[3] + movq 8(%rsp), %rax + mulq 24(%rsp) + addq %rax, %r12 + adcq %rdx, %r13 + # A[2] * A[3] + movq 16(%rsp), %rax + mulq 24(%rsp) + xorq %r14, %r14 + addq %rax, %r13 + adcq %rdx, %r14 + # Double + xorq %r15, %r15 + addq %r9, %r9 + adcq %r10, %r10 + adcq %r11, %r11 + adcq %r12, %r12 + adcq %r13, %r13 + adcq %r14, %r14 + adcq $0x00, %r15 + # A[0] * A[0] + movq (%rsp), %rax + mulq %rax + movq %rax, %rcx + movq %rdx, %rbp + # A[1] * A[1] + movq 8(%rsp), %rax + mulq %rax + addq %rbp, %r9 + adcq %rax, %r10 + adcq $0x00, %rdx + movq %rdx, %rbp + # A[2] * A[2] + movq 16(%rsp), %rax + mulq %rax + addq %rbp, %r11 + adcq %rax, %r12 + adcq $0x00, %rdx + movq %rdx, %rbp + # A[3] * A[3] + movq 24(%rsp), %rax + mulq %rax + addq %rax, %r14 + adcq %rdx, %r15 + addq %rbp, 
%r13 + adcq $0x00, %r14 + adcq $0x00, %r15 + # Reduce + movq $0x7fffffffffffffff, %rbp + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rbp, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %rcx + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rbp, %r11 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rbp, %r11 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %rcx, (%rsp) + movq %r9, 8(%rsp) + movq %r10, 16(%rsp) + movq %r11, 24(%rsp) + # Multiply by 121666 + movq $0x1db42, %rax + mulq 128(%rsp) + xorq %r10, %r10 + movq %rax, %rcx + movq %rdx, %r9 + movq $0x1db42, %rax + mulq 136(%rsp) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + movq $0x1db42, %rax + mulq 144(%rsp) + xorq %r13, %r13 + addq %rax, %r10 + adcq %rdx, %r11 + movq $0x1db42, %rax + mulq 152(%rsp) + movq $0x7fffffffffffffff, %r12 + addq %rax, %r11 + adcq %rdx, %r13 + shldq $0x01, %r11, %r13 + andq %r12, %r11 + movq $19, %rax + mulq %r13 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + movq %rcx, 32(%rsp) + movq %r9, 40(%rsp) + movq %r10, 48(%rsp) + movq %r11, 56(%rsp) + # Square + # A[0] * A[1] + movq 64(%rsp), %rax + mulq 72(%rsp) + movq %rax, %r9 + movq %rdx, %r10 + # A[0] * A[2] + movq 64(%rsp), %rax + mulq 80(%rsp) + xorq %r11, %r11 + addq %rax, %r10 + adcq %rdx, %r11 + # A[0] * A[3] + movq 
64(%rsp), %rax + mulq 88(%rsp) + xorq %r12, %r12 + addq %rax, %r11 + adcq %rdx, %r12 + # A[1] * A[2] + movq 72(%rsp), %rax + mulq 80(%rsp) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * A[3] + movq 72(%rsp), %rax + mulq 88(%rsp) + addq %rax, %r12 + adcq %rdx, %r13 + # A[2] * A[3] + movq 80(%rsp), %rax + mulq 88(%rsp) + xorq %r14, %r14 + addq %rax, %r13 + adcq %rdx, %r14 + # Double + xorq %r15, %r15 + addq %r9, %r9 + adcq %r10, %r10 + adcq %r11, %r11 + adcq %r12, %r12 + adcq %r13, %r13 + adcq %r14, %r14 + adcq $0x00, %r15 + # A[0] * A[0] + movq 64(%rsp), %rax + mulq %rax + movq %rax, %rcx + movq %rdx, %rbp + # A[1] * A[1] + movq 72(%rsp), %rax + mulq %rax + addq %rbp, %r9 + adcq %rax, %r10 + adcq $0x00, %rdx + movq %rdx, %rbp + # A[2] * A[2] + movq 80(%rsp), %rax + mulq %rax + addq %rbp, %r11 + adcq %rax, %r12 + adcq $0x00, %rdx + movq %rdx, %rbp + # A[3] * A[3] + movq 88(%rsp), %rax + mulq %rax + addq %rax, %r14 + adcq %rdx, %r15 + addq %rbp, %r13 + adcq $0x00, %r14 + adcq $0x00, %r15 + # Reduce + movq $0x7fffffffffffffff, %rbp + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rbp, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %rcx + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rbp, %r11 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rbp, %r11 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, 
%r10 + adcq $0x00, %r11 + # Store + movq %rcx, 64(%rsp) + movq %r9, 72(%rsp) + movq %r10, 80(%rsp) + movq %r11, 88(%rsp) + # Add + movq 96(%rsp), %rcx + movq 104(%rsp), %r9 + addq 32(%rsp), %rcx + movq 112(%rsp), %r10 + adcq 40(%rsp), %r9 + movq 120(%rsp), %rbp + adcq 48(%rsp), %r10 + movq $-19, %rax + adcq 56(%rsp), %rbp + movq $0x7fffffffffffffff, %rdx + movq %rbp, %r11 + sarq $63, %rbp + # Mask the modulus + andq %rbp, %rax + andq %rbp, %rdx + # Sub modulus (if overflow) + subq %rax, %rcx + sbbq %rbp, %r9 + sbbq %rbp, %r10 + sbbq %rdx, %r11 + movq %rcx, 96(%rsp) + movq %r9, 104(%rsp) + movq %r10, 112(%rsp) + movq %r11, 120(%rsp) + # Multiply + # A[0] * B[0] + movq (%rsp), %rax + mulq (%r8) + movq %rax, %rcx + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rsp), %rax + mulq (%r8) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rsp), %rax + mulq 8(%r8) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rsp), %rax + mulq (%r8) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rsp), %rax + mulq 8(%r8) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rsp), %rax + mulq 16(%r8) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rsp), %rax + mulq (%r8) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rsp), %rax + mulq 8(%r8) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rsp), %rax + mulq 16(%r8) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rsp), %rax + mulq 24(%r8) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rsp), %rax + mulq 8(%r8) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rsp), %rax + mulq 16(%r8) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rsp), %rax + mulq 
24(%r8) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rsp), %rax + mulq 16(%r8) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rsp), %rax + mulq 24(%r8) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rsp), %rax + mulq 24(%r8) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rbp + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rbp, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %rcx + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rbp, %r11 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rbp, %r11 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %rcx, 32(%rsp) + movq %r9, 40(%rsp) + movq %r10, 48(%rsp) + movq %r11, 56(%rsp) + # Multiply + # A[0] * B[0] + movq 96(%rsp), %rax + mulq 128(%rsp) + movq %rax, %rcx + movq %rdx, %r9 + # A[0] * B[1] + movq 104(%rsp), %rax + mulq 128(%rsp) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq 96(%rsp), %rax + mulq 136(%rsp) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 112(%rsp), %rax + mulq 128(%rsp) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 104(%rsp), %rax + mulq 136(%rsp) + xorq %r12, %r12 + addq %rax, %r10 + adcq 
%rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq 96(%rsp), %rax + mulq 144(%rsp) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 120(%rsp), %rax + mulq 128(%rsp) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 112(%rsp), %rax + mulq 136(%rsp) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 104(%rsp), %rax + mulq 144(%rsp) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq 96(%rsp), %rax + mulq 152(%rsp) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 120(%rsp), %rax + mulq 136(%rsp) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 112(%rsp), %rax + mulq 144(%rsp) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 104(%rsp), %rax + mulq 152(%rsp) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 120(%rsp), %rax + mulq 144(%rsp) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 112(%rsp), %rax + mulq 152(%rsp) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 120(%rsp), %rax + mulq 152(%rsp) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rbp + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rbp, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %rcx + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, 
%rax + andq %rbp, %r11 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rbp, %r11 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %rcx, (%rsp) + movq %r9, 8(%rsp) + movq %r10, 16(%rsp) + movq %r11, 24(%rsp) + decb 168(%rsp) + jge L_curve25519_x64_bits + movq $63, 168(%rsp) + decb 160(%rsp) + jge L_curve25519_x64_words + # Invert + leaq 32(%rsp), %rdi + movq %rsp, %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + leaq 32(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + movq %rsp, %rsi + leaq 64(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi + leaq 64(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 +#endif /* __APPLE__ */ + leaq 96(%rsp), %rdi + leaq 32(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi + leaq 96(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 +#endif /* __APPLE__ */ + leaq 96(%rsp), %rdi + leaq 64(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + movb $4, 160(%rsp) + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi +L_curve25519_x64_inv_1: +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + decb 160(%rsp) + jnz L_curve25519_x64_inv_1 + leaq 64(%rsp), %rdi + leaq 96(%rsp), %rsi + leaq 64(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 
+#endif /* __APPLE__ */ + leaq 96(%rsp), %rdi + leaq 64(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + movb $9, 160(%rsp) + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi +L_curve25519_x64_inv_2: +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + decb 160(%rsp) + jnz L_curve25519_x64_inv_2 + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi + leaq 64(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 +#endif /* __APPLE__ */ + leaq 128(%rsp), %rdi + leaq 96(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + movb $19, 160(%rsp) + leaq 128(%rsp), %rdi + leaq 128(%rsp), %rsi +L_curve25519_x64_inv_3: +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + decb 160(%rsp) + jnz L_curve25519_x64_inv_3 + leaq 96(%rsp), %rdi + leaq 128(%rsp), %rsi + leaq 96(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 +#endif /* __APPLE__ */ + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + movb $9, 160(%rsp) + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi +L_curve25519_x64_inv_4: +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + decb 160(%rsp) + jnz L_curve25519_x64_inv_4 + leaq 64(%rsp), %rdi + leaq 96(%rsp), %rsi + leaq 64(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 +#endif /* __APPLE__ */ + leaq 96(%rsp), %rdi + leaq 64(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + movb $49, 160(%rsp) + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi +L_curve25519_x64_inv_5: +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + decb 160(%rsp) + jnz L_curve25519_x64_inv_5 + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi 
+ leaq 64(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 +#endif /* __APPLE__ */ + leaq 128(%rsp), %rdi + leaq 96(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + movb $0x63, 160(%rsp) + leaq 128(%rsp), %rdi + leaq 128(%rsp), %rsi +L_curve25519_x64_inv_6: +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + decb 160(%rsp) + jnz L_curve25519_x64_inv_6 + leaq 96(%rsp), %rdi + leaq 128(%rsp), %rsi + leaq 96(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 +#endif /* __APPLE__ */ + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + movb $49, 160(%rsp) + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi +L_curve25519_x64_inv_7: +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + decb 160(%rsp) + jnz L_curve25519_x64_inv_7 + leaq 64(%rsp), %rdi + leaq 96(%rsp), %rsi + leaq 64(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + movb $4, 160(%rsp) + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi +L_curve25519_x64_inv_8: +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + decb 160(%rsp) + jnz L_curve25519_x64_inv_8 + movq %rsp, %rdi + leaq 64(%rsp), %rsi + leaq 32(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 +#endif /* __APPLE__ */ + movq 176(%rsp), %rdi + # Multiply + # A[0] * B[0] + movq (%rsp), %rax + mulq (%rdi) + movq %rax, %rcx + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rsp), %rax + mulq (%rdi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rsp), %rax + mulq 8(%rdi) + xorq %r11, %r11 + addq %rax, %r9 + adcq 
%rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rsp), %rax + mulq (%rdi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rsp), %rax + mulq 8(%rdi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rsp), %rax + mulq 16(%rdi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rsp), %rax + mulq (%rdi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rsp), %rax + mulq 8(%rdi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rsp), %rax + mulq 16(%rdi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rsp), %rax + mulq 24(%rdi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rsp), %rax + mulq 8(%rdi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rsp), %rax + mulq 16(%rdi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rsp), %rax + mulq 24(%rdi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rsp), %rax + mulq 16(%rdi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rsp), %rax + mulq 24(%rdi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rsp), %rax + mulq 24(%rdi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rbp + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rbp, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %rcx + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # 
Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rbp, %r11 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rbp, %r11 + addq %rax, %rcx + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %rcx, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + xorq %rax, %rax + addq $0xb8, %rsp + popq %rbp + popq %rbx + popq %r15 + popq %r14 + popq %r13 + popq %r12 + repz retq +#ifndef __APPLE__ +.size curve25519_x64,.-curve25519_x64 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_pow22523_x64 +.type fe_pow22523_x64,@function +.align 4 +fe_pow22523_x64: +#else +.section __TEXT,__text +.globl _fe_pow22523_x64 +.p2align 2 +_fe_pow22523_x64: +#endif /* __APPLE__ */ + subq $0x78, %rsp + # pow22523 + movq %rdi, 96(%rsp) + movq %rsi, 104(%rsp) + movq %rsp, %rdi + movq 104(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + movq %rsp, %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + movq 104(%rsp), %rsi + leaq 32(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 +#endif /* __APPLE__ */ + movq %rsp, %rdi + movq %rsp, %rsi + leaq 32(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 +#endif /* __APPLE__ */ + movq %rsp, %rdi + movq %rsp, %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + movq %rsp, %rdi + leaq 32(%rsp), %rsi + movq %rsp, %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + 
callq _fe_mul_x64 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + movq %rsp, %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + movb $4, 112(%rsp) + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi +L_fe_pow22523_x64_1: +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + decb 112(%rsp) + jnz L_fe_pow22523_x64_1 + movq %rsp, %rdi + leaq 32(%rsp), %rsi + movq %rsp, %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + movq %rsp, %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + movb $9, 112(%rsp) + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi +L_fe_pow22523_x64_2: +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + decb 112(%rsp) + jnz L_fe_pow22523_x64_2 + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi + movq %rsp, %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + leaq 32(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + movb $19, 112(%rsp) + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi +L_fe_pow22523_x64_3: +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + decb 112(%rsp) + jnz L_fe_pow22523_x64_3 + leaq 32(%rsp), %rdi + leaq 64(%rsp), %rsi + leaq 32(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + movb $9, 112(%rsp) + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi +L_fe_pow22523_x64_4: +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + decb 112(%rsp) + jnz L_fe_pow22523_x64_4 + movq %rsp, %rdi + leaq 32(%rsp), %rsi + movq %rsp, %rdx +#ifndef 
__APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + movq %rsp, %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + movb $49, 112(%rsp) + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi +L_fe_pow22523_x64_5: +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + decb 112(%rsp) + jnz L_fe_pow22523_x64_5 + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi + movq %rsp, %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + leaq 32(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + movb $0x63, 112(%rsp) + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi +L_fe_pow22523_x64_6: +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + decb 112(%rsp) + jnz L_fe_pow22523_x64_6 + leaq 32(%rsp), %rdi + leaq 64(%rsp), %rsi + leaq 32(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + movb $49, 112(%rsp) + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi +L_fe_pow22523_x64_7: +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + decb 112(%rsp) + jnz L_fe_pow22523_x64_7 + movq %rsp, %rdi + leaq 32(%rsp), %rsi + movq %rsp, %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq _fe_mul_x64 +#endif /* __APPLE__ */ + movq %rsp, %rdi + movq %rsp, %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + movq %rsp, %rdi + movq %rsp, %rsi +#ifndef __APPLE__ + callq fe_sq_x64@plt +#else + callq _fe_sq_x64 +#endif /* __APPLE__ */ + movq 96(%rsp), %rdi + movq %rsp, %rsi + movq 104(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_x64@plt +#else + callq 
_fe_mul_x64 +#endif /* __APPLE__ */ + movq 104(%rsp), %rsi + movq 96(%rsp), %rdi + addq $0x78, %rsp + repz retq +#ifndef __APPLE__ +.text +.globl fe_ge_to_p2_x64 +.type fe_ge_to_p2_x64,@function +.align 4 +fe_ge_to_p2_x64: +#else +.section __TEXT,__text +.globl _fe_ge_to_p2_x64 +.p2align 2 +_fe_ge_to_p2_x64: +#endif /* __APPLE__ */ + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + subq $40, %rsp + movq %rsi, (%rsp) + movq %rdx, 8(%rsp) + movq %rcx, 16(%rsp) + movq %r8, 24(%rsp) + movq %r9, 32(%rsp) + movq 16(%rsp), %rsi + movq 88(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 
+ # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq (%rsp), %rdi + movq 24(%rsp), %rsi + movq 32(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, 
%r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + 
adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rdi + movq 32(%rsp), %rsi + movq 88(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, 
%r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + addq $40, %rsp + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbx + repz retq +#ifndef __APPLE__ +.size fe_ge_to_p2_x64,.-fe_ge_to_p2_x64 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_ge_to_p3_x64 +.type fe_ge_to_p3_x64,@function +.align 4 +fe_ge_to_p3_x64: +#else +.section __TEXT,__text +.globl _fe_ge_to_p3_x64 +.p2align 2 +_fe_ge_to_p3_x64: +#endif /* __APPLE__ */ + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + subq $40, %rsp + movq %rsi, (%rsp) + movq %rdx, 8(%rsp) + movq %rcx, 16(%rsp) + movq %r8, 24(%rsp) + movq %r9, 32(%rsp) + movq 24(%rsp), %rsi + movq 96(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 
+ addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq 
$19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq (%rsp), %rdi + movq 32(%rsp), %rsi + movq 88(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 
16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rdi + movq 88(%rsp), %rsi + movq 96(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), 
%rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, 
%r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 16(%rsp), %rdi + movq 24(%rsp), %rsi + movq 32(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] 
+ movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + addq $40, %rsp + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbx + repz retq +#ifndef __APPLE__ +.size fe_ge_to_p3_x64,.-fe_ge_to_p3_x64 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_ge_dbl_x64 +.type fe_ge_dbl_x64,@function +.align 4 +fe_ge_dbl_x64: +#else +.section __TEXT,__text +.globl _fe_ge_dbl_x64 +.p2align 2 +_fe_ge_dbl_x64: +#endif /* __APPLE__ */ + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + subq $0x50, %rsp + movq %rdi, (%rsp) + movq %rsi, 8(%rsp) + movq %rdx, 16(%rsp) + movq %rcx, 24(%rsp) + movq %r8, 32(%rsp) + movq %r9, 40(%rsp) + movq (%rsp), 
%rdi + movq 32(%rsp), %rsi + # Square + # A[0] * A[1] + movq (%rsi), %rax + mulq 8(%rsi) + movq %rax, %r9 + movq %rdx, %r10 + # A[0] * A[2] + movq (%rsi), %rax + mulq 16(%rsi) + xorq %r11, %r11 + addq %rax, %r10 + adcq %rdx, %r11 + # A[0] * A[3] + movq (%rsi), %rax + mulq 24(%rsi) + xorq %r12, %r12 + addq %rax, %r11 + adcq %rdx, %r12 + # A[1] * A[2] + movq 8(%rsi), %rax + mulq 16(%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * A[3] + movq 8(%rsi), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + # A[2] * A[3] + movq 16(%rsi), %rax + mulq 24(%rsi) + xorq %r14, %r14 + addq %rax, %r13 + adcq %rdx, %r14 + # Double + xorq %r15, %r15 + addq %r9, %r9 + adcq %r10, %r10 + adcq %r11, %r11 + adcq %r12, %r12 + adcq %r13, %r13 + adcq %r14, %r14 + adcq $0x00, %r15 + # A[0] * A[0] + movq (%rsi), %rax + mulq %rax + movq %rax, %r8 + movq %rdx, %rcx + # A[1] * A[1] + movq 8(%rsi), %rax + mulq %rax + addq %rcx, %r9 + adcq %rax, %r10 + adcq $0x00, %rdx + movq %rdx, %rcx + # A[2] * A[2] + movq 16(%rsi), %rax + mulq %rax + addq %rcx, %r11 + adcq %rax, %r12 + adcq $0x00, %rdx + movq %rdx, %rcx + # A[3] * A[3] + movq 24(%rsi), %rax + mulq %rax + addq %rax, %r14 + adcq %rdx, %r15 + addq %rcx, %r13 + adcq $0x00, %r14 + adcq $0x00, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + 
andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 16(%rsp), %rdi + movq 40(%rsp), %rsi + # Square + # A[0] * A[1] + movq (%rsi), %rax + mulq 8(%rsi) + movq %rax, %r9 + movq %rdx, %r10 + # A[0] * A[2] + movq (%rsi), %rax + mulq 16(%rsi) + xorq %r11, %r11 + addq %rax, %r10 + adcq %rdx, %r11 + # A[0] * A[3] + movq (%rsi), %rax + mulq 24(%rsi) + xorq %r12, %r12 + addq %rax, %r11 + adcq %rdx, %r12 + # A[1] * A[2] + movq 8(%rsi), %rax + mulq 16(%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * A[3] + movq 8(%rsi), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + # A[2] * A[3] + movq 16(%rsi), %rax + mulq 24(%rsi) + xorq %r14, %r14 + addq %rax, %r13 + adcq %rdx, %r14 + # Double + xorq %r15, %r15 + addq %r9, %r9 + adcq %r10, %r10 + adcq %r11, %r11 + adcq %r12, %r12 + adcq %r13, %r13 + adcq %r14, %r14 + adcq $0x00, %r15 + # A[0] * A[0] + movq (%rsi), %rax + mulq %rax + movq %rax, %r8 + movq %rdx, %rcx + # A[1] * A[1] + movq 8(%rsi), %rax + mulq %rax + addq %rcx, %r9 + adcq %rax, %r10 + adcq $0x00, %rdx + movq %rdx, %rcx + # A[2] * A[2] + movq 16(%rsi), %rax + mulq %rax + addq %rcx, %r11 + adcq %rax, %r12 + adcq $0x00, %rdx + movq %rdx, %rcx + # A[3] * A[3] + movq 24(%rsi), %rax + mulq %rax + addq %rax, %r14 + adcq %rdx, %r15 + addq %rcx, %r13 + adcq $0x00, %r14 + adcq $0x00, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq 
%rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 24(%rsp), %rdi + movq 128(%rsp), %rsi + # Square * 2 + # A[0] * A[1] + movq (%rsi), %rax + mulq 8(%rsi) + movq %rax, %r9 + movq %rdx, %r10 + # A[0] * A[2] + movq (%rsi), %rax + mulq 16(%rsi) + xorq %r11, %r11 + addq %rax, %r10 + adcq %rdx, %r11 + # A[0] * A[3] + movq (%rsi), %rax + mulq 24(%rsi) + xorq %r12, %r12 + addq %rax, %r11 + adcq %rdx, %r12 + # A[1] * A[2] + movq 8(%rsi), %rax + mulq 16(%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * A[3] + movq 8(%rsi), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + # A[2] * A[3] + movq 16(%rsi), %rax + mulq 24(%rsi) + xorq %r14, %r14 + addq %rax, %r13 + adcq %rdx, %r14 + # Double + xorq %r15, %r15 + addq %r9, %r9 + adcq %r10, %r10 + adcq %r11, %r11 + adcq %r12, %r12 + adcq %r13, %r13 + adcq %r14, %r14 + adcq $0x00, %r15 + # A[0] * A[0] + movq (%rsi), %rax + mulq %rax + movq %rax, %r8 + movq %rdx, %rcx + # A[1] * A[1] + movq 8(%rsi), %rax + mulq %rax + addq %rcx, %r9 + adcq %rax, %r10 + adcq $0x00, %rdx + movq %rdx, %rcx + # A[2] * A[2] + movq 16(%rsi), %rax + mulq %rax + addq %rcx, %r11 + adcq %rax, %r12 + adcq $0x00, %rdx + movq %rdx, %rcx + # A[3] * A[3] + movq 24(%rsi), %rax + mulq %rax + addq %rax, %r14 + adcq %rdx, %r15 + addq %rcx, 
%r13 + adcq $0x00, %r14 + adcq $0x00, %r15 + # Reduce + movq $0x7fffffffffffffff, %rbx + xorq %rax, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $3, %r15, %rax + shldq $2, %r14, %r15 + shldq $2, %r13, %r14 + shldq $2, %r12, %r13 + shldq $2, %r11, %r12 + shldq $0x01, %r10, %r11 + shldq $0x01, %r9, %r10 + shldq $0x01, %r8, %r9 + shlq $0x01, %r8 + andq %rbx, %r11 + # Two out left, one in right + andq %rbx, %r15 + # Multiply top bits by 19*19 + imulq $0x169, %rax, %rcx + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %rcx, %r8 + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rbx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rbx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rdi + movq 32(%rsp), %rsi + movq 40(%rsp), %rbx + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq (%rbx), %r8 + movq 16(%rsi), %r10 + adcq 8(%rbx), %r9 + movq 24(%rsi), %rcx + adcq 16(%rbx), %r10 + movq $-19, %rax + adcq 24(%rbx), %rcx + movq $0x7fffffffffffffff, %rdx + movq %rcx, %r11 + sarq $63, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Sub modulus (if overflow) + subq %rax, %r8 + sbbq %rcx, %r9 + sbbq %rcx, %r10 + sbbq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + leaq 48(%rsp), %rdi + movq 8(%rsp), %rsi + # Square + # A[0] * A[1] 
+ movq (%rsi), %rax + mulq 8(%rsi) + movq %rax, %r9 + movq %rdx, %r10 + # A[0] * A[2] + movq (%rsi), %rax + mulq 16(%rsi) + xorq %r11, %r11 + addq %rax, %r10 + adcq %rdx, %r11 + # A[0] * A[3] + movq (%rsi), %rax + mulq 24(%rsi) + xorq %r12, %r12 + addq %rax, %r11 + adcq %rdx, %r12 + # A[1] * A[2] + movq 8(%rsi), %rax + mulq 16(%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * A[3] + movq 8(%rsi), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + # A[2] * A[3] + movq 16(%rsi), %rax + mulq 24(%rsi) + xorq %r14, %r14 + addq %rax, %r13 + adcq %rdx, %r14 + # Double + xorq %r15, %r15 + addq %r9, %r9 + adcq %r10, %r10 + adcq %r11, %r11 + adcq %r12, %r12 + adcq %r13, %r13 + adcq %r14, %r14 + adcq $0x00, %r15 + # A[0] * A[0] + movq (%rsi), %rax + mulq %rax + movq %rax, %r8 + movq %rdx, %rcx + # A[1] * A[1] + movq 8(%rsi), %rax + mulq %rax + addq %rcx, %r9 + adcq %rax, %r10 + adcq $0x00, %rdx + movq %rdx, %rcx + # A[2] * A[2] + movq 16(%rsi), %rax + mulq %rax + addq %rcx, %r11 + adcq %rax, %r12 + adcq $0x00, %rdx + movq %rdx, %rcx + # A[3] * A[3] + movq 24(%rsi), %rax + mulq %rax + addq %rax, %r14 + adcq %rdx, %r15 + addq %rcx, %r13 + adcq $0x00, %r14 + adcq $0x00, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + 
adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rdi + movq 16(%rsp), %rsi + movq (%rsp), %rbx + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq (%rbx), %r8 + movq 16(%rsi), %r10 + adcq 8(%rbx), %r9 + movq 24(%rsi), %rcx + adcq 16(%rbx), %r10 + movq $-19, %rax + adcq 24(%rbx), %rcx + movq $0x7fffffffffffffff, %rdx + movq %rcx, %r11 + sarq $63, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Sub modulus (if overflow) + subq %rax, %r8 + sbbq %rcx, %r9 + sbbq %rcx, %r10 + sbbq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 16(%rsp), %rdi + movq 16(%rsp), %rsi + movq (%rsp), %rbx + # Sub + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %r11 + subq (%rbx), %r8 + movq $0x00, %rcx + sbbq 8(%rbx), %r9 + movq $-19, %rax + sbbq 16(%rbx), %r10 + movq $0x7fffffffffffffff, %rdx + sbbq 24(%rbx), %r11 + sbbq $0x00, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Add modulus (if underflow) + addq %rax, %r8 + adcq %rcx, %r9 + adcq %rcx, %r10 + adcq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq (%rsp), %rdi + leaq 48(%rsp), %rsi + movq 8(%rsp), %rbx + # Sub + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %r11 + subq (%rbx), %r8 + movq $0x00, %rcx + sbbq 8(%rbx), %r9 + movq $-19, %rax + sbbq 16(%rbx), %r10 + movq $0x7fffffffffffffff, %rdx + sbbq 24(%rbx), %r11 + sbbq $0x00, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Add modulus (if underflow) + addq %rax, %r8 + adcq %rcx, %r9 + adcq %rcx, %r10 + adcq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 
24(%rdi) + movq 24(%rsp), %rdi + movq 24(%rsp), %rsi + movq 16(%rsp), %rbx + # Sub + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %r11 + subq (%rbx), %r8 + movq $0x00, %rcx + sbbq 8(%rbx), %r9 + movq $-19, %rax + sbbq 16(%rbx), %r10 + movq $0x7fffffffffffffff, %rdx + sbbq 24(%rbx), %r11 + sbbq $0x00, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Add modulus (if underflow) + addq %rax, %r8 + adcq %rcx, %r9 + adcq %rcx, %r10 + adcq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + addq $0x50, %rsp + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbx + repz retq +#ifndef __APPLE__ +.size fe_ge_dbl_x64,.-fe_ge_dbl_x64 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_ge_madd_x64 +.type fe_ge_madd_x64,@function +.align 4 +fe_ge_madd_x64: +#else +.section __TEXT,__text +.globl _fe_ge_madd_x64 +.p2align 2 +_fe_ge_madd_x64: +#endif /* __APPLE__ */ + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + subq $0x50, %rsp + movq %rdi, (%rsp) + movq %rsi, 8(%rsp) + movq %rdx, 16(%rsp) + movq %rcx, 24(%rsp) + movq %r8, 32(%rsp) + movq %r9, 40(%rsp) + movq (%rsp), %rdi + movq 40(%rsp), %rsi + movq 32(%rsp), %rbx + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq (%rbx), %r8 + movq 16(%rsi), %r10 + adcq 8(%rbx), %r9 + movq 24(%rsi), %rcx + adcq 16(%rbx), %r10 + movq $-19, %rax + adcq 24(%rbx), %rcx + movq $0x7fffffffffffffff, %rdx + movq %rcx, %r11 + sarq $63, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Sub modulus (if overflow) + subq %rax, %r8 + sbbq %rcx, %r9 + sbbq %rcx, %r10 + sbbq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rdi + movq 40(%rsp), %rsi + movq 32(%rsp), %rbx + # Sub + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %r11 + subq (%rbx), %r8 + movq $0x00, %rcx + sbbq 8(%rbx), %r9 + movq $-19, %rax + sbbq 16(%rbx), %r10 + 
movq $0x7fffffffffffffff, %rdx + sbbq 24(%rbx), %r11 + sbbq $0x00, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Add modulus (if underflow) + addq %rax, %r8 + adcq %rcx, %r9 + adcq %rcx, %r10 + adcq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 16(%rsp), %rdi + movq (%rsp), %rsi + movq 152(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + 
addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rdi + movq 8(%rsp), %rsi + movq 160(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + 
addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq 
$0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 24(%rsp), %rdi + movq 144(%rsp), %rsi + movq 136(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + 
# Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + leaq 48(%rsp), %rdi + movq 128(%rsp), %rsi + movq 128(%rsp), %rbx + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq (%rbx), %r8 + movq 16(%rsi), %r10 + adcq 8(%rbx), %r9 + movq 24(%rsi), %rcx + adcq 16(%rbx), %r10 + movq $-19, %rax + adcq 24(%rbx), %rcx + movq $0x7fffffffffffffff, %rdx + movq %rcx, %r11 + sarq $63, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Sub modulus (if overflow) + subq %rax, %r8 + sbbq %rcx, %r9 + sbbq %rcx, %r10 + sbbq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq (%rsp), %rdi + movq 16(%rsp), %rsi + movq 8(%rsp), %rbx + # Sub + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %r11 + subq (%rbx), %r8 + movq $0x00, %rcx + sbbq 8(%rbx), %r9 + movq $-19, %rax + sbbq 16(%rbx), %r10 + movq $0x7fffffffffffffff, %rdx + sbbq 24(%rbx), %r11 + sbbq $0x00, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Add modulus 
(if underflow) + addq %rax, %r8 + adcq %rcx, %r9 + adcq %rcx, %r10 + adcq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rdi + movq 16(%rsp), %rsi + movq 8(%rsp), %rbx + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq (%rbx), %r8 + movq 16(%rsi), %r10 + adcq 8(%rbx), %r9 + movq 24(%rsi), %rcx + adcq 16(%rbx), %r10 + movq $-19, %rax + adcq 24(%rbx), %rcx + movq $0x7fffffffffffffff, %rdx + movq %rcx, %r11 + sarq $63, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Sub modulus (if overflow) + subq %rax, %r8 + sbbq %rcx, %r9 + sbbq %rcx, %r10 + sbbq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 16(%rsp), %rdi + leaq 48(%rsp), %rsi + movq 24(%rsp), %rbx + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq (%rbx), %r8 + movq 16(%rsi), %r10 + adcq 8(%rbx), %r9 + movq 24(%rsi), %rcx + adcq 16(%rbx), %r10 + movq $-19, %rax + adcq 24(%rbx), %rcx + movq $0x7fffffffffffffff, %rdx + movq %rcx, %r11 + sarq $63, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Sub modulus (if overflow) + subq %rax, %r8 + sbbq %rcx, %r9 + sbbq %rcx, %r10 + sbbq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 24(%rsp), %rdi + leaq 48(%rsp), %rsi + movq 24(%rsp), %rbx + # Sub + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %r11 + subq (%rbx), %r8 + movq $0x00, %rcx + sbbq 8(%rbx), %r9 + movq $-19, %rax + sbbq 16(%rbx), %r10 + movq $0x7fffffffffffffff, %rdx + sbbq 24(%rbx), %r11 + sbbq $0x00, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Add modulus (if underflow) + addq %rax, %r8 + adcq %rcx, %r9 + adcq %rcx, %r10 + adcq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + addq $0x50, %rsp + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbx + repz retq +#ifndef __APPLE__ +.size 
fe_ge_madd_x64,.-fe_ge_madd_x64 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_ge_msub_x64 +.type fe_ge_msub_x64,@function +.align 4 +fe_ge_msub_x64: +#else +.section __TEXT,__text +.globl _fe_ge_msub_x64 +.p2align 2 +_fe_ge_msub_x64: +#endif /* __APPLE__ */ + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + subq $0x50, %rsp + movq %rdi, (%rsp) + movq %rsi, 8(%rsp) + movq %rdx, 16(%rsp) + movq %rcx, 24(%rsp) + movq %r8, 32(%rsp) + movq %r9, 40(%rsp) + movq (%rsp), %rdi + movq 40(%rsp), %rsi + movq 32(%rsp), %rbx + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq (%rbx), %r8 + movq 16(%rsi), %r10 + adcq 8(%rbx), %r9 + movq 24(%rsi), %rcx + adcq 16(%rbx), %r10 + movq $-19, %rax + adcq 24(%rbx), %rcx + movq $0x7fffffffffffffff, %rdx + movq %rcx, %r11 + sarq $63, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Sub modulus (if overflow) + subq %rax, %r8 + sbbq %rcx, %r9 + sbbq %rcx, %r10 + sbbq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rdi + movq 40(%rsp), %rsi + movq 32(%rsp), %rbx + # Sub + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %r11 + subq (%rbx), %r8 + movq $0x00, %rcx + sbbq 8(%rbx), %r9 + movq $-19, %rax + sbbq 16(%rbx), %r10 + movq $0x7fffffffffffffff, %rdx + sbbq 24(%rbx), %r11 + sbbq $0x00, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Add modulus (if underflow) + addq %rax, %r8 + adcq %rcx, %r9 + adcq %rcx, %r10 + adcq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 16(%rsp), %rdi + movq (%rsp), %rsi + movq 160(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq 
$0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining 
produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rdi + movq 8(%rsp), %rsi + movq 152(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + 
adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 24(%rsp), %rdi + movq 144(%rsp), %rsi + movq 136(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, 
%r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 
+ addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + leaq 48(%rsp), %rdi + movq 128(%rsp), %rsi + movq 128(%rsp), %rbx + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq (%rbx), %r8 + movq 16(%rsi), %r10 + adcq 8(%rbx), %r9 + movq 24(%rsi), %rcx + adcq 16(%rbx), %r10 + movq $-19, %rax + adcq 24(%rbx), %rcx + movq $0x7fffffffffffffff, %rdx + movq %rcx, %r11 + sarq $63, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Sub modulus (if overflow) + subq %rax, %r8 + sbbq %rcx, %r9 + sbbq %rcx, %r10 + sbbq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq (%rsp), %rdi + movq 16(%rsp), %rsi + movq 8(%rsp), %rbx + # Sub + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %r11 + subq (%rbx), %r8 + movq $0x00, %rcx + sbbq 8(%rbx), %r9 + movq $-19, %rax + sbbq 16(%rbx), %r10 + movq $0x7fffffffffffffff, %rdx + sbbq 24(%rbx), %r11 + sbbq $0x00, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Add modulus (if underflow) + addq %rax, %r8 + adcq %rcx, %r9 + adcq %rcx, %r10 + adcq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rdi + movq 16(%rsp), %rsi + movq 8(%rsp), %rbx + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq (%rbx), %r8 + movq 16(%rsi), %r10 + adcq 8(%rbx), %r9 + movq 24(%rsi), %rcx + adcq 16(%rbx), %r10 + movq $-19, %rax + adcq 24(%rbx), %rcx + movq $0x7fffffffffffffff, %rdx + movq %rcx, %r11 + sarq $63, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Sub modulus (if overflow) + subq %rax, %r8 + sbbq %rcx, %r9 + sbbq %rcx, %r10 + sbbq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + 
movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 16(%rsp), %rdi + leaq 48(%rsp), %rsi + movq 24(%rsp), %rbx + # Sub + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %r11 + subq (%rbx), %r8 + movq $0x00, %rcx + sbbq 8(%rbx), %r9 + movq $-19, %rax + sbbq 16(%rbx), %r10 + movq $0x7fffffffffffffff, %rdx + sbbq 24(%rbx), %r11 + sbbq $0x00, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Add modulus (if underflow) + addq %rax, %r8 + adcq %rcx, %r9 + adcq %rcx, %r10 + adcq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 24(%rsp), %rdi + leaq 48(%rsp), %rsi + movq 24(%rsp), %rbx + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq (%rbx), %r8 + movq 16(%rsi), %r10 + adcq 8(%rbx), %r9 + movq 24(%rsi), %rcx + adcq 16(%rbx), %r10 + movq $-19, %rax + adcq 24(%rbx), %rcx + movq $0x7fffffffffffffff, %rdx + movq %rcx, %r11 + sarq $63, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Sub modulus (if overflow) + subq %rax, %r8 + sbbq %rcx, %r9 + sbbq %rcx, %r10 + sbbq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + addq $0x50, %rsp + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbx + repz retq +#ifndef __APPLE__ +.size fe_ge_msub_x64,.-fe_ge_msub_x64 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_ge_add_x64 +.type fe_ge_add_x64,@function +.align 4 +fe_ge_add_x64: +#else +.section __TEXT,__text +.globl _fe_ge_add_x64 +.p2align 2 +_fe_ge_add_x64: +#endif /* __APPLE__ */ + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + subq $0x50, %rsp + movq %rdi, (%rsp) + movq %rsi, 8(%rsp) + movq %rdx, 16(%rsp) + movq %rcx, 24(%rsp) + movq %r8, 32(%rsp) + movq %r9, 40(%rsp) + movq (%rsp), %rdi + movq 40(%rsp), %rsi + movq 32(%rsp), %rbx + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq (%rbx), %r8 + movq 16(%rsi), %r10 + adcq 8(%rbx), %r9 + movq 24(%rsi), %rcx + adcq 16(%rbx), %r10 + movq 
$-19, %rax + adcq 24(%rbx), %rcx + movq $0x7fffffffffffffff, %rdx + movq %rcx, %r11 + sarq $63, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Sub modulus (if overflow) + subq %rax, %r8 + sbbq %rcx, %r9 + sbbq %rcx, %r10 + sbbq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rdi + movq 40(%rsp), %rsi + movq 32(%rsp), %rbx + # Sub + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %r11 + subq (%rbx), %r8 + movq $0x00, %rcx + sbbq 8(%rbx), %r9 + movq $-19, %rax + sbbq 16(%rbx), %r10 + movq $0x7fffffffffffffff, %rdx + sbbq 24(%rbx), %r11 + sbbq $0x00, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Add modulus (if underflow) + addq %rax, %r8 + adcq %rcx, %r9 + adcq %rcx, %r10 + adcq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 16(%rsp), %rdi + movq (%rsp), %rsi + movq 160(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + 
mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rdi + movq 8(%rsp), %rsi + movq 168(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 
8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq 
%r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 24(%rsp), %rdi + movq 152(%rsp), %rsi + movq 136(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq 
$0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq (%rsp), %rdi + movq 128(%rsp), %rsi + movq 144(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, 
%r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add 
remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + leaq 48(%rsp), %rdi + movq (%rsp), %rsi + movq (%rsp), %rbx + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq (%rbx), %r8 + movq 16(%rsi), %r10 + adcq 8(%rbx), %r9 + movq 24(%rsi), %rcx + adcq 16(%rbx), %r10 + movq $-19, %rax + adcq 24(%rbx), %rcx + movq $0x7fffffffffffffff, %rdx + movq %rcx, %r11 + sarq $63, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Sub modulus (if overflow) + subq %rax, %r8 + sbbq %rcx, %r9 + sbbq %rcx, %r10 + sbbq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq (%rsp), %rdi + movq 16(%rsp), %rsi + movq 8(%rsp), %rbx + # Sub + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %r11 + subq (%rbx), %r8 + movq $0x00, %rcx + sbbq 8(%rbx), %r9 + movq $-19, %rax + sbbq 16(%rbx), %r10 + movq $0x7fffffffffffffff, %rdx + sbbq 24(%rbx), %r11 + sbbq $0x00, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Add modulus (if underflow) + addq %rax, %r8 + adcq %rcx, %r9 + adcq %rcx, %r10 + adcq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rdi + movq 16(%rsp), %rsi + movq 8(%rsp), %rbx + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq (%rbx), %r8 + movq 16(%rsi), %r10 + adcq 8(%rbx), %r9 + movq 24(%rsi), %rcx + adcq 16(%rbx), %r10 + movq $-19, %rax + adcq 24(%rbx), %rcx + movq $0x7fffffffffffffff, %rdx + movq %rcx, %r11 + sarq $63, %rcx + # 
Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Sub modulus (if overflow) + subq %rax, %r8 + sbbq %rcx, %r9 + sbbq %rcx, %r10 + sbbq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 16(%rsp), %rdi + leaq 48(%rsp), %rsi + movq 24(%rsp), %rbx + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq (%rbx), %r8 + movq 16(%rsi), %r10 + adcq 8(%rbx), %r9 + movq 24(%rsi), %rcx + adcq 16(%rbx), %r10 + movq $-19, %rax + adcq 24(%rbx), %rcx + movq $0x7fffffffffffffff, %rdx + movq %rcx, %r11 + sarq $63, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Sub modulus (if overflow) + subq %rax, %r8 + sbbq %rcx, %r9 + sbbq %rcx, %r10 + sbbq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 24(%rsp), %rdi + leaq 48(%rsp), %rsi + movq 24(%rsp), %rbx + # Sub + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %r11 + subq (%rbx), %r8 + movq $0x00, %rcx + sbbq 8(%rbx), %r9 + movq $-19, %rax + sbbq 16(%rbx), %r10 + movq $0x7fffffffffffffff, %rdx + sbbq 24(%rbx), %r11 + sbbq $0x00, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Add modulus (if underflow) + addq %rax, %r8 + adcq %rcx, %r9 + adcq %rcx, %r10 + adcq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + addq $0x50, %rsp + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbx + repz retq +#ifndef __APPLE__ +.size fe_ge_add_x64,.-fe_ge_add_x64 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_ge_sub_x64 +.type fe_ge_sub_x64,@function +.align 4 +fe_ge_sub_x64: +#else +.section __TEXT,__text +.globl _fe_ge_sub_x64 +.p2align 2 +_fe_ge_sub_x64: +#endif /* __APPLE__ */ + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + subq $0x50, %rsp + movq %rdi, (%rsp) + movq %rsi, 8(%rsp) + movq %rdx, 16(%rsp) + movq %rcx, 24(%rsp) + movq %r8, 32(%rsp) + movq %r9, 40(%rsp) + movq (%rsp), %rdi + movq 
40(%rsp), %rsi + movq 32(%rsp), %rbx + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq (%rbx), %r8 + movq 16(%rsi), %r10 + adcq 8(%rbx), %r9 + movq 24(%rsi), %rcx + adcq 16(%rbx), %r10 + movq $-19, %rax + adcq 24(%rbx), %rcx + movq $0x7fffffffffffffff, %rdx + movq %rcx, %r11 + sarq $63, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Sub modulus (if overflow) + subq %rax, %r8 + sbbq %rcx, %r9 + sbbq %rcx, %r10 + sbbq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rdi + movq 40(%rsp), %rsi + movq 32(%rsp), %rbx + # Sub + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %r11 + subq (%rbx), %r8 + movq $0x00, %rcx + sbbq 8(%rbx), %r9 + movq $-19, %rax + sbbq 16(%rbx), %r10 + movq $0x7fffffffffffffff, %rdx + sbbq 24(%rbx), %r11 + sbbq $0x00, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Add modulus (if underflow) + addq %rax, %r8 + adcq %rcx, %r9 + adcq %rcx, %r10 + adcq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 16(%rsp), %rdi + movq (%rsp), %rsi + movq 168(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq 
%rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 
24(%rdi) + movq 8(%rsp), %rdi + movq 8(%rsp), %rsi + movq 160(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq 
$0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 24(%rsp), %rdi + movq 152(%rsp), %rsi + movq 136(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] + movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), 
%rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq (%rsp), %rdi + movq 128(%rsp), %rsi + movq 144(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rax + mulq (%rsi) + movq %rax, %r8 + movq %rdx, %r9 + # A[0] * B[1] 
+ movq 8(%rbx), %rax + mulq (%rsi) + xorq %r10, %r10 + addq %rax, %r9 + adcq %rdx, %r10 + # A[1] * B[0] + movq (%rbx), %rax + mulq 8(%rsi) + xorq %r11, %r11 + addq %rax, %r9 + adcq %rdx, %r10 + adcq $0x00, %r11 + # A[0] * B[2] + movq 16(%rbx), %rax + mulq (%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + # A[1] * B[1] + movq 8(%rbx), %rax + mulq 8(%rsi) + xorq %r12, %r12 + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[2] * B[0] + movq (%rbx), %rax + mulq 16(%rsi) + addq %rax, %r10 + adcq %rdx, %r11 + adcq $0x00, %r12 + # A[0] * B[3] + movq 24(%rbx), %rax + mulq (%rsi) + xorq %r13, %r13 + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[2] + movq 16(%rbx), %rax + mulq 8(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[2] * B[1] + movq 8(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[3] * B[0] + movq (%rbx), %rax + mulq 24(%rsi) + addq %rax, %r11 + adcq %rdx, %r12 + adcq $0x00, %r13 + # A[1] * B[3] + movq 24(%rbx), %rax + mulq 8(%rsi) + xorq %r14, %r14 + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[2] + movq 16(%rbx), %rax + mulq 16(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[3] * B[1] + movq 8(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r12 + adcq %rdx, %r13 + adcq $0x00, %r14 + # A[2] * B[3] + movq 24(%rbx), %rax + mulq 16(%rsi) + xorq %r15, %r15 + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[2] + movq 16(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r13 + adcq %rdx, %r14 + adcq $0x00, %r15 + # A[3] * B[3] + movq 24(%rbx), %rax + mulq 24(%rsi) + addq %rax, %r14 + adcq %rdx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rax + mulq %r12 + xorq %r12, %r12 + addq %rax, %r8 + movq $19, %rax + adcq %rdx, %r12 + 
mulq %r13 + xorq %r13, %r13 + addq %rax, %r9 + movq $19, %rax + adcq %rdx, %r13 + mulq %r14 + xorq %r14, %r14 + addq %rax, %r10 + movq $19, %rax + adcq %rdx, %r14 + mulq %r15 + # Add remaining produce results in + addq %r12, %r9 + adcq %r13, %r10 + adcq %r14, %r11 + adcq %rax, %r11 + adcq $0x00, %rdx + # Overflow + shldq $0x01, %r11, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + leaq 48(%rsp), %rdi + movq (%rsp), %rsi + movq (%rsp), %rbx + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq (%rbx), %r8 + movq 16(%rsi), %r10 + adcq 8(%rbx), %r9 + movq 24(%rsi), %rcx + adcq 16(%rbx), %r10 + movq $-19, %rax + adcq 24(%rbx), %rcx + movq $0x7fffffffffffffff, %rdx + movq %rcx, %r11 + sarq $63, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Sub modulus (if overflow) + subq %rax, %r8 + sbbq %rcx, %r9 + sbbq %rcx, %r10 + sbbq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq (%rsp), %rdi + movq 16(%rsp), %rsi + movq 8(%rsp), %rbx + # Sub + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %r11 + subq (%rbx), %r8 + movq $0x00, %rcx + sbbq 8(%rbx), %r9 + movq $-19, %rax + sbbq 16(%rbx), %r10 + movq $0x7fffffffffffffff, %rdx + sbbq 24(%rbx), %r11 + sbbq $0x00, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Add modulus (if underflow) + addq %rax, %r8 + adcq %rcx, %r9 + adcq %rcx, %r10 + adcq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rdi + movq 16(%rsp), %rsi + movq 8(%rsp), %rbx + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq (%rbx), %r8 + movq 
16(%rsi), %r10 + adcq 8(%rbx), %r9 + movq 24(%rsi), %rcx + adcq 16(%rbx), %r10 + movq $-19, %rax + adcq 24(%rbx), %rcx + movq $0x7fffffffffffffff, %rdx + movq %rcx, %r11 + sarq $63, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Sub modulus (if overflow) + subq %rax, %r8 + sbbq %rcx, %r9 + sbbq %rcx, %r10 + sbbq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 16(%rsp), %rdi + leaq 48(%rsp), %rsi + movq 24(%rsp), %rbx + # Sub + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %r11 + subq (%rbx), %r8 + movq $0x00, %rcx + sbbq 8(%rbx), %r9 + movq $-19, %rax + sbbq 16(%rbx), %r10 + movq $0x7fffffffffffffff, %rdx + sbbq 24(%rbx), %r11 + sbbq $0x00, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Add modulus (if underflow) + addq %rax, %r8 + adcq %rcx, %r9 + adcq %rcx, %r10 + adcq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 24(%rsp), %rdi + leaq 48(%rsp), %rsi + movq 24(%rsp), %rbx + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq (%rbx), %r8 + movq 16(%rsi), %r10 + adcq 8(%rbx), %r9 + movq 24(%rsi), %rcx + adcq 16(%rbx), %r10 + movq $-19, %rax + adcq 24(%rbx), %rcx + movq $0x7fffffffffffffff, %rdx + movq %rcx, %r11 + sarq $63, %rcx + # Mask the modulus + andq %rcx, %rax + andq %rcx, %rdx + # Sub modulus (if overflow) + subq %rax, %r8 + sbbq %rcx, %r9 + sbbq %rcx, %r10 + sbbq %rdx, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + addq $0x50, %rsp + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbx + repz retq +#ifndef __APPLE__ +.size fe_ge_sub_x64,.-fe_ge_sub_x64 +#endif /* __APPLE__ */ +#ifdef HAVE_INTEL_AVX2 +#ifndef __APPLE__ +.text +.globl fe_mul_avx2 +.type fe_mul_avx2,@function +.align 4 +fe_mul_avx2: +#else +.section __TEXT,__text +.globl _fe_mul_avx2 +.p2align 2 +_fe_mul_avx2: +#endif /* __APPLE__ */ + pushq %r12 + pushq %r13 + pushq %r14 
+ pushq %r15 + pushq %rbx + movq %rdx, %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rdx + mulxq (%rsi), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rsi), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rsi), %rax, %rcx + xorq %r15, %r15 + adcxq %rax, %r9 + # A[1] * B[3] + movq 24(%rbx), %rdx + mulxq 8(%rsi), %r12, %r13 + adcxq %rcx, %r10 + # A[0] * B[1] + movq 8(%rbx), %rdx + mulxq (%rsi), %rax, %rcx + adoxq %rax, %r9 + # A[2] * B[1] + mulxq 16(%rsi), %rax, %r14 + adoxq %rcx, %r10 + adcxq %rax, %r11 + # A[1] * B[2] + movq 16(%rbx), %rdx + mulxq 8(%rsi), %rax, %rcx + adcxq %r14, %r12 + adoxq %rax, %r11 + adcxq %r15, %r13 + adoxq %rcx, %r12 + # A[0] * B[2] + mulxq (%rsi), %rax, %rcx + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rax, %r10 + # A[1] * B[1] + movq 8(%rbx), %rdx + mulxq 8(%rsi), %rdx, %rax + adcxq %rcx, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rbx), %rdx + adoxq %rax, %r11 + mulxq 24(%rsi), %rax, %rcx + adcxq %rax, %r12 + # A[2] * B[2] + movq 16(%rbx), %rdx + mulxq 16(%rsi), %rdx, %rax + adcxq %rcx, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rbx), %rdx + adoxq %rax, %r13 + mulxq 24(%rsi), %rax, %rcx + adoxq %r15, %r14 + adcxq %rax, %r14 + # A[0] * B[3] + mulxq (%rsi), %rdx, %rax + adcxq %rcx, %r15 + xorq %rcx, %rcx + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rbx), %rdx + adcxq %rax, %r12 + mulxq 24(%rsi), %rdx, %rax + adoxq %rdx, %r11 + adoxq %rax, %r12 + # A[2] * B[3] + movq 24(%rbx), %rdx + mulxq 16(%rsi), %rdx, %rax + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rbx), %rdx + adcxq %rax, %r14 + mulxq 24(%rsi), %rax, %rdx + adcxq %rcx, %r15 + adoxq %rax, %r13 + adoxq %rdx, %r14 + adoxq %rcx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rcx, %rcx + mulxq %r12, %rax, %r12 + adcxq %rax, %r8 + adoxq %r12, %r9 + mulxq %r13, 
%rax, %r13 + adcxq %rax, %r9 + adoxq %r13, %r10 + mulxq %r14, %rax, %r14 + adcxq %rax, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rcx, %rdx + adcxq %rcx, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rcx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + popq %rbx + popq %r15 + popq %r14 + popq %r13 + popq %r12 + repz retq +#ifndef __APPLE__ +.size fe_mul_avx2,.-fe_mul_avx2 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_sq_avx2 +.type fe_sq_avx2,@function +.align 4 +fe_sq_avx2: +#else +.section __TEXT,__text +.globl _fe_sq_avx2 +.p2align 2 +_fe_sq_avx2: +#endif /* __APPLE__ */ + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + # Square + # A[0] * A[1] + movq (%rsi), %rdx + mulxq 8(%rsi), %r9, %r10 + # A[0] * A[3] + mulxq 24(%rsi), %r11, %r12 + # A[2] * A[1] + movq 16(%rsi), %rdx + mulxq 8(%rsi), %rcx, %rbx + xorq %r15, %r15 + adoxq %rcx, %r11 + # A[2] * A[3] + mulxq 24(%rsi), %r13, %r14 + adoxq %rbx, %r12 + # A[2] * A[0] + mulxq (%rsi), %rcx, %rbx + adoxq %r15, %r13 + adcxq %rcx, %r10 + adoxq %r15, %r14 + # A[1] * A[3] + movq 8(%rsi), %rdx + mulxq 24(%rsi), %rax, %r8 + adcxq %rbx, %r11 + adcxq %rax, %r12 + adcxq %r8, %r13 + adcxq %r15, %r14 + # Double with Carry Flag + xorq %r15, %r15 + # A[0] * A[0] + movq (%rsi), %rdx + mulxq %rdx, %r8, %rax + adcxq %r9, %r9 + # A[1] * A[1] + movq 8(%rsi), %rdx + mulxq %rdx, %rcx, %rbx + adcxq %r10, %r10 + adoxq %rax, %r9 + adcxq %r11, %r11 + adoxq %rcx, %r10 + # A[2] * A[2] + movq 16(%rsi), %rdx + mulxq %rdx, %rax, %rcx + adcxq %r12, %r12 + adoxq %rbx, %r11 + adcxq %r13, %r13 + adoxq %rax, %r12 + # A[3] * A[3] 
+ movq 24(%rsi), %rdx + mulxq %rdx, %rax, %rbx + adcxq %r14, %r14 + adoxq %rcx, %r13 + adcxq %r15, %r15 + adoxq %rax, %r14 + adoxq %rbx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rcx, %rcx + mulxq %r12, %rax, %r12 + adcxq %rax, %r8 + adoxq %r12, %r9 + mulxq %r13, %rax, %r13 + adcxq %rax, %r9 + adoxq %r13, %r10 + mulxq %r14, %rax, %r14 + adcxq %rax, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rcx, %rdx + adcxq %rcx, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rcx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbx + repz retq +#ifndef __APPLE__ +.size fe_sq_avx2,.-fe_sq_avx2 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_mul121666_avx2 +.type fe_mul121666_avx2,@function +.align 4 +fe_mul121666_avx2: +#else +.section __TEXT,__text +.globl _fe_mul121666_avx2 +.p2align 2 +_fe_mul121666_avx2: +#endif /* __APPLE__ */ + pushq %r12 + pushq %r13 + movq $0x1db42, %rdx + mulxq (%rsi), %rax, %r13 + mulxq 8(%rsi), %rcx, %r12 + mulxq 16(%rsi), %r8, %r11 + mulxq 24(%rsi), %r9, %r10 + addq %r13, %rcx + adcq %r12, %r8 + adcq %r11, %r9 + adcq $0x00, %r10 + movq $0x7fffffffffffffff, %r13 + shldq $0x01, %r9, %r10 + andq %r13, %r9 + imulq $19, %r10, %r10 + addq %r10, %rax + adcq $0x00, %rcx + adcq $0x00, %r8 + adcq $0x00, %r9 + movq %rax, (%rdi) + movq %rcx, 8(%rdi) + movq %r8, 16(%rdi) + movq 
%r9, 24(%rdi) + popq %r13 + popq %r12 + repz retq +#ifndef __APPLE__ +.size fe_mul121666_avx2,.-fe_mul121666_avx2 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_sq2_avx2 +.type fe_sq2_avx2,@function +.align 4 +fe_sq2_avx2: +#else +.section __TEXT,__text +.globl _fe_sq2_avx2 +.p2align 2 +_fe_sq2_avx2: +#endif /* __APPLE__ */ + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + # Square * 2 + # A[0] * A[1] + movq (%rsi), %rdx + mulxq 8(%rsi), %r9, %r10 + # A[0] * A[3] + mulxq 24(%rsi), %r11, %r12 + # A[2] * A[1] + movq 16(%rsi), %rdx + mulxq 8(%rsi), %rcx, %rbx + xorq %r15, %r15 + adoxq %rcx, %r11 + # A[2] * A[3] + mulxq 24(%rsi), %r13, %r14 + adoxq %rbx, %r12 + # A[2] * A[0] + mulxq (%rsi), %rcx, %rbx + adoxq %r15, %r13 + adcxq %rcx, %r10 + adoxq %r15, %r14 + # A[1] * A[3] + movq 8(%rsi), %rdx + mulxq 24(%rsi), %rax, %r8 + adcxq %rbx, %r11 + adcxq %rax, %r12 + adcxq %r8, %r13 + adcxq %r15, %r14 + # Double with Carry Flag + xorq %r15, %r15 + # A[0] * A[0] + movq (%rsi), %rdx + mulxq %rdx, %r8, %rax + adcxq %r9, %r9 + # A[1] * A[1] + movq 8(%rsi), %rdx + mulxq %rdx, %rcx, %rbx + adcxq %r10, %r10 + adoxq %rax, %r9 + adcxq %r11, %r11 + adoxq %rcx, %r10 + # A[2] * A[2] + movq 16(%rsi), %rdx + mulxq %rdx, %rax, %rcx + adcxq %r12, %r12 + adoxq %rbx, %r11 + adcxq %r13, %r13 + adoxq %rax, %r12 + # A[3] * A[3] + movq 24(%rsi), %rdx + mulxq %rdx, %rax, %rbx + adcxq %r14, %r14 + adoxq %rcx, %r13 + adcxq %r15, %r15 + adoxq %rax, %r14 + adoxq %rbx, %r15 + # Reduce + movq $0x7fffffffffffffff, %rbx + xorq %rax, %rax + # Move top half into t4-t7 and remove top bit from t3 and double + shldq $3, %r15, %rax + shldq $2, %r14, %r15 + shldq $2, %r13, %r14 + shldq $2, %r12, %r13 + shldq $2, %r11, %r12 + shldq $0x01, %r10, %r11 + shldq $0x01, %r9, %r10 + shldq $0x01, %r8, %r9 + shlq $0x01, %r8 + andq %rbx, %r11 + # Two out left, one in right + andq %rbx, %r15 + # Multiply top bits by 19*19 + imulq $0x169, %rax, %rcx + xorq %rbx, %rbx + # Multiply top half by 19 
+ movq $19, %rdx + adoxq %rcx, %r8 + mulxq %r12, %rax, %r12 + adcxq %rax, %r8 + adoxq %r12, %r9 + mulxq %r13, %rax, %r13 + adcxq %rax, %r9 + adoxq %r13, %r10 + mulxq %r14, %rax, %r14 + adcxq %rax, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rbx, %rdx + adcxq %rbx, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rbx + imulq $19, %rdx, %rax + andq %rbx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rbx, %r11 + addq %rax, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbx + repz retq +#ifndef __APPLE__ +.size fe_sq2_avx2,.-fe_sq2_avx2 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_invert_avx2 +.type fe_invert_avx2,@function +.align 4 +fe_invert_avx2: +#else +.section __TEXT,__text +.globl _fe_invert_avx2 +.p2align 2 +_fe_invert_avx2: +#endif /* __APPLE__ */ + subq $0x98, %rsp + # Invert + movq %rdi, 128(%rsp) + movq %rsi, 136(%rsp) + movq %rsp, %rdi + movq 136(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + movq %rsp, %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + movq 136(%rsp), %rsi + leaq 32(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + movq %rsp, %rdi + movq %rsp, %rsi + leaq 32(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + movq %rsp, %rsi +#ifndef __APPLE__ + callq 
fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi + leaq 64(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + leaq 32(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $4, 144(%rsp) + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi +L_fe_invert_avx2_1: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 144(%rsp) + jnz L_fe_invert_avx2_1 + leaq 32(%rsp), %rdi + leaq 64(%rsp), %rsi + leaq 32(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + leaq 32(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $9, 144(%rsp) + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi +L_fe_invert_avx2_2: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 144(%rsp) + jnz L_fe_invert_avx2_2 + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi + leaq 32(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 96(%rsp), %rdi + leaq 64(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $19, 144(%rsp) + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi +L_fe_invert_avx2_3: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 144(%rsp) + jnz L_fe_invert_avx2_3 + leaq 64(%rsp), %rdi + leaq 96(%rsp), %rsi + leaq 64(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $9, 144(%rsp) + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi 
+L_fe_invert_avx2_4: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 144(%rsp) + jnz L_fe_invert_avx2_4 + leaq 32(%rsp), %rdi + leaq 64(%rsp), %rsi + leaq 32(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + leaq 32(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $49, 144(%rsp) + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi +L_fe_invert_avx2_5: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 144(%rsp) + jnz L_fe_invert_avx2_5 + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi + leaq 32(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 96(%rsp), %rdi + leaq 64(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $0x63, 144(%rsp) + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi +L_fe_invert_avx2_6: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 144(%rsp) + jnz L_fe_invert_avx2_6 + leaq 64(%rsp), %rdi + leaq 96(%rsp), %rsi + leaq 64(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $49, 144(%rsp) + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi +L_fe_invert_avx2_7: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 144(%rsp) + jnz L_fe_invert_avx2_7 + leaq 32(%rsp), %rdi + leaq 64(%rsp), %rsi + leaq 32(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif 
/* __APPLE__ */ + movb $4, 144(%rsp) + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi +L_fe_invert_avx2_8: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 144(%rsp) + jnz L_fe_invert_avx2_8 + movq 128(%rsp), %rdi + leaq 32(%rsp), %rsi + movq %rsp, %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + movq 136(%rsp), %rsi + movq 128(%rsp), %rdi + addq $0x98, %rsp + repz retq +#ifndef __APPLE__ +.text +.globl curve25519_avx2 +.type curve25519_avx2,@function +.align 4 +curve25519_avx2: +#else +.section __TEXT,__text +.globl _curve25519_avx2 +.p2align 2 +_curve25519_avx2: +#endif /* __APPLE__ */ + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + pushq %rbp + movq %rdx, %r8 + subq $0xc0, %rsp + movq $0x00, 184(%rsp) + movq %rdi, 176(%rsp) + # Set one + movq $0x01, (%rdi) + movq $0x00, 8(%rdi) + movq $0x00, 16(%rdi) + movq $0x00, 24(%rdi) + # Set zero + movq $0x00, (%rsp) + movq $0x00, 8(%rsp) + movq $0x00, 16(%rsp) + movq $0x00, 24(%rsp) + # Set one + movq $0x01, 32(%rsp) + movq $0x00, 40(%rsp) + movq $0x00, 48(%rsp) + movq $0x00, 56(%rsp) + # Copy + movq (%r8), %r9 + movq 8(%r8), %r10 + movq 16(%r8), %r11 + movq 24(%r8), %r12 + movq %r9, 64(%rsp) + movq %r10, 72(%rsp) + movq %r11, 80(%rsp) + movq %r12, 88(%rsp) + movb $62, 168(%rsp) + movq $3, 160(%rsp) +L_curve25519_avx2_words: +L_curve25519_avx2_bits: + movq 184(%rsp), %rbx + movq 160(%rsp), %r9 + movb 168(%rsp), %cl + movq (%rsi,%r9,8), %rax + shrq %cl, %rax + andq $0x01, %rax + xorq %rax, %rbx + negq %rbx + # Conditional Swap + movq (%rdi), %r9 + movq 8(%rdi), %r10 + movq 16(%rdi), %r11 + movq 24(%rdi), %r12 + xorq 64(%rsp), %r9 + xorq 72(%rsp), %r10 + xorq 80(%rsp), %r11 + xorq 88(%rsp), %r12 + andq %rbx, %r9 + andq %rbx, %r10 + andq %rbx, %r11 + andq %rbx, %r12 + xorq %r9, (%rdi) + xorq %r10, 8(%rdi) + xorq %r11, 16(%rdi) + xorq %r12, 24(%rdi) + xorq %r9, 64(%rsp) + xorq %r10, 72(%rsp) + xorq %r11, 
80(%rsp) + xorq %r12, 88(%rsp) + # Conditional Swap + movq (%rsp), %r9 + movq 8(%rsp), %r10 + movq 16(%rsp), %r11 + movq 24(%rsp), %r12 + xorq 32(%rsp), %r9 + xorq 40(%rsp), %r10 + xorq 48(%rsp), %r11 + xorq 56(%rsp), %r12 + andq %rbx, %r9 + andq %rbx, %r10 + andq %rbx, %r11 + andq %rbx, %r12 + xorq %r9, (%rsp) + xorq %r10, 8(%rsp) + xorq %r11, 16(%rsp) + xorq %r12, 24(%rsp) + xorq %r9, 32(%rsp) + xorq %r10, 40(%rsp) + xorq %r11, 48(%rsp) + xorq %r12, 56(%rsp) + movq %rax, 184(%rsp) + # Sub + movq 64(%rsp), %r9 + movq 72(%rsp), %r10 + movq 80(%rsp), %r11 + movq 88(%rsp), %r12 + subq 32(%rsp), %r9 + movq $0x00, %rax + sbbq 40(%rsp), %r10 + movq $-19, %rcx + sbbq 48(%rsp), %r11 + movq $0x7fffffffffffffff, %rbx + sbbq 56(%rsp), %r12 + sbbq $0x00, %rax + # Mask the modulus + andq %rax, %rcx + andq %rax, %rbx + # Add modulus (if underflow) + addq %rcx, %r9 + adcq %rax, %r10 + adcq %rax, %r11 + adcq %rbx, %r12 + movq %r9, 96(%rsp) + movq %r10, 104(%rsp) + movq %r11, 112(%rsp) + movq %r12, 120(%rsp) + # Sub + movq (%rdi), %r9 + movq 8(%rdi), %r10 + movq 16(%rdi), %r11 + movq 24(%rdi), %r12 + subq (%rsp), %r9 + movq $0x00, %rax + sbbq 8(%rsp), %r10 + movq $-19, %rcx + sbbq 16(%rsp), %r11 + movq $0x7fffffffffffffff, %rbx + sbbq 24(%rsp), %r12 + sbbq $0x00, %rax + # Mask the modulus + andq %rax, %rcx + andq %rax, %rbx + # Add modulus (if underflow) + addq %rcx, %r9 + adcq %rax, %r10 + adcq %rax, %r11 + adcq %rbx, %r12 + movq %r9, 128(%rsp) + movq %r10, 136(%rsp) + movq %r11, 144(%rsp) + movq %r12, 152(%rsp) + # Add + movq (%rdi), %r9 + movq 8(%rdi), %r10 + addq (%rsp), %r9 + movq 16(%rdi), %r11 + adcq 8(%rsp), %r10 + movq 24(%rdi), %rax + adcq 16(%rsp), %r11 + movq $-19, %rcx + adcq 24(%rsp), %rax + movq $0x7fffffffffffffff, %rbx + movq %rax, %r12 + sarq $63, %rax + # Mask the modulus + andq %rax, %rcx + andq %rax, %rbx + # Sub modulus (if overflow) + subq %rcx, %r9 + sbbq %rax, %r10 + sbbq %rax, %r11 + sbbq %rbx, %r12 + movq %r9, (%rdi) + movq %r10, 8(%rdi) + movq %r11, 
16(%rdi) + movq %r12, 24(%rdi) + # Add + movq 64(%rsp), %r9 + movq 72(%rsp), %r10 + addq 32(%rsp), %r9 + movq 80(%rsp), %r11 + adcq 40(%rsp), %r10 + movq 88(%rsp), %rax + adcq 48(%rsp), %r11 + movq $-19, %rcx + adcq 56(%rsp), %rax + movq $0x7fffffffffffffff, %rbx + movq %rax, %r12 + sarq $63, %rax + # Mask the modulus + andq %rax, %rcx + andq %rax, %rbx + # Sub modulus (if overflow) + subq %rcx, %r9 + sbbq %rax, %r10 + sbbq %rax, %r11 + sbbq %rbx, %r12 + movq %r9, (%rsp) + movq %r10, 8(%rsp) + movq %r11, 16(%rsp) + movq %r12, 24(%rsp) + # Multiply + # A[0] * B[0] + movq (%rdi), %rdx + mulxq 96(%rsp), %r9, %r10 + # A[2] * B[0] + mulxq 112(%rsp), %r11, %r12 + # A[1] * B[0] + mulxq 104(%rsp), %rcx, %rbx + xorq %rbp, %rbp + adcxq %rcx, %r10 + # A[1] * B[3] + movq 24(%rdi), %rdx + mulxq 104(%rsp), %r13, %r14 + adcxq %rbx, %r11 + # A[0] * B[1] + movq 8(%rdi), %rdx + mulxq 96(%rsp), %rcx, %rbx + adoxq %rcx, %r10 + # A[2] * B[1] + mulxq 112(%rsp), %rcx, %r15 + adoxq %rbx, %r11 + adcxq %rcx, %r12 + # A[1] * B[2] + movq 16(%rdi), %rdx + mulxq 104(%rsp), %rcx, %rbx + adcxq %r15, %r13 + adoxq %rcx, %r12 + adcxq %rbp, %r14 + adoxq %rbx, %r13 + # A[0] * B[2] + mulxq 96(%rsp), %rcx, %rbx + adoxq %rbp, %r14 + xorq %r15, %r15 + adcxq %rcx, %r11 + # A[1] * B[1] + movq 8(%rdi), %rdx + mulxq 104(%rsp), %rdx, %rcx + adcxq %rbx, %r12 + adoxq %rdx, %r11 + # A[3] * B[1] + movq 8(%rdi), %rdx + adoxq %rcx, %r12 + mulxq 120(%rsp), %rcx, %rbx + adcxq %rcx, %r13 + # A[2] * B[2] + movq 16(%rdi), %rdx + mulxq 112(%rsp), %rdx, %rcx + adcxq %rbx, %r14 + adoxq %rdx, %r13 + # A[3] * B[3] + movq 24(%rdi), %rdx + adoxq %rcx, %r14 + mulxq 120(%rsp), %rcx, %rbx + adoxq %rbp, %r15 + adcxq %rcx, %r15 + # A[0] * B[3] + mulxq 96(%rsp), %rdx, %rcx + adcxq %rbx, %rbp + xorq %rbx, %rbx + adcxq %rdx, %r12 + # A[3] * B[0] + movq (%rdi), %rdx + adcxq %rcx, %r13 + mulxq 120(%rsp), %rdx, %rcx + adoxq %rdx, %r12 + adoxq %rcx, %r13 + # A[2] * B[3] + movq 24(%rdi), %rdx + mulxq 112(%rsp), %rdx, %rcx + adcxq %rdx, %r14 
+ # A[3] * B[2] + movq 16(%rdi), %rdx + adcxq %rcx, %r15 + mulxq 120(%rsp), %rcx, %rdx + adcxq %rbx, %rbp + adoxq %rcx, %r14 + adoxq %rdx, %r15 + adoxq %rbx, %rbp + # Reduce + movq $0x7fffffffffffffff, %rbx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r15, %rbp + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + andq %rbx, %r12 + # Multiply top half by 19 + movq $19, %rdx + xorq %rbx, %rbx + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %rcx, %r15 + adcxq %rcx, %r11 + adoxq %r15, %r12 + mulxq %rbp, %rbp, %rdx + adcxq %rbp, %r12 + adoxq %rbx, %rdx + adcxq %rbx, %rdx + # Overflow + shldq $0x01, %r12, %rdx + movq $0x7fffffffffffffff, %rbx + imulq $19, %rdx, %rcx + andq %rbx, %r12 + addq %rcx, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Reduce if top bit set + movq %r12, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rbx, %r12 + addq %rcx, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Store + movq %r9, 32(%rsp) + movq %r10, 40(%rsp) + movq %r11, 48(%rsp) + movq %r12, 56(%rsp) + # Multiply + # A[0] * B[0] + movq 128(%rsp), %rdx + mulxq (%rsp), %r9, %r10 + # A[2] * B[0] + mulxq 16(%rsp), %r11, %r12 + # A[1] * B[0] + mulxq 8(%rsp), %rcx, %rbx + xorq %rbp, %rbp + adcxq %rcx, %r10 + # A[1] * B[3] + movq 152(%rsp), %rdx + mulxq 8(%rsp), %r13, %r14 + adcxq %rbx, %r11 + # A[0] * B[1] + movq 136(%rsp), %rdx + mulxq (%rsp), %rcx, %rbx + adoxq %rcx, %r10 + # A[2] * B[1] + mulxq 16(%rsp), %rcx, %r15 + adoxq %rbx, %r11 + adcxq %rcx, %r12 + # A[1] * B[2] + movq 144(%rsp), %rdx + mulxq 8(%rsp), %rcx, %rbx + adcxq %r15, %r13 + adoxq %rcx, %r12 + adcxq %rbp, %r14 + adoxq %rbx, %r13 + # A[0] * B[2] + mulxq (%rsp), %rcx, %rbx + adoxq %rbp, %r14 + xorq %r15, %r15 + adcxq %rcx, %r11 + # A[1] * B[1] + movq 136(%rsp), %rdx + mulxq 8(%rsp), %rdx, %rcx + adcxq %rbx, %r12 + adoxq %rdx, %r11 + # A[3] * 
B[1] + movq 136(%rsp), %rdx + adoxq %rcx, %r12 + mulxq 24(%rsp), %rcx, %rbx + adcxq %rcx, %r13 + # A[2] * B[2] + movq 144(%rsp), %rdx + mulxq 16(%rsp), %rdx, %rcx + adcxq %rbx, %r14 + adoxq %rdx, %r13 + # A[3] * B[3] + movq 152(%rsp), %rdx + adoxq %rcx, %r14 + mulxq 24(%rsp), %rcx, %rbx + adoxq %rbp, %r15 + adcxq %rcx, %r15 + # A[0] * B[3] + mulxq (%rsp), %rdx, %rcx + adcxq %rbx, %rbp + xorq %rbx, %rbx + adcxq %rdx, %r12 + # A[3] * B[0] + movq 128(%rsp), %rdx + adcxq %rcx, %r13 + mulxq 24(%rsp), %rdx, %rcx + adoxq %rdx, %r12 + adoxq %rcx, %r13 + # A[2] * B[3] + movq 152(%rsp), %rdx + mulxq 16(%rsp), %rdx, %rcx + adcxq %rdx, %r14 + # A[3] * B[2] + movq 144(%rsp), %rdx + adcxq %rcx, %r15 + mulxq 24(%rsp), %rcx, %rdx + adcxq %rbx, %rbp + adoxq %rcx, %r14 + adoxq %rdx, %r15 + adoxq %rbx, %rbp + # Reduce + movq $0x7fffffffffffffff, %rbx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r15, %rbp + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + andq %rbx, %r12 + # Multiply top half by 19 + movq $19, %rdx + xorq %rbx, %rbx + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %rcx, %r15 + adcxq %rcx, %r11 + adoxq %r15, %r12 + mulxq %rbp, %rbp, %rdx + adcxq %rbp, %r12 + adoxq %rbx, %rdx + adcxq %rbx, %rdx + # Overflow + shldq $0x01, %r12, %rdx + movq $0x7fffffffffffffff, %rbx + imulq $19, %rdx, %rcx + andq %rbx, %r12 + addq %rcx, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Reduce if top bit set + movq %r12, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rbx, %r12 + addq %rcx, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Store + movq %r9, (%rsp) + movq %r10, 8(%rsp) + movq %r11, 16(%rsp) + movq %r12, 24(%rsp) + # Square + # A[0] * A[1] + movq 128(%rsp), %rdx + mulxq 136(%rsp), %r10, %r11 + # A[0] * A[3] + mulxq 152(%rsp), %r12, %r13 + # A[2] * A[1] + movq 144(%rsp), %rdx + mulxq 136(%rsp), 
%rcx, %rbx + xorq %rbp, %rbp + adoxq %rcx, %r12 + # A[2] * A[3] + mulxq 152(%rsp), %r14, %r15 + adoxq %rbx, %r13 + # A[2] * A[0] + mulxq 128(%rsp), %rcx, %rbx + adoxq %rbp, %r14 + adcxq %rcx, %r11 + adoxq %rbp, %r15 + # A[1] * A[3] + movq 136(%rsp), %rdx + mulxq 152(%rsp), %rax, %r9 + adcxq %rbx, %r12 + adcxq %rax, %r13 + adcxq %r9, %r14 + adcxq %rbp, %r15 + # Double with Carry Flag + xorq %rbp, %rbp + # A[0] * A[0] + movq 128(%rsp), %rdx + mulxq %rdx, %r9, %rax + adcxq %r10, %r10 + # A[1] * A[1] + movq 136(%rsp), %rdx + mulxq %rdx, %rcx, %rbx + adcxq %r11, %r11 + adoxq %rax, %r10 + adcxq %r12, %r12 + adoxq %rcx, %r11 + # A[2] * A[2] + movq 144(%rsp), %rdx + mulxq %rdx, %rax, %rcx + adcxq %r13, %r13 + adoxq %rbx, %r12 + adcxq %r14, %r14 + adoxq %rax, %r13 + # A[3] * A[3] + movq 152(%rsp), %rdx + mulxq %rdx, %rax, %rbx + adcxq %r15, %r15 + adoxq %rcx, %r14 + adcxq %rbp, %rbp + adoxq %rax, %r15 + adoxq %rbx, %rbp + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r15, %rbp + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + andq %rcx, %r12 + # Multiply top half by 19 + movq $19, %rdx + xorq %rcx, %rcx + mulxq %r13, %rax, %r13 + adcxq %rax, %r9 + adoxq %r13, %r10 + mulxq %r14, %rax, %r14 + adcxq %rax, %r10 + adoxq %r14, %r11 + mulxq %r15, %rax, %r15 + adcxq %rax, %r11 + adoxq %r15, %r12 + mulxq %rbp, %rbp, %rdx + adcxq %rbp, %r12 + adoxq %rcx, %rdx + adcxq %rcx, %rdx + # Overflow + shldq $0x01, %r12, %rdx + movq $0x7fffffffffffffff, %rcx + imulq $19, %rdx, %rax + andq %rcx, %r12 + addq %rax, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Reduce if top bit set + movq %r12, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r12 + addq %rax, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Store + movq %r9, 96(%rsp) + movq %r10, 104(%rsp) + movq %r11, 112(%rsp) + movq %r12, 120(%rsp) + # Square + # A[0] * A[1] + movq (%rdi), %rdx + mulxq 
8(%rdi), %r10, %r11 + # A[0] * A[3] + mulxq 24(%rdi), %r12, %r13 + # A[2] * A[1] + movq 16(%rdi), %rdx + mulxq 8(%rdi), %rcx, %rbx + xorq %rbp, %rbp + adoxq %rcx, %r12 + # A[2] * A[3] + mulxq 24(%rdi), %r14, %r15 + adoxq %rbx, %r13 + # A[2] * A[0] + mulxq (%rdi), %rcx, %rbx + adoxq %rbp, %r14 + adcxq %rcx, %r11 + adoxq %rbp, %r15 + # A[1] * A[3] + movq 8(%rdi), %rdx + mulxq 24(%rdi), %rax, %r9 + adcxq %rbx, %r12 + adcxq %rax, %r13 + adcxq %r9, %r14 + adcxq %rbp, %r15 + # Double with Carry Flag + xorq %rbp, %rbp + # A[0] * A[0] + movq (%rdi), %rdx + mulxq %rdx, %r9, %rax + adcxq %r10, %r10 + # A[1] * A[1] + movq 8(%rdi), %rdx + mulxq %rdx, %rcx, %rbx + adcxq %r11, %r11 + adoxq %rax, %r10 + adcxq %r12, %r12 + adoxq %rcx, %r11 + # A[2] * A[2] + movq 16(%rdi), %rdx + mulxq %rdx, %rax, %rcx + adcxq %r13, %r13 + adoxq %rbx, %r12 + adcxq %r14, %r14 + adoxq %rax, %r13 + # A[3] * A[3] + movq 24(%rdi), %rdx + mulxq %rdx, %rax, %rbx + adcxq %r15, %r15 + adoxq %rcx, %r14 + adcxq %rbp, %rbp + adoxq %rax, %r15 + adoxq %rbx, %rbp + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r15, %rbp + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + andq %rcx, %r12 + # Multiply top half by 19 + movq $19, %rdx + xorq %rcx, %rcx + mulxq %r13, %rax, %r13 + adcxq %rax, %r9 + adoxq %r13, %r10 + mulxq %r14, %rax, %r14 + adcxq %rax, %r10 + adoxq %r14, %r11 + mulxq %r15, %rax, %r15 + adcxq %rax, %r11 + adoxq %r15, %r12 + mulxq %rbp, %rbp, %rdx + adcxq %rbp, %r12 + adoxq %rcx, %rdx + adcxq %rcx, %rdx + # Overflow + shldq $0x01, %r12, %rdx + movq $0x7fffffffffffffff, %rcx + imulq $19, %rdx, %rax + andq %rcx, %r12 + addq %rax, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Reduce if top bit set + movq %r12, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r12 + addq %rax, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Store + movq %r9, 128(%rsp) + movq %r10, 
136(%rsp) + movq %r11, 144(%rsp) + movq %r12, 152(%rsp) + # Add + movq 32(%rsp), %r9 + movq 40(%rsp), %r10 + addq (%rsp), %r9 + movq 48(%rsp), %r11 + adcq 8(%rsp), %r10 + movq 56(%rsp), %rax + adcq 16(%rsp), %r11 + movq $-19, %rcx + adcq 24(%rsp), %rax + movq $0x7fffffffffffffff, %rbx + movq %rax, %r12 + sarq $63, %rax + # Mask the modulus + andq %rax, %rcx + andq %rax, %rbx + # Sub modulus (if overflow) + subq %rcx, %r9 + sbbq %rax, %r10 + sbbq %rax, %r11 + sbbq %rbx, %r12 + movq %r9, 64(%rsp) + movq %r10, 72(%rsp) + movq %r11, 80(%rsp) + movq %r12, 88(%rsp) + # Sub + movq 32(%rsp), %r9 + movq 40(%rsp), %r10 + movq 48(%rsp), %r11 + movq 56(%rsp), %r12 + subq (%rsp), %r9 + movq $0x00, %rax + sbbq 8(%rsp), %r10 + movq $-19, %rcx + sbbq 16(%rsp), %r11 + movq $0x7fffffffffffffff, %rbx + sbbq 24(%rsp), %r12 + sbbq $0x00, %rax + # Mask the modulus + andq %rax, %rcx + andq %rax, %rbx + # Add modulus (if underflow) + addq %rcx, %r9 + adcq %rax, %r10 + adcq %rax, %r11 + adcq %rbx, %r12 + movq %r9, (%rsp) + movq %r10, 8(%rsp) + movq %r11, 16(%rsp) + movq %r12, 24(%rsp) + # Multiply + # A[0] * B[0] + movq 96(%rsp), %rdx + mulxq 128(%rsp), %r9, %r10 + # A[2] * B[0] + mulxq 144(%rsp), %r11, %r12 + # A[1] * B[0] + mulxq 136(%rsp), %rcx, %rbx + xorq %rbp, %rbp + adcxq %rcx, %r10 + # A[1] * B[3] + movq 120(%rsp), %rdx + mulxq 136(%rsp), %r13, %r14 + adcxq %rbx, %r11 + # A[0] * B[1] + movq 104(%rsp), %rdx + mulxq 128(%rsp), %rcx, %rbx + adoxq %rcx, %r10 + # A[2] * B[1] + mulxq 144(%rsp), %rcx, %r15 + adoxq %rbx, %r11 + adcxq %rcx, %r12 + # A[1] * B[2] + movq 112(%rsp), %rdx + mulxq 136(%rsp), %rcx, %rbx + adcxq %r15, %r13 + adoxq %rcx, %r12 + adcxq %rbp, %r14 + adoxq %rbx, %r13 + # A[0] * B[2] + mulxq 128(%rsp), %rcx, %rbx + adoxq %rbp, %r14 + xorq %r15, %r15 + adcxq %rcx, %r11 + # A[1] * B[1] + movq 104(%rsp), %rdx + mulxq 136(%rsp), %rdx, %rcx + adcxq %rbx, %r12 + adoxq %rdx, %r11 + # A[3] * B[1] + movq 104(%rsp), %rdx + adoxq %rcx, %r12 + mulxq 152(%rsp), %rcx, %rbx + adcxq 
%rcx, %r13 + # A[2] * B[2] + movq 112(%rsp), %rdx + mulxq 144(%rsp), %rdx, %rcx + adcxq %rbx, %r14 + adoxq %rdx, %r13 + # A[3] * B[3] + movq 120(%rsp), %rdx + adoxq %rcx, %r14 + mulxq 152(%rsp), %rcx, %rbx + adoxq %rbp, %r15 + adcxq %rcx, %r15 + # A[0] * B[3] + mulxq 128(%rsp), %rdx, %rcx + adcxq %rbx, %rbp + xorq %rbx, %rbx + adcxq %rdx, %r12 + # A[3] * B[0] + movq 96(%rsp), %rdx + adcxq %rcx, %r13 + mulxq 152(%rsp), %rdx, %rcx + adoxq %rdx, %r12 + adoxq %rcx, %r13 + # A[2] * B[3] + movq 120(%rsp), %rdx + mulxq 144(%rsp), %rdx, %rcx + adcxq %rdx, %r14 + # A[3] * B[2] + movq 112(%rsp), %rdx + adcxq %rcx, %r15 + mulxq 152(%rsp), %rcx, %rdx + adcxq %rbx, %rbp + adoxq %rcx, %r14 + adoxq %rdx, %r15 + adoxq %rbx, %rbp + # Reduce + movq $0x7fffffffffffffff, %rbx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r15, %rbp + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + andq %rbx, %r12 + # Multiply top half by 19 + movq $19, %rdx + xorq %rbx, %rbx + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %rcx, %r15 + adcxq %rcx, %r11 + adoxq %r15, %r12 + mulxq %rbp, %rbp, %rdx + adcxq %rbp, %r12 + adoxq %rbx, %rdx + adcxq %rbx, %rdx + # Overflow + shldq $0x01, %r12, %rdx + movq $0x7fffffffffffffff, %rbx + imulq $19, %rdx, %rcx + andq %rbx, %r12 + addq %rcx, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Reduce if top bit set + movq %r12, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rbx, %r12 + addq %rcx, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Store + movq %r9, (%rdi) + movq %r10, 8(%rdi) + movq %r11, 16(%rdi) + movq %r12, 24(%rdi) + # Sub + movq 128(%rsp), %r9 + movq 136(%rsp), %r10 + movq 144(%rsp), %r11 + movq 152(%rsp), %r12 + subq 96(%rsp), %r9 + movq $0x00, %rax + sbbq 104(%rsp), %r10 + movq $-19, %rcx + sbbq 112(%rsp), %r11 + movq $0x7fffffffffffffff, %rbx + sbbq 120(%rsp), %r12 + 
sbbq $0x00, %rax + # Mask the modulus + andq %rax, %rcx + andq %rax, %rbx + # Add modulus (if underflow) + addq %rcx, %r9 + adcq %rax, %r10 + adcq %rax, %r11 + adcq %rbx, %r12 + movq %r9, 128(%rsp) + movq %r10, 136(%rsp) + movq %r11, 144(%rsp) + movq %r12, 152(%rsp) + # Square + # A[0] * A[1] + movq (%rsp), %rdx + mulxq 8(%rsp), %r10, %r11 + # A[0] * A[3] + mulxq 24(%rsp), %r12, %r13 + # A[2] * A[1] + movq 16(%rsp), %rdx + mulxq 8(%rsp), %rcx, %rbx + xorq %rbp, %rbp + adoxq %rcx, %r12 + # A[2] * A[3] + mulxq 24(%rsp), %r14, %r15 + adoxq %rbx, %r13 + # A[2] * A[0] + mulxq (%rsp), %rcx, %rbx + adoxq %rbp, %r14 + adcxq %rcx, %r11 + adoxq %rbp, %r15 + # A[1] * A[3] + movq 8(%rsp), %rdx + mulxq 24(%rsp), %rax, %r9 + adcxq %rbx, %r12 + adcxq %rax, %r13 + adcxq %r9, %r14 + adcxq %rbp, %r15 + # Double with Carry Flag + xorq %rbp, %rbp + # A[0] * A[0] + movq (%rsp), %rdx + mulxq %rdx, %r9, %rax + adcxq %r10, %r10 + # A[1] * A[1] + movq 8(%rsp), %rdx + mulxq %rdx, %rcx, %rbx + adcxq %r11, %r11 + adoxq %rax, %r10 + adcxq %r12, %r12 + adoxq %rcx, %r11 + # A[2] * A[2] + movq 16(%rsp), %rdx + mulxq %rdx, %rax, %rcx + adcxq %r13, %r13 + adoxq %rbx, %r12 + adcxq %r14, %r14 + adoxq %rax, %r13 + # A[3] * A[3] + movq 24(%rsp), %rdx + mulxq %rdx, %rax, %rbx + adcxq %r15, %r15 + adoxq %rcx, %r14 + adcxq %rbp, %rbp + adoxq %rax, %r15 + adoxq %rbx, %rbp + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r15, %rbp + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + andq %rcx, %r12 + # Multiply top half by 19 + movq $19, %rdx + xorq %rcx, %rcx + mulxq %r13, %rax, %r13 + adcxq %rax, %r9 + adoxq %r13, %r10 + mulxq %r14, %rax, %r14 + adcxq %rax, %r10 + adoxq %r14, %r11 + mulxq %r15, %rax, %r15 + adcxq %rax, %r11 + adoxq %r15, %r12 + mulxq %rbp, %rbp, %rdx + adcxq %rbp, %r12 + adoxq %rcx, %rdx + adcxq %rcx, %rdx + # Overflow + shldq $0x01, %r12, %rdx + movq $0x7fffffffffffffff, %rcx + imulq $19, %rdx, 
%rax + andq %rcx, %r12 + addq %rax, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Reduce if top bit set + movq %r12, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r12 + addq %rax, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Store + movq %r9, (%rsp) + movq %r10, 8(%rsp) + movq %r11, 16(%rsp) + movq %r12, 24(%rsp) + movq $0x1db42, %rdx + mulxq 128(%rsp), %r9, %rbp + mulxq 136(%rsp), %r10, %r15 + mulxq 144(%rsp), %r11, %r14 + mulxq 152(%rsp), %r12, %r13 + addq %rbp, %r10 + adcq %r15, %r11 + adcq %r14, %r12 + adcq $0x00, %r13 + movq $0x7fffffffffffffff, %rbp + shldq $0x01, %r12, %r13 + andq %rbp, %r12 + imulq $19, %r13, %r13 + addq %r13, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + movq %r9, 32(%rsp) + movq %r10, 40(%rsp) + movq %r11, 48(%rsp) + movq %r12, 56(%rsp) + # Square + # A[0] * A[1] + movq 64(%rsp), %rdx + mulxq 72(%rsp), %r10, %r11 + # A[0] * A[3] + mulxq 88(%rsp), %r12, %r13 + # A[2] * A[1] + movq 80(%rsp), %rdx + mulxq 72(%rsp), %rcx, %rbx + xorq %rbp, %rbp + adoxq %rcx, %r12 + # A[2] * A[3] + mulxq 88(%rsp), %r14, %r15 + adoxq %rbx, %r13 + # A[2] * A[0] + mulxq 64(%rsp), %rcx, %rbx + adoxq %rbp, %r14 + adcxq %rcx, %r11 + adoxq %rbp, %r15 + # A[1] * A[3] + movq 72(%rsp), %rdx + mulxq 88(%rsp), %rax, %r9 + adcxq %rbx, %r12 + adcxq %rax, %r13 + adcxq %r9, %r14 + adcxq %rbp, %r15 + # Double with Carry Flag + xorq %rbp, %rbp + # A[0] * A[0] + movq 64(%rsp), %rdx + mulxq %rdx, %r9, %rax + adcxq %r10, %r10 + # A[1] * A[1] + movq 72(%rsp), %rdx + mulxq %rdx, %rcx, %rbx + adcxq %r11, %r11 + adoxq %rax, %r10 + adcxq %r12, %r12 + adoxq %rcx, %r11 + # A[2] * A[2] + movq 80(%rsp), %rdx + mulxq %rdx, %rax, %rcx + adcxq %r13, %r13 + adoxq %rbx, %r12 + adcxq %r14, %r14 + adoxq %rax, %r13 + # A[3] * A[3] + movq 88(%rsp), %rdx + mulxq %rdx, %rax, %rbx + adcxq %r15, %r15 + adoxq %rcx, %r14 + adcxq %rbp, %rbp + adoxq %rax, %r15 + adoxq %rbx, %rbp + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top 
half into t4-t7 and remove top bit from t3 + shldq $0x01, %r15, %rbp + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + andq %rcx, %r12 + # Multiply top half by 19 + movq $19, %rdx + xorq %rcx, %rcx + mulxq %r13, %rax, %r13 + adcxq %rax, %r9 + adoxq %r13, %r10 + mulxq %r14, %rax, %r14 + adcxq %rax, %r10 + adoxq %r14, %r11 + mulxq %r15, %rax, %r15 + adcxq %rax, %r11 + adoxq %r15, %r12 + mulxq %rbp, %rbp, %rdx + adcxq %rbp, %r12 + adoxq %rcx, %rdx + adcxq %rcx, %rdx + # Overflow + shldq $0x01, %r12, %rdx + movq $0x7fffffffffffffff, %rcx + imulq $19, %rdx, %rax + andq %rcx, %r12 + addq %rax, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Reduce if top bit set + movq %r12, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rax + andq %rcx, %r12 + addq %rax, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Store + movq %r9, 64(%rsp) + movq %r10, 72(%rsp) + movq %r11, 80(%rsp) + movq %r12, 88(%rsp) + # Add + movq 96(%rsp), %r9 + movq 104(%rsp), %r10 + addq 32(%rsp), %r9 + movq 112(%rsp), %r11 + adcq 40(%rsp), %r10 + movq 120(%rsp), %rax + adcq 48(%rsp), %r11 + movq $-19, %rcx + adcq 56(%rsp), %rax + movq $0x7fffffffffffffff, %rbx + movq %rax, %r12 + sarq $63, %rax + # Mask the modulus + andq %rax, %rcx + andq %rax, %rbx + # Sub modulus (if overflow) + subq %rcx, %r9 + sbbq %rax, %r10 + sbbq %rax, %r11 + sbbq %rbx, %r12 + movq %r9, 96(%rsp) + movq %r10, 104(%rsp) + movq %r11, 112(%rsp) + movq %r12, 120(%rsp) + # Multiply + # A[0] * B[0] + movq (%rsp), %rdx + mulxq (%r8), %r9, %r10 + # A[2] * B[0] + mulxq 16(%r8), %r11, %r12 + # A[1] * B[0] + mulxq 8(%r8), %rcx, %rbx + xorq %rbp, %rbp + adcxq %rcx, %r10 + # A[1] * B[3] + movq 24(%rsp), %rdx + mulxq 8(%r8), %r13, %r14 + adcxq %rbx, %r11 + # A[0] * B[1] + movq 8(%rsp), %rdx + mulxq (%r8), %rcx, %rbx + adoxq %rcx, %r10 + # A[2] * B[1] + mulxq 16(%r8), %rcx, %r15 + adoxq %rbx, %r11 + adcxq %rcx, %r12 + # A[1] * B[2] + movq 16(%rsp), %rdx + mulxq 8(%r8), %rcx, %rbx + adcxq 
%r15, %r13 + adoxq %rcx, %r12 + adcxq %rbp, %r14 + adoxq %rbx, %r13 + # A[0] * B[2] + mulxq (%r8), %rcx, %rbx + adoxq %rbp, %r14 + xorq %r15, %r15 + adcxq %rcx, %r11 + # A[1] * B[1] + movq 8(%rsp), %rdx + mulxq 8(%r8), %rdx, %rcx + adcxq %rbx, %r12 + adoxq %rdx, %r11 + # A[3] * B[1] + movq 8(%rsp), %rdx + adoxq %rcx, %r12 + mulxq 24(%r8), %rcx, %rbx + adcxq %rcx, %r13 + # A[2] * B[2] + movq 16(%rsp), %rdx + mulxq 16(%r8), %rdx, %rcx + adcxq %rbx, %r14 + adoxq %rdx, %r13 + # A[3] * B[3] + movq 24(%rsp), %rdx + adoxq %rcx, %r14 + mulxq 24(%r8), %rcx, %rbx + adoxq %rbp, %r15 + adcxq %rcx, %r15 + # A[0] * B[3] + mulxq (%r8), %rdx, %rcx + adcxq %rbx, %rbp + xorq %rbx, %rbx + adcxq %rdx, %r12 + # A[3] * B[0] + movq (%rsp), %rdx + adcxq %rcx, %r13 + mulxq 24(%r8), %rdx, %rcx + adoxq %rdx, %r12 + adoxq %rcx, %r13 + # A[2] * B[3] + movq 24(%rsp), %rdx + mulxq 16(%r8), %rdx, %rcx + adcxq %rdx, %r14 + # A[3] * B[2] + movq 16(%rsp), %rdx + adcxq %rcx, %r15 + mulxq 24(%r8), %rcx, %rdx + adcxq %rbx, %rbp + adoxq %rcx, %r14 + adoxq %rdx, %r15 + adoxq %rbx, %rbp + # Reduce + movq $0x7fffffffffffffff, %rbx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r15, %rbp + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + andq %rbx, %r12 + # Multiply top half by 19 + movq $19, %rdx + xorq %rbx, %rbx + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %rcx, %r15 + adcxq %rcx, %r11 + adoxq %r15, %r12 + mulxq %rbp, %rbp, %rdx + adcxq %rbp, %r12 + adoxq %rbx, %rdx + adcxq %rbx, %rdx + # Overflow + shldq $0x01, %r12, %rdx + movq $0x7fffffffffffffff, %rbx + imulq $19, %rdx, %rcx + andq %rbx, %r12 + addq %rcx, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Reduce if top bit set + movq %r12, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rbx, %r12 + addq %rcx, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Store + movq 
%r9, 32(%rsp) + movq %r10, 40(%rsp) + movq %r11, 48(%rsp) + movq %r12, 56(%rsp) + # Multiply + # A[0] * B[0] + movq 96(%rsp), %rdx + mulxq 128(%rsp), %r9, %r10 + # A[2] * B[0] + mulxq 144(%rsp), %r11, %r12 + # A[1] * B[0] + mulxq 136(%rsp), %rcx, %rbx + xorq %rbp, %rbp + adcxq %rcx, %r10 + # A[1] * B[3] + movq 120(%rsp), %rdx + mulxq 136(%rsp), %r13, %r14 + adcxq %rbx, %r11 + # A[0] * B[1] + movq 104(%rsp), %rdx + mulxq 128(%rsp), %rcx, %rbx + adoxq %rcx, %r10 + # A[2] * B[1] + mulxq 144(%rsp), %rcx, %r15 + adoxq %rbx, %r11 + adcxq %rcx, %r12 + # A[1] * B[2] + movq 112(%rsp), %rdx + mulxq 136(%rsp), %rcx, %rbx + adcxq %r15, %r13 + adoxq %rcx, %r12 + adcxq %rbp, %r14 + adoxq %rbx, %r13 + # A[0] * B[2] + mulxq 128(%rsp), %rcx, %rbx + adoxq %rbp, %r14 + xorq %r15, %r15 + adcxq %rcx, %r11 + # A[1] * B[1] + movq 104(%rsp), %rdx + mulxq 136(%rsp), %rdx, %rcx + adcxq %rbx, %r12 + adoxq %rdx, %r11 + # A[3] * B[1] + movq 104(%rsp), %rdx + adoxq %rcx, %r12 + mulxq 152(%rsp), %rcx, %rbx + adcxq %rcx, %r13 + # A[2] * B[2] + movq 112(%rsp), %rdx + mulxq 144(%rsp), %rdx, %rcx + adcxq %rbx, %r14 + adoxq %rdx, %r13 + # A[3] * B[3] + movq 120(%rsp), %rdx + adoxq %rcx, %r14 + mulxq 152(%rsp), %rcx, %rbx + adoxq %rbp, %r15 + adcxq %rcx, %r15 + # A[0] * B[3] + mulxq 128(%rsp), %rdx, %rcx + adcxq %rbx, %rbp + xorq %rbx, %rbx + adcxq %rdx, %r12 + # A[3] * B[0] + movq 96(%rsp), %rdx + adcxq %rcx, %r13 + mulxq 152(%rsp), %rdx, %rcx + adoxq %rdx, %r12 + adoxq %rcx, %r13 + # A[2] * B[3] + movq 120(%rsp), %rdx + mulxq 144(%rsp), %rdx, %rcx + adcxq %rdx, %r14 + # A[3] * B[2] + movq 112(%rsp), %rdx + adcxq %rcx, %r15 + mulxq 152(%rsp), %rcx, %rdx + adcxq %rbx, %rbp + adoxq %rcx, %r14 + adoxq %rdx, %r15 + adoxq %rbx, %rbp + # Reduce + movq $0x7fffffffffffffff, %rbx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r15, %rbp + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + andq %rbx, %r12 + # Multiply top half by 19 + movq $19, %rdx + xorq 
%rbx, %rbx + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %rcx, %r15 + adcxq %rcx, %r11 + adoxq %r15, %r12 + mulxq %rbp, %rbp, %rdx + adcxq %rbp, %r12 + adoxq %rbx, %rdx + adcxq %rbx, %rdx + # Overflow + shldq $0x01, %r12, %rdx + movq $0x7fffffffffffffff, %rbx + imulq $19, %rdx, %rcx + andq %rbx, %r12 + addq %rcx, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Reduce if top bit set + movq %r12, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rbx, %r12 + addq %rcx, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Store + movq %r9, (%rsp) + movq %r10, 8(%rsp) + movq %r11, 16(%rsp) + movq %r12, 24(%rsp) + decb 168(%rsp) + jge L_curve25519_avx2_bits + movq $63, 168(%rsp) + decb 160(%rsp) + jge L_curve25519_avx2_words + # Invert + leaq 32(%rsp), %rdi + movq %rsp, %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + leaq 32(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + movq %rsp, %rsi + leaq 64(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi + leaq 64(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 96(%rsp), %rdi + leaq 32(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi + leaq 96(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 96(%rsp), %rdi + leaq 64(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq 
_fe_sq_avx2 +#endif /* __APPLE__ */ + movb $4, 160(%rsp) + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi +L_curve25519_avx2_inv_1: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 160(%rsp) + jnz L_curve25519_avx2_inv_1 + leaq 64(%rsp), %rdi + leaq 96(%rsp), %rsi + leaq 64(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 96(%rsp), %rdi + leaq 64(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $9, 160(%rsp) + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi +L_curve25519_avx2_inv_2: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 160(%rsp) + jnz L_curve25519_avx2_inv_2 + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi + leaq 64(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 128(%rsp), %rdi + leaq 96(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $19, 160(%rsp) + leaq 128(%rsp), %rdi + leaq 128(%rsp), %rsi +L_curve25519_avx2_inv_3: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 160(%rsp) + jnz L_curve25519_avx2_inv_3 + leaq 96(%rsp), %rdi + leaq 128(%rsp), %rsi + leaq 96(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $9, 160(%rsp) + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi +L_curve25519_avx2_inv_4: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 160(%rsp) + jnz L_curve25519_avx2_inv_4 + leaq 64(%rsp), %rdi + leaq 96(%rsp), %rsi + leaq 64(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 
+#endif /* __APPLE__ */ + leaq 96(%rsp), %rdi + leaq 64(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $49, 160(%rsp) + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi +L_curve25519_avx2_inv_5: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 160(%rsp) + jnz L_curve25519_avx2_inv_5 + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi + leaq 64(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 128(%rsp), %rdi + leaq 96(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $0x63, 160(%rsp) + leaq 128(%rsp), %rdi + leaq 128(%rsp), %rsi +L_curve25519_avx2_inv_6: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 160(%rsp) + jnz L_curve25519_avx2_inv_6 + leaq 96(%rsp), %rdi + leaq 128(%rsp), %rsi + leaq 96(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $49, 160(%rsp) + leaq 96(%rsp), %rdi + leaq 96(%rsp), %rsi +L_curve25519_avx2_inv_7: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 160(%rsp) + jnz L_curve25519_avx2_inv_7 + leaq 64(%rsp), %rdi + leaq 96(%rsp), %rsi + leaq 64(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $4, 160(%rsp) + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi +L_curve25519_avx2_inv_8: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 160(%rsp) + jnz L_curve25519_avx2_inv_8 + movq 
%rsp, %rdi + leaq 64(%rsp), %rsi + leaq 32(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + movq 176(%rsp), %rdi + # Multiply + # A[0] * B[0] + movq (%rsp), %rdx + mulxq (%rdi), %r9, %r10 + # A[2] * B[0] + mulxq 16(%rdi), %r11, %r12 + # A[1] * B[0] + mulxq 8(%rdi), %rcx, %rbx + xorq %rbp, %rbp + adcxq %rcx, %r10 + # A[1] * B[3] + movq 24(%rsp), %rdx + mulxq 8(%rdi), %r13, %r14 + adcxq %rbx, %r11 + # A[0] * B[1] + movq 8(%rsp), %rdx + mulxq (%rdi), %rcx, %rbx + adoxq %rcx, %r10 + # A[2] * B[1] + mulxq 16(%rdi), %rcx, %r15 + adoxq %rbx, %r11 + adcxq %rcx, %r12 + # A[1] * B[2] + movq 16(%rsp), %rdx + mulxq 8(%rdi), %rcx, %rbx + adcxq %r15, %r13 + adoxq %rcx, %r12 + adcxq %rbp, %r14 + adoxq %rbx, %r13 + # A[0] * B[2] + mulxq (%rdi), %rcx, %rbx + adoxq %rbp, %r14 + xorq %r15, %r15 + adcxq %rcx, %r11 + # A[1] * B[1] + movq 8(%rsp), %rdx + mulxq 8(%rdi), %rdx, %rcx + adcxq %rbx, %r12 + adoxq %rdx, %r11 + # A[3] * B[1] + movq 8(%rsp), %rdx + adoxq %rcx, %r12 + mulxq 24(%rdi), %rcx, %rbx + adcxq %rcx, %r13 + # A[2] * B[2] + movq 16(%rsp), %rdx + mulxq 16(%rdi), %rdx, %rcx + adcxq %rbx, %r14 + adoxq %rdx, %r13 + # A[3] * B[3] + movq 24(%rsp), %rdx + adoxq %rcx, %r14 + mulxq 24(%rdi), %rcx, %rbx + adoxq %rbp, %r15 + adcxq %rcx, %r15 + # A[0] * B[3] + mulxq (%rdi), %rdx, %rcx + adcxq %rbx, %rbp + xorq %rbx, %rbx + adcxq %rdx, %r12 + # A[3] * B[0] + movq (%rsp), %rdx + adcxq %rcx, %r13 + mulxq 24(%rdi), %rdx, %rcx + adoxq %rdx, %r12 + adoxq %rcx, %r13 + # A[2] * B[3] + movq 24(%rsp), %rdx + mulxq 16(%rdi), %rdx, %rcx + adcxq %rdx, %r14 + # A[3] * B[2] + movq 16(%rsp), %rdx + adcxq %rcx, %r15 + mulxq 24(%rdi), %rcx, %rdx + adcxq %rbx, %rbp + adoxq %rcx, %r14 + adoxq %rdx, %r15 + adoxq %rbx, %rbp + # Reduce + movq $0x7fffffffffffffff, %rbx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r15, %rbp + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + andq %rbx, %r12 + # 
Multiply top half by 19 + movq $19, %rdx + xorq %rbx, %rbx + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %rcx, %r15 + adcxq %rcx, %r11 + adoxq %r15, %r12 + mulxq %rbp, %rbp, %rdx + adcxq %rbp, %r12 + adoxq %rbx, %rdx + adcxq %rbx, %rdx + # Overflow + shldq $0x01, %r12, %rdx + movq $0x7fffffffffffffff, %rbx + imulq $19, %rdx, %rcx + andq %rbx, %r12 + addq %rcx, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Reduce if top bit set + movq %r12, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rbx, %r12 + addq %rcx, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + adcq $0x00, %r12 + # Store + movq %r9, (%rdi) + movq %r10, 8(%rdi) + movq %r11, 16(%rdi) + movq %r12, 24(%rdi) + xorq %rax, %rax + addq $0xc0, %rsp + popq %rbp + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbx + repz retq +#ifndef __APPLE__ +.size curve25519_avx2,.-curve25519_avx2 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_pow22523_avx2 +.type fe_pow22523_avx2,@function +.align 4 +fe_pow22523_avx2: +#else +.section __TEXT,__text +.globl _fe_pow22523_avx2 +.p2align 2 +_fe_pow22523_avx2: +#endif /* __APPLE__ */ + subq $0x78, %rsp + # pow22523 + movq %rdi, 96(%rsp) + movq %rsi, 104(%rsp) + movq %rsp, %rdi + movq 104(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + movq %rsp, %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + movq 104(%rsp), %rsi + leaq 32(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + movq %rsp, %rdi + movq %rsp, %rsi + leaq 32(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif 
/* __APPLE__ */ + movq %rsp, %rdi + movq %rsp, %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movq %rsp, %rdi + leaq 32(%rsp), %rsi + movq %rsp, %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + movq %rsp, %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $4, 112(%rsp) + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi +L_fe_pow22523_avx2_1: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 112(%rsp) + jnz L_fe_pow22523_avx2_1 + movq %rsp, %rdi + leaq 32(%rsp), %rsi + movq %rsp, %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + movq %rsp, %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $9, 112(%rsp) + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi +L_fe_pow22523_avx2_2: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 112(%rsp) + jnz L_fe_pow22523_avx2_2 + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi + movq %rsp, %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + leaq 32(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $19, 112(%rsp) + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi +L_fe_pow22523_avx2_3: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 112(%rsp) + jnz L_fe_pow22523_avx2_3 + leaq 32(%rsp), %rdi + leaq 64(%rsp), %rsi + leaq 32(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $9, 
112(%rsp) + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi +L_fe_pow22523_avx2_4: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 112(%rsp) + jnz L_fe_pow22523_avx2_4 + movq %rsp, %rdi + leaq 32(%rsp), %rsi + movq %rsp, %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + movq %rsp, %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $49, 112(%rsp) + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi +L_fe_pow22523_avx2_5: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 112(%rsp) + jnz L_fe_pow22523_avx2_5 + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi + movq %rsp, %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 64(%rsp), %rdi + leaq 32(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $0x63, 112(%rsp) + leaq 64(%rsp), %rdi + leaq 64(%rsp), %rsi +L_fe_pow22523_avx2_6: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 112(%rsp) + jnz L_fe_pow22523_avx2_6 + leaq 32(%rsp), %rdi + leaq 64(%rsp), %rsi + leaq 32(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movb $49, 112(%rsp) + leaq 32(%rsp), %rdi + leaq 32(%rsp), %rsi +L_fe_pow22523_avx2_7: +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + decb 112(%rsp) + jnz L_fe_pow22523_avx2_7 + movq %rsp, %rdi + leaq 32(%rsp), %rsi + movq %rsp, %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + movq %rsp, %rdi + movq %rsp, %rsi +#ifndef __APPLE__ + callq 
fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movq %rsp, %rdi + movq %rsp, %rsi +#ifndef __APPLE__ + callq fe_sq_avx2@plt +#else + callq _fe_sq_avx2 +#endif /* __APPLE__ */ + movq 96(%rsp), %rdi + movq %rsp, %rsi + movq 104(%rsp), %rdx +#ifndef __APPLE__ + callq fe_mul_avx2@plt +#else + callq _fe_mul_avx2 +#endif /* __APPLE__ */ + movq 104(%rsp), %rsi + movq 96(%rsp), %rdi + addq $0x78, %rsp + repz retq +#ifndef __APPLE__ +.text +.globl fe_ge_to_p2_avx2 +.type fe_ge_to_p2_avx2,@function +.align 4 +fe_ge_to_p2_avx2: +#else +.section __TEXT,__text +.globl _fe_ge_to_p2_avx2 +.p2align 2 +_fe_ge_to_p2_avx2: +#endif /* __APPLE__ */ + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + subq $40, %rsp + movq %rsi, (%rsp) + movq %rdx, 8(%rsp) + movq %rcx, 16(%rsp) + movq %r8, 24(%rsp) + movq %r9, 32(%rsp) + movq 16(%rsp), %rsi + movq 88(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rdx + mulxq (%rsi), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rsi), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rsi), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rbx), %rdx + mulxq 8(%rsi), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rbx), %rdx + mulxq (%rsi), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rsi), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rbx), %rdx + mulxq 8(%rsi), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rsi), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rbx), %rdx + mulxq 8(%rsi), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rbx), %rdx + adoxq %rcx, %r11 + mulxq 24(%rsi), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rbx), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rbx), %rdx + adoxq %rcx, %r13 + mulxq 24(%rsi), %rcx, %rax + adoxq %r15, %r14 + 
adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rsi), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rbx), %rdx + adcxq %rcx, %r12 + mulxq 24(%rsi), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rbx), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rbx), %rdx + adcxq %rcx, %r14 + mulxq 24(%rsi), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq (%rsp), %rdi + movq 24(%rsp), %rsi + movq 32(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rdx + mulxq (%rsi), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rsi), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rsi), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rbx), %rdx + mulxq 8(%rsi), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rbx), %rdx + mulxq (%rsi), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rsi), %rcx, %r14 + adoxq %rax, %r10 + adcxq 
%rcx, %r11 + # A[1] * B[2] + movq 16(%rbx), %rdx + mulxq 8(%rsi), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rsi), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rbx), %rdx + mulxq 8(%rsi), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rbx), %rdx + adoxq %rcx, %r11 + mulxq 24(%rsi), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rbx), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rbx), %rdx + adoxq %rcx, %r13 + mulxq 24(%rsi), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rsi), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rbx), %rdx + adcxq %rcx, %r12 + mulxq 24(%rsi), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rbx), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rbx), %rdx + adcxq %rcx, %r14 + mulxq 24(%rsi), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 
+ addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rdi + movq 88(%rsp), %rsi + # Multiply + # A[0] * B[0] + movq (%rsi), %rdx + mulxq (%rbx), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rbx), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rbx), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rsi), %rdx + mulxq 8(%rbx), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rsi), %rdx + mulxq (%rbx), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rbx), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rsi), %rdx + mulxq 8(%rbx), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rbx), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rsi), %rdx + mulxq 8(%rbx), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rsi), %rdx + adoxq %rcx, %r11 + mulxq 24(%rbx), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rsi), %rdx + mulxq 16(%rbx), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rsi), %rdx + adoxq %rcx, %r13 + mulxq 24(%rbx), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rbx), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rsi), %rdx + adcxq %rcx, %r12 + mulxq 24(%rbx), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rsi), %rdx + mulxq 16(%rbx), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rsi), %rdx + adcxq %rcx, %r14 + mulxq 24(%rbx), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, 
%r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + addq $40, %rsp + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbx + repz retq +#ifndef __APPLE__ +.size fe_ge_to_p2_avx2,.-fe_ge_to_p2_avx2 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_ge_to_p3_avx2 +.type fe_ge_to_p3_avx2,@function +.align 4 +fe_ge_to_p3_avx2: +#else +.section __TEXT,__text +.globl _fe_ge_to_p3_avx2 +.p2align 2 +_fe_ge_to_p3_avx2: +#endif /* __APPLE__ */ + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + subq $40, %rsp + movq %rsi, (%rsp) + movq %rdx, 8(%rsp) + movq %rcx, 16(%rsp) + movq %r8, 24(%rsp) + movq %r9, 32(%rsp) + movq 24(%rsp), %rsi + movq 96(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rdx + mulxq (%rsi), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rsi), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rsi), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rbx), %rdx + mulxq 8(%rsi), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rbx), %rdx + mulxq (%rsi), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rsi), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rbx), %rdx + mulxq 8(%rsi), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, 
%r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rsi), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rbx), %rdx + mulxq 8(%rsi), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rbx), %rdx + adoxq %rcx, %r11 + mulxq 24(%rsi), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rbx), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rbx), %rdx + adoxq %rcx, %r13 + mulxq 24(%rsi), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rsi), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rbx), %rdx + adcxq %rcx, %r12 + mulxq 24(%rsi), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rbx), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rbx), %rdx + adcxq %rcx, %r14 + mulxq 24(%rsi), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq 
%r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq (%rsp), %rdi + movq 32(%rsp), %rsi + movq 88(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rdx + mulxq (%rsi), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rsi), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rsi), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rbx), %rdx + mulxq 8(%rsi), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rbx), %rdx + mulxq (%rsi), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rsi), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rbx), %rdx + mulxq 8(%rsi), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rsi), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rbx), %rdx + mulxq 8(%rsi), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rbx), %rdx + adoxq %rcx, %r11 + mulxq 24(%rsi), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rbx), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rbx), %rdx + adoxq %rcx, %r13 + mulxq 24(%rsi), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rsi), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rbx), %rdx + adcxq %rcx, %r12 + mulxq 24(%rsi), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rbx), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rbx), %rdx + adcxq %rcx, %r14 + mulxq 24(%rsi), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, 
%rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rdi + movq 96(%rsp), %rsi + # Multiply + # A[0] * B[0] + movq (%rsi), %rdx + mulxq (%rbx), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rbx), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rbx), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rsi), %rdx + mulxq 8(%rbx), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rsi), %rdx + mulxq (%rbx), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rbx), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rsi), %rdx + mulxq 8(%rbx), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rbx), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rsi), %rdx + mulxq 8(%rbx), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rsi), %rdx + adoxq %rcx, %r11 + mulxq 24(%rbx), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rsi), %rdx + mulxq 16(%rbx), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rsi), %rdx + adoxq %rcx, %r13 + mulxq 24(%rbx), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rbx), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + 
# A[3] * B[0] + movq (%rsi), %rdx + adcxq %rcx, %r12 + mulxq 24(%rbx), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rsi), %rdx + mulxq 16(%rbx), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rsi), %rdx + adcxq %rcx, %r14 + mulxq 24(%rbx), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 16(%rsp), %rdi + movq 24(%rsp), %rsi + movq 32(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rdx + mulxq (%rsi), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rsi), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rsi), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rbx), %rdx + mulxq 8(%rsi), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rbx), %rdx + mulxq (%rsi), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rsi), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rbx), %rdx + mulxq 8(%rsi), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + 
adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rsi), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rbx), %rdx + mulxq 8(%rsi), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rbx), %rdx + adoxq %rcx, %r11 + mulxq 24(%rsi), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rbx), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rbx), %rdx + adoxq %rcx, %r13 + mulxq 24(%rsi), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rsi), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rbx), %rdx + adcxq %rcx, %r12 + mulxq 24(%rsi), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rbx), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rbx), %rdx + adcxq %rcx, %r14 + mulxq 24(%rsi), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 
8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + addq $40, %rsp + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbx + repz retq +#ifndef __APPLE__ +.size fe_ge_to_p3_avx2,.-fe_ge_to_p3_avx2 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_ge_dbl_avx2 +.type fe_ge_dbl_avx2,@function +.align 4 +fe_ge_dbl_avx2: +#else +.section __TEXT,__text +.globl _fe_ge_dbl_avx2 +.p2align 2 +_fe_ge_dbl_avx2: +#endif /* __APPLE__ */ + pushq %rbp + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + subq $48, %rsp + movq %rdi, (%rsp) + movq %rsi, 8(%rsp) + movq %rdx, 16(%rsp) + movq %rcx, 24(%rsp) + movq %r8, 32(%rsp) + movq %r9, 40(%rsp) + movq 32(%rsp), %rsi + # Square + # A[0] * A[1] + movq (%rsi), %rdx + mulxq 8(%rsi), %r9, %r10 + # A[0] * A[3] + mulxq 24(%rsi), %r11, %r12 + # A[2] * A[1] + movq 16(%rsi), %rdx + mulxq 8(%rsi), %rcx, %rax + xorq %r15, %r15 + adoxq %rcx, %r11 + # A[2] * A[3] + mulxq 24(%rsi), %r13, %r14 + adoxq %rax, %r12 + # A[2] * A[0] + mulxq (%rsi), %rcx, %rax + adoxq %r15, %r13 + adcxq %rcx, %r10 + adoxq %r15, %r14 + # A[1] * A[3] + movq 8(%rsi), %rdx + mulxq 24(%rsi), %rbp, %r8 + adcxq %rax, %r11 + adcxq %rbp, %r12 + adcxq %r8, %r13 + adcxq %r15, %r14 + # Double with Carry Flag + xorq %r15, %r15 + # A[0] * A[0] + movq (%rsi), %rdx + mulxq %rdx, %r8, %rbp + adcxq %r9, %r9 + # A[1] * A[1] + movq 8(%rsi), %rdx + mulxq %rdx, %rcx, %rax + adcxq %r10, %r10 + adoxq %rbp, %r9 + adcxq %r11, %r11 + adoxq %rcx, %r10 + # A[2] * A[2] + movq 16(%rsi), %rdx + mulxq %rdx, %rbp, %rcx + adcxq %r12, %r12 + adoxq %rax, %r11 + adcxq %r13, %r13 + adoxq %rbp, %r12 + # A[3] * A[3] + movq 24(%rsi), %rdx + mulxq %rdx, %rbp, %rax + adcxq %r14, %r14 + adoxq %rcx, %r13 + adcxq %r15, %r15 + adoxq %rbp, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # 
Multiply top half by 19 + movq $19, %rdx + xorq %rcx, %rcx + mulxq %r12, %rbp, %r12 + adcxq %rbp, %r8 + adoxq %r12, %r9 + mulxq %r13, %rbp, %r13 + adcxq %rbp, %r9 + adoxq %r13, %r10 + mulxq %r14, %rbp, %r14 + adcxq %rbp, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rcx, %rdx + adcxq %rcx, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rcx + imulq $19, %rdx, %rbp + andq %rcx, %r11 + addq %rbp, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rbp + andq %rcx, %r11 + addq %rbp, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 16(%rsp), %rdi + movq 40(%rsp), %rbx + # Square + # A[0] * A[1] + movq (%rbx), %rdx + mulxq 8(%rbx), %r9, %r10 + # A[0] * A[3] + mulxq 24(%rbx), %r11, %r12 + # A[2] * A[1] + movq 16(%rbx), %rdx + mulxq 8(%rbx), %rcx, %rax + xorq %r15, %r15 + adoxq %rcx, %r11 + # A[2] * A[3] + mulxq 24(%rbx), %r13, %r14 + adoxq %rax, %r12 + # A[2] * A[0] + mulxq (%rbx), %rcx, %rax + adoxq %r15, %r13 + adcxq %rcx, %r10 + adoxq %r15, %r14 + # A[1] * A[3] + movq 8(%rbx), %rdx + mulxq 24(%rbx), %rbp, %r8 + adcxq %rax, %r11 + adcxq %rbp, %r12 + adcxq %r8, %r13 + adcxq %r15, %r14 + # Double with Carry Flag + xorq %r15, %r15 + # A[0] * A[0] + movq (%rbx), %rdx + mulxq %rdx, %r8, %rbp + adcxq %r9, %r9 + # A[1] * A[1] + movq 8(%rbx), %rdx + mulxq %rdx, %rcx, %rax + adcxq %r10, %r10 + adoxq %rbp, %r9 + adcxq %r11, %r11 + adoxq %rcx, %r10 + # A[2] * A[2] + movq 16(%rbx), %rdx + mulxq %rdx, %rbp, %rcx + adcxq %r12, %r12 + adoxq %rax, %r11 + adcxq %r13, %r13 + adoxq %rbp, %r12 + # A[3] * A[3] + movq 24(%rbx), %rdx + mulxq %rdx, %rbp, %rax + adcxq %r14, %r14 + adoxq %rcx, %r13 + adcxq %r15, %r15 + adoxq %rbp, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top 
bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rcx, %rcx + mulxq %r12, %rbp, %r12 + adcxq %rbp, %r8 + adoxq %r12, %r9 + mulxq %r13, %rbp, %r13 + adcxq %rbp, %r9 + adoxq %r13, %r10 + mulxq %r14, %rbp, %r14 + adcxq %rbp, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rcx, %rdx + adcxq %rcx, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rcx + imulq $19, %rdx, %rbp + andq %rcx, %r11 + addq %rbp, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rbp + andq %rcx, %r11 + addq %rbp, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rdi + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq (%rbx), %r8 + movq 16(%rsi), %r10 + adcq 8(%rbx), %r9 + movq 24(%rsi), %rdx + adcq 16(%rbx), %r10 + movq $-19, %rcx + adcq 24(%rbx), %rdx + movq $0x7fffffffffffffff, %rax + movq %rdx, %r11 + sarq $63, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Sub modulus (if overflow) + subq %rcx, %r8 + sbbq %rdx, %r9 + sbbq %rdx, %r10 + sbbq %rax, %r11 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 24(%rsp), %rsi + # Square + # A[0] * A[1] + movq (%rdi), %rdx + mulxq 8(%rdi), %r9, %r10 + # A[0] * A[3] + mulxq 24(%rdi), %r11, %r12 + # A[2] * A[1] + movq 16(%rdi), %rdx + mulxq 8(%rdi), %rcx, %rax + xorq %r15, %r15 + adoxq %rcx, %r11 + # A[2] * A[3] + mulxq 24(%rdi), %r13, %r14 + adoxq %rax, %r12 + # A[2] * A[0] + mulxq (%rdi), %rcx, %rax + adoxq %r15, %r13 + adcxq %rcx, %r10 + adoxq %r15, %r14 + # A[1] * A[3] + movq 8(%rdi), %rdx + mulxq 24(%rdi), %rbp, %r8 + adcxq %rax, %r11 + adcxq %rbp, %r12 + adcxq %r8, %r13 + adcxq %r15, %r14 + # 
Double with Carry Flag + xorq %r15, %r15 + # A[0] * A[0] + movq (%rdi), %rdx + mulxq %rdx, %r8, %rbp + adcxq %r9, %r9 + # A[1] * A[1] + movq 8(%rdi), %rdx + mulxq %rdx, %rcx, %rax + adcxq %r10, %r10 + adoxq %rbp, %r9 + adcxq %r11, %r11 + adoxq %rcx, %r10 + # A[2] * A[2] + movq 16(%rdi), %rdx + mulxq %rdx, %rbp, %rcx + adcxq %r12, %r12 + adoxq %rax, %r11 + adcxq %r13, %r13 + adoxq %rbp, %r12 + # A[3] * A[3] + movq 24(%rdi), %rdx + mulxq %rdx, %rbp, %rax + adcxq %r14, %r14 + adoxq %rcx, %r13 + adcxq %r15, %r15 + adoxq %rbp, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rcx + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rcx, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rcx, %rcx + mulxq %r12, %rbp, %r12 + adcxq %rbp, %r8 + adoxq %r12, %r9 + mulxq %r13, %rbp, %r13 + adcxq %rbp, %r9 + adoxq %r13, %r10 + mulxq %r14, %rbp, %r14 + adcxq %rbp, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rcx, %rdx + adcxq %rcx, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rcx + imulq $19, %rdx, %rbp + andq %rcx, %r11 + addq %rbp, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rbp + andq %rcx, %r11 + addq %rbp, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rsi) + movq %r9, 8(%rsi) + movq %r10, 16(%rsi) + movq %r11, 24(%rsi) + movq 16(%rsp), %rsi + movq (%rsp), %rbx + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %rdx + movq %r8, %r12 + addq (%rbx), %r8 + movq %r9, %r13 + adcq 8(%rbx), %r9 + movq %r10, %r14 + adcq 16(%rbx), %r10 + movq %rdx, %r15 + adcq 24(%rbx), %rdx + movq $-19, %rcx + movq %rdx, %r11 + movq $0x7fffffffffffffff, %rax + sarq $63, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Sub 
modulus (if overflow) + subq %rcx, %r8 + sbbq %rdx, %r9 + sbbq %rdx, %r10 + sbbq %rax, %r11 + # Sub + subq (%rbx), %r12 + movq $0x00, %rdx + sbbq 8(%rbx), %r13 + movq $-19, %rcx + sbbq 16(%rbx), %r14 + movq $0x7fffffffffffffff, %rax + sbbq 24(%rbx), %r15 + sbbq $0x00, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Add modulus (if underflow) + addq %rcx, %r12 + adcq %rdx, %r13 + adcq %rdx, %r14 + adcq %rax, %r15 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq %r12, (%rsi) + movq %r13, 8(%rsi) + movq %r14, 16(%rsi) + movq %r15, 24(%rsi) + movq 24(%rsp), %rsi + # Sub + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %r11 + subq (%rdi), %r8 + movq $0x00, %rdx + sbbq 8(%rdi), %r9 + movq $-19, %rcx + sbbq 16(%rdi), %r10 + movq $0x7fffffffffffffff, %rax + sbbq 24(%rdi), %r11 + sbbq $0x00, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Add modulus (if underflow) + addq %rcx, %r8 + adcq %rdx, %r9 + adcq %rdx, %r10 + adcq %rax, %r11 + movq %r8, (%rbx) + movq %r9, 8(%rbx) + movq %r10, 16(%rbx) + movq %r11, 24(%rbx) + movq 104(%rsp), %rdi + # Square * 2 + # A[0] * A[1] + movq (%rdi), %rdx + mulxq 8(%rdi), %r9, %r10 + # A[0] * A[3] + mulxq 24(%rdi), %r11, %r12 + # A[2] * A[1] + movq 16(%rdi), %rdx + mulxq 8(%rdi), %rcx, %rax + xorq %r15, %r15 + adoxq %rcx, %r11 + # A[2] * A[3] + mulxq 24(%rdi), %r13, %r14 + adoxq %rax, %r12 + # A[2] * A[0] + mulxq (%rdi), %rcx, %rax + adoxq %r15, %r13 + adcxq %rcx, %r10 + adoxq %r15, %r14 + # A[1] * A[3] + movq 8(%rdi), %rdx + mulxq 24(%rdi), %rbp, %r8 + adcxq %rax, %r11 + adcxq %rbp, %r12 + adcxq %r8, %r13 + adcxq %r15, %r14 + # Double with Carry Flag + xorq %r15, %r15 + # A[0] * A[0] + movq (%rdi), %rdx + mulxq %rdx, %r8, %rbp + adcxq %r9, %r9 + # A[1] * A[1] + movq 8(%rdi), %rdx + mulxq %rdx, %rcx, %rax + adcxq %r10, %r10 + adoxq %rbp, %r9 + adcxq %r11, %r11 + adoxq %rcx, %r10 + # A[2] * A[2] + movq 16(%rdi), %rdx + mulxq %rdx, %rbp, 
%rcx + adcxq %r12, %r12 + adoxq %rax, %r11 + adcxq %r13, %r13 + adoxq %rbp, %r12 + # A[3] * A[3] + movq 24(%rdi), %rdx + mulxq %rdx, %rbp, %rax + adcxq %r14, %r14 + adoxq %rcx, %r13 + adcxq %r15, %r15 + adoxq %rbp, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + xorq %rbp, %rbp + # Move top half into t4-t7 and remove top bit from t3 and double + shldq $3, %r15, %rbp + shldq $2, %r14, %r15 + shldq $2, %r13, %r14 + shldq $2, %r12, %r13 + shldq $2, %r11, %r12 + shldq $0x01, %r10, %r11 + shldq $0x01, %r9, %r10 + shldq $0x01, %r8, %r9 + shlq $0x01, %r8 + andq %rax, %r11 + # Two out left, one in right + andq %rax, %r15 + # Multiply top bits by 19*19 + imulq $0x169, %rbp, %rcx + xorq %rax, %rax + # Multiply top half by 19 + movq $19, %rdx + adoxq %rcx, %r8 + mulxq %r12, %rbp, %r12 + adcxq %rbp, %r8 + adoxq %r12, %r9 + mulxq %r13, %rbp, %r13 + adcxq %rbp, %r9 + adoxq %r13, %r10 + mulxq %r14, %rbp, %r14 + adcxq %rbp, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rbp + andq %rax, %r11 + addq %rbp, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rbp + andq %rax, %r11 + addq %rbp, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rsi) + movq %r9, 8(%rsi) + movq %r10, 16(%rsi) + movq %r11, 24(%rsi) + movq 16(%rsp), %rdi + # Sub + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %r11 + subq (%rdi), %r8 + movq $0x00, %rdx + sbbq 8(%rdi), %r9 + movq $-19, %rcx + sbbq 16(%rdi), %r10 + movq $0x7fffffffffffffff, %rax + sbbq 24(%rdi), %r11 + sbbq $0x00, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Add modulus (if underflow) + addq %rcx, %r8 + adcq %rdx, %r9 + adcq %rdx, %r10 + adcq %rax, %r11 + movq %r8, (%rsi) + movq %r9, 8(%rsi) + movq 
%r10, 16(%rsi) + movq %r11, 24(%rsi) + addq $48, %rsp + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbx + popq %rbp + repz retq +#ifndef __APPLE__ +.size fe_ge_dbl_avx2,.-fe_ge_dbl_avx2 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_ge_madd_avx2 +.type fe_ge_madd_avx2,@function +.align 4 +fe_ge_madd_avx2: +#else +.section __TEXT,__text +.globl _fe_ge_madd_avx2 +.p2align 2 +_fe_ge_madd_avx2: +#endif /* __APPLE__ */ + pushq %rbp + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + subq $48, %rsp + movq %rdi, (%rsp) + movq %rsi, 8(%rsp) + movq %rdx, 16(%rsp) + movq %rcx, 24(%rsp) + movq %r8, 32(%rsp) + movq %r9, 40(%rsp) + movq 8(%rsp), %rsi + movq 40(%rsp), %rbx + movq 32(%rsp), %rbp + # Add + movq (%rbx), %r8 + movq 8(%rbx), %r9 + movq 16(%rbx), %r10 + movq 24(%rbx), %rdx + movq %r8, %r12 + addq (%rbp), %r8 + movq %r9, %r13 + adcq 8(%rbp), %r9 + movq %r10, %r14 + adcq 16(%rbp), %r10 + movq %rdx, %r15 + adcq 24(%rbp), %rdx + movq $-19, %rcx + movq %rdx, %r11 + movq $0x7fffffffffffffff, %rax + sarq $63, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Sub modulus (if overflow) + subq %rcx, %r8 + sbbq %rdx, %r9 + sbbq %rdx, %r10 + sbbq %rax, %r11 + # Sub + subq (%rbp), %r12 + movq $0x00, %rdx + sbbq 8(%rbp), %r13 + movq $-19, %rcx + sbbq 16(%rbp), %r14 + movq $0x7fffffffffffffff, %rax + sbbq 24(%rbp), %r15 + sbbq $0x00, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Add modulus (if underflow) + addq %rcx, %r12 + adcq %rdx, %r13 + adcq %rdx, %r14 + adcq %rax, %r15 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq %r12, (%rsi) + movq %r13, 8(%rsi) + movq %r14, 16(%rsi) + movq %r15, 24(%rsi) + movq 16(%rsp), %rbx + movq 128(%rsp), %rbp + # Multiply + # A[0] * B[0] + movq (%rbp), %rdx + mulxq (%rdi), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rdi), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rdi), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 
24(%rbp), %rdx + mulxq 8(%rdi), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rbp), %rdx + mulxq (%rdi), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rdi), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rbp), %rdx + mulxq 8(%rdi), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rdi), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rbp), %rdx + mulxq 8(%rdi), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rbp), %rdx + adoxq %rcx, %r11 + mulxq 24(%rdi), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rbp), %rdx + mulxq 16(%rdi), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rbp), %rdx + adoxq %rcx, %r13 + mulxq 24(%rdi), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rdi), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rbp), %rdx + adcxq %rcx, %r12 + mulxq 24(%rdi), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rbp), %rdx + mulxq 16(%rdi), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rbp), %rdx + adcxq %rcx, %r14 + mulxq 24(%rdi), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + 
imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rbx) + movq %r9, 8(%rbx) + movq %r10, 16(%rbx) + movq %r11, 24(%rbx) + movq 136(%rsp), %rdi + # Multiply + # A[0] * B[0] + movq (%rdi), %rdx + mulxq (%rsi), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rsi), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rsi), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rdi), %rdx + mulxq 8(%rsi), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rdi), %rdx + mulxq (%rsi), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rsi), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rdi), %rdx + mulxq 8(%rsi), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rsi), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rdi), %rdx + mulxq 8(%rsi), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rdi), %rdx + adoxq %rcx, %r11 + mulxq 24(%rsi), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rdi), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rdi), %rdx + adoxq %rcx, %r13 + mulxq 24(%rsi), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rsi), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rdi), %rdx + adcxq %rcx, %r12 + mulxq 24(%rsi), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rdi), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rdi), %rdx + adcxq %rcx, %r14 + mulxq 24(%rsi), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 
+ # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rsi) + movq %r9, 8(%rsi) + movq %r10, 16(%rsi) + movq %r11, 24(%rsi) + movq 24(%rsp), %rdi + movq 120(%rsp), %rsi + movq 112(%rsp), %rbp + # Multiply + # A[0] * B[0] + movq (%rbp), %rdx + mulxq (%rsi), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rsi), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rsi), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rbp), %rdx + mulxq 8(%rsi), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rbp), %rdx + mulxq (%rsi), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rsi), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rbp), %rdx + mulxq 8(%rsi), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rsi), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rbp), %rdx + mulxq 8(%rsi), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rbp), %rdx + adoxq %rcx, %r11 + mulxq 24(%rsi), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 
16(%rbp), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rbp), %rdx + adoxq %rcx, %r13 + mulxq 24(%rsi), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rsi), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rbp), %rdx + adcxq %rcx, %r12 + mulxq 24(%rsi), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rbp), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rbp), %rdx + adcxq %rcx, %r14 + mulxq 24(%rsi), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rdi + movq (%rsp), %rsi + # Add + movq (%rbx), %r8 + movq 8(%rbx), %r9 + movq 16(%rbx), %r10 + movq 24(%rbx), %rdx + movq %r8, %r12 + addq (%rdi), %r8 + movq %r9, %r13 + adcq 8(%rdi), %r9 + movq %r10, %r14 + adcq 16(%rdi), %r10 + movq %rdx, %r15 + adcq 24(%rdi), %rdx + movq $-19, %rcx + movq %rdx, 
%r11 + movq $0x7fffffffffffffff, %rax + sarq $63, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Sub modulus (if overflow) + subq %rcx, %r8 + sbbq %rdx, %r9 + sbbq %rdx, %r10 + sbbq %rax, %r11 + # Sub + subq (%rdi), %r12 + movq $0x00, %rdx + sbbq 8(%rdi), %r13 + movq $-19, %rcx + sbbq 16(%rdi), %r14 + movq $0x7fffffffffffffff, %rax + sbbq 24(%rdi), %r15 + sbbq $0x00, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Add modulus (if underflow) + addq %rcx, %r12 + adcq %rdx, %r13 + adcq %rdx, %r14 + adcq %rax, %r15 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq %r12, (%rsi) + movq %r13, 8(%rsi) + movq %r14, 16(%rsi) + movq %r15, 24(%rsi) + movq 104(%rsp), %rdi + # Double + movq (%rdi), %r8 + movq 8(%rdi), %r9 + addq %r8, %r8 + movq 16(%rdi), %r10 + adcq %r9, %r9 + movq 24(%rdi), %rdx + adcq %r10, %r10 + movq $-19, %rcx + adcq %rdx, %rdx + movq $0x7fffffffffffffff, %rax + movq %rdx, %r11 + sarq $63, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Sub modulus (if overflow) + subq %rcx, %r8 + sbbq %rdx, %r9 + sbbq %rdx, %r10 + sbbq %rax, %r11 + movq %r8, (%rbx) + movq %r9, 8(%rbx) + movq %r10, 16(%rbx) + movq %r11, 24(%rbx) + movq 24(%rsp), %rdi + # Add + movq (%rbx), %r8 + movq 8(%rbx), %r9 + movq 16(%rbx), %r10 + movq 24(%rbx), %rdx + movq %r8, %r12 + addq (%rdi), %r8 + movq %r9, %r13 + adcq 8(%rdi), %r9 + movq %r10, %r14 + adcq 16(%rdi), %r10 + movq %rdx, %r15 + adcq 24(%rdi), %rdx + movq $-19, %rcx + movq %rdx, %r11 + movq $0x7fffffffffffffff, %rax + sarq $63, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Sub modulus (if overflow) + subq %rcx, %r8 + sbbq %rdx, %r9 + sbbq %rdx, %r10 + sbbq %rax, %r11 + # Sub + subq (%rdi), %r12 + movq $0x00, %rdx + sbbq 8(%rdi), %r13 + movq $-19, %rcx + sbbq 16(%rdi), %r14 + movq $0x7fffffffffffffff, %rax + sbbq 24(%rdi), %r15 + sbbq $0x00, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Add modulus 
(if underflow) + addq %rcx, %r12 + adcq %rdx, %r13 + adcq %rdx, %r14 + adcq %rax, %r15 + movq %r8, (%rbx) + movq %r9, 8(%rbx) + movq %r10, 16(%rbx) + movq %r11, 24(%rbx) + movq %r12, (%rdi) + movq %r13, 8(%rdi) + movq %r14, 16(%rdi) + movq %r15, 24(%rdi) + addq $48, %rsp + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbx + popq %rbp + repz retq +#ifndef __APPLE__ +.size fe_ge_madd_avx2,.-fe_ge_madd_avx2 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_ge_msub_avx2 +.type fe_ge_msub_avx2,@function +.align 4 +fe_ge_msub_avx2: +#else +.section __TEXT,__text +.globl _fe_ge_msub_avx2 +.p2align 2 +_fe_ge_msub_avx2: +#endif /* __APPLE__ */ + pushq %rbp + pushq %rbx + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + subq $48, %rsp + movq %rdi, (%rsp) + movq %rsi, 8(%rsp) + movq %rdx, 16(%rsp) + movq %rcx, 24(%rsp) + movq %r8, 32(%rsp) + movq %r9, 40(%rsp) + movq 8(%rsp), %rsi + movq 40(%rsp), %rbx + movq 32(%rsp), %rbp + # Add + movq (%rbx), %r8 + movq 8(%rbx), %r9 + movq 16(%rbx), %r10 + movq 24(%rbx), %rdx + movq %r8, %r12 + addq (%rbp), %r8 + movq %r9, %r13 + adcq 8(%rbp), %r9 + movq %r10, %r14 + adcq 16(%rbp), %r10 + movq %rdx, %r15 + adcq 24(%rbp), %rdx + movq $-19, %rcx + movq %rdx, %r11 + movq $0x7fffffffffffffff, %rax + sarq $63, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Sub modulus (if overflow) + subq %rcx, %r8 + sbbq %rdx, %r9 + sbbq %rdx, %r10 + sbbq %rax, %r11 + # Sub + subq (%rbp), %r12 + movq $0x00, %rdx + sbbq 8(%rbp), %r13 + movq $-19, %rcx + sbbq 16(%rbp), %r14 + movq $0x7fffffffffffffff, %rax + sbbq 24(%rbp), %r15 + sbbq $0x00, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Add modulus (if underflow) + addq %rcx, %r12 + adcq %rdx, %r13 + adcq %rdx, %r14 + adcq %rax, %r15 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq %r12, (%rsi) + movq %r13, 8(%rsi) + movq %r14, 16(%rsi) + movq %r15, 24(%rsi) + movq 16(%rsp), %rbx + movq 136(%rsp), %rbp + # 
Multiply + # A[0] * B[0] + movq (%rbp), %rdx + mulxq (%rdi), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rdi), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rdi), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rbp), %rdx + mulxq 8(%rdi), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rbp), %rdx + mulxq (%rdi), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rdi), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rbp), %rdx + mulxq 8(%rdi), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rdi), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rbp), %rdx + mulxq 8(%rdi), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rbp), %rdx + adoxq %rcx, %r11 + mulxq 24(%rdi), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rbp), %rdx + mulxq 16(%rdi), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rbp), %rdx + adoxq %rcx, %r13 + mulxq 24(%rdi), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rdi), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rbp), %rdx + adcxq %rcx, %r12 + mulxq 24(%rdi), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rbp), %rdx + mulxq 16(%rdi), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rbp), %rdx + adcxq %rcx, %r14 + mulxq 24(%rdi), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 
+ mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rbx) + movq %r9, 8(%rbx) + movq %r10, 16(%rbx) + movq %r11, 24(%rbx) + movq 128(%rsp), %rdi + # Multiply + # A[0] * B[0] + movq (%rdi), %rdx + mulxq (%rsi), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rsi), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rsi), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rdi), %rdx + mulxq 8(%rsi), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rdi), %rdx + mulxq (%rsi), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rsi), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rdi), %rdx + mulxq 8(%rsi), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rsi), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rdi), %rdx + mulxq 8(%rsi), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rdi), %rdx + adoxq %rcx, %r11 + mulxq 24(%rsi), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rdi), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rdi), %rdx + adoxq %rcx, %r13 + mulxq 24(%rsi), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rsi), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rdi), %rdx + adcxq %rcx, %r12 + mulxq 24(%rsi), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 
24(%rdi), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rdi), %rdx + adcxq %rcx, %r14 + mulxq 24(%rsi), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rsi) + movq %r9, 8(%rsi) + movq %r10, 16(%rsi) + movq %r11, 24(%rsi) + movq 24(%rsp), %rdi + movq 120(%rsp), %rsi + movq 112(%rsp), %rbp + # Multiply + # A[0] * B[0] + movq (%rbp), %rdx + mulxq (%rsi), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rsi), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rsi), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rbp), %rdx + mulxq 8(%rsi), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rbp), %rdx + mulxq (%rsi), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rsi), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rbp), %rdx + mulxq 8(%rsi), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rsi), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # 
A[1] * B[1] + movq 8(%rbp), %rdx + mulxq 8(%rsi), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rbp), %rdx + adoxq %rcx, %r11 + mulxq 24(%rsi), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rbp), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rbp), %rdx + adoxq %rcx, %r13 + mulxq 24(%rsi), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rsi), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rbp), %rdx + adcxq %rcx, %r12 + mulxq 24(%rsi), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rbp), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rbp), %rdx + adcxq %rcx, %r14 + mulxq 24(%rsi), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq 8(%rsp), %rsi + movq (%rsp), %rbp + # Add + movq (%rbx), %r8 + movq 8(%rbx), %r9 + 
movq 16(%rbx), %r10 + movq 24(%rbx), %rdx + movq %r8, %r12 + addq (%rsi), %r8 + movq %r9, %r13 + adcq 8(%rsi), %r9 + movq %r10, %r14 + adcq 16(%rsi), %r10 + movq %rdx, %r15 + adcq 24(%rsi), %rdx + movq $-19, %rcx + movq %rdx, %r11 + movq $0x7fffffffffffffff, %rax + sarq $63, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Sub modulus (if overflow) + subq %rcx, %r8 + sbbq %rdx, %r9 + sbbq %rdx, %r10 + sbbq %rax, %r11 + # Sub + subq (%rsi), %r12 + movq $0x00, %rdx + sbbq 8(%rsi), %r13 + movq $-19, %rcx + sbbq 16(%rsi), %r14 + movq $0x7fffffffffffffff, %rax + sbbq 24(%rsi), %r15 + sbbq $0x00, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Add modulus (if underflow) + addq %rcx, %r12 + adcq %rdx, %r13 + adcq %rdx, %r14 + adcq %rax, %r15 + movq %r8, (%rsi) + movq %r9, 8(%rsi) + movq %r10, 16(%rsi) + movq %r11, 24(%rsi) + movq %r12, (%rbp) + movq %r13, 8(%rbp) + movq %r14, 16(%rbp) + movq %r15, 24(%rbp) + movq 104(%rsp), %rsi + # Double + movq (%rsi), %r8 + movq 8(%rsi), %r9 + addq %r8, %r8 + movq 16(%rsi), %r10 + adcq %r9, %r9 + movq 24(%rsi), %rdx + adcq %r10, %r10 + movq $-19, %rcx + adcq %rdx, %rdx + movq $0x7fffffffffffffff, %rax + movq %rdx, %r11 + sarq $63, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Sub modulus (if overflow) + subq %rcx, %r8 + sbbq %rdx, %r9 + sbbq %rdx, %r10 + sbbq %rax, %r11 + movq %r8, (%rbx) + movq %r9, 8(%rbx) + movq %r10, 16(%rbx) + movq %r11, 24(%rbx) + # Add + movq (%rbx), %r8 + movq 8(%rbx), %r9 + movq 16(%rbx), %r10 + movq 24(%rbx), %rdx + movq %r8, %r12 + addq (%rdi), %r8 + movq %r9, %r13 + adcq 8(%rdi), %r9 + movq %r10, %r14 + adcq 16(%rdi), %r10 + movq %rdx, %r15 + adcq 24(%rdi), %rdx + movq $-19, %rcx + movq %rdx, %r11 + movq $0x7fffffffffffffff, %rax + sarq $63, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Sub modulus (if overflow) + subq %rcx, %r8 + sbbq %rdx, %r9 + sbbq %rdx, %r10 + sbbq %rax, %r11 + # Sub + subq (%rdi), %r12 + movq $0x00, %rdx + sbbq 
8(%rdi), %r13 + movq $-19, %rcx + sbbq 16(%rdi), %r14 + movq $0x7fffffffffffffff, %rax + sbbq 24(%rdi), %r15 + sbbq $0x00, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Add modulus (if underflow) + addq %rcx, %r12 + adcq %rdx, %r13 + adcq %rdx, %r14 + adcq %rax, %r15 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq %r12, (%rbx) + movq %r13, 8(%rbx) + movq %r14, 16(%rbx) + movq %r15, 24(%rbx) + addq $48, %rsp + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbx + popq %rbp + repz retq +#ifndef __APPLE__ +.size fe_ge_msub_avx2,.-fe_ge_msub_avx2 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_ge_add_avx2 +.type fe_ge_add_avx2,@function +.align 4 +fe_ge_add_avx2: +#else +.section __TEXT,__text +.globl _fe_ge_add_avx2 +.p2align 2 +_fe_ge_add_avx2: +#endif /* __APPLE__ */ + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + subq $0x50, %rsp + movq %rdi, (%rsp) + movq %rsi, 8(%rsp) + movq %rdx, 16(%rsp) + movq %rcx, 24(%rsp) + movq %r8, 32(%rsp) + movq %r9, 40(%rsp) + movq 8(%rsp), %rsi + movq 40(%rsp), %rbx + movq 32(%rsp), %rbp + # Add + movq (%rbx), %r8 + movq 8(%rbx), %r9 + movq 16(%rbx), %r10 + movq 24(%rbx), %rdx + movq %r8, %r12 + addq (%rbp), %r8 + movq %r9, %r13 + adcq 8(%rbp), %r9 + movq %r10, %r14 + adcq 16(%rbp), %r10 + movq %rdx, %r15 + adcq 24(%rbp), %rdx + movq $-19, %rcx + movq %rdx, %r11 + movq $0x7fffffffffffffff, %rax + sarq $63, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Sub modulus (if overflow) + subq %rcx, %r8 + sbbq %rdx, %r9 + sbbq %rdx, %r10 + sbbq %rax, %r11 + # Sub + subq (%rbp), %r12 + movq $0x00, %rdx + sbbq 8(%rbp), %r13 + movq $-19, %rcx + sbbq 16(%rbp), %r14 + movq $0x7fffffffffffffff, %rax + sbbq 24(%rbp), %r15 + sbbq $0x00, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Add modulus (if underflow) + addq %rcx, %r12 + adcq %rdx, %r13 + adcq %rdx, %r14 + adcq %rax, %r15 + movq %r8, (%rdi) + 
movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq %r12, (%rsi) + movq %r13, 8(%rsi) + movq %r14, 16(%rsi) + movq %r15, 24(%rsi) + movq 16(%rsp), %rbx + movq 168(%rsp), %rbp + # Multiply + # A[0] * B[0] + movq (%rbp), %rdx + mulxq (%rdi), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rdi), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rdi), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rbp), %rdx + mulxq 8(%rdi), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rbp), %rdx + mulxq (%rdi), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rdi), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rbp), %rdx + mulxq 8(%rdi), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rdi), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rbp), %rdx + mulxq 8(%rdi), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rbp), %rdx + adoxq %rcx, %r11 + mulxq 24(%rdi), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rbp), %rdx + mulxq 16(%rdi), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rbp), %rdx + adoxq %rcx, %r13 + mulxq 24(%rdi), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rdi), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rbp), %rdx + adcxq %rcx, %r12 + mulxq 24(%rdi), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rbp), %rdx + mulxq 16(%rdi), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rbp), %rdx + adcxq %rcx, %r14 + mulxq 24(%rdi), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq 
%rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rbx) + movq %r9, 8(%rbx) + movq %r10, 16(%rbx) + movq %r11, 24(%rbx) + movq 176(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rdx + mulxq (%rsi), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rsi), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rsi), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rbx), %rdx + mulxq 8(%rsi), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rbx), %rdx + mulxq (%rsi), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rsi), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rbx), %rdx + mulxq 8(%rsi), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rsi), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rbx), %rdx + mulxq 8(%rsi), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rbx), %rdx + adoxq %rcx, %r11 + mulxq 24(%rsi), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rbx), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rbx), %rdx + adoxq %rcx, %r13 + mulxq 24(%rsi), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rsi), %rdx, %rcx + adcxq 
%rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rbx), %rdx + adcxq %rcx, %r12 + mulxq 24(%rsi), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rbx), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rbx), %rdx + adcxq %rcx, %r14 + mulxq 24(%rsi), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rsi) + movq %r9, 8(%rsi) + movq %r10, 16(%rsi) + movq %r11, 24(%rsi) + movq 24(%rsp), %rsi + movq 160(%rsp), %rbx + movq 144(%rsp), %rbp + # Multiply + # A[0] * B[0] + movq (%rbp), %rdx + mulxq (%rbx), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rbx), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rbx), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rbp), %rdx + mulxq 8(%rbx), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rbp), %rdx + mulxq (%rbx), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rbx), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rbp), %rdx + mulxq 8(%rbx), 
%rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rbx), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rbp), %rdx + mulxq 8(%rbx), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rbp), %rdx + adoxq %rcx, %r11 + mulxq 24(%rbx), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rbp), %rdx + mulxq 16(%rbx), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rbp), %rdx + adoxq %rcx, %r13 + mulxq 24(%rbx), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rbx), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rbp), %rdx + adcxq %rcx, %r12 + mulxq 24(%rbx), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rbp), %rdx + mulxq 16(%rbx), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rbp), %rdx + adcxq %rcx, %r14 + mulxq 24(%rbx), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq 
$0x00, %r11 + # Store + movq %r8, (%rsi) + movq %r9, 8(%rsi) + movq %r10, 16(%rsi) + movq %r11, 24(%rsi) + movq 136(%rsp), %rsi + movq 152(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rdx + mulxq (%rsi), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rsi), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rsi), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rbx), %rdx + mulxq 8(%rsi), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rbx), %rdx + mulxq (%rsi), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rsi), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rbx), %rdx + mulxq 8(%rsi), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rsi), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rbx), %rdx + mulxq 8(%rsi), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rbx), %rdx + adoxq %rcx, %r11 + mulxq 24(%rsi), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rbx), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rbx), %rdx + adoxq %rcx, %r13 + mulxq 24(%rsi), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rsi), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rbx), %rdx + adcxq %rcx, %r12 + mulxq 24(%rsi), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rbx), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rbx), %rdx + adcxq %rcx, %r14 + mulxq 24(%rsi), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + 
movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + leaq 48(%rsp), %rsi + # Double + movq (%rdi), %r8 + movq 8(%rdi), %r9 + addq %r8, %r8 + movq 16(%rdi), %r10 + adcq %r9, %r9 + movq 24(%rdi), %rdx + adcq %r10, %r10 + movq $-19, %rcx + adcq %rdx, %rdx + movq $0x7fffffffffffffff, %rax + movq %rdx, %r11 + sarq $63, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Sub modulus (if overflow) + subq %rcx, %r8 + sbbq %rdx, %r9 + sbbq %rdx, %r10 + sbbq %rax, %r11 + movq %r8, (%rsi) + movq %r9, 8(%rsi) + movq %r10, 16(%rsi) + movq %r11, 24(%rsi) + movq 8(%rsp), %rbx + movq 16(%rsp), %rbp + # Add + movq (%rbp), %r8 + movq 8(%rbp), %r9 + movq 16(%rbp), %r10 + movq 24(%rbp), %rdx + movq %r8, %r12 + addq (%rbx), %r8 + movq %r9, %r13 + adcq 8(%rbx), %r9 + movq %r10, %r14 + adcq 16(%rbx), %r10 + movq %rdx, %r15 + adcq 24(%rbx), %rdx + movq $-19, %rcx + movq %rdx, %r11 + movq $0x7fffffffffffffff, %rax + sarq $63, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Sub modulus (if overflow) + subq %rcx, %r8 + sbbq %rdx, %r9 + sbbq %rdx, %r10 + sbbq %rax, %r11 + # Sub + subq (%rbx), %r12 + movq $0x00, %rdx + sbbq 8(%rbx), %r13 + movq $-19, %rcx + sbbq 16(%rbx), %r14 + movq $0x7fffffffffffffff, %rax + sbbq 24(%rbx), %r15 + sbbq $0x00, %rdx + # Mask the modulus 
+ andq %rdx, %rcx + andq %rdx, %rax + # Add modulus (if underflow) + addq %rcx, %r12 + adcq %rdx, %r13 + adcq %rdx, %r14 + adcq %rax, %r15 + movq %r8, (%rbx) + movq %r9, 8(%rbx) + movq %r10, 16(%rbx) + movq %r11, 24(%rbx) + movq %r12, (%rdi) + movq %r13, 8(%rdi) + movq %r14, 16(%rdi) + movq %r15, 24(%rdi) + movq 24(%rsp), %rdi + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %rdx + movq %r8, %r12 + addq (%rdi), %r8 + movq %r9, %r13 + adcq 8(%rdi), %r9 + movq %r10, %r14 + adcq 16(%rdi), %r10 + movq %rdx, %r15 + adcq 24(%rdi), %rdx + movq $-19, %rcx + movq %rdx, %r11 + movq $0x7fffffffffffffff, %rax + sarq $63, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Sub modulus (if overflow) + subq %rcx, %r8 + sbbq %rdx, %r9 + sbbq %rdx, %r10 + sbbq %rax, %r11 + # Sub + subq (%rdi), %r12 + movq $0x00, %rdx + sbbq 8(%rdi), %r13 + movq $-19, %rcx + sbbq 16(%rdi), %r14 + movq $0x7fffffffffffffff, %rax + sbbq 24(%rdi), %r15 + sbbq $0x00, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Add modulus (if underflow) + addq %rcx, %r12 + adcq %rdx, %r13 + adcq %rdx, %r14 + adcq %rax, %r15 + movq %r8, (%rbp) + movq %r9, 8(%rbp) + movq %r10, 16(%rbp) + movq %r11, 24(%rbp) + movq %r12, (%rdi) + movq %r13, 8(%rdi) + movq %r14, 16(%rdi) + movq %r15, 24(%rdi) + addq $0x50, %rsp + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbp + popq %rbx + repz retq +#ifndef __APPLE__ +.size fe_ge_add_avx2,.-fe_ge_add_avx2 +#endif /* __APPLE__ */ +#ifndef __APPLE__ +.text +.globl fe_ge_sub_avx2 +.type fe_ge_sub_avx2,@function +.align 4 +fe_ge_sub_avx2: +#else +.section __TEXT,__text +.globl _fe_ge_sub_avx2 +.p2align 2 +_fe_ge_sub_avx2: +#endif /* __APPLE__ */ + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + subq $0x50, %rsp + movq %rdi, (%rsp) + movq %rsi, 8(%rsp) + movq %rdx, 16(%rsp) + movq %rcx, 24(%rsp) + movq %r8, 32(%rsp) + movq %r9, 40(%rsp) + movq 8(%rsp), %rsi + movq 40(%rsp), %rbx + 
movq 32(%rsp), %rbp + # Add + movq (%rbx), %r8 + movq 8(%rbx), %r9 + movq 16(%rbx), %r10 + movq 24(%rbx), %rdx + movq %r8, %r12 + addq (%rbp), %r8 + movq %r9, %r13 + adcq 8(%rbp), %r9 + movq %r10, %r14 + adcq 16(%rbp), %r10 + movq %rdx, %r15 + adcq 24(%rbp), %rdx + movq $-19, %rcx + movq %rdx, %r11 + movq $0x7fffffffffffffff, %rax + sarq $63, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Sub modulus (if overflow) + subq %rcx, %r8 + sbbq %rdx, %r9 + sbbq %rdx, %r10 + sbbq %rax, %r11 + # Sub + subq (%rbp), %r12 + movq $0x00, %rdx + sbbq 8(%rbp), %r13 + movq $-19, %rcx + sbbq 16(%rbp), %r14 + movq $0x7fffffffffffffff, %rax + sbbq 24(%rbp), %r15 + sbbq $0x00, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Add modulus (if underflow) + addq %rcx, %r12 + adcq %rdx, %r13 + adcq %rdx, %r14 + adcq %rax, %r15 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq %r12, (%rsi) + movq %r13, 8(%rsi) + movq %r14, 16(%rsi) + movq %r15, 24(%rsi) + movq 16(%rsp), %rbx + movq 176(%rsp), %rbp + # Multiply + # A[0] * B[0] + movq (%rbp), %rdx + mulxq (%rdi), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rdi), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rdi), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rbp), %rdx + mulxq 8(%rdi), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rbp), %rdx + mulxq (%rdi), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rdi), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rbp), %rdx + mulxq 8(%rdi), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rdi), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rbp), %rdx + mulxq 8(%rdi), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rbp), %rdx + adoxq %rcx, %r11 + mulxq 24(%rdi), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rbp), %rdx + 
mulxq 16(%rdi), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rbp), %rdx + adoxq %rcx, %r13 + mulxq 24(%rdi), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rdi), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rbp), %rdx + adcxq %rcx, %r12 + mulxq 24(%rdi), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rbp), %rdx + mulxq 16(%rdi), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rbp), %rdx + adcxq %rcx, %r14 + mulxq 24(%rdi), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rbx) + movq %r9, 8(%rbx) + movq %r10, 16(%rbx) + movq %r11, 24(%rbx) + movq 168(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rdx + mulxq (%rsi), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rsi), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rsi), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rbx), %rdx + mulxq 8(%rsi), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 
8(%rbx), %rdx + mulxq (%rsi), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rsi), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rbx), %rdx + mulxq 8(%rsi), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rsi), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rbx), %rdx + mulxq 8(%rsi), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rbx), %rdx + adoxq %rcx, %r11 + mulxq 24(%rsi), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rbx), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rbx), %rdx + adoxq %rcx, %r13 + mulxq 24(%rsi), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rsi), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rbx), %rdx + adcxq %rcx, %r12 + mulxq 24(%rsi), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rbx), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rbx), %rdx + adcxq %rcx, %r14 + mulxq 24(%rsi), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq 
$0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rsi) + movq %r9, 8(%rsi) + movq %r10, 16(%rsi) + movq %r11, 24(%rsi) + movq 24(%rsp), %rsi + movq 160(%rsp), %rbx + movq 144(%rsp), %rbp + # Multiply + # A[0] * B[0] + movq (%rbp), %rdx + mulxq (%rbx), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rbx), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rbx), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rbp), %rdx + mulxq 8(%rbx), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rbp), %rdx + mulxq (%rbx), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rbx), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rbp), %rdx + mulxq 8(%rbx), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rbx), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rbp), %rdx + mulxq 8(%rbx), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rbp), %rdx + adoxq %rcx, %r11 + mulxq 24(%rbx), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rbp), %rdx + mulxq 16(%rbx), %rdx, %rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rbp), %rdx + adoxq %rcx, %r13 + mulxq 24(%rbx), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rbx), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rbp), %rdx + adcxq %rcx, %r12 + mulxq 24(%rbx), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rbp), %rdx + mulxq 16(%rbx), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rbp), %rdx + adcxq %rcx, %r14 + mulxq 24(%rbx), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq 
$0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rsi) + movq %r9, 8(%rsi) + movq %r10, 16(%rsi) + movq %r11, 24(%rsi) + movq 136(%rsp), %rsi + movq 152(%rsp), %rbx + # Multiply + # A[0] * B[0] + movq (%rbx), %rdx + mulxq (%rsi), %r8, %r9 + # A[2] * B[0] + mulxq 16(%rsi), %r10, %r11 + # A[1] * B[0] + mulxq 8(%rsi), %rcx, %rax + xorq %r15, %r15 + adcxq %rcx, %r9 + # A[1] * B[3] + movq 24(%rbx), %rdx + mulxq 8(%rsi), %r12, %r13 + adcxq %rax, %r10 + # A[0] * B[1] + movq 8(%rbx), %rdx + mulxq (%rsi), %rcx, %rax + adoxq %rcx, %r9 + # A[2] * B[1] + mulxq 16(%rsi), %rcx, %r14 + adoxq %rax, %r10 + adcxq %rcx, %r11 + # A[1] * B[2] + movq 16(%rbx), %rdx + mulxq 8(%rsi), %rcx, %rax + adcxq %r14, %r12 + adoxq %rcx, %r11 + adcxq %r15, %r13 + adoxq %rax, %r12 + # A[0] * B[2] + mulxq (%rsi), %rcx, %rax + adoxq %r15, %r13 + xorq %r14, %r14 + adcxq %rcx, %r10 + # A[1] * B[1] + movq 8(%rbx), %rdx + mulxq 8(%rsi), %rdx, %rcx + adcxq %rax, %r11 + adoxq %rdx, %r10 + # A[3] * B[1] + movq 8(%rbx), %rdx + adoxq %rcx, %r11 + mulxq 24(%rsi), %rcx, %rax + adcxq %rcx, %r12 + # A[2] * B[2] + movq 16(%rbx), %rdx + mulxq 16(%rsi), %rdx, 
%rcx + adcxq %rax, %r13 + adoxq %rdx, %r12 + # A[3] * B[3] + movq 24(%rbx), %rdx + adoxq %rcx, %r13 + mulxq 24(%rsi), %rcx, %rax + adoxq %r15, %r14 + adcxq %rcx, %r14 + # A[0] * B[3] + mulxq (%rsi), %rdx, %rcx + adcxq %rax, %r15 + xorq %rax, %rax + adcxq %rdx, %r11 + # A[3] * B[0] + movq (%rbx), %rdx + adcxq %rcx, %r12 + mulxq 24(%rsi), %rdx, %rcx + adoxq %rdx, %r11 + adoxq %rcx, %r12 + # A[2] * B[3] + movq 24(%rbx), %rdx + mulxq 16(%rsi), %rdx, %rcx + adcxq %rdx, %r13 + # A[3] * B[2] + movq 16(%rbx), %rdx + adcxq %rcx, %r14 + mulxq 24(%rsi), %rcx, %rdx + adcxq %rax, %r15 + adoxq %rcx, %r13 + adoxq %rdx, %r14 + adoxq %rax, %r15 + # Reduce + movq $0x7fffffffffffffff, %rax + # Move top half into t4-t7 and remove top bit from t3 + shldq $0x01, %r14, %r15 + shldq $0x01, %r13, %r14 + shldq $0x01, %r12, %r13 + shldq $0x01, %r11, %r12 + andq %rax, %r11 + # Multiply top half by 19 + movq $19, %rdx + xorq %rax, %rax + mulxq %r12, %rcx, %r12 + adcxq %rcx, %r8 + adoxq %r12, %r9 + mulxq %r13, %rcx, %r13 + adcxq %rcx, %r9 + adoxq %r13, %r10 + mulxq %r14, %rcx, %r14 + adcxq %rcx, %r10 + adoxq %r14, %r11 + mulxq %r15, %r15, %rdx + adcxq %r15, %r11 + adoxq %rax, %rdx + adcxq %rax, %rdx + # Overflow + shldq $0x01, %r11, %rdx + movq $0x7fffffffffffffff, %rax + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Reduce if top bit set + movq %r11, %rdx + shrq $63, %rdx + imulq $19, %rdx, %rcx + andq %rax, %r11 + addq %rcx, %r8 + adcq $0x00, %r9 + adcq $0x00, %r10 + adcq $0x00, %r11 + # Store + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + leaq 48(%rsp), %rsi + # Double + movq (%rdi), %r8 + movq 8(%rdi), %r9 + addq %r8, %r8 + movq 16(%rdi), %r10 + adcq %r9, %r9 + movq 24(%rdi), %rdx + adcq %r10, %r10 + movq $-19, %rcx + adcq %rdx, %rdx + movq $0x7fffffffffffffff, %rax + movq %rdx, %r11 + sarq $63, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Sub modulus (if 
overflow) + subq %rcx, %r8 + sbbq %rdx, %r9 + sbbq %rdx, %r10 + sbbq %rax, %r11 + movq %r8, (%rsi) + movq %r9, 8(%rsi) + movq %r10, 16(%rsi) + movq %r11, 24(%rsi) + movq 8(%rsp), %rbx + movq 16(%rsp), %rbp + # Add + movq (%rbp), %r8 + movq 8(%rbp), %r9 + movq 16(%rbp), %r10 + movq 24(%rbp), %rdx + movq %r8, %r12 + addq (%rbx), %r8 + movq %r9, %r13 + adcq 8(%rbx), %r9 + movq %r10, %r14 + adcq 16(%rbx), %r10 + movq %rdx, %r15 + adcq 24(%rbx), %rdx + movq $-19, %rcx + movq %rdx, %r11 + movq $0x7fffffffffffffff, %rax + sarq $63, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Sub modulus (if overflow) + subq %rcx, %r8 + sbbq %rdx, %r9 + sbbq %rdx, %r10 + sbbq %rax, %r11 + # Sub + subq (%rbx), %r12 + movq $0x00, %rdx + sbbq 8(%rbx), %r13 + movq $-19, %rcx + sbbq 16(%rbx), %r14 + movq $0x7fffffffffffffff, %rax + sbbq 24(%rbx), %r15 + sbbq $0x00, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Add modulus (if underflow) + addq %rcx, %r12 + adcq %rdx, %r13 + adcq %rdx, %r14 + adcq %rax, %r15 + movq %r8, (%rbx) + movq %r9, 8(%rbx) + movq %r10, 16(%rbx) + movq %r11, 24(%rbx) + movq %r12, (%rdi) + movq %r13, 8(%rdi) + movq %r14, 16(%rdi) + movq %r15, 24(%rdi) + movq 24(%rsp), %rdi + # Add + movq (%rsi), %r8 + movq 8(%rsi), %r9 + movq 16(%rsi), %r10 + movq 24(%rsi), %rdx + movq %r8, %r12 + addq (%rdi), %r8 + movq %r9, %r13 + adcq 8(%rdi), %r9 + movq %r10, %r14 + adcq 16(%rdi), %r10 + movq %rdx, %r15 + adcq 24(%rdi), %rdx + movq $-19, %rcx + movq %rdx, %r11 + movq $0x7fffffffffffffff, %rax + sarq $63, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Sub modulus (if overflow) + subq %rcx, %r8 + sbbq %rdx, %r9 + sbbq %rdx, %r10 + sbbq %rax, %r11 + # Sub + subq (%rdi), %r12 + movq $0x00, %rdx + sbbq 8(%rdi), %r13 + movq $-19, %rcx + sbbq 16(%rdi), %r14 + movq $0x7fffffffffffffff, %rax + sbbq 24(%rdi), %r15 + sbbq $0x00, %rdx + # Mask the modulus + andq %rdx, %rcx + andq %rdx, %rax + # Add modulus (if underflow) + addq %rcx, %r12 
+ adcq %rdx, %r13 + adcq %rdx, %r14 + adcq %rax, %r15 + movq %r8, (%rdi) + movq %r9, 8(%rdi) + movq %r10, 16(%rdi) + movq %r11, 24(%rdi) + movq %r12, (%rbp) + movq %r13, 8(%rbp) + movq %r14, 16(%rbp) + movq %r15, 24(%rbp) + addq $0x50, %rsp + popq %r15 + popq %r14 + popq %r13 + popq %r12 + popq %rbp + popq %rbx + repz retq +#ifndef __APPLE__ +.size fe_ge_sub_avx2,.-fe_ge_sub_avx2 +#endif /* __APPLE__ */ +#endif /* HAVE_INTEL_AVX2 */ diff --git a/wolfcrypt/src/fe_x25519_x64.i b/wolfcrypt/src/fe_x25519_x64.i deleted file mode 100644 index e22424c7d..000000000 --- a/wolfcrypt/src/fe_x25519_x64.i +++ /dev/null @@ -1,2089 +0,0 @@ -/* fe_x25519_x64.i - * - * Copyright (C) 2006-2019 wolfSSL Inc. - * - * This file is part of wolfSSL. - * - * wolfSSL is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 2 of the License, or - * (at your option) any later version. - * - * wolfSSL is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. 
- * - * You should have received a copy of the GNU General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA - */ - -#if defined(USE_INTEL_SPEEDUP) - #if defined(__GNUC__) && ((__GNUC__ < 4) || \ - (__GNUC__ == 4 && __GNUC_MINOR__ <= 8)) - #undef NO_AVX2_SUPPORT - #define NO_AVX2_SUPPORT - #endif - #if defined(__clang__) && ((__clang_major__ < 3) || \ - (__clang_major__ == 3 && __clang_minor__ <= 5)) - #define NO_AVX2_SUPPORT - #elif defined(__clang__) && defined(NO_AVX2_SUPPORT) - #undef NO_AVX2_SUPPORT - #endif - - #define HAVE_INTEL_AVX1 - #ifndef NO_AVX2_SUPPORT - #define HAVE_INTEL_AVX2 - #endif - #include -#endif - -#ifdef HAVE_INTEL_AVX2 -static void fe_mul_avx2(fe r, const fe a, const fe b); -static void fe_sq_avx2(fe r, const fe a); -static void fe_sq2_avx2(fe r, const fe a); -#endif -static void fe_mul_x64(fe r, const fe a, const fe b); -static void fe_sq_x64(fe r, const fe a); -static void fe_sq2_x64(fe r, const fe a); - -static void (*fe_mul_p)(fe r, const fe a, const fe b) = fe_mul_x64; -static void (*fe_sq_p)(fe r, const fe a) = fe_sq_x64; -static void (*fe_sq2_p)(fe r, const fe a) = fe_sq2_x64; - -#if defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2) - -static int cpuFlagsSet = 0; -static int intelFlags; - -#endif - -void fe_init(void) -{ -#if defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2) - if (cpuFlagsSet) - return; - - intelFlags = cpuid_get_flags(); - cpuFlagsSet = 1; - - #ifdef HAVE_INTEL_AVX2 - if (IS_INTEL_BMI2(intelFlags) && IS_INTEL_ADX(intelFlags)) { - fe_mul_p = fe_mul_avx2; - fe_sq_p = fe_sq_avx2; - fe_sq2_p = fe_sq2_avx2; - } - #endif -#endif -} - -/* Convert a number represented as an array of bytes to an array of words with - * 64-bits of data in each word. - * - * in An array of bytes. - * out An array of words. 
- */ -void fe_frombytes(fe out, const unsigned char *in) -{ -#ifdef LITTLE_ENDIAN_ORDER - XMEMCPY(out, in, 32); -#else - out[0] = (((int64_t)in[ 0]) << 0) - | (((int64_t)in[ 1]) << 8) - | (((int64_t)in[ 2]) << 16) - | (((int64_t)in[ 3]) << 24) - | (((int64_t)in[ 4]) << 32) - | (((int64_t)in[ 5]) << 40) - | (((int64_t)in[ 6]) << 48) - | (((int64_t)in[ 7]) << 56); - out[1] = (((int64_t)in[ 8]) << 0) - | (((int64_t)in[ 9]) << 8) - | (((int64_t)in[10]) << 16) - | (((int64_t)in[11]) << 24) - | (((int64_t)in[12]) << 32) - | (((int64_t)in[13]) << 40) - | (((int64_t)in[14]) << 48) - | (((int64_t)in[15]) << 56); - out[2] = (((int64_t)in[16]) << 0) - | (((int64_t)in[17]) << 8) - | (((int64_t)in[18]) << 16) - | (((int64_t)in[19]) << 24) - | (((int64_t)in[20]) << 32) - | (((int64_t)in[21]) << 40) - | (((int64_t)in[22]) << 48) - | (((int64_t)in[23]) << 56); - out[3] = (((int64_t)in[24]) << 0) - | (((int64_t)in[25]) << 8) - | (((int64_t)in[26]) << 16) - | (((int64_t)in[27]) << 24) - | (((int64_t)in[28]) << 32) - | (((int64_t)in[29]) << 40) - | (((int64_t)in[30]) << 48) - | (((int64_t)in[31]) << 56); -#endif - out[3] &= 0x7fffffffffffffff; -} - -/* Convert a number represented as an array of words to an array of bytes. - * If greater than the mod, modulo reduced by the prime 2^255 - 19. - * - * n An array of words. - * out An array of bytes. 
- */ -void fe_tobytes(unsigned char *out, const fe n) -{ - __asm__ __volatile__ ( - "movq $0x7fffffffffffffff, %%r10\n\t" - "movq 0(%[n]), %%rax\n\t" - "movq 8(%[n]), %%rcx\n\t" - "addq $19, %%rax\n\t" - "movq 16(%[n]), %%rdx\n\t" - "adcq $0, %%rcx\n\t" - "movq 24(%[n]), %%r8\n\t" - "adcq $0, %%rdx\n\t" - "adcq $0, %%r8\n\t" - "shrq $63, %%r8\n\t" - "movq 0(%[n]), %%rax\n\t" - "imulq $19, %%r8, %%r9\n\t" - "movq 8(%[n]), %%rcx\n\t" - "addq %%r9, %%rax\n\t" - "movq 16(%[n]), %%rdx\n\t" - "adcq $0, %%rcx\n\t" - "movq 24(%[n]), %%r8\n\t" - "adcq $0, %%rdx\n\t" - "movq %%rax, 0(%[n])\n\t" - "adcq $0, %%r8\n\t" - "movq %%rcx, 8(%[n])\n\t" - "andq %%r10, %%r8\n\t" - "movq %%rdx, 16(%[n])\n\t" - "movq %%r8, 24(%[n])\n\t" - : - : [n] "r" (n) - : "memory", "rax", "rcx", "rdx", "r8", "r9", "r10" - ); -#ifdef LITTLE_ENDIAN_ORDER - XMEMCPY(out, n, 32); -#else - out[0] = n[0] >> 0; - out[1] = n[0] >> 8; - out[2] = n[0] >> 16; - out[3] = n[0] >> 24; - out[4] = n[0] >> 32; - out[5] = n[0] >> 40; - out[6] = n[0] >> 48; - out[7] = n[0] >> 56; - out[8] = n[1] >> 0; - out[9] = n[1] >> 8; - out[10] = n[1] >> 16; - out[11] = n[1] >> 24; - out[12] = n[1] >> 32; - out[13] = n[1] >> 40; - out[14] = n[1] >> 48; - out[15] = n[1] >> 56; - out[16] = n[2] >> 0; - out[17] = n[2] >> 8; - out[18] = n[2] >> 16; - out[19] = n[2] >> 24; - out[20] = n[2] >> 32; - out[21] = n[2] >> 40; - out[22] = n[2] >> 48; - out[23] = n[2] >> 56; - out[24] = n[3] >> 0; - out[25] = n[3] >> 8; - out[26] = n[3] >> 16; - out[27] = n[3] >> 24; - out[28] = n[3] >> 32; - out[29] = n[3] >> 40; - out[30] = n[3] >> 48; - out[31] = n[3] >> 56; -#endif -} - -/* Set the field element to 1. - * - * n The field element number. - */ -void fe_1(fe n) -{ - n[0] = 0x0000000000000001; - n[1] = 0x0000000000000000; - n[2] = 0x0000000000000000; - n[3] = 0x0000000000000000; -} - -/* Set the field element to 0. - * - * n The field element number. 
- */ -void fe_0(fe n) -{ - n[0] = 0x0000000000000000; - n[1] = 0x0000000000000000; - n[2] = 0x0000000000000000; - n[3] = 0x0000000000000000; -} - -/* Copy field element a into field element r. - * - * r Field element to copy into. - * a Field element to copy. - */ -void fe_copy(fe r, const fe a) -{ - r[0] = a[0]; - r[1] = a[1]; - r[2] = a[2]; - r[3] = a[3]; -} - -/* Constant time, conditional swap of field elements a and b. - * - * a A field element. - * b A field element. - * c If 1 then swap and if 0 then don't swap. - */ -static WC_INLINE void fe_cswap_int(fe a, fe b, int c) -{ - __asm__ __volatile__ ( - "movslq %[c], %%rax\n\t" - "movq 0(%[a]), %%rcx\n\t" - "movq 8(%[a]), %%rdx\n\t" - "movq 16(%[a]), %%r8\n\t" - "movq 24(%[a]), %%r9\n\t" - "negq %%rax\n\t" - "xorq 0(%[b]), %%rcx\n\t" - "xorq 8(%[b]), %%rdx\n\t" - "xorq 16(%[b]), %%r8\n\t" - "xorq 24(%[b]), %%r9\n\t" - "andq %%rax, %%rcx\n\t" - "andq %%rax, %%rdx\n\t" - "andq %%rax, %%r8\n\t" - "andq %%rax, %%r9\n\t" - "xorq %%rcx, 0(%[a])\n\t" - "xorq %%rdx, 8(%[a])\n\t" - "xorq %%r8, 16(%[a])\n\t" - "xorq %%r9, 24(%[a])\n\t" - "xorq %%rcx, 0(%[b])\n\t" - "xorq %%rdx, 8(%[b])\n\t" - "xorq %%r8, 16(%[b])\n\t" - "xorq %%r9, 24(%[b])\n\t" - : - : [a] "r" (a), [b] "r" (b), [c] "r" (c) - : "memory", "rax", "rcx", "rdx", "r8", "r9" - ); -} - -void fe_cswap(fe a, fe b, int c) -{ - fe_cswap_int(a, b, c); -} - -/* Subtract b from a into r. (r = a - b) - * - * r A field element. - * a A field element. - * b A field element. 
- */ -static WC_INLINE void fe_sub_int(fe r, const fe a, const fe b) -{ - __asm__ __volatile__ ( - "movq $0x7fffffffffffffff, %%rcx\n\t" - "movq $-19, %%r11\n\t" - "movq 0(%[a]), %%rax\n\t" - "movq 8(%[a]), %%rdx\n\t" - "subq 0(%[b]), %%rax\n\t" - "movq 16(%[a]), %%r8\n\t" - "sbbq 8(%[b]), %%rdx\n\t" - "movq 24(%[a]), %%r9\n\t" - "sbbq 16(%[b]), %%r8\n\t" - "movq $0, %%r10\n\t" - "sbbq 24(%[b]), %%r9\n\t" - "sbbq $0, %%r10\n\t" - "# Mask the modulus\n\t" - "andq %%r10, %%r11\n\t" - "andq %%r10, %%rcx\n\t" - "# Add modulus (if underflow)\n\t" - "addq %%r11, %%rax\n\t" - "adcq %%r10, %%rdx\n\t" - "movq %%rax, 0(%[r])\n\t" - "adcq %%r10, %%r8\n\t" - "movq %%rdx, 8(%[r])\n\t" - "adcq %%rcx, %%r9\n\t" - "movq %%r8, 16(%[r])\n\t" - "movq %%r9, 24(%[r])\n\t" - : - : [r] "r" (r), [a] "r" (a), [b] "r" (b) - : "rax", "rcx", "rdx", "r8", "r9", "r10", "r11", "memory" - ); -} - -void fe_sub(fe r, const fe a, const fe b) -{ - fe_sub_int(r, a, b); -} - -/* Add b to a into r. (r = a + b) - * - * r A field element. - * a A field element. - * b A field element. 
- */ -static WC_INLINE void fe_add_int(fe r, const fe a, const fe b) -{ - __asm__ __volatile__ ( - "movq 0(%[a]), %%rax\n\t" - "movq 8(%[a]), %%rdx\n\t" - "addq 0(%[b]), %%rax\n\t" - "movq 16(%[a]), %%r8\n\t" - "adcq 8(%[b]), %%rdx\n\t" - "movq 24(%[a]), %%r10\n\t" - "adcq 16(%[b]), %%r8\n\t" - "movq $0x7fffffffffffffff, %%rcx\n\t" - "adcq 24(%[b]), %%r10\n\t" - "movq $-19, %%r11\n\t" - "movq %%r10, %%r9\n\t" - "sarq $63, %%r10\n\t" - "# Mask the modulus\n\t" - "andq %%r10, %%r11\n\t" - "andq %%r10, %%rcx\n\t" - "# Sub modulus (if overflow)\n\t" - "subq %%r11, %%rax\n\t" - "sbbq %%r10, %%rdx\n\t" - "movq %%rax, 0(%[r])\n\t" - "sbbq %%r10, %%r8\n\t" - "movq %%rdx, 8(%[r])\n\t" - "sbbq %%rcx, %%r9\n\t" - "movq %%r8, 16(%[r])\n\t" - "movq %%r9, 24(%[r])\n\t" - : - : [r] "r" (r), [a] "r" (a), [b] "r" (b) - : "rax", "rcx", "rdx", "r8", "r9", "r10", "r11", "memory" - ); -} - -void fe_add(fe r, const fe a, const fe b) -{ - fe_add_int(r, a, b); -} - -/* Multiply a and b into r. (r = a * b) - * - * r A field element. - * a A field element. - * b A field element. 
- */ -void fe_mul(fe r, const fe a, const fe b) -{ - (*fe_mul_p)(r, a, b); -} - -#ifdef HAVE_INTEL_AVX2 -static WC_INLINE void fe_mul_avx2(fe r, const fe a, const fe b) -{ - __asm__ __volatile__ ( - "# A[0] * B[0]\n\t" - "movq 0(%[b]), %%rdx\n\t" - "mulxq 0(%[a]), %%r8, %%r9\n\t" - "# A[2] * B[0]\n\t" - "mulxq 16(%[a]), %%r10, %%r11\n\t" - "# A[1] * B[0]\n\t" - "mulxq 8(%[a]), %%rax, %%rcx\n\t" - "xorq %%r15, %%r15\n\t" - "adcxq %%rax, %%r9\n\t" - "# A[1] * B[3]\n\t" - "movq 24(%[b]), %%rdx\n\t" - "mulxq 8(%[a]), %%r12, %%r13\n\t" - "adcxq %%rcx, %%r10\n\t" - "# A[0] * B[1]\n\t" - "movq 8(%[b]), %%rdx\n\t" - "mulxq 0(%[a]), %%rax, %%rcx\n\t" - "adoxq %%rax, %%r9\n\t" - "# A[2] * B[1]\n\t" - "mulxq 16(%[a]), %%rax, %%r14\n\t" - "adoxq %%rcx, %%r10\n\t" - "adcxq %%rax, %%r11\n\t" - "# A[1] * B[2]\n\t" - "movq 16(%[b]), %%rdx\n\t" - "mulxq 8(%[a]), %%rax, %%rcx\n\t" - "adcxq %%r14, %%r12\n\t" - "adoxq %%rax, %%r11\n\t" - "adcxq %%r15, %%r13\n\t" - "adoxq %%rcx, %%r12\n\t" - "# A[0] * B[2]\n\t" - "mulxq 0(%[a]), %%rax, %%rcx\n\t" - "adoxq %%r15, %%r13\n\t" - "xorq %%r14, %%r14\n\t" - "adcxq %%rax, %%r10\n\t" - "# A[1] * B[1]\n\t" - "movq 8(%[b]), %%rdx\n\t" - "mulxq 8(%[a]), %%rdx, %%rax\n\t" - "adcxq %%rcx, %%r11\n\t" - "adoxq %%rdx, %%r10\n\t" - "# A[3] * B[1]\n\t" - "movq 8(%[b]), %%rdx\n\t" - "adoxq %%rax, %%r11\n\t" - "mulxq 24(%[a]), %%rax, %%rcx\n\t" - "adcxq %%rax, %%r12\n\t" - "# A[2] * B[2]\n\t" - "movq 16(%[b]), %%rdx\n\t" - "mulxq 16(%[a]), %%rdx, %%rax\n\t" - "adcxq %%rcx, %%r13\n\t" - "adoxq %%rdx, %%r12\n\t" - "# A[3] * B[3]\n\t" - "movq 24(%[b]), %%rdx\n\t" - "adoxq %%rax, %%r13\n\t" - "mulxq 24(%[a]), %%rax, %%rcx\n\t" - "adoxq %%r15, %%r14\n\t" - "adcxq %%rax, %%r14\n\t" - "# A[0] * B[3]\n\t" - "mulxq 0(%[a]), %%rdx, %%rax\n\t" - "adcxq %%rcx, %%r15\n\t" - "xorq %%rcx, %%rcx\n\t" - "adcxq %%rdx, %%r11\n\t" - "# A[3] * B[0]\n\t" - "movq 0(%[b]), %%rdx\n\t" - "adcxq %%rax, %%r12\n\t" - "mulxq 24(%[a]), %%rdx, %%rax\n\t" - "adoxq %%rdx, %%r11\n\t" - 
"adoxq %%rax, %%r12\n\t" - "# A[2] * B[3]\n\t" - "movq 24(%[b]), %%rdx\n\t" - "mulxq 16(%[a]), %%rdx, %%rax\n\t" - "adcxq %%rdx, %%r13\n\t" - "# A[3] * B[2]\n\t" - "movq 16(%[b]), %%rdx\n\t" - "adcxq %%rax, %%r14\n\t" - "mulxq 24(%[a]), %%rax, %%rdx\n\t" - "adcxq %%rcx, %%r15\n\t" - "adoxq %%rax, %%r13\n\t" - "adoxq %%rdx, %%r14\n\t" - "adoxq %%rcx, %%r15\n\t" - "# Reduce\n\t" - "movq $0x7fffffffffffffff, %%rcx\n\t" - "# Move top half into t4-t7 and remove top bit from t3\n\t" - "shldq $1, %%r14, %%r15\n\t" - "shldq $1, %%r13, %%r14\n\t" - "shldq $1, %%r12, %%r13\n\t" - "shldq $1, %%r11, %%r12\n\t" - "andq %%rcx, %%r11\n\t" - "# Multiply top half by 19\n\t" - "movq $19, %%rdx\n\t" - "xorq %%rcx, %%rcx\n\t" - "mulxq %%r12, %%rax, %%r12\n\t" - "movq $19, %%rdx\n\t" - "adcxq %%rax, %%r8\n\t" - "adoxq %%r12, %%r9\n\t" - "mulxq %%r13, %%rax, %%r13\n\t" - "movq $19, %%rdx\n\t" - "adcxq %%rax, %%r9\n\t" - "adoxq %%r13, %%r10\n\t" - "mulxq %%r14, %%rax, %%r14\n\t" - "movq $19, %%rdx\n\t" - "adcxq %%rax, %%r10\n\t" - "adoxq %%r14, %%r11\n\t" - "mulxq %%r15, %%r15, %%rdx\n\t" - "adcxq %%r15, %%r11\n\t" - "adoxq %%rcx, %%rdx\n\t" - "adcxq %%rcx, %%rdx\n\t" - "# Overflow\n\t" - "shldq $1, %%r11, %%rdx\n\t" - "movq $0x7fffffffffffffff, %%rcx\n\t" - "imulq $19, %%rdx, %%rax\n\t" - "andq %%rcx, %%r11\n\t" - "addq %%rax, %%r8\n\t" - "adcq $0, %%r9\n\t" - "adcq $0, %%r10\n\t" - "adcq $0, %%r11\n\t" - "# Reduce if top bit set\n\t" - "movq %%r11, %%rdx\n\t" - "shrq $63, %%rdx\n\t" - "imulq $19, %%rdx, %%rax\n\t" - "andq %%rcx, %%r11\n\t" - "addq %%rax, %%r8\n\t" - "adcq $0, %%r9\n\t" - "adcq $0, %%r10\n\t" - "adcq $0, %%r11\n\t" - "# Store\n\t" - "movq %%r8, 0(%[r])\n\t" - "movq %%r9, 8(%[r])\n\t" - "movq %%r10, 16(%[r])\n\t" - "movq %%r11, 24(%[r])\n\t" - : - : [r] "r" (r), [a] "r" (a), [b] "r" (b) - : "memory", "rax", "rdx", "rcx", - "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15" - ); -} -#endif /* HAVE_INTEL_AVX2 */ - -static WC_INLINE void fe_mul_x64(fe r, const fe a, const 
fe b) -{ - __asm__ __volatile__ ( - "# A[0] * B[0]\n\t" - "movq 0(%[b]), %%rax\n\t" - "mulq 0(%[a])\n\t" - "movq %%rax, %%rcx\n\t" - "movq %%rdx, %%r8\n\t" - "# A[0] * B[1]\n\t" - "movq 8(%[b]), %%rax\n\t" - "mulq 0(%[a])\n\t" - "xorq %%r9, %%r9\n\t" - "addq %%rax, %%r8\n\t" - "adcq %%rdx, %%r9\n\t" - "# A[1] * B[0]\n\t" - "movq 0(%[b]), %%rax\n\t" - "mulq 8(%[a])\n\t" - "xorq %%r10, %%r10\n\t" - "addq %%rax, %%r8\n\t" - "adcq %%rdx, %%r9\n\t" - "adcq $0, %%r10\n\t" - "# A[0] * B[2]\n\t" - "movq 16(%[b]), %%rax\n\t" - "mulq 0(%[a])\n\t" - "addq %%rax, %%r9\n\t" - "adcq %%rdx, %%r10\n\t" - "# A[1] * B[1]\n\t" - "movq 8(%[b]), %%rax\n\t" - "mulq 8(%[a])\n\t" - "xorq %%r11, %%r11\n\t" - "addq %%rax, %%r9\n\t" - "adcq %%rdx, %%r10\n\t" - "adcq $0, %%r11\n\t" - "# A[2] * B[0]\n\t" - "movq 0(%[b]), %%rax\n\t" - "mulq 16(%[a])\n\t" - "addq %%rax, %%r9\n\t" - "adcq %%rdx, %%r10\n\t" - "adcq $0, %%r11\n\t" - "# A[0] * B[3]\n\t" - "movq 24(%[b]), %%rax\n\t" - "mulq 0(%[a])\n\t" - "xorq %%r12, %%r12\n\t" - "addq %%rax, %%r10\n\t" - "adcq %%rdx, %%r11\n\t" - "adcq $0, %%r12\n\t" - "# A[1] * B[2]\n\t" - "movq 16(%[b]), %%rax\n\t" - "mulq 8(%[a])\n\t" - "addq %%rax, %%r10\n\t" - "adcq %%rdx, %%r11\n\t" - "adcq $0, %%r12\n\t" - "# A[2] * B[1]\n\t" - "movq 8(%[b]), %%rax\n\t" - "mulq 16(%[a])\n\t" - "addq %%rax, %%r10\n\t" - "adcq %%rdx, %%r11\n\t" - "adcq $0, %%r12\n\t" - "# A[3] * B[0]\n\t" - "movq 0(%[b]), %%rax\n\t" - "mulq 24(%[a])\n\t" - "addq %%rax, %%r10\n\t" - "adcq %%rdx, %%r11\n\t" - "adcq $0, %%r12\n\t" - "# A[1] * B[3]\n\t" - "movq 24(%[b]), %%rax\n\t" - "mulq 8(%[a])\n\t" - "xorq %%r13, %%r13\n\t" - "addq %%rax, %%r11\n\t" - "adcq %%rdx, %%r12\n\t" - "adcq $0, %%r13\n\t" - "# A[2] * B[2]\n\t" - "movq 16(%[b]), %%rax\n\t" - "mulq 16(%[a])\n\t" - "addq %%rax, %%r11\n\t" - "adcq %%rdx, %%r12\n\t" - "adcq $0, %%r13\n\t" - "# A[3] * B[1]\n\t" - "movq 8(%[b]), %%rax\n\t" - "mulq 24(%[a])\n\t" - "addq %%rax, %%r11\n\t" - "adcq %%rdx, %%r12\n\t" - "adcq $0, %%r13\n\t" - "# 
A[2] * B[3]\n\t" - "movq 24(%[b]), %%rax\n\t" - "mulq 16(%[a])\n\t" - "xorq %%r14, %%r14\n\t" - "addq %%rax, %%r12\n\t" - "adcq %%rdx, %%r13\n\t" - "adcq $0, %%r14\n\t" - "# A[3] * B[2]\n\t" - "movq 16(%[b]), %%rax\n\t" - "mulq 24(%[a])\n\t" - "addq %%rax, %%r12\n\t" - "adcq %%rdx, %%r13\n\t" - "adcq $0, %%r14\n\t" - "# A[3] * B[3]\n\t" - "movq 24(%[b]), %%rax\n\t" - "mulq 24(%[a])\n\t" - "addq %%rax, %%r13\n\t" - "adcq %%rdx, %%r14\n\t" - "# Reduce\n\t" - "movq $0x7fffffffffffffff, %%rbx\n\t" - "# Move top half into t4-t7 and remove top bit from t3\n\t" - "shldq $1, %%r13, %%r14\n\t" - "shldq $1, %%r12, %%r13\n\t" - "shldq $1, %%r11, %%r12\n\t" - "shldq $1, %%r10, %%r11\n\t" - "andq %%rbx, %%r10\n\t" - "# Multiply top half by 19\n\t" - "movq $19, %%rax\n\t" - "mulq %%r11\n\t" - "xorq %%r11, %%r11\n\t" - "addq %%rax, %%rcx\n\t" - "movq $19, %%rax\n\t" - "adcq %%rdx, %%r11\n\t" - "mulq %%r12\n\t" - "xorq %%r12, %%r12\n\t" - "addq %%rax, %%r8\n\t" - "movq $19, %%rax\n\t" - "adcq %%rdx, %%r12\n\t" - "mulq %%r13\n\t" - "xorq %%r13, %%r13\n\t" - "addq %%rax, %%r9\n\t" - "movq $19, %%rax\n\t" - "adcq %%rdx, %%r13\n\t" - "mulq %%r14\n\t" - "# Add remaining product results in\n\t" - "addq %%r11, %%r8\n\t" - "adcq %%r12, %%r9\n\t" - "adcq %%r13, %%r10\n\t" - "addq %%rax, %%r10\n\t" - "adcq $0, %%rdx\n\t" - "# Overflow\n\t" - "shldq $1, %%r10, %%rdx\n\t" - "imulq $19, %%rdx, %%rax\n\t" - "andq %%rbx, %%r10\n\t" - "addq %%rax, %%rcx\n\t" - "adcq $0, %%r8\n\t" - "adcq $0, %%r9\n\t" - "adcq $0, %%r10\n\t" - "# Reduce if top bit set\n\t" - "movq %%r10, %%rdx\n\t" - "shrq $63, %%rdx\n\t" - "imulq $19, %%rdx, %%rax\n\t" - "andq %%rbx, %%r10\n\t" - "addq %%rax, %%rcx\n\t" - "adcq $0, %%r8\n\t" - "adcq $0, %%r9\n\t" - "adcq $0, %%r10\n\t" - "# Store\n\t" - "movq %%rcx, 0(%[r])\n\t" - "movq %%r8, 8(%[r])\n\t" - "movq %%r9, 16(%[r])\n\t" - "movq %%r10, 24(%[r])\n\t" - : - : [r] "r" (r), [a] "r" (a), [b] "r" (b) - : "memory", "rax", "rbx", "rdx", "rcx", "r8", "r9", - "r10", "r11", 
"r12", "r13", "r14" - ); -} - -/* Square a and put result in r. (r = a * a) - * - * r A field element. - * a A field element. - * b A field element. - */ -void fe_sq(fe r, const fe a) -{ - (*fe_sq_p)(r, a); -} - -#ifdef HAVE_INTEL_AVX2 -static WC_INLINE void fe_sq_avx2(fe r, const fe a) -{ - __asm__ __volatile__ ( - "# A[0] * A[1]\n\t" - "movq 0(%[a]), %%rdx\n\t" - "mulxq 8(%[a]), %%r9, %%r10\n\t" - "# A[0] * A[3]\n\t" - "mulxq 24(%[a]), %%r11, %%r12\n\t" - "# A[2] * A[1]\n\t" - "movq 16(%[a]), %%rdx\n\t" - "mulxq 8(%[a]), %%rcx, %%rbx\n\t" - "xorq %%r15, %%r15\n\t" - "adoxq %%rcx, %%r11\n\t" - "# A[2] * A[3]\n\t" - "mulxq 24(%[a]), %%r13, %%r14\n\t" - "adoxq %%rbx, %%r12\n\t" - "# A[2] * A[0]\n\t" - "mulxq 0(%[a]), %%rcx, %%rbx\n\t" - "adoxq %%r15, %%r13\n\t" - "adcxq %%rcx, %%r10\n\t" - "adoxq %%r15, %%r14\n\t" - "# A[1] * A[3]\n\t" - "movq 8(%[a]), %%rdx\n\t" - "mulxq 24(%[a]), %%rax, %%r8\n\t" - "adcxq %%rbx, %%r11\n\t" - "adcxq %%rax, %%r12\n\t" - "adcxq %%r8, %%r13\n\t" - "adcxq %%r15, %%r14\n\t" - "# Double with Carry Flag\n\t" - "xorq %%r15, %%r15\n\t" - "# A[0] * A[0]\n\t" - "movq 0(%[a]), %%rdx\n\t" - "mulxq %%rdx, %%r8, %%rax\n\t" - "adcxq %%r9, %%r9\n\t" - "# A[1] * A[1]\n\t" - "movq 8(%[a]), %%rdx\n\t" - "mulxq %%rdx, %%rcx, %%rbx\n\t" - "adcxq %%r10, %%r10\n\t" - "adoxq %%rax, %%r9\n\t" - "adcxq %%r11, %%r11\n\t" - "adoxq %%rcx, %%r10\n\t" - "# A[2] * A[2]\n\t" - "movq 16(%[a]), %%rdx\n\t" - "mulxq %%rdx, %%rax, %%rcx\n\t" - "adcxq %%r12, %%r12\n\t" - "adoxq %%rbx, %%r11\n\t" - "adcxq %%r13, %%r13\n\t" - "adoxq %%rax, %%r12\n\t" - "# A[3] * A[3]\n\t" - "movq 24(%[a]), %%rdx\n\t" - "mulxq %%rdx, %%rax, %%rbx\n\t" - "adcxq %%r14, %%r14\n\t" - "adoxq %%rcx, %%r13\n\t" - "adcxq %%r15, %%r15\n\t" - "adoxq %%rax, %%r14\n\t" - "adoxq %%rbx, %%r15\n\t" - "# Reduce\n\t" - "movq $0x7fffffffffffffff, %%rcx\n\t" - "# Move top half into t4-t7 and remove top bit from t3\n\t" - "shldq $1, %%r14, %%r15\n\t" - "shldq $1, %%r13, %%r14\n\t" - "shldq $1, %%r12, 
%%r13\n\t" - "shldq $1, %%r11, %%r12\n\t" - "andq %%rcx, %%r11\n\t" - "# Multiply top half by 19\n\t" - "movq $19, %%rdx\n\t" - "xorq %%rcx, %%rcx\n\t" - "mulxq %%r12, %%rax, %%r12\n\t" - "movq $19, %%rdx\n\t" - "adcxq %%rax, %%r8\n\t" - "adoxq %%r12, %%r9\n\t" - "mulxq %%r13, %%rax, %%r13\n\t" - "movq $19, %%rdx\n\t" - "adcxq %%rax, %%r9\n\t" - "adoxq %%r13, %%r10\n\t" - "mulxq %%r14, %%rax, %%r14\n\t" - "movq $19, %%rdx\n\t" - "adcxq %%rax, %%r10\n\t" - "adoxq %%r14, %%r11\n\t" - "mulxq %%r15, %%r15, %%rdx\n\t" - "adcxq %%r15, %%r11\n\t" - "adoxq %%rcx, %%rdx\n\t" - "adcxq %%rcx, %%rdx\n\t" - "# Overflow\n\t" - "shldq $1, %%r11, %%rdx\n\t" - "movq $0x7fffffffffffffff, %%rcx\n\t" - "imulq $19, %%rdx, %%rax\n\t" - "andq %%rcx, %%r11\n\t" - "addq %%rax, %%r8\n\t" - "adcq $0, %%r9\n\t" - "adcq $0, %%r10\n\t" - "adcq $0, %%r11\n\t" - "# Reduce if top bit set\n\t" - "movq %%r11, %%rdx\n\t" - "shrq $63, %%rdx\n\t" - "imulq $19, %%rdx, %%rax\n\t" - "andq %%rcx, %%r11\n\t" - "addq %%rax, %%r8\n\t" - "adcq $0, %%r9\n\t" - "adcq $0, %%r10\n\t" - "adcq $0, %%r11\n\t" - "# Store\n\t" - "movq %%r8, 0(%[r])\n\t" - "movq %%r9, 8(%[r])\n\t" - "movq %%r10, 16(%[r])\n\t" - "movq %%r11, 24(%[r])\n\t" - : - : [r] "r" (r), [a] "r" (a) - : "memory", "rax", "rdx", "rcx", "rbx", "r8", "r9", "r10", "r11", - "r12", "r13", "r14", "r15" - ); -} -#endif /* HAVE_INTEL_AVX2 */ - -static WC_INLINE void fe_sq_x64(fe r, const fe a) -{ - __asm__ __volatile__ ( - "# A[0] * A[1]\n\t" - "movq 0(%[a]), %%rax\n\t" - "mulq 8(%[a])\n\t" - "movq %%rax, %%r8\n\t" - "movq %%rdx, %%r9\n\t" - "# A[0] * A[2]\n\t" - "movq 0(%[a]), %%rax\n\t" - "mulq 16(%[a])\n\t" - "xorq %%r10, %%r10\n\t" - "addq %%rax, %%r9\n\t" - "adcq %%rdx, %%r10\n\t" - "# A[0] * A[3]\n\t" - "movq 0(%[a]), %%rax\n\t" - "mulq 24(%[a])\n\t" - "xorq %%r11, %%r11\n\t" - "addq %%rax, %%r10\n\t" - "adcq %%rdx, %%r11\n\t" - "# A[1] * A[2]\n\t" - "movq 8(%[a]), %%rax\n\t" - "mulq 16(%[a])\n\t" - "xorq %%r12, %%r12\n\t" - "addq %%rax, %%r10\n\t" - 
"adcq %%rdx, %%r11\n\t" - "adcq $0, %%r12\n\t" - "# A[1] * A[3]\n\t" - "movq 8(%[a]), %%rax\n\t" - "mulq 24(%[a])\n\t" - "addq %%rax, %%r11\n\t" - "adcq %%rdx, %%r12\n\t" - "# A[2] * A[3]\n\t" - "movq 16(%[a]), %%rax\n\t" - "mulq 24(%[a])\n\t" - "xorq %%r13, %%r13\n\t" - "addq %%rax, %%r12\n\t" - "adcq %%rdx, %%r13\n\t" - "# Double\n\t" - "xorq %%r14, %%r14\n\t" - "addq %%r8, %%r8\n\t" - "adcq %%r9, %%r9\n\t" - "adcq %%r10, %%r10\n\t" - "adcq %%r11, %%r11\n\t" - "adcq %%r12, %%r12\n\t" - "adcq %%r13, %%r13\n\t" - "adcq $0, %%r14\n\t" - "# A[0] * A[0]\n\t" - "movq 0(%[a]), %%rax\n\t" - "mulq %%rax\n\t" - "movq %%rax, %%rcx\n\t" - "movq %%rdx, %%r15\n\t" - "# A[1] * A[1]\n\t" - "movq 8(%[a]), %%rax\n\t" - "mulq %%rax\n\t" - "addq %%r15, %%r8\n\t" - "adcq %%rax, %%r9\n\t" - "adcq $0, %%rdx\n\t" - "movq %%rdx, %%r15\n\t" - "# A[2] * A[2]\n\t" - "movq 16(%[a]), %%rax\n\t" - "mulq %%rax\n\t" - "addq %%r15, %%r10\n\t" - "adcq %%rax, %%r11\n\t" - "adcq $0, %%rdx\n\t" - "movq %%rdx, %%r15\n\t" - "# A[3] * A[3]\n\t" - "movq 24(%[a]), %%rax\n\t" - "mulq %%rax\n\t" - "addq %%rax, %%r13\n\t" - "adcq %%rdx, %%r14\n\t" - "addq %%r15, %%r12\n\t" - "adcq $0, %%r13\n\t" - "adcq $0, %%r14\n\t" - "# Reduce\n\t" - "movq $0x7fffffffffffffff, %%rbx\n\t" - "# Move top half into t4-t7 and remove top bit from t3\n\t" - "shldq $1, %%r13, %%r14\n\t" - "shldq $1, %%r12, %%r13\n\t" - "shldq $1, %%r11, %%r12\n\t" - "shldq $1, %%r10, %%r11\n\t" - "andq %%rbx, %%r10\n\t" - "# Multiply top half by 19\n\t" - "movq $19, %%rax\n\t" - "mulq %%r11\n\t" - "xorq %%r11, %%r11\n\t" - "addq %%rax, %%rcx\n\t" - "movq $19, %%rax\n\t" - "adcq %%rdx, %%r11\n\t" - "mulq %%r12\n\t" - "xorq %%r12, %%r12\n\t" - "addq %%rax, %%r8\n\t" - "movq $19, %%rax\n\t" - "adcq %%rdx, %%r12\n\t" - "mulq %%r13\n\t" - "xorq %%r13, %%r13\n\t" - "addq %%rax, %%r9\n\t" - "movq $19, %%rax\n\t" - "adcq %%rdx, %%r13\n\t" - "mulq %%r14\n\t" - "# Add remaining product results in\n\t" - "addq %%r11, %%r8\n\t" - "adcq %%r12, %%r9\n\t" - 
"adcq %%r13, %%r10\n\t" - "addq %%rax, %%r10\n\t" - "adcq $0, %%rdx\n\t" - "# Overflow\n\t" - "shldq $1, %%r10, %%rdx\n\t" - "imulq $19, %%rdx, %%rax\n\t" - "andq %%rbx, %%r10\n\t" - "addq %%rax, %%rcx\n\t" - "adcq $0, %%r8\n\t" - "adcq $0, %%r9\n\t" - "adcq $0, %%r10\n\t" - "# Reduce if top bit set\n\t" - "movq %%r10, %%rdx\n\t" - "shrq $63, %%rdx\n\t" - "imulq $19, %%rdx, %%rax\n\t" - "andq %%rbx, %%r10\n\t" - "addq %%rax, %%rcx\n\t" - "adcq $0, %%r8\n\t" - "adcq $0, %%r9\n\t" - "adcq $0, %%r10\n\t" - "# Store\n\t" - "movq %%rcx, 0(%[r])\n\t" - "movq %%r8, 8(%[r])\n\t" - "movq %%r9, 16(%[r])\n\t" - "movq %%r10, 24(%[r])\n\t" - : - : [r] "r" (r), [a] "r" (a) - : "memory", "rax", "rbx", "rdx", "rcx", "r8", "r9", "r10", "r11", "r12", - "r13", "r14", "r15" - ); -} - -/* Multiply a by 121666 and put result in r. (r = 121666 * a) - * - * r A field element. - * a A field element. - * b A field element. - */ -static WC_INLINE void fe_mul121666_int(fe r, fe a) -{ - __asm__ __volatile__ ( - "movq $0x7fffffffffffffff, %%rcx\n\t" - "movq $121666, %%rax\n\t" - "mulq 0(%[a])\n\t" - "xorq %%r10, %%r10\n\t" - "movq %%rax, %%r8\n\t" - "movq %%rdx, %%r9\n\t" - "movq $121666, %%rax\n\t" - "mulq 8(%[a])\n\t" - "xorq %%r11, %%r11\n\t" - "addq %%rax, %%r9\n\t" - "adcq %%rdx, %%r10\n\t" - "movq $121666, %%rax\n\t" - "mulq 16(%[a])\n\t" - "xorq %%r12, %%r12\n\t" - "addq %%rax, %%r10\n\t" - "adcq %%rdx, %%r11\n\t" - "movq $121666, %%rax\n\t" - "mulq 24(%[a])\n\t" - "addq %%rax, %%r11\n\t" - "adcq %%rdx, %%r12\n\t" - "shldq $1, %%r11, %%r12\n\t" - "andq %%rcx, %%r11\n\t" - "movq $19, %%rax\n\t" - "mulq %%r12\n\t" - "addq %%rax, %%r8\n\t" - "adcq $0, %%r9\n\t" - "adcq $0, %%r10\n\t" - "adcq $0, %%r11\n\t" - "movq %%r8, 0(%[r])\n\t" - "movq %%r9, 8(%[r])\n\t" - "movq %%r10, 16(%[r])\n\t" - "movq %%r11, 24(%[r])\n\t" - : - : [r] "r" (r), [a] "r" (a) - : "memory", "rax", "rcx", "rdx", "r8", "r9", "r10", "r11", "r12" - ); -} - -void fe_mul121666(fe r, fe a) -{ - fe_mul121666_int(r, a); -} - 
-/* Find the inverse of a modulo 2^255 - 1 and put result in r. - * (r * a) mod (2^255 - 1) = 1 - * Implementation is constant time. - * - * r A field element. - * a A field element. - */ -void fe_invert(fe r, const fe a) -{ - fe t0, t1, t2, t3; - int i; - - /* a ^ (2^255 - 21) */ - fe_sq(t0, a); for (i = 1; i < 1; ++i) fe_sq(t0, t0); - fe_sq(t1, t0); for (i = 1; i < 2; ++i) fe_sq(t1, t1); fe_mul(t1, a, t1); - fe_mul(t0, t0, t1); - fe_sq(t2, t0); for (i = 1; i < 1; ++i) fe_sq(t2, t2); fe_mul(t1, t1, t2); - fe_sq(t2, t1); for (i = 1; i < 5; ++i) fe_sq(t2, t2); fe_mul(t1, t2, t1); - fe_sq(t2, t1); for (i = 1; i < 10; ++i) fe_sq(t2, t2); fe_mul(t2, t2, t1); - fe_sq(t3, t2); for (i = 1; i < 20; ++i) fe_sq(t3, t3); fe_mul(t2, t3, t2); - fe_sq(t2, t2); for (i = 1; i < 10; ++i) fe_sq(t2, t2); fe_mul(t1, t2, t1); - fe_sq(t2, t1); for (i = 1; i < 50; ++i) fe_sq(t2, t2); fe_mul(t2, t2, t1); - fe_sq(t3, t2); for (i = 1; i < 100; ++i) fe_sq(t3, t3); fe_mul(t2, t3, t2); - fe_sq(t2, t2); for (i = 1; i < 50; ++i) fe_sq(t2, t2); fe_mul(t1, t2, t1); - fe_sq(t1, t1); for (i = 1; i < 5; ++i) fe_sq(t1, t1); fe_mul( r, t1, t0); -} - -#ifdef HAVE_INTEL_AVX2 -/* Find the inverse of a modulo 2^255 - 1 and put result in r. - * (r * a) mod (2^255 - 1) = 1 - * Implementation is constant time. - * - * r A field element. - * a A field element. 
- */ -static void fe_invert_avx2(fe r, const fe a) -{ - fe t0, t1, t2, t3; - int i; - - /* a ^ (2^255 - 21) */ - fe_sq_avx2(t0, a); for (i = 1; i < 1; ++i) fe_sq_avx2(t0, t0); - fe_sq_avx2(t1, t0); for (i = 1; i < 2; ++i) fe_sq_avx2(t1, t1); fe_mul_avx2(t1, a, t1); - fe_mul_avx2(t0, t0, t1); - fe_sq_avx2(t2, t0); for (i = 1; i < 1; ++i) fe_sq_avx2(t2, t2); fe_mul_avx2(t1, t1, t2); - fe_sq_avx2(t2, t1); for (i = 1; i < 5; ++i) fe_sq_avx2(t2, t2); fe_mul_avx2(t1, t2, t1); - fe_sq_avx2(t2, t1); for (i = 1; i < 10; ++i) fe_sq_avx2(t2, t2); fe_mul_avx2(t2, t2, t1); - fe_sq_avx2(t3, t2); for (i = 1; i < 20; ++i) fe_sq_avx2(t3, t3); fe_mul_avx2(t2, t3, t2); - fe_sq_avx2(t2, t2); for (i = 1; i < 10; ++i) fe_sq_avx2(t2, t2); fe_mul_avx2(t1, t2, t1); - fe_sq_avx2(t2, t1); for (i = 1; i < 50; ++i) fe_sq_avx2(t2, t2); fe_mul_avx2(t2, t2, t1); - fe_sq_avx2(t3, t2); for (i = 1; i < 100; ++i) fe_sq_avx2(t3, t3); fe_mul_avx2(t2, t3, t2); - fe_sq_avx2(t2, t2); for (i = 1; i < 50; ++i) fe_sq_avx2(t2, t2); fe_mul_avx2(t1, t2, t1); - fe_sq_avx2(t1, t1); for (i = 1; i < 5; ++i) fe_sq_avx2(t1, t1); fe_mul_avx2( r, t1, t0); -} -#endif - -/* Find the inverse of a modulo 2^255 - 1 and put result in r. - * (r * a) mod (2^255 - 1) = 1 - * Implementation is constant time. - * - * r A field element. - * a A field element. 
- */ -static void fe_invert_x64(fe r, const fe a) -{ - fe t0, t1, t2, t3; - int i; - - /* a ^ (2^255 - 21) */ - fe_sq_x64(t0, a); for (i = 1; i < 1; ++i) fe_sq_x64(t0, t0); - fe_sq_x64(t1, t0); for (i = 1; i < 2; ++i) fe_sq_x64(t1, t1); fe_mul_x64(t1, a, t1); - fe_mul_x64(t0, t0, t1); - fe_sq_x64(t2, t0); for (i = 1; i < 1; ++i) fe_sq_x64(t2, t2); fe_mul_x64(t1, t1, t2); - fe_sq_x64(t2, t1); for (i = 1; i < 5; ++i) fe_sq_x64(t2, t2); fe_mul_x64(t1, t2, t1); - fe_sq_x64(t2, t1); for (i = 1; i < 10; ++i) fe_sq_x64(t2, t2); fe_mul_x64(t2, t2, t1); - fe_sq_x64(t3, t2); for (i = 1; i < 20; ++i) fe_sq_x64(t3, t3); fe_mul_x64(t2, t3, t2); - fe_sq_x64(t2, t2); for (i = 1; i < 10; ++i) fe_sq_x64(t2, t2); fe_mul_x64(t1, t2, t1); - fe_sq_x64(t2, t1); for (i = 1; i < 50; ++i) fe_sq_x64(t2, t2); fe_mul_x64(t2, t2, t1); - fe_sq_x64(t3, t2); for (i = 1; i < 100; ++i) fe_sq_x64(t3, t3); fe_mul_x64(t2, t3, t2); - fe_sq_x64(t2, t2); for (i = 1; i < 50; ++i) fe_sq_x64(t2, t2); fe_mul_x64(t1, t2, t1); - fe_sq_x64(t1, t1); for (i = 1; i < 5; ++i) fe_sq_x64(t1, t1); fe_mul_x64( r, t1, t0); -} - -/* Scalar multiply the field element a by n using Montgomery Ladder and places - * result in r. - * - * r A field element as an array of bytes. - * n The scalar as an array of bytes. - * a A field element as an array of bytes. 
- */ -int curve25519(byte* r, byte* n, byte* a) -{ - fe x1, x2, z2, x3, z3; - fe t0, t1; - int i, j; - unsigned int swap = 0; - unsigned int b, prev_b = 0; - - fe_frombytes(x1, a); - fe_1(x2); - fe_0(z2); - fe_copy(x3, x1); - fe_1(z3); - -#ifdef HAVE_INTEL_AVX2 - if (IS_INTEL_BMI2(intelFlags) && IS_INTEL_ADX(intelFlags)) { - j = 6; - for (i = 31; i >= 0; i--) { - while (j >= 0) { - b = n[i] >> j; - b &= 1; - swap = b ^ prev_b; - prev_b = b; - fe_cswap_int(x2, x3, swap); - fe_cswap_int(z2, z3, swap); - - fe_sub_int(t0, x3, z3); - fe_sub_int(t1, x2, z2); - fe_add_int(x2, x2, z2); - fe_add_int(z2, x3, z3); - fe_mul_avx2(z3, t0, x2); - fe_mul_avx2(z2, z2, t1); - fe_sq_avx2(t0, t1); - fe_sq_avx2(t1, x2); - fe_add_int(x3, z3, z2); - fe_sub_int(z2, z3, z2); - fe_mul_avx2(x2, t1, t0); - fe_sub_int(t1, t1, t0); - fe_sq_avx2(z2, z2); - fe_mul121666_int(z3, t1); - fe_sq_avx2(x3, x3); - fe_add_int(t0, t0, z3); - fe_mul_avx2(z3, x1, z2); - fe_mul_avx2(z2, t1, t0); - j--; - } - j = 7; - } - - fe_invert_avx2(z2, z2); - fe_mul_avx2(x2, x2, z2); - fe_tobytes(r, x2); - } - else -#endif - { - j = 6; - for (i = 31; i >= 0; i--) { - while (j >= 0) { - b = n[i] >> j; - b &= 1; - swap = b ^ prev_b; - prev_b = b; - fe_cswap_int(x2, x3, swap); - fe_cswap_int(z2, z3, swap); - - fe_sub_int(t0, x3, z3); - fe_sub_int(t1, x2, z2); - fe_add_int(x2, x2, z2); - fe_add_int(z2, x3, z3); - fe_mul_x64(z3, t0, x2); - fe_mul_x64(z2, z2, t1); - fe_sq_x64(t0, t1); - fe_sq_x64(t1, x2); - fe_add_int(x3, z3, z2); - fe_sub_int(z2, z3, z2); - fe_mul_x64(x2, t1, t0); - fe_sub_int(t1, t1, t0); - fe_sq_x64(z2, z2); - fe_mul121666_int(z3, t1); - fe_sq_x64(x3, x3); - fe_add_int(t0, t0, z3); - fe_mul_x64(z3, x1, z2); - fe_mul_x64(z2, t1, t0); - j--; - } - j = 7; - } - - fe_invert_x64(z2, z2); - fe_mul_x64(x2, x2, z2); - fe_tobytes(r, x2); - } - - return 0; -} - -/* The field element value 0 as an array of bytes. */ -static const unsigned char zero[32] = {0}; - -/* Constant time check as to whether a is not 0. 
- * - * a A field element. - */ -int fe_isnonzero(const fe a) -{ - unsigned char s[32]; - fe_tobytes(s, a); - return ConstantCompare(s, zero, 32); -} - -/* Checks whether a is negative. - * - * a A field element. - */ -int fe_isnegative(const fe a) -{ - unsigned char s[32]; - fe_tobytes(s, a); - return s[0] & 1; -} - -/* Negates field element a and stores the result in r. - * - * r A field element. - * a A field element. - */ -void fe_neg(fe r, const fe a) -{ - __asm__ __volatile__ ( - "movq $-19, %%rax\n\t" - "movq $-1, %%rdx\n\t" - "movq $-1, %%r8\n\t" - "movq $0x7fffffffffffffff, %%r9\n\t" - "subq 0(%[a]), %%rax\n\t" - "sbbq 8(%[a]), %%rdx\n\t" - "sbbq 16(%[a]), %%r8\n\t" - "sbbq 24(%[a]), %%r9\n\t" - "movq %%rax, 0(%[r])\n\t" - "movq %%rdx, 8(%[r])\n\t" - "movq %%r8, 16(%[r])\n\t" - "movq %%r9, 24(%[r])\n\t" - : - : [r] "r" (r), [a] "r" (a) - : "memory", "rax", "rdx", "r8", "r9" - ); -} - -/* Constant time, conditional move of b into a. - * a is not changed if the condition is 0. - * - * a A field element. - * b A field element. - * c If 1 then copy and if 0 then don't copy. 
- */ -void fe_cmov(fe a, const fe b, int c) -{ - __asm__ __volatile__ ( - "cmpl $1, %[c]\n\t" - "movq 0(%[a]), %%rcx\n\t" - "movq 8(%[a]), %%rdx\n\t" - "movq 16(%[a]), %%r8\n\t" - "movq 24(%[a]), %%r9\n\t" - "cmoveq 0(%[b]), %%rcx\n\t" - "cmoveq 8(%[b]), %%rdx\n\t" - "cmoveq 16(%[b]), %%r8\n\t" - "cmoveq 24(%[b]), %%r9\n\t" - "movq %%rcx, 0(%[a])\n\t" - "movq %%rdx, 8(%[a])\n\t" - "movq %%r8, 16(%[a])\n\t" - "movq %%r9, 24(%[a])\n\t" - : - : [a] "r" (a), [b] "r" (b), [c] "r" (c) - : "memory", "rax", "rcx", "rdx", "r8", "r9" - ); -} - -void fe_pow22523(fe r, const fe a) -{ - fe t0, t1, t2; - int i; - - /* a ^ (2^255 - 23) */ - fe_sq(t0, a); for (i = 1; i < 1; ++i) fe_sq(t0, t0); - fe_sq(t1, t0); for (i = 1; i < 2; ++i) fe_sq(t1, t1); fe_mul(t1, a, t1); - fe_mul(t0, t0, t1); - fe_sq(t0, t0); for (i = 1; i < 1; ++i) fe_sq(t0, t0); fe_mul(t0, t1, t0); - fe_sq(t1, t0); for (i = 1; i < 5; ++i) fe_sq(t1, t1); fe_mul(t0, t1, t0); - fe_sq(t1, t0); for (i = 1; i < 10; ++i) fe_sq(t1, t1); fe_mul(t1, t1, t0); - fe_sq(t2, t1); for (i = 1; i < 20; ++i) fe_sq(t2, t2); fe_mul(t1, t2, t1); - fe_sq(t1, t1); for (i = 1; i < 10; ++i) fe_sq(t1, t1); fe_mul(t0, t1, t0); - fe_sq(t1, t0); for (i = 1; i < 50; ++i) fe_sq(t1, t1); fe_mul(t1, t1, t0); - fe_sq(t2, t1); for (i = 1; i < 100; ++i) fe_sq(t2, t2); fe_mul(t1, t2, t1); - fe_sq(t1, t1); for (i = 1; i < 50; ++i) fe_sq(t1, t1); fe_mul(t0, t1, t0); - fe_sq(t0, t0); for (i = 1; i < 2; ++i) fe_sq(t0, t0); fe_mul( r, t0, a); - - return; -} - -/* Double the square of a and put result in r. (r = 2 * a * a) - * - * r A field element. - * a A field element. - * b A field element. 
- */ -void fe_sq2(fe r, const fe a) -{ - (*fe_sq2_p)(r, a); -} - -#ifdef HAVE_INTEL_AVX2 -static WC_INLINE void fe_sq2_avx2(fe r, const fe a) -{ - __asm__ __volatile__ ( - "# A[0] * A[1]\n\t" - "movq 0(%[a]), %%rdx\n\t" - "mulxq 8(%[a]), %%r9, %%r10\n\t" - "# A[0] * A[3]\n\t" - "mulxq 24(%[a]), %%r11, %%r12\n\t" - "# A[2] * A[1]\n\t" - "movq 16(%[a]), %%rdx\n\t" - "mulxq 8(%[a]), %%rcx, %%rbx\n\t" - "xorq %%r15, %%r15\n\t" - "adoxq %%rcx, %%r11\n\t" - "# A[2] * A[3]\n\t" - "mulxq 24(%[a]), %%r13, %%r14\n\t" - "adoxq %%rbx, %%r12\n\t" - "# A[2] * A[0]\n\t" - "mulxq 0(%[a]), %%rcx, %%rbx\n\t" - "adoxq %%r15, %%r13\n\t" - "adcxq %%rcx, %%r10\n\t" - "adoxq %%r15, %%r14\n\t" - "# A[1] * A[3]\n\t" - "movq 8(%[a]), %%rdx\n\t" - "mulxq 24(%[a]), %%rax, %%r8\n\t" - "adcxq %%rbx, %%r11\n\t" - "adcxq %%rax, %%r12\n\t" - "adcxq %%r8, %%r13\n\t" - "adcxq %%r15, %%r14\n\t" - "# Double with Carry Flag\n\t" - "xorq %%r15, %%r15\n\t" - "# A[0] * A[0]\n\t" - "movq 0(%[a]), %%rdx\n\t" - "mulxq %%rdx, %%r8, %%rax\n\t" - "adcxq %%r9, %%r9\n\t" - "# A[1] * A[1]\n\t" - "movq 8(%[a]), %%rdx\n\t" - "mulxq %%rdx, %%rcx, %%rbx\n\t" - "adcxq %%r10, %%r10\n\t" - "adoxq %%rax, %%r9\n\t" - "adcxq %%r11, %%r11\n\t" - "adoxq %%rcx, %%r10\n\t" - "# A[2] * A[2]\n\t" - "movq 16(%[a]), %%rdx\n\t" - "mulxq %%rdx, %%rax, %%rcx\n\t" - "adcxq %%r12, %%r12\n\t" - "adoxq %%rbx, %%r11\n\t" - "adcxq %%r13, %%r13\n\t" - "adoxq %%rax, %%r12\n\t" - "# A[3] * A[3]\n\t" - "movq 24(%[a]), %%rdx\n\t" - "mulxq %%rdx, %%rax, %%rbx\n\t" - "adcxq %%r14, %%r14\n\t" - "adoxq %%rcx, %%r13\n\t" - "adcxq %%r15, %%r15\n\t" - "adoxq %%rax, %%r14\n\t" - "adoxq %%rbx, %%r15\n\t" - "# Reduce\n\t" - "movq $0x7fffffffffffffff, %%rbx\n\t" - "xorq %%rax, %%rax\n\t" - "# Move top half into t4-t7, remove top bit from t3 and double\n\t" - "shldq $3, %%r15, %%rax\n\t" - "shldq $2, %%r14, %%r15\n\t" - "shldq $2, %%r13, %%r14\n\t" - "shldq $2, %%r12, %%r13\n\t" - "shldq $2, %%r11, %%r12\n\t" - "shldq $1, %%r10, %%r11\n\t" - "shldq $1, 
%%r9, %%r10\n\t" - "shldq $1, %%r8, %%r9\n\t" - "shlq $1, %%r8\n\t" - "andq %%rbx, %%r11\n\t" - "# Two out left, one in right\n\t" - "andq %%rbx, %%r15\n\t" - "# Multiply top bits by 19*19\n\t" - "imulq $361, %%rax, %%rcx\n\t" - "xorq %%rbx, %%rbx\n\t" - "# Multiply top half by 19\n\t" - "movq $19, %%rdx\n\t" - "adoxq %%rcx, %%r8\n\t" - "mulxq %%r12, %%rax, %%r12\n\t" - "movq $19, %%rdx\n\t" - "adcxq %%rax, %%r8\n\t" - "adoxq %%r12, %%r9\n\t" - "mulxq %%r13, %%rax, %%r13\n\t" - "movq $19, %%rdx\n\t" - "adcxq %%rax, %%r9\n\t" - "adoxq %%r13, %%r10\n\t" - "mulxq %%r14, %%rax, %%r14\n\t" - "movq $19, %%rdx\n\t" - "adcxq %%rax, %%r10\n\t" - "adoxq %%r14, %%r11\n\t" - "mulxq %%r15, %%r15, %%rdx\n\t" - "adcxq %%r15, %%r11\n\t" - "adoxq %%rbx, %%rdx\n\t" - "adcxq %%rbx, %%rdx\n\t" - "# Overflow\n\t" - "shldq $1, %%r11, %%rdx\n\t" - "movq $0x7fffffffffffffff, %%rbx\n\t" - "imulq $19, %%rdx, %%rax\n\t" - "andq %%rbx, %%r11\n\t" - "addq %%rax, %%r8\n\t" - "adcq $0, %%r9\n\t" - "adcq $0, %%r10\n\t" - "adcq $0, %%r11\n\t" - "# Reduce if top bit set\n\t" - "movq %%r11, %%rdx\n\t" - "shrq $63, %%rdx\n\t" - "imulq $19, %%rdx, %%rax\n\t" - "andq %%rbx, %%r11\n\t" - "addq %%rax, %%r8\n\t" - "adcq $0, %%r9\n\t" - "adcq $0, %%r10\n\t" - "adcq $0, %%r11\n\t" - "# Store\n\t" - "movq %%r8, 0(%[r])\n\t" - "movq %%r9, 8(%[r])\n\t" - "movq %%r10, 16(%[r])\n\t" - "movq %%r11, 24(%[r])\n\t" - : - : [r] "r" (r), [a] "r" (a) - : "memory", "rax", "rdx", "rcx", "rbx", "r8", "r9", "r10", "r11", - "r12", "r13", "r14", "r15" - ); -} -#endif /* HAVE_INTEL_AVX2 */ - -static WC_INLINE void fe_sq2_x64(fe r, const fe a) -{ - __asm__ __volatile__ ( - "# A[0] * A[1]\n\t" - "movq 0(%[a]), %%rax\n\t" - "mulq 8(%[a])\n\t" - "movq %%rax, %%r8\n\t" - "movq %%rdx, %%r9\n\t" - "# A[0] * A[2]\n\t" - "movq 0(%[a]), %%rax\n\t" - "mulq 16(%[a])\n\t" - "xorq %%r10, %%r10\n\t" - "addq %%rax, %%r9\n\t" - "adcq %%rdx, %%r10\n\t" - "# A[0] * A[3]\n\t" - "movq 0(%[a]), %%rax\n\t" - "mulq 24(%[a])\n\t" - "xorq %%r11, 
%%r11\n\t" - "addq %%rax, %%r10\n\t" - "adcq %%rdx, %%r11\n\t" - "# A[1] * A[2]\n\t" - "movq 8(%[a]), %%rax\n\t" - "mulq 16(%[a])\n\t" - "xorq %%r12, %%r12\n\t" - "addq %%rax, %%r10\n\t" - "adcq %%rdx, %%r11\n\t" - "adcq $0, %%r12\n\t" - "# A[1] * A[3]\n\t" - "movq 8(%[a]), %%rax\n\t" - "mulq 24(%[a])\n\t" - "addq %%rax, %%r11\n\t" - "adcq %%rdx, %%r12\n\t" - "# A[2] * A[3]\n\t" - "movq 16(%[a]), %%rax\n\t" - "mulq 24(%[a])\n\t" - "xorq %%r13, %%r13\n\t" - "addq %%rax, %%r12\n\t" - "adcq %%rdx, %%r13\n\t" - "# Double\n\t" - "xorq %%r14, %%r14\n\t" - "addq %%r8, %%r8\n\t" - "adcq %%r9, %%r9\n\t" - "adcq %%r10, %%r10\n\t" - "adcq %%r11, %%r11\n\t" - "adcq %%r12, %%r12\n\t" - "adcq %%r13, %%r13\n\t" - "adcq $0, %%r14\n\t" - "# A[0] * A[0]\n\t" - "movq 0(%[a]), %%rax\n\t" - "mulq %%rax\n\t" - "movq %%rax, %%rcx\n\t" - "movq %%rdx, %%r15\n\t" - "# A[1] * A[1]\n\t" - "movq 8(%[a]), %%rax\n\t" - "mulq %%rax\n\t" - "addq %%r15, %%r8\n\t" - "adcq %%rax, %%r9\n\t" - "adcq $0, %%rdx\n\t" - "movq %%rdx, %%r15\n\t" - "# A[2] * A[2]\n\t" - "movq 16(%[a]), %%rax\n\t" - "mulq %%rax\n\t" - "addq %%r15, %%r10\n\t" - "adcq %%rax, %%r11\n\t" - "adcq $0, %%rdx\n\t" - "movq %%rdx, %%r15\n\t" - "# A[3] * A[3]\n\t" - "movq 24(%[a]), %%rax\n\t" - "mulq %%rax\n\t" - "addq %%rax, %%r13\n\t" - "adcq %%rdx, %%r14\n\t" - "addq %%r15, %%r12\n\t" - "adcq $0, %%r13\n\t" - "adcq $0, %%r14\n\t" - "# Reduce\n\t" - "movq $0x7fffffffffffffff, %%rbx\n\t" - "xorq %%rax, %%rax\n\t" - "# Move top half into t4-t7 and remove top bit from t3\n\t" - "shldq $3, %%r14, %%rax\n\t" - "shldq $2, %%r13, %%r14\n\t" - "shldq $2, %%r12, %%r13\n\t" - "shldq $2, %%r11, %%r12\n\t" - "shldq $2, %%r10, %%r11\n\t" - "shldq $1, %%r9, %%r10\n\t" - "shldq $1, %%r8, %%r9\n\t" - "shldq $1, %%rcx, %%r8\n\t" - "shlq $1, %%rcx\n\t" - "andq %%rbx, %%r10\n\t" - "# Two out left, one in right\n\t" - "andq %%rbx, %%r14\n\t" - "# Multiply top bits by 19*19\n\t" - "imulq $361, %%rax, %%r15\n\t" - "# Multiply top half by 19\n\t" - "movq 
$19, %%rax\n\t" - "mulq %%r11\n\t" - "xorq %%r11, %%r11\n\t" - "addq %%rax, %%rcx\n\t" - "movq $19, %%rax\n\t" - "adcq %%rdx, %%r11\n\t" - "mulq %%r12\n\t" - "xorq %%r12, %%r12\n\t" - "addq %%rax, %%r8\n\t" - "movq $19, %%rax\n\t" - "adcq %%rdx, %%r12\n\t" - "mulq %%r13\n\t" - "xorq %%r13, %%r13\n\t" - "addq %%rax, %%r9\n\t" - "movq $19, %%rax\n\t" - "adcq %%rdx, %%r13\n\t" - "mulq %%r14\n\t" - "# Add remaining products back in\n\t" - "addq %%r15, %%rcx\n\t" - "adcq %%r11, %%r8\n\t" - "adcq %%r12, %%r9\n\t" - "adcq %%r13, %%r10\n\t" - "addq %%rax, %%r10\n\t" - "adcq $0, %%rdx\n\t" - "# Overflow\n\t" - "shldq $1, %%r10, %%rdx\n\t" - "imulq $19, %%rdx, %%rax\n\t" - "andq %%rbx, %%r10\n\t" - "addq %%rax, %%rcx\n\t" - "adcq $0, %%r8\n\t" - "adcq $0, %%r9\n\t" - "adcq $0, %%r10\n\t" - "# Reduce if top bit set\n\t" - "movq %%r10, %%rdx\n\t" - "shrq $63, %%rdx\n\t" - "imulq $19, %%rdx, %%rax\n\t" - "andq %%rbx, %%r10\n\t" - "addq %%rax, %%rcx\n\t" - "adcq $0, %%r8\n\t" - "adcq $0, %%r9\n\t" - "adcq $0, %%r10\n\t" - "# Store\n\t" - "movq %%rcx, 0(%[r])\n\t" - "movq %%r8, 8(%[r])\n\t" - "movq %%r9, 16(%[r])\n\t" - "movq %%r10, 24(%[r])\n\t" - : - : [r] "r" (r), [a] "r" (a) - : "memory", "rax", "rbx", "rdx", "rcx", "r8", "r9", "r10", "r11", "r12", - "r13", "r14", "r15" - ); -} - -/* Load 3 little endian bytes into a 64-bit word. - * - * in An array of bytes. - * returns a 64-bit word. - */ -uint64_t load_3(const unsigned char *in) -{ - uint64_t result; - - result = ((((uint64_t)in[0]) ) | - (((uint64_t)in[1]) << 8) | - (((uint64_t)in[2]) << 16)); - - return result; -} - -/* Load 4 little endian bytes into a 64-bit word. - * - * in An array of bytes. - * returns a 64-bit word. 
- */ -uint64_t load_4(const unsigned char *in) -{ - uint64_t result; - - result = ((((uint64_t)in[0]) ) | - (((uint64_t)in[1]) << 8) | - (((uint64_t)in[2]) << 16) | - (((uint64_t)in[3]) << 24)); - - return result; -} - -void fe_ge_to_p2(fe rx, fe ry, fe rz, const fe px, const fe py, const fe pz, - const fe pt) -{ -#ifdef HAVE_INTEL_AVX2 - if (IS_INTEL_BMI2(intelFlags) && IS_INTEL_ADX(intelFlags)) { - fe_mul_avx2(rx, px, pt); - fe_mul_avx2(ry, py, pz); - fe_mul_avx2(rz, pz, pt); - } - else -#endif - { - fe_mul_x64(rx, px, pt); - fe_mul_x64(ry, py, pz); - fe_mul_x64(rz, pz, pt); - } -} - -void fe_ge_to_p3(fe rx, fe ry, fe rz, fe rt, const fe px, const fe py, - const fe pz, const fe pt) -{ -#ifdef HAVE_INTEL_AVX2 - if (IS_INTEL_BMI2(intelFlags) && IS_INTEL_ADX(intelFlags)) { - fe_mul_avx2(rx, px, pt); - fe_mul_avx2(ry, py, pz); - fe_mul_avx2(rz, pz, pt); - fe_mul_avx2(rt, px, py); - } - else -#endif - { - fe_mul_x64(rx, px, pt); - fe_mul_x64(ry, py, pz); - fe_mul_x64(rz, pz, pt); - fe_mul_x64(rt, px, py); - } -} - -void fe_ge_dbl(fe rx, fe ry, fe rz, fe rt, const fe px, const fe py, - const fe pz) -{ - fe t0; -#ifdef HAVE_INTEL_AVX2 - if (IS_INTEL_BMI2(intelFlags) && IS_INTEL_ADX(intelFlags)) { - fe_sq_avx2(rx,px); - fe_sq_avx2(rz,py); - fe_sq2_avx2(rt,pz); - fe_add_int(ry,px,py); - fe_sq_avx2(t0,ry); - fe_add_int(ry,rz,rx); - fe_sub_int(rz,rz,rx); - fe_sub_int(rx,t0,ry); - fe_sub_int(rt,rt,rz); - } - else -#endif - { - fe_sq_x64(rx,px); - fe_sq_x64(rz,py); - fe_sq2_x64(rt,pz); - fe_add_int(ry,px,py); - fe_sq_x64(t0,ry); - fe_add_int(ry,rz,rx); - fe_sub_int(rz,rz,rx); - fe_sub_int(rx,t0,ry); - fe_sub_int(rt,rt,rz); - } -} - -void fe_ge_madd(fe rx, fe ry, fe rz, fe rt, const fe px, const fe py, - const fe pz, const fe pt, const fe qxy2d, const fe qyplusx, - const fe qyminusx) -{ - fe t0; -#ifdef HAVE_INTEL_AVX2 - if (IS_INTEL_BMI2(intelFlags) && IS_INTEL_ADX(intelFlags)) { - fe_add_int(rx,py,px); - fe_sub_int(ry,py,px); - fe_mul_avx2(rz,rx,qyplusx); - 
fe_mul_avx2(ry,ry,qyminusx); - fe_mul_avx2(rt,qxy2d,pt); - fe_add_int(t0,pz,pz); - fe_sub_int(rx,rz,ry); - fe_add_int(ry,rz,ry); - fe_add_int(rz,t0,rt); - fe_sub_int(rt,t0,rt); - } - else -#endif - { - fe_add_int(rx,py,px); - fe_sub_int(ry,py,px); - fe_mul_x64(rz,rx,qyplusx); - fe_mul_x64(ry,ry,qyminusx); - fe_mul_x64(rt,qxy2d,pt); - fe_add_int(t0,pz,pz); - fe_sub_int(rx,rz,ry); - fe_add_int(ry,rz,ry); - fe_add_int(rz,t0,rt); - fe_sub_int(rt,t0,rt); - } -} - -void fe_ge_msub(fe rx, fe ry, fe rz, fe rt, const fe px, const fe py, - const fe pz, const fe pt, const fe qxy2d, const fe qyplusx, - const fe qyminusx) -{ - fe t0; -#ifdef HAVE_INTEL_AVX2 - if (IS_INTEL_BMI2(intelFlags) && IS_INTEL_ADX(intelFlags)) { - fe_add_int(rx,py,px); - fe_sub_int(ry,py,px); - fe_mul_avx2(rz,rx,qyminusx); - fe_mul_avx2(ry,ry,qyplusx); - fe_mul_avx2(rt,qxy2d,pt); - fe_add_int(t0,pz,pz); - fe_sub_int(rx,rz,ry); - fe_add_int(ry,rz,ry); - fe_sub_int(rz,t0,rt); - fe_add_int(rt,t0,rt); - } - else -#endif - { - fe_add_int(rx,py,px); - fe_sub_int(ry,py,px); - fe_mul_x64(rz,rx,qyminusx); - fe_mul_x64(ry,ry,qyplusx); - fe_mul_x64(rt,qxy2d,pt); - fe_add_int(t0,pz,pz); - fe_sub_int(rx,rz,ry); - fe_add_int(ry,rz,ry); - fe_sub_int(rz,t0,rt); - fe_add_int(rt,t0,rt); - } -} - -void fe_ge_add(fe rx, fe ry, fe rz, fe rt, const fe px, const fe py, - const fe pz, const fe pt, const fe qz, const fe qt2d, - const fe qyplusx, const fe qyminusx) -{ - fe t0; -#ifdef HAVE_INTEL_AVX2 - if (IS_INTEL_BMI2(intelFlags) && IS_INTEL_ADX(intelFlags)) { - fe_add_int(rx,py,px); - fe_sub_int(ry,py,px); - fe_mul_avx2(rz,rx,qyplusx); - fe_mul_avx2(ry,ry,qyminusx); - fe_mul_avx2(rt,qt2d,pt); - fe_mul_avx2(rx,pz,qz); - fe_add_int(t0,rx,rx); - fe_sub_int(rx,rz,ry); - fe_add_int(ry,rz,ry); - fe_add_int(rz,t0,rt); - fe_sub_int(rt,t0,rt); - } - else -#endif - { - fe_add_int(rx,py,px); - fe_sub_int(ry,py,px); - fe_mul_x64(rz,rx,qyplusx); - fe_mul_x64(ry,ry,qyminusx); - fe_mul_x64(rt,qt2d,pt); - fe_mul_x64(rx,pz,qz); - 
fe_add_int(t0,rx,rx); - fe_sub_int(rx,rz,ry); - fe_add_int(ry,rz,ry); - fe_add_int(rz,t0,rt); - fe_sub_int(rt,t0,rt); - } -} - -void fe_ge_sub(fe rx, fe ry, fe rz, fe rt, const fe px, const fe py, - const fe pz, const fe pt, const fe qz, const fe qt2d, - const fe qyplusx, const fe qyminusx) -{ - fe t0; -#ifdef HAVE_INTEL_AVX2 - if (IS_INTEL_BMI2(intelFlags) && IS_INTEL_ADX(intelFlags)) { - fe_add_int(rx,py,px); - fe_sub_int(ry,py,px); - fe_mul_avx2(rz,rx,qyminusx); - fe_mul_avx2(ry,ry,qyplusx); - fe_mul_avx2(rt,qt2d,pt); - fe_mul_avx2(rx,pz,qz); - fe_add_int(t0,rx,rx); - fe_sub_int(rx,rz,ry); - fe_add_int(ry,rz,ry); - fe_sub_int(rz,t0,rt); - fe_add_int(rt,t0,rt); - } - else -#endif - { - fe_add_int(rx,py,px); - fe_sub_int(ry,py,px); - fe_mul_x64(rz,rx,qyminusx); - fe_mul_x64(ry,ry,qyplusx); - fe_mul_x64(rt,qt2d,pt); - fe_mul_x64(rx,pz,qz); - fe_add_int(t0,rx,rx); - fe_sub_int(rx,rz,ry); - fe_add_int(ry,rz,ry); - fe_sub_int(rz,t0,rt); - fe_add_int(rt,t0,rt); - } -} - -void fe_cmov_table(fe* r, fe* base, signed char b) -{ - __asm__ __volatile__ ( - "movsbq %[b], %%rax\n\t" - "cdq\n\t" - "xorb %%dl, %%al\n\t" - "subb %%dl, %%al\n\t" - "movb %%al, %%r13b\n\t" - - "movq $1, %%rax\n\t" - "xorq %%rbx, %%rbx\n\t" - "xorq %%rcx, %%rcx\n\t" - "xorq %%rdx, %%rdx\n\t" - "movq $1, %%r8\n\t" - "xorq %%r9 , %%r9\n\t" - "xorq %%r10, %%r10\n\t" - "xorq %%r11, %%r11\n\t" - - "cmpb $1, %%r13b\n\t" - "movq (0*96)+0(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rax\n\t" - "movq (0*96)+8(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rbx\n\t" - "movq (0*96)+16(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rcx\n\t" - "movq (0*96)+24(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rdx\n\t" - "movq (0*96)+32(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r8\n\t" - "movq (0*96)+40(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r9\n\t" - "movq (0*96)+48(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r10\n\t" - "movq (0*96)+56(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r11\n\t" - "cmpb $2, %%r13b\n\t" - "movq (1*96)+0(%[base]), %%r12\n\t" - 
"cmoveq %%r12, %%rax\n\t" - "movq (1*96)+8(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rbx\n\t" - "movq (1*96)+16(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rcx\n\t" - "movq (1*96)+24(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rdx\n\t" - "movq (1*96)+32(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r8\n\t" - "movq (1*96)+40(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r9\n\t" - "movq (1*96)+48(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r10\n\t" - "movq (1*96)+56(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r11\n\t" - "cmpb $3, %%r13b\n\t" - "movq (2*96)+0(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rax\n\t" - "movq (2*96)+8(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rbx\n\t" - "movq (2*96)+16(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rcx\n\t" - "movq (2*96)+24(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rdx\n\t" - "movq (2*96)+32(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r8\n\t" - "movq (2*96)+40(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r9\n\t" - "movq (2*96)+48(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r10\n\t" - "movq (2*96)+56(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r11\n\t" - "cmpb $4, %%r13b\n\t" - "movq (3*96)+0(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rax\n\t" - "movq (3*96)+8(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rbx\n\t" - "movq (3*96)+16(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rcx\n\t" - "movq (3*96)+24(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rdx\n\t" - "movq (3*96)+32(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r8\n\t" - "movq (3*96)+40(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r9\n\t" - "movq (3*96)+48(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r10\n\t" - "movq (3*96)+56(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r11\n\t" - "cmpb $5, %%r13b\n\t" - "movq (4*96)+0(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rax\n\t" - "movq (4*96)+8(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rbx\n\t" - "movq (4*96)+16(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rcx\n\t" - "movq (4*96)+24(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rdx\n\t" - "movq (4*96)+32(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r8\n\t" - "movq (4*96)+40(%[base]), %%r12\n\t" 
- "cmoveq %%r12, %%r9\n\t" - "movq (4*96)+48(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r10\n\t" - "movq (4*96)+56(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r11\n\t" - "cmpb $6, %%r13b\n\t" - "movq (5*96)+0(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rax\n\t" - "movq (5*96)+8(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rbx\n\t" - "movq (5*96)+16(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rcx\n\t" - "movq (5*96)+24(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rdx\n\t" - "movq (5*96)+32(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r8\n\t" - "movq (5*96)+40(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r9\n\t" - "movq (5*96)+48(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r10\n\t" - "movq (5*96)+56(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r11\n\t" - "cmpb $7, %%r13b\n\t" - "movq (6*96)+0(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rax\n\t" - "movq (6*96)+8(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rbx\n\t" - "movq (6*96)+16(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rcx\n\t" - "movq (6*96)+24(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rdx\n\t" - "movq (6*96)+32(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r8\n\t" - "movq (6*96)+40(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r9\n\t" - "movq (6*96)+48(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r10\n\t" - "movq (6*96)+56(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r11\n\t" - "cmpb $8, %%r13b\n\t" - "movq (7*96)+0(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rax\n\t" - "movq (7*96)+8(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rbx\n\t" - "movq (7*96)+16(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rcx\n\t" - "movq (7*96)+24(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rdx\n\t" - "movq (7*96)+32(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r8\n\t" - "movq (7*96)+40(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r9\n\t" - "movq (7*96)+48(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r10\n\t" - "movq (7*96)+56(%[base]), %%r12\n\t" - "cmoveq %%r12, %%r11\n\t" - - "cmpb $0, %[b]\n\t" - "movq %%rax, %%r12\n\t" - "cmovlq %%r8, %%rax\n\t" - "cmovlq %%r12, %%r8\n\t" - "movq %%rbx, %%r12\n\t" - "cmovlq %%r9, %%rbx\n\t" - "cmovlq 
%%r12, %%r9\n\t" - "movq %%rcx, %%r12\n\t" - "cmovlq %%r10, %%rcx\n\t" - "cmovlq %%r12, %%r10\n\t" - "movq %%rdx, %%r12\n\t" - "cmovlq %%r11, %%rdx\n\t" - "cmovlq %%r12, %%r11\n\t" - - "movq %%rax, 0(%[r])\n\t" - "movq %%rbx, 8(%[r])\n\t" - "movq %%rcx, 16(%[r])\n\t" - "movq %%rdx, 24(%[r])\n\t" - "movq %%r8 , 32(%[r])\n\t" - "movq %%r9 , 40(%[r])\n\t" - "movq %%r10, 48(%[r])\n\t" - "movq %%r11, 56(%[r])\n\t" - - "xorq %%rax, %%rax\n\t" - "xorq %%rbx, %%rbx\n\t" - "xorq %%rcx, %%rcx\n\t" - "xorq %%rdx, %%rdx\n\t" - - "cmpb $1, %%r13b\n\t" - "movq (0*96)+64(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rax\n\t" - "movq (0*96)+72(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rbx\n\t" - "movq (0*96)+80(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rcx\n\t" - "movq (0*96)+88(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rdx\n\t" - "cmpb $2, %%r13b\n\t" - "movq (1*96)+64(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rax\n\t" - "movq (1*96)+72(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rbx\n\t" - "movq (1*96)+80(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rcx\n\t" - "movq (1*96)+88(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rdx\n\t" - "cmpb $3, %%r13b\n\t" - "movq (2*96)+64(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rax\n\t" - "movq (2*96)+72(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rbx\n\t" - "movq (2*96)+80(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rcx\n\t" - "movq (2*96)+88(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rdx\n\t" - "cmpb $4, %%r13b\n\t" - "movq (3*96)+64(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rax\n\t" - "movq (3*96)+72(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rbx\n\t" - "movq (3*96)+80(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rcx\n\t" - "movq (3*96)+88(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rdx\n\t" - "cmpb $5, %%r13b\n\t" - "movq (4*96)+64(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rax\n\t" - "movq (4*96)+72(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rbx\n\t" - "movq (4*96)+80(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rcx\n\t" - "movq (4*96)+88(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rdx\n\t" - "cmpb $6, 
%%r13b\n\t" - "movq (5*96)+64(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rax\n\t" - "movq (5*96)+72(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rbx\n\t" - "movq (5*96)+80(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rcx\n\t" - "movq (5*96)+88(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rdx\n\t" - "cmpb $7, %%r13b\n\t" - "movq (6*96)+64(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rax\n\t" - "movq (6*96)+72(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rbx\n\t" - "movq (6*96)+80(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rcx\n\t" - "movq (6*96)+88(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rdx\n\t" - "cmpb $8, %%r13b\n\t" - "movq (7*96)+64(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rax\n\t" - "movq (7*96)+72(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rbx\n\t" - "movq (7*96)+80(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rcx\n\t" - "movq (7*96)+88(%[base]), %%r12\n\t" - "cmoveq %%r12, %%rdx\n\t" - - "movq $-19, %%r8\n\t" - "movq $-1, %%r9\n\t" - "movq $-1, %%r10\n\t" - "movq $0x7fffffffffffffff, %%r11\n\t" - "subq %%rax, %%r8\n\t" - "sbbq %%rbx, %%r9\n\t" - "sbbq %%rcx, %%r10\n\t" - "sbbq %%rdx, %%r11\n\t" - "cmpb $0, %[b]\n\t" - "cmovlq %%r8, %%rax\n\t" - "cmovlq %%r9, %%rbx\n\t" - "cmovlq %%r10, %%rcx\n\t" - "cmovlq %%r11, %%rdx\n\t" - - "movq %%rax, 64(%[r])\n\t" - "movq %%rbx, 72(%[r])\n\t" - "movq %%rcx, 80(%[r])\n\t" - "movq %%rdx, 88(%[r])\n\t" - : - : [r] "r" (r), [base] "r" (base), [b] "r" (b) - : "rax", "rbx", "rcx", "rdx", "r8", "r9", "r10", "r11", "r12", "memory", - "r13" - ); -} - diff --git a/wolfcrypt/src/ge_operations.c b/wolfcrypt/src/ge_operations.c index 134bafdd8..f181d43b8 100644 --- a/wolfcrypt/src/ge_operations.c +++ b/wolfcrypt/src/ge_operations.c @@ -981,1047 +981,1047 @@ static WC_INLINE void cmov(ge_precomp *t,const ge_precomp *u,unsigned char b, static const ge_precomp base[64][8] = { { { - { 0x2fbc93c6f58c3b85, 0xcf932dc6fb8c0e19, 0x270b4898643d42c2, 0x07cf9d3a33d4ba65 }, - { 0x9d103905d740913e, 0xfd399f05d140beb3, 0xa5c18434688f8a09, 0x44fd2f9298f81267 }, - { 0xdbbd15674b6fbb59, 
0x41e13f00eea2a5ea, 0xcdd49d1cc957c6fa, 0x4f0ebe1faf16ecca } + { 0x2fbc93c6f58c3b85, -0x306cd2390473f1e7, 0x270b4898643d42c2, 0x07cf9d3a33d4ba65 }, + { -0x62efc6fa28bf6ec2, -0x02c660fa2ebf414d, -0x5a3e7bcb977075f7, 0x44fd2f9298f81267 }, + { -0x2442ea98b49044a7, 0x41e13f00eea2a5ea, -0x322b62e336a83906, 0x4f0ebe1faf16ecca } }, { - { 0x9224e7fc933c71d7, 0x9f469d967a0ff5b5, 0x5aa69a65e1d60702, 0x590c063fa87d2e2e }, - { 0x8a99a56042b4d5a8, 0x8f2b810c4e60acf6, 0xe09e236bb16e37aa, 0x6bb595a669c92555 }, - { 0x6e347eaadad36802, 0xbaf3599383ee4805, 0x3bcabe10e6076826, 0x49314f0a165ed1b8 } + { -0x6ddb18036cc38e29, -0x60b9626985f00a4b, 0x5aa69a65e1d60702, 0x590c063fa87d2e2e }, + { -0x75665a9fbd4b2a58, -0x70d47ef3b19f530a, -0x1f61dc944e91c856, 0x6bb595a669c92555 }, + { 0x6e347eaadad36802, -0x450ca66c7c11b7fb, 0x3bcabe10e6076826, 0x49314f0a165ed1b8 } }, { - { 0xaf25b0a84cee9730, 0x025a8430e8864b8a, 0xc11b50029f016732, 0x7a164e1b9a80f8f4 }, - { 0x56611fe8a4fcd265, 0x3bd353fde5c1ba7d, 0x8131f31a214bd6bd, 0x2ab91587555bda62 }, - { 0x9bf211f4f1674834, 0xb84e6b17f62df895, 0xd7de6f075b722a4e, 0x549a04b963bb2a21 } + { -0x50da4f57b31168d0, 0x025a8430e8864b8a, -0x3ee4affd60fe98ce, 0x7a164e1b9a80f8f4 }, + { 0x56611fe8a4fcd265, 0x3bd353fde5c1ba7d, -0x7ece0ce5deb42943, 0x2ab91587555bda62 }, + { -0x640dee0b0e98b7cc, -0x47b194e809d2076b, -0x282190f8a48dd5b2, 0x549a04b963bb2a21 } }, { - { 0x287351b98efc099f, 0x6765c6f47dfd2538, 0xca348d3dfb0a9265, 0x680e910321e58727 }, - { 0x95fe050a056818bf, 0x327e89715660faa9, 0xc3e8e3cd06a05073, 0x27933f4c7445a49a }, - { 0xbf1e45ece51426b0, 0xe32bc63d6dba0f94, 0xe42974d58cf852c0, 0x44f079b1b0e64c18 } + { 0x287351b98efc099f, 0x6765c6f47dfd2538, -0x35cb72c204f56d9b, 0x680e910321e58727 }, + { -0x6a01faf5fa97e741, 0x327e89715660faa9, -0x3c171c32f95faf8d, 0x27933f4c7445a49a }, + { -0x40e1ba131aebd950, -0x1cd439c29245f06c, -0x1bd68b2a7307ad40, 0x44f079b1b0e64c18 } }, { - { 0xa212bc4408a5bb33, 0x8d5048c3c75eed02, 0xdd1beb0c5abfec44, 0x2945ccf146e206eb }, - { 
0x7f9182c3a447d6ba, 0xd50014d14b2729b7, 0xe33cf11cb864a087, 0x154a7e73eb1b55f3 }, - { 0xc832a179e7d003b3, 0x5f729d0a00124d7e, 0x62c1d4a10e6d8ff3, 0x68b8ac5938b27a98 } + { -0x5ded43bbf75a44cd, -0x72afb73c38a112fe, -0x22e414f3a54013bc, 0x2945ccf146e206eb }, + { 0x7f9182c3a447d6ba, -0x2affeb2eb4d8d649, -0x1cc30ee3479b5f79, 0x154a7e73eb1b55f3 }, + { -0x37cd5e86182ffc4d, 0x5f729d0a00124d7e, 0x62c1d4a10e6d8ff3, 0x68b8ac5938b27a98 } }, { - { 0x3a0ceeeb77157131, 0x9b27158900c8af88, 0x8065b668da59a736, 0x51e57bb6a2cc38bd }, - { 0x499806b67b7d8ca4, 0x575be28427d22739, 0xbb085ce7204553b9, 0x38b64c41ae417884 }, - { 0x8f9dad91689de3a4, 0x175f2428f8fb9137, 0x050ab5329fcfb988, 0x7865dfa21354c09f } + { 0x3a0ceeeb77157131, -0x64d8ea76ff375078, -0x7f9a499725a658ca, 0x51e57bb6a2cc38bd }, + { 0x499806b67b7d8ca4, 0x575be28427d22739, -0x44f7a318dfbaac47, 0x38b64c41ae417884 }, + { -0x7062526e97621c5c, 0x175f2428f8fb9137, 0x050ab5329fcfb988, 0x7865dfa21354c09f } }, { { 0x6b1a5cd0944ea3bf, 0x7470353ab39dc0d2, 0x71b2528228542e49, 0x461bea69283c927e }, - { 0xba6f2c9aaa3221b1, 0x6ca021533bba23a7, 0x9dea764f92192c3a, 0x1d6edd5d2e5317e0 }, - { 0x217a8aacab0fda36, 0xa528c6543d3549c8, 0x37d05b8b13ab7568, 0x233cef623a2cbc37 } + { -0x4590d36555cdde4f, 0x6ca021533bba23a7, -0x621589b06de6d3c6, 0x1d6edd5d2e5317e0 }, + { 0x217a8aacab0fda36, -0x5ad739abc2cab638, 0x37d05b8b13ab7568, 0x233cef623a2cbc37 } }, { - { 0x59b7596604dd3e8f, 0x6cb30377e288702c, 0xb1339c665ed9c323, 0x0915e76061bce52f }, - { 0xe2a75dedf39234d9, 0x963d7680e1b558f9, 0x2c2741ac6e3c23fb, 0x3a9024a1320e01c3 }, - { 0xdf7de835a834a37e, 0x8be19cda689857ea, 0x2c1185367167b326, 0x589eb3d9dbefd5c2 } + { 0x59b7596604dd3e8f, 0x6cb30377e288702c, -0x4ecc6399a1263cdd, 0x0915e76061bce52f }, + { -0x1d58a2120c6dcb27, -0x69c2897f1e4aa707, 0x2c2741ac6e3c23fb, 0x3a9024a1320e01c3 }, + { -0x208217ca57cb5c82, -0x741e63259767a816, 0x2c1185367167b326, 0x589eb3d9dbefd5c2 } }, }, { { - { 0x322d04a52d9021f6, 0xb9c19f3375c6bf9c, 0x587a3a4342d20b09, 
0x143b1cf8aa64fe61 }, - { 0x7ec851ca553e2df3, 0xa71284cba64878b3, 0xe6b5e4193288d1e7, 0x4cf210ec5a9a8883 }, - { 0x9f867c7d968acaab, 0x5f54258e27092729, 0xd0a7d34bea180975, 0x21b546a3374126e1 } + { 0x322d04a52d9021f6, -0x463e60cc8a394064, 0x587a3a4342d20b09, 0x143b1cf8aa64fe61 }, + { 0x7ec851ca553e2df3, -0x58ed7b3459b7874d, -0x194a1be6cd772e19, 0x4cf210ec5a9a8883 }, + { -0x6079838269753555, 0x5f54258e27092729, -0x2f582cb415e7f68b, 0x21b546a3374126e1 } }, { - { 0x490a7a45d185218f, 0x9a15377846049335, 0x0060ea09cc31e1f6, 0x7e041577f86ee965 }, - { 0xa94ff858a2888343, 0xce0ed4565313ed3c, 0xf55c3dcfb5bf34fa, 0x0a653ca5c9eab371 }, - { 0x66b2a496ce5b67f3, 0xff5492d8bd569796, 0x503cec294a592cd0, 0x566943650813acb2 } + { 0x490a7a45d185218f, -0x65eac887b9fb6ccb, 0x0060ea09cc31e1f6, 0x7e041577f86ee965 }, + { -0x56b007a75d777cbd, -0x31f12ba9acec12c4, -0x0aa3c2304a40cb06, 0x0a653ca5c9eab371 }, + { 0x66b2a496ce5b67f3, -0x00ab6d2742a9686a, 0x503cec294a592cd0, 0x566943650813acb2 } }, { - { 0x5672f9eb1dabb69d, 0xba70b535afe853fc, 0x47ac0f752796d66d, 0x32a5351794117275 }, - { 0xb818db0c26620798, 0x5d5c31d9606e354a, 0x0982fa4f00a8cdc7, 0x17e12bcd4653e2d4 }, - { 0xd3a644a6df648437, 0x703b6559880fbfdd, 0xcb852540ad3a1aa5, 0x0900b3f78e4c6468 } + { 0x5672f9eb1dabb69d, -0x458f4aca5017ac04, 0x47ac0f752796d66d, 0x32a5351794117275 }, + { -0x47e724f3d99df868, 0x5d5c31d9606e354a, 0x0982fa4f00a8cdc7, 0x17e12bcd4653e2d4 }, + { -0x2c59bb59209b7bc9, 0x703b6559880fbfdd, -0x347adabf52c5e55b, 0x0900b3f78e4c6468 } }, { - { 0xed280fbec816ad31, 0x52d9595bd8e6efe3, 0x0fe71772f6c623f5, 0x4314030b051e293c }, - { 0x0a851b9f679d651b, 0xe108cb61033342f2, 0xd601f57fe88b30a3, 0x371f3acaed2dd714 }, - { 0xd560005efbf0bcad, 0x8eb70f2ed1870c5e, 0x201f9033d084e6a0, 0x4c3a5ae1ce7b6670 } + { -0x12d7f04137e952cf, 0x52d9595bd8e6efe3, 0x0fe71772f6c623f5, 0x4314030b051e293c }, + { 0x0a851b9f679d651b, -0x1ef7349efcccbd0e, -0x29fe0a801774cf5d, 0x371f3acaed2dd714 }, + { -0x2a9fffa1040f4353, -0x7148f0d12e78f3a2, 
0x201f9033d084e6a0, 0x4c3a5ae1ce7b6670 } }, { - { 0xbaf875e4c93da0dd, 0xb93282a771b9294d, 0x80d63fb7f4c6c460, 0x6de9c73dea66c181 }, - { 0x4138a434dcb8fa95, 0x870cf67d6c96840b, 0xde388574297be82c, 0x7c814db27262a55a }, - { 0x478904d5a04df8f2, 0xfafbae4ab10142d3, 0xf6c8ac63555d0998, 0x5aac4a412f90b104 } + { -0x45078a1b36c25f23, -0x46cd7d588e46d6b3, -0x7f29c0480b393ba0, 0x6de9c73dea66c181 }, + { 0x4138a434dcb8fa95, -0x78f3098293697bf5, -0x21c77a8bd68417d4, 0x7c814db27262a55a }, + { 0x478904d5a04df8f2, -0x050451b54efebd2d, -0x0937539caaa2f668, 0x5aac4a412f90b104 } }, { - { 0x603a0d0abd7f5134, 0x8089c932e1d3ae46, 0xdf2591398798bd63, 0x1c145cd274ba0235 }, - { 0xc64f326b3ac92908, 0x5551b282e663e1e0, 0x476b35f54a1a4b83, 0x1b9da3fe189f68c2 }, - { 0x32e8386475f3d743, 0x365b8baf6ae5d9ef, 0x825238b6385b681e, 0x234929c1167d65e1 } + { 0x603a0d0abd7f5134, -0x7f7636cd1e2c51ba, -0x20da6ec67867429d, 0x1c145cd274ba0235 }, + { -0x39b0cd94c536d6f8, 0x5551b282e663e1e0, 0x476b35f54a1a4b83, 0x1b9da3fe189f68c2 }, + { 0x32e8386475f3d743, 0x365b8baf6ae5d9ef, -0x7dadc749c7a497e2, 0x234929c1167d65e1 } }, { - { 0x48145cc21d099fcf, 0x4535c192cc28d7e5, 0x80e7c1e548247e01, 0x4a5f28743b2973ee }, - { 0x984decaba077ade8, 0x383f77ad19eb389d, 0xc7ec6b7e2954d794, 0x59c77b3aeb7c3a7a }, - { 0xd3add725225ccf62, 0x911a3381b2152c5d, 0xd8b39fad5b08f87d, 0x6f05606b4799fe3b } + { 0x48145cc21d099fcf, 0x4535c192cc28d7e5, -0x7f183e1ab7db81ff, 0x4a5f28743b2973ee }, + { -0x67b213545f885218, 0x383f77ad19eb389d, -0x38139481d6ab286c, 0x59c77b3aeb7c3a7a }, + { -0x2c5228dadda3309e, -0x6ee5cc7e4dead3a3, -0x274c6052a4f70783, 0x6f05606b4799fe3b } }, { - { 0x5b433149f91b6483, 0xadb5dc655a2cbf62, 0x87fa8412632827b3, 0x60895e91ab49f8d8 }, - { 0x9ffe9e92177ba962, 0x98aee71d0de5cae1, 0x3ff4ae942d831044, 0x714de12e58533ac8 }, - { 0xe9ecf2ed0cf86c18, 0xb46d06120735dfd4, 0xbc9da09804b96be7, 0x73e2e62fd96dc26b } + { 0x5b433149f91b6483, -0x524a239aa5d3409e, -0x78057bed9cd7d84d, 0x60895e91ab49f8d8 }, + { -0x6001616de884569e, 
-0x675118e2f21a351f, 0x3ff4ae942d831044, 0x714de12e58533ac8 }, + { -0x16130d12f30793e8, -0x4b92f9edf8ca202c, -0x43625f67fb469419, 0x73e2e62fd96dc26b } }, }, { { { 0x2eccdd0e632f9c1d, 0x51d0b69676893115, 0x52dfb76ba8637a58, 0x6dd37d49a00eef39 }, - { 0xed5b635449aa515e, 0xa865c49f0bc6823a, 0x850c1fe95b42d1c4, 0x30d76d6f03d315b9 }, - { 0x6c4444172106e4c7, 0xfb53d680928d7f69, 0xb4739ea4694d3f26, 0x10c697112e864bb0 } + { -0x12a49cabb655aea2, -0x579a3b60f4397dc6, -0x7af3e016a4bd2e3c, 0x30d76d6f03d315b9 }, + { 0x6c4444172106e4c7, -0x04ac297f6d728097, -0x4b8c615b96b2c0da, 0x10c697112e864bb0 } }, { { 0x0ca62aa08358c805, 0x6a3d4ae37a204247, 0x7464d3a63b11eddc, 0x03bf9baf550806ef }, { 0x6493c4277dbe5fde, 0x265d4fad19ad7ea2, 0x0e00dfc846304590, 0x25e61cabed66fe09 }, - { 0x3f13e128cc586604, 0x6f5873ecb459747e, 0xa0b63dedcc1268f5, 0x566d78634586e22c } + { 0x3f13e128cc586604, 0x6f5873ecb459747e, -0x5f49c21233ed970b, 0x566d78634586e22c } }, { - { 0xa1054285c65a2fd0, 0x6c64112af31667c3, 0x680ae240731aee58, 0x14fba5f34793b22a }, - { 0x1637a49f9cc10834, 0xbc8e56d5a89bc451, 0x1cb5ec0f7f7fd2db, 0x33975bca5ecc35d9 }, + { -0x5efabd7a39a5d030, 0x6c64112af31667c3, 0x680ae240731aee58, 0x14fba5f34793b22a }, + { 0x1637a49f9cc10834, -0x4371a92a57643baf, 0x1cb5ec0f7f7fd2db, 0x33975bca5ecc35d9 }, { 0x3cd746166985f7d4, 0x593e5e84c9c80057, 0x2fc3f2b67b61131e, 0x14829cea83fc526c } }, { - { 0x21e70b2f4e71ecb8, 0xe656ddb940a477e3, 0xbf6556cece1d4f80, 0x05fc3bc4535d7b7e }, - { 0xff437b8497dd95c2, 0x6c744e30aa4eb5a7, 0x9e0c5d613c85e88b, 0x2fd9c71e5f758173 }, + { 0x21e70b2f4e71ecb8, -0x19a92246bf5b881d, -0x409aa93131e2b080, 0x05fc3bc4535d7b7e }, + { -0x00bc847b68226a3e, 0x6c744e30aa4eb5a7, -0x61f3a29ec37a1775, 0x2fd9c71e5f758173 }, { 0x24b8b3ae52afdedd, 0x3495638ced3b30cf, 0x33a4bc83a9be8195, 0x373767475c651f04 } }, { - { 0x634095cb14246590, 0xef12144016c15535, 0x9e38140c8910bc60, 0x6bf5905730907c8c }, - { 0x2fba99fd40d1add9, 0xb307166f96f4d027, 0x4363f05215f03bae, 0x1fbea56c3b18f999 }, + { 
0x634095cb14246590, -0x10edebbfe93eaacb, -0x61c7ebf376ef43a0, 0x6bf5905730907c8c }, + { 0x2fba99fd40d1add9, -0x4cf8e990690b2fd9, 0x4363f05215f03bae, 0x1fbea56c3b18f999 }, { 0x0fa778f1e1415b8a, 0x06409ff7bac3a77e, 0x6f52d7b89aa29a50, 0x02521cf67a635a56 } }, { - { 0xb1146720772f5ee4, 0xe8f894b196079ace, 0x4af8224d00ac824a, 0x001753d9f7cd6cc4 }, - { 0x513fee0b0a9d5294, 0x8f98e75c0fdf5a66, 0xd4618688bfe107ce, 0x3fa00a7e71382ced }, - { 0x3c69232d963ddb34, 0x1dde87dab4973858, 0xaad7d1f9a091f285, 0x12b5fe2fa048edb6 } + { -0x4eeb98df88d0a11c, -0x17076b4e69f86532, 0x4af8224d00ac824a, 0x001753d9f7cd6cc4 }, + { 0x513fee0b0a9d5294, -0x706718a3f020a59a, -0x2b9e7977401ef832, 0x3fa00a7e71382ced }, + { 0x3c69232d963ddb34, 0x1dde87dab4973858, -0x55282e065f6e0d7b, 0x12b5fe2fa048edb6 } }, { - { 0xdf2b7c26ad6f1e92, 0x4b66d323504b8913, 0x8c409dc0751c8bc3, 0x6f7e93c20796c7b8 }, - { 0x71f0fbc496fce34d, 0x73b9826badf35bed, 0xd2047261ff28c561, 0x749b76f96fb1206f }, - { 0x1f5af604aea6ae05, 0xc12351f1bee49c99, 0x61a808b5eeff6b66, 0x0fcec10f01e02151 } + { -0x20d483d95290e16e, 0x4b66d323504b8913, -0x73bf623f8ae3743d, 0x6f7e93c20796c7b8 }, + { 0x71f0fbc496fce34d, 0x73b9826badf35bed, -0x2dfb8d9e00d73a9f, 0x749b76f96fb1206f }, + { 0x1f5af604aea6ae05, -0x3edcae0e411b6367, 0x61a808b5eeff6b66, 0x0fcec10f01e02151 } }, { { 0x3df2d29dc4244e45, 0x2b020e7493d8de0a, 0x6cc8067e820c214d, 0x413779166feab90a }, { 0x644d58a649fe1e44, 0x21fcaea231ad777e, 0x02441c5a887fd0d2, 0x4901aa7183c511f3 }, - { 0x08b1b7548c1af8f0, 0xce0f7a7c246299b4, 0xf760b0f91e06d939, 0x41bb887b726d1213 } + { 0x08b1b7548c1af8f0, -0x31f08583db9d664c, -0x089f4f06e1f926c7, 0x41bb887b726d1213 } }, }, { { - { 0x97d980e0aa39f7d2, 0x35d0384252c6b51c, 0x7d43f49307cd55aa, 0x56bd36cfb78ac362 }, - { 0x9267806c567c49d8, 0x066d04ccca791e6a, 0xa69f5645e3cc394b, 0x5c95b686a0788cd2 }, - { 0x2ac519c10d14a954, 0xeaf474b494b5fa90, 0xe6af8382a9f87a5a, 0x0dea6db1879be094 } + { -0x68267f1f55c6082e, 0x35d0384252c6b51c, 0x7d43f49307cd55aa, 0x56bd36cfb78ac362 }, 
+ { -0x6d987f93a983b628, 0x066d04ccca791e6a, -0x5960a9ba1c33c6b5, 0x5c95b686a0788cd2 }, + { 0x2ac519c10d14a954, -0x150b8b4b6b4a0570, -0x19507c7d560785a6, 0x0dea6db1879be094 } }, { { 0x15baeb74d6a8797a, 0x7ef55cf1fac41732, 0x29001f5a3c8b05c5, 0x0ad7cc8752eaccfb }, - { 0xaa66bf547344e5ab, 0xda1258888f1b4309, 0x5e87d2b3fd564b2f, 0x5b2c78885483b1dd }, - { 0x52151362793408cf, 0xeb0f170319963d94, 0xa833b2fa883d9466, 0x093a7fa775003c78 } + { -0x559940ab8cbb1a55, -0x25eda77770e4bcf7, 0x5e87d2b3fd564b2f, 0x5b2c78885483b1dd }, + { 0x52151362793408cf, -0x14f0e8fce669c26c, -0x57cc4d0577c26b9a, 0x093a7fa775003c78 } }, { - { 0xb8e9604460a91286, 0x7f3fd8047778d3de, 0x67d01e31bf8a5e2d, 0x7b038a06c27b653e }, - { 0xe5107de63a16d7be, 0xa377ffdc9af332cf, 0x70d5bf18440b677f, 0x6a252b19a4a31403 }, - { 0x9ed919d5d36990f3, 0x5213aebbdb4eb9f2, 0xc708ea054cb99135, 0x58ded57f72260e56 } + { -0x47169fbb9f56ed7a, 0x7f3fd8047778d3de, 0x67d01e31bf8a5e2d, 0x7b038a06c27b653e }, + { -0x1aef8219c5e92842, -0x5c880023650ccd31, 0x70d5bf18440b677f, 0x6a252b19a4a31403 }, + { -0x6126e62a2c966f0d, 0x5213aebbdb4eb9f2, -0x38f715fab3466ecb, 0x58ded57f72260e56 } }, { - { 0xda6d53265b0fd48b, 0x8960823193bfa988, 0xd78ac93261d57e28, 0x79f2942d3a5c8143 }, - { 0x78e79dade9413d77, 0xf257f9d59729e67d, 0x59db910ee37aa7e6, 0x6aa11b5bbb9e039c }, - { 0x97da2f25b6c88de9, 0x251ba7eaacf20169, 0x09b44f87ef4eb4e4, 0x7d90ab1bbc6a7da5 } + { -0x2592acd9a4f02b75, -0x769f7dce6c405678, -0x287536cd9e2a81d8, 0x79f2942d3a5c8143 }, + { 0x78e79dade9413d77, -0x0da8062a68d61983, 0x59db910ee37aa7e6, 0x6aa11b5bbb9e039c }, + { -0x6825d0da49377217, 0x251ba7eaacf20169, 0x09b44f87ef4eb4e4, 0x7d90ab1bbc6a7da5 } }, { { 0x1a07a3f496b3c397, 0x11ceaa188f4e2532, 0x7d9498d5a7751bf0, 0x19ed161f508dd8a0 }, - { 0x9acca683a7016bfe, 0x90505f4df2c50b6d, 0x6b610d5fcce435aa, 0x19a10d446198ff96 }, - { 0x560a2cd687dce6ca, 0x7f3568c48664cf4d, 0x8741e95222803a38, 0x483bdab1595653fc } + { -0x6533597c58fe9402, -0x6fafa0b20d3af493, 0x6b610d5fcce435aa, 
0x19a10d446198ff96 }, + { 0x560a2cd687dce6ca, 0x7f3568c48664cf4d, -0x78be16addd7fc5c8, 0x483bdab1595653fc } }, { - { 0xd6cf4d0ab4da80f6, 0x82483e45f8307fe0, 0x05005269ae6f9da4, 0x1c7052909cf7877a }, - { 0xfa780f148734fa49, 0x106f0b70360534e0, 0x2210776fe3e307bd, 0x3286c109dde6a0fe }, + { -0x2930b2f54b257f0a, -0x7db7c1ba07cf8020, 0x05005269ae6f9da4, 0x1c7052909cf7877a }, + { -0x0587f0eb78cb05b7, 0x106f0b70360534e0, 0x2210776fe3e307bd, 0x3286c109dde6a0fe }, { 0x32ee7de2874e98d4, 0x14c362e9b97e0c60, 0x5781dcde6a60a38a, 0x217dd5eaaa7aa840 } }, { - { 0x8bdf1fb9be8c0ec8, 0x00bae7f8e30a0282, 0x4963991dad6c4f6c, 0x07058a6e5df6f60a }, - { 0x9db7c4d0248e1eb0, 0xe07697e14d74bf52, 0x1e6a9b173c562354, 0x7fa7c21f795a4965 }, - { 0xe9eb02c4db31f67f, 0xed25fd8910bcfb2b, 0x46c8131f5c5cddb4, 0x33b21c13a0cb9bce } + { -0x7420e0464173f138, 0x00bae7f8e30a0282, 0x4963991dad6c4f6c, 0x07058a6e5df6f60a }, + { -0x62483b2fdb71e150, -0x1f89681eb28b40ae, 0x1e6a9b173c562354, 0x7fa7c21f795a4965 }, + { -0x1614fd3b24ce0981, -0x12da0276ef4304d5, 0x46c8131f5c5cddb4, 0x33b21c13a0cb9bce } }, { - { 0x9aafb9b05ee38c5b, 0xbf9d2d4e071a13c7, 0x8eee6e6de933290a, 0x1c3bab17ae109717 }, - { 0x360692f8087d8e31, 0xf4dcc637d27163f7, 0x25a4e62065ea5963, 0x659bf72e5ac160d9 }, + { -0x6550464fa11c73a5, -0x4062d2b1f8e5ec39, -0x7111919216ccd6f6, 0x1c3bab17ae109717 }, + { 0x360692f8087d8e31, -0x0b2339c82d8e9c09, 0x25a4e62065ea5963, 0x659bf72e5ac160d9 }, { 0x1c9ab216c7cab7b0, 0x7d65d37407bbc3cc, 0x52744750504a58d5, 0x09f2606b131a2990 } }, }, { { - { 0x7e234c597c6691ae, 0x64889d3d0a85b4c8, 0xdae2c90c354afae7, 0x0a871e070c6a9e1d }, + { 0x7e234c597c6691ae, 0x64889d3d0a85b4c8, -0x251d36f3cab50519, 0x0a871e070c6a9e1d }, { 0x40e87d44744346be, 0x1d48dad415b52b25, 0x7c3a8a18a13b603e, 0x4eb728c12fcdbdf7 }, { 0x3301b5994bbc8989, 0x736bae3a5bdd4260, 0x0d61ade219d59e3c, 0x3ee7300f2685d464 } }, { - { 0x43fa7947841e7518, 0xe5c6fa59639c46d7, 0xa1065e1de3052b74, 0x7d47c6a2cfb89030 }, - { 0xf5d255e49e7dd6b7, 0x8016115c610b1eac, 
0x3c99975d92e187ca, 0x13815762979125c2 }, - { 0x3fdad0148ef0d6e0, 0x9d3e749a91546f3c, 0x71ec621026bb8157, 0x148cf58d34c9ec80 } + { 0x43fa7947841e7518, -0x1a3905a69c63b929, -0x5ef9a1e21cfad48c, 0x7d47c6a2cfb89030 }, + { -0x0a2daa1b61822949, -0x7fe9eea39ef4e154, 0x3c99975d92e187ca, 0x13815762979125c2 }, + { 0x3fdad0148ef0d6e0, -0x62c18b656eab90c4, 0x71ec621026bb8157, 0x148cf58d34c9ec80 } }, { - { 0xe2572f7d9ae4756d, 0x56c345bb88f3487f, 0x9fd10b6d6960a88d, 0x278febad4eaea1b9 }, + { -0x1da8d082651b8a93, 0x56c345bb88f3487f, -0x602ef492969f5773, 0x278febad4eaea1b9 }, { 0x46a492f67934f027, 0x469984bef6840aa9, 0x5ca1bc2a89611854, 0x3ff2fa1ebd5dbbd4 }, - { 0xb1aa681f8c933966, 0x8c21949c20290c98, 0x39115291219d3c52, 0x4104dd02fe9c677b } + { -0x4e5597e0736cc69a, -0x73de6b63dfd6f368, 0x39115291219d3c52, 0x4104dd02fe9c677b } }, { - { 0x81214e06db096ab8, 0x21a8b6c90ce44f35, 0x6524c12a409e2af5, 0x0165b5a48efca481 }, - { 0x72b2bf5e1124422a, 0xa1fa0c3398a33ab5, 0x94cb6101fa52b666, 0x2c863b00afaf53d5 }, - { 0xf190a474a0846a76, 0x12eff984cd2f7cc0, 0x695e290658aa2b8f, 0x591b67d9bffec8b8 } + { -0x7edeb1f924f69548, 0x21a8b6c90ce44f35, 0x6524c12a409e2af5, 0x0165b5a48efca481 }, + { 0x72b2bf5e1124422a, -0x5e05f3cc675cc54b, -0x6b349efe05ad499a, 0x2c863b00afaf53d5 }, + { -0x0e6f5b8b5f7b958a, 0x12eff984cd2f7cc0, 0x695e290658aa2b8f, 0x591b67d9bffec8b8 } }, { - { 0x99b9b3719f18b55d, 0xe465e5faa18c641e, 0x61081136c29f05ed, 0x489b4f867030128b }, + { -0x66464c8e60e74aa3, -0x1b9a1a055e739be2, 0x61081136c29f05ed, 0x489b4f867030128b }, { 0x312f0d1c80b49bfa, 0x5979515eabf3ec8a, 0x727033c09ef01c88, 0x3de02ec7ca8f7bcb }, - { 0xd232102d3aeb92ef, 0xe16253b46116a861, 0x3d7eabe7190baa24, 0x49f5fbba496cbebf } + { -0x2dcdefd2c5146d11, -0x1e9dac4b9ee9579f, 0x3d7eabe7190baa24, 0x49f5fbba496cbebf } }, { - { 0x155d628c1e9c572e, 0x8a4d86acc5884741, 0x91a352f6515763eb, 0x06a1a6c28867515b }, - { 0x30949a108a5bcfd4, 0xdc40dd70bc6473eb, 0x92c294c1307c0d1c, 0x5604a86dcbfa6e74 }, - { 0x7288d1d47c1764b6, 
0x72541140e0418b51, 0x9f031a6018acf6d1, 0x20989e89fe2742c6 } + { 0x155d628c1e9c572e, -0x75b279533a77b8bf, -0x6e5cad09aea89c15, 0x06a1a6c28867515b }, + { 0x30949a108a5bcfd4, -0x23bf228f439b8c15, -0x6d3d6b3ecf83f2e4, 0x5604a86dcbfa6e74 }, + { 0x7288d1d47c1764b6, 0x72541140e0418b51, -0x60fce59fe753092f, 0x20989e89fe2742c6 } }, { { 0x1674278b85eaec2e, 0x5621dc077acb2bdf, 0x640a4c1661cbf45a, 0x730b9950f70595d3 }, - { 0x499777fd3a2dcc7f, 0x32857c2ca54fd892, 0xa279d864d207e3a0, 0x0403ed1d0ca67e29 }, - { 0xc94b2d35874ec552, 0xc5e6c8cf98246f8d, 0xf7cb46fa16c035ce, 0x5bd7454308303dcc } + { 0x499777fd3a2dcc7f, 0x32857c2ca54fd892, -0x5d86279b2df81c60, 0x0403ed1d0ca67e29 }, + { -0x36b4d2ca78b13aae, -0x3a19373067db9073, -0x0834b905e93fca32, 0x5bd7454308303dcc } }, { - { 0x85c4932115e7792a, 0xc64c89a2bdcdddc9, 0x9d1e3da8ada3d762, 0x5bb7db123067f82c }, + { -0x7a3b6cdeea1886d6, -0x39b3765d42322237, -0x62e1c257525c289e, 0x5bb7db123067f82c }, { 0x7f9ad19528b24cc2, 0x7f6b54656335c181, 0x66b8b66e4fc07236, 0x133a78007380ad83 }, { 0x0961f467c6ca62be, 0x04ec21d6211952ee, 0x182360779bd54770, 0x740dca6d58f0e0d2 } }, }, { { - { 0x3906c72aed261ae5, 0x9ab68fd988e100f7, 0xf5e9059af3360197, 0x0e53dc78bf2b6d47 }, + { 0x3906c72aed261ae5, -0x65497026771eff09, -0x0a16fa650cc9fe69, 0x0e53dc78bf2b6d47 }, { 0x50b70bf5d3f0af0b, 0x4feaf48ae32e71f7, 0x60e84ed3a55bbd34, 0x00ed489b3f50d1ed }, - { 0xb90829bf7971877a, 0x5e4444636d17e631, 0x4d05c52e18276893, 0x27632d9a5a4a4af5 } + { -0x46f7d640868e7886, 0x5e4444636d17e631, 0x4d05c52e18276893, 0x27632d9a5a4a4af5 } }, { - { 0xa98285d187eaffdb, 0xa5b4fbbbd8d0a864, 0xb658f27f022663f7, 0x3bbc2b22d99ce282 }, - { 0xd11ff05154b260ce, 0xd86dc38e72f95270, 0x601fcd0d267cc138, 0x2b67916429e90ccd }, - { 0xb917c952583c0a58, 0x653ff9b80fe4c6f3, 0x9b0da7d7bcdf3c0c, 0x43a0eeb6ab54d60e } + { -0x567d7a2e78150025, -0x5a4b0444272f579c, -0x49a70d80fdd99c09, 0x3bbc2b22d99ce282 }, + { -0x2ee00faeab4d9f32, -0x27923c718d06ad90, 0x601fcd0d267cc138, 0x2b67916429e90ccd }, + { 
-0x46e836ada7c3f5a8, 0x653ff9b80fe4c6f3, -0x64f258284320c3f4, 0x43a0eeb6ab54d60e } }, { - { 0x3ac6322357875fe8, 0xd9d4f4ecf5fbcb8f, 0x8dee8493382bb620, 0x50c5eaa14c799fdc }, - { 0x396966a46d4a5487, 0xf811a18aac2bb3ba, 0x66e4685b5628b26b, 0x70a477029d929b92 }, - { 0xdd0edc8bd6f2fb3c, 0x54c63aa79cc7b7a0, 0xae0b032b2c8d9f1a, 0x6f9ce107602967fb } + { 0x3ac6322357875fe8, -0x262b0b130a043471, -0x72117b6cc7d449e0, 0x50c5eaa14c799fdc }, + { 0x396966a46d4a5487, -0x07ee5e7553d44c46, 0x66e4685b5628b26b, 0x70a477029d929b92 }, + { -0x22f12374290d04c4, 0x54c63aa79cc7b7a0, -0x51f4fcd4d37260e6, 0x6f9ce107602967fb } }, { - { 0x139693063520e0b5, 0x437fcf7c88ea03fe, 0xf7d4c40bd3c959bc, 0x699154d1f893ded9 }, - { 0xad1054b1cde1c22a, 0xc4a8e90248eb32df, 0x5f3e7b33accdc0ea, 0x72364713fc79963e }, - { 0x315d5c75b4b27526, 0xcccb842d0236daa5, 0x22f0c8a3345fee8e, 0x73975a617d39dbed } + { 0x139693063520e0b5, 0x437fcf7c88ea03fe, -0x082b3bf42c36a644, 0x699154d1f893ded9 }, + { -0x52efab4e321e3dd6, -0x3b5716fdb714cd21, 0x5f3e7b33accdc0ea, 0x72364713fc79963e }, + { 0x315d5c75b4b27526, -0x33347bd2fdc9255b, 0x22f0c8a3345fee8e, 0x73975a617d39dbed } }, { { 0x6f37f392f4433e46, 0x0e19b9a11f566b18, 0x220fb78a1fd1d662, 0x362a4258a381c94d }, - { 0xe4024df96375da10, 0x78d3251a1830c870, 0x902b1948658cd91c, 0x7e18b10b29b7438a }, - { 0x9071d9132b6beb2f, 0x0f26e9ad28418247, 0xeab91ec9bdec925d, 0x4be65bc8f48af2de } + { -0x1bfdb2069c8a25f0, 0x78d3251a1830c870, -0x6fd4e6b79a7326e4, 0x7e18b10b29b7438a }, + { -0x6f8e26ecd49414d1, 0x0f26e9ad28418247, -0x1546e13642136da3, 0x4be65bc8f48af2de } }, { - { 0x1d50fba257c26234, 0x7bd4823adeb0678b, 0xc2b0dc6ea6538af5, 0x5665eec6351da73e }, - { 0x78487feba36e7028, 0x5f3f13001dd8ce34, 0x934fb12d4b30c489, 0x056c244d397f0a2b }, - { 0xdb3ee00943bfb210, 0x4972018720800ac2, 0x26ab5d6173bd8667, 0x20b209c2ab204938 } + { 0x1d50fba257c26234, 0x7bd4823adeb0678b, -0x3d4f239159ac750b, 0x5665eec6351da73e }, + { 0x78487feba36e7028, 0x5f3f13001dd8ce34, -0x6cb04ed2b4cf3b77, 0x056c244d397f0a2b 
}, + { -0x24c11ff6bc404df0, 0x4972018720800ac2, 0x26ab5d6173bd8667, 0x20b209c2ab204938 } }, { { 0x1fcca94516bd3289, 0x448d65aa41420428, 0x59c3b7b216a55d62, 0x49992cc64e612cd8 }, - { 0x549e342ac07fb34b, 0x02d8220821373d93, 0xbc262d70acd1f567, 0x7a92c9fdfbcac784 }, - { 0x65bd1bea70f801de, 0x1befb7c0fe49e28a, 0xa86306cdb1b2ae4a, 0x3b7ac0cd265c2a09 } + { 0x549e342ac07fb34b, 0x02d8220821373d93, -0x43d9d28f532e0a99, 0x7a92c9fdfbcac784 }, + { 0x65bd1bea70f801de, 0x1befb7c0fe49e28a, -0x579cf9324e4d51b6, 0x3b7ac0cd265c2a09 } }, { - { 0xf0d54e4f22ed39a7, 0xa2aae91e5608150a, 0xf421b2e9eddae875, 0x31bc531d6b7de992 }, - { 0x822bee438c01bcec, 0x530cb525c0fbc73b, 0x48519034c1953fe9, 0x265cc261e09a0f5b }, - { 0xdf3d134da980f971, 0x7a4fb8d1221a22a7, 0x3df7d42035aad6d8, 0x2a14edcc6a1a125e } + { -0x0f2ab1b0dd12c659, -0x5d5516e1a9f7eaf6, -0x0bde4d161225178b, 0x31bc531d6b7de992 }, + { -0x7dd411bc73fe4314, 0x530cb525c0fbc73b, 0x48519034c1953fe9, 0x265cc261e09a0f5b }, + { -0x20c2ecb2567f068f, 0x7a4fb8d1221a22a7, 0x3df7d42035aad6d8, 0x2a14edcc6a1a125e } }, }, { { - { 0x231a8c570478433c, 0xb7b5270ec281439d, 0xdbaa99eae3d9079f, 0x2c03f5256c2b03d9 }, - { 0xdf48ee0752cfce4e, 0xc3fffaf306ec08b7, 0x05710b2ab95459c4, 0x161d25fa963ea38d }, + { 0x231a8c570478433c, -0x484ad8f13d7ebc63, -0x245566151c26f861, 0x2c03f5256c2b03d9 }, + { -0x20b711f8ad3031b2, -0x3c00050cf913f749, 0x05710b2ab95459c4, 0x161d25fa963ea38d }, { 0x790f18757b53a47d, 0x307b0130cf0c5879, 0x31903d77257ef7f9, 0x699468bdbd96bbaf } }, { - { 0xd8dd3de66aa91948, 0x485064c22fc0d2cc, 0x9b48246634fdea2f, 0x293e1c4e6c4a2e3a }, - { 0xbd1f2f46f4dafecf, 0x7cef0114a47fd6f7, 0xd31ffdda4a47b37f, 0x525219a473905785 }, - { 0x376e134b925112e1, 0x703778b5dca15da0, 0xb04589af461c3111, 0x5b605c447f032823 } + { -0x2722c2199556e6b8, 0x485064c22fc0d2cc, -0x64b7db99cb0215d1, 0x293e1c4e6c4a2e3a }, + { -0x42e0d0b90b250131, 0x7cef0114a47fd6f7, -0x2ce00225b5b84c81, 0x525219a473905785 }, + { 0x376e134b925112e1, 0x703778b5dca15da0, -0x4fba7650b9e3ceef, 
0x5b605c447f032823 } }, { - { 0x3be9fec6f0e7f04c, 0x866a579e75e34962, 0x5542ef161e1de61a, 0x2f12fef4cc5abdd5 }, - { 0xb965805920c47c89, 0xe7f0100c923b8fcc, 0x0001256502e2ef77, 0x24a76dcea8aeb3ee }, - { 0x0a4522b2dfc0c740, 0x10d06e7f40c9a407, 0xc6cf144178cff668, 0x5e607b2518a43790 } + { 0x3be9fec6f0e7f04c, -0x7995a8618a1cb69e, 0x5542ef161e1de61a, 0x2f12fef4cc5abdd5 }, + { -0x469a7fa6df3b8377, -0x180feff36dc47034, 0x0001256502e2ef77, 0x24a76dcea8aeb3ee }, + { 0x0a4522b2dfc0c740, 0x10d06e7f40c9a407, -0x3930ebbe87300998, 0x5e607b2518a43790 } }, { - { 0xa02c431ca596cf14, 0xe3c42d40aed3e400, 0xd24526802e0f26db, 0x201f33139e457068 }, - { 0x58b31d8f6cdf1818, 0x35cfa74fc36258a2, 0xe1b3ff4f66e61d6e, 0x5067acab6ccdd5f7 }, - { 0xfd527f6b08039d51, 0x18b14964017c0006, 0xd5220eb02e25a4a8, 0x397cba8862460375 } + { -0x5fd3bce35a6930ec, -0x1c3bd2bf512c1c00, -0x2dbad97fd1f0d925, 0x201f33139e457068 }, + { 0x58b31d8f6cdf1818, 0x35cfa74fc36258a2, -0x1e4c00b09919e292, 0x5067acab6ccdd5f7 }, + { -0x02ad8094f7fc62af, 0x18b14964017c0006, -0x2addf14fd1da5b58, 0x397cba8862460375 } }, { - { 0x7815c3fbc81379e7, 0xa6619420dde12af1, 0xffa9c0f885a8fdd5, 0x771b4022c1e1c252 }, - { 0x30c13093f05959b2, 0xe23aa18de9a97976, 0x222fd491721d5e26, 0x2339d320766e6c3a }, - { 0xd87dd986513a2fa7, 0xf5ac9b71f9d4cf08, 0xd06bc31b1ea283b3, 0x331a189219971a76 } + { 0x7815c3fbc81379e7, -0x599e6bdf221ed50f, -0x00563f077a57022b, 0x771b4022c1e1c252 }, + { 0x30c13093f05959b2, -0x1dc55e721656868a, 0x222fd491721d5e26, 0x2339d320766e6c3a }, + { -0x27822679aec5d059, -0x0a53648e062b30f8, -0x2f943ce4e15d7c4d, 0x331a189219971a76 } }, { - { 0x26512f3a9d7572af, 0x5bcbe28868074a9e, 0x84edc1c11180f7c4, 0x1ac9619ff649a67b }, - { 0xf5166f45fb4f80c6, 0x9c36c7de61c775cf, 0xe3d4e81b9041d91c, 0x31167c6b83bdfe21 }, - { 0xf22b3842524b1068, 0x5068343bee9ce987, 0xfc9d71844a6250c8, 0x612436341f08b111 } + { 0x26512f3a9d7572af, 0x5bcbe28868074a9e, -0x7b123e3eee7f083c, 0x1ac9619ff649a67b }, + { -0x0ae990ba04b07f3a, -0x63c938219e388a31, 
-0x1c2b17e46fbe26e4, 0x31167c6b83bdfe21 }, + { -0x0dd4c7bdadb4ef98, 0x5068343bee9ce987, -0x03628e7bb59daf38, 0x612436341f08b111 } }, { - { 0x8b6349e31a2d2638, 0x9ddfb7009bd3fd35, 0x7f8bf1b8a3a06ba4, 0x1522aa3178d90445 }, - { 0xd99d41db874e898d, 0x09fea5f16c07dc20, 0x793d2c67d00f9bbc, 0x46ebe2309e5eff40 }, - { 0x2c382f5369614938, 0xdafe409ab72d6d10, 0xe8c83391b646f227, 0x45fe70f50524306c } + { -0x749cb61ce5d2d9c8, -0x622048ff642c02cb, 0x7f8bf1b8a3a06ba4, 0x1522aa3178d90445 }, + { -0x2662be2478b17673, 0x09fea5f16c07dc20, 0x793d2c67d00f9bbc, 0x46ebe2309e5eff40 }, + { 0x2c382f5369614938, -0x2501bf6548d292f0, -0x1737cc6e49b90dd9, 0x45fe70f50524306c } }, { { 0x62f24920c8951491, 0x05f007c83f630ca2, 0x6fbb45d2f5c9d4b8, 0x16619f6db57a2245 }, - { 0xda4875a6960c0b8c, 0x5b68d076ef0e2f20, 0x07fb51cf3d0b8fd4, 0x428d1623a0e392d4 }, - { 0x084f4a4401a308fd, 0xa82219c376a5caac, 0xdeb8de4643d1bc7d, 0x1d81592d60bd38c6 } + { -0x25b78a5969f3f474, 0x5b68d076ef0e2f20, 0x07fb51cf3d0b8fd4, 0x428d1623a0e392d4 }, + { 0x084f4a4401a308fd, -0x57dde63c895a3554, -0x214721b9bc2e4383, 0x1d81592d60bd38c6 } }, }, { { - { 0x3a4a369a2f89c8a1, 0x63137a1d7c8de80d, 0xbcac008a78eda015, 0x2cb8b3a5b483b03f }, - { 0xd833d7beec2a4c38, 0x2c9162830acc20ed, 0xe93a47aa92df7581, 0x702d67a3333c4a81 }, + { 0x3a4a369a2f89c8a1, 0x63137a1d7c8de80d, -0x4353ff7587125feb, 0x2cb8b3a5b483b03f }, + { -0x27cc284113d5b3c8, 0x2c9162830acc20ed, -0x16c5b8556d208a7f, 0x702d67a3333c4a81 }, { 0x36e417cbcb1b90a1, 0x33b3ddaa7f11794e, 0x3f510808885bc607, 0x24141dc0e6a8020d } }, { - { 0x91925dccbd83157d, 0x3ca1205322cc8094, 0x28e57f183f90d6e4, 0x1a4714cede2e767b }, - { 0x59f73c773fefee9d, 0xb3f1ef89c1cf989d, 0xe35dfb42e02e545f, 0x5766120b47a1b47c }, - { 0xdb20ba0fb8b6b7ff, 0xb732c3b677511fa1, 0xa92b51c099f02d89, 0x4f3875ad489ca5f1 } + { -0x6e6da233427cea83, 0x3ca1205322cc8094, 0x28e57f183f90d6e4, 0x1a4714cede2e767b }, + { 0x59f73c773fefee9d, -0x4c0e10763e306763, -0x1ca204bd1fd1aba1, 0x5766120b47a1b47c }, + { -0x24df45f047494801, 
-0x48cd3c4988aee05f, -0x56d4ae3f660fd277, 0x4f3875ad489ca5f1 } }, { - { 0x79ed13f6ee73eec0, 0xa5c6526d69110bb1, 0xe48928c38603860c, 0x722a1446fd7059f5 }, - { 0xc7fc762f4932ab22, 0x7ac0edf72f4c3c1b, 0x5f6b55aa9aa895e8, 0x3680274dad0a0081 }, - { 0xd0959fe9a8cf8819, 0xd0a995508475a99c, 0x6eac173320b09cc5, 0x628ecf04331b1095 } + { 0x79ed13f6ee73eec0, -0x5a39ad9296eef44f, -0x1b76d73c79fc79f4, 0x722a1446fd7059f5 }, + { -0x380389d0b6cd54de, 0x7ac0edf72f4c3c1b, 0x5f6b55aa9aa895e8, 0x3680274dad0a0081 }, + { -0x2f6a6016573077e7, -0x2f566aaf7b8a5664, 0x6eac173320b09cc5, 0x628ecf04331b1095 } }, { - { 0x9b41acf85c74ccf1, 0xb673318108265251, 0x99c92aed11adb147, 0x7a47d70d34ecb40f }, - { 0x98bcb118a9d0ddbc, 0xee449e3408b4802b, 0x87089226b8a6b104, 0x685f349a45c7915d }, - { 0x60a0c4cbcc43a4f5, 0x775c66ca3677bea9, 0xa17aa1752ff8f5ed, 0x11ded9020e01fdc0 } + { -0x64be5307a38b330f, -0x498cce7ef7d9adaf, -0x6636d512ee524eb9, 0x7a47d70d34ecb40f }, + { -0x67434ee7562f2244, -0x11bb61cbf74b7fd5, -0x78f76dd947594efc, 0x685f349a45c7915d }, + { 0x60a0c4cbcc43a4f5, 0x775c66ca3677bea9, -0x5e855e8ad0070a13, 0x11ded9020e01fdc0 } }, { - { 0x471f95b03bea93b7, 0x0552d7d43313abd3, 0xbd9370e2e17e3f7b, 0x7b120f1db20e5bec }, - { 0x890e7809caefe704, 0x8728296de30e8c6c, 0x4c5cd2a392aeb1c9, 0x194263d15771531f }, - { 0x17d2fb3d86502d7a, 0xb564d84450a69352, 0x7da962c8a60ed75d, 0x00d0f85b318736aa } + { 0x471f95b03bea93b7, 0x0552d7d43313abd3, -0x426c8f1d1e81c085, 0x7b120f1db20e5bec }, + { -0x76f187f6351018fc, -0x78d7d6921cf17394, 0x4c5cd2a392aeb1c9, 0x194263d15771531f }, + { 0x17d2fb3d86502d7a, -0x4a9b27bbaf596cae, 0x7da962c8a60ed75d, 0x00d0f85b318736aa } }, { - { 0xa6753c1efd7621c1, 0x69c0b4a7445671f5, 0x971f527405b23c11, 0x387bc74851a8c7cd }, - { 0x978b142e777c84fd, 0xf402644705a8c062, 0xa67ad51be7e612c7, 0x2f7b459698dd6a33 }, - { 0x81894b4d4a52a9a8, 0xadd93e12f6b8832f, 0x184d8548b61bd638, 0x3f1c62dbd6c9f6cd } + { -0x598ac3e10289de3f, 0x69c0b4a7445671f5, -0x68e0ad8bfa4dc3ef, 0x387bc74851a8c7cd }, + { 
-0x6874ebd188837b03, -0x0bfd9bb8fa573f9e, -0x59852ae41819ed39, 0x2f7b459698dd6a33 }, + { -0x7e76b4b2b5ad5658, -0x5226c1ed09477cd1, 0x184d8548b61bd638, 0x3f1c62dbd6c9f6cd } }, { { 0x3fad3e40148f693d, 0x052656e194eb9a72, 0x2f4dcbfd184f4e2f, 0x406f8db1c482e18b }, - { 0x2e8f1f0091910c1f, 0xa4df4fe0bff2e12c, 0x60c6560aee927438, 0x6338283facefc8fa }, - { 0x9e630d2c7f191ee4, 0x4fbf8301bc3ff670, 0x787d8e4e7afb73c4, 0x50d83d5be8f58fa5 } + { 0x2e8f1f0091910c1f, -0x5b20b01f400d1ed4, 0x60c6560aee927438, 0x6338283facefc8fa }, + { -0x619cf2d380e6e11c, 0x4fbf8301bc3ff670, 0x787d8e4e7afb73c4, 0x50d83d5be8f58fa5 } }, { - { 0xc0accf90b4d3b66d, 0xa7059de561732e60, 0x033d1f7870c6b0ba, 0x584161cd26d946e4 }, - { 0x85683916c11a1897, 0x2d69a4efe506d008, 0x39af1378f664bd01, 0x65942131361517c6 }, - { 0xbbf2b1a072d27ca2, 0xbf393c59fbdec704, 0xe98dbbcee262b81e, 0x02eebd0b3029b589 } + { -0x3f53306f4b2c4993, -0x58fa621a9e8cd1a0, 0x033d1f7870c6b0ba, 0x584161cd26d946e4 }, + { -0x7a97c6e93ee5e769, 0x2d69a4efe506d008, 0x39af1378f664bd01, 0x65942131361517c6 }, + { -0x440d4e5f8d2d835e, -0x40c6c3a6042138fc, -0x167244311d9d47e2, 0x02eebd0b3029b589 } }, }, { { - { 0x8765b69f7b85c5e8, 0x6ff0678bd168bab2, 0x3a70e77c1d330f9b, 0x3a5f6d51b0af8e7c }, + { -0x789a4960847a3a18, 0x6ff0678bd168bab2, 0x3a70e77c1d330f9b, 0x3a5f6d51b0af8e7c }, { 0x61368756a60dac5f, 0x17e02f6aebabdc57, 0x7f193f2d4cce0f7d, 0x20234a7789ecdcf0 }, - { 0x76d20db67178b252, 0x071c34f9d51ed160, 0xf62a4a20b3e41170, 0x7cd682353cffe366 } + { 0x76d20db67178b252, 0x071c34f9d51ed160, -0x09d5b5df4c1bee90, 0x7cd682353cffe366 } }, { - { 0xa665cd6068acf4f3, 0x42d92d183cd7e3d3, 0x5759389d336025d9, 0x3ef0253b2b2cd8ff }, - { 0x0be1a45bd887fab6, 0x2a846a32ba403b6e, 0xd9921012e96e6000, 0x2838c8863bdc0943 }, - { 0xd16bb0cf4a465030, 0xfa496b4115c577ab, 0x82cfae8af4ab419d, 0x21dcb8a606a82812 } + { -0x599a329f97530b0d, 0x42d92d183cd7e3d3, 0x5759389d336025d9, 0x3ef0253b2b2cd8ff }, + { 0x0be1a45bd887fab6, 0x2a846a32ba403b6e, -0x266defed1691a000, 
0x2838c8863bdc0943 }, + { -0x2e944f30b5b9afd0, -0x05b694beea3a8855, -0x7d3051750b54be63, 0x21dcb8a606a82812 } }, { - { 0x9a8d00fabe7731ba, 0x8203607e629e1889, 0xb2cc023743f3d97f, 0x5d840dbf6c6f678b }, + { -0x6572ff054188ce46, -0x7dfc9f819d61e777, -0x4d33fdc8bc0c2681, 0x5d840dbf6c6f678b }, { 0x5c6004468c9d9fc8, 0x2540096ed42aa3cb, 0x125b4d4c12ee2f9c, 0x0bc3d08194a31dab }, { 0x706e380d309fe18b, 0x6eb02da6b9e165c7, 0x57bbba997dae20ab, 0x3a4276232ac196dd } }, { - { 0x3bf8c172db447ecb, 0x5fcfc41fc6282dbd, 0x80acffc075aa15fe, 0x0770c9e824e1a9f9 }, - { 0x4b42432c8a7084fa, 0x898a19e3dfb9e545, 0xbe9f00219c58e45d, 0x1ff177cea16debd1 }, - { 0xcf61d99a45b5b5fd, 0x860984e91b3a7924, 0xe7300919303e3e89, 0x39f264fd41500b1e } + { 0x3bf8c172db447ecb, 0x5fcfc41fc6282dbd, -0x7f53003f8a55ea02, 0x0770c9e824e1a9f9 }, + { 0x4b42432c8a7084fa, -0x7675e61c20461abb, -0x4160ffde63a71ba3, 0x1ff177cea16debd1 }, + { -0x309e2665ba4a4a03, -0x79f67b16e4c586dc, -0x18cff6e6cfc1c177, 0x39f264fd41500b1e } }, { - { 0xd19b4aabfe097be1, 0xa46dfce1dfe01929, 0xc3c908942ca6f1ff, 0x65c621272c35f14e }, - { 0xa7ad3417dbe7e29c, 0xbd94376a2b9c139c, 0xa0e91b8e93597ba9, 0x1712d73468889840 }, - { 0xe72b89f8ce3193dd, 0x4d103356a125c0bb, 0x0419a93d2e1cfe83, 0x22f9800ab19ce272 } + { -0x2e64b55401f6841f, -0x5b92031e201fe6d7, -0x3c36f76bd3590e01, 0x65c621272c35f14e }, + { -0x5852cbe824181d64, -0x426bc895d463ec64, -0x5f16e4716ca68457, 0x1712d73468889840 }, + { -0x18d4760731ce6c23, 0x4d103356a125c0bb, 0x0419a93d2e1cfe83, 0x22f9800ab19ce272 } }, { - { 0x42029fdd9a6efdac, 0xb912cebe34a54941, 0x640f64b987bdf37b, 0x4171a4d38598cab4 }, - { 0x605a368a3e9ef8cb, 0xe3e9c022a5504715, 0x553d48b05f24248f, 0x13f416cd647626e5 }, - { 0xfa2758aa99c94c8c, 0x23006f6fb000b807, 0xfbd291ddadda5392, 0x508214fa574bd1ab } + { 0x42029fdd9a6efdac, -0x46ed3141cb5ab6bf, 0x640f64b987bdf37b, 0x4171a4d38598cab4 }, + { 0x605a368a3e9ef8cb, -0x1c163fdd5aafb8eb, 0x553d48b05f24248f, 0x13f416cd647626e5 }, + { -0x05d8a7556636b374, 0x23006f6fb000b807, 
-0x042d6e225225ac6e, 0x508214fa574bd1ab } }, { - { 0x461a15bb53d003d6, 0xb2102888bcf3c965, 0x27c576756c683a5a, 0x3a7758a4c86cb447 }, - { 0xc20269153ed6fe4b, 0xa65a6739511d77c4, 0xcbde26462c14af94, 0x22f960ec6faba74b }, + { 0x461a15bb53d003d6, -0x4defd777430c369b, 0x27c576756c683a5a, 0x3a7758a4c86cb447 }, + { -0x3dfd96eac12901b5, -0x59a598c6aee2883c, -0x3421d9b9d3eb506c, 0x22f960ec6faba74b }, { 0x548111f693ae5076, 0x1dae21df1dfd54a6, 0x12248c90f3115e65, 0x5d9fd15f8de7f494 } }, { - { 0x3f244d2aeed7521e, 0x8e3a9028432e9615, 0xe164ba772e9c16d4, 0x3bc187fa47eb98d8 }, - { 0x031408d36d63727f, 0x6a379aefd7c7b533, 0xa9e18fc5ccaee24b, 0x332f35914f8fbed3 }, - { 0x6d470115ea86c20c, 0x998ab7cb6c46d125, 0xd77832b53a660188, 0x450d81ce906fba03 } + { 0x3f244d2aeed7521e, -0x71c56fd7bcd169eb, -0x1e9b4588d163e92c, 0x3bc187fa47eb98d8 }, + { 0x031408d36d63727f, 0x6a379aefd7c7b533, -0x561e703a33511db5, 0x332f35914f8fbed3 }, + { 0x6d470115ea86c20c, -0x6675483493b92edb, -0x2887cd4ac599fe78, 0x450d81ce906fba03 } }, }, { { { 0x23264d66b2cae0b5, 0x7dbaed33ebca6576, 0x030ebed6f0d24ac8, 0x2a887f78f7635510 }, - { 0xf8ae4d2ad8453902, 0x7018058ee8db2d1d, 0xaab3995fc7d2c11e, 0x53b16d2324ccca79 }, + { -0x0751b2d527bac6fe, 0x7018058ee8db2d1d, -0x554c66a0382d3ee2, 0x53b16d2324ccca79 }, { 0x2a23b9e75c012d4f, 0x0c974651cae1f2ea, 0x2fb63273675d70ca, 0x0ba7250b864403f5 } }, { - { 0xdd63589386f86d9c, 0x61699176e13a85a4, 0x2e5111954eaa7d57, 0x32c21b57fb60bdfb }, - { 0xbb0d18fd029c6421, 0xbc2d142189298f02, 0x8347f8e68b250e96, 0x7b9f2fe8032d71c9 }, - { 0xd87823cd319e0780, 0xefc4cfc1897775c5, 0x4854fb129a0ab3f7, 0x12c49d417238c371 } + { -0x229ca76c79079264, 0x61699176e13a85a4, 0x2e5111954eaa7d57, 0x32c21b57fb60bdfb }, + { -0x44f2e702fd639bdf, -0x43d2ebde76d670fe, -0x7cb8071974daf16a, 0x7b9f2fe8032d71c9 }, + { -0x2787dc32ce61f880, -0x103b303e76888a3b, 0x4854fb129a0ab3f7, 0x12c49d417238c371 } }, { - { 0x09b3a01783799542, 0x626dd08faad5ee3f, 0xba00bceeeb70149f, 0x1421b246a0a444c9 }, - { 0x0950b533ffe83769, 
0x21861c1d8e1d6bd1, 0xf022d8381302e510, 0x2509200c6391cab4 }, - { 0x4aa43a8e8c24a7c7, 0x04c1f540d8f05ef5, 0xadba5e0c0b3eb9dc, 0x2ab5504448a49ce3 } + { 0x09b3a01783799542, 0x626dd08faad5ee3f, -0x45ff4311148feb61, 0x1421b246a0a444c9 }, + { 0x0950b533ffe83769, 0x21861c1d8e1d6bd1, -0x0fdd27c7ecfd1af0, 0x2509200c6391cab4 }, + { 0x4aa43a8e8c24a7c7, 0x04c1f540d8f05ef5, -0x5245a1f3f4c14624, 0x2ab5504448a49ce3 } }, { - { 0xdc07ac631c5d3afa, 0x58615171f9df8c6c, 0x72a079d89d73e2b0, 0x7301f4ceb4eae15d }, - { 0x2ed227266f0f5dec, 0x9824ee415ed50824, 0x807bec7c9468d415, 0x7093bae1b521e23f }, - { 0x6409e759d6722c41, 0xa674e1cf72bf729b, 0xbc0a24eb3c21e569, 0x390167d24ebacb23 } + { -0x23f8539ce3a2c506, 0x58615171f9df8c6c, 0x72a079d89d73e2b0, 0x7301f4ceb4eae15d }, + { 0x2ed227266f0f5dec, -0x67db11bea12af7dc, -0x7f8413836b972beb, 0x7093bae1b521e23f }, + { 0x6409e759d6722c41, -0x598b1e308d408d65, -0x43f5db14c3de1a97, 0x390167d24ebacb23 } }, { - { 0xd7bb054ba2f2120b, 0xe2b9ceaeb10589b7, 0x3fe8bac8f3c0edbe, 0x4cbd40767112cb69 }, - { 0x27f58e3bba353f1c, 0x4c47764dbf6a4361, 0xafbbc4e56e562650, 0x07db2ee6aae1a45d }, + { -0x2844fab45d0dedf5, -0x1d4631514efa7649, 0x3fe8bac8f3c0edbe, 0x4cbd40767112cb69 }, + { 0x27f58e3bba353f1c, 0x4c47764dbf6a4361, -0x50443b1a91a9d9b0, 0x07db2ee6aae1a45d }, { 0x0b603cc029c58176, 0x5988e3825cb15d61, 0x2bb61413dcf0ad8d, 0x7b8eec6c74183287 } }, { - { 0x32fee570fc386b73, 0xda8b0141da3a8cc7, 0x975ffd0ac8968359, 0x6ee809a1b132a855 }, - { 0xe4ca40782cd27cb0, 0xdaf9c323fbe967bd, 0xb29bd34a8ad41e9e, 0x72810497626ede4d }, - { 0x9444bb31fcfd863a, 0x2fe3690a3e4e48c5, 0xdc29c867d088fa25, 0x13bd1e38d173292e } + { 0x32fee570fc386b73, -0x2574febe25c57339, -0x68a002f537697ca7, 0x6ee809a1b132a855 }, + { -0x1b35bf87d32d8350, -0x25063cdc04169843, -0x4d642cb5752be162, 0x72810497626ede4d }, + { -0x6bbb44ce030279c6, 0x2fe3690a3e4e48c5, -0x23d637982f7705db, 0x13bd1e38d173292e } }, { { 0x223fb5cf1dfac521, 0x325c25316f554450, 0x030b98d7659177ac, 0x1ed018b64f88a4bd }, - { 
0xd32b4cd8696149b5, 0xe55937d781d8aab7, 0x0bcb2127ae122b94, 0x41e86fcfb14099b0 }, - { 0x3630dfa1b802a6b0, 0x880f874742ad3bd5, 0x0af90d6ceec5a4d4, 0x746a247a37cdc5d9 } + { -0x2cd4b327969eb64b, -0x1aa6c8287e275549, 0x0bcb2127ae122b94, 0x41e86fcfb14099b0 }, + { 0x3630dfa1b802a6b0, -0x77f078b8bd52c42b, 0x0af90d6ceec5a4d4, 0x746a247a37cdc5d9 } }, { - { 0x6eccd85278d941ed, 0x2254ae83d22f7843, 0xc522d02e7bbfcdb7, 0x681e3351bff0e4e2 }, - { 0xd531b8bd2b7b9af6, 0x5005093537fc5b51, 0x232fcf25c593546d, 0x20a365142bb40f49 }, - { 0x8b64b59d83034f45, 0x2f8b71f21fa20efb, 0x69249495ba6550e4, 0x539ef98e45d5472b } + { 0x6eccd85278d941ed, 0x2254ae83d22f7843, -0x3add2fd184403249, 0x681e3351bff0e4e2 }, + { -0x2ace4742d484650a, 0x5005093537fc5b51, 0x232fcf25c593546d, 0x20a365142bb40f49 }, + { -0x749b4a627cfcb0bb, 0x2f8b71f21fa20efb, 0x69249495ba6550e4, 0x539ef98e45d5472b } }, }, { { - { 0xd074d8961cae743f, 0xf86d18f5ee1c63ed, 0x97bdc55be7f4ed29, 0x4cbad279663ab108 }, - { 0x6e7bb6a1a6205275, 0xaa4f21d7413c8e83, 0x6f56d155e88f5cb2, 0x2de25d4ba6345be1 }, - { 0x80d19024a0d71fcd, 0xc525c20afb288af8, 0xb1a3974b5f3a6419, 0x7d7fbcefe2007233 } + { -0x2f8b2769e3518bc1, -0x0792e70a11e39c13, -0x68423aa4180b12d7, 0x4cbad279663ab108 }, + { 0x6e7bb6a1a6205275, -0x55b0de28bec3717d, 0x6f56d155e88f5cb2, 0x2de25d4ba6345be1 }, + { -0x7f2e6fdb5f28e033, -0x3ada3df504d77508, -0x4e5c68b4a0c59be7, 0x7d7fbcefe2007233 } }, { - { 0xcd7c5dc5f3c29094, 0xc781a29a2a9105ab, 0x80c61d36421c3058, 0x4f9cd196dcd8d4d7 }, - { 0xfaef1e6a266b2801, 0x866c68c4d5739f16, 0xf68a2fbc1b03762c, 0x5975435e87b75a8d }, - { 0x199297d86a7b3768, 0xd0d058241ad17a63, 0xba029cad5c1c0c17, 0x7ccdd084387a0307 } + { -0x3283a23a0c3d6f6c, -0x387e5d65d56efa55, -0x7f39e2c9bde3cfa8, 0x4f9cd196dcd8d4d7 }, + { -0x0510e195d994d7ff, -0x7993973b2a8c60ea, -0x0975d043e4fc89d4, 0x5975435e87b75a8d }, + { 0x199297d86a7b3768, -0x2f2fa7dbe52e859d, -0x45fd6352a3e3f3e9, 0x7ccdd084387a0307 } }, { - { 0x9b0c84186760cc93, 0xcdae007a1ab32a99, 0xa88dec86620bda18, 
0x3593ca848190ca44 }, - { 0xdca6422c6d260417, 0xae153d50948240bd, 0xa9c0c1b4fb68c677, 0x428bd0ed61d0cf53 }, - { 0x9213189a5e849aa7, 0xd4d8c33565d8facd, 0x8c52545b53fdbbd1, 0x27398308da2d63e6 } + { -0x64f37be7989f336d, -0x3251ff85e54cd567, -0x577213799df425e8, 0x3593ca848190ca44 }, + { -0x2359bdd392d9fbe9, -0x51eac2af6b7dbf43, -0x563f3e4b04973989, 0x428bd0ed61d0cf53 }, + { -0x6dece765a17b6559, -0x2b273cca9a270533, -0x73adaba4ac02442f, 0x27398308da2d63e6 } }, { - { 0xb9a10e4c0a702453, 0x0fa25866d57d1bde, 0xffb9d9b5cd27daf7, 0x572c2945492c33fd }, - { 0x42c38d28435ed413, 0xbd50f3603278ccc9, 0xbb07ab1a79da03ef, 0x269597aebe8c3355 }, - { 0xc77fc745d6cd30be, 0xe4dfe8d3e3baaefb, 0xa22c8830aa5dda0c, 0x7f985498c05bca80 } + { -0x465ef1b3f58fdbad, 0x0fa25866d57d1bde, -0x0046264a32d82509, 0x572c2945492c33fd }, + { 0x42c38d28435ed413, -0x42af0c9fcd873337, -0x44f854e58625fc11, 0x269597aebe8c3355 }, + { -0x388038ba2932cf42, -0x1b20172c1c455105, -0x5dd377cf55a225f4, 0x7f985498c05bca80 } }, { - { 0xd35615520fbf6363, 0x08045a45cf4dfba6, 0xeec24fbc873fa0c2, 0x30f2653cd69b12e7 }, - { 0x3849ce889f0be117, 0x8005ad1b7b54a288, 0x3da3c39f23fc921c, 0x76c2ec470a31f304 }, - { 0x8a08c938aac10c85, 0x46179b60db276bcb, 0xa920c01e0e6fac70, 0x2f1273f1596473da } + { -0x2ca9eaadf0409c9d, 0x08045a45cf4dfba6, -0x113db04378c05f3e, 0x30f2653cd69b12e7 }, + { 0x3849ce889f0be117, -0x7ffa52e484ab5d78, 0x3da3c39f23fc921c, 0x76c2ec470a31f304 }, + { -0x75f736c7553ef37b, 0x46179b60db276bcb, -0x56df3fe1f1905390, 0x2f1273f1596473da } }, { - { 0x30488bd755a70bc0, 0x06d6b5a4f1d442e7, 0xead1a69ebc596162, 0x38ac1997edc5f784 }, - { 0x4739fc7c8ae01e11, 0xfd5274904a6aab9f, 0x41d98a8287728f2e, 0x5d9e572ad85b69f2 }, - { 0x0666b517a751b13b, 0x747d06867e9b858c, 0xacacc011454dde49, 0x22dfcd9cbfe9e69c } + { 0x30488bd755a70bc0, 0x06d6b5a4f1d442e7, -0x152e596143a69e9e, 0x38ac1997edc5f784 }, + { 0x4739fc7c8ae01e11, -0x02ad8b6fb5955461, 0x41d98a8287728f2e, 0x5d9e572ad85b69f2 }, + { 0x0666b517a751b13b, 0x747d06867e9b858c, 
-0x53533feebab221b7, 0x22dfcd9cbfe9e69c } }, { { 0x56ec59b4103be0a1, 0x2ee3baecd259f969, 0x797cb29413f5cd32, 0x0fe9877824cde472 }, - { 0x8ddbd2e0c30d0cd9, 0xad8e665facbb4333, 0x8f6b258c322a961f, 0x6b2916c05448c1c7 }, + { -0x72242d1f3cf2f327, -0x527199a05344bccd, -0x7094da73cdd569e1, 0x6b2916c05448c1c7 }, { 0x7edb34d10aba913b, 0x4ea3cd822e6dac0e, 0x66083dff6578f815, 0x4c303f307ff00a17 } }, { - { 0x29fc03580dd94500, 0xecd27aa46fbbec93, 0x130a155fc2e2a7f8, 0x416b151ab706a1d5 }, - { 0xd30a3bd617b28c85, 0xc5d377b739773bea, 0xc6c6e78c1e6a5cbf, 0x0d61b8f78b2ab7c4 }, - { 0x56a8d7efe9c136b0, 0xbd07e5cd58e44b20, 0xafe62fda1b57e0ab, 0x191a2af74277e8d2 } + { 0x29fc03580dd94500, -0x132d855b9044136d, 0x130a155fc2e2a7f8, 0x416b151ab706a1d5 }, + { -0x2cf5c429e84d737b, -0x3a2c8848c688c416, -0x39391873e195a341, 0x0d61b8f78b2ab7c4 }, + { 0x56a8d7efe9c136b0, -0x42f81a32a71bb4e0, -0x5019d025e4a81f55, 0x191a2af74277e8d2 } }, }, { { - { 0x09d4b60b2fe09a14, 0xc384f0afdbb1747e, 0x58e2ea8978b5fd6e, 0x519ef577b5e09b0a }, - { 0xd550095bab6f4985, 0x04f4cd5b4fbfaf1a, 0x9d8e2ed12a0c7540, 0x2bc24e04b2212286 }, + { 0x09d4b60b2fe09a14, -0x3c7b0f50244e8b82, 0x58e2ea8978b5fd6e, 0x519ef577b5e09b0a }, + { -0x2aaff6a45490b67b, 0x04f4cd5b4fbfaf1a, -0x6271d12ed5f38ac0, 0x2bc24e04b2212286 }, { 0x1863d7d91124cca9, 0x7ac08145b88a708e, 0x2bcd7309857031f5, 0x62337a6e8ab8fae5 } }, { - { 0xd1ab324e1b3a1273, 0x18947cf181055340, 0x3b5d9567a98c196e, 0x7fa00425802e1e68 }, - { 0x4bcef17f06ffca16, 0xde06e1db692ae16a, 0x0753702d614f42b0, 0x5f6041b45b9212d0 }, - { 0x7d531574028c2705, 0x80317d69db0d75fe, 0x30fface8ef8c8ddd, 0x7e9de97bb6c3e998 } + { -0x2e54cdb1e4c5ed8d, 0x18947cf181055340, 0x3b5d9567a98c196e, 0x7fa00425802e1e68 }, + { 0x4bcef17f06ffca16, -0x21f91e2496d51e96, 0x0753702d614f42b0, 0x5f6041b45b9212d0 }, + { 0x7d531574028c2705, -0x7fce829624f28a02, 0x30fface8ef8c8ddd, 0x7e9de97bb6c3e998 } }, { - { 0xf004be62a24d40dd, 0xba0659910452d41f, 0x81c45ee162a44234, 0x4cb829d8a22266ef }, - { 0x1558967b9e6585a3, 
0x97c99ce098e98b92, 0x10af149b6eb3adad, 0x42181fe8f4d38cfa }, + { -0x0ffb419d5db2bf23, -0x45f9a66efbad2be1, -0x7e3ba11e9d5bbdcc, 0x4cb829d8a22266ef }, + { 0x1558967b9e6585a3, -0x6836631f6716746e, 0x10af149b6eb3adad, 0x42181fe8f4d38cfa }, { 0x1dbcaa8407b86681, 0x081f001e8b26753b, 0x3cd7ce6a84048e81, 0x78af11633f25f22c } }, { - { 0x3241c00e7d65318c, 0xe6bee5dcd0e86de7, 0x118b2dc2fbc08c26, 0x680d04a7fc603dc3 }, - { 0x8416ebd40b50babc, 0x1508722628208bee, 0xa3148fafb9c1c36d, 0x0d07daacd32d7d5d }, - { 0xf9c2414a695aa3eb, 0xdaa42c4c05a68f21, 0x7c6c23987f93963e, 0x210e8cd30c3954e3 } + { 0x3241c00e7d65318c, -0x19411a232f179219, 0x118b2dc2fbc08c26, 0x680d04a7fc603dc3 }, + { -0x7be9142bf4af4544, 0x1508722628208bee, -0x5ceb7050463e3c93, 0x0d07daacd32d7d5d }, + { -0x063dbeb596a55c15, -0x255bd3b3fa5970df, 0x7c6c23987f93963e, 0x210e8cd30c3954e3 } }, { - { 0x2b50f16137fe6c26, 0xe102bcd856e404d8, 0x12b0f1414c561f6b, 0x51b17bc8d028ec91 }, - { 0xac4201f210a71c06, 0x6a65e0aef3bfb021, 0xbc42c35c393632f7, 0x56ea8db1865f0742 }, - { 0xfff5fb4bcf535119, 0xf4989d79df1108a0, 0xbdfcea659a3ba325, 0x18a11f1174d1a6f2 } + { 0x2b50f16137fe6c26, -0x1efd4327a91bfb28, 0x12b0f1414c561f6b, 0x51b17bc8d028ec91 }, + { -0x53bdfe0def58e3fa, 0x6a65e0aef3bfb021, -0x43bd3ca3c6c9cd09, 0x56ea8db1865f0742 }, + { -0x000a04b430acaee7, -0x0b67628620eef760, -0x4203159a65c45cdb, 0x18a11f1174d1a6f2 } }, { - { 0xfbd63cdad27a5f2c, 0xf00fc4bc8aa106d7, 0x53fb5c1a8e64a430, 0x04eaabe50c1a2e85 }, - { 0x407375ab3f6bba29, 0x9ec3b6d8991e482e, 0x99c80e82e55f92e9, 0x307c13b6fb0c0ae1 }, - { 0x24751021cb8ab5e7, 0xfc2344495c5010eb, 0x5f1e717b4e5610a1, 0x44da5f18c2710cd5 } + { -0x0429c3252d85a0d4, -0x0ff03b43755ef929, 0x53fb5c1a8e64a430, 0x04eaabe50c1a2e85 }, + { 0x407375ab3f6bba29, -0x613c492766e1b7d2, -0x6637f17d1aa06d17, 0x307c13b6fb0c0ae1 }, + { 0x24751021cb8ab5e7, -0x03dcbbb6a3afef15, 0x5f1e717b4e5610a1, 0x44da5f18c2710cd5 } }, { - { 0x9156fe6b89d8eacc, 0xe6b79451e23126a1, 0xbd7463d93944eb4e, 0x726373f6767203ae }, - { 
0x033cc55ff1b82eb5, 0xb15ae36d411cae52, 0xba40b6198ffbacd3, 0x768edce1532e861f }, - { 0xe305ca72eb7ef68a, 0x662cf31f70eadb23, 0x18f026fdb4c45b68, 0x513b5384b5d2ecbd } + { -0x6ea9019476271534, -0x19486bae1dced95f, -0x428b9c26c6bb14b2, 0x726373f6767203ae }, + { 0x033cc55ff1b82eb5, -0x4ea51c92bee351ae, -0x45bf49e67004532d, 0x768edce1532e861f }, + { -0x1cfa358d14810976, 0x662cf31f70eadb23, 0x18f026fdb4c45b68, 0x513b5384b5d2ecbd } }, { - { 0x5e2702878af34ceb, 0x900b0409b946d6ae, 0x6512ebf7dabd8512, 0x61d9b76988258f81 }, + { 0x5e2702878af34ceb, -0x6ff4fbf646b92952, 0x6512ebf7dabd8512, 0x61d9b76988258f81 }, { 0x46d46280c729989e, 0x4b93fbd05368a5dd, 0x63df3f81d1765a89, 0x34cebd64b9a0a223 }, - { 0xa6c5a71349b7d94b, 0xa3f3d15823eb9446, 0x0416fbd277484834, 0x69d45e6f2c70812f } + { -0x593a58ecb64826b5, -0x5c0c2ea7dc146bba, 0x0416fbd277484834, 0x69d45e6f2c70812f } }, }, { { - { 0x9fe62b434f460efb, 0xded303d4a63607d6, 0xf052210eb7a0da24, 0x237e7dbe00545b93 }, - { 0xce16f74bc53c1431, 0x2b9725ce2072edde, 0xb8b9c36fb5b23ee7, 0x7e2e0e450b5cc908 }, + { -0x6019d4bcb0b9f105, -0x212cfc2b59c9f82a, -0x0faddef1485f25dc, 0x237e7dbe00545b93 }, + { -0x31e908b43ac3ebcf, 0x2b9725ce2072edde, -0x47463c904a4dc119, 0x7e2e0e450b5cc908 }, { 0x013575ed6701b430, 0x231094e69f0bfd10, 0x75320f1583e47f22, 0x71afa699b11155e3 } }, { - { 0xea423c1c473b50d6, 0x51e87a1f3b38ef10, 0x9b84bf5fb2c9be95, 0x00731fbc78f89a1c }, - { 0x65ce6f9b3953b61d, 0xc65839eaafa141e6, 0x0f435ffda9f759fe, 0x021142e9c2b1c28e }, - { 0xe430c71848f81880, 0xbf960c225ecec119, 0xb6dae0836bba15e3, 0x4c4d6f3347e15808 } + { -0x15bdc3e3b8c4af2a, 0x51e87a1f3b38ef10, -0x647b40a04d36416b, 0x00731fbc78f89a1c }, + { 0x65ce6f9b3953b61d, -0x39a7c615505ebe1a, 0x0f435ffda9f759fe, 0x021142e9c2b1c28e }, + { -0x1bcf38e7b707e780, -0x4069f3dda1313ee7, -0x49251f7c9445ea1d, 0x4c4d6f3347e15808 } }, { { 0x2f0cddfc988f1970, 0x6b916227b0b9f51b, 0x6ec7b6c4779176be, 0x38bf9500a88f9fa8 }, - { 0x18f7eccfc17d1fc9, 0x6c75f5a651403c14, 0xdbde712bf7ee0cdf, 
0x193fddaaa7e47a22 }, - { 0x1fd2c93c37e8876f, 0xa2f61e5a18d1462c, 0x5080f58239241276, 0x6a6fb99ebf0d4969 } + { 0x18f7eccfc17d1fc9, 0x6c75f5a651403c14, -0x24218ed40811f321, 0x193fddaaa7e47a22 }, + { 0x1fd2c93c37e8876f, -0x5d09e1a5e72eb9d4, 0x5080f58239241276, 0x6a6fb99ebf0d4969 } }, { - { 0xeeb122b5b6e423c6, 0x939d7010f286ff8e, 0x90a92a831dcf5d8c, 0x136fda9f42c5eb10 }, - { 0x6a46c1bb560855eb, 0x2416bb38f893f09d, 0xd71d11378f71acc1, 0x75f76914a31896ea }, - { 0xf94cdfb1a305bdd1, 0x0f364b9d9ff82c08, 0x2a87d8a5c3bb588a, 0x022183510be8dcba } + { -0x114edd4a491bdc3a, -0x6c628fef0d790072, -0x6f56d57ce230a274, 0x136fda9f42c5eb10 }, + { 0x6a46c1bb560855eb, 0x2416bb38f893f09d, -0x28e2eec8708e533f, 0x75f76914a31896ea }, + { -0x06b3204e5cfa422f, 0x0f364b9d9ff82c08, 0x2a87d8a5c3bb588a, 0x022183510be8dcba } }, { - { 0x9d5a710143307a7f, 0xb063de9ec47da45f, 0x22bbfe52be927ad3, 0x1387c441fd40426c }, - { 0x4af766385ead2d14, 0xa08ed880ca7c5830, 0x0d13a6e610211e3d, 0x6a071ce17b806c03 }, - { 0xb5d3c3d187978af8, 0x722b5a3d7f0e4413, 0x0d7b4848bb477ca0, 0x3171b26aaf1edc92 } + { -0x62a58efebccf8581, -0x4f9c21613b825ba1, 0x22bbfe52be927ad3, 0x1387c441fd40426c }, + { 0x4af766385ead2d14, -0x5f71277f3583a7d0, 0x0d13a6e610211e3d, 0x6a071ce17b806c03 }, + { -0x4a2c3c2e78687508, 0x722b5a3d7f0e4413, 0x0d7b4848bb477ca0, 0x3171b26aaf1edc92 } }, { - { 0xa60db7d8b28a47d1, 0xa6bf14d61770a4f1, 0xd4a1f89353ddbd58, 0x6c514a63344243e9 }, - { 0xa92f319097564ca8, 0xff7bb84c2275e119, 0x4f55fe37a4875150, 0x221fd4873cf0835a }, - { 0x2322204f3a156341, 0xfb73e0e9ba0a032d, 0xfce0dd4c410f030e, 0x48daa596fb924aaa } + { -0x59f248274d75b82f, -0x5940eb29e88f5b0f, -0x2b5e076cac2242a8, 0x6c514a63344243e9 }, + { -0x56d0ce6f68a9b358, -0x008447b3dd8a1ee7, 0x4f55fe37a4875150, 0x221fd4873cf0835a }, + { 0x2322204f3a156341, -0x048c1f1645f5fcd3, -0x031f22b3bef0fcf2, 0x48daa596fb924aaa } }, { - { 0x14f61d5dc84c9793, 0x9941f9e3ef418206, 0xcdf5b88f346277ac, 0x58c837fa0e8a79a9 }, - { 0x6eca8e665ca59cc7, 0xa847254b2e38aca0, 
0x31afc708d21e17ce, 0x676dd6fccad84af7 }, - { 0x0cf9688596fc9058, 0x1ddcbbf37b56a01b, 0xdcc2e77d4935d66a, 0x1c4f73f2c6a57f0a } + { 0x14f61d5dc84c9793, -0x66be061c10be7dfa, -0x320a4770cb9d8854, 0x58c837fa0e8a79a9 }, + { 0x6eca8e665ca59cc7, -0x57b8dab4d1c75360, 0x31afc708d21e17ce, 0x676dd6fccad84af7 }, + { 0x0cf9688596fc9058, 0x1ddcbbf37b56a01b, -0x233d1882b6ca2996, 0x1c4f73f2c6a57f0a } }, { - { 0xb36e706efc7c3484, 0x73dfc9b4c3c1cf61, 0xeb1d79c9781cc7e5, 0x70459adb7daf675c }, - { 0x0e7a4fbd305fa0bb, 0x829d4ce054c663ad, 0xf421c3832fe33848, 0x795ac80d1bf64c42 }, - { 0x1b91db4991b42bb3, 0x572696234b02dcca, 0x9fdf9ee51f8c78dc, 0x5fe162848ce21fd3 } + { -0x4c918f910383cb7c, 0x73dfc9b4c3c1cf61, -0x14e2863687e3381b, 0x70459adb7daf675c }, + { 0x0e7a4fbd305fa0bb, -0x7d62b31fab399c53, -0x0bde3c7cd01cc7b8, 0x795ac80d1bf64c42 }, + { 0x1b91db4991b42bb3, 0x572696234b02dcca, -0x6020611ae0738724, 0x5fe162848ce21fd3 } }, }, { { - { 0x315c29c795115389, 0xd7e0e507862f74ce, 0x0c4a762185927432, 0x72de6c984a25a1e4 }, - { 0xe2790aae4d077c41, 0x8b938270db7469a3, 0x6eb632dc8abd16a2, 0x720814ecaa064b72 }, - { 0xae9ab553bf6aa310, 0x050a50a9806d6e1b, 0x92bb7403adff5139, 0x0394d27645be618b } + { 0x315c29c795115389, -0x281f1af879d08b32, 0x0c4a762185927432, 0x72de6c984a25a1e4 }, + { -0x1d86f551b2f883bf, -0x746c7d8f248b965d, 0x6eb632dc8abd16a2, 0x720814ecaa064b72 }, + { -0x51654aac40955cf0, 0x050a50a9806d6e1b, -0x6d448bfc5200aec7, 0x0394d27645be618b } }, { - { 0xf5396425b23545a4, 0x15a7a27e98fbb296, 0xab6c52bc636fdd86, 0x79d995a8419334ee }, - { 0x4d572251857eedf4, 0xe3724edde19e93c5, 0x8a71420e0b797035, 0x3b3c833687abe743 }, - { 0xcd8a8ea61195dd75, 0xa504d8a81dd9a82f, 0x540dca81a35879b6, 0x60dd16a379c86a8a } + { -0x0ac69bda4dcaba5c, 0x15a7a27e98fbb296, -0x5493ad439c90227a, 0x79d995a8419334ee }, + { 0x4d572251857eedf4, -0x1c8db1221e616c3b, -0x758ebdf1f4868fcb, 0x3b3c833687abe743 }, + { -0x32757159ee6a228b, -0x5afb2757e22657d1, 0x540dca81a35879b6, 0x60dd16a379c86a8a } }, { - { 0x3501d6f8153e47b8, 
0xb7a9675414a2f60c, 0x112ee8b6455d9523, 0x4e62a3c18112ea8a }, - { 0x35a2c8487381e559, 0x596ffea6d78082cb, 0xcb9771ebdba7b653, 0x5a08b5019b4da685 }, - { 0xc8d4ac04516ab786, 0x595af3215295b23d, 0xd6edd234db0230c1, 0x0929efe8825b41cc } + { 0x3501d6f8153e47b8, -0x485698abeb5d09f4, 0x112ee8b6455d9523, 0x4e62a3c18112ea8a }, + { 0x35a2c8487381e559, 0x596ffea6d78082cb, -0x34688e14245849ad, 0x5a08b5019b4da685 }, + { -0x372b53fbae95487a, 0x595af3215295b23d, -0x29122dcb24fdcf3f, 0x0929efe8825b41cc } }, { - { 0x8b3172b7ad56651d, 0x01581b7a3fabd717, 0x2dc94df6424df6e4, 0x30376e5d2c29284f }, - { 0x5f0601d1cbd0f2d3, 0x736e412f6132bb7f, 0x83604432238dde87, 0x1e3a5272f5c0753c }, - { 0xd2918da78159a59c, 0x6bdc1cd93f0713f3, 0x565f7a934acd6590, 0x53daacec4cb4c128 } + { -0x74ce8d4852a99ae3, 0x01581b7a3fabd717, 0x2dc94df6424df6e4, 0x30376e5d2c29284f }, + { 0x5f0601d1cbd0f2d3, 0x736e412f6132bb7f, -0x7c9fbbcddc722179, 0x1e3a5272f5c0753c }, + { -0x2d6e72587ea65a64, 0x6bdc1cd93f0713f3, 0x565f7a934acd6590, 0x53daacec4cb4c128 } }, { - { 0x99852bc3852cfdb0, 0x2cc12e9559d6ed0b, 0x70f9e2bf9b5ac27b, 0x4f3b8c117959ae99 }, - { 0x4ca73bd79cc8a7d6, 0x4d4a738f47e9a9b2, 0xf4cbf12942f5fe00, 0x01a13ff9bdbf0752 }, - { 0x55b6c9c82ff26412, 0x1ac4a8c91fb667a8, 0xd527bfcfeb778bf2, 0x303337da7012a3be } + { -0x667ad43c7ad30250, 0x2cc12e9559d6ed0b, 0x70f9e2bf9b5ac27b, 0x4f3b8c117959ae99 }, + { 0x4ca73bd79cc8a7d6, 0x4d4a738f47e9a9b2, -0x0b340ed6bd0a0200, 0x01a13ff9bdbf0752 }, + { 0x55b6c9c82ff26412, 0x1ac4a8c91fb667a8, -0x2ad840301488740e, 0x303337da7012a3be } }, { - { 0x976d3ccbfad2fdd1, 0xcb88839737a640a8, 0x2ff00c1d6734cb25, 0x269ff4dc789c2d2b }, - { 0x955422228c1c9d7c, 0x01fac1371a9b340f, 0x7e8d9177925b48d7, 0x53f8ad5661b3e31b }, - { 0x0c003fbdc08d678d, 0x4d982fa37ead2b17, 0xc07e6bcdb2e582f1, 0x296c7291df412a44 } + { -0x6892c334052d022f, -0x34777c68c859bf58, 0x2ff00c1d6734cb25, 0x269ff4dc789c2d2b }, + { -0x6aabdddd73e36284, 0x01fac1371a9b340f, 0x7e8d9177925b48d7, 0x53f8ad5661b3e31b }, + { 0x0c003fbdc08d678d, 
0x4d982fa37ead2b17, -0x3f8194324d1a7d0f, 0x296c7291df412a44 } }, { - { 0xdfb23205dab8b59e, 0x465aeaa0c8092250, 0xd133c1189a725d18, 0x2327370261f117d1 }, - { 0x7903de2b33daf397, 0xd0ff0619c9a624b3, 0x8a1d252b555b3e18, 0x2b6d581c52e0b7c0 }, - { 0x3d0543d3623e7986, 0x679414c2c278a354, 0xae43f0cc726196f6, 0x7836c41f8245eaba } + { -0x204dcdfa25474a62, 0x465aeaa0c8092250, -0x2ecc3ee7658da2e8, 0x2327370261f117d1 }, + { 0x7903de2b33daf397, -0x2f00f9e63659db4d, -0x75e2dad4aaa4c1e8, 0x2b6d581c52e0b7c0 }, + { 0x3d0543d3623e7986, 0x679414c2c278a354, -0x51bc0f338d9e690a, 0x7836c41f8245eaba } }, { - { 0xca651e848011937c, 0xc6b0c46e6ef41a28, 0xb7021ba75f3f8d52, 0x119dff99ead7b9fd }, - { 0xe7a254db49e95a81, 0x5192d5d008b0ad73, 0x4d20e5b1d00afc07, 0x5d55f8012cf25f38 }, - { 0x43eadfcbf4b31d4d, 0xc6503f7411148892, 0xfeee68c5060d3b17, 0x329293b3dd4a0ac8 } + { -0x359ae17b7fee6c84, -0x394f3b91910be5d8, -0x48fde458a0c072ae, 0x119dff99ead7b9fd }, + { -0x185dab24b616a57f, 0x5192d5d008b0ad73, 0x4d20e5b1d00afc07, 0x5d55f8012cf25f38 }, + { 0x43eadfcbf4b31d4d, -0x39afc08beeeb776e, -0x0111973af9f2c4e9, 0x329293b3dd4a0ac8 } }, }, { { - { 0x2879852d5d7cb208, 0xb8dedd70687df2e7, 0xdc0bffab21687891, 0x2b44c043677daa35 }, - { 0x4e59214fe194961a, 0x49be7dc70d71cd4f, 0x9300cfd23b50f22d, 0x4789d446fc917232 }, - { 0x1a1c87ab074eb78e, 0xfac6d18e99daf467, 0x3eacbbcd484f9067, 0x60c52eef2bb9a4e4 } + { 0x2879852d5d7cb208, -0x4721228f97820d19, -0x23f40054de97876f, 0x2b44c043677daa35 }, + { 0x4e59214fe194961a, 0x49be7dc70d71cd4f, -0x6cff302dc4af0dd3, 0x4789d446fc917232 }, + { 0x1a1c87ab074eb78e, -0x05392e7166250b99, 0x3eacbbcd484f9067, 0x60c52eef2bb9a4e4 } }, { - { 0x702bc5c27cae6d11, 0x44c7699b54a48cab, 0xefbc4056ba492eb2, 0x70d77248d9b6676d }, - { 0x0b5d89bc3bfd8bf1, 0xb06b9237c9f3551a, 0x0e4c16b0d53028f5, 0x10bc9c312ccfcaab }, - { 0xaa8ae84b3ec2a05b, 0x98699ef4ed1781e0, 0x794513e4708e85d1, 0x63755bd3a976f413 } + { 0x702bc5c27cae6d11, 0x44c7699b54a48cab, -0x1043bfa945b6d14e, 0x70d77248d9b6676d }, + { 
0x0b5d89bc3bfd8bf1, -0x4f946dc8360caae6, 0x0e4c16b0d53028f5, 0x10bc9c312ccfcaab }, + { -0x557517b4c13d5fa5, -0x6796610b12e87e20, 0x794513e4708e85d1, 0x63755bd3a976f413 } }, { { 0x3dc7101897f1acb7, 0x5dda7d5ec165bbd8, 0x508e5b9c0fa1020f, 0x2763751737c52a56 }, - { 0xb55fa03e2ad10853, 0x356f75909ee63569, 0x9ff9f1fdbe69b890, 0x0d8cc1c48bc16f84 }, - { 0x029402d36eb419a9, 0xf0b44e7e77b460a5, 0xcfa86230d43c4956, 0x70c2dd8a7ad166e7 } + { -0x4aa05fc1d52ef7ad, 0x356f75909ee63569, -0x60060e0241964770, 0x0d8cc1c48bc16f84 }, + { 0x029402d36eb419a9, -0x0f4bb181884b9f5b, -0x30579dcf2bc3b6aa, 0x70c2dd8a7ad166e7 } }, { - { 0x91d4967db8ed7e13, 0x74252f0ad776817a, 0xe40982e00d852564, 0x32b8613816a53ce5 }, - { 0x656194509f6fec0e, 0xee2e7ea946c6518d, 0x9733c1f367e09b5c, 0x2e0fac6363948495 }, - { 0x79e7f7bee448cd64, 0x6ac83a67087886d0, 0xf89fd4d9a0e4db2e, 0x4179215c735a4f41 } + { -0x6e2b6982471281ed, 0x74252f0ad776817a, -0x1bf67d1ff27ada9c, 0x32b8613816a53ce5 }, + { 0x656194509f6fec0e, -0x11d18156b939ae73, -0x68cc3e0c981f64a4, 0x2e0fac6363948495 }, + { 0x79e7f7bee448cd64, 0x6ac83a67087886d0, -0x07602b265f1b24d2, 0x4179215c735a4f41 } }, { - { 0xe4ae33b9286bcd34, 0xb7ef7eb6559dd6dc, 0x278b141fb3d38e1f, 0x31fa85662241c286 }, - { 0x8c7094e7d7dced2a, 0x97fb8ac347d39c70, 0xe13be033a906d902, 0x700344a30cd99d76 }, - { 0xaf826c422e3622f4, 0xc12029879833502d, 0x9bc1b7e12b389123, 0x24bb2312a9952489 } + { -0x1b51cc46d79432cc, -0x48108149aa622924, 0x278b141fb3d38e1f, 0x31fa85662241c286 }, + { -0x738f6b18282312d6, -0x6804753cb82c6390, -0x1ec41fcc56f926fe, 0x700344a30cd99d76 }, + { -0x507d93bdd1c9dd0c, -0x3edfd67867ccafd3, -0x643e481ed4c76edd, 0x24bb2312a9952489 } }, { - { 0x41f80c2af5f85c6b, 0x687284c304fa6794, 0x8945df99a3ba1bad, 0x0d1d2af9ffeb5d16 }, - { 0xb1a8ed1732de67c3, 0x3cb49418461b4948, 0x8ebd434376cfbcd2, 0x0fee3e871e188008 }, - { 0xa9da8aa132621edf, 0x30b822a159226579, 0x4004197ba79ac193, 0x16acd79718531d76 } + { 0x41f80c2af5f85c6b, 0x687284c304fa6794, -0x76ba20665c45e453, 
0x0d1d2af9ffeb5d16 }, + { -0x4e5712e8cd21983d, 0x3cb49418461b4948, -0x7142bcbc8930432e, 0x0fee3e871e188008 }, + { -0x5625755ecd9de121, 0x30b822a159226579, 0x4004197ba79ac193, 0x16acd79718531d76 } }, { - { 0xc959c6c57887b6ad, 0x94e19ead5f90feba, 0x16e24e62a342f504, 0x164ed34b18161700 }, + { -0x36a6393a87784953, -0x6b1e6152a06f0146, 0x16e24e62a342f504, 0x164ed34b18161700 }, { 0x72df72af2d9b1d3d, 0x63462a36a432245a, 0x3ecea07916b39637, 0x123e0ef6b9302309 }, - { 0x487ed94c192fe69a, 0x61ae2cea3a911513, 0x877bf6d3b9a4de27, 0x78da0fc61073f3eb } + { 0x487ed94c192fe69a, 0x61ae2cea3a911513, -0x7884092c465b21d9, 0x78da0fc61073f3eb } }, { - { 0xa29f80f1680c3a94, 0x71f77e151ae9e7e6, 0x1100f15848017973, 0x054aa4b316b38ddd }, + { -0x5d607f0e97f3c56c, 0x71f77e151ae9e7e6, 0x1100f15848017973, 0x054aa4b316b38ddd }, { 0x5bf15d28e52bc66a, 0x2c47e31870f01a8e, 0x2419afbc06c28bdd, 0x2d25deeb256b173a }, - { 0xdfc8468d19267cb8, 0x0b28789c66e54daf, 0x2aeb1d2a666eec17, 0x134610a6ab7da760 } + { -0x2037b972e6d98348, 0x0b28789c66e54daf, 0x2aeb1d2a666eec17, 0x134610a6ab7da760 } }, }, { { - { 0xd91430e0dc028c3c, 0x0eb955a85217c771, 0x4b09e1ed2c99a1fa, 0x42881af2bd6a743c }, - { 0xcaf55ec27c59b23f, 0x99aeed3e154d04f2, 0x68441d72e14141f4, 0x140345133932a0a2 }, - { 0x7bfec69aab5cad3d, 0xc23e8cd34cb2cfad, 0x685dd14bfb37d6a2, 0x0ad6d64415677a18 } + { -0x26ebcf1f23fd73c4, 0x0eb955a85217c771, 0x4b09e1ed2c99a1fa, 0x42881af2bd6a743c }, + { -0x350aa13d83a64dc1, -0x665112c1eab2fb0e, 0x68441d72e14141f4, 0x140345133932a0a2 }, + { 0x7bfec69aab5cad3d, -0x3dc1732cb34d3053, 0x685dd14bfb37d6a2, 0x0ad6d64415677a18 } }, { { 0x7914892847927e9f, 0x33dad6ef370aa877, 0x1f8f24fa11122703, 0x5265ac2f2adf9592 }, { 0x781a439e417becb5, 0x4ac5938cd10e0266, 0x5da385110692ac24, 0x11b065a2ade31233 }, - { 0x405fdd309afcb346, 0xd9723d4428e63f54, 0x94c01df05f65aaae, 0x43e4dc3ae14c0809 } + { 0x405fdd309afcb346, -0x268dc2bbd719c0ac, -0x6b3fe20fa09a5552, 0x43e4dc3ae14c0809 } }, { - { 0xea6f7ac3adc2c6a3, 0xd0e928f6e9717c94, 
0xe2d379ead645eaf5, 0x46dd8785c51ffbbe }, - { 0xbc12c7f1a938a517, 0x473028ab3180b2e1, 0x3f78571efbcd254a, 0x74e534426ff6f90f }, + { -0x1590853c523d395d, -0x2f16d709168e836c, -0x1d2c861529ba150b, 0x46dd8785c51ffbbe }, + { -0x43ed380e56c75ae9, 0x473028ab3180b2e1, 0x3f78571efbcd254a, 0x74e534426ff6f90f }, { 0x709801be375c8898, 0x4b06dab5e3fd8348, 0x75880ced27230714, 0x2b09468fdd2f4c42 } }, { - { 0x5b97946582ffa02a, 0xda096a51fea8f549, 0xa06351375f77af9b, 0x1bcfde61201d1e76 }, - { 0x97c749eeb701cb96, 0x83f438d4b6a369c3, 0x62962b8b9a402cd9, 0x6976c7509888df7b }, - { 0x4a4a5490246a59a2, 0xd63ebddee87fdd90, 0xd9437c670d2371fa, 0x69e87308d30f8ed6 } + { 0x5b97946582ffa02a, -0x25f695ae01570ab7, -0x5f9caec8a0885065, 0x1bcfde61201d1e76 }, + { -0x6838b61148fe346a, -0x7c0bc72b495c963d, 0x62962b8b9a402cd9, 0x6976c7509888df7b }, + { 0x4a4a5490246a59a2, -0x29c1422117802270, -0x26bc8398f2dc8e06, 0x69e87308d30f8ed6 } }, { - { 0x0f80bf028bc80303, 0x6aae16b37a18cefb, 0xdd47ea47d72cd6a3, 0x61943588f4ed39aa }, - { 0x435a8bb15656beb0, 0xf8fac9ba4f4d5bca, 0xb9b278c41548c075, 0x3eb0ef76e892b622 }, - { 0xd26e5c3e91039f85, 0xc0e9e77df6f33aa9, 0xe8968c5570066a93, 0x3c34d1881faaaddd } + { 0x0f80bf028bc80303, 0x6aae16b37a18cefb, -0x22b815b828d3295d, 0x61943588f4ed39aa }, + { 0x435a8bb15656beb0, -0x07053645b0b2a436, -0x464d873beab73f8b, 0x3eb0ef76e892b622 }, + { -0x2d91a3c16efc607b, -0x3f161882090cc557, -0x176973aa8ff9956d, 0x3c34d1881faaaddd } }, { - { 0xbd5b0b8f2fffe0d9, 0x6aa254103ed24fb9, 0x2ac7d7bcb26821c4, 0x605b394b60dca36a }, - { 0x3f9d2b5ea09f9ec0, 0x1dab3b6fb623a890, 0xa09ba3ea72d926c4, 0x374193513fd8b36d }, - { 0xb4e856e45a9d1ed2, 0xefe848766c97a9a2, 0xb104cf641e5eee7d, 0x2f50b81c88a71c8f } + { -0x42a4f470d0001f27, 0x6aa254103ed24fb9, 0x2ac7d7bcb26821c4, 0x605b394b60dca36a }, + { 0x3f9d2b5ea09f9ec0, 0x1dab3b6fb623a890, -0x5f645c158d26d93c, 0x374193513fd8b36d }, + { -0x4b17a91ba562e12e, -0x1017b7899368565e, -0x4efb309be1a11183, 0x2f50b81c88a71c8f } }, { - { 0x2b552ca0a7da522a, 
0x3230b336449b0250, 0xf2c4c5bca4b99fb9, 0x7b2c674958074a22 }, - { 0x31723c61fc6811bb, 0x9cb450486211800f, 0x768933d347995753, 0x3491a53502752fcd }, - { 0xd55165883ed28cdf, 0x12d84fd2d362de39, 0x0a874ad3e3378e4f, 0x000d2b1f7c763e74 } + { 0x2b552ca0a7da522a, 0x3230b336449b0250, -0x0d3b3a435b466047, 0x7b2c674958074a22 }, + { 0x31723c61fc6811bb, -0x634bafb79dee7ff1, 0x768933d347995753, 0x3491a53502752fcd }, + { -0x2aae9a77c12d7321, 0x12d84fd2d362de39, 0x0a874ad3e3378e4f, 0x000d2b1f7c763e74 } }, { - { 0x9624778c3e94a8ab, 0x0ad6f3cee9a78bec, 0x948ac7810d743c4f, 0x76627935aaecfccc }, - { 0x3d420811d06d4a67, 0xbefc048590e0ffe3, 0xf870c6b7bd487bde, 0x6e2a7316319afa28 }, - { 0x56a8ac24d6d59a9f, 0xc8db753e3096f006, 0x477f41e68f4c5299, 0x588d851cf6c86114 } + { -0x69db8873c16b5755, 0x0ad6f3cee9a78bec, -0x6b75387ef28bc3b1, 0x76627935aaecfccc }, + { 0x3d420811d06d4a67, -0x4103fb7a6f1f001d, -0x078f394842b78422, 0x6e2a7316319afa28 }, + { 0x56a8ac24d6d59a9f, -0x37248ac1cf690ffa, 0x477f41e68f4c5299, 0x588d851cf6c86114 } }, }, { { - { 0xcd2a65e777d1f515, 0x548991878faa60f1, 0xb1b73bbcdabc06e5, 0x654878cba97cc9fb }, + { -0x32d59a18882e0aeb, 0x548991878faa60f1, -0x4e48c4432543f91b, 0x654878cba97cc9fb }, { 0x51138ec78df6b0fe, 0x5397da89e575f51b, 0x09207a1d717af1b9, 0x2102fdba2b20d650 }, - { 0x969ee405055ce6a1, 0x36bca7681251ad29, 0x3a1af517aa7da415, 0x0ad725db29ecb2ba } + { -0x69611bfafaa3195f, 0x36bca7681251ad29, 0x3a1af517aa7da415, 0x0ad725db29ecb2ba } }, { - { 0xfec7bc0c9b056f85, 0x537d5268e7f5ffd7, 0x77afc6624312aefa, 0x4f675f5302399fd9 }, - { 0xdc4267b1834e2457, 0xb67544b570ce1bc5, 0x1af07a0bf7d15ed7, 0x4aefcffb71a03650 }, - { 0xc32d36360415171e, 0xcd2bef118998483b, 0x870a6eadd0945110, 0x0bccbb72a2a86561 } + { -0x013843f364fa907b, 0x537d5268e7f5ffd7, 0x77afc6624312aefa, 0x4f675f5302399fd9 }, + { -0x23bd984e7cb1dba9, -0x498abb4a8f31e43b, 0x1af07a0bf7d15ed7, 0x4aefcffb71a03650 }, + { -0x3cd2c9c9fbeae8e2, -0x32d410ee7667b7c5, -0x78f591522f6baef0, 0x0bccbb72a2a86561 } }, { - { 
0x186d5e4c50fe1296, 0xe0397b82fee89f7e, 0x3bc7f6c5507031b0, 0x6678fd69108f37c2 }, - { 0x185e962feab1a9c8, 0x86e7e63565147dcd, 0xb092e031bb5b6df2, 0x4024f0ab59d6b73e }, + { 0x186d5e4c50fe1296, -0x1fc6847d01176082, 0x3bc7f6c5507031b0, 0x6678fd69108f37c2 }, + { 0x185e962feab1a9c8, -0x791819ca9aeb8233, -0x4f6d1fce44a4920e, 0x4024f0ab59d6b73e }, { 0x1586fa31636863c2, 0x07f68c48572d33f2, 0x4f73cc9f789eaefc, 0x2d42e2108ead4701 } }, { - { 0x21717b0d0f537593, 0x914e690b131e064c, 0x1bb687ae752ae09f, 0x420bf3a79b423c6e }, - { 0x97f5131594dfd29b, 0x6155985d313f4c6a, 0xeba13f0708455010, 0x676b2608b8d2d322 }, - { 0x8138ba651c5b2b47, 0x8671b6ec311b1b80, 0x7bff0cb1bc3135b0, 0x745d2ffa9c0cf1e0 } + { 0x21717b0d0f537593, -0x6eb196f4ece1f9b4, 0x1bb687ae752ae09f, 0x420bf3a79b423c6e }, + { -0x680aecea6b202d65, 0x6155985d313f4c6a, -0x145ec0f8f7baaff0, 0x676b2608b8d2d322 }, + { -0x7ec7459ae3a4d4b9, -0x798e4913cee4e480, 0x7bff0cb1bc3135b0, 0x745d2ffa9c0cf1e0 } }, { - { 0x6036df5721d34e6a, 0xb1db8827997bb3d0, 0xd3c209c3c8756afa, 0x06e15be54c1dc839 }, - { 0xbf525a1e2bc9c8bd, 0xea5b260826479d81, 0xd511c70edf0155db, 0x1ae23ceb960cf5d0 }, + { 0x6036df5721d34e6a, -0x4e2477d866844c30, -0x2c3df63c378a9506, 0x06e15be54c1dc839 }, + { -0x40ada5e1d4363743, -0x15a4d9f7d9b8627f, -0x2aee38f120feaa25, 0x1ae23ceb960cf5d0 }, { 0x5b725d871932994a, 0x32351cb5ceb1dab0, 0x7dc41549dab7ca05, 0x58ded861278ec1f7 } }, { { 0x2dfb5ba8b6c2c9a8, 0x48eeef8ef52c598c, 0x33809107f12d1573, 0x08ba696b531d5bd8 }, - { 0xd8173793f266c55c, 0xc8c976c5cc454e49, 0x5ce382f8bc26c3a8, 0x2ff39de85485f6f9 }, - { 0x77ed3eeec3efc57a, 0x04e05517d4ff4811, 0xea3d7a3ff1a671cb, 0x120633b4947cfe54 } + { -0x27e8c86c0d993aa4, -0x3736893a33bab1b7, 0x5ce382f8bc26c3a8, 0x2ff39de85485f6f9 }, + { 0x77ed3eeec3efc57a, 0x04e05517d4ff4811, -0x15c285c00e598e35, 0x120633b4947cfe54 } }, { - { 0x82bd31474912100a, 0xde237b6d7e6fbe06, 0xe11e761911ea79c6, 0x07433be3cb393bde }, + { -0x7d42ceb8b6edeff6, -0x21dc8492819041fa, -0x1ee189e6ee15863a, 0x07433be3cb393bde 
}, { 0x0b94987891610042, 0x4ee7b13cecebfae8, 0x70be739594f0a4c0, 0x35d30a99b4d59185 }, - { 0xff7944c05ce997f4, 0x575d3de4b05c51a3, 0x583381fd5a76847c, 0x2d873ede7af6da9f } + { -0x0086bb3fa316680c, 0x575d3de4b05c51a3, 0x583381fd5a76847c, 0x2d873ede7af6da9f } }, { - { 0xaa6202e14e5df981, 0xa20d59175015e1f5, 0x18a275d3bae21d6c, 0x0543618a01600253 }, - { 0x157a316443373409, 0xfab8b7eef4aa81d9, 0xb093fee6f5a64806, 0x2e773654707fa7b6 }, - { 0x0deabdf4974c23c1, 0xaa6f0a259dce4693, 0x04202cb8a29aba2c, 0x4b1443362d07960d } + { -0x559dfd1eb1a2067f, -0x5df2a6e8afea1e0b, 0x18a275d3bae21d6c, 0x0543618a01600253 }, + { 0x157a316443373409, -0x054748110b557e27, -0x4f6c01190a59b7fa, 0x2e773654707fa7b6 }, + { 0x0deabdf4974c23c1, -0x5590f5da6231b96d, 0x04202cb8a29aba2c, 0x4b1443362d07960d } }, }, { { - { 0x299b1c3f57c5715e, 0x96cb929e6b686d90, 0x3004806447235ab3, 0x2c435c24a44d9fe1 }, - { 0x47b837f753242cec, 0x256dc48cc04212f2, 0xe222fbfbe1d928c5, 0x48ea295bad8a2c07 }, + { 0x299b1c3f57c5715e, -0x69346d6194979270, 0x3004806447235ab3, 0x2c435c24a44d9fe1 }, + { 0x47b837f753242cec, 0x256dc48cc04212f2, -0x1ddd04041e26d73b, 0x48ea295bad8a2c07 }, { 0x0607c97c80f8833f, 0x0e851578ca25ec5b, 0x54f7450b161ebb6f, 0x7bcb4792a0def80e } }, { { 0x1cecd0a0045224c2, 0x757f1b1b69e53952, 0x775b7a925289f681, 0x1b6cc62016736148 }, - { 0x8487e3d02bc73659, 0x4baf8445059979df, 0xd17c975adcad6fbf, 0x57369f0bdefc96b6 }, - { 0xf1a9990175638698, 0x353dd1beeeaa60d3, 0x849471334c9ba488, 0x63fa6e6843ade311 } + { -0x7b781c2fd438c9a7, 0x4baf8445059979df, -0x2e8368a523529041, 0x57369f0bdefc96b6 }, + { -0x0e5666fe8a9c7968, 0x353dd1beeeaa60d3, -0x7b6b8eccb3645b78, 0x63fa6e6843ade311 } }, { - { 0x2195becdd24b5eb7, 0x5e41f18cc0cd44f9, 0xdf28074441ca9ede, 0x07073b98f35b7d67 }, - { 0xd15c20536597c168, 0x9f73740098d28789, 0x18aee7f13257ba1f, 0x3418bfda07346f14 }, - { 0xd03c676c4ce530d4, 0x0b64c0473b5df9f4, 0x065cef8b19b3a31e, 0x3084d661533102c9 } + { 0x2195becdd24b5eb7, 0x5e41f18cc0cd44f9, -0x20d7f8bbbe356122, 
0x07073b98f35b7d67 }, + { -0x2ea3dfac9a683e98, -0x608c8bff672d7877, 0x18aee7f13257ba1f, 0x3418bfda07346f14 }, + { -0x2fc39893b31acf2c, 0x0b64c0473b5df9f4, 0x065cef8b19b3a31e, 0x3084d661533102c9 } }, { - { 0x9a6ce876760321fd, 0x7fe2b5109eb63ad8, 0x00e7d4ae8ac80592, 0x73d86b7abb6f723a }, - { 0xe1f6b79ebf8469ad, 0x15801004e2663135, 0x9a498330af74181b, 0x3ba2504f049b673c }, - { 0x0b52b5606dba5ab6, 0xa9134f0fbbb1edab, 0x30a9520d9b04a635, 0x6813b8f37973e5db } + { -0x6593178989fcde03, 0x7fe2b5109eb63ad8, 0x00e7d4ae8ac80592, 0x73d86b7abb6f723a }, + { -0x1e094861407b9653, 0x15801004e2663135, -0x65b67ccf508be7e5, 0x3ba2504f049b673c }, + { 0x0b52b5606dba5ab6, -0x56ecb0f0444e1255, 0x30a9520d9b04a635, 0x6813b8f37973e5db } }, { - { 0xf194ca56f3157e29, 0x136d35705ef528a5, 0xdd4cef778b0599bc, 0x7d5472af24f833ed }, - { 0x9854b054334127c1, 0x105d047882fbff25, 0xdb49f7f944186f4f, 0x1768e838bed0b900 }, - { 0xd0ef874daf33da47, 0x00d3be5db6e339f9, 0x3f2a8a2f9c9ceece, 0x5d1aeb792352435a } + { -0x0e6b35a90cea81d7, 0x136d35705ef528a5, -0x22b3108874fa6644, 0x7d5472af24f833ed }, + { -0x67ab4fabccbed83f, 0x105d047882fbff25, -0x24b60806bbe790b1, 0x1768e838bed0b900 }, + { -0x2f1078b250cc25b9, 0x00d3be5db6e339f9, 0x3f2a8a2f9c9ceece, 0x5d1aeb792352435a } }, { - { 0x12c7bfaeb61ba775, 0xb84e621fe263bffd, 0x0b47a5c35c840dcf, 0x7e83be0bccaf8634 }, - { 0xf59e6bb319cd63ca, 0x670c159221d06839, 0xb06d565b2150cab6, 0x20fb199d104f12a3 }, - { 0x61943dee6d99c120, 0x86101f2e460b9fe0, 0x6bb2f1518ee8598d, 0x76b76289fcc475cc } + { 0x12c7bfaeb61ba775, -0x47b19de01d9c4003, 0x0b47a5c35c840dcf, 0x7e83be0bccaf8634 }, + { -0x0a61944ce6329c36, 0x670c159221d06839, -0x4f92a9a4deaf354a, 0x20fb199d104f12a3 }, + { 0x61943dee6d99c120, -0x79efe0d1b9f46020, 0x6bb2f1518ee8598d, 0x76b76289fcc475cc } }, { { 0x4245f1a1522ec0b3, 0x558785b22a75656d, 0x1d485a2548a1b3c0, 0x60959eccd58fe09f }, - { 0x791b4cc1756286fa, 0xdbced317d74a157c, 0x7e732421ea72bde6, 0x01fe18491131c8e9 }, + { 0x791b4cc1756286fa, -0x24312ce828b5ea84, 
0x7e732421ea72bde6, 0x01fe18491131c8e9 }, { 0x3ebfeb7ba8ed7a09, 0x49fdc2bbe502789c, 0x44ebce5d3c119428, 0x35e1eb55be947f4a } }, { - { 0x14fd6dfa726ccc74, 0x3b084cfe2f53b965, 0xf33ae4f552a2c8b4, 0x59aab07a0d40166a }, - { 0xdbdae701c5738dd3, 0xf9c6f635b26f1bee, 0x61e96a8042f15ef4, 0x3aa1d11faf60a4d8 }, + { 0x14fd6dfa726ccc74, 0x3b084cfe2f53b965, -0x0cc51b0aad5d374c, 0x59aab07a0d40166a }, + { -0x242518fe3a8c722d, -0x063909ca4d90e412, 0x61e96a8042f15ef4, 0x3aa1d11faf60a4d8 }, { 0x77bcec4c925eac25, 0x1848718460137738, 0x5b374337fea9f451, 0x1865e78ec8e6aa46 } }, }, { { - { 0x967c54e91c529ccb, 0x30f6269264c635fb, 0x2747aff478121965, 0x17038418eaf66f5c }, - { 0xccc4b7c7b66e1f7a, 0x44157e25f50c2f7e, 0x3ef06dfc713eaf1c, 0x582f446752da63f7 }, - { 0xc6317bd320324ce4, 0xa81042e8a4488bc4, 0xb21ef18b4e5a1364, 0x0c2a1c4bcda28dc9 } + { -0x6983ab16e3ad6335, 0x30f6269264c635fb, 0x2747aff478121965, 0x17038418eaf66f5c }, + { -0x333b48384991e086, 0x44157e25f50c2f7e, 0x3ef06dfc713eaf1c, 0x582f446752da63f7 }, + { -0x39ce842cdfcdb31c, -0x57efbd175bb7743c, -0x4de10e74b1a5ec9c, 0x0c2a1c4bcda28dc9 } }, { - { 0xedc4814869bd6945, 0x0d6d907dbe1c8d22, 0xc63bd212d55cc5ab, 0x5a6a9b30a314dc83 }, - { 0xd24dc7d06f1f0447, 0xb2269e3edb87c059, 0xd15b0272fbb2d28f, 0x7c558bd1c6f64877 }, - { 0xd0ec1524d396463d, 0x12bb628ac35a24f0, 0xa50c3a791cbc5fa4, 0x0404a5ca0afbafc3 } + { -0x123b7eb7964296bb, 0x0d6d907dbe1c8d22, -0x39c42ded2aa33a55, 0x5a6a9b30a314dc83 }, + { -0x2db2382f90e0fbb9, -0x4dd961c124783fa7, -0x2ea4fd8d044d2d71, 0x7c558bd1c6f64877 }, + { -0x2f13eadb2c69b9c3, 0x12bb628ac35a24f0, -0x5af3c586e343a05c, 0x0404a5ca0afbafc3 } }, { - { 0x62bc9e1b2a416fd1, 0xb5c6f728e350598b, 0x04343fd83d5d6967, 0x39527516e7f8ee98 }, - { 0x8c1f40070aa743d6, 0xccbad0cb5b265ee8, 0x574b046b668fd2de, 0x46395bfdcadd9633 }, - { 0x117fdb2d1a5d9a9c, 0x9c7745bcd1005c2a, 0xefd4bef154d56fea, 0x76579a29e822d016 } + { 0x62bc9e1b2a416fd1, -0x4a3908d71cafa675, 0x04343fd83d5d6967, 0x39527516e7f8ee98 }, + { -0x73e0bff8f558bc2a, 
-0x33452f34a4d9a118, 0x574b046b668fd2de, 0x46395bfdcadd9633 }, + { 0x117fdb2d1a5d9a9c, -0x6388ba432effa3d6, -0x102b410eab2a9016, 0x76579a29e822d016 } }, { - { 0x333cb51352b434f2, 0xd832284993de80e1, 0xb5512887750d35ce, 0x02c514bb2a2777c1 }, + { 0x333cb51352b434f2, -0x27cdd7b66c217f1f, -0x4aaed7788af2ca32, 0x02c514bb2a2777c1 }, { 0x45b68e7e49c02a17, 0x23cd51a2bca9a37f, 0x3ed65f11ec224c1b, 0x43a384dc9e05bdb1 }, - { 0x684bd5da8bf1b645, 0xfb8bd37ef6b54b53, 0x313916d7a9b0d253, 0x1160920961548059 } + { 0x684bd5da8bf1b645, -0x04742c81094ab4ad, 0x313916d7a9b0d253, 0x1160920961548059 } }, { { 0x7a385616369b4dcd, 0x75c02ca7655c3563, 0x7dc21bf9d4f18021, 0x2f637d7491e6e042 }, - { 0xb44d166929dacfaa, 0xda529f4c8413598f, 0xe9ef63ca453d5559, 0x351e125bc5698e0b }, - { 0xd4b49b461af67bbe, 0xd603037ac8ab8961, 0x71dee19ff9a699fb, 0x7f182d06e7ce2a9a } + { -0x4bb2e996d6253056, -0x25ad60b37beca671, -0x16109c35bac2aaa7, 0x351e125bc5698e0b }, + { -0x2b4b64b9e5098442, -0x29fcfc853754769f, 0x71dee19ff9a699fb, 0x7f182d06e7ce2a9a } }, { - { 0x09454b728e217522, 0xaa58e8f4d484b8d8, 0xd358254d7f46903c, 0x44acc043241c5217 }, - { 0x7a7c8e64ab0168ec, 0xcb5a4a5515edc543, 0x095519d347cd0eda, 0x67d4ac8c343e93b0 }, - { 0x1c7d6bbb4f7a5777, 0x8b35fed4918313e1, 0x4adca1c6c96b4684, 0x556d1c8312ad71bd } + { 0x09454b728e217522, -0x55a7170b2b7b4728, -0x2ca7dab280b96fc4, 0x44acc043241c5217 }, + { 0x7a7c8e64ab0168ec, -0x34a5b5aaea123abd, 0x095519d347cd0eda, 0x67d4ac8c343e93b0 }, + { 0x1c7d6bbb4f7a5777, -0x74ca012b6e7cec1f, 0x4adca1c6c96b4684, 0x556d1c8312ad71bd } }, { - { 0x81f06756b11be821, 0x0faff82310a3f3dd, 0xf8b2d0556a99465d, 0x097abe38cc8c7f05 }, + { -0x7e0f98a94ee417df, 0x0faff82310a3f3dd, -0x074d2faa9566b9a3, 0x097abe38cc8c7f05 }, { 0x17ef40e30c8d3982, 0x31f7073e15a3fa34, 0x4f21f3cb0773646e, 0x746c6c6d1d824eff }, { 0x0c49c9877ea52da4, 0x4c4369559bdc1d43, 0x022c3809f7ccebd2, 0x577e14a34bee84bd } }, { - { 0x94fecebebd4dd72b, 0xf46a4fda060f2211, 0x124a5977c0c8d1ff, 0x705304b8fb009295 }, - { 
0xf0e268ac61a73b0a, 0xf2fafa103791a5f5, 0xc1e13e826b6d00e9, 0x60fa7ee96fd78f42 }, - { 0xb63d1d354d296ec6, 0xf3c3053e5fad31d8, 0x670b958cb4bd42ec, 0x21398e0ca16353fd } + { -0x6b01314142b228d5, -0x0b95b025f9f0ddef, 0x124a5977c0c8d1ff, 0x705304b8fb009295 }, + { -0x0f1d97539e58c4f6, -0x0d0505efc86e5a0b, -0x3e1ec17d9492ff17, 0x60fa7ee96fd78f42 }, + { -0x49c2e2cab2d6913a, -0x0c3cfac1a052ce28, 0x670b958cb4bd42ec, 0x21398e0ca16353fd } }, }, { { - { 0x86c5fc16861b7e9a, 0xf6a330476a27c451, 0x01667267a1e93597, 0x05ffb9cd6082dfeb }, - { 0x216ab2ca8da7d2ef, 0x366ad9dd99f42827, 0xae64b9004fdd3c75, 0x403a395b53909e62 }, - { 0xa617fa9ff53f6139, 0x60f2b5e513e66cb6, 0xd7a8beefb3448aa4, 0x7a2932856f5ea192 } + { -0x793a03e979e48166, -0x095ccfb895d83baf, 0x01667267a1e93597, 0x05ffb9cd6082dfeb }, + { 0x216ab2ca8da7d2ef, 0x366ad9dd99f42827, -0x519b46ffb022c38b, 0x403a395b53909e62 }, + { -0x59e805600ac09ec7, 0x60f2b5e513e66cb6, -0x285741104cbb755c, 0x7a2932856f5ea192 } }, { - { 0xb89c444879639302, 0x4ae4f19350c67f2c, 0xf0b35da8c81af9c6, 0x39d0003546871017 }, - { 0x0b39d761b02de888, 0x5f550e7ed2414e1f, 0xa6bfa45822e1a940, 0x050a2f7dfd447b99 }, - { 0x437c3b33a650db77, 0x6bafe81dbac52bb2, 0xfe99402d2db7d318, 0x2b5b7eec372ba6ce } + { -0x4763bbb7869c6cfe, 0x4ae4f19350c67f2c, -0x0f4ca25737e5063a, 0x39d0003546871017 }, + { 0x0b39d761b02de888, 0x5f550e7ed2414e1f, -0x59405ba7dd1e56c0, 0x050a2f7dfd447b99 }, + { 0x437c3b33a650db77, 0x6bafe81dbac52bb2, -0x0166bfd2d2482ce8, 0x2b5b7eec372ba6ce } }, { - { 0xa694404d613ac8f4, 0x500c3c2bfa97e72c, 0x874104d21fcec210, 0x1b205fb38604a8ee }, - { 0xb3bc4bbd83f50eef, 0x508f0c998c927866, 0x43e76587c8b7e66e, 0x0f7655a3a47f98d9 }, + { -0x596bbfb29ec5370c, 0x500c3c2bfa97e72c, -0x78befb2de0313df0, 0x1b205fb38604a8ee }, + { -0x4c43b4427c0af111, 0x508f0c998c927866, 0x43e76587c8b7e66e, 0x0f7655a3a47f98d9 }, { 0x55ecad37d24b133c, 0x441e147d6038c90b, 0x656683a1d62c6fee, 0x0157d5dc87e0ecae } }, { - { 0x95265514d71eb524, 0xe603d8815df14593, 0x147cdf410d4de6b7, 
0x5293b1730437c850 }, - { 0xf2a7af510354c13d, 0xd7a0b145aa372b60, 0x2869b96a05a3d470, 0x6528e42d82460173 }, - { 0x23d0e0814bccf226, 0x92c745cd8196fb93, 0x8b61796c59541e5b, 0x40a44df0c021f978 } + { -0x6ad9aaeb28e14adc, -0x19fc277ea20eba6d, 0x147cdf410d4de6b7, 0x5293b1730437c850 }, + { -0x0d5850aefcab3ec3, -0x285f4eba55c8d4a0, 0x2869b96a05a3d470, 0x6528e42d82460173 }, + { 0x23d0e0814bccf226, -0x6d38ba327e69046d, -0x749e8693a6abe1a5, 0x40a44df0c021f978 } }, { - { 0x86c96e514bc5d095, 0xf20d4098fca6804a, 0x27363d89c826ea5d, 0x39ca36565719cacf }, - { 0xdaa869894f20ea6a, 0xea14a3d14c620618, 0x6001fccb090bf8be, 0x35f4e822947e9cf0 }, - { 0x97506f2f6f87b75c, 0xc624aea0034ae070, 0x1ec856e3aad34dd6, 0x055b0be0e440e58f } + { -0x793691aeb43a2f6b, -0x0df2bf6703597fb6, 0x27363d89c826ea5d, 0x39ca36565719cacf }, + { -0x25579676b0df1596, -0x15eb5c2eb39df9e8, 0x6001fccb090bf8be, 0x35f4e822947e9cf0 }, + { -0x68af90d0907848a4, -0x39db515ffcb51f90, 0x1ec856e3aad34dd6, 0x055b0be0e440e58f } }, { - { 0x4d12a04b6ea33da2, 0x57cf4c15e36126dd, 0x90ec9675ee44d967, 0x64ca348d2a985aac }, - { 0x6469a17d89735d12, 0xdb6f27d5e662b9f1, 0x9fcba3286a395681, 0x363b8004d269af25 }, - { 0x99588e19e4c4912d, 0xefcc3b4e1ca5ce6b, 0x4522ea60fa5b98d5, 0x7064bbab1de4a819 } + { 0x4d12a04b6ea33da2, 0x57cf4c15e36126dd, -0x6f13698a11bb2699, 0x64ca348d2a985aac }, + { 0x6469a17d89735d12, -0x2490d82a199d460f, -0x60345cd795c6a97f, 0x363b8004d269af25 }, + { -0x66a771e61b3b6ed3, -0x1033c4b1e35a3195, 0x4522ea60fa5b98d5, 0x7064bbab1de4a819 } }, { - { 0xa290c06142542129, 0xf2e2c2aebe8d5b90, 0xcf2458db76abfe1b, 0x02157ade83d626bf }, - { 0xb919e1515a770641, 0xa9a2e2c74e7f8039, 0x7527250b3df23109, 0x756a7330ac27b78b }, - { 0x3e46972a1b9a038b, 0x2e4ee66a7ee03fb4, 0x81a248776edbb4ca, 0x1a944ee88ecd0563 } + { -0x5d6f3f9ebdabded7, -0x0d1d3d514172a470, -0x30dba724895401e5, 0x02157ade83d626bf }, + { -0x46e61eaea588f9bf, -0x565d1d38b1807fc7, 0x7527250b3df23109, 0x756a7330ac27b78b }, + { 0x3e46972a1b9a038b, 0x2e4ee66a7ee03fb4, 
-0x7e5db78891244b36, 0x1a944ee88ecd0563 } }, { - { 0xbb40a859182362d6, 0xb99f55778a4d1abb, 0x8d18b427758559f6, 0x26c20fe74d26235a }, - { 0xd5a91d1151039372, 0x2ed377b799ca26de, 0xa17202acfd366b6b, 0x0730291bd6901995 }, + { -0x44bf57a6e7dc9d2a, -0x4660aa8875b2e545, -0x72e74bd88a7aa60a, 0x26c20fe74d26235a }, + { -0x2a56e2eeaefc6c8e, 0x2ed377b799ca26de, -0x5e8dfd5302c99495, 0x0730291bd6901995 }, { 0x648d1d9fe9cc22f5, 0x66bc561928dd577c, 0x47d3ed21652439d1, 0x49d271acedaf8b49 } }, }, { { - { 0x2798aaf9b4b75601, 0x5eac72135c8dad72, 0xd2ceaa6161b7a023, 0x1bbfb284e98f7d4e }, - { 0x89f5058a382b33f3, 0x5ae2ba0bad48c0b4, 0x8f93b503a53db36e, 0x5aa3ed9d95a232e6 }, - { 0x656777e9c7d96561, 0xcb2b125472c78036, 0x65053299d9506eee, 0x4a07e14e5e8957cc } + { 0x2798aaf9b4b75601, 0x5eac72135c8dad72, -0x2d31559e9e485fdd, 0x1bbfb284e98f7d4e }, + { -0x760afa75c7d4cc0d, 0x5ae2ba0bad48c0b4, -0x706c4afc5ac24c92, 0x5aa3ed9d95a232e6 }, + { 0x656777e9c7d96561, -0x34d4edab8d387fca, 0x65053299d9506eee, 0x4a07e14e5e8957cc } }, { - { 0x240b58cdc477a49b, 0xfd38dade6447f017, 0x19928d32a7c86aad, 0x50af7aed84afa081 }, - { 0x4ee412cb980df999, 0xa315d76f3c6ec771, 0xbba5edde925c77fd, 0x3f0bac391d313402 }, + { 0x240b58cdc477a49b, -0x02c725219bb80fe9, 0x19928d32a7c86aad, 0x50af7aed84afa081 }, + { 0x4ee412cb980df999, -0x5cea2890c391388f, -0x445a12216da38803, 0x3f0bac391d313402 }, { 0x6e4fde0115f65be5, 0x29982621216109b2, 0x780205810badd6d9, 0x1921a316baebd006 } }, { - { 0xd75aad9ad9f3c18b, 0x566a0eef60b1c19c, 0x3e9a0bac255c0ed9, 0x7b049deca062c7f5 }, - { 0x89422f7edfb870fc, 0x2c296beb4f76b3bd, 0x0738f1d436c24df7, 0x6458df41e273aeb0 }, - { 0xdccbe37a35444483, 0x758879330fedbe93, 0x786004c312c5dd87, 0x6093dccbc2950e64 } + { -0x28a55265260c3e75, 0x566a0eef60b1c19c, 0x3e9a0bac255c0ed9, 0x7b049deca062c7f5 }, + { -0x76bdd08120478f04, 0x2c296beb4f76b3bd, 0x0738f1d436c24df7, 0x6458df41e273aeb0 }, + { -0x23341c85cabbbb7d, 0x758879330fedbe93, 0x786004c312c5dd87, 0x6093dccbc2950e64 } }, { - { 0x6bdeeebe6084034b, 
0x3199c2b6780fb854, 0x973376abb62d0695, 0x6e3180c98b647d90 }, + { 0x6bdeeebe6084034b, 0x3199c2b6780fb854, -0x68cc895449d2f96b, 0x6e3180c98b647d90 }, { 0x1ff39a8585e0706d, 0x36d0a5d8b3e73933, 0x43b9f2e1718f453b, 0x57d1ea084827a97c }, - { 0xee7ab6e7a128b071, 0xa4c1596d93a88baa, 0xf7b4de82b2216130, 0x363e999ddd97bd18 } + { -0x118549185ed74f8f, -0x5b3ea6926c577456, -0x084b217d4dde9ed0, 0x363e999ddd97bd18 } }, { - { 0x2f1848dce24baec6, 0x769b7255babcaf60, 0x90cb3c6e3cefe931, 0x231f979bc6f9b355 }, - { 0x96a843c135ee1fc4, 0x976eb35508e4c8cf, 0xb42f6801b58cd330, 0x48ee9b78693a052b }, - { 0x5c31de4bcc2af3c6, 0xb04bb030fe208d1f, 0xb78d7009c14fb466, 0x079bfa9b08792413 } + { 0x2f1848dce24baec6, 0x769b7255babcaf60, -0x6f34c391c31016cf, 0x231f979bc6f9b355 }, + { -0x6957bc3eca11e03c, -0x68914caaf71b3731, -0x4bd097fe4a732cd0, 0x48ee9b78693a052b }, + { 0x5c31de4bcc2af3c6, -0x4fb44fcf01df72e1, -0x48728ff63eb04b9a, 0x079bfa9b08792413 } }, { - { 0xf3c9ed80a2d54245, 0x0aa08b7877f63952, 0xd76dac63d1085475, 0x1ef4fb159470636b }, - { 0xe3903a51da300df4, 0x843964233da95ab0, 0xed3cf12d0b356480, 0x038c77f684817194 }, - { 0x854e5ee65b167bec, 0x59590a4296d0cdc2, 0x72b2df3498102199, 0x575ee92a4a0bff56 } + { -0x0c36127f5d2abdbb, 0x0aa08b7877f63952, -0x2892539c2ef7ab8b, 0x1ef4fb159470636b }, + { -0x1c6fc5ae25cff20c, -0x7bc69bdcc256a550, -0x12c30ed2f4ca9b80, 0x038c77f684817194 }, + { -0x7ab1a119a4e98414, 0x59590a4296d0cdc2, 0x72b2df3498102199, 0x575ee92a4a0bff56 } }, { - { 0x5d46bc450aa4d801, 0xc3af1227a533b9d8, 0x389e3b262b8906c2, 0x200a1e7e382f581b }, - { 0xd4c080908a182fcf, 0x30e170c299489dbd, 0x05babd5752f733de, 0x43d4e7112cd3fd00 }, - { 0x518db967eaf93ac5, 0x71bc989b056652c0, 0xfe2b85d9567197f5, 0x050eca52651e4e38 } + { 0x5d46bc450aa4d801, -0x3c50edd85acc4628, 0x389e3b262b8906c2, 0x200a1e7e382f581b }, + { -0x2b3f7f6f75e7d031, 0x30e170c299489dbd, 0x05babd5752f733de, 0x43d4e7112cd3fd00 }, + { 0x518db967eaf93ac5, 0x71bc989b056652c0, -0x01d47a26a98e680b, 0x050eca52651e4e38 } }, { - { 
0x97ac397660e668ea, 0x9b19bbfe153ab497, 0x4cb179b534eca79f, 0x6151c09fa131ae57 }, - { 0xc3431ade453f0c9c, 0xe9f5045eff703b9b, 0xfcd97ac9ed847b3d, 0x4b0ee6c21c58f4c6 }, - { 0x3af55c0dfdf05d96, 0xdd262ee02ab4ee7a, 0x11b2bb8712171709, 0x1fef24fa800f030b } + { -0x6853c6899f199716, -0x64e64401eac54b69, 0x4cb179b534eca79f, 0x6151c09fa131ae57 }, + { -0x3cbce521bac0f364, -0x160afba1008fc465, -0x03268536127b84c3, 0x4b0ee6c21c58f4c6 }, + { 0x3af55c0dfdf05d96, -0x22d9d11fd54b1186, 0x11b2bb8712171709, 0x1fef24fa800f030b } }, }, { { - { 0xff91a66a90166220, 0xf22552ae5bf1e009, 0x7dff85d87f90df7c, 0x4f620ffe0c736fb9 }, - { 0xb496123a6b6c6609, 0xa750fe8580ab5938, 0xf471bf39b7c27a5f, 0x507903ce77ac193c }, - { 0x62f90d65dfde3e34, 0xcf28c592b9fa5fad, 0x99c86ef9c6164510, 0x25d448044a256c84 } + { -0x006e59956fe99de0, -0x0ddaad51a40e1ff7, 0x7dff85d87f90df7c, 0x4f620ffe0c736fb9 }, + { -0x4b69edc5949399f7, -0x58af017a7f54a6c8, -0x0b8e40c6483d85a1, 0x507903ce77ac193c }, + { 0x62f90d65dfde3e34, -0x30d73a6d4605a053, -0x6637910639e9baf0, 0x25d448044a256c84 } }, { - { 0x2c7c4415c9022b55, 0x56a0d241812eb1fe, 0xf02ea1c9d7b65e0d, 0x4180512fd5323b26 }, - { 0xbd68230ec7e9b16f, 0x0eb1b9c1c1c5795d, 0x7943c8c495b6b1ff, 0x2f9faf620bbacf5e }, - { 0xa4ff3e698a48a5db, 0xba6a3806bd95403b, 0x9f7ce1af47d5b65d, 0x15e087e55939d2fb } + { 0x2c7c4415c9022b55, 0x56a0d241812eb1fe, -0x0fd15e362849a1f3, 0x4180512fd5323b26 }, + { -0x4297dcf138164e91, 0x0eb1b9c1c1c5795d, 0x7943c8c495b6b1ff, 0x2f9faf620bbacf5e }, + { -0x5b00c19675b75a25, -0x4595c7f9426abfc5, -0x60831e50b82a49a3, 0x15e087e55939d2fb } }, { - { 0x8894186efb963f38, 0x48a00e80dc639bd5, 0xa4e8092be96c1c99, 0x5a097d54ca573661 }, - { 0x12207543745c1496, 0xdaff3cfdda38610c, 0xe4e797272c71c34f, 0x39c07b1934bdede9 }, - { 0x2d45892b17c9e755, 0xd033fd7289308df8, 0x6c2fe9d9525b8bd9, 0x2edbecf1c11cc079 } + { -0x776be7910469c0c8, 0x48a00e80dc639bd5, -0x5b17f6d41693e367, 0x5a097d54ca573661 }, + { 0x12207543745c1496, -0x2500c30225c79ef4, -0x1b1868d8d38e3cb1, 
0x39c07b1934bdede9 }, + { 0x2d45892b17c9e755, -0x2fcc028d76cf7208, 0x6c2fe9d9525b8bd9, 0x2edbecf1c11cc079 } }, { - { 0xee0f0fddd087a25f, 0x9c7531555c3e34ee, 0x660c572e8fab3ab5, 0x0854fc44544cd3b2 }, - { 0x1616a4e3c715a0d2, 0x53623cb0f8341d4d, 0x96ef5329c7e899cb, 0x3d4e8dbba668baa6 }, + { -0x11f0f0222f785da1, -0x638aceaaa3c1cb12, 0x660c572e8fab3ab5, 0x0854fc44544cd3b2 }, + { 0x1616a4e3c715a0d2, 0x53623cb0f8341d4d, -0x6910acd638176635, 0x3d4e8dbba668baa6 }, { 0x61eba0c555edad19, 0x24b533fef0a83de6, 0x3b77042883baa5f8, 0x678f82b898a47e8d } }, { - { 0x1e09d94057775696, 0xeed1265c3cd951db, 0xfa9dac2b20bce16f, 0x0f7f76e0e8d089f4 }, - { 0xb1491d0bd6900c54, 0x3539722c9d132636, 0x4db928920b362bc9, 0x4d7cd1fea68b69df }, - { 0x36d9ebc5d485b00c, 0xa2596492e4adb365, 0xc1659480c2119ccd, 0x45306349186e0d5f } + { 0x1e09d94057775696, -0x112ed9a3c326ae25, -0x056253d4df431e91, 0x0f7f76e0e8d089f4 }, + { -0x4eb6e2f4296ff3ac, 0x3539722c9d132636, 0x4db928920b362bc9, 0x4d7cd1fea68b69df }, + { 0x36d9ebc5d485b00c, -0x5da69b6d1b524c9b, -0x3e9a6b7f3dee6333, 0x45306349186e0d5f } }, { - { 0x96a414ec2b072491, 0x1bb2218127a7b65b, 0x6d2849596e8a4af0, 0x65f3b08ccd27765f }, - { 0x94ddd0c1a6cdff1d, 0x55f6f115e84213ae, 0x6c935f85992fcf6a, 0x067ee0f54a37f16f }, - { 0xecb29fff199801f7, 0x9d361d1fa2a0f72f, 0x25f11d2375fd2f49, 0x124cefe80fe10fe2 } + { -0x695beb13d4f8db6f, 0x1bb2218127a7b65b, 0x6d2849596e8a4af0, 0x65f3b08ccd27765f }, + { -0x6b222f3e593200e3, 0x55f6f115e84213ae, 0x6c935f85992fcf6a, 0x067ee0f54a37f16f }, + { -0x134d6000e667fe09, -0x62c9e2e05d5f08d1, 0x25f11d2375fd2f49, 0x124cefe80fe10fe2 } }, { - { 0x1518e85b31b16489, 0x8faadcb7db710bfb, 0x39b0bdf4a14ae239, 0x05f4cbea503d20c1 }, - { 0x4c126cf9d18df255, 0xc1d471e9147a63b6, 0x2c6d3c73f3c93b5f, 0x6be3a6a2e3ff86a2 }, - { 0xce040e9ec04145bc, 0xc71ff4e208f6834c, 0xbd546e8dab8847a3, 0x64666aa0a4d2aba5 } + { 0x1518e85b31b16489, -0x70552348248ef405, 0x39b0bdf4a14ae239, 0x05f4cbea503d20c1 }, + { 0x4c126cf9d18df255, -0x3e2b8e16eb859c4a, 
0x2c6d3c73f3c93b5f, 0x6be3a6a2e3ff86a2 }, + { -0x31fbf1613fbeba44, -0x38e00b1df7097cb4, -0x42ab91725477b85d, 0x64666aa0a4d2aba5 } }, { - { 0xb0c53bf73337e94c, 0x7cb5697e11e14f15, 0x4b84abac1930c750, 0x28dd4abfe0640468 }, - { 0x6841435a7c06d912, 0xca123c21bb3f830b, 0xd4b37b27b1cbe278, 0x1d753b84c76f5046 }, + { -0x4f3ac408ccc816b4, 0x7cb5697e11e14f15, 0x4b84abac1930c750, 0x28dd4abfe0640468 }, + { 0x6841435a7c06d912, -0x35edc3de44c07cf5, -0x2b4c84d84e341d88, 0x1d753b84c76f5046 }, { 0x7dc0b64c44cb9f44, 0x18a3e1ace3925dbf, 0x7a3034862d0457c4, 0x4c498bf78a0c892e } }, }, { { - { 0x22d2aff530976b86, 0x8d90b806c2d24604, 0xdca1896c4de5bae5, 0x28005fe6c8340c17 }, - { 0x37d653fb1aa73196, 0x0f9495303fd76418, 0xad200b09fb3a17b2, 0x544d49292fc8613e }, - { 0x6aefba9f34528688, 0x5c1bff9425107da1, 0xf75bbbcd66d94b36, 0x72e472930f316dfa } + { 0x22d2aff530976b86, -0x726f47f93d2db9fc, -0x235e7693b21a451b, 0x28005fe6c8340c17 }, + { 0x37d653fb1aa73196, 0x0f9495303fd76418, -0x52dff4f604c5e84e, 0x544d49292fc8613e }, + { 0x6aefba9f34528688, 0x5c1bff9425107da1, -0x08a444329926b4ca, 0x72e472930f316dfa } }, { { 0x07f3f635d32a7627, 0x7aaa4d865f6566f0, 0x3c85e79728d04450, 0x1fee7f000fe06438 }, - { 0x2695208c9781084f, 0xb1502a0b23450ee1, 0xfd9daea603efde02, 0x5a9d2e8c2733a34c }, - { 0x765305da03dbf7e5, 0xa4daf2491434cdbd, 0x7b4ad5cdd24a88ec, 0x00f94051ee040543 } + { 0x2695208c9781084f, -0x4eafd5f4dcbaf11f, -0x02625159fc1021fe, 0x5a9d2e8c2733a34c }, + { 0x765305da03dbf7e5, -0x5b250db6ebcb3243, 0x7b4ad5cdd24a88ec, 0x00f94051ee040543 } }, { - { 0xd7ef93bb07af9753, 0x583ed0cf3db766a7, 0xce6998bf6e0b1ec5, 0x47b7ffd25dd40452 }, - { 0x8d356b23c3d330b2, 0xf21c8b9bb0471b06, 0xb36c316c6e42b83c, 0x07d79c7e8beab10d }, - { 0x87fbfb9cbc08dd12, 0x8a066b3ae1eec29b, 0x0d57242bdb1fc1bf, 0x1c3520a35ea64bb6 } + { -0x28106c44f85068ad, 0x583ed0cf3db766a7, -0x3196674091f4e13b, 0x47b7ffd25dd40452 }, + { -0x72ca94dc3c2ccf4e, -0x0de374644fb8e4fa, -0x4c93ce9391bd47c4, 0x07d79c7e8beab10d }, + { -0x7804046343f722ee, 
-0x75f994c51e113d65, 0x0d57242bdb1fc1bf, 0x1c3520a35ea64bb6 } }, { - { 0xcda86f40216bc059, 0x1fbb231d12bcd87e, 0xb4956a9e17c70990, 0x38750c3b66d12e55 }, - { 0x80d253a6bccba34a, 0x3e61c3a13838219b, 0x90c3b6019882e396, 0x1c3d05775d0ee66f }, - { 0x692ef1409422e51a, 0xcbc0c73c2b5df671, 0x21014fe7744ce029, 0x0621e2c7d330487c } + { -0x325790bfde943fa7, 0x1fbb231d12bcd87e, -0x4b6a9561e838f670, 0x38750c3b66d12e55 }, + { -0x7f2dac5943345cb6, 0x3e61c3a13838219b, -0x6f3c49fe677d1c6a, 0x1c3d05775d0ee66f }, + { 0x692ef1409422e51a, -0x343f38c3d4a2098f, 0x21014fe7744ce029, 0x0621e2c7d330487c } }, { - { 0xb7ae1796b0dbf0f3, 0x54dfafb9e17ce196, 0x25923071e9aaa3b4, 0x5d8e589ca1002e9d }, - { 0xaf9860cc8259838d, 0x90ea48c1c69f9adc, 0x6526483765581e30, 0x0007d6097bd3a5bc }, - { 0xc0bf1d950842a94b, 0xb2d3c363588f2e3e, 0x0a961438bb51e2ef, 0x1583d7783c1cbf86 } + { -0x4851e8694f240f0d, 0x54dfafb9e17ce196, 0x25923071e9aaa3b4, 0x5d8e589ca1002e9d }, + { -0x50679f337da67c73, -0x6f15b73e39606524, 0x6526483765581e30, 0x0007d6097bd3a5bc }, + { -0x3f40e26af7bd56b5, -0x4d2c3c9ca770d1c2, 0x0a961438bb51e2ef, 0x1583d7783c1cbf86 } }, { - { 0x90034704cc9d28c7, 0x1d1b679ef72cc58f, 0x16e12b5fbe5b8726, 0x4958064e83c5580a }, - { 0xeceea2ef5da27ae1, 0x597c3a1455670174, 0xc9a62a126609167a, 0x252a5f2e81ed8f70 }, - { 0x0d2894265066e80d, 0xfcc3f785307c8c6b, 0x1b53da780c1112fd, 0x079c170bd843b388 } + { -0x6ffcb8fb3362d739, 0x1d1b679ef72cc58f, 0x16e12b5fbe5b8726, 0x4958064e83c5580a }, + { -0x13115d10a25d851f, 0x597c3a1455670174, -0x3659d5ed99f6e986, 0x252a5f2e81ed8f70 }, + { 0x0d2894265066e80d, -0x033c087acf837395, 0x1b53da780c1112fd, 0x079c170bd843b388 } }, { - { 0xcdd6cd50c0d5d056, 0x9af7686dbb03573b, 0x3ca6723ff3c3ef48, 0x6768c0d7317b8acc }, - { 0x0506ece464fa6fff, 0xbee3431e6205e523, 0x3579422451b8ea42, 0x6dec05e34ac9fb00 }, - { 0x94b625e5f155c1b3, 0x417bf3a7997b7b91, 0xc22cbddc6d6b2600, 0x51445e14ddcd52f4 } + { -0x322932af3f2a2faa, -0x6508979244fca8c5, 0x3ca6723ff3c3ef48, 0x6768c0d7317b8acc }, + { 
0x0506ece464fa6fff, -0x411cbce19dfa1add, 0x3579422451b8ea42, 0x6dec05e34ac9fb00 }, + { -0x6b49da1a0eaa3e4d, 0x417bf3a7997b7b91, -0x3dd342239294da00, 0x51445e14ddcd52f4 } }, { - { 0x893147ab2bbea455, 0x8c53a24f92079129, 0x4b49f948be30f7a7, 0x12e990086e4fd43d }, - { 0x57502b4b3b144951, 0x8e67ff6b444bbcb3, 0xb8bd6927166385db, 0x13186f31e39295c8 }, - { 0xf10c96b37fdfbb2e, 0x9f9a935e121ceaf9, 0xdf1136c43a5b983f, 0x77b2e3f05d3e99af } + { -0x76ceb854d4415bab, -0x73ac5db06df86ed7, 0x4b49f948be30f7a7, 0x12e990086e4fd43d }, + { 0x57502b4b3b144951, -0x71980094bbb4434d, -0x474296d8e99c7a25, 0x13186f31e39295c8 }, + { -0x0ef3694c802044d2, -0x60656ca1ede31507, -0x20eec93bc5a467c1, 0x77b2e3f05d3e99af } }, }, { { - { 0x9532f48fcc5cd29b, 0x2ba851bea3ce3671, 0x32dacaa051122941, 0x478d99d9350004f2 }, - { 0xfd0d75879cf12657, 0xe82fef94e53a0e29, 0xcc34a7f05bbb4be7, 0x0b251172a50c38a2 }, - { 0x1d5ad94890bb02c0, 0x50e208b10ec25115, 0xa26a22894ef21702, 0x4dc923343b524805 } + { -0x6acd0b7033a32d65, 0x2ba851bea3ce3671, 0x32dacaa051122941, 0x478d99d9350004f2 }, + { -0x02f28a78630ed9a9, -0x17d0106b1ac5f1d7, -0x33cb580fa444b419, 0x0b251172a50c38a2 }, + { 0x1d5ad94890bb02c0, 0x50e208b10ec25115, -0x5d95dd76b10de8fe, 0x4dc923343b524805 } }, { - { 0x3ad3e3ebf36c4975, 0xd75d25a537862125, 0xe873943da025a516, 0x6bbc7cb4c411c847 }, - { 0xe3828c400f8086b6, 0x3f77e6f7979f0dc8, 0x7ef6de304df42cb4, 0x5265797cb6abd784 }, - { 0x3c6f9cd1d4a50d56, 0xb6244077c6feab7e, 0x6ff9bf483580972e, 0x00375883b332acfb } + { 0x3ad3e3ebf36c4975, -0x28a2da5ac879dedb, -0x178c6bc25fda5aea, 0x6bbc7cb4c411c847 }, + { -0x1c7d73bff07f794a, 0x3f77e6f7979f0dc8, 0x7ef6de304df42cb4, 0x5265797cb6abd784 }, + { 0x3c6f9cd1d4a50d56, -0x49dbbf8839015482, 0x6ff9bf483580972e, 0x00375883b332acfb } }, { - { 0xc98bec856c75c99c, 0xe44184c000e33cf4, 0x0a676b9bba907634, 0x669e2cb571f379d7 }, - { 0x0001b2cd28cb0940, 0x63fb51a06f1c24c9, 0xb5ad8691dcd5ca31, 0x67238dbd8c450660 }, - { 0xcb116b73a49bd308, 0x025aad6b2392729e, 0xb4793efa3f55d9b1, 
0x72a1056140678bb9 } + { -0x3674137a938a3664, -0x1bbe7b3fff1cc30c, 0x0a676b9bba907634, 0x669e2cb571f379d7 }, + { 0x0001b2cd28cb0940, 0x63fb51a06f1c24c9, -0x4a52796e232a35cf, 0x67238dbd8c450660 }, + { -0x34ee948c5b642cf8, 0x025aad6b2392729e, -0x4b86c105c0aa264f, 0x72a1056140678bb9 } }, { - { 0x0d8d2909e2e505b6, 0x98ca78abc0291230, 0x77ef5569a9b12327, 0x7c77897b81439b47 }, - { 0xa2b6812b1cc9249d, 0x62866eee21211f58, 0x2cb5c5b85df10ece, 0x03a6b259e263ae00 }, - { 0xf1c1b5e2de331cb5, 0x5a9f5d8e15fca420, 0x9fa438f17bd932b1, 0x2a381bf01c6146e7 } + { 0x0d8d2909e2e505b6, -0x673587543fd6edd0, 0x77ef5569a9b12327, 0x7c77897b81439b47 }, + { -0x5d497ed4e336db63, 0x62866eee21211f58, 0x2cb5c5b85df10ece, 0x03a6b259e263ae00 }, + { -0x0e3e4a1d21cce34b, 0x5a9f5d8e15fca420, -0x605bc70e8426cd4f, 0x2a381bf01c6146e7 } }, { - { 0xf7c0be32b534166f, 0x27e6ca6419cf70d4, 0x934df7d7a957a759, 0x5701461dabdec2aa }, - { 0xac9b9879cfc811c1, 0x8b7d29813756e567, 0x50da4e607c70edfc, 0x5dbca62f884400b6 }, + { -0x083f41cd4acbe991, 0x27e6ca6419cf70d4, -0x6cb2082856a858a7, 0x5701461dabdec2aa }, + { -0x536467863037ee3f, -0x7482d67ec8a91a99, 0x50da4e607c70edfc, 0x5dbca62f884400b6 }, { 0x2c6747402c915c25, 0x1bdcd1a80b0d340a, 0x5e5601bd07b43f5f, 0x2555b4e05539a242 } }, { - { 0x78409b1d87e463d4, 0xad4da95acdfb639d, 0xec28773755259b9c, 0x69c806e9c31230ab }, - { 0x6fc09f5266ddd216, 0xdce560a7c8e37048, 0xec65939da2df62fd, 0x7a869ae7e52ed192 }, - { 0x7b48f57414bb3f22, 0x68c7cee4aedccc88, 0xed2f936179ed80be, 0x25d70b885f77bc4b } + { 0x78409b1d87e463d4, -0x52b256a532049c63, -0x13d788c8aada6464, 0x69c806e9c31230ab }, + { 0x6fc09f5266ddd216, -0x231a9f58371c8fb8, -0x139a6c625d209d03, 0x7a869ae7e52ed192 }, + { 0x7b48f57414bb3f22, 0x68c7cee4aedccc88, -0x12d06c9e86127f42, 0x25d70b885f77bc4b } }, { - { 0x98459d29bb1ae4d4, 0x56b9c4c739f954ec, 0x832743f6c29b4b3e, 0x21ea8e2798b6878a }, + { -0x67ba62d644e51b2c, 0x56b9c4c739f954ec, -0x7cd8bc093d64b4c2, 0x21ea8e2798b6878a }, { 0x4151c3d9762bf4de, 0x083f435f2745d82b, 
0x29775a2e0d23ddd5, 0x138e3a6269a5db24 }, - { 0x87bef4b46a5a7b9c, 0xd2299d1b5fc1d062, 0x82409818dd321648, 0x5c5abeb1e5a2e03d } + { -0x78410b4b95a58464, -0x2dd662e4a03e2f9e, -0x7dbf67e722cde9b8, 0x5c5abeb1e5a2e03d } }, { - { 0x02cde6de1306a233, 0x7b5a52a2116f8ec7, 0xe1c681f4c1163b5b, 0x241d350660d32643 }, - { 0x14722af4b73c2ddb, 0xbc470c5f5a05060d, 0x00943eac2581b02e, 0x0e434b3b1f499c8f }, - { 0x6be4404d0ebc52c7, 0xae46233bb1a791f5, 0x2aec170ed25db42b, 0x1d8dfd966645d694 } + { 0x02cde6de1306a233, 0x7b5a52a2116f8ec7, -0x1e397e0b3ee9c4a5, 0x241d350660d32643 }, + { 0x14722af4b73c2ddb, -0x43b8f3a0a5faf9f3, 0x00943eac2581b02e, 0x0e434b3b1f499c8f }, + { 0x6be4404d0ebc52c7, -0x51b9dcc44e586e0b, 0x2aec170ed25db42b, 0x1d8dfd966645d694 } }, }, { { - { 0xd598639c12ddb0a4, 0xa5d19f30c024866b, 0xd17c2f0358fce460, 0x07a195152e095e8a }, - { 0x296fa9c59c2ec4de, 0xbc8b61bf4f84f3cb, 0x1c7706d917a8f908, 0x63b795fc7ad3255d }, - { 0xa8368f02389e5fc8, 0x90433b02cf8de43b, 0xafa1fd5dc5412643, 0x3e8fe83d032f0137 } + { -0x2a679c63ed224f5c, -0x5a2e60cf3fdb7995, -0x2e83d0fca7031ba0, 0x07a195152e095e8a }, + { 0x296fa9c59c2ec4de, -0x43749e40b07b0c35, 0x1c7706d917a8f908, 0x63b795fc7ad3255d }, + { -0x57c970fdc761a038, -0x6fbcc4fd30721bc5, -0x505e02a23abed9bd, 0x3e8fe83d032f0137 } }, { - { 0x08704c8de8efd13c, 0xdfc51a8e33e03731, 0xa59d5da51260cde3, 0x22d60899a6258c86 }, - { 0x2f8b15b90570a294, 0x94f2427067084549, 0xde1c5ae161bbfd84, 0x75ba3b797fac4007 }, - { 0x6239dbc070cdd196, 0x60fe8a8b6c7d8a9a, 0xb38847bceb401260, 0x0904d07b87779e5e } + { 0x08704c8de8efd13c, -0x203ae571cc1fc8cf, -0x5a62a25aed9f321d, 0x22d60899a6258c86 }, + { 0x2f8b15b90570a294, -0x6b0dbd8f98f7bab7, -0x21e3a51e9e44027c, 0x75ba3b797fac4007 }, + { 0x6239dbc070cdd196, 0x60fe8a8b6c7d8a9a, -0x4c77b84314bfeda0, 0x0904d07b87779e5e } }, { - { 0xf4322d6648f940b9, 0x06952f0cbd2d0c39, 0x167697ada081f931, 0x6240aacebaf72a6c }, - { 0xb4ce1fd4ddba919c, 0xcf31db3ec74c8daa, 0x2c63cc63ad86cc51, 0x43e2143fbc1dde07 }, - { 0xf834749c5ba295a0, 
0xd6947c5bca37d25a, 0x66f13ba7e7c9316a, 0x56bdaf238db40cac } + { -0x0bcdd299b706bf47, 0x06952f0cbd2d0c39, 0x167697ada081f931, 0x6240aacebaf72a6c }, + { -0x4b31e02b22456e64, -0x30ce24c138b37256, 0x2c63cc63ad86cc51, 0x43e2143fbc1dde07 }, + { -0x07cb8b63a45d6a60, -0x296b83a435c82da6, 0x66f13ba7e7c9316a, 0x56bdaf238db40cac } }, { { 0x1310d36cc19d3bb2, 0x062a6bb7622386b9, 0x7c9b8591d7a14f5c, 0x03aa31507e1e5754 }, - { 0x362ab9e3f53533eb, 0x338568d56eb93d40, 0x9e0e14521d5a5572, 0x1d24a86d83741318 }, - { 0xf4ec7648ffd4ce1f, 0xe045eaf054ac8c1c, 0x88d225821d09357c, 0x43b261dc9aeb4859 } + { 0x362ab9e3f53533eb, 0x338568d56eb93d40, -0x61f1ebade2a5aa8e, 0x1d24a86d83741318 }, + { -0x0b1389b7002b31e1, -0x1fba150fab5373e4, -0x772dda7de2f6ca84, 0x43b261dc9aeb4859 } }, { - { 0x19513d8b6c951364, 0x94fe7126000bf47b, 0x028d10ddd54f9567, 0x02b4d5e242940964 }, - { 0xe55b1e1988bb79bb, 0xa09ed07dc17a359d, 0xb02c2ee2603dea33, 0x326055cf5b276bc2 }, - { 0xb4a155cb28d18df2, 0xeacc4646186ce508, 0xc49cf4936c824389, 0x27a6c809ae5d3410 } + { 0x19513d8b6c951364, -0x6b018ed9fff40b85, 0x028d10ddd54f9567, 0x02b4d5e242940964 }, + { -0x1aa4e1e677448645, -0x5f612f823e85ca63, -0x4fd3d11d9fc215cd, 0x326055cf5b276bc2 }, + { -0x4b5eaa34d72e720e, -0x1533b9b9e7931af8, -0x3b630b6c937dbc77, 0x27a6c809ae5d3410 } }, { - { 0xcd2c270ac43d6954, 0xdd4a3e576a66cab2, 0x79fa592469d7036c, 0x221503603d8c2599 }, - { 0x8ba6ebcd1f0db188, 0x37d3d73a675a5be8, 0xf22edfa315f5585a, 0x2cb67174ff60a17e }, - { 0x59eecdf9390be1d0, 0xa9422044728ce3f1, 0x82891c667a94f0f4, 0x7b1df4b73890f436 } + { -0x32d3d8f53bc296ac, -0x22b5c1a89599354e, 0x79fa592469d7036c, 0x221503603d8c2599 }, + { -0x74591432e0f24e78, 0x37d3d73a675a5be8, -0x0dd1205cea0aa7a6, 0x2cb67174ff60a17e }, + { 0x59eecdf9390be1d0, -0x56bddfbb8d731c0f, -0x7d76e399856b0f0c, 0x7b1df4b73890f436 } }, { - { 0x5f2e221807f8f58c, 0xe3555c9fd49409d4, 0xb2aaa88d1fb6a630, 0x68698245d352e03d }, - { 0xe492f2e0b3b2a224, 0x7c6c9e062b551160, 0x15eb8fe20d7f7b0e, 0x61fcef2658fc5992 }, - { 
0xdbb15d852a18187a, 0xf3e4aad386ddacd7, 0x44bae2810ff6c482, 0x46cf4c473daf01cf } + { 0x5f2e221807f8f58c, -0x1caaa3602b6bf62c, -0x4d555772e04959d0, 0x68698245d352e03d }, + { -0x1b6d0d1f4c4d5ddc, 0x7c6c9e062b551160, 0x15eb8fe20d7f7b0e, 0x61fcef2658fc5992 }, + { -0x244ea27ad5e7e786, -0x0c1b552c79225329, 0x44bae2810ff6c482, 0x46cf4c473daf01cf } }, { { 0x213c6ea7f1498140, 0x7c1e7ef8392b4854, 0x2488c38c5629ceba, 0x1065aae50d8cc5bb }, @@ -2031,1640 +2031,1640 @@ static const ge_precomp base[64][8] = { }, { { - { 0x7b26e56b9e2d4734, 0xc4c7132b81c61675, 0xef5c9525ec9cde7f, 0x39c80b16e71743ad }, - { 0x7afcd613efa9d697, 0x0cc45aa41c067959, 0xa56fe104c1fada96, 0x3a73b70472e40365 }, - { 0x0f196e0d1b826c68, 0xf71ff0e24960e3db, 0x6113167023b7436c, 0x0cf0ea5877da7282 } + { 0x7b26e56b9e2d4734, -0x3b38ecd47e39e98b, -0x10a36ada13632181, 0x39c80b16e71743ad }, + { 0x7afcd613efa9d697, 0x0cc45aa41c067959, -0x5a901efb3e05256a, 0x3a73b70472e40365 }, + { 0x0f196e0d1b826c68, -0x08e00f1db69f1c25, 0x6113167023b7436c, 0x0cf0ea5877da7282 } }, { - { 0xe332ced43ba6945a, 0xde0b1361e881c05d, 0x1ad40f095e67ed3b, 0x5da8acdab8c63d5d }, - { 0x196c80a4ddd4ccbd, 0x22e6f55d95f2dd9d, 0xc75e33c740d6c71b, 0x7bb51279cb3c042f }, - { 0xc4b6664a3a70159f, 0x76194f0f0a904e14, 0xa5614c39a4096c13, 0x6cd0ff50979feced } + { -0x1ccd312bc4596ba6, -0x21f4ec9e177e3fa3, 0x1ad40f095e67ed3b, 0x5da8acdab8c63d5d }, + { 0x196c80a4ddd4ccbd, 0x22e6f55d95f2dd9d, -0x38a1cc38bf2938e5, 0x7bb51279cb3c042f }, + { -0x3b4999b5c58fea61, 0x76194f0f0a904e14, -0x5a9eb3c65bf693ed, 0x6cd0ff50979feced } }, { - { 0x7fecfabdb04ba18e, 0xd0fc7bfc3bddbcf7, 0xa41d486e057a131c, 0x641a4391f2223a61 }, - { 0xc0e067e78f4428ac, 0x14835ab0a61135e3, 0xf21d14f338062935, 0x6390a4c8df04849c }, - { 0xc5c6b95aa606a8db, 0x914b7f9eb06825f1, 0x2a731f6b44fc9eff, 0x30ddf38562705cfc } + { 0x7fecfabdb04ba18e, -0x2f038403c4224309, -0x5be2b791fa85ece4, 0x641a4391f2223a61 }, + { -0x3f1f981870bbd754, 0x14835ab0a61135e3, -0x0de2eb0cc7f9d6cb, 0x6390a4c8df04849c }, + { 
-0x3a3946a559f95725, -0x6eb480614f97da0f, 0x2a731f6b44fc9eff, 0x30ddf38562705cfc } }, { - { 0x33bef2bd68bcd52c, 0xc649dbb069482ef2, 0xb5b6ee0c41cb1aee, 0x5c294d270212a7e5 }, - { 0x4e3dcbdad1bff7f9, 0xc9118e8220645717, 0xbacccebc0f189d56, 0x1b4822e9d4467668 }, - { 0xab360a7f25563781, 0x2512228a480f7958, 0xc75d05276114b4e3, 0x222d9625d976fe2a } + { 0x33bef2bd68bcd52c, -0x39b6244f96b7d10e, -0x4a4911f3be34e512, 0x5c294d270212a7e5 }, + { 0x4e3dcbdad1bff7f9, -0x36ee717ddf9ba8e9, -0x45333143f0e762aa, 0x1b4822e9d4467668 }, + { -0x54c9f580daa9c87f, 0x2512228a480f7958, -0x38a2fad89eeb4b1d, 0x222d9625d976fe2a } }, { - { 0x0f94be7e0a344f85, 0xeb2faa8c87f22c38, 0x9ce1e75e4ee16f0f, 0x43e64e5418a08dea }, - { 0x1c717f85b372ace1, 0x81930e694638bf18, 0x239cad056bc08b58, 0x0b34271c87f8fff4 }, - { 0x8155e2521a35ce63, 0xbe100d4df912028e, 0xbff80bf8a57ddcec, 0x57342dc96d6bc6e4 } + { 0x0f94be7e0a344f85, -0x14d05573780dd3c8, -0x631e18a1b11e90f1, 0x43e64e5418a08dea }, + { 0x1c717f85b372ace1, -0x7e6cf196b9c740e8, 0x239cad056bc08b58, 0x0b34271c87f8fff4 }, + { -0x7eaa1dade5ca319d, -0x41eff2b206edfd72, -0x4007f4075a822314, 0x57342dc96d6bc6e4 } }, { - { 0xf3c3bcb71e707bf6, 0x351d9b8c7291a762, 0x00502e6edad69a33, 0x522f521f1ec8807f }, - { 0xefeef065c8ce5998, 0xbf029510b5cbeaa2, 0x8c64a10620b7c458, 0x35134fb231c24855 }, - { 0x272c1f46f9a3902b, 0xc91ba3b799657bcc, 0xae614b304f8a1c0e, 0x7afcaad70b99017b } + { -0x0c3c4348e18f840a, 0x351d9b8c7291a762, 0x00502e6edad69a33, 0x522f521f1ec8807f }, + { -0x10110f9a3731a668, -0x40fd6aef4a34155e, -0x739b5ef9df483ba8, 0x35134fb231c24855 }, + { 0x272c1f46f9a3902b, -0x36e45c48669a8434, -0x519eb4cfb075e3f2, 0x7afcaad70b99017b } }, { - { 0xa88141ecef842b6b, 0x55e7b14797abe6c5, 0x8c748f9703784ffe, 0x5b50a1f7afcd00b7 }, - { 0xc25ded54a4b8be41, 0x902d13e11bb0e2dd, 0x41f43233cde82ab2, 0x1085faa5c3aae7cb }, - { 0x9b840f66f1361315, 0x18462242701003e9, 0x65ed45fae4a25080, 0x0a2862393fda7320 } + { -0x577ebe13107bd495, 0x55e7b14797abe6c5, -0x738b7068fc87b002, 
0x5b50a1f7afcd00b7 }, + { -0x3da212ab5b4741bf, -0x6fd2ec1ee44f1d23, 0x41f43233cde82ab2, 0x1085faa5c3aae7cb }, + { -0x647bf0990ec9eceb, 0x18462242701003e9, 0x65ed45fae4a25080, 0x0a2862393fda7320 } }, { - { 0x960e737b6ecb9d17, 0xfaf24948d67ceae1, 0x37e7a9b4d55e1b89, 0x5cb7173cb46c59eb }, + { -0x69f18c84913462e9, -0x050db6b72983151f, 0x37e7a9b4d55e1b89, 0x5cb7173cb46c59eb }, { 0x46ab13c8347cbc9d, 0x3849e8d499c12383, 0x4cea314087d64ac9, 0x1f354134b1a29ee7 }, - { 0x4a89e68b82b7abf0, 0xf41cd9279ba6b7b9, 0x16e6c210e18d876f, 0x7cacdb0f7f1b09c6 } + { 0x4a89e68b82b7abf0, -0x0be326d864594847, 0x16e6c210e18d876f, 0x7cacdb0f7f1b09c6 } }, }, { { - { 0xe1014434dcc5caed, 0x47ed5d963c84fb33, 0x70019576ed86a0e7, 0x25b2697bd267f9e4 }, - { 0x9062b2e0d91a78bc, 0x47c9889cc8509667, 0x9df54a66405070b8, 0x7369e6a92493a1bf }, - { 0x9d673ffb13986864, 0x3ca5fbd9415dc7b8, 0xe04ecc3bdf273b5e, 0x1420683db54e4cd2 } + { -0x1efebbcb233a3513, 0x47ed5d963c84fb33, 0x70019576ed86a0e7, 0x25b2697bd267f9e4 }, + { -0x6f9d4d1f26e58744, 0x47c9889cc8509667, -0x620ab599bfaf8f48, 0x7369e6a92493a1bf }, + { -0x6298c004ec67979c, 0x3ca5fbd9415dc7b8, -0x1fb133c420d8c4a2, 0x1420683db54e4cd2 } }, { - { 0x34eebb6fc1cc5ad0, 0x6a1b0ce99646ac8b, 0xd3b0da49a66bde53, 0x31e83b4161d081c1 }, - { 0xb478bd1e249dd197, 0x620c35005e58c102, 0xfb02d32fccbaac5c, 0x60b63bebf508a72d }, - { 0x97e8c7129e062b4f, 0x49e48f4f29320ad8, 0x5bece14b6f18683f, 0x55cf1eb62d550317 } + { 0x34eebb6fc1cc5ad0, 0x6a1b0ce99646ac8b, -0x2c4f25b6599421ad, 0x31e83b4161d081c1 }, + { -0x4b8742e1db622e69, 0x620c35005e58c102, -0x04fd2cd0334553a4, 0x60b63bebf508a72d }, + { -0x681738ed61f9d4b1, 0x49e48f4f29320ad8, 0x5bece14b6f18683f, 0x55cf1eb62d550317 } }, { - { 0x3076b5e37df58c52, 0xd73ab9dde799cc36, 0xbd831ce34913ee20, 0x1a56fbaa62ba0133 }, - { 0x5879101065c23d58, 0x8b9d086d5094819c, 0xe2402fa912c55fa7, 0x669a6564570891d4 }, - { 0x943e6b505c9dc9ec, 0x302557bba77c371a, 0x9873ae5641347651, 0x13c4836799c58a5c } + { 0x3076b5e37df58c52, -0x28c54622186633ca, 
-0x427ce31cb6ec11e0, 0x1a56fbaa62ba0133 }, + { 0x5879101065c23d58, -0x7462f792af6b7e64, -0x1dbfd056ed3aa059, 0x669a6564570891d4 }, + { -0x6bc194afa3623614, 0x302557bba77c371a, -0x678c51a9becb89af, 0x13c4836799c58a5c } }, { - { 0xc4dcfb6a5d8bd080, 0xdeebc4ec571a4842, 0xd4b2e883b8e55365, 0x50bdc87dc8e5b827 }, - { 0x423a5d465ab3e1b9, 0xfc13c187c7f13f61, 0x19f83664ecb5b9b6, 0x66f80c93a637b607 }, + { -0x3b230495a2742f80, -0x21143b13a8e5b7be, -0x2b4d177c471aac9b, 0x50bdc87dc8e5b827 }, + { 0x423a5d465ab3e1b9, -0x03ec3e78380ec09f, 0x19f83664ecb5b9b6, 0x66f80c93a637b607 }, { 0x606d37836edfe111, 0x32353e15f011abd9, 0x64b03ac325b73b96, 0x1dd56444725fd5ae } }, { - { 0xc297e60008bac89a, 0x7d4cea11eae1c3e0, 0xf3e38be19fe7977c, 0x3a3a450f63a305cd }, - { 0x8fa47ff83362127d, 0xbc9f6ac471cd7c15, 0x6e71454349220c8b, 0x0e645912219f732e }, - { 0x078f2f31d8394627, 0x389d3183de94a510, 0xd1e36c6d17996f80, 0x318c8d9393a9a87b } + { -0x3d6819fff7453766, 0x7d4cea11eae1c3e0, -0x0c1c741e60186884, 0x3a3a450f63a305cd }, + { -0x705b8007cc9ded83, -0x4360953b8e3283eb, 0x6e71454349220c8b, 0x0e645912219f732e }, + { 0x078f2f31d8394627, 0x389d3183de94a510, -0x2e1c9392e8669080, 0x318c8d9393a9a87b } }, { - { 0x5d669e29ab1dd398, 0xfc921658342d9e3b, 0x55851dfdf35973cd, 0x509a41c325950af6 }, - { 0xf2745d032afffe19, 0x0c9f3c497f24db66, 0xbc98d3e3ba8598ef, 0x224c7c679a1d5314 }, - { 0xbdc06edca6f925e9, 0x793ef3f4641b1f33, 0x82ec12809d833e89, 0x05bff02328a11389 } + { 0x5d669e29ab1dd398, -0x036de9a7cbd261c5, 0x55851dfdf35973cd, 0x509a41c325950af6 }, + { -0x0d8ba2fcd50001e7, 0x0c9f3c497f24db66, -0x43672c1c457a6711, 0x224c7c679a1d5314 }, + { -0x423f91235906da17, 0x793ef3f4641b1f33, -0x7d13ed7f627cc177, 0x05bff02328a11389 } }, { { 0x6881a0dd0dc512e4, 0x4fe70dc844a5fafe, 0x1f748e6b8f4a5240, 0x576277cdee01a3ea }, - { 0x3632137023cae00b, 0x544acf0ad1accf59, 0x96741049d21a1c88, 0x780b8cc3fa2a44a7 }, - { 0x1ef38abc234f305f, 0x9a577fbd1405de08, 0x5e82a51434e62a0d, 0x5ff418726271b7a1 } + { 0x3632137023cae00b, 
0x544acf0ad1accf59, -0x698befb62de5e378, 0x780b8cc3fa2a44a7 }, + { 0x1ef38abc234f305f, -0x65a88042ebfa21f8, 0x5e82a51434e62a0d, 0x5ff418726271b7a1 } }, { - { 0xe5db47e813b69540, 0xf35d2a3b432610e1, 0xac1f26e938781276, 0x29d4db8ca0a0cb69 }, - { 0x398e080c1789db9d, 0xa7602025f3e778f5, 0xfa98894c06bd035d, 0x106a03dc25a966be }, - { 0xd9ad0aaf333353d0, 0x38669da5acd309e5, 0x3c57658ac888f7f0, 0x4ab38a51052cbefa } + { -0x1a24b817ec496ac0, -0x0ca2d5c4bcd9ef1f, -0x53e0d916c787ed8a, 0x29d4db8ca0a0cb69 }, + { 0x398e080c1789db9d, -0x589fdfda0c18870b, -0x056776b3f942fca3, 0x106a03dc25a966be }, + { -0x2652f550ccccac30, 0x38669da5acd309e5, 0x3c57658ac888f7f0, 0x4ab38a51052cbefa } }, }, { { - { 0xf68fe2e8809de054, 0xe3bc096a9c82bad1, 0x076353d40aadbf45, 0x7b9b1fb5dea1959e }, - { 0xdfdacbee4324c0e9, 0x054442883f955bb7, 0xdef7aaa8ea31609f, 0x68aee70642287cff }, - { 0xf01cc8f17471cc0c, 0x95242e37579082bb, 0x27776093d3e46b5f, 0x2d13d55a28bd85fb } + { -0x09701d177f621fac, -0x1c43f695637d452f, 0x076353d40aadbf45, 0x7b9b1fb5dea1959e }, + { -0x20253411bcdb3f17, 0x054442883f955bb7, -0x2108555715ce9f61, 0x68aee70642287cff }, + { -0x0fe3370e8b8e33f4, -0x6adbd1c8a86f7d45, 0x27776093d3e46b5f, 0x2d13d55a28bd85fb } }, { - { 0xbf019cce7aee7a52, 0xa8ded2b6e454ead3, 0x3c619f0b87a8bb19, 0x3619b5d7560916d8 }, - { 0xfac5d2065b35b8da, 0xa8da8a9a85624bb7, 0xccd2ca913d21cd0f, 0x6b8341ee8bf90d58 }, - { 0x3579f26b0282c4b2, 0x64d592f24fafefae, 0xb7cded7b28c8c7c0, 0x6a927b6b7173a8d7 } + { -0x40fe6331851185ae, -0x57212d491bab152d, 0x3c619f0b87a8bb19, 0x3619b5d7560916d8 }, + { -0x053a2df9a4ca4726, -0x572575657a9db449, -0x332d356ec2de32f1, 0x6b8341ee8bf90d58 }, + { 0x3579f26b0282c4b2, 0x64d592f24fafefae, -0x48321284d7373840, 0x6a927b6b7173a8d7 } }, { - { 0x8d7040863ece88eb, 0xf0e307a980eec08c, 0xac2250610d788fda, 0x056d92a43a0d478d }, - { 0x1f6db24f986e4656, 0x1021c02ed1e9105b, 0xf8ff3fff2cc0a375, 0x1d2a6bf8c6c82592 }, + { -0x728fbf79c1317715, -0x0f1cf8567f113f74, -0x53ddaf9ef2877026, 0x056d92a43a0d478d }, + { 
0x1f6db24f986e4656, 0x1021c02ed1e9105b, -0x0700c000d33f5c8b, 0x1d2a6bf8c6c82592 }, { 0x1b05a196fc3da5a1, 0x77d7a8c243b59ed0, 0x06da3d6297d17918, 0x66fbb494f12353f7 } }, { - { 0xd6d70996f12309d6, 0xdbfb2385e9c3d539, 0x46d602b0f7552411, 0x270a0b0557843e0c }, - { 0x751a50b9d85c0fb8, 0xd1afdc258bcf097b, 0x2f16a6a38309a969, 0x14ddff9ee5b00659 }, - { 0x61ff0640a7862bcc, 0x81cac09a5f11abfe, 0x9047830455d12abb, 0x19a4bde1945ae873 } + { -0x2928f6690edcf62a, -0x2404dc7a163c2ac7, 0x46d602b0f7552411, 0x270a0b0557843e0c }, + { 0x751a50b9d85c0fb8, -0x2e5023da7430f685, 0x2f16a6a38309a969, 0x14ddff9ee5b00659 }, + { 0x61ff0640a7862bcc, -0x7e353f65a0ee5402, -0x6fb87cfbaa2ed545, 0x19a4bde1945ae873 } }, { { 0x40c709dec076c49f, 0x657bfaf27f3e53f6, 0x40662331eca042c4, 0x14b375487eb4df04 }, - { 0x9b9f26f520a6200a, 0x64804443cf13eaf8, 0x8a63673f8631edd3, 0x72bbbce11ed39dc1 }, - { 0xae853c94ab66dc47, 0xeb62343edf762d6e, 0xf08e0e186fb2f7d1, 0x4f0b1c02700ab37a } + { -0x6460d90adf59dff6, 0x64804443cf13eaf8, -0x759c98c079ce122d, 0x72bbbce11ed39dc1 }, + { -0x517ac36b549923b9, -0x149dcbc12089d292, -0x0f71f1e7904d082f, 0x4f0b1c02700ab37a } }, { - { 0x79fd21ccc1b2e23f, 0x4ae7c281453df52a, 0xc8172ec9d151486b, 0x68abe9443e0a7534 }, - { 0xe1706787d81951fa, 0xa10a2c8eb290c77b, 0xe7382fa03ed66773, 0x0a4d84710bcc4b54 }, - { 0xda12c6c407831dcb, 0x0da230d74d5c510d, 0x4ab1531e6bd404e1, 0x4106b166bcf440ef } + { 0x79fd21ccc1b2e23f, 0x4ae7c281453df52a, -0x37e8d1362eaeb795, 0x68abe9443e0a7534 }, + { -0x1e8f987827e6ae06, -0x5ef5d3714d6f3885, -0x18c7d05fc129988d, 0x0a4d84710bcc4b54 }, + { -0x25ed393bf87ce235, 0x0da230d74d5c510d, 0x4ab1531e6bd404e1, 0x4106b166bcf440ef } }, { - { 0xa485ccd539e4ecf2, 0x5aa3f3ad0555bab5, 0x145e3439937df82d, 0x1238b51e1214283f }, - { 0x02e57a421cd23668, 0x4ad9fb5d0eaef6fd, 0x954e6727b1244480, 0x7f792f9d2699f331 }, - { 0x0b886b925fd4d924, 0x60906f7a3626a80d, 0xecd367b4b98abd12, 0x2876beb1def344cf } + { -0x5b7a332ac61b130e, 0x5aa3f3ad0555bab5, 0x145e3439937df82d, 0x1238b51e1214283f }, 
+ { 0x02e57a421cd23668, 0x4ad9fb5d0eaef6fd, -0x6ab198d84edbbb80, 0x7f792f9d2699f331 }, + { 0x0b886b925fd4d924, 0x60906f7a3626a80d, -0x132c984b467542ee, 0x2876beb1def344cf } }, { - { 0xd594b3333a8a85f8, 0x4ea37689e78d7d58, 0x73bf9f455e8e351f, 0x5507d7d2bc41ebb4 }, - { 0xdc84e93563144691, 0x632fe8a0d61f23f4, 0x4caa800612a9a8d5, 0x48f9dbfa0e9918d3 }, - { 0x1ceb2903299572fc, 0x7c8ccaa29502d0ee, 0x91bfa43411cce67b, 0x5784481964a831e7 } + { -0x2a6b4cccc5757a08, 0x4ea37689e78d7d58, 0x73bf9f455e8e351f, 0x5507d7d2bc41ebb4 }, + { -0x237b16ca9cebb96f, 0x632fe8a0d61f23f4, 0x4caa800612a9a8d5, 0x48f9dbfa0e9918d3 }, + { 0x1ceb2903299572fc, 0x7c8ccaa29502d0ee, -0x6e405bcbee331985, 0x5784481964a831e7 } }, }, { { - { 0xd6cfd1ef5fddc09c, 0xe82b3efdf7575dce, 0x25d56b5d201634c2, 0x3041c6bb04ed2b9b }, - { 0xda7c2b256768d593, 0x98c1c0574422ca13, 0xf1a80bd5ca0ace1d, 0x29cdd1adc088a690 }, - { 0x0ff2f2f9d956e148, 0xade797759f356b2e, 0x1a4698bb5f6c025c, 0x104bbd6814049a7b } + { -0x29302e10a0223f64, -0x17d4c10208a8a232, 0x25d56b5d201634c2, 0x3041c6bb04ed2b9b }, + { -0x2583d4da98972a6d, -0x673e3fa8bbdd35ed, -0x0e57f42a35f531e3, 0x29cdd1adc088a690 }, + { 0x0ff2f2f9d956e148, -0x5218688a60ca94d2, 0x1a4698bb5f6c025c, 0x104bbd6814049a7b } }, { - { 0xa95d9a5fd67ff163, 0xe92be69d4cc75681, 0xb7f8024cde20f257, 0x204f2a20fb072df5 }, + { -0x56a265a029800e9d, -0x16d41962b338a97f, -0x4807fdb321df0da9, 0x204f2a20fb072df5 }, { 0x51f0fd3168f1ed67, 0x2c811dcdd86f3bc2, 0x44dc5c4304d2f2de, 0x5be8cc57092a7149 }, - { 0xc8143b3d30ebb079, 0x7589155abd652e30, 0x653c3c318f6d5c31, 0x2570fb17c279161f } + { -0x37ebc4c2cf144f87, 0x7589155abd652e30, 0x653c3c318f6d5c31, 0x2570fb17c279161f } }, { - { 0x192ea9550bb8245a, 0xc8e6fba88f9050d1, 0x7986ea2d88a4c935, 0x241c5f91de018668 }, - { 0x3efa367f2cb61575, 0xf5f96f761cd6026c, 0xe8c7142a65b52562, 0x3dcb65ea53030acd }, - { 0x28d8172940de6caa, 0x8fbf2cf022d9733a, 0x16d7fcdd235b01d1, 0x08420edd5fcdf0e5 } + { 0x192ea9550bb8245a, -0x37190457706faf2f, 0x7986ea2d88a4c935, 
0x241c5f91de018668 }, + { 0x3efa367f2cb61575, -0x0a069089e329fd94, -0x1738ebd59a4ada9e, 0x3dcb65ea53030acd }, + { 0x28d8172940de6caa, -0x7040d30fdd268cc6, 0x16d7fcdd235b01d1, 0x08420edd5fcdf0e5 } }, { - { 0x0358c34e04f410ce, 0xb6135b5a276e0685, 0x5d9670c7ebb91521, 0x04d654f321db889c }, - { 0xcdff20ab8362fa4a, 0x57e118d4e21a3e6e, 0xe3179617fc39e62b, 0x0d9a53efbc1769fd }, + { 0x0358c34e04f410ce, -0x49eca4a5d891f97b, 0x5d9670c7ebb91521, 0x04d654f321db889c }, + { -0x3200df547c9d05b6, 0x57e118d4e21a3e6e, -0x1ce869e803c619d5, 0x0d9a53efbc1769fd }, { 0x5e7dc116ddbdb5d5, 0x2954deb68da5dd2d, 0x1cb608173334a292, 0x4a7a4f2618991ad7 } }, { - { 0x24c3b291af372a4b, 0x93da8270718147f2, 0xdd84856486899ef2, 0x4a96314223e0ee33 }, - { 0xf4a718025fb15f95, 0x3df65f346b5c1b8f, 0xcdfcf08500e01112, 0x11b50c4cddd31848 }, - { 0xa6e8274408a4ffd6, 0x738e177e9c1576d9, 0x773348b63d02b3f2, 0x4f4bce4dce6bcc51 } + { 0x24c3b291af372a4b, -0x6c257d8f8e7eb80e, -0x227b7a9b7976610e, 0x4a96314223e0ee33 }, + { -0x0b58e7fda04ea06b, 0x3df65f346b5c1b8f, -0x32030f7aff1feeee, 0x11b50c4cddd31848 }, + { -0x5917d8bbf75b002a, 0x738e177e9c1576d9, 0x773348b63d02b3f2, 0x4f4bce4dce6bcc51 } }, { - { 0x30e2616ec49d0b6f, 0xe456718fcaec2317, 0x48eb409bf26b4fa6, 0x3042cee561595f37 }, - { 0xa71fce5ae2242584, 0x26ea725692f58a9e, 0xd21a09d71cea3cf4, 0x73fcdd14b71c01e6 }, - { 0x427e7079449bac41, 0x855ae36dbce2310a, 0x4cae76215f841a7c, 0x389e740c9a9ce1d6 } + { 0x30e2616ec49d0b6f, -0x1ba98e703513dce9, 0x48eb409bf26b4fa6, 0x3042cee561595f37 }, + { -0x58e031a51ddbda7c, 0x26ea725692f58a9e, -0x2de5f628e315c30c, 0x73fcdd14b71c01e6 }, + { 0x427e7079449bac41, -0x7aa51c92431dcef6, 0x4cae76215f841a7c, 0x389e740c9a9ce1d6 } }, { - { 0xc9bd78f6570eac28, 0xe55b0b3227919ce1, 0x65fc3eaba19b91ed, 0x25c425e5d6263690 }, - { 0x64fcb3ae34dcb9ce, 0x97500323e348d0ad, 0x45b3f07d62c6381b, 0x61545379465a6788 }, - { 0x3f3e06a6f1d7de6e, 0x3ef976278e062308, 0x8c14f6264e8a6c77, 0x6539a08915484759 } + { -0x36428709a8f153d8, -0x1aa4f4cdd86e631f, 
0x65fc3eaba19b91ed, 0x25c425e5d6263690 }, + { 0x64fcb3ae34dcb9ce, -0x68affcdc1cb72f53, 0x45b3f07d62c6381b, 0x61545379465a6788 }, + { 0x3f3e06a6f1d7de6e, 0x3ef976278e062308, -0x73eb09d9b1759389, 0x6539a08915484759 } }, { - { 0xddc4dbd414bb4a19, 0x19b2bc3c98424f8e, 0x48a89fd736ca7169, 0x0f65320ef019bd90 }, - { 0xe9d21f74c3d2f773, 0xc150544125c46845, 0x624e5ce8f9b99e33, 0x11c5e4aac5cd186c }, - { 0xd486d1b1cafde0c6, 0x4f3fe6e3163b5181, 0x59a8af0dfaf2939a, 0x4cabc7bdec33072a } + { -0x223b242beb44b5e7, 0x19b2bc3c98424f8e, 0x48a89fd736ca7169, 0x0f65320ef019bd90 }, + { -0x162de08b3c2d088d, -0x3eafabbeda3b97bb, 0x624e5ce8f9b99e33, 0x11c5e4aac5cd186c }, + { -0x2b792e4e35021f3a, 0x4f3fe6e3163b5181, 0x59a8af0dfaf2939a, 0x4cabc7bdec33072a } }, }, { { - { 0xf7c0a19c1a54a044, 0x4a1c5e2477bd9fbb, 0xa6e3ca115af22972, 0x1819bb953f2e9e0d }, - { 0x16faa8fb532f7428, 0xdbd42ea046a4e272, 0x5337653b8b9ea480, 0x4065947223973f03 }, - { 0x498fbb795e042e84, 0x7d0dd89a7698b714, 0x8bfb0ba427fe6295, 0x36ba82e721200524 } + { -0x083f5e63e5ab5fbc, 0x4a1c5e2477bd9fbb, -0x591c35eea50dd68e, 0x1819bb953f2e9e0d }, + { 0x16faa8fb532f7428, -0x242bd15fb95b1d8e, 0x5337653b8b9ea480, 0x4065947223973f03 }, + { 0x498fbb795e042e84, 0x7d0dd89a7698b714, -0x7404f45bd8019d6b, 0x36ba82e721200524 } }, { - { 0xc8d69d0a57274ed5, 0x45ba803260804b17, 0xdf3cda102255dfac, 0x77d221232709b339 }, - { 0xd60ecbb74245ec41, 0xfd9be89e34348716, 0xc9240afee42284de, 0x4472f648d0531db4 }, - { 0x498a6d7064ad94d8, 0xa5b5c8fd9af62263, 0x8ca8ed0545c141f4, 0x2c63bec3662d358c } + { -0x372962f5a8d8b12b, 0x45ba803260804b17, -0x20c325efddaa2054, 0x77d221232709b339 }, + { -0x29f13448bdba13bf, -0x02641761cbcb78ea, -0x36dbf5011bdd7b22, 0x4472f648d0531db4 }, + { 0x498a6d7064ad94d8, -0x5a4a37026509dd9d, -0x735712faba3ebe0c, 0x2c63bec3662d358c } }, { - { 0x9a518b3a8586f8bf, 0x9ee71af6cbb196f0, 0xaa0625e6a2385cf2, 0x1deb2176ddd7c8d1 }, - { 0x7fe60d8bea787955, 0xb9dc117eb5f401b7, 0x91c7c09a19355cce, 0x22692ef59442bedf }, - { 0x8563d19a2066cf6c, 
0x401bfd8c4dcc7cd7, 0xd976a6becd0d8f62, 0x67cfd773a278b05e } + { -0x65ae74c57a790741, -0x6118e509344e6910, -0x55f9da195dc7a30e, 0x1deb2176ddd7c8d1 }, + { 0x7fe60d8bea787955, -0x4623ee814a0bfe49, -0x6e383f65e6caa332, 0x22692ef59442bedf }, + { -0x7a9c2e65df993094, 0x401bfd8c4dcc7cd7, -0x2689594132f2709e, 0x67cfd773a278b05e } }, { - { 0x2d5fa9855a4e586a, 0x65f8f7a449beab7e, 0xaa074dddf21d33d3, 0x185cba721bcb9dee }, - { 0x8dec31faef3ee475, 0x99dbff8a9e22fd92, 0x512d11594e26cab1, 0x0cde561eec4310b9 }, - { 0x93869da3f4e3cb41, 0xbf0392f540f7977e, 0x026204fcd0463b83, 0x3ec91a769eec6eed } + { 0x2d5fa9855a4e586a, 0x65f8f7a449beab7e, -0x55f8b2220de2cc2d, 0x185cba721bcb9dee }, + { -0x7213ce0510c11b8b, -0x6624007561dd026e, 0x512d11594e26cab1, 0x0cde561eec4310b9 }, + { -0x6c79625c0b1c34bf, -0x40fc6d0abf086882, 0x026204fcd0463b83, 0x3ec91a769eec6eed } }, { - { 0x0fad2fb7b0a3402f, 0x46615ecbfb69f4a8, 0xf745bcc8c5f8eaa6, 0x7a5fa8794a94e896 }, - { 0x1e9df75bf78166ad, 0x4dfda838eb0cd7af, 0xba002ed8c1eaf988, 0x13fedb3e11f33cfc }, - { 0x52958faa13cd67a1, 0x965ee0818bdbb517, 0x16e58daa2e8845b3, 0x357d397d5499da8f } + { 0x0fad2fb7b0a3402f, 0x46615ecbfb69f4a8, -0x08ba43373a07155a, 0x7a5fa8794a94e896 }, + { 0x1e9df75bf78166ad, 0x4dfda838eb0cd7af, -0x45ffd1273e150678, 0x13fedb3e11f33cfc }, + { 0x52958faa13cd67a1, -0x69a11f7e74244ae9, 0x16e58daa2e8845b3, 0x357d397d5499da8f } }, { { 0x481dacb4194bfbf8, 0x4d77e3f1bae58299, 0x1ef4612e7d1372a0, 0x3a8d867e70ff69e1 }, - { 0x1ebfa05fb0bace6c, 0xc934620c1caf9a1e, 0xcc771cc41d82b61a, 0x2d94a16aa5f74fec }, - { 0x6f58cd5d55aff958, 0xba3eaa5c75567721, 0x75c123999165227d, 0x69be1343c2f2b35e } + { 0x1ebfa05fb0bace6c, -0x36cb9df3e35065e2, -0x3388e33be27d49e6, 0x2d94a16aa5f74fec }, + { 0x6f58cd5d55aff958, -0x45c155a38aa988df, 0x75c123999165227d, 0x69be1343c2f2b35e } }, { - { 0x82bbbdac684b8de3, 0xa2f4c7d03fca0718, 0x337f92fbe096aaa8, 0x200d4d8c63587376 }, + { -0x7d44425397b4721d, -0x5d0b382fc035f8e8, 0x337f92fbe096aaa8, 0x200d4d8c63587376 }, { 
0x0e091d5ee197c92a, 0x4f51019f2945119f, 0x143679b9f034e99c, 0x7d88112e4d24c696 }, - { 0x208aed4b4893b32b, 0x3efbf23ebe59b964, 0xd762deb0dba5e507, 0x69607bd681bd9d94 } + { 0x208aed4b4893b32b, 0x3efbf23ebe59b964, -0x289d214f245a1af9, 0x69607bd681bd9d94 } }, { - { 0x3b7f3bd49323a902, 0x7c21b5566b2c6e53, 0xe5ba8ff53a7852a7, 0x28bc77a5838ece00 }, - { 0xf6be021068de1ce1, 0xe8d518e70edcbc1f, 0xe3effdd01b5505a5, 0x35f63353d3ec3fd0 }, + { 0x3b7f3bd49323a902, 0x7c21b5566b2c6e53, -0x1a45700ac587ad59, 0x28bc77a5838ece00 }, + { -0x0941fdef9721e31f, -0x172ae718f12343e1, -0x1c10022fe4aafa5b, 0x35f63353d3ec3fd0 }, { 0x63ba78a8e25d8036, 0x63651e0094333490, 0x48d82f20288ce532, 0x3a31abfa36b57524 } }, }, { { - { 0xc08f788f3f78d289, 0xfe30a72ca1404d9f, 0xf2778bfccf65cc9d, 0x7ee498165acb2021 }, - { 0x239e9624089c0a2e, 0xc748c4c03afe4738, 0x17dbed2a764fa12a, 0x639b93f0321c8582 }, - { 0x7bd508e39111a1c3, 0x2b2b90d480907489, 0xe7d2aec2ae72fd19, 0x0edf493c85b602a6 } + { -0x3f708770c0872d77, -0x01cf58d35ebfb261, -0x0d887403309a3363, 0x7ee498165acb2021 }, + { 0x239e9624089c0a2e, -0x38b73b3fc501b8c8, 0x17dbed2a764fa12a, 0x639b93f0321c8582 }, + { 0x7bd508e39111a1c3, 0x2b2b90d480907489, -0x182d513d518d02e7, 0x0edf493c85b602a6 } }, { - { 0x6767c4d284764113, 0xa090403ff7f5f835, 0x1c8fcffacae6bede, 0x04c00c54d1dfa369 }, - { 0xaecc8158599b5a68, 0xea574f0febade20e, 0x4fe41d7422b67f07, 0x403b92e3019d4fb4 }, - { 0x4dc22f818b465cf8, 0x71a0f35a1480eff8, 0xaee8bfad04c7d657, 0x355bb12ab26176f4 } + { 0x6767c4d284764113, -0x5f6fbfc0080a07cb, 0x1c8fcffacae6bede, 0x04c00c54d1dfa369 }, + { -0x51337ea7a664a598, -0x15a8b0f014521df2, 0x4fe41d7422b67f07, 0x403b92e3019d4fb4 }, + { 0x4dc22f818b465cf8, 0x71a0f35a1480eff8, -0x51174052fb3829a9, 0x355bb12ab26176f4 } }, { - { 0xa301dac75a8c7318, 0xed90039db3ceaa11, 0x6f077cbf3bae3f2d, 0x7518eaf8e052ad8e }, - { 0xa71e64cc7493bbf4, 0xe5bd84d9eca3b0c3, 0x0a6bc50cfa05e785, 0x0f9b8132182ec312 }, - { 0xa48859c41b7f6c32, 0x0f2d60bcf4383298, 0x1815a929c9b1d1d9, 
0x47c3871bbb1755c4 } + { -0x5cfe2538a5738ce8, -0x126ffc624c3155ef, 0x6f077cbf3bae3f2d, 0x7518eaf8e052ad8e }, + { -0x58e19b338b6c440c, -0x1a427b26135c4f3d, 0x0a6bc50cfa05e785, 0x0f9b8132182ec312 }, + { -0x5b77a63be48093ce, 0x0f2d60bcf4383298, 0x1815a929c9b1d1d9, 0x47c3871bbb1755c4 } }, { - { 0xfbe65d50c85066b0, 0x62ecc4b0b3a299b0, 0xe53754ea441ae8e0, 0x08fea02ce8d48d5f }, - { 0x5144539771ec4f48, 0xf805b17dc98c5d6e, 0xf762c11a47c3c66b, 0x00b89b85764699dc }, - { 0x824ddd7668deead0, 0xc86445204b685d23, 0xb514cfcd5d89d665, 0x473829a74f75d537 } + { -0x0419a2af37af9950, 0x62ecc4b0b3a299b0, -0x1ac8ab15bbe51720, 0x08fea02ce8d48d5f }, + { 0x5144539771ec4f48, -0x07fa4e823673a292, -0x089d3ee5b83c3995, 0x00b89b85764699dc }, + { -0x7db2228997211530, -0x379bbadfb497a2dd, -0x4aeb3032a276299b, 0x473829a74f75d537 } }, { { 0x23d9533aad3902c9, 0x64c2ddceef03588f, 0x15257390cfe12fb4, 0x6c668b4d44e4d390 }, - { 0x82d2da754679c418, 0xe63bd7d8b2618df0, 0x355eef24ac47eb0a, 0x2078684c4833c6b4 }, - { 0x3b48cf217a78820c, 0xf76a0ab281273e97, 0xa96c65a78c8eed7b, 0x7411a6054f8a433f } + { -0x7d2d258ab9863be8, -0x19c428274d9e7210, 0x355eef24ac47eb0a, 0x2078684c4833c6b4 }, + { 0x3b48cf217a78820c, -0x0895f54d7ed8c169, -0x56939a5873711285, 0x7411a6054f8a433f } }, { - { 0x579ae53d18b175b4, 0x68713159f392a102, 0x8455ecba1eef35f5, 0x1ec9a872458c398f }, - { 0x4d659d32b99dc86d, 0x044cdc75603af115, 0xb34c712cdcc2e488, 0x7c136574fb8134ff }, - { 0xb8e6a4d400a2509b, 0x9b81d7020bc882b4, 0x57e7cc9bf1957561, 0x3add88a5c7cd6460 } + { 0x579ae53d18b175b4, 0x68713159f392a102, -0x7baa1345e110ca0b, 0x1ec9a872458c398f }, + { 0x4d659d32b99dc86d, 0x044cdc75603af115, -0x4cb38ed3233d1b78, 0x7c136574fb8134ff }, + { -0x47195b2bff5daf65, -0x647e28fdf4377d4c, 0x57e7cc9bf1957561, 0x3add88a5c7cd6460 } }, { - { 0x85c298d459393046, 0x8f7e35985ff659ec, 0x1d2ca22af2f66e3a, 0x61ba1131a406a720 }, - { 0xab895770b635dcf2, 0x02dfef6cf66c1fbc, 0x85530268beb6d187, 0x249929fccc879e74 }, - { 0xa3d0a0f116959029, 0x023b6b6cba7ebd89, 
0x7bf15a3e26783307, 0x5620310cbbd8ece7 } + { -0x7a3d672ba6c6cfba, -0x7081ca67a009a614, 0x1d2ca22af2f66e3a, 0x61ba1131a406a720 }, + { -0x5476a88f49ca230e, 0x02dfef6cf66c1fbc, -0x7aacfd9741492e79, 0x249929fccc879e74 }, + { -0x5c2f5f0ee96a6fd7, 0x023b6b6cba7ebd89, 0x7bf15a3e26783307, 0x5620310cbbd8ece7 } }, { - { 0x6646b5f477e285d6, 0x40e8ff676c8f6193, 0xa6ec7311abb594dd, 0x7ec846f3658cec4d }, - { 0x528993434934d643, 0xb9dbf806a51222f5, 0x8f6d878fc3f41c22, 0x37676a2a4d9d9730 }, - { 0x9b5e8f3f1da22ec7, 0x130f1d776c01cd13, 0x214c8fcfa2989fb8, 0x6daaf723399b9dd5 } + { 0x6646b5f477e285d6, 0x40e8ff676c8f6193, -0x59138cee544a6b23, 0x7ec846f3658cec4d }, + { 0x528993434934d643, -0x462407f95aeddd0b, -0x709278703c0be3de, 0x37676a2a4d9d9730 }, + { -0x64a170c0e25dd139, 0x130f1d776c01cd13, 0x214c8fcfa2989fb8, 0x6daaf723399b9dd5 } }, }, { { - { 0x81aebbdd2cd13070, 0x962e4325f85a0e9e, 0xde9391aacadffecb, 0x53177fda52c230e6 }, - { 0x591e4a5610628564, 0x2a4bb87ca8b4df34, 0xde2a2572e7a38e43, 0x3cbdabd9fee5046e }, - { 0xa7bc970650b9de79, 0x3d12a7fbc301b59b, 0x02652e68d36ae38c, 0x79d739835a6199dc } + { -0x7e514422d32ecf90, -0x69d1bcda07a5f162, -0x216c6e5535200135, 0x53177fda52c230e6 }, + { 0x591e4a5610628564, 0x2a4bb87ca8b4df34, -0x21d5da8d185c71bd, 0x3cbdabd9fee5046e }, + { -0x584368f9af462187, 0x3d12a7fbc301b59b, 0x02652e68d36ae38c, 0x79d739835a6199dc } }, { - { 0x21c9d9920d591737, 0x9bea41d2e9b46cd6, 0xe20e84200d89bfca, 0x79d99f946eae5ff8 }, - { 0xd9354df64131c1bd, 0x758094a186ec5822, 0x4464ee12e459f3c2, 0x6c11fce4cb133282 }, - { 0xf17b483568673205, 0x387deae83caad96c, 0x61b471fd56ffe386, 0x31741195b745a599 } + { 0x21c9d9920d591737, -0x6415be2d164b932a, -0x1df17bdff2764036, 0x79d99f946eae5ff8 }, + { -0x26cab209bece3e43, 0x758094a186ec5822, 0x4464ee12e459f3c2, 0x6c11fce4cb133282 }, + { -0x0e84b7ca9798cdfb, 0x387deae83caad96c, 0x61b471fd56ffe386, 0x31741195b745a599 } }, { { 0x17f8ba683b02a047, 0x50212096feefb6c8, 0x70139be21556cbe2, 0x203e44a11d98915b }, - { 0xe8d10190b77a360b, 
0x99b983209995e702, 0xbd4fdff8fa0247aa, 0x2772e344e0d36a87 }, - { 0xd6863eba37b9e39f, 0x105bc169723b5a23, 0x104f6459a65c0762, 0x567951295b4d38d4 } + { -0x172efe6f4885c9f5, -0x66467cdf666a18fe, -0x42b0200705fdb856, 0x2772e344e0d36a87 }, + { -0x2979c145c8461c61, 0x105bc169723b5a23, 0x104f6459a65c0762, 0x567951295b4d38d4 } }, { { 0x07242eb30d4b497f, 0x1ef96306b9bccc87, 0x37950934d8116f45, 0x05468d6201405b04 }, - { 0x535fd60613037524, 0xe210adf6b0fbc26a, 0xac8d0a9b23e990ae, 0x47204d08d72fdbf9 }, - { 0x00f565a9f93267de, 0xcecfd78dc0d58e8a, 0xa215e2dcf318e28e, 0x4599ee919b633352 } + { 0x535fd60613037524, -0x1def52094f043d96, -0x5372f564dc166f52, 0x47204d08d72fdbf9 }, + { 0x00f565a9f93267de, -0x313028723f2a7176, -0x5dea1d230ce71d72, 0x4599ee919b633352 } }, { - { 0xac746d6b861ae579, 0x31ab0650f6aea9dc, 0x241d661140256d4c, 0x2f485e853d21a5de }, - { 0xd3c220ca70e0e76b, 0xb12bea58ea9f3094, 0x294ddec8c3271282, 0x0c3539e1a1d1d028 }, + { -0x538b929479e51a87, 0x31ab0650f6aea9dc, 0x241d661140256d4c, 0x2f485e853d21a5de }, + { -0x2c3ddf358f1f1895, -0x4ed415a71560cf6c, 0x294ddec8c3271282, 0x0c3539e1a1d1d028 }, { 0x329744839c0833f3, 0x6fe6257fd2abc484, 0x5327d1814b358817, 0x65712585893fe9bc } }, { - { 0x81c29f1bd708ee3f, 0xddcb5a05ae6407d0, 0x97aec1d7d2a3eba7, 0x1590521a91d50831 }, - { 0x9c102fb732a61161, 0xe48e10dd34d520a8, 0x365c63546f9a9176, 0x32f6fe4c046f6006 }, - { 0x40a3a11ec7910acc, 0x9013dff8f16d27ae, 0x1a9720d8abb195d4, 0x1bb9fe452ea98463 } + { -0x7e3d60e428f711c1, -0x2234a5fa519bf830, -0x68513e282d5c1459, 0x1590521a91d50831 }, + { -0x63efd048cd59ee9f, -0x1b71ef22cb2adf58, 0x365c63546f9a9176, 0x32f6fe4c046f6006 }, + { 0x40a3a11ec7910acc, -0x6fec20070e92d852, 0x1a9720d8abb195d4, 0x1bb9fe452ea98463 } }, { - { 0xcf5e6c95cc36747c, 0x294201536b0bc30d, 0x453ac67cee797af0, 0x5eae6ab32a8bb3c9 }, - { 0xe9d1d950b3d54f9e, 0x2d5f9cbee00d33c1, 0x51c2c656a04fc6ac, 0x65c091ee3c1cbcc9 }, + { -0x30a1936a33c98b84, 0x294201536b0bc30d, 0x453ac67cee797af0, 0x5eae6ab32a8bb3c9 }, + { 
-0x162e26af4c2ab062, 0x2d5f9cbee00d33c1, 0x51c2c656a04fc6ac, 0x65c091ee3c1cbcc9 }, { 0x7083661114f118ea, 0x2b37b87b94349cad, 0x7273f51cb4e99f40, 0x78a2a95823d75698 } }, { - { 0xb4f23c425ef83207, 0xabf894d3c9a934b5, 0xd0708c1339fd87f7, 0x1876789117166130 }, - { 0xa2b072e95c8c2ace, 0x69cffc96651e9c4b, 0x44328ef842e7b42b, 0x5dd996c122aadeb3 }, - { 0x925b5ef0670c507c, 0x819bc842b93c33bf, 0x10792e9a70dd003f, 0x59ad4b7a6e28dc74 } + { -0x4b0dc3bda107cdf9, -0x54076b2c3656cb4b, -0x2f8f73ecc6027809, 0x1876789117166130 }, + { -0x5d4f8d16a373d532, 0x69cffc96651e9c4b, 0x44328ef842e7b42b, 0x5dd996c122aadeb3 }, + { -0x6da4a10f98f3af84, -0x7e6437bd46c3cc41, 0x10792e9a70dd003f, 0x59ad4b7a6e28dc74 } }, }, { { { 0x583b04bfacad8ea2, 0x29b743e8148be884, 0x2b1e583b0810c5db, 0x2b5449e58eb3bbaa }, - { 0x5f3a7562eb3dbe47, 0xf7ea38548ebda0b8, 0x00c3e53145747299, 0x1304e9e71627d551 }, - { 0x789814d26adc9cfe, 0x3c1bab3f8b48dd0b, 0xda0fe1fff979c60a, 0x4468de2d7c2dd693 } + { 0x5f3a7562eb3dbe47, -0x0815c7ab71425f48, 0x00c3e53145747299, 0x1304e9e71627d551 }, + { 0x789814d26adc9cfe, 0x3c1bab3f8b48dd0b, -0x25f01e00068639f6, 0x4468de2d7c2dd693 } }, { - { 0x4b9ad8c6f86307ce, 0x21113531435d0c28, 0xd4a866c5657a772c, 0x5da6427e63247352 }, - { 0x51bb355e9419469e, 0x33e6dc4c23ddc754, 0x93a5b6d6447f9962, 0x6cce7c6ffb44bd63 }, - { 0x1a94c688deac22ca, 0xb9066ef7bbae1ff8, 0x88ad8c388d59580f, 0x58f29abfe79f2ca8 } + { 0x4b9ad8c6f86307ce, 0x21113531435d0c28, -0x2b57993a9a8588d4, 0x5da6427e63247352 }, + { 0x51bb355e9419469e, 0x33e6dc4c23ddc754, -0x6c5a4929bb80669e, 0x6cce7c6ffb44bd63 }, + { 0x1a94c688deac22ca, -0x46f991084451e008, -0x775273c772a6a7f1, 0x58f29abfe79f2ca8 } }, { - { 0x4b5a64bf710ecdf6, 0xb14ce538462c293c, 0x3643d056d50b3ab9, 0x6af93724185b4870 }, - { 0xe90ecfab8de73e68, 0x54036f9f377e76a5, 0xf0495b0bbe015982, 0x577629c4a7f41e36 }, - { 0x3220024509c6a888, 0xd2e036134b558973, 0x83e236233c33289f, 0x701f25bb0caec18f } + { 0x4b5a64bf710ecdf6, -0x4eb31ac7b9d3d6c4, 0x3643d056d50b3ab9, 0x6af93724185b4870 
}, + { -0x16f130547218c198, 0x54036f9f377e76a5, -0x0fb6a4f441fea67e, 0x577629c4a7f41e36 }, + { 0x3220024509c6a888, -0x2d1fc9ecb4aa768d, -0x7c1dc9dcc3ccd761, 0x701f25bb0caec18f } }, { - { 0x9d18f6d97cbec113, 0x844a06e674bfdbe4, 0x20f5b522ac4e60d6, 0x720a5bc050955e51 }, - { 0xc3a8b0f8e4616ced, 0xf700660e9e25a87d, 0x61e3061ff4bca59c, 0x2e0c92bfbdc40be9 }, - { 0x0c3f09439b805a35, 0xe84e8b376242abfc, 0x691417f35c229346, 0x0e9b9cbb144ef0ec } + { -0x62e7092683413eed, -0x7bb5f9198b40241c, 0x20f5b522ac4e60d6, 0x720a5bc050955e51 }, + { -0x3c574f071b9e9313, -0x08ff99f161da5783, 0x61e3061ff4bca59c, 0x2e0c92bfbdc40be9 }, + { 0x0c3f09439b805a35, -0x17b174c89dbd5404, 0x691417f35c229346, 0x0e9b9cbb144ef0ec } }, { - { 0x8dee9bd55db1beee, 0xc9c3ab370a723fb9, 0x44a8f1bf1c68d791, 0x366d44191cfd3cde }, - { 0xfbbad48ffb5720ad, 0xee81916bdbf90d0e, 0xd4813152635543bf, 0x221104eb3f337bd8 }, - { 0x9e3c1743f2bc8c14, 0x2eda26fcb5856c3b, 0xccb82f0e68a7fb97, 0x4167a4e6bc593244 } + { -0x7211642aa24e4112, -0x363c54c8f58dc047, 0x44a8f1bf1c68d791, 0x366d44191cfd3cde }, + { -0x04452b7004a8df53, -0x117e6e942406f2f2, -0x2b7ecead9caabc41, 0x221104eb3f337bd8 }, + { -0x61c3e8bc0d4373ec, 0x2eda26fcb5856c3b, -0x3347d0f197580469, 0x4167a4e6bc593244 } }, { - { 0xc2be2665f8ce8fee, 0xe967ff14e880d62c, 0xf12e6e7e2f364eee, 0x34b33370cb7ed2f6 }, + { -0x3d41d99a07317012, -0x169800eb177f29d4, -0x0ed19181d0c9b112, 0x34b33370cb7ed2f6 }, { 0x643b9d2876f62700, 0x5d1d9d400e7668eb, 0x1b4b430321fc0684, 0x7938bb7e2255246a }, - { 0xcdc591ee8681d6cc, 0xce02109ced85a753, 0xed7485c158808883, 0x1176fc6e2dfe65e4 } + { -0x323a6e11797e2934, -0x31fdef63127a58ad, -0x128b7a3ea77f777d, 0x1176fc6e2dfe65e4 } }, { - { 0xdb90e28949770eb8, 0x98fbcc2aacf440a3, 0x21354ffeded7879b, 0x1f6a3e54f26906b6 }, - { 0xb4af6cd05b9c619b, 0x2ddfc9f4b2a58480, 0x3d4fa502ebe94dc4, 0x08fc3a4c677d5f34 }, - { 0x60a4c199d30734ea, 0x40c085b631165cd6, 0xe2333e23f7598295, 0x4f2fad0116b900d1 } + { -0x246f1d76b688f148, -0x670433d5530bbf5d, 0x21354ffeded7879b, 
0x1f6a3e54f26906b6 }, + { -0x4b50932fa4639e65, 0x2ddfc9f4b2a58480, 0x3d4fa502ebe94dc4, 0x08fc3a4c677d5f34 }, + { 0x60a4c199d30734ea, 0x40c085b631165cd6, -0x1dccc1dc08a67d6b, 0x4f2fad0116b900d1 } }, { - { 0x962cd91db73bb638, 0xe60577aafc129c08, 0x6f619b39f3b61689, 0x3451995f2944ee81 }, - { 0x44beb24194ae4e54, 0x5f541c511857ef6c, 0xa61e6b2d368d0498, 0x445484a4972ef7ab }, - { 0x9152fcd09fea7d7c, 0x4a816c94b0935cf6, 0x258e9aaa47285c40, 0x10b89ca6042893b7 } + { -0x69d326e248c449c8, -0x19fa885503ed63f8, 0x6f619b39f3b61689, 0x3451995f2944ee81 }, + { 0x44beb24194ae4e54, 0x5f541c511857ef6c, -0x59e194d2c972fb68, 0x445484a4972ef7ab }, + { -0x6ead032f60158284, 0x4a816c94b0935cf6, 0x258e9aaa47285c40, 0x10b89ca6042893b7 } }, }, { { - { 0xd67cded679d34aa0, 0xcc0b9ec0cc4db39f, 0xa535a456e35d190f, 0x2e05d9eaf61f6fef }, - { 0x9b2a426e3b646025, 0x32127190385ce4cf, 0xa25cffc2dd6dea45, 0x06409010bea8de75 }, - { 0xc447901ad61beb59, 0x661f19bce5dc880a, 0x24685482b7ca6827, 0x293c778cefe07f26 } + { -0x29832129862cb560, -0x33f4613f33b24c61, -0x5aca5ba91ca2e6f1, 0x2e05d9eaf61f6fef }, + { -0x64d5bd91c49b9fdb, 0x32127190385ce4cf, -0x5da3003d229215bb, 0x06409010bea8de75 }, + { -0x3bb86fe529e414a7, 0x661f19bce5dc880a, 0x24685482b7ca6827, 0x293c778cefe07f26 } }, { - { 0x16c795d6a11ff200, 0xcb70d0e2b15815c9, 0x89f293209b5395b5, 0x50b8c2d031e47b4f }, - { 0x86809e7007069096, 0xaad75b15e4e50189, 0x07f35715a21a0147, 0x0487f3f112815d5e }, + { 0x16c795d6a11ff200, -0x348f2f1d4ea7ea37, -0x760d6cdf64ac6a4b, 0x50b8c2d031e47b4f }, + { -0x797f618ff8f96f6a, -0x5528a4ea1b1afe77, 0x07f35715a21a0147, 0x0487f3f112815d5e }, { 0x48350c08068a4962, 0x6ffdd05351092c9a, 0x17af4f4aaf6fc8dd, 0x4b0553b53cdba58b } }, { - { 0xbf05211b27c152d4, 0x5ec26849bd1af639, 0x5e0b2caa8e6fab98, 0x054c8bdd50bd0840 }, - { 0x9c65fcbe1b32ff79, 0xeb75ea9f03b50f9b, 0xfced2a6c6c07e606, 0x35106cd551717908 }, - { 0x38a0b12f1dcf073d, 0x4b60a8a3b7f6a276, 0xfed5ac25d3404f9a, 0x72e82d5e5505c229 } + { -0x40fadee4d83ead2c, 0x5ec26849bd1af639, 
0x5e0b2caa8e6fab98, 0x054c8bdd50bd0840 }, + { -0x639a0341e4cd0087, -0x148a1560fc4af065, -0x0312d59393f819fa, 0x35106cd551717908 }, + { 0x38a0b12f1dcf073d, 0x4b60a8a3b7f6a276, -0x012a53da2cbfb066, 0x72e82d5e5505c229 } }, { { 0x00d9cdfd69771d02, 0x410276cd6cfbf17e, 0x4c45306c1cb12ec7, 0x2857bf1627500861 }, - { 0x6b0b697ff0d844c8, 0xbb12f85cd979cb49, 0xd2a541c6c1da0f1f, 0x7b7c242958ce7211 }, - { 0x9f21903f0101689e, 0xd779dfd3bf861005, 0xa122ee5f3deb0f1b, 0x510df84b485a00d4 } + { 0x6b0b697ff0d844c8, -0x44ed07a3268634b7, -0x2d5abe393e25f0e1, 0x7b7c242958ce7211 }, + { -0x60de6fc0fefe9762, -0x2886202c4079effb, -0x5edd11a0c214f0e5, 0x510df84b485a00d4 } }, { - { 0x24b3c887c70ac15e, 0xb0f3a557fb81b732, 0x9b2cde2fe578cc1b, 0x4cf7ed0703b54f8e }, - { 0xa54133bb9277a1fa, 0x74ec3b6263991237, 0x1a3c54dc35d2f15a, 0x2d347144e482ba3a }, - { 0x6bd47c6598fbee0f, 0x9e4733e2ab55be2d, 0x1093f624127610c5, 0x4e05e26ad0a1eaa4 } + { 0x24b3c887c70ac15e, -0x4f0c5aa8047e48ce, -0x64d321d01a8733e5, 0x4cf7ed0703b54f8e }, + { -0x5abecc446d885e06, 0x74ec3b6263991237, 0x1a3c54dc35d2f15a, 0x2d347144e482ba3a }, + { 0x6bd47c6598fbee0f, -0x61b8cc1d54aa41d3, 0x1093f624127610c5, 0x4e05e26ad0a1eaa4 } }, { - { 0x1833c773e18fe6c0, 0xe3c4711ad3c87265, 0x3bfd3c4f0116b283, 0x1955875eb4cd4db8 }, - { 0xda9b6b624b531f20, 0x429a760e77509abb, 0xdbe9f522e823cb80, 0x618f1856880c8f82 }, - { 0x6da6de8f0e399799, 0x7ad61aa440fda178, 0xb32cd8105e3563dd, 0x15f6beae2ae340ae } + { 0x1833c773e18fe6c0, -0x1c3b8ee52c378d9b, 0x3bfd3c4f0116b283, 0x1955875eb4cd4db8 }, + { -0x2564949db4ace0e0, 0x429a760e77509abb, -0x24160add17dc3480, 0x618f1856880c8f82 }, + { 0x6da6de8f0e399799, 0x7ad61aa440fda178, -0x4cd327efa1ca9c23, 0x15f6beae2ae340ae } }, { - { 0xba9a0f7b9245e215, 0xf368612dd98c0dbb, 0x2e84e4cbf220b020, 0x6ba92fe962d90eda }, - { 0x862bcb0c31ec3a62, 0x810e2b451138f3c2, 0x788ec4b839dac2a4, 0x28f76867ae2a9281 }, - { 0x3e4df9655884e2aa, 0xbd62fbdbdbd465a5, 0xd7596caa0de9e524, 0x6e8042ccb2b1b3d7 } + { -0x4565f0846dba1deb, 
-0x0c979ed22673f245, 0x2e84e4cbf220b020, 0x6ba92fe962d90eda }, + { -0x79d434f3ce13c59e, -0x7ef1d4baeec70c3e, 0x788ec4b839dac2a4, 0x28f76867ae2a9281 }, + { 0x3e4df9655884e2aa, -0x429d0424242b9a5b, -0x28a69355f2161adc, 0x6e8042ccb2b1b3d7 } }, { { 0x1530653616521f7e, 0x660d06b896203dba, 0x2d3989bc545f0879, 0x4b5303af78ebd7b0 }, - { 0xf10d3c29ce28ca6e, 0xbad34540fcb6093d, 0xe7426ed7a2ea2d3f, 0x08af9d4e4ff298b9 }, + { -0x0ef2c3d631d73592, -0x452cbabf0349f6c3, -0x18bd91285d15d2c1, 0x08af9d4e4ff298b9 }, { 0x72f8a6c3bebcbde8, 0x4f0fca4adc3a8e89, 0x6fa9d4e8c7bfdf7a, 0x0dcf2d679b624eb7 } }, }, { { - { 0x753941be5a45f06e, 0xd07caeed6d9c5f65, 0x11776b9c72ff51b6, 0x17d2d1d9ef0d4da9 }, + { 0x753941be5a45f06e, -0x2f8351129263a09b, 0x11776b9c72ff51b6, 0x17d2d1d9ef0d4da9 }, { 0x3d5947499718289c, 0x12ebf8c524533f26, 0x0262bfcb14c3ef15, 0x20b878d577b7518e }, - { 0x27f2af18073f3e6a, 0xfd3fe519d7521069, 0x22e3b72c3ca60022, 0x72214f63cc65c6a7 } + { 0x27f2af18073f3e6a, -0x02c01ae628adef97, 0x22e3b72c3ca60022, 0x72214f63cc65c6a7 } }, { - { 0x1d9db7b9f43b29c9, 0xd605824a4f518f75, 0xf2c072bd312f9dc4, 0x1f24ac855a1545b0 }, - { 0xb4e37f405307a693, 0xaba714d72f336795, 0xd6fbd0a773761099, 0x5fdf48c58171cbc9 }, - { 0x24d608328e9505aa, 0x4748c1d10c1420ee, 0xc7ffe45c06fb25a2, 0x00ba739e2ae395e6 } + { 0x1d9db7b9f43b29c9, -0x29fa7db5b0ae708b, -0x0d3f8d42ced0623c, 0x1f24ac855a1545b0 }, + { -0x4b1c80bfacf8596d, -0x5458eb28d0cc986b, -0x29042f588c89ef67, 0x5fdf48c58171cbc9 }, + { 0x24d608328e9505aa, 0x4748c1d10c1420ee, -0x38001ba3f904da5e, 0x00ba739e2ae395e6 } }, { - { 0xae4426f5ea88bb26, 0x360679d984973bfb, 0x5c9f030c26694e50, 0x72297de7d518d226 }, - { 0x592e98de5c8790d6, 0xe5bfb7d345c2a2df, 0x115a3b60f9b49922, 0x03283a3e67ad78f3 }, - { 0x48241dc7be0cb939, 0x32f19b4d8b633080, 0xd3dfc90d02289308, 0x05e1296846271945 } + { -0x51bbd90a157744da, 0x360679d984973bfb, 0x5c9f030c26694e50, 0x72297de7d518d226 }, + { 0x592e98de5c8790d6, -0x1a40482cba3d5d21, 0x115a3b60f9b49922, 0x03283a3e67ad78f3 }, + { 
0x48241dc7be0cb939, 0x32f19b4d8b633080, -0x2c2036f2fdd76cf8, 0x05e1296846271945 } }, { - { 0xadbfbbc8242c4550, 0xbcc80cecd03081d9, 0x843566a6f5c8df92, 0x78cf25d38258ce4c }, - { 0xba82eeb32d9c495a, 0xceefc8fcf12bb97c, 0xb02dabae93b5d1e0, 0x39c00c9c13698d9b }, - { 0x15ae6b8e31489d68, 0xaa851cab9c2bf087, 0xc9a75a97f04efa05, 0x006b52076b3ff832 } + { -0x52404437dbd3bab0, -0x4337f3132fcf7e27, -0x7bca99590a37206e, 0x78cf25d38258ce4c }, + { -0x457d114cd263b6a6, -0x311037030ed44684, -0x4fd254516c4a2e20, 0x39c00c9c13698d9b }, + { 0x15ae6b8e31489d68, -0x557ae35463d40f79, -0x3658a5680fb105fb, 0x006b52076b3ff832 } }, { - { 0xf5cb7e16b9ce082d, 0x3407f14c417abc29, 0xd4b36bce2bf4a7ab, 0x7de2e9561a9f75ce }, - { 0x29e0cfe19d95781c, 0xb681df18966310e2, 0x57df39d370516b39, 0x4d57e3443bc76122 }, - { 0xde70d4f4b6a55ecb, 0x4801527f5d85db99, 0xdbc9c440d3ee9a81, 0x6b2a90af1a6029ed } + { -0x0a3481e94631f7d3, 0x3407f14c417abc29, -0x2b4c9431d40b5855, 0x7de2e9561a9f75ce }, + { 0x29e0cfe19d95781c, -0x497e20e7699cef1e, 0x57df39d370516b39, 0x4d57e3443bc76122 }, + { -0x218f2b0b495aa135, 0x4801527f5d85db99, -0x24363bbf2c11657f, 0x6b2a90af1a6029ed } }, { - { 0x77ebf3245bb2d80a, 0xd8301b472fb9079b, 0xc647e6f24cee7333, 0x465812c8276c2109 }, - { 0x6923f4fc9ae61e97, 0x5735281de03f5fd1, 0xa764ae43e6edd12d, 0x5fd8f4e9d12d3e4a }, + { 0x77ebf3245bb2d80a, -0x27cfe4b8d046f865, -0x39b8190db3118ccd, 0x465812c8276c2109 }, + { 0x6923f4fc9ae61e97, 0x5735281de03f5fd1, -0x589b51bc19122ed3, 0x5fd8f4e9d12d3e4a }, { 0x4d43beb22a1062d9, 0x7065fb753831dc16, 0x180d4a7bde2968d7, 0x05b32c2b1cb16790 } }, { - { 0xf7fca42c7ad58195, 0x3214286e4333f3cc, 0xb6c29d0d340b979d, 0x31771a48567307e1 }, - { 0xc8c05eccd24da8fd, 0xa1cf1aac05dfef83, 0xdbbeeff27df9cd61, 0x3b5556a37b471e99 }, - { 0x32b0c524e14dd482, 0xedb351541a2ba4b6, 0xa3d16048282b5af3, 0x4fc079d27a7336eb } + { -0x08035bd3852a7e6b, 0x3214286e4333f3cc, -0x493d62f2cbf46863, 0x31771a48567307e1 }, + { -0x373fa1332db25703, -0x5e30e553fa20107d, -0x2441100d8206329f, 
0x3b5556a37b471e99 }, + { 0x32b0c524e14dd482, -0x124caeabe5d45b4a, -0x5c2e9fb7d7d4a50d, 0x4fc079d27a7336eb } }, { - { 0xdc348b440c86c50d, 0x1337cbc9cc94e651, 0x6422f74d643e3cb9, 0x241170c2bae3cd08 }, - { 0x51c938b089bf2f7f, 0x2497bd6502dfe9a7, 0xffffc09c7880e453, 0x124567cecaf98e92 }, - { 0x3ff9ab860ac473b4, 0xf0911dee0113e435, 0x4ae75060ebc6c4af, 0x3f8612966c87000d } + { -0x23cb74bbf3793af3, 0x1337cbc9cc94e651, 0x6422f74d643e3cb9, 0x241170c2bae3cd08 }, + { 0x51c938b089bf2f7f, 0x2497bd6502dfe9a7, -0x00003f63877f1bad, 0x124567cecaf98e92 }, + { 0x3ff9ab860ac473b4, -0x0f6ee211feec1bcb, 0x4ae75060ebc6c4af, 0x3f8612966c87000d } }, }, { { - { 0x529fdffe638c7bf3, 0xdf2b9e60388b4995, 0xe027b34f1bad0249, 0x7bc92fc9b9fa74ed }, - { 0x0c9c5303f7957be4, 0xa3c31a20e085c145, 0xb0721d71d0850050, 0x0aba390eab0bf2da }, - { 0x9f97ef2e801ad9f9, 0x83697d5479afda3a, 0xe906b3ffbd596b50, 0x02672b37dd3fb8e0 } + { 0x529fdffe638c7bf3, -0x20d4619fc774b66b, -0x1fd84cb0e452fdb7, 0x7bc92fc9b9fa74ed }, + { 0x0c9c5303f7957be4, -0x5c3ce5df1f7a3ebb, -0x4f8de28e2f7affb0, 0x0aba390eab0bf2da }, + { -0x606810d17fe52607, -0x7c9682ab865025c6, -0x16f94c0042a694b0, 0x02672b37dd3fb8e0 } }, { - { 0xee9ba729398ca7f5, 0xeb9ca6257a4849db, 0x29eb29ce7ec544e1, 0x232ca21ef736e2c8 }, - { 0x48b2ca8b260885e4, 0xa4286bec82b34c1c, 0x937e1a2617f58f74, 0x741d1fcbab2ca2a5 }, - { 0xbf61423d253fcb17, 0x08803ceafa39eb14, 0xf18602df9851c7af, 0x0400f3a049e3414b } + { -0x116458d6c673580b, -0x146359da85b7b625, 0x29eb29ce7ec544e1, 0x232ca21ef736e2c8 }, + { 0x48b2ca8b260885e4, -0x5bd794137d4cb3e4, -0x6c81e5d9e80a708c, 0x741d1fcbab2ca2a5 }, + { -0x409ebdc2dac034e9, 0x08803ceafa39eb14, -0x0e79fd2067ae3851, 0x0400f3a049e3414b } }, { - { 0x2efba412a06e7b06, 0x146785452c8d2560, 0xdf9713ebd67a91c7, 0x32830ac7157eadf3 }, - { 0xabce0476ba61c55b, 0x36a3d6d7c4d39716, 0x6eb259d5e8d82d09, 0x0c9176e984d756fb }, + { 0x2efba412a06e7b06, 0x146785452c8d2560, -0x2068ec1429856e39, 0x32830ac7157eadf3 }, + { -0x5431fb89459e3aa5, 0x36a3d6d7c4d39716, 
0x6eb259d5e8d82d09, 0x0c9176e984d756fb }, { 0x0e782a7ab73769e8, 0x04a05d7875b18e2c, 0x29525226ebcceae1, 0x0d794f8383eba820 } }, { { 0x7be44ce7a7a2e1ac, 0x411fd93efad1b8b7, 0x1734a1d70d5f7c9b, 0x0d6592233127db16 }, - { 0xff35f5cb9e1516f4, 0xee805bcf648aae45, 0xf0d73c2bb93a9ef3, 0x097b0bf22092a6c2 }, - { 0xc48bab1521a9d733, 0xa6c2eaead61abb25, 0x625c6c1cc6cb4305, 0x7fc90fea93eb3a67 } + { -0x00ca0a3461eae90c, -0x117fa4309b7551bb, -0x0f28c3d446c5610d, 0x097b0bf22092a6c2 }, + { -0x3b7454eade5628cd, -0x593d151529e544db, 0x625c6c1cc6cb4305, 0x7fc90fea93eb3a67 } }, { - { 0xc527deb59c7cb23d, 0x955391695328404e, 0xd64392817ccf2c7a, 0x6ce97dabf7d8fa11 }, + { -0x3ad8214a63834dc3, -0x6aac6e96acd7bfb2, -0x29bc6d7e8330d386, 0x6ce97dabf7d8fa11 }, { 0x0408f1fe1f5c5926, 0x1a8f2f5e3b258bf4, 0x40a951a2fdc71669, 0x6598ee93c98b577e }, - { 0x25b5a8e50ef7c48f, 0xeb6034116f2ce532, 0xc5e75173e53de537, 0x73119fa08c12bb03 } + { 0x25b5a8e50ef7c48f, -0x149fcbee90d31ace, -0x3a18ae8c1ac21ac9, 0x73119fa08c12bb03 } }, { - { 0x7845b94d21f4774d, 0xbf62f16c7897b727, 0x671857c03c56522b, 0x3cd6a85295621212 }, - { 0xed30129453f1a4cb, 0xbce621c9c8f53787, 0xfacb2b1338bee7b9, 0x3025798a9ea8428c }, - { 0x3fecde923aeca999, 0xbdaa5b0062e8c12f, 0x67b99dfc96988ade, 0x3f52c02852661036 } + { 0x7845b94d21f4774d, -0x409d0e93876848d9, 0x671857c03c56522b, 0x3cd6a85295621212 }, + { -0x12cfed6bac0e5b35, -0x4319de36370ac879, -0x0534d4ecc7411847, 0x3025798a9ea8428c }, + { 0x3fecde923aeca999, -0x4255a4ff9d173ed1, 0x67b99dfc96988ade, 0x3f52c02852661036 } }, { - { 0x9258bf99eec416c6, 0xac8a5017a9d2f671, 0x629549ab16dea4ab, 0x05d0e85c99091569 }, - { 0xffeaa48e2a1351c6, 0x28624754fa7f53d7, 0x0b5ba9e57582ddf1, 0x60c0104ba696ac59 }, - { 0x051de020de9cbe97, 0xfa07fc56b50bcf74, 0x378cec9f0f11df65, 0x36853c69ab96de4d } + { -0x6da74066113be93a, -0x5375afe8562d098f, 0x629549ab16dea4ab, 0x05d0e85c99091569 }, + { -0x00155b71d5ecae3a, 0x28624754fa7f53d7, 0x0b5ba9e57582ddf1, 0x60c0104ba696ac59 }, + { 0x051de020de9cbe97, 
-0x05f803a94af4308c, 0x378cec9f0f11df65, 0x36853c69ab96de4d } }, { - { 0x4433c0b0fac5e7be, 0x724bae854c08dcbe, 0xf1f24cc446978f9b, 0x4a0aff6d62825fc8 }, + { 0x4433c0b0fac5e7be, 0x724bae854c08dcbe, -0x0e0db33bb9687065, 0x4a0aff6d62825fc8 }, { 0x36d9b8de78f39b2d, 0x7f42ed71a847b9ec, 0x241cd1d679bd3fde, 0x6a704fec92fbce6b }, - { 0xe917fb9e61095301, 0xc102df9402a092f8, 0xbf09e2f5fa66190b, 0x681109bee0dcfe37 } + { -0x16e804619ef6acff, -0x3efd206bfd5f6d08, -0x40f61d0a0599e6f5, 0x681109bee0dcfe37 } }, }, { { - { 0x9c18fcfa36048d13, 0x29159db373899ddd, 0xdc9f350b9f92d0aa, 0x26f57eee878a19d4 }, + { -0x63e70305c9fb72ed, 0x29159db373899ddd, -0x2360caf4606d2f56, 0x26f57eee878a19d4 }, { 0x559a0cc9782a0dde, 0x551dcdb2ea718385, 0x7f62865b31ef238c, 0x504aa7767973613d }, - { 0x0cab2cd55687efb1, 0x5180d162247af17b, 0x85c15a344f5a2467, 0x4041943d9dba3069 } + { 0x0cab2cd55687efb1, 0x5180d162247af17b, -0x7a3ea5cbb0a5db99, 0x4041943d9dba3069 } }, { - { 0x4b217743a26caadd, 0x47a6b424648ab7ce, 0xcb1d4f7a03fbc9e3, 0x12d931429800d019 }, - { 0xc3c0eeba43ebcc96, 0x8d749c9c26ea9caf, 0xd9fa95ee1c77ccc6, 0x1420a1d97684340f }, + { 0x4b217743a26caadd, 0x47a6b424648ab7ce, -0x34e2b085fc04361d, 0x12d931429800d019 }, + { -0x3c3f1145bc14336a, -0x728b6363d9156351, -0x26056a11e388333a, 0x1420a1d97684340f }, { 0x00c67799d337594f, 0x5e3c5140b23aa47b, 0x44182854e35ff395, 0x1b4f92314359a012 } }, { - { 0x33cf3030a49866b1, 0x251f73d2215f4859, 0xab82aa4051def4f6, 0x5ff191d56f9a23f6 }, + { 0x33cf3030a49866b1, 0x251f73d2215f4859, -0x547d55bfae210b0a, 0x5ff191d56f9a23f6 }, { 0x3e5c109d89150951, 0x39cefa912de9696a, 0x20eae43f975f3020, 0x239b572a7f132dae }, - { 0x819ed433ac2d9068, 0x2883ab795fc98523, 0xef4572805593eb3d, 0x020c526a758f36cb } + { -0x7e612bcc53d26f98, 0x2883ab795fc98523, -0x10ba8d7faa6c14c3, 0x020c526a758f36cb } }, { - { 0xe931ef59f042cc89, 0x2c589c9d8e124bb6, 0xadc8e18aaec75997, 0x452cfe0a5602c50c }, - { 0x779834f89ed8dbbc, 0xc8f2aaf9dc7ca46c, 0xa9524cdca3e1b074, 0x02aacc4615313877 }, - { 
0x86a0f7a0647877df, 0xbbc464270e607c9f, 0xab17ea25f1fb11c9, 0x4cfb7d7b304b877b } + { -0x16ce10a60fbd3377, 0x2c589c9d8e124bb6, -0x52371e755138a669, 0x452cfe0a5602c50c }, + { 0x779834f89ed8dbbc, -0x370d550623835b94, -0x56adb3235c1e4f8c, 0x02aacc4615313877 }, + { -0x795f085f9b878821, -0x443b9bd8f19f8361, -0x54e815da0e04ee37, 0x4cfb7d7b304b877b } }, { - { 0xe28699c29789ef12, 0x2b6ecd71df57190d, 0xc343c857ecc970d0, 0x5b1d4cbc434d3ac5 }, - { 0x72b43d6cb89b75fe, 0x54c694d99c6adc80, 0xb8c3aa373ee34c9f, 0x14b4622b39075364 }, - { 0xb6fb2615cc0a9f26, 0x3a4f0e2bb88dcce5, 0x1301498b3369a705, 0x2f98f71258592dd1 } + { -0x1d79663d687610ee, 0x2b6ecd71df57190d, -0x3cbc37a813368f30, 0x5b1d4cbc434d3ac5 }, + { 0x72b43d6cb89b75fe, 0x54c694d99c6adc80, -0x473c55c8c11cb361, 0x14b4622b39075364 }, + { -0x4904d9ea33f560da, 0x3a4f0e2bb88dcce5, 0x1301498b3369a705, 0x2f98f71258592dd1 } }, { - { 0x2e12ae444f54a701, 0xfcfe3ef0a9cbd7de, 0xcebf890d75835de0, 0x1d8062e9e7614554 }, - { 0x0c94a74cb50f9e56, 0x5b1ff4a98e8e1320, 0x9a2acc2182300f67, 0x3a6ae249d806aaf9 }, - { 0x657ada85a9907c5a, 0x1a0ea8b591b90f62, 0x8d0e1dfbdf34b4e9, 0x298b8ce8aef25ff3 } + { 0x2e12ae444f54a701, -0x0301c10f56342822, -0x314076f28a7ca220, 0x1d8062e9e7614554 }, + { 0x0c94a74cb50f9e56, 0x5b1ff4a98e8e1320, -0x65d533de7dcff099, 0x3a6ae249d806aaf9 }, + { 0x657ada85a9907c5a, 0x1a0ea8b591b90f62, -0x72f1e20420cb4b17, 0x298b8ce8aef25ff3 } }, { - { 0x837a72ea0a2165de, 0x3fab07b40bcf79f6, 0x521636c77738ae70, 0x6ba6271803a7d7dc }, - { 0x2a927953eff70cb2, 0x4b89c92a79157076, 0x9418457a30a7cf6a, 0x34b8a8404d5ce485 }, - { 0xc26eecb583693335, 0xd5a813df63b5fefd, 0xa293aa9aa4b22573, 0x71d62bdd465e1c6a } + { -0x7c858d15f5de9a22, 0x3fab07b40bcf79f6, 0x521636c77738ae70, 0x6ba6271803a7d7dc }, + { 0x2a927953eff70cb2, 0x4b89c92a79157076, -0x6be7ba85cf583096, 0x34b8a8404d5ce485 }, + { -0x3d91134a7c96cccb, -0x2a57ec209c4a0103, -0x5d6c55655b4dda8d, 0x71d62bdd465e1c6a } }, { - { 0xcd2db5dab1f75ef5, 0xd77f95cf16b065f5, 0x14571fea3f49f085, 
0x1c333621262b2b3d }, - { 0x6533cc28d378df80, 0xf6db43790a0fa4b4, 0xe3645ff9f701da5a, 0x74d5f317f3172ba4 }, - { 0xa86fe55467d9ca81, 0x398b7c752b298c37, 0xda6d0892e3ac623b, 0x4aebcc4547e9d98c } + { -0x32d24a254e08a10b, -0x28806a30e94f9a0b, 0x14571fea3f49f085, 0x1c333621262b2b3d }, + { 0x6533cc28d378df80, -0x0924bc86f5f05b4c, -0x1c9ba00608fe25a6, 0x74d5f317f3172ba4 }, + { -0x57901aab9826357f, 0x398b7c752b298c37, -0x2592f76d1c539dc5, 0x4aebcc4547e9d98c } }, }, { { - { 0x0de9b204a059a445, 0xe15cb4aa4b17ad0f, 0xe1bbec521f79c557, 0x2633f1b9d071081b }, - { 0x53175a7205d21a77, 0xb0c04422d3b934d4, 0xadd9f24bdd5deadc, 0x074f46e69f10ff8c }, - { 0xc1fb4177018b9910, 0xa6ea20dc6c0fe140, 0xd661f3e74354c6ff, 0x5ecb72e6f1a3407a } + { 0x0de9b204a059a445, -0x1ea34b55b4e852f1, -0x1e4413ade0863aa9, 0x2633f1b9d071081b }, + { 0x53175a7205d21a77, -0x4f3fbbdd2c46cb2c, -0x52260db422a21524, 0x074f46e69f10ff8c }, + { -0x3e04be88fe7466f0, -0x5915df2393f01ec0, -0x299e0c18bcab3901, 0x5ecb72e6f1a3407a } }, { - { 0xfeeae106e8e86997, 0x9863337f98d09383, 0x9470480eaa06ebef, 0x038b6898d4c5c2d0 }, - { 0xa515a31b2259fb4e, 0x0960f3972bcac52f, 0xedb52fec8d3454cb, 0x382e2720c476c019 }, - { 0xf391c51d8ace50a6, 0x3142d0b9ae2d2948, 0xdb4d5a1a7f24ca80, 0x21aeba8b59250ea8 } + { -0x01151ef917179669, -0x679ccc80672f6c7d, -0x6b8fb7f155f91411, 0x038b6898d4c5c2d0 }, + { -0x5aea5ce4dda604b2, 0x0960f3972bcac52f, -0x124ad01372cbab35, 0x382e2720c476c019 }, + { -0x0c6e3ae27531af5a, 0x3142d0b9ae2d2948, -0x24b2a5e580db3580, 0x21aeba8b59250ea8 } }, { { 0x53853600f0087f23, 0x4c461879da7d5784, 0x6af303deb41f6860, 0x0a3c16c5c27c18ed }, { 0x24f13b34cf405530, 0x3c44ea4a43088af7, 0x5dd5c5170006a482, 0x118eb8f8890b086d }, - { 0x17e49c17cc947f3d, 0xccc6eda6aac1d27b, 0xdf6092ceb0f08e56, 0x4909b3e22c67c36b } + { 0x17e49c17cc947f3d, -0x33391259553e2d85, -0x209f6d314f0f71aa, 0x4909b3e22c67c36b } }, { { 0x59a16676706ff64e, 0x10b953dd0d86a53d, 0x5848e1e6ce5c0b96, 0x2d8b78e712780c68 }, - { 0x9c9c85ea63fe2e89, 0xbe1baf910e9412ec, 
0x8f7baa8a86fbfe7b, 0x0fb17f9fef968b6c }, - { 0x79d5c62eafc3902b, 0x773a215289e80728, 0xc38ae640e10120b9, 0x09ae23717b2b1a6d } + { -0x63637a159c01d177, -0x41e4506ef16bed14, -0x7084557579040185, 0x0fb17f9fef968b6c }, + { 0x79d5c62eafc3902b, 0x773a215289e80728, -0x3c7519bf1efedf47, 0x09ae23717b2b1a6d } }, { - { 0x10ab8fa1ad32b1d0, 0xe9aced1be2778b24, 0xa8856bc0373de90f, 0x66f35ddddda53996 }, - { 0xbb6a192a4e4d083c, 0x34ace0630029e192, 0x98245a59aafabaeb, 0x6d9c8a9ada97faac }, - { 0xd27d9afb24997323, 0x1bb7e07ef6f01d2e, 0x2ba7472df52ecc7f, 0x03019b4f646f9dc8 } + { 0x10ab8fa1ad32b1d0, -0x165312e41d8874dc, -0x577a943fc8c216f1, 0x66f35ddddda53996 }, + { -0x4495e6d5b1b2f7c4, 0x34ace0630029e192, -0x67dba5a655054515, 0x6d9c8a9ada97faac }, + { -0x2d826504db668cdd, 0x1bb7e07ef6f01d2e, 0x2ba7472df52ecc7f, 0x03019b4f646f9dc8 } }, { - { 0xaf09b214e6b3dc6b, 0x3f7573b5ad7d2f65, 0xd019d988100a23b0, 0x392b63a58b5c35f7 }, - { 0x04a186b5565345cd, 0xeee76610bcc4116a, 0x689c73b478fb2a45, 0x387dcbff65697512 }, - { 0x4093addc9c07c205, 0xc565be15f532c37e, 0x63dbecfd1583402a, 0x61722b4aef2e032e } + { -0x50f64deb194c2395, 0x3f7573b5ad7d2f65, -0x2fe62677eff5dc50, 0x392b63a58b5c35f7 }, + { 0x04a186b5565345cd, -0x111899ef433bee96, 0x689c73b478fb2a45, 0x387dcbff65697512 }, + { 0x4093addc9c07c205, -0x3a9a41ea0acd3c82, 0x63dbecfd1583402a, 0x61722b4aef2e032e } }, { - { 0xd6b07a5581cb0e3c, 0x290ff006d9444969, 0x08680b6a16dcda1f, 0x5568d2b75a06de59 }, - { 0x0012aafeecbd47af, 0x55a266fb1cd46309, 0xf203eb680967c72c, 0x39633944ca3c1429 }, - { 0x8d0cb88c1b37cfe1, 0x05b6a5a3053818f3, 0xf2e9bc04b787d959, 0x6beba1249add7f64 } + { -0x294f85aa7e34f1c4, 0x290ff006d9444969, 0x08680b6a16dcda1f, 0x5568d2b75a06de59 }, + { 0x0012aafeecbd47af, 0x55a266fb1cd46309, -0x0dfc1497f69838d4, 0x39633944ca3c1429 }, + { -0x72f34773e4c8301f, 0x05b6a5a3053818f3, -0x0d1643fb487826a7, 0x6beba1249add7f64 } }, { - { 0x5c3cecb943f5a53b, 0x9cc9a61d06c08df2, 0xcfba639a85895447, 0x5a845ae80df09fd5 }, + { 0x5c3cecb943f5a53b, 
-0x633659e2f93f720e, -0x30459c657a76abb9, 0x5a845ae80df09fd5 }, { 0x1d06005ca5b1b143, 0x6d4c6bb87fd1cda2, 0x6ef5967653fcffe7, 0x097c29e8c1ce1ea5 }, - { 0x4ce97dbe5deb94ca, 0x38d0a4388c709c48, 0xc43eced4a169d097, 0x0a1249fff7e587c3 } + { 0x4ce97dbe5deb94ca, 0x38d0a4388c709c48, -0x3bc1312b5e962f69, 0x0a1249fff7e587c3 } }, }, { { - { 0x0b408d9e7354b610, 0x806b32535ba85b6e, 0xdbe63a034a58a207, 0x173bd9ddc9a1df2c }, - { 0x12f0071b276d01c9, 0xe7b8bac586c48c70, 0x5308129b71d6fba9, 0x5d88fbf95a3db792 }, - { 0x2b500f1efe5872df, 0x58d6582ed43918c1, 0xe6ed278ec9673ae0, 0x06e1cd13b19ea319 } + { 0x0b408d9e7354b610, -0x7f94cdaca457a492, -0x2419c5fcb5a75df9, 0x173bd9ddc9a1df2c }, + { 0x12f0071b276d01c9, -0x1847453a793b7390, 0x5308129b71d6fba9, 0x5d88fbf95a3db792 }, + { 0x2b500f1efe5872df, 0x58d6582ed43918c1, -0x1912d8713698c520, 0x06e1cd13b19ea319 } }, { { 0x472baf629e5b0353, 0x3baa0b90278d0447, 0x0c785f469643bf27, 0x7f3a6a1a8d837b13 }, { 0x40d0ad516f166f23, 0x118e32931fab6abe, 0x3fe35e14a04d088e, 0x3080603526e16266 }, - { 0xf7e644395d3d800b, 0x95a8d555c901edf6, 0x68cd7830592c6339, 0x30d0fded2e51307e } + { -0x0819bbc6a2c27ff5, -0x6a572aaa36fe120a, 0x68cd7830592c6339, 0x30d0fded2e51307e } }, { - { 0x9cb4971e68b84750, 0xa09572296664bbcf, 0x5c8de72672fa412b, 0x4615084351c589d9 }, - { 0xe0594d1af21233b3, 0x1bdbe78ef0cc4d9c, 0x6965187f8f499a77, 0x0a9214202c099868 }, - { 0xbc9019c0aeb9a02e, 0x55c7110d16034cae, 0x0e6df501659932ec, 0x3bca0d2895ca5dfe } + { -0x634b68e19747b8b0, -0x5f6a8dd6999b4431, 0x5c8de72672fa412b, 0x4615084351c589d9 }, + { -0x1fa6b2e50dedcc4d, 0x1bdbe78ef0cc4d9c, 0x6965187f8f499a77, 0x0a9214202c099868 }, + { -0x436fe63f51465fd2, 0x55c7110d16034cae, 0x0e6df501659932ec, 0x3bca0d2895ca5dfe } }, { - { 0x9c688eb69ecc01bf, 0xf0bc83ada644896f, 0xca2d955f5f7a9fe2, 0x4ea8b4038df28241 }, - { 0x40f031bc3c5d62a4, 0x19fc8b3ecff07a60, 0x98183da2130fb545, 0x5631deddae8f13cd }, - { 0x2aed460af1cad202, 0x46305305a48cee83, 0x9121774549f11a5f, 0x24ce0930542ca463 } + { 
-0x639771496133fe41, -0x0f437c5259bb7691, -0x35d26aa0a085601e, 0x4ea8b4038df28241 }, + { 0x40f031bc3c5d62a4, 0x19fc8b3ecff07a60, -0x67e7c25decf04abb, 0x5631deddae8f13cd }, + { 0x2aed460af1cad202, 0x46305305a48cee83, -0x6ede88bab60ee5a1, 0x24ce0930542ca463 } }, { - { 0x3fcfa155fdf30b85, 0xd2f7168e36372ea4, 0xb2e064de6492f844, 0x549928a7324f4280 }, - { 0x1fe890f5fd06c106, 0xb5c468355d8810f2, 0x827808fe6e8caf3e, 0x41d4e3c28a06d74b }, - { 0xf26e32a763ee1a2e, 0xae91e4b7d25ffdea, 0xbc3bd33bd17f4d69, 0x491b66dec0dcff6a } + { 0x3fcfa155fdf30b85, -0x2d08e971c9c8d15c, -0x4d1f9b219b6d07bc, 0x549928a7324f4280 }, + { 0x1fe890f5fd06c106, -0x4a3b97caa277ef0e, -0x7d87f701917350c2, 0x41d4e3c28a06d74b }, + { -0x0d91cd589c11e5d2, -0x516e1b482da00216, -0x43c42cc42e80b297, 0x491b66dec0dcff6a } }, { - { 0x75f04a8ed0da64a1, 0xed222caf67e2284b, 0x8234a3791f7b7ba4, 0x4cf6b8b0b7018b67 }, - { 0x98f5b13dc7ea32a7, 0xe3d5f8cc7e16db98, 0xac0abf52cbf8d947, 0x08f338d0c85ee4ac }, - { 0xc383a821991a73bd, 0xab27bc01df320c7a, 0xc13d331b84777063, 0x530d4a82eb078a99 } + { 0x75f04a8ed0da64a1, -0x12ddd350981dd7b5, -0x7dcb5c86e084845c, 0x4cf6b8b0b7018b67 }, + { -0x670a4ec23815cd59, -0x1c2a073381e92468, -0x53f540ad340726b9, 0x08f338d0c85ee4ac }, + { -0x3c7c57de66e58c43, -0x54d843fe20cdf386, -0x3ec2cce47b888f9d, 0x530d4a82eb078a99 } }, { { 0x6d6973456c9abf9e, 0x257fb2fc4900a880, 0x2bacf412c8cfb850, 0x0db3e7e00cbfbd5b }, - { 0x004c3630e1f94825, 0x7e2d78268cab535a, 0xc7482323cc84ff8b, 0x65ea753f101770b9 }, - { 0x3d66fc3ee2096363, 0x81d62c7f61b5cb6b, 0x0fbe044213443b1a, 0x02a4ec1921e1a1db } + { 0x004c3630e1f94825, 0x7e2d78268cab535a, -0x38b7dcdc337b0075, 0x65ea753f101770b9 }, + { 0x3d66fc3ee2096363, -0x7e29d3809e4a3495, 0x0fbe044213443b1a, 0x02a4ec1921e1a1db } }, { - { 0xf5c86162f1cf795f, 0x118c861926ee57f2, 0x172124851c063578, 0x36d12b5dec067fcf }, - { 0x5ce6259a3b24b8a2, 0xb8577acc45afa0b8, 0xcccbe6e88ba07037, 0x3d143c51127809bf }, - { 0x126d279179154557, 0xd5e48f5cfc783a0a, 0x36bdb6e8df179bac, 
0x2ef517885ba82859 } + { -0x0a379e9d0e3086a1, 0x118c861926ee57f2, 0x172124851c063578, 0x36d12b5dec067fcf }, + { 0x5ce6259a3b24b8a2, -0x47a88533ba505f48, -0x33341917745f8fc9, 0x3d143c51127809bf }, + { 0x126d279179154557, -0x2a1b70a30387c5f6, 0x36bdb6e8df179bac, 0x2ef517885ba82859 } }, }, { { - { 0x1ea436837c6da1e9, 0xf9c189af1fb9bdbe, 0x303001fcce5dd155, 0x28a7c99ebc57be52 }, - { 0x88bd438cd11e0d4a, 0x30cb610d43ccf308, 0xe09a0e3791937bcc, 0x4559135b25b1720c }, - { 0xb8fd9399e8d19e9d, 0x908191cb962423ff, 0xb2b948d747c742a3, 0x37f33226d7fb44c4 } + { 0x1ea436837c6da1e9, -0x063e7650e0464242, 0x303001fcce5dd155, 0x28a7c99ebc57be52 }, + { -0x7742bc732ee1f2b6, 0x30cb610d43ccf308, -0x1f65f1c86e6c8434, 0x4559135b25b1720c }, + { -0x47026c66172e6163, -0x6f7e6e3469dbdc01, -0x4d46b728b838bd5d, 0x37f33226d7fb44c4 } }, { { 0x33912553c821b11d, 0x66ed42c241e301df, 0x066fcc11104222fd, 0x307a3b41c192168f }, - { 0x0dae8767b55f6e08, 0x4a43b3b35b203a02, 0xe3725a6e80af8c79, 0x0f7a7fd1705fa7a3 }, - { 0x8eeb5d076eb55ce0, 0x2fc536bfaa0d925a, 0xbe81830fdcb6c6e8, 0x556c7045827baf52 } + { 0x0dae8767b55f6e08, 0x4a43b3b35b203a02, -0x1c8da5917f507387, 0x0f7a7fd1705fa7a3 }, + { -0x7114a2f8914aa320, 0x2fc536bfaa0d925a, -0x417e7cf023493918, 0x556c7045827baf52 } }, { - { 0xb94b90022bf44406, 0xabd4237eff90b534, 0x7600a960faf86d3a, 0x2f45abdac2322ee3 }, - { 0x8e2b517302e9d8b7, 0xe3e52269248714e8, 0xbd4fbd774ca960b5, 0x6f4b4199c5ecada9 }, - { 0x61af4912c8ef8a6a, 0xe58fa4fe43fb6e5e, 0xb5afcc5d6fd427cf, 0x6a5393281e1e11eb } + { -0x46b46ffdd40bbbfa, -0x542bdc81006f4acc, 0x7600a960faf86d3a, 0x2f45abdac2322ee3 }, + { -0x71d4ae8cfd162749, -0x1c1add96db78eb18, -0x42b04288b3569f4b, 0x6f4b4199c5ecada9 }, + { 0x61af4912c8ef8a6a, -0x1a705b01bc0491a2, -0x4a5033a2902bd831, 0x6a5393281e1e11eb } }, { { 0x0fff04fe149443cf, 0x53cac6d9865cddd7, 0x31385b03531ed1b7, 0x5846a27cacd1039d }, - { 0xf3da5139a5d1ee89, 0x8145457cff936988, 0x3f622fed00e188c4, 0x0f513815db8b5a3d }, + { -0x0c25aec65a2e1177, -0x7ebaba83006c9678, 
0x3f622fed00e188c4, 0x0f513815db8b5a3d }, { 0x4ff5cdac1eb08717, 0x67e8b29590f2e9bc, 0x44093b5e237afa99, 0x0d414bed8708b8b2 } }, { - { 0x81886a92294ac9e8, 0x23162b45d55547be, 0x94cfbc4403715983, 0x50eb8fdb134bc401 }, - { 0xcfb68265fd0e75f6, 0xe45b3e28bb90e707, 0x7242a8de9ff92c7a, 0x685b3201933202dd }, - { 0xc0b73ec6d6b330cd, 0x84e44807132faff1, 0x732b7352c4a5dee1, 0x5d7c7cf1aa7cd2d2 } + { -0x7e77956dd6b53618, 0x23162b45d55547be, -0x6b3043bbfc8ea67d, 0x50eb8fdb134bc401 }, + { -0x30497d9a02f18a0a, -0x1ba4c1d7446f18f9, 0x7242a8de9ff92c7a, 0x685b3201933202dd }, + { -0x3f48c139294ccf33, -0x7b1bb7f8ecd0500f, 0x732b7352c4a5dee1, 0x5d7c7cf1aa7cd2d2 } }, { - { 0x33d1013e9b73a562, 0x925cef5748ec26e1, 0xa7fce614dd468058, 0x78b0fad41e9aa438 }, - { 0xaf3b46bf7a4aafa2, 0xb78705ec4d40d411, 0x114f0c6aca7c15e3, 0x3f364faaa9489d4d }, - { 0xbf56a431ed05b488, 0xa533e66c9c495c7e, 0xe8652baf87f3651a, 0x0241800059d66c33 } + { 0x33d1013e9b73a562, -0x6da310a8b713d91f, -0x580319eb22b97fa8, 0x78b0fad41e9aa438 }, + { -0x50c4b94085b5505e, -0x4878fa13b2bf2bef, 0x114f0c6aca7c15e3, 0x3f364faaa9489d4d }, + { -0x40a95bce12fa4b78, -0x5acc199363b6a382, -0x179ad450780c9ae6, 0x0241800059d66c33 } }, { - { 0x28350c7dcf38ea01, 0x7c6cdbc0b2917ab6, 0xace7cfbe857082f7, 0x4d2845aba2d9a1e0 }, - { 0xceb077fea37a5be4, 0xdb642f02e5a5eeb7, 0xc2e6d0c5471270b8, 0x4771b65538e4529c }, - { 0xbb537fe0447070de, 0xcba744436dd557df, 0xd3b5a3473600dbcb, 0x4aeabbe6f9ffd7f8 } + { 0x28350c7dcf38ea01, 0x7c6cdbc0b2917ab6, -0x531830417a8f7d09, 0x4d2845aba2d9a1e0 }, + { -0x314f88015c85a41c, -0x249bd0fd1a5a1149, -0x3d192f3ab8ed8f48, 0x4771b65538e4529c }, + { -0x44ac801fbb8f8f22, -0x3458bbbc922aa821, -0x2c4a5cb8c9ff2435, 0x4aeabbe6f9ffd7f8 } }, { - { 0x6a2134bcc4a9c8f2, 0xfbf8fd1c8ace2e37, 0x000ae3049911a0ba, 0x046e3a616bc89b9e }, - { 0x4630119e40d8f78c, 0xa01a9bc53c710e11, 0x486d2b258910dd79, 0x1e6c47b3db0324e5 }, + { 0x6a2134bcc4a9c8f2, -0x040702e37531d1c9, 0x000ae3049911a0ba, 0x046e3a616bc89b9e }, + { 0x4630119e40d8f78c, 
-0x5fe5643ac38ef1ef, 0x486d2b258910dd79, 0x1e6c47b3db0324e5 }, { 0x14e65442f03906be, 0x4a019d54e362be2a, 0x68ccdfec8dc230c7, 0x7cfb7e3faf6b861c } }, }, { { - { 0x96eebffb305b2f51, 0xd3f938ad889596b8, 0xf0f52dc746d5dd25, 0x57968290bb3a0095 }, - { 0x4637974e8c58aedc, 0xb9ef22fbabf041a4, 0xe185d956e980718a, 0x2f1b78fab143a8a6 }, - { 0xf71ab8430a20e101, 0xf393658d24f0ec47, 0xcf7509a86ee2eed1, 0x7dc43e35dc2aa3e1 } + { -0x69114004cfa4d0af, -0x2c06c752776a6948, -0x0f0ad238b92a22db, 0x57968290bb3a0095 }, + { 0x4637974e8c58aedc, -0x4610dd04540fbe5c, -0x1e7a26a9167f8e76, 0x2f1b78fab143a8a6 }, + { -0x08e547bcf5df1eff, -0x0c6c9a72db0f13b9, -0x308af657911d112f, 0x7dc43e35dc2aa3e1 } }, { { 0x5a782a5c273e9718, 0x3576c6995e4efd94, 0x0f2ed8051f237d3e, 0x044fb81d82d50a99 }, - { 0x85966665887dd9c3, 0xc90f9b314bb05355, 0xc6e08df8ef2079b1, 0x7ef72016758cc12f }, - { 0xc1df18c5a907e3d9, 0x57b3371dce4c6359, 0xca704534b201bb49, 0x7f79823f9c30dd2e } + { -0x7a69999a7782263d, -0x36f064ceb44facab, -0x391f720710df864f, 0x7ef72016758cc12f }, + { -0x3e20e73a56f81c27, 0x57b3371dce4c6359, -0x358fbacb4dfe44b7, 0x7f79823f9c30dd2e } }, { { 0x6a9c1ff068f587ba, 0x0827894e0050c8de, 0x3cbf99557ded5be7, 0x64a9b0431c06d6f0 }, - { 0x8334d239a3b513e8, 0xc13670d4b91fa8d8, 0x12b54136f590bd33, 0x0a4e0373d784d9b4 }, - { 0x2eb3d6a15b7d2919, 0xb0b4f6a0d53a8235, 0x7156ce4389a45d47, 0x071a7d0ace18346c } + { -0x7ccb2dc65c4aec18, -0x3ec98f2b46e05728, 0x12b54136f590bd33, 0x0a4e0373d784d9b4 }, + { 0x2eb3d6a15b7d2919, -0x4f4b095f2ac57dcb, 0x7156ce4389a45d47, 0x071a7d0ace18346c } }, { - { 0xcc0c355220e14431, 0x0d65950709b15141, 0x9af5621b209d5f36, 0x7c69bcf7617755d3 }, - { 0xd3072daac887ba0b, 0x01262905bfa562ee, 0xcf543002c0ef768b, 0x2c3bcc7146ea7e9c }, - { 0x07f0d7eb04e8295f, 0x10db18252f50f37d, 0xe951a9a3171798d7, 0x6f5a9a7322aca51d } + { -0x33f3caaddf1ebbcf, 0x0d65950709b15141, -0x650a9de4df62a0ca, 0x7c69bcf7617755d3 }, + { -0x2cf8d255377845f5, 0x01262905bfa562ee, -0x30abcffd3f108975, 0x2c3bcc7146ea7e9c }, + { 
0x07f0d7eb04e8295f, 0x10db18252f50f37d, -0x16ae565ce8e86729, 0x6f5a9a7322aca51d } }, { - { 0xe729d4eba3d944be, 0x8d9e09408078af9e, 0x4525567a47869c03, 0x02ab9680ee8d3b24 }, - { 0x8ba1000c2f41c6c5, 0xc49f79c10cfefb9b, 0x4efa47703cc51c9f, 0x494e21a2e147afca }, - { 0xefa48a85dde50d9a, 0x219a224e0fb9a249, 0xfa091f1dd91ef6d9, 0x6b5d76cbea46bb34 } + { -0x18d62b145c26bb42, -0x7261f6bf7f875062, 0x4525567a47869c03, 0x02ab9680ee8d3b24 }, + { -0x745efff3d0be393b, -0x3b60863ef3010465, 0x4efa47703cc51c9f, 0x494e21a2e147afca }, + { -0x105b757a221af266, 0x219a224e0fb9a249, -0x05f6e0e226e10927, 0x6b5d76cbea46bb34 } }, { - { 0xe0f941171e782522, 0xf1e6ae74036936d3, 0x408b3ea2d0fcc746, 0x16fb869c03dd313e }, - { 0x8857556cec0cd994, 0x6472dc6f5cd01dba, 0xaf0169148f42b477, 0x0ae333f685277354 }, + { -0x1f06bee8e187dade, -0x0e19518bfc96c92d, 0x408b3ea2d0fcc746, 0x16fb869c03dd313e }, + { -0x77a8aa9313f3266c, 0x6472dc6f5cd01dba, -0x50fe96eb70bd4b89, 0x0ae333f685277354 }, { 0x288e199733b60962, 0x24fc72b4d8abe133, 0x4811f7ed0991d03e, 0x3f81e38b8f70d075 } }, { - { 0x0adb7f355f17c824, 0x74b923c3d74299a4, 0xd57c3e8bcbf8eaf7, 0x0ad3e2d34cdedc3d }, - { 0x7f910fcc7ed9affe, 0x545cb8a12465874b, 0xa8397ed24b0c4704, 0x50510fc104f50993 }, - { 0x6f0c0fc5336e249d, 0x745ede19c331cfd9, 0xf2d6fd0009eefe1c, 0x127c158bf0fa1ebe } + { 0x0adb7f355f17c824, 0x74b923c3d74299a4, -0x2a83c17434071509, 0x0ad3e2d34cdedc3d }, + { 0x7f910fcc7ed9affe, 0x545cb8a12465874b, -0x57c6812db4f3b8fc, 0x50510fc104f50993 }, + { 0x6f0c0fc5336e249d, 0x745ede19c331cfd9, -0x0d2902fff61101e4, 0x127c158bf0fa1ebe } }, { - { 0xdea28fc4ae51b974, 0x1d9973d3744dfe96, 0x6240680b873848a8, 0x4ed82479d167df95 }, - { 0xf6197c422e9879a2, 0xa44addd452ca3647, 0x9b413fc14b4eaccb, 0x354ef87d07ef4f68 }, - { 0xfee3b52260c5d975, 0x50352efceb41b0b8, 0x8808ac30a9f6653c, 0x302d92d20539236d } + { -0x215d703b51ae468c, 0x1d9973d3744dfe96, 0x6240680b873848a8, 0x4ed82479d167df95 }, + { -0x09e683bdd167865e, -0x5bb5222bad35c9b9, -0x64bec03eb4b15335, 0x354ef87d07ef4f68 
}, + { -0x011c4add9f3a268b, 0x50352efceb41b0b8, -0x77f753cf56099ac4, 0x302d92d20539236d } }, }, { { - { 0x957b8b8b0df53c30, 0x2a1c770a8e60f098, 0xbbc7a670345796de, 0x22a48f9a90c99bc9 }, - { 0x4c59023fcb3efb7c, 0x6c2fcb99c63c2a94, 0xba4190e2c3c7e084, 0x0e545daea51874d9 }, + { -0x6a847474f20ac3d0, 0x2a1c770a8e60f098, -0x4438598fcba86922, 0x22a48f9a90c99bc9 }, + { 0x4c59023fcb3efb7c, 0x6c2fcb99c63c2a94, -0x45be6f1d3c381f7c, 0x0e545daea51874d9 }, { 0x6b7dc0dc8d3fac58, 0x5497cd6ce6e42bfd, 0x542f7d1bf400d305, 0x4159f47f048d9136 } }, { { 0x748515a8bbd24839, 0x77128347afb02b55, 0x50ba2ac649a2a17f, 0x060525513ad730f1 }, - { 0x20ad660839e31e32, 0xf81e1bd58405be50, 0xf8064056f4dabc69, 0x14d23dd4ce71b975 }, - { 0xf2398e098aa27f82, 0x6d7982bb89a1b024, 0xfa694084214dd24c, 0x71ab966fa32301c3 } + { 0x20ad660839e31e32, -0x07e1e42a7bfa41b0, -0x07f9bfa90b254397, 0x14d23dd4ce71b975 }, + { -0x0dc671f6755d807e, 0x6d7982bb89a1b024, -0x0596bf7bdeb22db4, 0x71ab966fa32301c3 } }, { - { 0xb1088a0702809955, 0x43b273ea0b43c391, 0xca9b67aefe0686ed, 0x605eecbf8335f4ed }, - { 0x2dcbd8e34ded02fc, 0x1151f3ec596f22aa, 0xbca255434e0328da, 0x35768fbe92411b22 }, - { 0x83200a656c340431, 0x9fcd71678ee59c2f, 0x75d4613f71300f8a, 0x7a912faf60f542f9 } + { -0x4ef775f8fd7f66ab, 0x43b273ea0b43c391, -0x3564985101f97913, 0x605eecbf8335f4ed }, + { 0x2dcbd8e34ded02fc, 0x1151f3ec596f22aa, -0x435daabcb1fcd726, 0x35768fbe92411b22 }, + { -0x7cdff59a93cbfbcf, -0x60328e98711a63d1, 0x75d4613f71300f8a, 0x7a912faf60f542f9 } }, { { 0x253f4f8dfa2d5597, 0x25e49c405477130c, 0x00c052e5996b1102, 0x33cb966e33bb6c4a }, - { 0xb204585e5edc1a43, 0x9f0e16ee5897c73c, 0x5b82c0ae4e70483c, 0x624a170e2bddf9be }, - { 0x597028047f116909, 0x828ac41c1e564467, 0x70417dbde6217387, 0x721627aefbac4384 } + { -0x4dfba7a1a123e5bd, -0x60f1e911a76838c4, 0x5b82c0ae4e70483c, 0x624a170e2bddf9be }, + { 0x597028047f116909, -0x7d753be3e1a9bb99, 0x70417dbde6217387, 0x721627aefbac4384 } }, { - { 0xfd3097bc410b2f22, 0xf1a05da7b5cfa844, 0x61289a1def57ca74, 
0x245ea199bb821902 }, - { 0x97d03bc38736add5, 0x2f1422afc532b130, 0x3aa68a057101bbc4, 0x4c946cf7e74f9fa7 }, - { 0xaedca66978d477f8, 0x1898ba3c29117fe1, 0xcf73f983720cbd58, 0x67da12e6b8b56351 } + { -0x02cf6843bef4d0de, -0x0e5fa2584a3057bc, 0x61289a1def57ca74, 0x245ea199bb821902 }, + { -0x682fc43c78c9522b, 0x2f1422afc532b130, 0x3aa68a057101bbc4, 0x4c946cf7e74f9fa7 }, + { -0x51235996872b8808, 0x1898ba3c29117fe1, -0x308c067c8df342a8, 0x67da12e6b8b56351 } }, { - { 0x2b7ef3d38ec8308c, 0x828fd7ec71eb94ab, 0x807c3b36c5062abd, 0x0cb64cb831a94141 }, - { 0x7067e187b4bd6e07, 0x6e8f0203c7d1fe74, 0x93c6aa2f38c85a30, 0x76297d1f3d75a78a }, - { 0x3030fc33534c6378, 0xb9635c5ce541e861, 0x15d9a9bed9b2c728, 0x49233ea3f3775dcb } + { 0x2b7ef3d38ec8308c, -0x7d7028138e146b55, -0x7f83c4c93af9d543, 0x0cb64cb831a94141 }, + { 0x7067e187b4bd6e07, 0x6e8f0203c7d1fe74, -0x6c3955d0c737a5d0, 0x76297d1f3d75a78a }, + { 0x3030fc33534c6378, -0x469ca3a31abe179f, 0x15d9a9bed9b2c728, 0x49233ea3f3775dcb } }, { - { 0x7b3985fe1c9f249b, 0x4fd6b2d5a1233293, 0xceb345941adf4d62, 0x6987ff6f542de50c }, - { 0x629398fa8dbffc3a, 0xe12fe52dd54db455, 0xf3be11dfdaf25295, 0x628b140dce5e7b51 }, - { 0x47e241428f83753c, 0x6317bebc866af997, 0xdabb5b433d1a9829, 0x074d8d245287fb2d } + { 0x7b3985fe1c9f249b, 0x4fd6b2d5a1233293, -0x314cba6be520b29e, 0x6987ff6f542de50c }, + { 0x629398fa8dbffc3a, -0x1ed01ad22ab24bab, -0x0c41ee20250dad6b, 0x628b140dce5e7b51 }, + { 0x47e241428f83753c, 0x6317bebc866af997, -0x2544a4bcc2e567d7, 0x074d8d245287fb2d } }, { { 0x481875c6c0e31488, 0x219429b2e22034b4, 0x7223c98a31283b65, 0x3420d60b342277f9 }, - { 0x8337d9cd440bfc31, 0x729d2ca1af318fd7, 0xa040a4a4772c2070, 0x46002ef03a7349be }, - { 0xfaa23adeaffe65f7, 0x78261ed45be0764c, 0x441c0a1e2f164403, 0x5aea8e567a87d395 } + { -0x7cc82632bbf403cf, 0x729d2ca1af318fd7, -0x5fbf5b5b88d3df90, 0x46002ef03a7349be }, + { -0x055dc52150019a09, 0x78261ed45be0764c, 0x441c0a1e2f164403, 0x5aea8e567a87d395 } }, }, { { { 0x2dbc6fb6e4e0f177, 0x04e1bf29a4bd6a93, 
0x5e1966d4787af6e8, 0x0edc5f5eb426d060 }, - { 0x7813c1a2bca4283d, 0xed62f091a1863dd9, 0xaec7bcb8c268fa86, 0x10e5d3b76f1cae4c }, - { 0x5453bfd653da8e67, 0xe9dc1eec24a9f641, 0xbf87263b03578a23, 0x45b46c51361cba72 } + { 0x7813c1a2bca4283d, -0x129d0f6e5e79c227, -0x513843473d97057a, 0x10e5d3b76f1cae4c }, + { 0x5453bfd653da8e67, -0x1623e113db5609bf, -0x4078d9c4fca875dd, 0x45b46c51361cba72 } }, { - { 0xce9d4ddd8a7fe3e4, 0xab13645676620e30, 0x4b594f7bb30e9958, 0x5c1c0aef321229df }, - { 0xa9402abf314f7fa1, 0xe257f1dc8e8cf450, 0x1dbbd54b23a8be84, 0x2177bfa36dcb713b }, + { -0x3162b22275801c1c, -0x54ec9ba9899df1d0, 0x4b594f7bb30e9958, 0x5c1c0aef321229df }, + { -0x56bfd540ceb0805f, -0x1da80e2371730bb0, 0x1dbbd54b23a8be84, 0x2177bfa36dcb713b }, { 0x37081bbcfa79db8f, 0x6048811ec25f59b3, 0x087a76659c832487, 0x4ae619387d8ab5bb } }, { - { 0x61117e44985bfb83, 0xfce0462a71963136, 0x83ac3448d425904b, 0x75685abe5ba43d64 }, - { 0x8ddbf6aa5344a32e, 0x7d88eab4b41b4078, 0x5eb0eb974a130d60, 0x1a00d91b17bf3e03 }, - { 0x6e960933eb61f2b2, 0x543d0fa8c9ff4952, 0xdf7275107af66569, 0x135529b623b0e6aa } + { 0x61117e44985bfb83, -0x031fb9d58e69ceca, -0x7c53cbb72bda6fb5, 0x75685abe5ba43d64 }, + { -0x72240955acbb5cd2, 0x7d88eab4b41b4078, 0x5eb0eb974a130d60, 0x1a00d91b17bf3e03 }, + { 0x6e960933eb61f2b2, 0x543d0fa8c9ff4952, -0x208d8aef85099a97, 0x135529b623b0e6aa } }, { - { 0xf5c716bce22e83fe, 0xb42beb19e80985c1, 0xec9da63714254aae, 0x5972ea051590a613 }, - { 0x18f0dbd7add1d518, 0x979f7888cfc11f11, 0x8732e1f07114759b, 0x79b5b81a65ca3a01 }, - { 0x0fd4ac20dc8f7811, 0x9a9ad294ac4d4fa8, 0xc01b2d64b3360434, 0x4f7e9c95905f3bdb } + { -0x0a38e9431dd17c02, -0x4bd414e617f67a3f, -0x136259c8ebdab552, 0x5972ea051590a613 }, + { 0x18f0dbd7add1d518, -0x68608777303ee0ef, -0x78cd1e0f8eeb8a65, 0x79b5b81a65ca3a01 }, + { 0x0fd4ac20dc8f7811, -0x65652d6b53b2b058, -0x3fe4d29b4cc9fbcc, 0x4f7e9c95905f3bdb } }, { - { 0x71c8443d355299fe, 0x8bcd3b1cdbebead7, 0x8092499ef1a49466, 0x1942eec4a144adc8 }, - { 0x62674bbc5781302e, 
0xd8520f3989addc0f, 0x8c2999ae53fbd9c6, 0x31993ad92e638e4c }, + { 0x71c8443d355299fe, -0x7432c4e324141529, -0x7f6db6610e5b6b9a, 0x1942eec4a144adc8 }, + { 0x62674bbc5781302e, -0x27adf0c6765223f1, -0x73d66651ac04263a, 0x31993ad92e638e4c }, { 0x7dac5319ae234992, 0x2c1b3d910cea3e92, 0x553ce494253c1122, 0x2a0a65314ef9ca75 } }, { - { 0xcf361acd3c1c793a, 0x2f9ebcac5a35bc3b, 0x60e860e9a8cda6ab, 0x055dc39b6dea1a13 }, - { 0x2db7937ff7f927c2, 0xdb741f0617d0a635, 0x5982f3a21155af76, 0x4cf6e218647c2ded }, - { 0xb119227cc28d5bb6, 0x07e24ebc774dffab, 0xa83c78cee4a32c89, 0x121a307710aa24b6 } + { -0x30c9e532c3e386c6, 0x2f9ebcac5a35bc3b, 0x60e860e9a8cda6ab, 0x055dc39b6dea1a13 }, + { 0x2db7937ff7f927c2, -0x248be0f9e82f59cb, 0x5982f3a21155af76, 0x4cf6e218647c2ded }, + { -0x4ee6dd833d72a44a, 0x07e24ebc774dffab, -0x57c387311b5cd377, 0x121a307710aa24b6 } }, { - { 0xd659713ec77483c9, 0x88bfe077b82b96af, 0x289e28231097bcd3, 0x527bb94a6ced3a9b }, - { 0xe4db5d5e9f034a97, 0xe153fc093034bc2d, 0x460546919551d3b1, 0x333fc76c7a40e52d }, + { -0x29a68ec1388b7c37, -0x77401f8847d46951, 0x289e28231097bcd3, 0x527bb94a6ced3a9b }, + { -0x1b24a2a160fcb569, -0x1eac03f6cfcb43d3, 0x460546919551d3b1, 0x333fc76c7a40e52d }, { 0x563d992a995b482e, 0x3405d07c6e383801, 0x485035de2f64d8e5, 0x6b89069b20a7a9f7 } }, { { 0x4082fa8cb5c7db77, 0x068686f8c734c155, 0x29e6c8d9f6e7a57e, 0x0473d308a7639bcf }, - { 0x812aa0416270220d, 0x995a89faf9245b4e, 0xffadc4ce5072ef05, 0x23bc2103aa73eb73 }, - { 0xcaee792603589e05, 0x2b4b421246dcc492, 0x02a1ef74e601a94f, 0x102f73bfde04341a } + { -0x7ed55fbe9d8fddf3, -0x66a5760506dba4b2, -0x00523b31af8d10fb, 0x23bc2103aa73eb73 }, + { -0x351186d9fca761fb, 0x2b4b421246dcc492, 0x02a1ef74e601a94f, 0x102f73bfde04341a } }, }, { { - { 0x358ecba293a36247, 0xaf8f9862b268fd65, 0x412f7e9968a01c89, 0x5786f312cd754524 }, - { 0xb5a2d50c7ec20d3e, 0xc64bdd6ea0c97263, 0x56e89052c1ff734d, 0x4929c6f72b2ffaba }, - { 0x337788ffca14032c, 0xf3921028447f1ee3, 0x8b14071f231bccad, 0x4c817b4bf2344783 } + { 
0x358ecba293a36247, -0x5070679d4d97029b, 0x412f7e9968a01c89, 0x5786f312cd754524 }, + { -0x4a5d2af3813df2c2, -0x39b422915f368d9d, 0x56e89052c1ff734d, 0x4929c6f72b2ffaba }, + { 0x337788ffca14032c, -0x0c6defd7bb80e11d, -0x74ebf8e0dce43353, 0x4c817b4bf2344783 } }, { - { 0x413ba057a40b4484, 0xba4c2e1a4f5f6a43, 0x614ba0a5aee1d61c, 0x78a1531a8b05dc53 }, - { 0x0ff853852871b96e, 0xe13e9fab60c3f1bb, 0xeefd595325344402, 0x0a37c37075b7744b }, - { 0x6cbdf1703ad0562b, 0x8ecf4830c92521a3, 0xdaebd303fd8424e7, 0x72ad82a42e5ec56f } + { 0x413ba057a40b4484, -0x45b3d1e5b0a095bd, 0x614ba0a5aee1d61c, 0x78a1531a8b05dc53 }, + { 0x0ff853852871b96e, -0x1ec160549f3c0e45, -0x1102a6acdacbbbfe, 0x0a37c37075b7744b }, + { 0x6cbdf1703ad0562b, -0x7130b7cf36dade5d, -0x25142cfc027bdb19, 0x72ad82a42e5ec56f } }, { - { 0xc368939167024bc3, 0x8e69d16d49502fda, 0xfcf2ec3ce45f4b29, 0x065f669ea3b4cbc4 }, + { -0x3c976c6e98fdb43d, -0x71962e92b6afd026, -0x030d13c31ba0b4d7, 0x065f669ea3b4cbc4 }, { 0x3f9e8e35bafb65f6, 0x39d69ec8f27293a1, 0x6cb8cd958cf6a3d0, 0x1734778173adae6d }, - { 0x8a00aec75532db4d, 0xb869a4e443e31bb1, 0x4a0f8552d3a7f515, 0x19adeb7c303d7c08 } + { -0x75ff5138aacd24b3, -0x47965b1bbc1ce44f, 0x4a0f8552d3a7f515, 0x19adeb7c303d7c08 } }, { - { 0x9d05ba7d43c31794, 0x2470c8ff93322526, 0x8323dec816197438, 0x2852709881569b53 }, - { 0xc720cb6153ead9a3, 0x55b2c97f512b636e, 0xb1e35b5fd40290b1, 0x2fd9ccf13b530ee2 }, - { 0x07bd475b47f796b8, 0xd2c7b013542c8f54, 0x2dbd23f43b24f87e, 0x6551afd77b0901d6 } + { -0x62fa4582bc3ce86c, 0x2470c8ff93322526, -0x7cdc2137e9e68bc8, 0x2852709881569b53 }, + { -0x38df349eac15265d, 0x55b2c97f512b636e, -0x4e1ca4a02bfd6f4f, 0x2fd9ccf13b530ee2 }, + { 0x07bd475b47f796b8, -0x2d384fecabd370ac, 0x2dbd23f43b24f87e, 0x6551afd77b0901d6 } }, { - { 0x68a24ce3a1d5c9ac, 0xbb77a33d10ff6461, 0x0f86ce4425d3166e, 0x56507c0950b9623b }, - { 0x4546baaf54aac27f, 0xf6f66fecb2a45a28, 0x582d1b5b562bcfe8, 0x44b123f3920f785f }, + { 0x68a24ce3a1d5c9ac, -0x44885cc2ef009b9f, 0x0f86ce4425d3166e, 
0x56507c0950b9623b }, + { 0x4546baaf54aac27f, -0x090990134d5ba5d8, 0x582d1b5b562bcfe8, 0x44b123f3920f785f }, { 0x1206f0b7d1713e63, 0x353fe3d915bafc74, 0x194ceb970ad9d94d, 0x62fadd7cf9d03ad3 } }, { - { 0x3cd7bc61e7ce4594, 0xcd6b35a9b7dd267e, 0xa080abc84366ef27, 0x6ec7c46f59c79711 }, - { 0xc6b5967b5598a074, 0x5efe91ce8e493e25, 0xd4b72c4549280888, 0x20ef1149a26740c2 }, - { 0x2f07ad636f09a8a2, 0x8697e6ce24205e7d, 0xc0aefc05ee35a139, 0x15e80958b5f9d897 } + { 0x3cd7bc61e7ce4594, -0x3294ca564822d982, -0x5f7f5437bc9910d9, 0x6ec7c46f59c79711 }, + { -0x394a6984aa675f8c, 0x5efe91ce8e493e25, -0x2b48d3bab6d7f778, 0x20ef1149a26740c2 }, + { 0x2f07ad636f09a8a2, -0x79681931dbdfa183, -0x3f5103fa11ca5ec7, 0x15e80958b5f9d897 } }, { { 0x4dd1ed355bb061c4, 0x42dc0cef941c0700, 0x61305dc1fd86340e, 0x56b2cc930e55a443 }, - { 0x25a5ef7d0c3e235b, 0x6c39c17fbe134ee7, 0xc774e1342dc5c327, 0x021354b892021f39 }, - { 0x1df79da6a6bfc5a2, 0x02f3a2749fde4369, 0xb323d9f2cda390a7, 0x7be0847b8774d363 } + { 0x25a5ef7d0c3e235b, 0x6c39c17fbe134ee7, -0x388b1ecbd23a3cd9, 0x021354b892021f39 }, + { 0x1df79da6a6bfc5a2, 0x02f3a2749fde4369, -0x4cdc260d325c6f59, 0x7be0847b8774d363 } }, { - { 0x1466f5af5307fa11, 0x817fcc7ded6c0af2, 0x0a6de44ec3a4a3fb, 0x74071475bc927d0b }, - { 0x8c99cc5a8b3f55c3, 0x0611d7253fded2a0, 0xed2995ff36b70a36, 0x1f699a54d78a2619 }, - { 0xe77292f373e7ea8a, 0x296537d2cb045a31, 0x1bd0653ed3274fde, 0x2f9a2c4476bd2966 } + { 0x1466f5af5307fa11, -0x7e8033821293f50e, 0x0a6de44ec3a4a3fb, 0x74071475bc927d0b }, + { -0x736633a574c0aa3d, 0x0611d7253fded2a0, -0x12d66a00c948f5ca, 0x1f699a54d78a2619 }, + { -0x188d6d0c8c181576, 0x296537d2cb045a31, 0x1bd0653ed3274fde, 0x2f9a2c4476bd2966 } }, }, { { - { 0xa2b4dae0b5511c9a, 0x7ac860292bffff06, 0x981f375df5504234, 0x3f6bd725da4ea12d }, - { 0xeb18b9ab7f5745c6, 0x023a8aee5787c690, 0xb72712da2df7afa9, 0x36597d25ea5c013d }, - { 0x734d8d7b106058ac, 0xd940579e6fc6905f, 0x6466f8f99202932d, 0x7b7ecc19da60d6d0 } + { -0x5d4b251f4aaee366, 0x7ac860292bffff06, 
-0x67e0c8a20aafbdcc, 0x3f6bd725da4ea12d }, + { -0x14e7465480a8ba3a, 0x023a8aee5787c690, -0x48d8ed25d2085057, 0x36597d25ea5c013d }, + { 0x734d8d7b106058ac, -0x26bfa86190396fa1, 0x6466f8f99202932d, 0x7b7ecc19da60d6d0 } }, { - { 0x6dae4a51a77cfa9b, 0x82263654e7a38650, 0x09bbffcd8f2d82db, 0x03bedc661bf5caba }, - { 0x78c2373c695c690d, 0xdd252e660642906e, 0x951d44444ae12bd2, 0x4235ad7601743956 }, - { 0x6258cb0d078975f5, 0x492942549189f298, 0xa0cab423e2e36ee4, 0x0e7ce2b0cdf066a1 } + { 0x6dae4a51a77cfa9b, -0x7dd9c9ab185c79b0, 0x09bbffcd8f2d82db, 0x03bedc661bf5caba }, + { 0x78c2373c695c690d, -0x22dad199f9bd6f92, -0x6ae2bbbbb51ed42e, 0x4235ad7601743956 }, + { 0x6258cb0d078975f5, 0x492942549189f298, -0x5f354bdc1d1c911c, 0x0e7ce2b0cdf066a1 } }, { - { 0xfea6fedfd94b70f9, 0xf130c051c1fcba2d, 0x4882d47e7f2fab89, 0x615256138aeceeb5 }, - { 0xc494643ac48c85a3, 0xfd361df43c6139ad, 0x09db17dd3ae94d48, 0x666e0a5d8fb4674a }, - { 0x2abbf64e4870cb0d, 0xcd65bcf0aa458b6b, 0x9abe4eba75e8985d, 0x7f0bc810d514dee4 } + { -0x0159012026b48f07, -0x0ecf3fae3e0345d3, 0x4882d47e7f2fab89, 0x615256138aeceeb5 }, + { -0x3b6b9bc53b737a5d, -0x02c9e20bc39ec653, 0x09db17dd3ae94d48, 0x666e0a5d8fb4674a }, + { 0x2abbf64e4870cb0d, -0x329a430f55ba7495, -0x6541b1458a1767a3, 0x7f0bc810d514dee4 } }, { - { 0x83ac9dad737213a0, 0x9ff6f8ba2ef72e98, 0x311e2edd43ec6957, 0x1d3a907ddec5ab75 }, - { 0xb9006ba426f4136f, 0x8d67369e57e03035, 0xcbc8dfd94f463c28, 0x0d1f8dbcf8eedbf5 }, - { 0xba1693313ed081dc, 0x29329fad851b3480, 0x0128013c030321cb, 0x00011b44a31bfde3 } + { -0x7c5362528c8dec60, -0x60090745d108d168, 0x311e2edd43ec6957, 0x1d3a907ddec5ab75 }, + { -0x46ff945bd90bec91, -0x7298c961a81fcfcb, -0x34372026b0b9c3d8, 0x0d1f8dbcf8eedbf5 }, + { -0x45e96ccec12f7e24, 0x29329fad851b3480, 0x0128013c030321cb, 0x00011b44a31bfde3 } }, { - { 0x16561f696a0aa75c, 0xc1bf725c5852bd6a, 0x11a8dd7f9a7966ad, 0x63d988a2d2851026 }, + { 0x16561f696a0aa75c, -0x3e408da3a7ad4296, 0x11a8dd7f9a7966ad, 0x63d988a2d2851026 }, { 0x3fdfa06c3fc66c0c, 
0x5d40e38e4dd60dd2, 0x7ae38b38268e4d71, 0x3ac48d916e8357e1 }, - { 0x00120753afbd232e, 0xe92bceb8fdd8f683, 0xf81669b384e72b91, 0x33fad52b2368a066 } + { 0x00120753afbd232e, -0x16d431470227097d, -0x07e9964c7b18d46f, 0x33fad52b2368a066 } }, { - { 0x8d2cc8d0c422cfe8, 0x072b4f7b05a13acb, 0xa3feb6e6ecf6a56f, 0x3cc355ccb90a71e2 }, - { 0x540649c6c5e41e16, 0x0af86430333f7735, 0xb2acfcd2f305e746, 0x16c0f429a256dca7 }, - { 0xe9b69443903e9131, 0xb8a494cb7a5637ce, 0xc87cd1a4baba9244, 0x631eaf426bae7568 } + { -0x72d3372f3bdd3018, 0x072b4f7b05a13acb, -0x5c01491913095a91, 0x3cc355ccb90a71e2 }, + { 0x540649c6c5e41e16, 0x0af86430333f7735, -0x4d53032d0cfa18ba, 0x16c0f429a256dca7 }, + { -0x16496bbc6fc16ecf, -0x475b6b3485a9c832, -0x37832e5b45456dbc, 0x631eaf426bae7568 } }, { { 0x47d975b9a3700de8, 0x7280c5fbe2f80552, 0x53658f2732e45de1, 0x431f2c7f665f80b5 }, - { 0xb3e90410da66fe9f, 0x85dd4b526c16e5a6, 0xbc3d97611ef9bf83, 0x5599648b1ea919b5 }, - { 0xd6026344858f7b19, 0x14ab352fa1ea514a, 0x8900441a2090a9d7, 0x7b04715f91253b26 } + { -0x4c16fbef25990161, -0x7a22b4ad93e91a5a, -0x43c2689ee106407d, 0x5599648b1ea919b5 }, + { -0x29fd9cbb7a7084e7, 0x14ab352fa1ea514a, -0x76ffbbe5df6f5629, 0x7b04715f91253b26 } }, { - { 0xb376c280c4e6bac6, 0x970ed3dd6d1d9b0b, 0xb09a9558450bf944, 0x48d0acfa57cde223 }, - { 0x83edbd28acf6ae43, 0x86357c8b7d5c7ab4, 0xc0404769b7eb2c44, 0x59b37bf5c2f6583f }, - { 0xb60f26e47dabe671, 0xf1d1a197622f3a37, 0x4208ce7ee9960394, 0x16234191336d3bdb } + { -0x4c893d7f3b19453a, -0x68f12c2292e264f5, -0x4f656aa7baf406bc, 0x48d0acfa57cde223 }, + { -0x7c1242d7530951bd, -0x79ca837482a3854c, -0x3fbfb8964814d3bc, 0x59b37bf5c2f6583f }, + { -0x49f0d91b8254198f, -0x0e2e5e689dd0c5c9, 0x4208ce7ee9960394, 0x16234191336d3bdb } }, }, { { - { 0x852dd1fd3d578bbe, 0x2b65ce72c3286108, 0x658c07f4eace2273, 0x0933f804ec38ab40 }, - { 0xf19aeac733a63aef, 0x2c7fba5d4442454e, 0x5da87aa04795e441, 0x413051e1a4e0b0f5 }, - { 0xa7ab69798d496476, 0x8121aadefcb5abc8, 0xa5dc12ef7b539472, 0x07fd47065e45351a } + { 
-0x7ad22e02c2a87442, 0x2b65ce72c3286108, 0x658c07f4eace2273, 0x0933f804ec38ab40 }, + { -0x0e651538cc59c511, 0x2c7fba5d4442454e, 0x5da87aa04795e441, 0x413051e1a4e0b0f5 }, + { -0x5854968672b69b8a, -0x7ede5521034a5438, -0x5a23ed1084ac6b8e, 0x07fd47065e45351a } }, { - { 0x304211559ae8e7c3, 0xf281b229944882a5, 0x8a13ac2e378250e4, 0x014afa0954ba48f4 }, - { 0xc8583c3d258d2bcd, 0x17029a4daf60b73f, 0xfa0fc9d6416a3781, 0x1c1e5fba38b3fb23 }, - { 0xcb3197001bb3666c, 0x330060524bffecb9, 0x293711991a88233c, 0x291884363d4ed364 } + { 0x304211559ae8e7c3, -0x0d7e4dd66bb77d5b, -0x75ec53d1c87daf1c, 0x014afa0954ba48f4 }, + { -0x37a7c3c2da72d433, 0x17029a4daf60b73f, -0x05f03629be95c87f, 0x1c1e5fba38b3fb23 }, + { -0x34ce68ffe44c9994, 0x330060524bffecb9, 0x293711991a88233c, 0x291884363d4ed364 } }, { - { 0xfb9d37c3bc1ab6eb, 0x02be14534d57a240, 0xf4d73415f8a5e1f6, 0x5964f4300ccc8188 }, + { -0x0462c83c43e54915, 0x02be14534d57a240, -0x0b28cbea075a1e0a, 0x5964f4300ccc8188 }, { 0x033c6805dc4babfa, 0x2c15bf5e5596ecc1, 0x1bc70624b59b1d3b, 0x3ede9850a19f0ec5 }, - { 0xe44a23152d096800, 0x5c08c55970866996, 0xdf2db60a46affb6e, 0x579155c1f856fd89 } + { -0x1bb5dcead2f69800, 0x5c08c55970866996, -0x20d249f5b9500492, 0x579155c1f856fd89 } }, { - { 0xb5f16b630817e7a6, 0x808c69233c351026, 0x324a983b54cef201, 0x53c092084a485345 }, - { 0x96324edd12e0c9ef, 0x468b878df2420297, 0x199a3776a4f573be, 0x1e7fbcf18e91e92a }, - { 0xd2d41481f1cbafbf, 0x231d2db6716174e5, 0x0b7d7656e2a55c98, 0x3e955cd82aa495f6 } + { -0x4a0e949cf7e8185a, -0x7f7396dcc3caefda, 0x324a983b54cef201, 0x53c092084a485345 }, + { -0x69cdb122ed1f3611, 0x468b878df2420297, 0x199a3776a4f573be, 0x1e7fbcf18e91e92a }, + { -0x2d2beb7e0e345041, 0x231d2db6716174e5, 0x0b7d7656e2a55c98, 0x3e955cd82aa495f6 } }, { - { 0xab39f3ef61bb3a3f, 0x8eb400652eb9193e, 0xb5de6ecc38c11f74, 0x654d7e9626f3c49f }, - { 0xe48f535e3ed15433, 0xd075692a0d7270a3, 0x40fbd21daade6387, 0x14264887cf4495f5 }, - { 0xe564cfdd5c7d2ceb, 0x82eeafded737ccb9, 0x6107db62d1f9b0ab, 0x0b6baac3b4358dbb 
} + { -0x54c60c109e44c5c1, -0x714bff9ad146e6c2, -0x4a219133c73ee08c, 0x654d7e9626f3c49f }, + { -0x1b70aca1c12eabcd, -0x2f8a96d5f28d8f5d, 0x40fbd21daade6387, 0x14264887cf4495f5 }, + { -0x1a9b3022a382d315, -0x7d11502128c83347, 0x6107db62d1f9b0ab, 0x0b6baac3b4358dbb } }, { - { 0x204abad63700a93b, 0xbe0023d3da779373, 0xd85f0346633ab709, 0x00496dc490820412 }, + { 0x204abad63700a93b, -0x41ffdc2c25886c8d, -0x27a0fcb99cc548f7, 0x00496dc490820412 }, { 0x7ae62bcb8622fe98, 0x47762256ceb891af, 0x1a5a92bcf2e406b4, 0x7d29401784e41501 }, - { 0x1c74b88dc27e6360, 0x074854268d14850c, 0xa145fb7b3e0dcb30, 0x10843f1b43803b23 } + { 0x1c74b88dc27e6360, 0x074854268d14850c, -0x5eba0484c1f234d0, 0x10843f1b43803b23 } }, { - { 0xd56f672de324689b, 0xd1da8aedb394a981, 0xdd7b58fe9168cfed, 0x7ce246cd4d56c1e8 }, - { 0xc5f90455376276dd, 0xce59158dd7645cd9, 0x92f65d511d366b39, 0x11574b6e526996c4 }, - { 0xb8f4308e7f80be53, 0x5f3cb8cb34a9d397, 0x18a961bd33cc2b2c, 0x710045fb3a9af671 } + { -0x2a9098d21cdb9765, -0x2e2575124c6b567f, -0x2284a7016e973013, 0x7ce246cd4d56c1e8 }, + { -0x3a06fbaac89d8923, -0x31a6ea72289ba327, -0x6d09a2aee2c994c7, 0x11574b6e526996c4 }, + { -0x470bcf71807f41ad, 0x5f3cb8cb34a9d397, 0x18a961bd33cc2b2c, 0x710045fb3a9af671 } }, { - { 0xa03fc862059d699e, 0x2370cfa19a619e69, 0xc4fe3b122f823deb, 0x1d1b056fa7f0844e }, - { 0x73f93d36101b95eb, 0xfaef33794f6f4486, 0x5651735f8f15e562, 0x7fa3f19058b40da1 }, - { 0x1bc64631e56bf61f, 0xd379ab106e5382a3, 0x4d58c57e0540168d, 0x566256628442d8e4 } + { -0x5fc0379dfa629662, 0x2370cfa19a619e69, -0x3b01c4edd07dc215, 0x1d1b056fa7f0844e }, + { 0x73f93d36101b95eb, -0x0510cc86b090bb7a, 0x5651735f8f15e562, 0x7fa3f19058b40da1 }, + { 0x1bc64631e56bf61f, -0x2c8654ef91ac7d5d, 0x4d58c57e0540168d, 0x566256628442d8e4 } }, }, { { - { 0xdd499cd61ff38640, 0x29cd9bc3063625a0, 0x51e2d8023dd73dc3, 0x4a25707a203b9231 }, - { 0xb9e499def6267ff6, 0x7772ca7b742c0843, 0x23a0153fe9a4f2b1, 0x2cdfdfecd5d05006 }, + { -0x22b66329e00c79c0, 0x29cd9bc3063625a0, 0x51e2d8023dd73dc3, 
0x4a25707a203b9231 }, + { -0x461b662109d9800a, 0x7772ca7b742c0843, 0x23a0153fe9a4f2b1, 0x2cdfdfecd5d05006 }, { 0x2ab7668a53f6ed6a, 0x304242581dd170a1, 0x4000144c3ae20161, 0x5721896d248e49fc } }, { { 0x285d5091a1d0da4e, 0x4baa6fa7b5fe3e08, 0x63e5177ce19393b3, 0x03c935afc4b030fd }, - { 0x0b6e5517fd181bae, 0x9022629f2bb963b4, 0x5509bce932064625, 0x578edd74f63c13da }, - { 0x997276c6492b0c3d, 0x47ccc2c4dfe205fc, 0xdcd29b84dd623a3c, 0x3ec2ab590288c7a2 } + { 0x0b6e5517fd181bae, -0x6fdd9d60d4469c4c, 0x5509bce932064625, 0x578edd74f63c13da }, + { -0x668d8939b6d4f3c3, 0x47ccc2c4dfe205fc, -0x232d647b229dc5c4, 0x3ec2ab590288c7a2 } }, { - { 0xa7213a09ae32d1cb, 0x0f2b87df40f5c2d5, 0x0baea4c6e81eab29, 0x0e1bf66c6adbac5e }, - { 0xa1a0d27be4d87bb9, 0xa98b4deb61391aed, 0x99a0ddd073cb9b83, 0x2dd5c25a200fcace }, - { 0xe2abd5e9792c887e, 0x1a020018cb926d5d, 0xbfba69cdbaae5f1e, 0x730548b35ae88f5f } + { -0x58dec5f651cd2e35, 0x0f2b87df40f5c2d5, 0x0baea4c6e81eab29, 0x0e1bf66c6adbac5e }, + { -0x5e5f2d841b278447, -0x5674b2149ec6e513, -0x665f222f8c34647d, 0x2dd5c25a200fcace }, + { -0x1d542a1686d37782, 0x1a020018cb926d5d, -0x404596324551a0e2, 0x730548b35ae88f5f } }, { - { 0x805b094ba1d6e334, 0xbf3ef17709353f19, 0x423f06cb0622702b, 0x585a2277d87845dd }, - { 0xc43551a3cba8b8ee, 0x65a26f1db2115f16, 0x760f4f52ab8c3850, 0x3043443b411db8ca }, - { 0xa18a5f8233d48962, 0x6698c4b5ec78257f, 0xa78e6fa5373e41ff, 0x7656278950ef981f } + { -0x7fa4f6b45e291ccc, -0x40c10e88f6cac0e7, 0x423f06cb0622702b, 0x585a2277d87845dd }, + { -0x3bcaae5c34574712, 0x65a26f1db2115f16, 0x760f4f52ab8c3850, 0x3043443b411db8ca }, + { -0x5e75a07dcc2b769e, 0x6698c4b5ec78257f, -0x5871905ac8c1be01, 0x7656278950ef981f } }, { - { 0xe17073a3ea86cf9d, 0x3a8cfbb707155fdc, 0x4853e7fc31838a8e, 0x28bbf484b613f616 }, - { 0x38c3cf59d51fc8c0, 0x9bedd2fd0506b6f2, 0x26bf109fab570e8f, 0x3f4160a8c1b846a6 }, - { 0xf2612f5c6f136c7c, 0xafead107f6dd11be, 0x527e9ad213de6f33, 0x1e79cb358188f75d } + { -0x1e8f8c5c15793063, 0x3a8cfbb707155fdc, 
0x4853e7fc31838a8e, 0x28bbf484b613f616 }, + { 0x38c3cf59d51fc8c0, -0x64122d02faf9490e, 0x26bf109fab570e8f, 0x3f4160a8c1b846a6 }, + { -0x0d9ed0a390ec9384, -0x50152ef80922ee42, 0x527e9ad213de6f33, 0x1e79cb358188f75d } }, { - { 0x77e953d8f5e08181, 0x84a50c44299dded9, 0xdc6c2d0c864525e5, 0x478ab52d39d1f2f4 }, - { 0x013436c3eef7e3f1, 0x828b6a7ffe9e10f8, 0x7ff908e5bcf9defc, 0x65d7951b3a3b3831 }, - { 0x66a6a4d39252d159, 0xe5dde1bc871ac807, 0xb82c6b40a6c1c96f, 0x16d87a411a212214 } + { 0x77e953d8f5e08181, -0x7b5af3bbd6622127, -0x2393d2f379bada1b, 0x478ab52d39d1f2f4 }, + { 0x013436c3eef7e3f1, -0x7d7495800161ef08, 0x7ff908e5bcf9defc, 0x65d7951b3a3b3831 }, + { 0x66a6a4d39252d159, -0x1a221e4378e537f9, -0x47d394bf593e3691, 0x16d87a411a212214 } }, { - { 0xfba4d5e2d54e0583, 0xe21fafd72ebd99fa, 0x497ac2736ee9778f, 0x1f990b577a5a6dde }, - { 0xb3bd7e5a42066215, 0x879be3cd0c5a24c1, 0x57c05db1d6f994b7, 0x28f87c8165f38ca6 }, - { 0xa3344ead1be8f7d6, 0x7d1e50ebacea798f, 0x77c6569e520de052, 0x45882fe1534d6d3e } + { -0x045b2a1d2ab1fa7d, -0x1de05028d1426606, 0x497ac2736ee9778f, 0x1f990b577a5a6dde }, + { -0x4c4281a5bdf99deb, -0x78641c32f3a5db3f, 0x57c05db1d6f994b7, 0x28f87c8165f38ca6 }, + { -0x5ccbb152e417082a, 0x7d1e50ebacea798f, 0x77c6569e520de052, 0x45882fe1534d6d3e } }, { - { 0xd8ac9929943c6fe4, 0xb5f9f161a38392a2, 0x2699db13bec89af3, 0x7dcf843ce405f074 }, + { -0x275366d66bc3901c, -0x4a060e9e5c7c6d5e, 0x2699db13bec89af3, 0x7dcf843ce405f074 }, { 0x6669345d757983d6, 0x62b6ed1117aa11a6, 0x7ddd1857985e128f, 0x688fe5b8f626f6dd }, - { 0x6c90d6484a4732c0, 0xd52143fdca563299, 0xb3be28c3915dc6e1, 0x6739687e7327191b } + { 0x6c90d6484a4732c0, -0x2adebc0235a9cd67, -0x4c41d73c6ea2391f, 0x6739687e7327191b } }, }, { { - { 0x8ce5aad0c9cb971f, 0x1156aaa99fd54a29, 0x41f7247015af9b78, 0x1fe8cca8420f49aa }, - { 0x9f65c5ea200814cf, 0x840536e169a31740, 0x8b0ed13925c8b4ad, 0x0080dbafe936361d }, + { -0x731a552f363468e1, 0x1156aaa99fd54a29, 0x41f7247015af9b78, 0x1fe8cca8420f49aa }, + { -0x609a3a15dff7eb31, 
-0x7bfac91e965ce8c0, -0x74f12ec6da374b53, 0x0080dbafe936361d }, { 0x72a1848f3c0cc82a, 0x38c560c2877c9e54, 0x5004e228ce554140, 0x042418a103429d71 } }, { - { 0x58e84c6f20816247, 0x8db2b2b6e36fd793, 0x977182561d484d85, 0x0822024f8632abd7 }, - { 0x899dea51abf3ff5f, 0x9b93a8672fc2d8ba, 0x2c38cb97be6ebd5c, 0x114d578497263b5d }, - { 0xb301bb7c6b1beca3, 0x55393f6dc6eb1375, 0x910d281097b6e4eb, 0x1ad4548d9d479ea3 } + { 0x58e84c6f20816247, -0x724d4d491c90286d, -0x688e7da9e2b7b27b, 0x0822024f8632abd7 }, + { -0x766215ae540c00a1, -0x646c5798d03d2746, 0x2c38cb97be6ebd5c, 0x114d578497263b5d }, + { -0x4cfe448394e4135d, 0x55393f6dc6eb1375, -0x6ef2d7ef68491b15, 0x1ad4548d9d479ea3 } }, { - { 0xa06fe66d0fe9fed3, 0xa8733a401c587909, 0x30d14d800df98953, 0x41ce5876c7b30258 }, - { 0xcd5a7da0389a48fd, 0xb38fa4aa9a78371e, 0xc6d9761b2cdb8e6c, 0x35cf51dbc97e1443 }, - { 0x59ac3bc5d670c022, 0xeae67c109b119406, 0x9798bdf0b3782fda, 0x651e3201fd074092 } + { -0x5f901992f016012d, -0x578cc5bfe3a786f7, 0x30d14d800df98953, 0x41ce5876c7b30258 }, + { -0x32a5825fc765b703, -0x4c705b556587c8e2, -0x392689e4d3247194, 0x35cf51dbc97e1443 }, + { 0x59ac3bc5d670c022, -0x151983ef64ee6bfa, -0x6867420f4c87d026, 0x651e3201fd074092 } }, { - { 0xa57ba4a01efcae9e, 0x769f4beedc308a94, 0xd1f10eeb3603cb2e, 0x4099ce5e7e441278 }, - { 0xd63d8483ef30c5cf, 0x4cd4b4962361cc0c, 0xee90e500a48426ac, 0x0af51d7d18c14eeb }, - { 0x1ac98e4f8a5121e9, 0x7dae9544dbfa2fe0, 0x8320aa0dd6430df9, 0x667282652c4a2fb5 } + { -0x5a845b5fe1035162, 0x769f4beedc308a94, -0x2e0ef114c9fc34d2, 0x4099ce5e7e441278 }, + { -0x29c27b7c10cf3a31, 0x4cd4b4962361cc0c, -0x116f1aff5b7bd954, 0x0af51d7d18c14eeb }, + { 0x1ac98e4f8a5121e9, 0x7dae9544dbfa2fe0, -0x7cdf55f229bcf207, 0x667282652c4a2fb5 } }, { - { 0xada8b6e02946db23, 0x1c0ce51a7b253ab7, 0x8448c85a66dd485b, 0x7f1fc025d0675adf }, - { 0x874621f4d86bc9ab, 0xb54c7bbe56fe6fea, 0x077a24257fadc22c, 0x1ab53be419b90d39 }, - { 0xd8ee1b18319ea6aa, 0x004d88083a21f0da, 0x3bd6aa1d883a4f4b, 0x4db9a3a6dfd9fd14 } + { 
-0x5257491fd6b924dd, 0x1c0ce51a7b253ab7, -0x7bb737a59922b7a5, 0x7f1fc025d0675adf }, + { -0x78b9de0b27943655, -0x4ab38441a9019016, 0x077a24257fadc22c, 0x1ab53be419b90d39 }, + { -0x2711e4e7ce615956, 0x004d88083a21f0da, 0x3bd6aa1d883a4f4b, 0x4db9a3a6dfd9fd14 } }, { - { 0xd95b00bbcbb77c68, 0xddbc846a91f17849, 0x7cf700aebe28d9b3, 0x5ce1285c85d31f3e }, - { 0x8ce7b23bb99c0755, 0x35c5d6edc4f50f7a, 0x7e1e2ed2ed9b50c3, 0x36305f16e8934da1 }, - { 0x31b6972d98b0bde8, 0x7d920706aca6de5b, 0xe67310f8908a659f, 0x50fac2a6efdf0235 } + { -0x26a4ff4434488398, -0x22437b956e0e87b7, 0x7cf700aebe28d9b3, 0x5ce1285c85d31f3e }, + { -0x73184dc44663f8ab, 0x35c5d6edc4f50f7a, 0x7e1e2ed2ed9b50c3, 0x36305f16e8934da1 }, + { 0x31b6972d98b0bde8, 0x7d920706aca6de5b, -0x198cef076f759a61, 0x50fac2a6efdf0235 } }, { - { 0x295b1c86f6f449bc, 0x51b2e84a1f0ab4dd, 0xc001cb30aa8e551d, 0x6a28d35944f43662 }, - { 0xf3d3a9f35b880f5a, 0xedec050cdb03e7c2, 0xa896981ff9f0b1a2, 0x49a4ae2bac5e34a4 }, + { 0x295b1c86f6f449bc, 0x51b2e84a1f0ab4dd, -0x3ffe34cf5571aae3, 0x6a28d35944f43662 }, + { -0x0c2c560ca477f0a6, -0x1213faf324fc183e, -0x576967e0060f4e5e, 0x49a4ae2bac5e34a4 }, { 0x28bb12ee04a740e0, 0x14313bbd9bce8174, 0x72f5b5e4e8c10c40, 0x7cbfb19936adcd5b } }, { - { 0x8e793a7acc36e6e0, 0xf9fab7a37d586eed, 0x3a4f9692bae1f4e4, 0x1c14b03eff5f447e }, - { 0xa311ddc26b89792d, 0x1b30b4c6da512664, 0x0ca77b4ccf150859, 0x1de443df1b009408 }, + { -0x7186c58533c91920, -0x0605485c82a79113, 0x3a4f9692bae1f4e4, 0x1c14b03eff5f447e }, + { -0x5cee223d947686d3, 0x1b30b4c6da512664, 0x0ca77b4ccf150859, 0x1de443df1b009408 }, { 0x19647bd114a85291, 0x57b76cb21034d3af, 0x6329db440f9d6dfa, 0x5ef43e586a571493 } }, }, { { - { 0xa66dcc9dc80c1ac0, 0x97a05cf41b38a436, 0xa7ebf3be95dbd7c6, 0x7da0b8f68d7e7dab }, - { 0xef782014385675a6, 0xa2649f30aafda9e8, 0x4cd1eb505cdfa8cb, 0x46115aba1d4dc0b3 }, - { 0xd40f1953c3b5da76, 0x1dac6f7321119e9b, 0x03cc6021feb25960, 0x5a5f887e83674b4b } + { -0x5992336237f3e540, -0x685fa30be4c75bca, -0x58140c416a24283a, 
0x7da0b8f68d7e7dab }, + { -0x1087dfebc7a98a5a, -0x5d9b60cf55025618, 0x4cd1eb505cdfa8cb, 0x46115aba1d4dc0b3 }, + { -0x2bf0e6ac3c4a258a, 0x1dac6f7321119e9b, 0x03cc6021feb25960, 0x5a5f887e83674b4b } }, { - { 0x9e9628d3a0a643b9, 0xb5c3cb00e6c32064, 0x9b5302897c2dec32, 0x43e37ae2d5d1c70c }, - { 0x8f6301cf70a13d11, 0xcfceb815350dd0c4, 0xf70297d4a4bca47e, 0x3669b656e44d1434 }, - { 0x387e3f06eda6e133, 0x67301d5199a13ac0, 0xbd5ad8f836263811, 0x6a21e6cd4fd5e9be } + { -0x6169d72c5f59bc47, -0x4a3c34ff193cdf9c, -0x64acfd7683d213ce, 0x43e37ae2d5d1c70c }, + { -0x709cfe308f5ec2ef, -0x303147eacaf22f3c, -0x08fd682b5b435b82, 0x3669b656e44d1434 }, + { 0x387e3f06eda6e133, 0x67301d5199a13ac0, -0x42a52707c9d9c7ef, 0x6a21e6cd4fd5e9be } }, { - { 0xef4129126699b2e3, 0x71d30847708d1301, 0x325432d01182b0bd, 0x45371b07001e8b36 }, - { 0xf1c6170a3046e65f, 0x58712a2a00d23524, 0x69dbbd3c8c82b755, 0x586bf9f1a195ff57 }, - { 0xa6db088d5ef8790b, 0x5278f0dc610937e5, 0xac0349d261a16eb8, 0x0eafb03790e52179 } + { -0x10bed6ed99664d1d, 0x71d30847708d1301, 0x325432d01182b0bd, 0x45371b07001e8b36 }, + { -0x0e39e8f5cfb919a1, 0x58712a2a00d23524, 0x69dbbd3c8c82b755, 0x586bf9f1a195ff57 }, + { -0x5924f772a10786f5, 0x5278f0dc610937e5, -0x53fcb62d9e5e9148, 0x0eafb03790e52179 } }, { - { 0x5140805e0f75ae1d, 0xec02fbe32662cc30, 0x2cebdf1eea92396d, 0x44ae3344c5435bb3 }, - { 0x960555c13748042f, 0x219a41e6820baa11, 0x1c81f73873486d0c, 0x309acc675a02c661 }, - { 0x9cf289b9bba543ee, 0xf3760e9d5ac97142, 0x1d82e5c64f9360aa, 0x62d5221b7f94678f } + { 0x5140805e0f75ae1d, -0x13fd041cd99d33d0, 0x2cebdf1eea92396d, 0x44ae3344c5435bb3 }, + { -0x69faaa3ec8b7fbd1, 0x219a41e6820baa11, 0x1c81f73873486d0c, 0x309acc675a02c661 }, + { -0x630d7646445abc12, -0x0c89f162a5368ebe, 0x1d82e5c64f9360aa, 0x62d5221b7f94678f } }, { - { 0x7585d4263af77a3c, 0xdfae7b11fee9144d, 0xa506708059f7193d, 0x14f29a5383922037 }, - { 0x524c299c18d0936d, 0xc86bb56c8a0c1a0c, 0xa375052edb4a8631, 0x5c0efde4bc754562 }, - { 0xdf717edc25b2d7f5, 0x21f970db99b53040, 
0xda9234b7c3ed4c62, 0x5e72365c7bee093e } + { 0x7585d4263af77a3c, -0x205184ee0116ebb3, -0x5af98f7fa608e6c3, 0x14f29a5383922037 }, + { 0x524c299c18d0936d, -0x37944a9375f3e5f4, -0x5c8afad124b579cf, 0x5c0efde4bc754562 }, + { -0x208e8123da4d280b, 0x21f970db99b53040, -0x256dcb483c12b39e, 0x5e72365c7bee093e } }, { - { 0x7d9339062f08b33e, 0x5b9659e5df9f32be, 0xacff3dad1f9ebdfd, 0x70b20555cb7349b7 }, - { 0x575bfc074571217f, 0x3779675d0694d95b, 0x9a0a37bbf4191e33, 0x77f1104c47b4eabc }, - { 0xbe5113c555112c4c, 0x6688423a9a881fcd, 0x446677855e503b47, 0x0e34398f4a06404a } + { 0x7d9339062f08b33e, 0x5b9659e5df9f32be, -0x5300c252e0614203, 0x70b20555cb7349b7 }, + { 0x575bfc074571217f, 0x3779675d0694d95b, -0x65f5c8440be6e1cd, 0x77f1104c47b4eabc }, + { -0x41aeec3aaaeed3b4, 0x6688423a9a881fcd, 0x446677855e503b47, 0x0e34398f4a06404a } }, { - { 0x18930b093e4b1928, 0x7de3e10e73f3f640, 0xf43217da73395d6f, 0x6f8aded6ca379c3e }, - { 0xb67d22d93ecebde8, 0x09b3e84127822f07, 0x743fa61fb05b6d8d, 0x5e5405368a362372 }, - { 0xe340123dfdb7b29a, 0x487b97e1a21ab291, 0xf9967d02fde6949e, 0x780de72ec8d3de97 } + { 0x18930b093e4b1928, 0x7de3e10e73f3f640, -0x0bcde8258cc6a291, 0x6f8aded6ca379c3e }, + { -0x4982dd26c1314218, 0x09b3e84127822f07, 0x743fa61fb05b6d8d, 0x5e5405368a362372 }, + { -0x1cbfedc202484d66, 0x487b97e1a21ab291, -0x066982fd02196b62, 0x780de72ec8d3de97 } }, { - { 0x671feaf300f42772, 0x8f72eb2a2a8c41aa, 0x29a17fd797373292, 0x1defc6ad32b587a6 }, + { 0x671feaf300f42772, -0x708d14d5d573be56, 0x29a17fd797373292, 0x1defc6ad32b587a6 }, { 0x0ae28545089ae7bc, 0x388ddecf1c7f4d06, 0x38ac15510a4811b8, 0x0eb28bf671928ce4 }, - { 0xaf5bbe1aef5195a7, 0x148c1277917b15ed, 0x2991f7fb7ae5da2e, 0x467d201bf8dd2867 } + { -0x50a441e510ae6a59, 0x148c1277917b15ed, 0x2991f7fb7ae5da2e, 0x467d201bf8dd2867 } }, }, { { - { 0x745f9d56296bc318, 0x993580d4d8152e65, 0xb0e5b13f5839e9ce, 0x51fc2b28d43921c0 }, - { 0x7906ee72f7bd2e6b, 0x05d270d6109abf4e, 0x8d5cfe45b941a8a4, 0x44c218671c974287 }, + { 0x745f9d56296bc318, 
-0x66ca7f2b27ead19b, -0x4f1a4ec0a7c61632, 0x51fc2b28d43921c0 }, + { 0x7906ee72f7bd2e6b, 0x05d270d6109abf4e, -0x72a301ba46be575c, 0x44c218671c974287 }, { 0x1b8fd11795e2a98c, 0x1c4e5ee12b6b6291, 0x5b30e7107424b572, 0x6e6b9de84c4f4ac6 } }, { - { 0x6b7c5f10f80cb088, 0x736b54dc56e42151, 0xc2b620a5c6ef99c4, 0x5f4c802cc3a06f42 }, - { 0xdff25fce4b1de151, 0xd841c0c7e11c4025, 0x2554b3c854749c87, 0x2d292459908e0df9 }, - { 0x9b65c8f17d0752da, 0x881ce338c77ee800, 0xc3b514f05b62f9e3, 0x66ed5dd5bec10d48 } + { 0x6b7c5f10f80cb088, 0x736b54dc56e42151, -0x3d49df5a3910663c, 0x5f4c802cc3a06f42 }, + { -0x200da031b4e21eaf, -0x27be3f381ee3bfdb, 0x2554b3c854749c87, 0x2d292459908e0df9 }, + { -0x649a370e82f8ad26, -0x77e31cc738811800, -0x3c4aeb0fa49d061d, 0x66ed5dd5bec10d48 } }, { - { 0xf0adf3c9cbca047d, 0x81c3b2cbf4552f6b, 0xcfda112d44735f93, 0x1f23a0c77e20048c }, - { 0x7d38a1c20bb2089d, 0x808334e196ccd412, 0xc4a70b8c6c97d313, 0x2eacf8bc03007f20 }, - { 0xf235467be5bc1570, 0x03d2d9020dbab38c, 0x27529aa2fcf9e09e, 0x0840bef29d34bc50 } + { -0x0f520c363435fb83, -0x7e3c4d340baad095, -0x3025eed2bb8ca06d, 0x1f23a0c77e20048c }, + { 0x7d38a1c20bb2089d, -0x7f7ccb1e69332bee, -0x3b58f47393682ced, 0x2eacf8bc03007f20 }, + { -0x0dcab9841a43ea90, 0x03d2d9020dbab38c, 0x27529aa2fcf9e09e, 0x0840bef29d34bc50 } }, { - { 0xcd54e06b7f37e4eb, 0x8cc15f87f5e96cca, 0xb8248bb0d3597dce, 0x246affa06074400c }, + { -0x32ab1f9480c81b15, -0x733ea0780a169336, -0x47db744f2ca68232, 0x246affa06074400c }, { 0x796dfb35dc10b287, 0x27176bcd5c7ff29d, 0x7f3d43e8c7b24905, 0x0304f5a191c54276 }, - { 0x37d88e68fbe45321, 0x86097548c0d75032, 0x4e9b13ef894a0d35, 0x25a83cac5753d325 } + { 0x37d88e68fbe45321, -0x79f68ab73f28afce, 0x4e9b13ef894a0d35, 0x25a83cac5753d325 } }, { - { 0x9f0f66293952b6e2, 0x33db5e0e0934267b, 0xff45252bd609fedc, 0x06be10f5c506e0c9 }, + { -0x60f099d6c6ad491e, 0x33db5e0e0934267b, -0x00badad429f60124, 0x06be10f5c506e0c9 }, { 0x10222f48eed8165e, 0x623fc1234b8bcf3a, 0x1e145c09c221e8f0, 0x7ccfa59fca782630 }, - { 
0x1a9615a9b62a345f, 0x22050c564a52fecc, 0xa7a2788528bc0dfe, 0x5e82770a1a1ee71d } + { 0x1a9615a9b62a345f, 0x22050c564a52fecc, -0x585d877ad743f202, 0x5e82770a1a1ee71d } }, { - { 0xe802e80a42339c74, 0x34175166a7fffae5, 0x34865d1f1c408cae, 0x2cca982c605bc5ee }, - { 0x35425183ad896a5c, 0xe8673afbe78d52f6, 0x2c66f25f92a35f64, 0x09d04f3b3b86b102 }, - { 0xfd2d5d35197dbe6e, 0x207c2eea8be4ffa3, 0x2613d8db325ae918, 0x7a325d1727741d3e } + { -0x17fd17f5bdcc638c, 0x34175166a7fffae5, 0x34865d1f1c408cae, 0x2cca982c605bc5ee }, + { 0x35425183ad896a5c, -0x1798c5041872ad0a, 0x2c66f25f92a35f64, 0x09d04f3b3b86b102 }, + { -0x02d2a2cae6824192, 0x207c2eea8be4ffa3, 0x2613d8db325ae918, 0x7a325d1727741d3e } }, { - { 0xecd27d017e2a076a, 0xd788689f1636495e, 0x52a61af0919233e5, 0x2a479df17bb1ae64 }, - { 0xd036b9bbd16dfde2, 0xa2055757c497a829, 0x8e6cc966a7f12667, 0x4d3b1a791239c180 }, - { 0x9e5eee8e33db2710, 0x189854ded6c43ca5, 0xa41c22c592718138, 0x27ad5538a43a5e9b } + { -0x132d82fe81d5f896, -0x28779760e9c9b6a2, 0x52a61af0919233e5, 0x2a479df17bb1ae64 }, + { -0x2fc946442e92021e, -0x5dfaa8a83b6857d7, -0x71933699580ed999, 0x4d3b1a791239c180 }, + { -0x61a11171cc24d8f0, 0x189854ded6c43ca5, -0x5be3dd3a6d8e7ec8, 0x27ad5538a43a5e9b } }, { - { 0xcb5a7d638e47077c, 0x8db7536120a1c059, 0x549e1e4d8bedfdcc, 0x080153b7503b179d }, - { 0x2746dd4b15350d61, 0xd03fcbc8ee9521b7, 0xe86e365a138672ca, 0x510e987f7e7d89e2 }, - { 0xdda69d930a3ed3e3, 0x3d386ef1cd60a722, 0xc817ad58bdaa4ee6, 0x23be8d554fe7372a } + { -0x34a5829c71b8f884, -0x7248ac9edf5e3fa7, 0x549e1e4d8bedfdcc, 0x080153b7503b179d }, + { 0x2746dd4b15350d61, -0x2fc03437116ade49, -0x1791c9a5ec798d36, 0x510e987f7e7d89e2 }, + { -0x2259626cf5c12c1d, 0x3d386ef1cd60a722, -0x37e852a74255b11a, 0x23be8d554fe7372a } }, }, { { - { 0xbc1ef4bd567ae7a9, 0x3f624cb2d64498bd, 0xe41064d22c1f4ec8, 0x2ef9c5a5ba384001 }, - { 0x95fe919a74ef4fad, 0x3a827becf6a308a2, 0x964e01d309a47b01, 0x71c43c4f5ba3c797 }, - { 0xb6fd6df6fa9e74cd, 0xf18278bce4af267a, 0x8255b3d0f1ef990e, 
0x5a758ca390c5f293 } + { -0x43e10b42a9851857, 0x3f624cb2d64498bd, -0x1bef9b2dd3e0b138, 0x2ef9c5a5ba384001 }, + { -0x6a016e658b10b053, 0x3a827becf6a308a2, -0x69b1fe2cf65b84ff, 0x71c43c4f5ba3c797 }, + { -0x4902920905618b33, -0x0e7d87431b50d986, -0x7daa4c2f0e1066f2, 0x5a758ca390c5f293 } }, { - { 0x8ce0918b1d61dc94, 0x8ded36469a813066, 0xd4e6a829afe8aad3, 0x0a738027f639d43f }, - { 0xa2b72710d9462495, 0x3aa8c6d2d57d5003, 0xe3d400bfa0b487ca, 0x2dbae244b3eb72ec }, - { 0x980f4a2f57ffe1cc, 0x00670d0de1839843, 0x105c3f4a49fb15fd, 0x2698ca635126a69c } + { -0x731f6e74e29e236c, -0x7212c9b9657ecf9a, -0x2b1957d65017552d, 0x0a738027f639d43f }, + { -0x5d48d8ef26b9db6b, 0x3aa8c6d2d57d5003, -0x1c2bff405f4b7836, 0x2dbae244b3eb72ec }, + { -0x67f0b5d0a8001e34, 0x00670d0de1839843, 0x105c3f4a49fb15fd, 0x2698ca635126a69c } }, { - { 0x2e3d702f5e3dd90e, 0x9e3f0918e4d25386, 0x5e773ef6024da96a, 0x3c004b0c4afa3332 }, - { 0xe765318832b0ba78, 0x381831f7925cff8b, 0x08a81b91a0291fcc, 0x1fb43dcc49caeb07 }, - { 0x9aa946ac06f4b82b, 0x1ca284a5a806c4f3, 0x3ed3265fc6cd4787, 0x6b43fd01cd1fd217 } + { 0x2e3d702f5e3dd90e, -0x61c0f6e71b2dac7a, 0x5e773ef6024da96a, 0x3c004b0c4afa3332 }, + { -0x189ace77cd4f4588, 0x381831f7925cff8b, 0x08a81b91a0291fcc, 0x1fb43dcc49caeb07 }, + { -0x6556b953f90b47d5, 0x1ca284a5a806c4f3, 0x3ed3265fc6cd4787, 0x6b43fd01cd1fd217 } }, { - { 0xb5c742583e760ef3, 0x75dc52b9ee0ab990, 0xbf1427c2072b923f, 0x73420b2d6ff0d9f0 }, - { 0xc7a75d4b4697c544, 0x15fdf848df0fffbf, 0x2868b9ebaa46785a, 0x5a68d7105b52f714 }, - { 0xaf2cf6cb9e851e06, 0x8f593913c62238c4, 0xda8ab89699fbf373, 0x3db5632fea34bc9e } + { -0x4a38bda7c189f10d, 0x75dc52b9ee0ab990, -0x40ebd83df8d46dc1, 0x73420b2d6ff0d9f0 }, + { -0x3858a2b4b9683abc, 0x15fdf848df0fffbf, 0x2868b9ebaa46785a, 0x5a68d7105b52f714 }, + { -0x50d30934617ae1fa, -0x70a6c6ec39ddc73c, -0x2575476966040c8d, 0x3db5632fea34bc9e } }, { - { 0x2e4990b1829825d5, 0xedeaeb873e9a8991, 0xeef03d394c704af8, 0x59197ea495df2b0e }, - { 0xf46eee2bf75dd9d8, 0x0d17b1f6396759a5, 
0x1bf2d131499e7273, 0x04321adf49d75f13 }, - { 0x04e16019e4e55aae, 0xe77b437a7e2f92e9, 0xc7ce2dc16f159aa4, 0x45eafdc1f4d70cc0 } + { 0x2e4990b1829825d5, -0x12151478c165766f, -0x110fc2c6b38fb508, 0x59197ea495df2b0e }, + { -0x0b9111d408a22628, 0x0d17b1f6396759a5, 0x1bf2d131499e7273, 0x04321adf49d75f13 }, + { 0x04e16019e4e55aae, -0x1884bc8581d06d17, -0x3831d23e90ea655c, 0x45eafdc1f4d70cc0 } }, { - { 0xb60e4624cfccb1ed, 0x59dbc292bd5c0395, 0x31a09d1ddc0481c9, 0x3f73ceea5d56d940 }, - { 0x698401858045d72b, 0x4c22faa2cf2f0651, 0x941a36656b222dc6, 0x5a5eebc80362dade }, - { 0xb7a7bfd10a4e8dc6, 0xbe57007e44c9b339, 0x60c1207f1557aefa, 0x26058891266218db } + { -0x49f1b9db30334e13, 0x59dbc292bd5c0395, 0x31a09d1ddc0481c9, 0x3f73ceea5d56d940 }, + { 0x698401858045d72b, 0x4c22faa2cf2f0651, -0x6be5c99a94ddd23a, 0x5a5eebc80362dade }, + { -0x4858402ef5b1723a, -0x41a8ff81bb364cc7, 0x60c1207f1557aefa, 0x26058891266218db } }, { - { 0x4c818e3cc676e542, 0x5e422c9303ceccad, 0xec07cccab4129f08, 0x0dedfa10b24443b8 }, - { 0x59f704a68360ff04, 0xc3d93fde7661e6f4, 0x831b2a7312873551, 0x54ad0c2e4e615d57 }, - { 0xee3b67d5b82b522a, 0x36f163469fa5c1eb, 0xa5b4d2f26ec19fd3, 0x62ecb2baa77a9408 } + { 0x4c818e3cc676e542, 0x5e422c9303ceccad, -0x13f833354bed60f8, 0x0dedfa10b24443b8 }, + { 0x59f704a68360ff04, -0x3c26c021899e190c, -0x7ce4d58ced78caaf, 0x54ad0c2e4e615d57 }, + { -0x11c4982a47d4add6, 0x36f163469fa5c1eb, -0x5a4b2d0d913e602d, 0x62ecb2baa77a9408 } }, { - { 0x92072836afb62874, 0x5fcd5e8579e104a5, 0x5aad01adc630a14a, 0x61913d5075663f98 }, - { 0xe5ed795261152b3d, 0x4962357d0eddd7d1, 0x7482c8d0b96b4c71, 0x2e59f919a966d8be }, - { 0x0dc62d361a3231da, 0xfa47583294200270, 0x02d801513f9594ce, 0x3ddbc2a131c05d5c } + { -0x6df8d7c95049d78c, 0x5fcd5e8579e104a5, 0x5aad01adc630a14a, 0x61913d5075663f98 }, + { -0x1a1286ad9eead4c3, 0x4962357d0eddd7d1, 0x7482c8d0b96b4c71, 0x2e59f919a966d8be }, + { 0x0dc62d361a3231da, -0x05b8a7cd6bdffd90, 0x02d801513f9594ce, 0x3ddbc2a131c05d5c } }, }, { { - { 0xfb735ac2004a35d1, 
0x31de0f433a6607c3, 0x7b8591bfc528d599, 0x55be9a25f5bb050c }, - { 0x3f50a50a4ffb81ef, 0xb1e035093bf420bf, 0x9baa8e1cc6aa2cd0, 0x32239861fa237a40 }, + { -0x048ca53dffb5ca2f, 0x31de0f433a6607c3, 0x7b8591bfc528d599, 0x55be9a25f5bb050c }, + { 0x3f50a50a4ffb81ef, -0x4e1fcaf6c40bdf41, -0x645571e33955d330, 0x32239861fa237a40 }, { 0x0d005acd33db3dbf, 0x0111b37c80ac35e2, 0x4892d66c6f88ebeb, 0x770eadb16508fbcd } }, { - { 0xf1d3b681a05071b9, 0x2207659a3592ff3a, 0x5f0169297881e40e, 0x16bedd0e86ba374e }, - { 0x8451f9e05e4e89dd, 0xc06302ffbc793937, 0x5d22749556a6495c, 0x09a6755ca05603fb }, + { -0x0e2c497e5faf8e47, 0x2207659a3592ff3a, 0x5f0169297881e40e, 0x16bedd0e86ba374e }, + { -0x7bae061fa1b17623, -0x3f9cfd004386c6c9, 0x5d22749556a6495c, 0x09a6755ca05603fb }, { 0x5ecccc4f2c2737b5, 0x43b79e0c2dccb703, 0x33e008bc4ec43df3, 0x06c1b840f07566c0 } }, { - { 0x69ee9e7f9b02805c, 0xcbff828a547d1640, 0x3d93a869b2430968, 0x46b7b8cd3fe26972 }, + { 0x69ee9e7f9b02805c, -0x34007d75ab82e9c0, 0x3d93a869b2430968, 0x46b7b8cd3fe26972 }, { 0x7688a5c6a388f877, 0x02a96c14deb2b6ac, 0x64c9f3431b8c2af8, 0x3628435554a1eed6 }, - { 0xe9812086fe7eebe0, 0x4cba6be72f515437, 0x1d04168b516efae9, 0x5ea1391043982cb9 } + { -0x167edf7901811420, 0x4cba6be72f515437, 0x1d04168b516efae9, 0x5ea1391043982cb9 } }, { - { 0x6f2b3be4d5d3b002, 0xafec33d96a09c880, 0x035f73a4a8bcc4cc, 0x22c5b9284662198b }, + { 0x6f2b3be4d5d3b002, -0x5013cc2695f63780, 0x035f73a4a8bcc4cc, 0x22c5b9284662198b }, { 0x49125c9cf4702ee1, 0x4520b71f8b25b32d, 0x33193026501fef7e, 0x656d8997c8d2eb2b }, - { 0xcb58c8fe433d8939, 0x89a0cb2e6a8d7e50, 0x79ca955309fbbe5a, 0x0c626616cd7fc106 } + { -0x34a73701bcc276c7, -0x765f34d1957281b0, 0x79ca955309fbbe5a, 0x0c626616cd7fc106 } }, { - { 0x8fdfc379fbf454b1, 0x45a5a970f1a4b771, 0xac921ef7bad35915, 0x42d088dca81c2192 }, + { -0x70203c86040bab4f, 0x45a5a970f1a4b771, -0x536de108452ca6eb, 0x42d088dca81c2192 }, { 0x1ffeb80a4879b61f, 0x6396726e4ada21ed, 0x33c7b093368025ba, 0x471aa0c6f3c31788 }, - { 0x8fda0f37a0165199, 
0x0adadb77c8a0e343, 0x20fbfdfcc875e820, 0x1cf2bea80c2206e7 } + { -0x7025f0c85fe9ae67, 0x0adadb77c8a0e343, 0x20fbfdfcc875e820, 0x1cf2bea80c2206e7 } }, { - { 0x982d6e1a02c0412f, 0x90fa4c83db58e8fe, 0x01c2f5bcdcb18bc0, 0x686e0c90216abc66 }, - { 0xc2ddf1deb36202ac, 0x92a5fe09d2e27aa5, 0x7d1648f6fc09f1d3, 0x74c2cc0513bc4959 }, - { 0x1fadbadba54395a7, 0xb41a02a0ae0da66a, 0xbf19f598bba37c07, 0x6a12b8acde48430d } + { -0x67d291e5fd3fbed1, -0x6f05b37c24a71702, 0x01c2f5bcdcb18bc0, 0x686e0c90216abc66 }, + { -0x3d220e214c9dfd54, -0x6d5a01f62d1d855b, 0x7d1648f6fc09f1d3, 0x74c2cc0513bc4959 }, + { 0x1fadbadba54395a7, -0x4be5fd5f51f25996, -0x40e60a67445c83f9, 0x6a12b8acde48430d } }, { - { 0x793bdd801aaeeb5f, 0x00a2a0aac1518871, 0xe8a373a31f2136b4, 0x48aab888fc91ef19 }, - { 0xf8daea1f39d495d9, 0x592c190e525f1dfc, 0xdb8cbd04c9991d1b, 0x11f7fda3d88f0cb7 }, - { 0x041f7e925830f40e, 0x002d6ca979661c06, 0x86dc9ff92b046a2e, 0x760360928b0493d1 } + { 0x793bdd801aaeeb5f, 0x00a2a0aac1518871, -0x175c8c5ce0dec94c, 0x48aab888fc91ef19 }, + { -0x072515e0c62b6a27, 0x592c190e525f1dfc, -0x247342fb3666e2e5, 0x11f7fda3d88f0cb7 }, + { 0x041f7e925830f40e, 0x002d6ca979661c06, -0x79236006d4fb95d2, 0x760360928b0493d1 } }, { - { 0xb43108e5695a0b05, 0x6cb00ee8ad37a38b, 0x5edad6eea3537381, 0x3f2602d4b6dc3224 }, - { 0x21bb41c6120cf9c6, 0xeab2aa12decda59b, 0xc1a72d020aa48b34, 0x215d4d27e87d3b68 }, - { 0xc8b247b65bcaf19c, 0x49779dc3b1b2c652, 0x89a180bbd5ece2e2, 0x13f098a3cec8e039 } + { -0x4bcef71a96a5f4fb, 0x6cb00ee8ad37a38b, 0x5edad6eea3537381, 0x3f2602d4b6dc3224 }, + { 0x21bb41c6120cf9c6, -0x154d55ed21325a65, -0x3e58d2fdf55b74cc, 0x215d4d27e87d3b68 }, + { -0x374db849a4350e64, 0x49779dc3b1b2c652, -0x765e7f442a131d1e, 0x13f098a3cec8e039 } }, }, { { - { 0xf3aa57a22796bb14, 0x883abab79b07da21, 0xe54be21831a0391c, 0x5ee7fb38d83205f9 }, - { 0x9adc0ff9ce5ec54b, 0x039c2a6b8c2f130d, 0x028007c7f0f89515, 0x78968314ac04b36b }, - { 0x538dfdcb41446a8e, 0xa5acfda9434937f9, 0x46af908d263c8c78, 0x61d0633c9bca0d09 } + { 
-0x0c55a85dd86944ec, -0x77c5454864f825df, -0x1ab41de7ce5fc6e4, 0x5ee7fb38d83205f9 }, + { -0x6523f00631a13ab5, 0x039c2a6b8c2f130d, 0x028007c7f0f89515, 0x78968314ac04b36b }, + { 0x538dfdcb41446a8e, -0x5a530256bcb6c807, 0x46af908d263c8c78, 0x61d0633c9bca0d09 } }, { - { 0xada328bcf8fc73df, 0xee84695da6f037fc, 0x637fb4db38c2a909, 0x5b23ac2df8067bdc }, - { 0x63744935ffdb2566, 0xc5bd6b89780b68bb, 0x6f1b3280553eec03, 0x6e965fd847aed7f5 }, - { 0x9ad2b953ee80527b, 0xe88f19aafade6d8d, 0x0e711704150e82cf, 0x79b9bbb9dd95dedc } + { -0x525cd74307038c21, -0x117b96a2590fc804, 0x637fb4db38c2a909, 0x5b23ac2df8067bdc }, + { 0x63744935ffdb2566, -0x3a42947687f49745, 0x6f1b3280553eec03, 0x6e965fd847aed7f5 }, + { -0x652d46ac117fad85, -0x1770e65505219273, 0x0e711704150e82cf, 0x79b9bbb9dd95dedc } }, { - { 0xd1997dae8e9f7374, 0xa032a2f8cfbb0816, 0xcd6cba126d445f0a, 0x1ba811460accb834 }, - { 0xebb355406a3126c2, 0xd26383a868c8c393, 0x6c0c6429e5b97a82, 0x5065f158c9fd2147 }, - { 0x708169fb0c429954, 0xe14600acd76ecf67, 0x2eaab98a70e645ba, 0x3981f39e58a4faf2 } + { -0x2e66825171608c8c, -0x5fcd5d073044f7ea, -0x329345ed92bba0f6, 0x1ba811460accb834 }, + { -0x144caabf95ced93e, -0x2d9c7c5797373c6d, 0x6c0c6429e5b97a82, 0x5065f158c9fd2147 }, + { 0x708169fb0c429954, -0x1eb9ff5328913099, 0x2eaab98a70e645ba, 0x3981f39e58a4faf2 } }, { - { 0xc845dfa56de66fde, 0xe152a5002c40483a, 0xe9d2e163c7b4f632, 0x30f4452edcbc1b65 }, + { -0x37ba205a92199022, -0x1ead5affd3bfb7c6, -0x162d1e9c384b09ce, 0x30f4452edcbc1b65 }, { 0x18fb8a7559230a93, 0x1d168f6960e6f45d, 0x3a85a94514a93cb5, 0x38dc083705acd0fd }, - { 0x856d2782c5759740, 0xfa134569f99cbecc, 0x8844fc73c0ea4e71, 0x632d9a1a593f2469 } + { -0x7a92d87d3a8a68c0, -0x05ecba9606634134, -0x77bb038c3f15b18f, 0x632d9a1a593f2469 } }, { - { 0xbf09fd11ed0c84a7, 0x63f071810d9f693a, 0x21908c2d57cf8779, 0x3a5a7df28af64ba2 }, - { 0xf6bb6b15b807cba6, 0x1823c7dfbc54f0d7, 0xbb1d97036e29670b, 0x0b24f48847ed4a57 }, - { 0xdcdad4be511beac7, 0xa4538075ed26ccf2, 0xe19cff9f005f9a65, 
0x34fcf74475481f63 } + { -0x40f602ee12f37b59, 0x63f071810d9f693a, 0x21908c2d57cf8779, 0x3a5a7df28af64ba2 }, + { -0x094494ea47f8345a, 0x1823c7dfbc54f0d7, -0x44e268fc91d698f5, 0x0b24f48847ed4a57 }, + { -0x23252b41aee41539, -0x5bac7f8a12d9330e, -0x1e630060ffa0659b, 0x34fcf74475481f63 } }, { - { 0xa5bb1dab78cfaa98, 0x5ceda267190b72f2, 0x9309c9110a92608e, 0x0119a3042fb374b0 }, - { 0xc197e04c789767ca, 0xb8714dcb38d9467d, 0x55de888283f95fa8, 0x3d3bdc164dfa63f7 }, - { 0x67a2d89ce8c2177d, 0x669da5f66895d0c1, 0xf56598e5b282a2b0, 0x56c088f1ede20a73 } + { -0x5a44e25487305568, 0x5ceda267190b72f2, -0x6cf636eef56d9f72, 0x0119a3042fb374b0 }, + { -0x3e681fb387689836, -0x478eb234c726b983, 0x55de888283f95fa8, 0x3d3bdc164dfa63f7 }, + { 0x67a2d89ce8c2177d, 0x669da5f66895d0c1, -0x0a9a671a4d7d5d50, 0x56c088f1ede20a73 } }, { - { 0x581b5fac24f38f02, 0xa90be9febae30cbd, 0x9a2169028acf92f0, 0x038b7ea48359038f }, - { 0x336d3d1110a86e17, 0xd7f388320b75b2fa, 0xf915337625072988, 0x09674c6b99108b87 }, - { 0x9f4ef82199316ff8, 0x2f49d282eaa78d4f, 0x0971a5ab5aef3174, 0x6e5e31025969eb65 } + { 0x581b5fac24f38f02, -0x56f41601451cf343, -0x65de96fd75306d10, 0x038b7ea48359038f }, + { 0x336d3d1110a86e17, -0x280c77cdf48a4d06, -0x06eacc89daf8d678, 0x09674c6b99108b87 }, + { -0x60b107de66ce9008, 0x2f49d282eaa78d4f, 0x0971a5ab5aef3174, 0x6e5e31025969eb65 } }, { - { 0x3304fb0e63066222, 0xfb35068987acba3f, 0xbd1924778c1061a3, 0x3058ad43d1838620 }, - { 0xb16c62f587e593fb, 0x4999eddeca5d3e71, 0xb491c1e014cc3e6d, 0x08f5114789a8dba8 }, - { 0x323c0ffde57663d0, 0x05c3df38a22ea610, 0xbdc78abdac994f9a, 0x26549fa4efe3dc99 } + { 0x3304fb0e63066222, -0x04caf976785345c1, -0x42e6db8873ef9e5d, 0x3058ad43d1838620 }, + { -0x4e939d0a781a6c05, 0x4999eddeca5d3e71, -0x4b6e3e1feb33c193, 0x08f5114789a8dba8 }, + { 0x323c0ffde57663d0, 0x05c3df38a22ea610, -0x423875425366b066, 0x26549fa4efe3dc99 } }, }, { { - { 0x04dbbc17f75396b9, 0x69e6a2d7d2f86746, 0xc6409d99f53eabc6, 0x606175f6332e25d2 }, - { 0x738b38d787ce8f89, 0xb62658e24179a88d, 
0x30738c9cf151316d, 0x49128c7f727275c9 }, + { 0x04dbbc17f75396b9, 0x69e6a2d7d2f86746, -0x39bf62660ac1543a, 0x606175f6332e25d2 }, + { 0x738b38d787ce8f89, -0x49d9a71dbe865773, 0x30738c9cf151316d, 0x49128c7f727275c9 }, { 0x4021370ef540e7dd, 0x0910d6f5a1f1d0a5, 0x4634aacd5b06b807, 0x6a39e6356944f235 } }, { - { 0x1da1965774049e9d, 0xfbcd6ea198fe352b, 0xb1cbcd50cc5236a6, 0x1f5ec83d3f9846e2 }, - { 0x96cd5640df90f3e7, 0x6c3a760edbfa25ea, 0x24f3ef0959e33cc4, 0x42889e7e530d2e58 }, - { 0x8efb23c3328ccb75, 0xaf42a207dd876ee9, 0x20fbdadc5dfae796, 0x241e246b06bf9f51 } + { 0x1da1965774049e9d, -0x0432915e6701cad5, -0x4e3432af33adc95a, 0x1f5ec83d3f9846e2 }, + { -0x6932a9bf206f0c19, 0x6c3a760edbfa25ea, 0x24f3ef0959e33cc4, 0x42889e7e530d2e58 }, + { -0x7104dc3ccd73348b, -0x50bd5df822789117, 0x20fbdadc5dfae796, 0x241e246b06bf9f51 } }, { { 0x7eaafc9a6280bbb8, 0x22a70f12f403d809, 0x31ce40bb1bfc8d20, 0x2bc65635e8bd53ee }, { 0x29e68e57ad6e98f6, 0x4c9260c80b462065, 0x3f00862ea51ebb4b, 0x5bc2c77fb38d9097 }, - { 0xe8d5dc9fa96bad93, 0xe58fb17dde1947dc, 0x681532ea65185fa3, 0x1fdd6c3b034a7830 } + { -0x172a23605694526d, -0x1a704e8221e6b824, 0x681532ea65185fa3, 0x1fdd6c3b034a7830 } }, { - { 0x9c13a6a52dd8f7a9, 0x2dbb1f8c3efdcabf, 0x961e32405e08f7b5, 0x48c8a121bbe6c9e5 }, - { 0x0a64e28c55dc18fe, 0xe3df9e993399ebdd, 0x79ac432370e2e652, 0x35ff7fc33ae4cc0e }, - { 0xfc415a7c59646445, 0xd224b2d7c128b615, 0x6035c9c905fbb912, 0x42d7a91274429fab } + { -0x63ec595ad2270857, 0x2dbb1f8c3efdcabf, -0x69e1cdbfa1f7084b, 0x48c8a121bbe6c9e5 }, + { 0x0a64e28c55dc18fe, -0x1c206166cc661423, 0x79ac432370e2e652, 0x35ff7fc33ae4cc0e }, + { -0x03bea583a69b9bbb, -0x2ddb4d283ed749eb, 0x6035c9c905fbb912, 0x42d7a91274429fab } }, { - { 0xa9a48947933da5bc, 0x4a58920ec2e979ec, 0x96d8800013e5ac4c, 0x453692d74b48b147 }, - { 0x4e6213e3eaf72ed3, 0x6794981a43acd4e7, 0xff547cde6eb508cb, 0x6fed19dd10fcb532 }, - { 0xdd775d99a8559c6f, 0xf42a2140df003e24, 0x5223e229da928a66, 0x063f46ba6d38f22c } + { -0x565b76b86cc25a44, 0x4a58920ec2e979ec, 
-0x69277fffec1a53b4, 0x453692d74b48b147 }, + { 0x4e6213e3eaf72ed3, 0x6794981a43acd4e7, -0x00ab8321914af735, 0x6fed19dd10fcb532 }, + { -0x2288a26657aa6391, -0x0bd5debf20ffc1dc, 0x5223e229da928a66, 0x063f46ba6d38f22c } }, { - { 0x39843cb737346921, 0xa747fb0738c89447, 0xcb8d8031a245307e, 0x67810f8e6d82f068 }, - { 0xd2d242895f536694, 0xca33a2c542939b2c, 0x986fada6c7ddb95c, 0x5a152c042f712d5d }, + { 0x39843cb737346921, -0x58b804f8c7376bb9, -0x34727fce5dbacf82, 0x67810f8e6d82f068 }, + { -0x2d2dbd76a0ac996c, -0x35cc5d3abd6c64d4, -0x67905259382246a4, 0x5a152c042f712d5d }, { 0x3eeb8fbcd2287db4, 0x72c7d3a301a03e93, 0x5473e88cbd98265a, 0x7324aa515921b403 } }, { - { 0xad23f6dae82354cb, 0x6962502ab6571a6d, 0x9b651636e38e37d1, 0x5cac5005d1a3312f }, - { 0x857942f46c3cbe8e, 0xa1d364b14730c046, 0x1c8ed914d23c41bf, 0x0838e161eef6d5d2 }, - { 0x8cc154cce9e39904, 0x5b3a040b84de6846, 0xc4d8a61cb1be5d6e, 0x40fb897bd8861f02 } + { -0x52dc092517dcab35, 0x6962502ab6571a6d, -0x649ae9c91c71c82f, 0x5cac5005d1a3312f }, + { -0x7a86bd0b93c34172, -0x5e2c9b4eb8cf3fba, 0x1c8ed914d23c41bf, 0x0838e161eef6d5d2 }, + { -0x733eab33161c66fc, 0x5b3a040b84de6846, -0x3b2759e34e41a292, 0x40fb897bd8861f02 } }, { - { 0xe57ed8475ab10761, 0x71435e206fd13746, 0x342f824ecd025632, 0x4b16281ea8791e7b }, - { 0x84c5aa9062de37a1, 0x421da5000d1d96e1, 0x788286306a9242d9, 0x3c5e464a690d10da }, - { 0xd1c101d50b813381, 0xdee60f1176ee6828, 0x0cb68893383f6409, 0x6183c565f6ff484a } + { -0x1a8127b8a54ef89f, 0x71435e206fd13746, 0x342f824ecd025632, 0x4b16281ea8791e7b }, + { -0x7b3a556f9d21c85f, 0x421da5000d1d96e1, 0x788286306a9242d9, 0x3c5e464a690d10da }, + { -0x2e3efe2af47ecc7f, -0x2119f0ee891197d8, 0x0cb68893383f6409, 0x6183c565f6ff484a } }, }, { { - { 0xdb468549af3f666e, 0xd77fcf04f14a0ea5, 0x3df23ff7a4ba0c47, 0x3a10dfe132ce3c85 }, - { 0x741d5a461e6bf9d6, 0x2305b3fc7777a581, 0xd45574a26474d3d9, 0x1926e1dc6401e0ff }, - { 0xe07f4e8aea17cea0, 0x2fd515463a1fc1fd, 0x175322fd31f2c0f1, 0x1fa1d01d861e5d15 } + { -0x24b97ab650c09992, 
-0x288030fb0eb5f15b, 0x3df23ff7a4ba0c47, 0x3a10dfe132ce3c85 }, + { 0x741d5a461e6bf9d6, 0x2305b3fc7777a581, -0x2baa8b5d9b8b2c27, 0x1926e1dc6401e0ff }, + { -0x1f80b17515e83160, 0x2fd515463a1fc1fd, 0x175322fd31f2c0f1, 0x1fa1d01d861e5d15 } }, { { 0x38dcac00d1df94ab, 0x2e712bddd1080de9, 0x7f13e93efdd5e262, 0x73fced18ee9a01e5 }, - { 0xcc8055947d599832, 0x1e4656da37f15520, 0x99f6f7744e059320, 0x773563bc6a75cf33 }, - { 0x06b1e90863139cb3, 0xa493da67c5a03ecd, 0x8d77cec8ad638932, 0x1f426b701b864f44 } + { -0x337faa6b82a667ce, 0x1e4656da37f15520, -0x6609088bb1fa6ce0, 0x773563bc6a75cf33 }, + { 0x06b1e90863139cb3, -0x5b6c25983a5fc133, -0x72883137529c76ce, 0x1f426b701b864f44 } }, { - { 0xf17e35c891a12552, 0xb76b8153575e9c76, 0xfa83406f0d9b723e, 0x0b76bb1b3fa7e438 }, - { 0xefc9264c41911c01, 0xf1a3b7b817a22c25, 0x5875da6bf30f1447, 0x4e1af5271d31b090 }, - { 0x08b8c1f97f92939b, 0xbe6771cbd444ab6e, 0x22e5646399bb8017, 0x7b6dd61eb772a955 } + { -0x0e81ca376e5edaae, -0x48947eaca8a1638a, -0x057cbf90f2648dc2, 0x0b76bb1b3fa7e438 }, + { -0x1036d9b3be6ee3ff, -0x0e5c4847e85dd3db, 0x5875da6bf30f1447, 0x4e1af5271d31b090 }, + { 0x08b8c1f97f92939b, -0x41988e342bbb5492, 0x22e5646399bb8017, 0x7b6dd61eb772a955 } }, { - { 0x5730abf9ab01d2c7, 0x16fb76dc40143b18, 0x866cbe65a0cbb281, 0x53fa9b659bff6afe }, - { 0xb7adc1e850f33d92, 0x7998fa4f608cd5cf, 0xad962dbd8dfc5bdb, 0x703e9bceaf1d2f4f }, - { 0x6c14c8e994885455, 0x843a5d6665aed4e5, 0x181bb73ebcd65af1, 0x398d93e5c4c61f50 } + { 0x5730abf9ab01d2c7, 0x16fb76dc40143b18, -0x7993419a5f344d7f, 0x53fa9b659bff6afe }, + { -0x48523e17af0cc26e, 0x7998fa4f608cd5cf, -0x5269d2427203a425, 0x703e9bceaf1d2f4f }, + { 0x6c14c8e994885455, -0x7bc5a2999a512b1b, 0x181bb73ebcd65af1, 0x398d93e5c4c61f50 } }, { - { 0xc3877c60d2e7e3f2, 0x3b34aaa030828bb1, 0x283e26e7739ef138, 0x699c9c9002c30577 }, - { 0x1c4bd16733e248f3, 0xbd9e128715bf0a5f, 0xd43f8cf0a10b0376, 0x53b09b5ddf191b13 }, - { 0xf306a7235946f1cc, 0x921718b5cce5d97d, 0x28cdd24781b4e975, 0x51caf30c6fcdd907 } + { 
-0x3c78839f2d181c0e, 0x3b34aaa030828bb1, 0x283e26e7739ef138, 0x699c9c9002c30577 }, + { 0x1c4bd16733e248f3, -0x4261ed78ea40f5a1, -0x2bc0730f5ef4fc8a, 0x53b09b5ddf191b13 }, + { -0x0cf958dca6b90e34, -0x6de8e74a331a2683, 0x28cdd24781b4e975, 0x51caf30c6fcdd907 } }, { - { 0x737af99a18ac54c7, 0x903378dcc51cb30f, 0x2b89bc334ce10cc7, 0x12ae29c189f8e99a }, - { 0xa60ba7427674e00a, 0x630e8570a17a7bf3, 0x3758563dcf3324cc, 0x5504aa292383fdaa }, - { 0xa99ec0cb1f0d01cf, 0x0dd1efcc3a34f7ae, 0x55ca7521d09c4e22, 0x5fd14fe958eba5ea } + { 0x737af99a18ac54c7, -0x6fcc87233ae34cf1, 0x2b89bc334ce10cc7, 0x12ae29c189f8e99a }, + { -0x59f458bd898b1ff6, 0x630e8570a17a7bf3, 0x3758563dcf3324cc, 0x5504aa292383fdaa }, + { -0x56613f34e0f2fe31, 0x0dd1efcc3a34f7ae, 0x55ca7521d09c4e22, 0x5fd14fe958eba5ea } }, { - { 0x3c42fe5ebf93cb8e, 0xbedfa85136d4565f, 0xe0f0859e884220e8, 0x7dd73f960725d128 }, - { 0xb5dc2ddf2845ab2c, 0x069491b10a7fe993, 0x4daaf3d64002e346, 0x093ff26e586474d1 }, - { 0xb10d24fe68059829, 0x75730672dbaf23e5, 0x1367253ab457ac29, 0x2f59bcbc86b470a4 } + { 0x3c42fe5ebf93cb8e, -0x412057aec92ba9a1, -0x1f0f7a6177bddf18, 0x7dd73f960725d128 }, + { -0x4a23d220d7ba54d4, 0x069491b10a7fe993, 0x4daaf3d64002e346, 0x093ff26e586474d1 }, + { -0x4ef2db0197fa67d7, 0x75730672dbaf23e5, 0x1367253ab457ac29, 0x2f59bcbc86b470a4 } }, { - { 0x7041d560b691c301, 0x85201b3fadd7e71e, 0x16c2e16311335585, 0x2aa55e3d010828b1 }, - { 0x83847d429917135f, 0xad1b911f567d03d7, 0x7e7748d9be77aad1, 0x5458b42e2e51af4a }, - { 0xed5192e60c07444f, 0x42c54e2d74421d10, 0x352b4c82fdb5c864, 0x13e9004a8a768664 } + { 0x7041d560b691c301, -0x7adfe4c0522818e2, 0x16c2e16311335585, 0x2aa55e3d010828b1 }, + { -0x7c7b82bd66e8eca1, -0x52e46ee0a982fc29, 0x7e7748d9be77aad1, 0x5458b42e2e51af4a }, + { -0x12ae6d19f3f8bbb1, 0x42c54e2d74421d10, 0x352b4c82fdb5c864, 0x13e9004a8a768664 } }, }, { { - { 0x1e6284c5806b467c, 0xc5f6997be75d607b, 0x8b67d958b378d262, 0x3d88d66a81cd8b70 }, - { 0xcbb5b5556c032bff, 0xdf7191b729297a3a, 0xc1ff7326aded81bb, 
0x71ade8bb68be03f5 }, - { 0x8b767a93204ed789, 0x762fcacb9fa0ae2a, 0x771febcc6dce4887, 0x343062158ff05fb3 } + { 0x1e6284c5806b467c, -0x3a09668418a29f85, -0x749826a74c872d9e, 0x3d88d66a81cd8b70 }, + { -0x344a4aaa93fcd401, -0x208e6e48d6d685c6, -0x3e008cd952127e45, 0x71ade8bb68be03f5 }, + { -0x7489856cdfb12877, 0x762fcacb9fa0ae2a, 0x771febcc6dce4887, 0x343062158ff05fb3 } }, { - { 0xfce219072a7b31b4, 0x4d7adc75aa578016, 0x0ec276a687479324, 0x6d6d9d5d1fda4beb }, - { 0xe05da1a7e1f5bf49, 0x26457d6dd4736092, 0x77dcb07773cc32f6, 0x0a5d94969cdd5fcd }, - { 0x22b1a58ae9b08183, 0xfd95d071c15c388b, 0xa9812376850a0517, 0x33384cbabb7f335e } + { -0x031de6f8d584ce4c, 0x4d7adc75aa578016, 0x0ec276a687479324, 0x6d6d9d5d1fda4beb }, + { -0x1fa25e581e0a40b7, 0x26457d6dd4736092, 0x77dcb07773cc32f6, 0x0a5d94969cdd5fcd }, + { 0x22b1a58ae9b08183, -0x026a2f8e3ea3c775, -0x567edc897af5fae9, 0x33384cbabb7f335e } }, { - { 0x33bc627a26218b8d, 0xea80b21fc7a80c61, 0x9458b12b173e9ee6, 0x076247be0e2f3059 }, - { 0x3c6fa2680ca2c7b5, 0x1b5082046fb64fda, 0xeb53349c5431d6de, 0x5278b38f6b879c89 }, - { 0x52e105f61416375a, 0xec97af3685abeba4, 0x26e6b50623a67c36, 0x5cf0e856f3d4fb01 } + { 0x33bc627a26218b8d, -0x157f4de03857f39f, -0x6ba74ed4e8c1611a, 0x076247be0e2f3059 }, + { 0x3c6fa2680ca2c7b5, 0x1b5082046fb64fda, -0x14accb63abce2922, 0x5278b38f6b879c89 }, + { 0x52e105f61416375a, -0x136850c97a54145c, 0x26e6b50623a67c36, 0x5cf0e856f3d4fb01 } }, { - { 0xbeaece313db342a8, 0xcba3635b842db7ee, 0xe88c6620817f13ef, 0x1b9438aa4e76d5c6 }, - { 0xf6c968731ae8cab4, 0x5e20741ecb4f92c5, 0x2da53be58ccdbc3e, 0x2dddfea269970df7 }, - { 0x8a50777e166f031a, 0x067b39f10fb7a328, 0x1925c9a6010fbd76, 0x6df9b575cc740905 } + { -0x415131cec24cbd58, -0x345c9ca47bd24812, -0x177399df7e80ec11, 0x1b9438aa4e76d5c6 }, + { -0x0936978ce517354c, 0x5e20741ecb4f92c5, 0x2da53be58ccdbc3e, 0x2dddfea269970df7 }, + { -0x75af8881e990fce6, 0x067b39f10fb7a328, 0x1925c9a6010fbd76, 0x6df9b575cc740905 } }, { - { 0xecdfc35b48cade41, 0x6a88471fb2328270, 
0x740a4a2440a01b6a, 0x471e5796003b5f29 }, - { 0x42c1192927f6bdcf, 0x8f91917a403d61ca, 0xdc1c5a668b9e1f61, 0x1596047804ec0f8d }, - { 0xda96bbb3aced37ac, 0x7a2423b5e9208cea, 0x24cc5c3038aebae2, 0x50c356afdc5dae2f } + { -0x13203ca4b73521bf, 0x6a88471fb2328270, 0x740a4a2440a01b6a, 0x471e5796003b5f29 }, + { 0x42c1192927f6bdcf, -0x706e6e85bfc29e36, -0x23e3a5997461e09f, 0x1596047804ec0f8d }, + { -0x2569444c5312c854, 0x7a2423b5e9208cea, 0x24cc5c3038aebae2, 0x50c356afdc5dae2f } }, { - { 0xcfed9cdf1b31b964, 0xf486a9858ca51af3, 0x14897265ea8c1f84, 0x784a53dd932acc00 }, - { 0x09dcbf4341c30318, 0xeeba061183181dce, 0xc179c0cedc1e29a1, 0x1dbf7b89073f35b0 }, - { 0x2d99f9df14fc4920, 0x76ccb60cc4499fe5, 0xa4132cbbe5cf0003, 0x3f93d82354f000ea } + { -0x30126320e4ce469c, -0x0b79567a735ae50d, 0x14897265ea8c1f84, 0x784a53dd932acc00 }, + { 0x09dcbf4341c30318, -0x1145f9ee7ce7e232, -0x3e863f3123e1d65f, 0x1dbf7b89073f35b0 }, + { 0x2d99f9df14fc4920, 0x76ccb60cc4499fe5, -0x5becd3441a30fffd, 0x3f93d82354f000ea } }, { - { 0xeaac12d179e14978, 0xff923ff3bbebff5e, 0x4af663e40663ce27, 0x0fd381a811a5f5ff }, - { 0x8183e7689e04ce85, 0x678fb71e04465341, 0xad92058f6688edac, 0x5da350d3532b099a }, - { 0xf256aceca436df54, 0x108b6168ae69d6e8, 0x20d986cb6b5d036c, 0x655957b9fee2af50 } + { -0x1553ed2e861eb688, -0x006dc00c441400a2, 0x4af663e40663ce27, 0x0fd381a811a5f5ff }, + { -0x7e7c189761fb317b, 0x678fb71e04465341, -0x526dfa7099771254, 0x5da350d3532b099a }, + { -0x0da953135bc920ac, 0x108b6168ae69d6e8, 0x20d986cb6b5d036c, 0x655957b9fee2af50 } }, { - { 0xbdc1409bd002d0ac, 0x66660245b5ccd9a6, 0x82317dc4fade85ec, 0x02fe934b6ad7df0d }, - { 0xaea8b07fa902030f, 0xf88c766af463d143, 0x15b083663c787a60, 0x08eab1148267a4a8 }, - { 0xef5cf100cfb7ea74, 0x22897633a1cb42ac, 0xd4ce0c54cef285e2, 0x30408c048a146a55 } + { -0x423ebf642ffd2f54, 0x66660245b5ccd9a6, -0x7dce823b05217a14, 0x02fe934b6ad7df0d }, + { -0x51574f8056fdfcf1, -0x077389950b9c2ebd, 0x15b083663c787a60, 0x08eab1148267a4a8 }, + { -0x10a30eff3048158c, 
0x22897633a1cb42ac, -0x2b31f3ab310d7a1e, 0x30408c048a146a55 } }, }, { { - { 0xbb2e00c9193b877f, 0xece3a890e0dc506b, 0xecf3b7c036de649f, 0x5f46040898de9e1a }, - { 0x739d8845832fcedb, 0xfa38d6c9ae6bf863, 0x32bc0dcab74ffef7, 0x73937e8814bce45e }, - { 0xb9037116297bf48d, 0xa9d13b22d4f06834, 0xe19715574696bdc6, 0x2cf8a4e891d5e835 } + { -0x44d1ff36e6c47881, -0x131c576f1f23af95, -0x130c483fc9219b61, 0x5f46040898de9e1a }, + { 0x739d8845832fcedb, -0x05c729365194079d, 0x32bc0dcab74ffef7, 0x73937e8814bce45e }, + { -0x46fc8ee9d6840b73, -0x562ec4dd2b0f97cc, -0x1e68eaa8b969423a, 0x2cf8a4e891d5e835 } }, { - { 0x2cb5487e17d06ba2, 0x24d2381c3950196b, 0xd7659c8185978a30, 0x7a6f7f2891d6a4f6 }, - { 0x6d93fd8707110f67, 0xdd4c09d37c38b549, 0x7cb16a4cc2736a86, 0x2049bd6e58252a09 }, - { 0x7d09fd8d6a9aef49, 0xf0ee60be5b3db90b, 0x4c21b52c519ebfd4, 0x6011aadfc545941d } + { 0x2cb5487e17d06ba2, 0x24d2381c3950196b, -0x289a637e7a6875d0, 0x7a6f7f2891d6a4f6 }, + { 0x6d93fd8707110f67, -0x22b3f62c83c74ab7, 0x7cb16a4cc2736a86, 0x2049bd6e58252a09 }, + { 0x7d09fd8d6a9aef49, -0x0f119f41a4c246f5, 0x4c21b52c519ebfd4, 0x6011aadfc545941d } }, { - { 0x63ded0c802cbf890, 0xfbd098ca0dff6aaa, 0x624d0afdb9b6ed99, 0x69ce18b779340b1e }, - { 0x5f67926dcf95f83c, 0x7c7e856171289071, 0xd6a1e7f3998f7a5b, 0x6fc5cc1b0b62f9e0 }, - { 0xd1ef5528b29879cb, 0xdd1aae3cd47e9092, 0x127e0442189f2352, 0x15596b3ae57101f1 } + { 0x63ded0c802cbf890, -0x042f6735f2009556, 0x624d0afdb9b6ed99, 0x69ce18b779340b1e }, + { 0x5f67926dcf95f83c, 0x7c7e856171289071, -0x295e180c667085a5, 0x6fc5cc1b0b62f9e0 }, + { -0x2e10aad74d678635, -0x22e551c32b816f6e, 0x127e0442189f2352, 0x15596b3ae57101f1 } }, { { 0x09ff31167e5124ca, 0x0be4158bd9c745df, 0x292b7d227ef556e5, 0x3aa4e241afb6d138 }, - { 0x462739d23f9179a2, 0xff83123197d6ddcf, 0x1307deb553f2148a, 0x0d2237687b5f4dda }, + { 0x462739d23f9179a2, -0x007cedce68292231, 0x1307deb553f2148a, 0x0d2237687b5f4dda }, { 0x2cc138bf2a3305f5, 0x48583f8fa2e926c3, 0x083ab1a25549d2eb, 0x32fcaa6e4687a36c } }, { - { 
0x3207a4732787ccdf, 0x17e31908f213e3f8, 0xd5b2ecd7f60d964e, 0x746f6336c2600be9 }, - { 0x7bc56e8dc57d9af5, 0x3e0bd2ed9df0bdf2, 0xaac014de22efe4a3, 0x4627e9cefebd6a5c }, - { 0x3f4af345ab6c971c, 0xe288eb729943731f, 0x33596a8a0344186d, 0x7b4917007ed66293 } + { 0x3207a4732787ccdf, 0x17e31908f213e3f8, -0x2a4d132809f269b2, 0x746f6336c2600be9 }, + { 0x7bc56e8dc57d9af5, 0x3e0bd2ed9df0bdf2, -0x553feb21dd101b5d, 0x4627e9cefebd6a5c }, + { 0x3f4af345ab6c971c, -0x1d77148d66bc8ce1, 0x33596a8a0344186d, 0x7b4917007ed66293 } }, { - { 0x54341b28dd53a2dd, 0xaa17905bdf42fc3f, 0x0ff592d94dd2f8f4, 0x1d03620fe08cd37d }, + { 0x54341b28dd53a2dd, -0x55e86fa420bd03c1, 0x0ff592d94dd2f8f4, 0x1d03620fe08cd37d }, { 0x2d85fb5cab84b064, 0x497810d289f3bc14, 0x476adc447b15ce0c, 0x122ba376f844fd7b }, - { 0xc20232cda2b4e554, 0x9ed0fd42115d187f, 0x2eabb4be7dd479d9, 0x02c70bf52b68ec4c } + { -0x3dfdcd325d4b1aac, -0x612f02bdeea2e781, 0x2eabb4be7dd479d9, 0x02c70bf52b68ec4c } }, { - { 0xace532bf458d72e1, 0x5be768e07cb73cb5, 0x56cf7d94ee8bbde7, 0x6b0697e3feb43a03 }, - { 0xa287ec4b5d0b2fbb, 0x415c5790074882ca, 0xe044a61ec1d0815c, 0x26334f0a409ef5e0 }, - { 0xb6c8f04adf62a3c0, 0x3ef000ef076da45d, 0x9c9cb95849f0d2a9, 0x1cc37f43441b2fae } + { -0x531acd40ba728d1f, 0x5be768e07cb73cb5, 0x56cf7d94ee8bbde7, 0x6b0697e3feb43a03 }, + { -0x5d7813b4a2f4d045, 0x415c5790074882ca, -0x1fbb59e13e2f7ea4, 0x26334f0a409ef5e0 }, + { -0x49370fb5209d5c40, 0x3ef000ef076da45d, -0x636346a7b60f2d57, 0x1cc37f43441b2fae } }, { - { 0xd76656f1c9ceaeb9, 0x1c5b15f818e5656a, 0x26e72832844c2334, 0x3a346f772f196838 }, - { 0x508f565a5cc7324f, 0xd061c4c0e506a922, 0xfb18abdb5c45ac19, 0x6c6809c10380314a }, - { 0xd2d55112e2da6ac8, 0xe9bd0331b1e851ed, 0x960746dd8ec67262, 0x05911b9f6ef7c5d0 } + { -0x2899a90e36315147, 0x1c5b15f818e5656a, 0x26e72832844c2334, 0x3a346f772f196838 }, + { 0x508f565a5cc7324f, -0x2f9e3b3f1af956de, -0x04e75424a3ba53e7, 0x6c6809c10380314a }, + { -0x2d2aaeed1d259538, -0x1642fcce4e17ae13, -0x69f8b92271398d9e, 0x05911b9f6ef7c5d0 } }, 
}, { { - { 0x01c18980c5fe9f94, 0xcd656769716fd5c8, 0x816045c3d195a086, 0x6e2b7f3266cc7982 }, - { 0xe9dcd756b637ff2d, 0xec4c348fc987f0c4, 0xced59285f3fbc7b7, 0x3305354793e1ea87 }, - { 0xcc802468f7c3568f, 0x9de9ba8219974cb3, 0xabb7229cb5b81360, 0x44e2017a6fbeba62 } + { 0x01c18980c5fe9f94, -0x329a98968e902a38, -0x7e9fba3c2e6a5f7a, 0x6e2b7f3266cc7982 }, + { -0x162328a949c800d3, -0x13b3cb7036780f3c, -0x312a6d7a0c043849, 0x3305354793e1ea87 }, + { -0x337fdb97083ca971, -0x6216457de668b34d, -0x5448dd634a47eca0, 0x44e2017a6fbeba62 } }, { - { 0x87f82cf3b6ca6ecd, 0x580f893e18f4a0c2, 0x058930072604e557, 0x6cab6ac256d19c1d }, - { 0xc4c2a74354dab774, 0x8e5d4c3c4eaf031a, 0xb76c23d242838f17, 0x749a098f68dce4ea }, - { 0xdcdfe0a02cc1de60, 0x032665ff51c5575b, 0x2c0c32f1073abeeb, 0x6a882014cd7b8606 } + { -0x7807d30c49359133, 0x580f893e18f4a0c2, 0x058930072604e557, 0x6cab6ac256d19c1d }, + { -0x3b3d58bcab25488c, -0x71a2b3c3b150fce6, -0x4893dc2dbd7c70e9, 0x749a098f68dce4ea }, + { -0x23201f5fd33e21a0, 0x032665ff51c5575b, 0x2c0c32f1073abeeb, 0x6a882014cd7b8606 } }, { - { 0xd111d17caf4feb6e, 0x050bba42b33aa4a3, 0x17514c3ceeb46c30, 0x54bedb8b1bc27d75 }, - { 0xa52a92fea4747fb5, 0xdc12a4491fa5ab89, 0xd82da94bb847a4ce, 0x4d77edce9512cc4e }, - { 0x77c8e14577e2189c, 0xa3e46f6aff99c445, 0x3144dfc86d335343, 0x3a96559e7c4216a9 } + { -0x2eee2e8350b01492, 0x050bba42b33aa4a3, 0x17514c3ceeb46c30, 0x54bedb8b1bc27d75 }, + { -0x5ad56d015b8b804b, -0x23ed5bb6e05a5477, -0x27d256b447b85b32, 0x4d77edce9512cc4e }, + { 0x77c8e14577e2189c, -0x5c1b909500663bbb, 0x3144dfc86d335343, 0x3a96559e7c4216a9 } }, { - { 0x4493896880baaa52, 0x4c98afc4f285940e, 0xef4aa79ba45448b6, 0x5278c510a57aae7f }, - { 0x12550d37f42ad2ee, 0x8b78e00498a1fbf5, 0x5d53078233894cb2, 0x02c84e4e3e498d0c }, - { 0xa54dd074294c0b94, 0xf55d46b8df18ffb6, 0xf06fecc58dae8366, 0x588657668190d165 } + { 0x4493896880baaa52, 0x4c98afc4f285940e, -0x10b558645babb74a, 0x5278c510a57aae7f }, + { 0x12550d37f42ad2ee, -0x74871ffb675e040b, 0x5d53078233894cb2, 
0x02c84e4e3e498d0c }, + { -0x5ab22f8bd6b3f46c, -0x0aa2b94720e7004a, -0x0f90133a72517c9a, 0x588657668190d165 } }, { - { 0xbf5834f03de25cc3, 0xb887c8aed6815496, 0x5105221a9481e892, 0x6760ed19f7723f93 }, - { 0xd47712311aef7117, 0x50343101229e92c7, 0x7a95e1849d159b97, 0x2449959b8b5d29c9 }, + { -0x40a7cb0fc21da33d, -0x47783751297eab6a, 0x5105221a9481e892, 0x6760ed19f7723f93 }, + { -0x2b88edcee5108ee9, 0x50343101229e92c7, 0x7a95e1849d159b97, 0x2449959b8b5d29c9 }, { 0x669ba3b7ac35e160, 0x2eccf73fba842056, 0x1aec1f17c0804f07, 0x0d96bc031856f4e7 } }, { - { 0xb1d534b0cc7505e1, 0x32cd003416c35288, 0xcb36a5800762c29d, 0x5bfe69b9237a0bf8 }, - { 0x3318be7775c52d82, 0x4cb764b554d0aab9, 0xabcf3d27cc773d91, 0x3bf4d1848123288a }, - { 0x183eab7e78a151ab, 0xbbe990c999093763, 0xff717d6e4ac7e335, 0x4c5cddb325f39f88 } + { -0x4e2acb4f338afa1f, 0x32cd003416c35288, -0x34c95a7ff89d3d63, 0x5bfe69b9237a0bf8 }, + { 0x3318be7775c52d82, 0x4cb764b554d0aab9, -0x5430c2d83388c26f, 0x3bf4d1848123288a }, + { 0x183eab7e78a151ab, -0x44166f3666f6c89d, -0x008e8291b5381ccb, 0x4c5cddb325f39f88 } }, { - { 0x57750967e7a9f902, 0x2c37fdfc4f5b467e, 0xb261663a3177ba46, 0x3a375e78dc2d532b }, - { 0xc0f6b74d6190a6eb, 0x20ea81a42db8f4e4, 0xa8bd6f7d97315760, 0x33b1d60262ac7c21 }, - { 0x8141e72f2d4dddea, 0xe6eafe9862c607c8, 0x23c28458573cafd0, 0x46b9476f4ff97346 } + { 0x57750967e7a9f902, 0x2c37fdfc4f5b467e, -0x4d9e99c5ce8845ba, 0x3a375e78dc2d532b }, + { -0x3f0948b29e6f5915, 0x20ea81a42db8f4e4, -0x5742908268cea8a0, 0x33b1d60262ac7c21 }, + { -0x7ebe18d0d2b22216, -0x191501679d39f838, 0x23c28458573cafd0, 0x46b9476f4ff97346 } }, { { 0x1215505c0d58359f, 0x2a2013c7fc28c46b, 0x24a0a1af89ea664e, 0x4400b638a1130e1f }, - { 0x0c1ffea44f901e5c, 0x2b0b6fb72184b782, 0xe587ff910114db88, 0x37130f364785a142 }, + { 0x0c1ffea44f901e5c, 0x2b0b6fb72184b782, -0x1a78006efeeb2478, 0x37130f364785a142 }, { 0x3a01b76496ed19c3, 0x31e00ab0ed327230, 0x520a885783ca15b1, 0x06aab9875accbec7 } }, }, { { { 0x5349acf3512eeaef, 0x20c141d31cc1cb49, 
0x24180c07a99a688d, 0x555ef9d1c64b2d17 }, - { 0xc1339983f5df0ebb, 0xc0f3758f512c4cac, 0x2cf1130a0bb398e1, 0x6b3cecf9aa270c62 }, + { -0x3ecc667c0a20f145, -0x3f0c8a70aed3b354, 0x2cf1130a0bb398e1, 0x6b3cecf9aa270c62 }, { 0x36a770ba3b73bd08, 0x624aef08a3afbf0c, 0x5737ff98b40946f2, 0x675f4de13381749d } }, { - { 0xa12ff6d93bdab31d, 0x0725d80f9d652dfe, 0x019c4ff39abe9487, 0x60f450b882cd3c43 }, - { 0x0e2c52036b1782fc, 0x64816c816cad83b4, 0xd0dcbdd96964073e, 0x13d99df70164c520 }, + { -0x5ed00926c4254ce3, 0x0725d80f9d652dfe, 0x019c4ff39abe9487, 0x60f450b882cd3c43 }, + { 0x0e2c52036b1782fc, 0x64816c816cad83b4, -0x2f234226969bf8c2, 0x13d99df70164c520 }, { 0x014b5ec321e5c0ca, 0x4fcb69c9d719bfa2, 0x4e5f1c18750023a0, 0x1c06de9e55edac80 } }, { - { 0xffd52b40ff6d69aa, 0x34530b18dc4049bb, 0x5e4a5c2fa34d9897, 0x78096f8e7d32ba2d }, - { 0x990f7ad6a33ec4e2, 0x6608f938be2ee08e, 0x9ca143c563284515, 0x4cf38a1fec2db60d }, - { 0xa0aaaa650dfa5ce7, 0xf9c49e2a48b5478c, 0x4f09cc7d7003725b, 0x373cad3a26091abe } + { -0x002ad4bf00929656, 0x34530b18dc4049bb, 0x5e4a5c2fa34d9897, 0x78096f8e7d32ba2d }, + { -0x66f085295cc13b1e, 0x6608f938be2ee08e, -0x635ebc3a9cd7baeb, 0x4cf38a1fec2db60d }, + { -0x5f55559af205a319, -0x063b61d5b74ab874, 0x4f09cc7d7003725b, 0x373cad3a26091abe } }, { - { 0xf1bea8fb89ddbbad, 0x3bcb2cbc61aeaecb, 0x8f58a7bb1f9b8d9d, 0x21547eda5112a686 }, - { 0xb294634d82c9f57c, 0x1fcbfde124934536, 0x9e9c4db3418cdb5a, 0x0040f3d9454419fc }, - { 0xdefde939fd5986d3, 0xf4272c89510a380c, 0xb72ba407bb3119b9, 0x63550a334a254df4 } + { -0x0e41570476224453, 0x3bcb2cbc61aeaecb, -0x70a75844e0647263, 0x21547eda5112a686 }, + { -0x4d6b9cb27d360a84, 0x1fcbfde124934536, -0x6163b24cbe7324a6, 0x0040f3d9454419fc }, + { -0x210216c602a6792d, -0x0bd8d376aef5c7f4, -0x48d45bf844cee647, 0x63550a334a254df4 } }, { - { 0x9bba584572547b49, 0xf305c6fae2c408e0, 0x60e8fa69c734f18d, 0x39a92bafaa7d767a }, - { 0x6507d6edb569cf37, 0x178429b00ca52ee1, 0xea7c0090eb6bd65d, 0x3eea62c7daf78f51 }, - { 0x9d24c713e693274e, 
0x5f63857768dbd375, 0x70525560eb8ab39a, 0x68436a0665c9c4cd } + { -0x6445a7ba8dab84b7, -0x0cfa39051d3bf720, 0x60e8fa69c734f18d, 0x39a92bafaa7d767a }, + { 0x6507d6edb569cf37, 0x178429b00ca52ee1, -0x1583ff6f149429a3, 0x3eea62c7daf78f51 }, + { -0x62db38ec196cd8b2, 0x5f63857768dbd375, 0x70525560eb8ab39a, 0x68436a0665c9c4cd } }, { - { 0x1e56d317e820107c, 0xc5266844840ae965, 0xc1e0a1c6320ffc7a, 0x5373669c91611472 }, - { 0xbc0235e8202f3f27, 0xc75c00e264f975b0, 0x91a4e9d5a38c2416, 0x17b6e7f68ab789f9 }, - { 0x5d2814ab9a0e5257, 0x908f2084c9cab3fc, 0xafcaf5885b2d1eca, 0x1cb4b5a678f87d11 } + { 0x1e56d317e820107c, -0x3ad997bb7bf5169b, -0x3e1f5e39cdf00386, 0x5373669c91611472 }, + { -0x43fdca17dfd0c0d9, -0x38a3ff1d9b068a50, -0x6e5b162a5c73dbea, 0x17b6e7f68ab789f9 }, + { 0x5d2814ab9a0e5257, -0x6f70df7b36354c04, -0x50350a77a4d2e136, 0x1cb4b5a678f87d11 } }, { - { 0x6b74aa62a2a007e7, 0xf311e0b0f071c7b1, 0x5707e438000be223, 0x2dc0fd2d82ef6eac }, - { 0xb664c06b394afc6c, 0x0c88de2498da5fb1, 0x4f8d03164bcad834, 0x330bca78de7434a2 }, - { 0x982eff841119744e, 0xf9695e962b074724, 0xc58ac14fbfc953fb, 0x3c31be1b369f1cf5 } + { 0x6b74aa62a2a007e7, -0x0cee1f4f0f8e384f, 0x5707e438000be223, 0x2dc0fd2d82ef6eac }, + { -0x499b3f94c6b50394, 0x0c88de2498da5fb1, 0x4f8d03164bcad834, 0x330bca78de7434a2 }, + { -0x67d1007beee68bb2, -0x0696a169d4f8b8dc, -0x3a753eb04036ac05, 0x3c31be1b369f1cf5 } }, { - { 0xc168bc93f9cb4272, 0xaeb8711fc7cedb98, 0x7f0e52aa34ac8d7a, 0x41cec1097e7d55bb }, - { 0xb0f4864d08948aee, 0x07dc19ee91ba1c6f, 0x7975cdaea6aca158, 0x330b61134262d4bb }, - { 0xf79619d7a26d808a, 0xbb1fd49e1d9e156d, 0x73d7c36cdba1df27, 0x26b44cd91f28777d } + { -0x3e97436c0634bd8e, -0x51478ee038312468, 0x7f0e52aa34ac8d7a, 0x41cec1097e7d55bb }, + { -0x4f0b79b2f76b7512, 0x07dc19ee91ba1c6f, 0x7975cdaea6aca158, 0x330b61134262d4bb }, + { -0x0869e6285d927f76, -0x44e02b61e261ea93, 0x73d7c36cdba1df27, 0x26b44cd91f28777d } }, }, { { - { 0xaf44842db0285f37, 0x8753189047efc8df, 0x9574e091f820979a, 0x0e378d6069615579 }, + { 
-0x50bb7bd24fd7a0c9, -0x78ace76fb8103721, -0x6a8b1f6e07df6866, 0x0e378d6069615579 }, { 0x300a9035393aa6d8, 0x2b501131a12bb1cd, 0x7b1ff677f093c222, 0x4309c1f8cab82bad }, - { 0xd9fa917183075a55, 0x4bdb5ad26b009fdc, 0x7829ad2cd63def0e, 0x078fc54975fd3877 } + { -0x26056e8e7cf8a5ab, 0x4bdb5ad26b009fdc, 0x7829ad2cd63def0e, 0x078fc54975fd3877 } }, { - { 0xe2004b5bb833a98a, 0x44775dec2d4c3330, 0x3aa244067eace913, 0x272630e3d58e00a9 }, - { 0x87dfbd1428878f2d, 0x134636dd1e9421a1, 0x4f17c951257341a3, 0x5df98d4bad296cb8 }, - { 0xf3678fd0ecc90b54, 0xf001459b12043599, 0x26725fbc3758b89b, 0x4325e4aa73a719ae } + { -0x1dffb4a447cc5676, 0x44775dec2d4c3330, 0x3aa244067eace913, 0x272630e3d58e00a9 }, + { -0x782042ebd77870d3, 0x134636dd1e9421a1, 0x4f17c951257341a3, 0x5df98d4bad296cb8 }, + { -0x0c98702f1336f4ac, -0x0ffeba64edfbca67, 0x26725fbc3758b89b, 0x4325e4aa73a719ae } }, { - { 0xed24629acf69f59d, 0x2a4a1ccedd5abbf4, 0x3535ca1f56b2d67b, 0x5d8c68d043b1b42d }, + { -0x12db9d6530960a63, 0x2a4a1ccedd5abbf4, 0x3535ca1f56b2d67b, 0x5d8c68d043b1b42d }, { 0x657dc6ef433c3493, 0x65375e9f80dbf8c3, 0x47fd2d465b372dae, 0x4966ab79796e7947 }, - { 0xee332d4de3b42b0a, 0xd84e5a2b16a4601c, 0x78243877078ba3e4, 0x77ed1eb4184ee437 } + { -0x11ccd2b21c4bd4f6, -0x27b1a5d4e95b9fe4, 0x78243877078ba3e4, 0x77ed1eb4184ee437 } }, { - { 0x185d43f89e92ed1a, 0xb04a1eeafe4719c6, 0x499fbe88a6f03f4f, 0x5d8b0d2f3c859bdd }, - { 0xbfd4e13f201839a0, 0xaeefffe23e3df161, 0xb65b04f06b5d1fe3, 0x52e085fb2b62fbc0 }, - { 0x124079eaa54cf2ba, 0xd72465eb001b26e7, 0x6843bcfdc97af7fd, 0x0524b42b55eacd02 } + { 0x185d43f89e92ed1a, -0x4fb5e11501b8e63a, 0x499fbe88a6f03f4f, 0x5d8b0d2f3c859bdd }, + { -0x402b1ec0dfe7c660, -0x5110001dc1c20e9f, -0x49a4fb0f94a2e01d, 0x52e085fb2b62fbc0 }, + { 0x124079eaa54cf2ba, -0x28db9a14ffe4d919, 0x6843bcfdc97af7fd, 0x0524b42b55eacd02 } }, { - { 0xbc18dcad9b829eac, 0x23ae7d28b5f579d0, 0xc346122a69384233, 0x1a6110b2e7d4ac89 }, - { 0xfd0d5dbee45447b0, 0x6cec351a092005ee, 0x99a47844567579cb, 0x59d242a216e7fa45 }, + 
{ -0x43e72352647d6154, 0x23ae7d28b5f579d0, -0x3cb9edd596c7bdcd, 0x1a6110b2e7d4ac89 }, + { -0x02f2a2411babb850, 0x6cec351a092005ee, -0x665b87bba98a8635, 0x59d242a216e7fa45 }, { 0x4f833f6ae66997ac, 0x6849762a361839a4, 0x6985dec1970ab525, 0x53045e89dcb1f546 } }, { - { 0x84da3cde8d45fe12, 0xbd42c218e444e2d2, 0xa85196781f7e3598, 0x7642c93f5616e2b2 }, - { 0xcb8bb346d75353db, 0xfcfcb24bae511e22, 0xcba48d40d50ae6ef, 0x26e3bae5f4f7cb5d }, - { 0x2323daa74595f8e4, 0xde688c8b857abeb4, 0x3fc48e961c59326e, 0x0b2e73ca15c9b8ba } + { -0x7b25c32172ba01ee, -0x42bd3de71bbb1d2e, -0x57ae6987e081ca68, 0x7642c93f5616e2b2 }, + { -0x34744cb928acac25, -0x03034db451aee1de, -0x345b72bf2af51911, 0x26e3bae5f4f7cb5d }, + { 0x2323daa74595f8e4, -0x219773747a85414c, 0x3fc48e961c59326e, 0x0b2e73ca15c9b8ba } }, { - { 0x0e3fbfaf79c03a55, 0x3077af054cbb5acf, 0xd5c55245db3de39f, 0x015e68c1476a4af7 }, - { 0xd6bb4428c17f5026, 0x9eb27223fb5a9ca7, 0xe37ba5031919c644, 0x21ce380db59a6602 }, - { 0xc1d5285220066a38, 0x95603e523570aef3, 0x832659a7226b8a4d, 0x5dd689091f8eedc9 } + { 0x0e3fbfaf79c03a55, 0x3077af054cbb5acf, -0x2a3aadba24c21c61, 0x015e68c1476a4af7 }, + { -0x2944bbd73e80afda, -0x614d8ddc04a56359, -0x1c845afce6e639bc, 0x21ce380db59a6602 }, + { -0x3e2ad7addff995c8, -0x6a9fc1adca8f510d, -0x7cd9a658dd9475b3, 0x5dd689091f8eedc9 } }, { - { 0x1d022591a5313084, 0xca2d4aaed6270872, 0x86a12b852f0bfd20, 0x56e6c439ad7da748 }, - { 0xcbac84debfd3c856, 0x1624c348b35ff244, 0xb7f88dca5d9cad07, 0x3b0e574da2c2ebe8 }, - { 0xc704ff4942bdbae6, 0x5e21ade2b2de1f79, 0xe95db3f35652fad8, 0x0822b5378f08ebc1 } + { 0x1d022591a5313084, -0x35d2b55129d8f78e, -0x795ed47ad0f402e0, 0x56e6c439ad7da748 }, + { -0x34537b21402c37aa, 0x1624c348b35ff244, -0x48077235a26352f9, 0x3b0e574da2c2ebe8 }, + { -0x38fb00b6bd42451a, 0x5e21ade2b2de1f79, -0x16a24c0ca9ad0528, 0x0822b5378f08ebc1 } }, }, { { - { 0xe1b7f29362730383, 0x4b5279ffebca8a2c, 0xdafc778abfd41314, 0x7deb10149c72610f }, - { 0x51f048478f387475, 0xb25dbcf49cbecb3c, 0x9aab1244d99f2055, 
0x2c709e6c1c10a5d6 }, - { 0xcb62af6a8766ee7a, 0x66cbec045553cd0e, 0x588001380f0be4b5, 0x08e68e9ff62ce2ea } + { -0x1e480d6c9d8cfc7d, 0x4b5279ffebca8a2c, -0x25038875402becec, 0x7deb10149c72610f }, + { 0x51f048478f387475, -0x4da2430b634134c4, -0x6554edbb2660dfab, 0x2c709e6c1c10a5d6 }, + { -0x349d509578991186, 0x66cbec045553cd0e, 0x588001380f0be4b5, 0x08e68e9ff62ce2ea } }, { - { 0x2f2d09d50ab8f2f9, 0xacb9218dc55923df, 0x4a8f342673766cb9, 0x4cb13bd738f719f5 }, - { 0x34ad500a4bc130ad, 0x8d38db493d0bd49c, 0xa25c3d98500a89be, 0x2f1f3f87eeba3b09 }, - { 0xf7848c75e515b64a, 0xa59501badb4a9038, 0xc20d313f3f751b50, 0x19a1e353c0ae2ee8 } + { 0x2f2d09d50ab8f2f9, -0x5346de723aa6dc21, 0x4a8f342673766cb9, 0x4cb13bd738f719f5 }, + { 0x34ad500a4bc130ad, -0x72c724b6c2f42b64, -0x5da3c267aff57642, 0x2f1f3f87eeba3b09 }, + { -0x087b738a1aea49b6, -0x5a6afe4524b56fc8, -0x3df2cec0c08ae4b0, 0x19a1e353c0ae2ee8 } }, { - { 0xb42172cdd596bdbd, 0x93e0454398eefc40, 0x9fb15347b44109b5, 0x736bd3990266ae34 }, - { 0x7d1c7560bafa05c3, 0xb3e1a0a0c6e55e61, 0xe3529718c0d66473, 0x41546b11c20c3486 }, - { 0x85532d509334b3b4, 0x46fd114b60816573, 0xcc5f5f30425c8375, 0x412295a2b87fab5c } + { -0x4bde8d322a694243, -0x6c1fbabc671103c0, -0x604eacb84bbef64b, 0x736bd3990266ae34 }, + { 0x7d1c7560bafa05c3, -0x4c1e5f5f391aa19f, -0x1cad68e73f299b8d, 0x41546b11c20c3486 }, + { -0x7aacd2af6ccb4c4c, 0x46fd114b60816573, -0x33a0a0cfbda37c8b, 0x412295a2b87fab5c } }, { - { 0x2e655261e293eac6, 0x845a92032133acdb, 0x460975cb7900996b, 0x0760bb8d195add80 }, + { 0x2e655261e293eac6, -0x7ba56dfcdecc5325, 0x460975cb7900996b, 0x0760bb8d195add80 }, { 0x19c99b88f57ed6e9, 0x5393cb266df8c825, 0x5cee3213b30ad273, 0x14e153ebb52d2e34 }, { 0x413e1a17cde6818a, 0x57156da9ed69a084, 0x2cbf268f46caccb1, 0x6b34be9bc33ac5f2 } }, { - { 0x11fc69656571f2d3, 0xc6c9e845530e737a, 0xe33ae7a2d4fe5035, 0x01b9c7b62e6dd30b }, - { 0xf3df2f643a78c0b2, 0x4c3e971ef22e027c, 0xec7d1c5e49c1b5a3, 0x2012c18f0922dd2d }, - { 0x880b55e55ac89d29, 0x1483241f45a0a763, 
0x3d36efdfc2e76c1f, 0x08af5b784e4bade8 } + { 0x11fc69656571f2d3, -0x393617baacf18c86, -0x1cc5185d2b01afcb, 0x01b9c7b62e6dd30b }, + { -0x0c20d09bc5873f4e, 0x4c3e971ef22e027c, -0x1382e3a1b63e4a5d, 0x2012c18f0922dd2d }, + { -0x77f4aa1aa53762d7, 0x1483241f45a0a763, 0x3d36efdfc2e76c1f, 0x08af5b784e4bade8 } }, { - { 0xe27314d289cc2c4b, 0x4be4bd11a287178d, 0x18d528d6fa3364ce, 0x6423c1d5afd9826e }, - { 0x283499dc881f2533, 0x9d0525da779323b6, 0x897addfb673441f4, 0x32b79d71163a168d }, - { 0xcc85f8d9edfcb36a, 0x22bcc28f3746e5f9, 0xe49de338f9e5d3cd, 0x480a5efbc13e2dcc } + { -0x1d8ceb2d7633d3b5, 0x4be4bd11a287178d, 0x18d528d6fa3364ce, 0x6423c1d5afd9826e }, + { 0x283499dc881f2533, -0x62fada25886cdc4a, -0x7685220498cbbe0c, 0x32b79d71163a168d }, + { -0x337a072612034c96, 0x22bcc28f3746e5f9, -0x1b621cc7061a2c33, 0x480a5efbc13e2dcc } }, { - { 0xb6614ce442ce221f, 0x6e199dcc4c053928, 0x663fb4a4dc1cbe03, 0x24b31d47691c8e06 }, + { -0x499eb31bbd31dde1, 0x6e199dcc4c053928, 0x663fb4a4dc1cbe03, 0x24b31d47691c8e06 }, { 0x0b51e70b01622071, 0x06b505cf8b1dafc5, 0x2c6bb061ef5aabcd, 0x47aa27600cb7bf31 }, - { 0x2a541eedc015f8c3, 0x11a4fe7e7c693f7c, 0xf0af66134ea278d6, 0x545b585d14dda094 } + { 0x2a541eedc015f8c3, 0x11a4fe7e7c693f7c, -0x0f5099ecb15d872a, 0x545b585d14dda094 } }, { { 0x6204e4d0e3b321e1, 0x3baa637a28ff1e95, 0x0b0ccffd5b99bd9e, 0x4d22dc3e64c8d071 }, - { 0x67bf275ea0d43a0f, 0xade68e34089beebe, 0x4289134cd479e72e, 0x0f62f9c332ba5454 }, - { 0xfcb46589d63b5f39, 0x5cae6a3f57cbcf61, 0xfebac2d2953afa05, 0x1c0fa01a36371436 } + { 0x67bf275ea0d43a0f, -0x521971cbf7641142, 0x4289134cd479e72e, 0x0f62f9c332ba5454 }, + { -0x034b9a7629c4a0c7, 0x5cae6a3f57cbcf61, -0x01453d2d6ac505fb, 0x1c0fa01a36371436 } }, }, { { - { 0xc11ee5e854c53fae, 0x6a0b06c12b4f3ff4, 0x33540f80e0b67a72, 0x15f18fc3cd07e3ef }, - { 0xe7547449bc7cd692, 0x0f9abeaae6f73ddf, 0x4af01ca700837e29, 0x63ab1b5d3f1bc183 }, - { 0x32750763b028f48c, 0x06020740556a065f, 0xd53bd812c3495b58, 0x08706c9b865f508d } + { -0x3ee11a17ab3ac052, 
0x6a0b06c12b4f3ff4, 0x33540f80e0b67a72, 0x15f18fc3cd07e3ef }, + { -0x18ab8bb64383296e, 0x0f9abeaae6f73ddf, 0x4af01ca700837e29, 0x63ab1b5d3f1bc183 }, + { 0x32750763b028f48c, 0x06020740556a065f, -0x2ac427ed3cb6a4a8, 0x08706c9b865f508d } }, { - { 0xcc991b4138b41246, 0x243b9c526f9ac26b, 0xb9ef494db7cbabbd, 0x5fba433dd082ed00 }, - { 0xf37ca2ab3d343dff, 0x1a8c6a2d80abc617, 0x8e49e035d4ccffca, 0x48b46beebaa1d1b9 }, - { 0x9c49e355c9941ad0, 0xb9734ade74498f84, 0x41c3fed066663e5c, 0x0ecfedf8e8e710b3 } + { -0x3366e4bec74bedba, 0x243b9c526f9ac26b, -0x4610b6b248345443, 0x5fba433dd082ed00 }, + { -0x0c835d54c2cbc201, 0x1a8c6a2d80abc617, -0x71b61fca2b330036, 0x48b46beebaa1d1b9 }, + { -0x63b61caa366be530, -0x468cb5218bb6707c, 0x41c3fed066663e5c, 0x0ecfedf8e8e710b3 } }, { - { 0x744f7463e9403762, 0xf79a8dee8dfcc9c9, 0x163a649655e4cde3, 0x3b61788db284f435 }, - { 0x76430f9f9cd470d9, 0xb62acc9ba42f6008, 0x1898297c59adad5e, 0x7789dd2db78c5080 }, - { 0xb22228190d6ef6b2, 0xa94a66b246ce4bfa, 0x46c1a77a4f0b6cc7, 0x4236ccffeb7338cf } + { 0x744f7463e9403762, -0x0865721172033637, 0x163a649655e4cde3, 0x3b61788db284f435 }, + { 0x76430f9f9cd470d9, -0x49d533645bd09ff8, 0x1898297c59adad5e, 0x7789dd2db78c5080 }, + { -0x4dddd7e6f291094e, -0x56b5994db931b406, 0x46c1a77a4f0b6cc7, 0x4236ccffeb7338cf } }, { { 0x3bd82dbfda777df6, 0x71b177cc0b98369e, 0x1d0e8463850c3699, 0x5a71945b48e2d1f1 }, - { 0x8497404d0d55e274, 0x6c6663d9c4ad2b53, 0xec2fb0d9ada95734, 0x2617e120cdb8f73c }, - { 0x6f203dd5405b4b42, 0x327ec60410b24509, 0x9c347230ac2a8846, 0x77de29fc11ffeb6a } + { -0x7b68bfb2f2aa1d8c, 0x6c6663d9c4ad2b53, -0x13d04f265256a8cc, 0x2617e120cdb8f73c }, + { 0x6f203dd5405b4b42, 0x327ec60410b24509, -0x63cb8dcf53d577ba, 0x77de29fc11ffeb6a } }, { - { 0x835e138fecced2ca, 0x8c9eaf13ea963b9a, 0xc95fbfc0b2160ea6, 0x575e66f3ad877892 }, - { 0xb0ac57c983b778a8, 0x53cdcca9d7fe912c, 0x61c2b854ff1f59dc, 0x3a1a2cf0f0de7dac }, - { 0x99803a27c88fcb3a, 0x345a6789275ec0b0, 0x459789d0ff6c2be5, 0x62f882651e70a8b2 } + { 
-0x7ca1ec7013312d36, -0x736150ec1569c466, -0x36a0403f4de9f15a, 0x575e66f3ad877892 }, + { -0x4f53a8367c488758, 0x53cdcca9d7fe912c, 0x61c2b854ff1f59dc, 0x3a1a2cf0f0de7dac }, + { -0x667fc5d8377034c6, 0x345a6789275ec0b0, 0x459789d0ff6c2be5, 0x62f882651e70a8b2 } }, { - { 0x6d822986698a19e0, 0xdc9821e174d78a71, 0x41a85f31f6cb1f47, 0x352721c2bcda9c51 }, - { 0x085ae2c759ff1be4, 0x149145c93b0e40b7, 0xc467e7fa7ff27379, 0x4eeecf0ad5c73a95 }, - { 0x48329952213fc985, 0x1087cf0d368a1746, 0x8e5261b166c15aa5, 0x2d5b2d842ed24c21 } + { 0x6d822986698a19e0, -0x2367de1e8b28758f, 0x41a85f31f6cb1f47, 0x352721c2bcda9c51 }, + { 0x085ae2c759ff1be4, 0x149145c93b0e40b7, -0x3b981805800d8c87, 0x4eeecf0ad5c73a95 }, + { 0x48329952213fc985, 0x1087cf0d368a1746, -0x71ad9e4e993ea55b, 0x2d5b2d842ed24c21 } }, { - { 0x5eb7d13d196ac533, 0x377234ecdb80be2b, 0xe144cffc7cf5ae24, 0x5226bcf9c441acec }, - { 0x02cfebd9ebd3ded1, 0xd45b217739021974, 0x7576f813fe30a1b7, 0x5691b6f9a34ef6c2 }, - { 0x79ee6c7223e5b547, 0x6f5f50768330d679, 0xed73e1e96d8adce9, 0x27c3da1e1d8ccc03 } + { 0x5eb7d13d196ac533, 0x377234ecdb80be2b, -0x1ebb3003830a51dc, 0x5226bcf9c441acec }, + { 0x02cfebd9ebd3ded1, -0x2ba4de88c6fde68c, 0x7576f813fe30a1b7, 0x5691b6f9a34ef6c2 }, + { 0x79ee6c7223e5b547, 0x6f5f50768330d679, -0x128c1e1692752317, 0x27c3da1e1d8ccc03 } }, { - { 0x28302e71630ef9f6, 0xc2d4a2032b64cee0, 0x090820304b6292be, 0x5fca747aa82adf18 }, + { 0x28302e71630ef9f6, -0x3d2b5dfcd49b3120, 0x090820304b6292be, 0x5fca747aa82adf18 }, { 0x7eb9efb23fe24c74, 0x3e50f49f1651be01, 0x3ea732dc21858dea, 0x17377bd75bb810f9 }, - { 0x232a03c35c258ea5, 0x86f23a2c6bcb0cf1, 0x3dad8d0d2e442166, 0x04a8933cab76862b } + { 0x232a03c35c258ea5, -0x790dc5d39434f30f, 0x3dad8d0d2e442166, 0x04a8933cab76862b } }, }, { { - { 0x69082b0e8c936a50, 0xf9c9a035c1dac5b6, 0x6fb73e54c4dfb634, 0x4005419b1d2bc140 }, - { 0xd2c604b622943dff, 0xbc8cbece44cfb3a0, 0x5d254ff397808678, 0x0fa3614f3b1ca6bf }, - { 0xa003febdb9be82f0, 0x2089c1af3a44ac90, 0xf8499f911954fa8e, 0x1fba218aef40ab42 
} + { 0x69082b0e8c936a50, -0x06365fca3e253a4a, 0x6fb73e54c4dfb634, 0x4005419b1d2bc140 }, + { -0x2d39fb49dd6bc201, -0x43734131bb304c60, 0x5d254ff397808678, 0x0fa3614f3b1ca6bf }, + { -0x5ffc014246417d10, 0x2089c1af3a44ac90, -0x07b6606ee6ab0572, 0x1fba218aef40ab42 } }, { - { 0x4f3e57043e7b0194, 0xa81d3eee08daaf7f, 0xc839c6ab99dcdef1, 0x6c535d13ff7761d5 }, - { 0xab549448fac8f53e, 0x81f6e89a7ba63741, 0x74fd6c7d6c2b5e01, 0x392e3acaa8c86e42 }, + { 0x4f3e57043e7b0194, -0x57e2c111f7255081, -0x37c639546623210f, 0x6c535d13ff7761d5 }, + { -0x54ab6bb705370ac2, -0x7e0917658459c8bf, 0x74fd6c7d6c2b5e01, 0x392e3acaa8c86e42 }, { 0x4cbd34e93e8a35af, 0x2e0781445887e816, 0x19319c76f29ab0ab, 0x25e17fe4d50ac13b } }, { - { 0x915f7ff576f121a7, 0xc34a32272fcd87e3, 0xccba2fde4d1be526, 0x6bba828f8969899b }, + { -0x6ea0800a890ede59, -0x3cb5cdd8d032781d, -0x3345d021b2e41ada, 0x6bba828f8969899b }, { 0x0a289bd71e04f676, 0x208e1c52d6420f95, 0x5186d8b034691fab, 0x255751442a9fb351 }, - { 0xe2d1bc6690fe3901, 0x4cb54a18a0997ad5, 0x971d6914af8460d4, 0x559d504f7f6b7be4 } + { -0x1d2e43996f01c6ff, 0x4cb54a18a0997ad5, -0x68e296eb507b9f2c, 0x559d504f7f6b7be4 } }, { - { 0x9c4891e7f6d266fd, 0x0744a19b0307781b, 0x88388f1d6061e23b, 0x123ea6a3354bd50e }, - { 0xa7738378b3eb54d5, 0x1d69d366a5553c7c, 0x0a26cf62f92800ba, 0x01ab12d5807e3217 }, - { 0x118d189041e32d96, 0xb9ede3c2d8315848, 0x1eab4271d83245d9, 0x4a3961e2c918a154 } + { -0x63b76e18092d9903, 0x0744a19b0307781b, -0x77c770e29f9e1dc5, 0x123ea6a3354bd50e }, + { -0x588c7c874c14ab2b, 0x1d69d366a5553c7c, 0x0a26cf62f92800ba, 0x01ab12d5807e3217 }, + { 0x118d189041e32d96, -0x46121c3d27cea7b8, 0x1eab4271d83245d9, 0x4a3961e2c918a154 } }, { - { 0x0327d644f3233f1e, 0x499a260e34fcf016, 0x83b5a716f2dab979, 0x68aceead9bd4111f }, - { 0x71dc3be0f8e6bba0, 0xd6cef8347effe30a, 0xa992425fe13a476a, 0x2cd6bce3fb1db763 }, + { 0x0327d644f3233f1e, 0x499a260e34fcf016, -0x7c4a58e90d254687, 0x68aceead9bd4111f }, + { 0x71dc3be0f8e6bba0, -0x293107cb81001cf6, -0x566dbda01ec5b896, 
0x2cd6bce3fb1db763 }, { 0x38b4c90ef3d7c210, 0x308e6e24b7ad040c, 0x3860d9f1b7e73e23, 0x595760d5b508f597 } }, { - { 0x882acbebfd022790, 0x89af3305c4115760, 0x65f492e37d3473f4, 0x2cb2c5df54515a2b }, - { 0x6129bfe104aa6397, 0x8f960008a4a7fccb, 0x3f8bc0897d909458, 0x709fa43edcb291a9 }, - { 0xeb0a5d8c63fd2aca, 0xd22bc1662e694eff, 0x2723f36ef8cbb03a, 0x70f029ecf0c8131f } + { -0x77d5341402fdd870, -0x7650ccfa3beea8a0, 0x65f492e37d3473f4, 0x2cb2c5df54515a2b }, + { 0x6129bfe104aa6397, -0x7069fff75b580335, 0x3f8bc0897d909458, 0x709fa43edcb291a9 }, + { -0x14f5a2739c02d536, -0x2dd43e99d196b101, 0x2723f36ef8cbb03a, 0x70f029ecf0c8131f } }, { { 0x2a6aafaa5e10b0b9, 0x78f0a370ef041aa9, 0x773efb77aa3ad61f, 0x44eca5a2a74bd9e1 }, - { 0x461307b32eed3e33, 0xae042f33a45581e7, 0xc94449d3195f0366, 0x0b7d5d8a6c314858 }, - { 0x25d448327b95d543, 0x70d38300a3340f1d, 0xde1c531c60e1c52b, 0x272224512c7de9e4 } + { 0x461307b32eed3e33, -0x51fbd0cc5baa7e19, -0x36bbb62ce6a0fc9a, 0x0b7d5d8a6c314858 }, + { 0x25d448327b95d543, 0x70d38300a3340f1d, -0x21e3ace39f1e3ad5, 0x272224512c7de9e4 } }, { - { 0xbf7bbb8a42a975fc, 0x8c5c397796ada358, 0xe27fc76fcdedaa48, 0x19735fd7f6bc20a6 }, - { 0x1abc92af49c5342e, 0xffeed811b2e6fad0, 0xefa28c8dfcc84e29, 0x11b5df18a44cc543 }, - { 0xe3ab90d042c84266, 0xeb848e0f7f19547e, 0x2503a1d065a497b9, 0x0fef911191df895f } + { -0x40844475bd568a04, -0x73a3c68869525ca8, -0x1d803890321255b8, 0x19735fd7f6bc20a6 }, + { 0x1abc92af49c5342e, -0x001127ee4d190530, -0x105d73720337b1d7, 0x11b5df18a44cc543 }, + { -0x1c546f2fbd37bd9a, -0x147b71f080e6ab82, 0x2503a1d065a497b9, 0x0fef911191df895f } }, }, { { - { 0x6ab5dcb85b1c16b7, 0x94c0fce83c7b27a5, 0xa4b11c1a735517be, 0x499238d0ba0eafaa }, - { 0xb1507ca1ab1c6eb9, 0xbd448f3e16b687b3, 0x3455fb7f2c7a91ab, 0x7579229e2f2adec1 }, - { 0xecf46e527aba8b57, 0x15a08c478bd1647b, 0x7af1c6a65f706fef, 0x6345fa78f03a30d5 } + { 0x6ab5dcb85b1c16b7, -0x6b3f0317c384d85b, -0x5b4ee3e58caae842, 0x499238d0ba0eafaa }, + { -0x4eaf835e54e39147, -0x42bb70c1e949784d, 
0x3455fb7f2c7a91ab, 0x7579229e2f2adec1 }, + { -0x130b91ad854574a9, 0x15a08c478bd1647b, 0x7af1c6a65f706fef, 0x6345fa78f03a30d5 } }, { - { 0x93d3cbe9bdd8f0a4, 0xdb152c1bfd177302, 0x7dbddc6d7f17a875, 0x3e1a71cc8f426efe }, - { 0xdf02f95f1015e7a1, 0x790ec41da9b40263, 0x4d3a0ea133ea1107, 0x54f70be7e33af8c9 }, - { 0xc83ca3e390babd62, 0x80ede3670291c833, 0xc88038ccd37900c4, 0x2c5fc0231ec31fa1 } + { -0x6c2c341642270f5c, -0x24ead3e402e88cfe, 0x7dbddc6d7f17a875, 0x3e1a71cc8f426efe }, + { -0x20fd06a0efea185f, 0x790ec41da9b40263, 0x4d3a0ea133ea1107, 0x54f70be7e33af8c9 }, + { -0x37c35c1c6f45429e, -0x7f121c98fd6e37cd, -0x377fc7332c86ff3c, 0x2c5fc0231ec31fa1 } }, { - { 0xc422e4d102456e65, 0x87414ac1cad47b91, 0x1592e2bba2b6ffdd, 0x75d9d2bff5c2100f }, - { 0xfeba911717038b4f, 0xe5123721c9deef81, 0x1c97e4e75d0d8834, 0x68afae7a23dc3bc6 }, - { 0x5bd9b4763626e81c, 0x89966936bca02edd, 0x0a41193d61f077b3, 0x3097a24200ce5471 } + { -0x3bdd1b2efdba919b, -0x78beb53e352b846f, 0x1592e2bba2b6ffdd, 0x75d9d2bff5c2100f }, + { -0x01456ee8e8fc74b1, -0x1aedc8de3621107f, 0x1c97e4e75d0d8834, 0x68afae7a23dc3bc6 }, + { 0x5bd9b4763626e81c, -0x766996c9435fd123, 0x0a41193d61f077b3, 0x3097a24200ce5471 } }, { - { 0xa162e7246695c486, 0x131d633435a89607, 0x30521561a0d12a37, 0x56704bada6afb363 }, - { 0x57427734c7f8b84c, 0xf141a13e01b270e9, 0x02d1adfeb4e564a6, 0x4bb23d92ce83bd48 }, - { 0xaf6c4aa752f912b9, 0x5e665f6cd86770c8, 0x4c35ac83a3c8cd58, 0x2b7a29c010a58a7e } + { -0x5e9d18db996a3b7a, 0x131d633435a89607, 0x30521561a0d12a37, 0x56704bada6afb363 }, + { 0x57427734c7f8b84c, -0x0ebe5ec1fe4d8f17, 0x02d1adfeb4e564a6, 0x4bb23d92ce83bd48 }, + { -0x5093b558ad06ed47, 0x5e665f6cd86770c8, 0x4c35ac83a3c8cd58, 0x2b7a29c010a58a7e } }, { - { 0x33810a23bf00086e, 0xafce925ee736ff7c, 0x3d60e670e24922d4, 0x11ce9e714f96061b }, - { 0xc4007f77d0c1cec3, 0x8d1020b6bac492f8, 0x32ec29d57e69daaf, 0x599408759d95fce0 }, - { 0x219ef713d815bac1, 0xf141465d485be25c, 0x6d5447cc4e513c51, 0x174926be5ef44393 } + { 0x33810a23bf00086e, 
-0x50316da118c90084, 0x3d60e670e24922d4, 0x11ce9e714f96061b }, + { -0x3bff80882f3e313d, -0x72efdf49453b6d08, 0x32ec29d57e69daaf, 0x599408759d95fce0 }, + { 0x219ef713d815bac1, -0x0ebeb9a2b7a41da4, 0x6d5447cc4e513c51, 0x174926be5ef44393 } }, { - { 0x3ef5d41593ea022e, 0x5cbcc1a20ed0eed6, 0x8fd24ecf07382c8c, 0x6fa42ead06d8e1ad }, - { 0xb5deb2f9fc5bd5bb, 0x92daa72ae1d810e1, 0xafc4cfdcb72a1c59, 0x497d78813fc22a24 }, - { 0xe276824a1f73371f, 0x7f7cf01c4f5b6736, 0x7e201fe304fa46e7, 0x785a36a357808c96 } + { 0x3ef5d41593ea022e, 0x5cbcc1a20ed0eed6, -0x702db130f8c7d374, 0x6fa42ead06d8e1ad }, + { -0x4a214d0603a42a45, -0x6d2558d51e27ef1f, -0x503b302348d5e3a7, 0x497d78813fc22a24 }, + { -0x1d897db5e08cc8e1, 0x7f7cf01c4f5b6736, 0x7e201fe304fa46e7, 0x785a36a357808c96 } }, { { 0x070442985d517bc3, 0x6acd56c7ae653678, 0x00a27983985a7763, 0x5167effae512662b }, - { 0x825fbdfd63014d2b, 0xc852369c6ca7578b, 0x5b2fcd285c0b5df0, 0x12ab214c58048c8f }, - { 0xbd4ea9e10f53c4b6, 0x1673dc5f8ac91a14, 0xa8f81a4e2acc1aba, 0x33a92a7924332a25 } + { -0x7da042029cfeb2d5, -0x37adc9639358a875, 0x5b2fcd285c0b5df0, 0x12ab214c58048c8f }, + { -0x42b1561ef0ac3b4a, 0x1673dc5f8ac91a14, -0x5707e5b1d533e546, 0x33a92a7924332a25 } }, { - { 0x7ba95ba0218f2ada, 0xcff42287330fb9ca, 0xdada496d56c6d907, 0x5380c296f4beee54 }, - { 0x9dd1f49927996c02, 0x0cb3b058e04d1752, 0x1f7e88967fd02c3e, 0x2f964268cb8b3eb1 }, - { 0x9d4f270466898d0a, 0x3d0987990aff3f7a, 0xd09ef36267daba45, 0x7761455e7b1c669c } + { 0x7ba95ba0218f2ada, -0x300bdd78ccf04636, -0x2525b692a93926f9, 0x5380c296f4beee54 }, + { -0x622e0b66d86693fe, 0x0cb3b058e04d1752, 0x1f7e88967fd02c3e, 0x2f964268cb8b3eb1 }, + { -0x62b0d8fb997672f6, 0x3d0987990aff3f7a, -0x2f610c9d982545bb, 0x7761455e7b1c669c } }, }, }; @@ -6501,44 +6501,44 @@ static void slide(signed char *r,const unsigned char *a) #ifdef CURVED25519_X64 static const ge_precomp Bi[8] = { { - { 0x2fbc93c6f58c3b85, 0xcf932dc6fb8c0e19, 0x270b4898643d42c2, 0x7cf9d3a33d4ba65, }, - { 0x9d103905d740913e, 0xfd399f05d140beb3, 
0xa5c18434688f8a09, 0x44fd2f9298f81267, }, - { 0xabc91205877aaa68, 0x26d9e823ccaac49e, 0x5a1b7dcbdd43598c, 0x6f117b689f0c65a8, }, + { 0x2fbc93c6f58c3b85, -0x306cd2390473f1e7, 0x270b4898643d42c2, 0x07cf9d3a33d4ba65, }, + { -0x62efc6fa28bf6ec2, -0x02c660fa2ebf414d, -0x5a3e7bcb977075f7, 0x44fd2f9298f81267, }, + { -0x5436edfa78855598, 0x26d9e823ccaac49e, 0x5a1b7dcbdd43598c, 0x6f117b689f0c65a8, }, }, { - { 0xaf25b0a84cee9730, 0x25a8430e8864b8a, 0xc11b50029f016732, 0x7a164e1b9a80f8f4, }, - { 0x56611fe8a4fcd265, 0x3bd353fde5c1ba7d, 0x8131f31a214bd6bd, 0x2ab91587555bda62, }, - { 0x14ae933f0dd0d889, 0x589423221c35da62, 0xd170e5458cf2db4c, 0x5a2826af12b9b4c6, }, + { -0x50da4f57b31168d0, 0x025a8430e8864b8a, -0x3ee4affd60fe98ce, 0x7a164e1b9a80f8f4, }, + { 0x56611fe8a4fcd265, 0x3bd353fde5c1ba7d, -0x7ece0ce5deb42943, 0x2ab91587555bda62, }, + { 0x14ae933f0dd0d889, 0x589423221c35da62, -0x2e8f1aba730d24b4, 0x5a2826af12b9b4c6, }, }, { - { 0xa212bc4408a5bb33, 0x8d5048c3c75eed02, 0xdd1beb0c5abfec44, 0x2945ccf146e206eb, }, - { 0x7f9182c3a447d6ba, 0xd50014d14b2729b7, 0xe33cf11cb864a087, 0x154a7e73eb1b55f3, }, - { 0xbcbbdbf1812a8285, 0x270e0807d0bdd1fc, 0xb41b670b1bbda72d, 0x43aabe696b3bb69a, }, + { -0x5ded43bbf75a44cd, -0x72afb73c38a112fe, -0x22e414f3a54013bc, 0x2945ccf146e206eb, }, + { 0x7f9182c3a447d6ba, -0x2affeb2eb4d8d649, -0x1cc30ee3479b5f79, 0x154a7e73eb1b55f3, }, + { -0x4344240e7ed57d7b, 0x270e0807d0bdd1fc, -0x4be498f4e44258d3, 0x43aabe696b3bb69a, }, }, { { 0x6b1a5cd0944ea3bf, 0x7470353ab39dc0d2, 0x71b2528228542e49, 0x461bea69283c927e, }, - { 0xba6f2c9aaa3221b1, 0x6ca021533bba23a7, 0x9dea764f92192c3a, 0x1d6edd5d2e5317e0, }, - { 0xf1836dc801b8b3a2, 0xb3035f47053ea49a, 0x529c41ba5877adf3, 0x7a9fbb1c6a0f90a7, }, + { -0x4590d36555cdde4f, 0x6ca021533bba23a7, -0x621589b06de6d3c6, 0x1d6edd5d2e5317e0, }, + { -0x0e7c9237fe474c5e, -0x4cfca0b8fac15b66, 0x529c41ba5877adf3, 0x7a9fbb1c6a0f90a7, }, }, { - { 0x9b2e678aa6a8632f, 0xa6509e6f51bc46c5, 0xceb233c9c686f5b5, 0x34b9ed338add7f59, }, - { 
0xf36e217e039d8064, 0x98a081b6f520419b, 0x96cbc608e75eb044, 0x49c05a51fadc9c8f, }, - { 0x6b4e8bf9045af1b, 0xe2ff83e8a719d22f, 0xaaf6fc2993d4cf16, 0x73c172021b008b06, }, + { -0x64d1987559579cd1, -0x59af6190ae43b93b, -0x314dcc3639790a4b, 0x34b9ed338add7f59, }, + { -0x0c91de81fc627f9c, -0x675f7e490adfbe65, -0x693439f718a14fbc, 0x49c05a51fadc9c8f, }, + { 0x06b4e8bf9045af1b, -0x1d007c1758e62dd1, -0x550903d66c2b30ea, 0x73c172021b008b06, }, }, { - { 0x2fbf00848a802ade, 0xe5d9fecf02302e27, 0x113e847117703406, 0x4275aae2546d8faf, }, - { 0x315f5b0249864348, 0x3ed6b36977088381, 0xa3a075556a8deb95, 0x18ab598029d5c77f, }, - { 0xd82b2cc5fd6089e9, 0x31eb4a13282e4a4, 0x44311199b51a8622, 0x3dc65522b53df948, }, + { 0x2fbf00848a802ade, -0x1a260130fdcfd1d9, 0x113e847117703406, 0x4275aae2546d8faf, }, + { 0x315f5b0249864348, 0x3ed6b36977088381, -0x5c5f8aaa9572146b, 0x18ab598029d5c77f, }, + { -0x27d4d33a029f7617, 0x031eb4a13282e4a4, 0x44311199b51a8622, 0x3dc65522b53df948, }, }, { - { 0xbf70c222a2007f6d, 0xbf84b39ab5bcdedb, 0x537a0e12fb07ba07, 0x234fd7eec346f241, }, - { 0x506f013b327fbf93, 0xaefcebc99b776f6b, 0x9d12b232aaad5968, 0x267882d176024a7, }, - { 0x5360a119732ea378, 0x2437e6b1df8dd471, 0xa2ef37f891a7e533, 0x497ba6fdaa097863, }, + { -0x408f3ddd5dff8093, -0x407b4c654a432125, 0x537a0e12fb07ba07, 0x234fd7eec346f241, }, + { 0x506f013b327fbf93, -0x5103143664889095, -0x62ed4dcd5552a698, 0x0267882d176024a7, }, + { 0x5360a119732ea378, 0x2437e6b1df8dd471, -0x5d10c8076e581acd, 0x497ba6fdaa097863, }, }, { - { 0x24cecc0313cfeaa0, 0x8648c28d189c246d, 0x2dbdbdfac1f2d4d0, 0x61e22917f12de72b, }, - { 0x40bcd86468ccf0b, 0xd3829ba42a9910d6, 0x7508300807b25192, 0x43b5cd4218d05ebf, }, - { 0x5d9a762f9bd0b516, 0xeb38af4e373fdeee, 0x32e5a7d93d64270, 0x511d61210ae4d842, }, + { 0x24cecc0313cfeaa0, -0x79b73d72e763db93, 0x2dbdbdfac1f2d4d0, 0x61e22917f12de72b, }, + { 0x040bcd86468ccf0b, -0x2c7d645bd566ef2a, 0x7508300807b25192, 0x43b5cd4218d05ebf, }, + { 0x5d9a762f9bd0b516, -0x14c750b1c8c02112, 
0x032e5a7d93d64270, 0x511d61210ae4d842, }, }, }; #elif defined(CURVED25519_128BIT) @@ -6693,7 +6693,7 @@ int ge_double_scalarmult_vartime(ge_p2 *r, const unsigned char *a, #ifdef CURVED25519_X64 static const ge d = { - 0x75eb4dca135978a3, 0x700a4d4141d8ab, 0x8cc740797779e898, 0x52036cee2b6ffe73, + 0x75eb4dca135978a3, 0x00700a4d4141d8ab, -0x7338bf8688861768, 0x52036cee2b6ffe73, }; #elif defined(CURVED25519_128BIT) static const ge d = { @@ -6710,7 +6710,7 @@ static const ge d = { #ifdef CURVED25519_X64 static const ge sqrtm1 = { - 0xc4ee1b274a0ea0b0, 0x2f431806ad2fe478, 0x2b4d00993dfbd7a7, 0x2b8324804fc1df0b, + -0x3b11e4d8b5f15f50, 0x2f431806ad2fe478, 0x2b4d00993dfbd7a7, 0x2b8324804fc1df0b, }; #elif defined(CURVED25519_128BIT) static const ge sqrtm1 = { @@ -6914,7 +6914,7 @@ r = p #ifdef CURVED25519_X64 static const ge d2 = { - 0xebd69b9426b2f159, 0xe0149a8283b156, 0x198e80f2eef3d130, 0x2406d9dc56dffce7, + -0x1429646bd94d0ea7, 0x00e0149a8283b156, 0x198e80f2eef3d130, 0x2406d9dc56dffce7, }; #elif defined(CURVED25519_128BIT) static const ge d2 = { diff --git a/wolfcrypt/src/include.am b/wolfcrypt/src/include.am index b79b0e1f5..936fb1252 100644 --- a/wolfcrypt/src/include.am +++ b/wolfcrypt/src/include.am @@ -40,8 +40,7 @@ EXTRA_DIST += \ wolfcrypt/src/fp_sqr_comba_8.i \ wolfcrypt/src/fp_sqr_comba_9.i \ wolfcrypt/src/fp_sqr_comba_small_set.i \ - wolfcrypt/src/fe_x25519_128.i \ - wolfcrypt/src/fe_x25519_x64.i + wolfcrypt/src/fe_x25519_128.i EXTRA_DIST += wolfcrypt/src/port/ti/ti-aes.c \ wolfcrypt/src/port/ti/ti-des3.c \ diff --git a/wolfssl/wolfcrypt/fe_operations.h b/wolfssl/wolfcrypt/fe_operations.h index 43ee7cbae..4c5624180 100644 --- a/wolfssl/wolfcrypt/fe_operations.h +++ b/wolfssl/wolfcrypt/fe_operations.h @@ -47,6 +47,10 @@ t[0]+2^26 t[1]+2^51 t[2]+2^77 t[3]+2^102 t[4]+...+2^230 t[9]. Bounds on each t[i] vary depending on context. 
*/ +#ifdef __cplusplus + extern "C" { +#endif + #if defined(CURVE25519_SMALL) || defined(ED25519_SMALL) #define F25519_SIZE 32 @@ -181,6 +185,11 @@ WOLFSSL_LOCAL void fprime_mul(byte *r, const byte *a, const byte *b, WOLFSSL_LOCAL void fprime_copy(byte *x, const byte *a); #endif /* CURVE25519_SMALL || ED25519_SMALL */ + +#ifdef __cplusplus + } /* extern "C" */ +#endif + #endif /* HAVE_CURVE25519 || HAVE_ED25519 */ #endif /* WOLF_CRYPT_FE_OPERATIONS_H */