From 6dcf45a5346a1cdedc394c24ee724988c88aa296 Mon Sep 17 00:00:00 2001 From: Samuel Neves Date: Fri, 17 Nov 2017 12:07:52 +0000 Subject: chacha20poly1305: import x86_64 primitives from OpenSSL x86_64 only at the moment. SSSE3, AVX, AVX2, AVX512. Signed-off-by: Samuel Neves Signed-off-by: Jason A. Donenfeld --- src/Kbuild | 12 +- src/compat/Kbuild.include | 23 + src/compat/compat.h | 11 +- src/crypto/blake2s-avx-x86_64.S | 586 ------- src/crypto/blake2s-x86_64.S | 588 +++++++ src/crypto/chacha20-avx2-x86_64.S | 446 ----- src/crypto/chacha20-ssse3-x86_64.S | 734 -------- src/crypto/chacha20-x86_64.S | 2084 +++++++++++++++++++++++ src/crypto/chacha20poly1305.c | 639 +++---- src/crypto/curve25519-avx-x86_64.S | 3257 ----------------------------------- src/crypto/curve25519-x86_64.S | 3259 ++++++++++++++++++++++++++++++++++++ src/crypto/poly1305-avx2-x86_64.S | 387 ----- src/crypto/poly1305-sse2-x86_64.S | 583 ------- src/crypto/poly1305-x86_64.S | 2814 +++++++++++++++++++++++++++++++ 14 files changed, 9110 insertions(+), 6313 deletions(-) delete mode 100644 src/crypto/blake2s-avx-x86_64.S create mode 100644 src/crypto/blake2s-x86_64.S delete mode 100644 src/crypto/chacha20-avx2-x86_64.S delete mode 100644 src/crypto/chacha20-ssse3-x86_64.S create mode 100644 src/crypto/chacha20-x86_64.S delete mode 100644 src/crypto/curve25519-avx-x86_64.S create mode 100644 src/crypto/curve25519-x86_64.S delete mode 100644 src/crypto/poly1305-avx2-x86_64.S delete mode 100644 src/crypto/poly1305-sse2-x86_64.S create mode 100644 src/crypto/poly1305-x86_64.S (limited to 'src') diff --git a/src/Kbuild b/src/Kbuild index 810debe..447c5ec 100644 --- a/src/Kbuild +++ b/src/Kbuild @@ -5,17 +5,7 @@ ccflags-y += -D'pr_fmt(fmt)=KBUILD_MODNAME ": " fmt' wireguard-y := main.o noise.o device.o peer.o timers.o queueing.o send.o receive.o socket.o hashtables.o allowedips.o ratelimiter.o cookie.o netlink.o wireguard-y += crypto/curve25519.o crypto/chacha20poly1305.o crypto/blake2s.o -ifeq ($(CONFIG_X86_64),y) - wireguard-y += crypto/chacha20-ssse3-x86_64.o crypto/poly1305-sse2-x86_64.o - avx_supported := $(call as-instr,vpxor %xmm0$(comma)%xmm0$(comma)%xmm0,yes,no) - ifeq ($(avx_supported),yes) - wireguard-y += crypto/blake2s-avx-x86_64.o crypto/curve25519-avx-x86_64.o - endif - avx2_supported := $(call as-instr,vpgatherdd %ymm0$(comma)(%eax$(comma)%ymm1$(comma)4)$(comma)%ymm2,yes,no) - ifeq ($(avx2_supported),yes) - wireguard-y += crypto/chacha20-avx2-x86_64.o crypto/poly1305-avx2-x86_64.o - endif -endif +wireguard-$(CONFIG_X86_64) += crypto/chacha20-x86_64.o crypto/poly1305-x86_64.o crypto/blake2s-x86_64.o crypto/curve25519-x86_64.o ifeq ($(CONFIG_ARM64),y) wireguard-$(CONFIG_KERNEL_MODE_NEON) += crypto/chacha20-neon-arm64.o diff --git a/src/compat/Kbuild.include b/src/compat/Kbuild.include index bee91ed..f952f6b 100644 --- a/src/compat/Kbuild.include +++ b/src/compat/Kbuild.include @@ -35,3 +35,26 @@ ifeq ($(shell grep -F "int crypto_memneq" "$(srctree)/include/crypto/algapi.h"), ccflags-y += -include $(src)/compat/memneq/include.h wireguard-y += compat/memneq/memneq.o endif + +ifeq ($(CONFIG_X86_64),y) + ifeq ($(ssse3_instr),) + ssse3_instr := $(call as-instr,pshufb %xmm0$(comma)%xmm0,-DCONFIG_AS_SSSE3=1) + ccflags-y += $(ssse3_instr) + asflags-y += $(ssse3_instr) + endif + ifeq ($(avx_instr),) + avx_instr := $(call as-instr,vxorps %ymm0$(comma)%ymm1$(comma)%ymm2,-DCONFIG_AS_AVX=1) + ccflags-y += $(avx_instr) + asflags-y += $(avx_instr) + endif + ifeq ($(avx2_instr),) + avx2_instr := $(call as-instr,vpbroadcastb 
%xmm0$(comma)%ymm1,-DCONFIG_AS_AVX2=1) + ccflags-y += $(avx2_instr) + asflags-y += $(avx2_instr) + endif + ifeq ($(avx512_instr),) + avx512_instr := $(call as-instr,vpmovm2b %k1$(comma)%zmm5,-DCONFIG_AS_AVX512=1) + ccflags-y += $(avx512_instr) + asflags-y += $(avx512_instr) + endif +endif diff --git a/src/compat/compat.h b/src/compat/compat.h index 5f2a867..68590fa 100644 --- a/src/compat/compat.h +++ b/src/compat/compat.h @@ -28,10 +28,6 @@ #error "WireGuard requires Linux >= 3.10" #endif -#if LINUX_VERSION_CODE < KERNEL_VERSION(4, 0, 0) && defined(CONFIG_X86_64) -#define CONFIG_AS_SSSE3 -#endif - #if LINUX_VERSION_CODE < KERNEL_VERSION(3, 18, 0) && !defined(ISRHEL7) #define headers_start data #define headers_end data @@ -507,6 +503,9 @@ static inline int cpu_has_xfeatures(u64 xfeatures_needed, const char **feature_n #ifndef XFEATURE_MASK_SSE #define XFEATURE_MASK_SSE XSTATE_SSE #endif +#ifndef XFEATURE_MASK_ZMM_Hi256 +#define XFEATURE_MASK_ZMM_Hi256 XSTATE_ZMM_Hi256 +#endif #endif #endif @@ -520,6 +519,10 @@ struct _____dummy_container { char dev; }; #define from_timer(var, callback_timer, timer_fieldname) container_of(callback_timer, typeof(*var), timer_fieldname) #endif +#if LINUX_VERSION_CODE < KERNEL_VERSION(3, 14, 3) +#define COMPAT_CANNOT_USE_AVX512 +#endif + /* https://lkml.org/lkml/2017/6/23/790 */ #if IS_ENABLED(CONFIG_NF_CONNTRACK) #include diff --git a/src/crypto/blake2s-avx-x86_64.S b/src/crypto/blake2s-avx-x86_64.S deleted file mode 100644 index 6b3f8a3..0000000 --- a/src/crypto/blake2s-avx-x86_64.S +++ /dev/null @@ -1,586 +0,0 @@ -/* - * Copyright (C) 2017 Jason A. Donenfeld . All Rights Reserved. - * Based on algorithms from Samuel Neves - */ - -#include - -.section .rodata.cst32.BLAKECONST, "aM", @progbits, 32 -.align 32 -IV: .octa 0xA54FF53A3C6EF372BB67AE856A09E667 - .octa 0x5BE0CD191F83D9AB9B05688C510E527F -.section .rodata.cst16.ROT16, "aM", @progbits, 16 -.align 16 -ROT16: .octa 0x0D0C0F0E09080B0A0504070601000302 -.section .rodata.cst16.ROR328, "aM", @progbits, 16 -.align 16 -ROR328: .octa 0x0C0F0E0D080B0A090407060500030201 - -.text -ENTRY(blake2s_compress_avx) - movl %ecx, %ecx - testq %rdx, %rdx - je .Lendofloop - .align 32 -.Lbeginofloop: - addq %rcx, 32(%rdi) - vmovdqu IV+16(%rip), %xmm1 - vmovdqu (%rsi), %xmm4 - vpxor 32(%rdi), %xmm1, %xmm1 - vmovdqu 16(%rsi), %xmm3 - vshufps $136, %xmm3, %xmm4, %xmm6 - vmovdqa ROT16(%rip), %xmm7 - vpaddd (%rdi), %xmm6, %xmm6 - vpaddd 16(%rdi), %xmm6, %xmm6 - vpxor %xmm6, %xmm1, %xmm1 - vmovdqu IV(%rip), %xmm8 - vpshufb %xmm7, %xmm1, %xmm1 - vmovdqu 48(%rsi), %xmm5 - vpaddd %xmm1, %xmm8, %xmm8 - vpxor 16(%rdi), %xmm8, %xmm9 - vmovdqu 32(%rsi), %xmm2 - vpblendw $12, %xmm3, %xmm5, %xmm13 - vshufps $221, %xmm5, %xmm2, %xmm12 - vpunpckhqdq %xmm2, %xmm4, %xmm14 - vpslld $20, %xmm9, %xmm0 - vpsrld $12, %xmm9, %xmm9 - vpxor %xmm0, %xmm9, %xmm0 - vshufps $221, %xmm3, %xmm4, %xmm9 - vpaddd %xmm9, %xmm6, %xmm9 - vpaddd %xmm0, %xmm9, %xmm9 - vpxor %xmm9, %xmm1, %xmm1 - vmovdqa ROR328(%rip), %xmm6 - vpshufb %xmm6, %xmm1, %xmm1 - vpaddd %xmm1, %xmm8, %xmm8 - vpxor %xmm8, %xmm0, %xmm0 - vpshufd $147, %xmm1, %xmm1 - vpshufd $78, %xmm8, %xmm8 - vpslld $25, %xmm0, %xmm10 - vpsrld $7, %xmm0, %xmm0 - vpxor %xmm10, %xmm0, %xmm0 - vshufps $136, %xmm5, %xmm2, %xmm10 - vpshufd $57, %xmm0, %xmm0 - vpaddd %xmm10, %xmm9, %xmm9 - vpaddd %xmm0, %xmm9, %xmm9 - vpxor %xmm9, %xmm1, %xmm1 - vpaddd %xmm12, %xmm9, %xmm9 - vpblendw $12, %xmm2, %xmm3, %xmm12 - vpshufb %xmm7, %xmm1, %xmm1 - vpaddd %xmm1, %xmm8, %xmm8 - vpxor %xmm8, %xmm0, %xmm10 - vpslld $20, 
%xmm10, %xmm0 - vpsrld $12, %xmm10, %xmm10 - vpxor %xmm0, %xmm10, %xmm0 - vpaddd %xmm0, %xmm9, %xmm9 - vpxor %xmm9, %xmm1, %xmm1 - vpshufb %xmm6, %xmm1, %xmm1 - vpaddd %xmm1, %xmm8, %xmm8 - vpxor %xmm8, %xmm0, %xmm0 - vpshufd $57, %xmm1, %xmm1 - vpshufd $78, %xmm8, %xmm8 - vpslld $25, %xmm0, %xmm10 - vpsrld $7, %xmm0, %xmm0 - vpxor %xmm10, %xmm0, %xmm0 - vpslldq $4, %xmm5, %xmm10 - vpblendw $240, %xmm10, %xmm12, %xmm12 - vpshufd $147, %xmm0, %xmm0 - vpshufd $147, %xmm12, %xmm12 - vpaddd %xmm9, %xmm12, %xmm12 - vpaddd %xmm0, %xmm12, %xmm12 - vpxor %xmm12, %xmm1, %xmm1 - vpshufb %xmm7, %xmm1, %xmm1 - vpaddd %xmm1, %xmm8, %xmm8 - vpxor %xmm8, %xmm0, %xmm11 - vpslld $20, %xmm11, %xmm9 - vpsrld $12, %xmm11, %xmm11 - vpxor %xmm9, %xmm11, %xmm0 - vpshufd $8, %xmm2, %xmm9 - vpblendw $192, %xmm5, %xmm3, %xmm11 - vpblendw $240, %xmm11, %xmm9, %xmm9 - vpshufd $177, %xmm9, %xmm9 - vpaddd %xmm12, %xmm9, %xmm9 - vpaddd %xmm0, %xmm9, %xmm11 - vpxor %xmm11, %xmm1, %xmm1 - vpshufb %xmm6, %xmm1, %xmm1 - vpaddd %xmm1, %xmm8, %xmm8 - vpxor %xmm8, %xmm0, %xmm9 - vpshufd $147, %xmm1, %xmm1 - vpshufd $78, %xmm8, %xmm8 - vpslld $25, %xmm9, %xmm0 - vpsrld $7, %xmm9, %xmm9 - vpxor %xmm0, %xmm9, %xmm0 - vpslldq $4, %xmm3, %xmm9 - vpblendw $48, %xmm9, %xmm2, %xmm9 - vpblendw $240, %xmm9, %xmm4, %xmm9 - vpshufd $57, %xmm0, %xmm0 - vpshufd $177, %xmm9, %xmm9 - vpaddd %xmm11, %xmm9, %xmm9 - vpaddd %xmm0, %xmm9, %xmm9 - vpxor %xmm9, %xmm1, %xmm1 - vpshufb %xmm7, %xmm1, %xmm1 - vpaddd %xmm1, %xmm8, %xmm11 - vpxor %xmm11, %xmm0, %xmm0 - vpslld $20, %xmm0, %xmm8 - vpsrld $12, %xmm0, %xmm0 - vpxor %xmm8, %xmm0, %xmm0 - vpunpckhdq %xmm3, %xmm4, %xmm8 - vpblendw $12, %xmm10, %xmm8, %xmm12 - vpshufd $177, %xmm12, %xmm12 - vpaddd %xmm9, %xmm12, %xmm9 - vpaddd %xmm0, %xmm9, %xmm9 - vpxor %xmm9, %xmm1, %xmm1 - vpshufb %xmm6, %xmm1, %xmm1 - vpaddd %xmm1, %xmm11, %xmm11 - vpxor %xmm11, %xmm0, %xmm0 - vpshufd $57, %xmm1, %xmm1 - vpshufd $78, %xmm11, %xmm11 - vpslld $25, %xmm0, %xmm12 - vpsrld $7, %xmm0, %xmm0 - vpxor %xmm12, %xmm0, %xmm0 - vpunpckhdq %xmm5, %xmm2, %xmm12 - vpshufd $147, %xmm0, %xmm0 - vpblendw $15, %xmm13, %xmm12, %xmm12 - vpslldq $8, %xmm5, %xmm13 - vpshufd $210, %xmm12, %xmm12 - vpaddd %xmm9, %xmm12, %xmm9 - vpaddd %xmm0, %xmm9, %xmm9 - vpxor %xmm9, %xmm1, %xmm1 - vpshufb %xmm7, %xmm1, %xmm1 - vpaddd %xmm1, %xmm11, %xmm11 - vpxor %xmm11, %xmm0, %xmm0 - vpslld $20, %xmm0, %xmm12 - vpsrld $12, %xmm0, %xmm0 - vpxor %xmm12, %xmm0, %xmm0 - vpunpckldq %xmm4, %xmm2, %xmm12 - vpblendw $240, %xmm4, %xmm12, %xmm12 - vpblendw $192, %xmm13, %xmm12, %xmm12 - vpsrldq $12, %xmm3, %xmm13 - vpaddd %xmm12, %xmm9, %xmm9 - vpaddd %xmm0, %xmm9, %xmm9 - vpxor %xmm9, %xmm1, %xmm1 - vpshufb %xmm6, %xmm1, %xmm1 - vpaddd %xmm1, %xmm11, %xmm11 - vpxor %xmm11, %xmm0, %xmm0 - vpshufd $147, %xmm1, %xmm1 - vpshufd $78, %xmm11, %xmm11 - vpslld $25, %xmm0, %xmm12 - vpsrld $7, %xmm0, %xmm0 - vpxor %xmm12, %xmm0, %xmm0 - vpblendw $60, %xmm2, %xmm4, %xmm12 - vpblendw $3, %xmm13, %xmm12, %xmm12 - vpshufd $57, %xmm0, %xmm0 - vpshufd $78, %xmm12, %xmm12 - vpaddd %xmm9, %xmm12, %xmm9 - vpaddd %xmm0, %xmm9, %xmm9 - vpxor %xmm9, %xmm1, %xmm1 - vpshufb %xmm7, %xmm1, %xmm1 - vpaddd %xmm1, %xmm11, %xmm11 - vpxor %xmm11, %xmm0, %xmm12 - vpslld $20, %xmm12, %xmm13 - vpsrld $12, %xmm12, %xmm0 - vpblendw $51, %xmm3, %xmm4, %xmm12 - vpxor %xmm13, %xmm0, %xmm0 - vpblendw $192, %xmm10, %xmm12, %xmm10 - vpslldq $8, %xmm2, %xmm12 - vpshufd $27, %xmm10, %xmm10 - vpaddd %xmm9, %xmm10, %xmm9 - vpaddd %xmm0, %xmm9, %xmm9 - vpxor %xmm9, %xmm1, %xmm1 - vpshufb %xmm6, %xmm1, 
%xmm1 - vpaddd %xmm1, %xmm11, %xmm11 - vpxor %xmm11, %xmm0, %xmm0 - vpshufd $57, %xmm1, %xmm1 - vpshufd $78, %xmm11, %xmm11 - vpslld $25, %xmm0, %xmm10 - vpsrld $7, %xmm0, %xmm0 - vpxor %xmm10, %xmm0, %xmm0 - vpunpckhdq %xmm2, %xmm8, %xmm10 - vpshufd $147, %xmm0, %xmm0 - vpblendw $12, %xmm5, %xmm10, %xmm10 - vpshufd $210, %xmm10, %xmm10 - vpaddd %xmm9, %xmm10, %xmm9 - vpaddd %xmm0, %xmm9, %xmm9 - vpxor %xmm9, %xmm1, %xmm1 - vpshufb %xmm7, %xmm1, %xmm1 - vpaddd %xmm1, %xmm11, %xmm11 - vpxor %xmm11, %xmm0, %xmm10 - vpslld $20, %xmm10, %xmm0 - vpsrld $12, %xmm10, %xmm10 - vpxor %xmm0, %xmm10, %xmm0 - vpblendw $12, %xmm4, %xmm5, %xmm10 - vpblendw $192, %xmm12, %xmm10, %xmm10 - vpunpckldq %xmm2, %xmm4, %xmm12 - vpshufd $135, %xmm10, %xmm10 - vpaddd %xmm9, %xmm10, %xmm9 - vpaddd %xmm0, %xmm9, %xmm9 - vpxor %xmm9, %xmm1, %xmm1 - vpshufb %xmm6, %xmm1, %xmm1 - vpaddd %xmm1, %xmm11, %xmm13 - vpxor %xmm13, %xmm0, %xmm0 - vpshufd $147, %xmm1, %xmm1 - vpshufd $78, %xmm13, %xmm13 - vpslld $25, %xmm0, %xmm10 - vpsrld $7, %xmm0, %xmm0 - vpxor %xmm10, %xmm0, %xmm0 - vpblendw $15, %xmm3, %xmm4, %xmm10 - vpblendw $192, %xmm5, %xmm10, %xmm10 - vpshufd $57, %xmm0, %xmm0 - vpshufd $198, %xmm10, %xmm10 - vpaddd %xmm9, %xmm10, %xmm10 - vpaddd %xmm0, %xmm10, %xmm10 - vpxor %xmm10, %xmm1, %xmm1 - vpshufb %xmm7, %xmm1, %xmm1 - vpaddd %xmm1, %xmm13, %xmm13 - vpxor %xmm13, %xmm0, %xmm9 - vpslld $20, %xmm9, %xmm0 - vpsrld $12, %xmm9, %xmm9 - vpxor %xmm0, %xmm9, %xmm0 - vpunpckhdq %xmm2, %xmm3, %xmm9 - vpunpcklqdq %xmm12, %xmm9, %xmm15 - vpunpcklqdq %xmm12, %xmm8, %xmm12 - vpblendw $15, %xmm5, %xmm8, %xmm8 - vpaddd %xmm15, %xmm10, %xmm15 - vpaddd %xmm0, %xmm15, %xmm15 - vpxor %xmm15, %xmm1, %xmm1 - vpshufd $141, %xmm8, %xmm8 - vpshufb %xmm6, %xmm1, %xmm1 - vpaddd %xmm1, %xmm13, %xmm13 - vpxor %xmm13, %xmm0, %xmm0 - vpshufd $57, %xmm1, %xmm1 - vpshufd $78, %xmm13, %xmm13 - vpslld $25, %xmm0, %xmm10 - vpsrld $7, %xmm0, %xmm0 - vpxor %xmm10, %xmm0, %xmm0 - vpunpcklqdq %xmm2, %xmm3, %xmm10 - vpshufd $147, %xmm0, %xmm0 - vpblendw $51, %xmm14, %xmm10, %xmm14 - vpshufd $135, %xmm14, %xmm14 - vpaddd %xmm15, %xmm14, %xmm14 - vpaddd %xmm0, %xmm14, %xmm14 - vpxor %xmm14, %xmm1, %xmm1 - vpunpcklqdq %xmm3, %xmm4, %xmm15 - vpshufb %xmm7, %xmm1, %xmm1 - vpaddd %xmm1, %xmm13, %xmm13 - vpxor %xmm13, %xmm0, %xmm0 - vpslld $20, %xmm0, %xmm11 - vpsrld $12, %xmm0, %xmm0 - vpxor %xmm11, %xmm0, %xmm0 - vpunpckhqdq %xmm5, %xmm3, %xmm11 - vpblendw $51, %xmm15, %xmm11, %xmm11 - vpunpckhqdq %xmm3, %xmm5, %xmm15 - vpaddd %xmm11, %xmm14, %xmm11 - vpaddd %xmm0, %xmm11, %xmm11 - vpxor %xmm11, %xmm1, %xmm1 - vpshufb %xmm6, %xmm1, %xmm1 - vpaddd %xmm1, %xmm13, %xmm13 - vpxor %xmm13, %xmm0, %xmm0 - vpshufd $147, %xmm1, %xmm1 - vpshufd $78, %xmm13, %xmm13 - vpslld $25, %xmm0, %xmm14 - vpsrld $7, %xmm0, %xmm0 - vpxor %xmm14, %xmm0, %xmm14 - vpunpckhqdq %xmm4, %xmm2, %xmm0 - vpshufd $57, %xmm14, %xmm14 - vpblendw $51, %xmm15, %xmm0, %xmm15 - vpaddd %xmm15, %xmm11, %xmm15 - vpaddd %xmm14, %xmm15, %xmm15 - vpxor %xmm15, %xmm1, %xmm1 - vpshufb %xmm7, %xmm1, %xmm1 - vpaddd %xmm1, %xmm13, %xmm13 - vpxor %xmm13, %xmm14, %xmm14 - vpslld $20, %xmm14, %xmm11 - vpsrld $12, %xmm14, %xmm14 - vpxor %xmm11, %xmm14, %xmm14 - vpblendw $3, %xmm2, %xmm4, %xmm11 - vpslldq $8, %xmm11, %xmm0 - vpblendw $15, %xmm5, %xmm0, %xmm0 - vpshufd $99, %xmm0, %xmm0 - vpaddd %xmm15, %xmm0, %xmm15 - vpaddd %xmm14, %xmm15, %xmm15 - vpxor %xmm15, %xmm1, %xmm0 - vpaddd %xmm12, %xmm15, %xmm15 - vpshufb %xmm6, %xmm0, %xmm0 - vpaddd %xmm0, %xmm13, %xmm13 - vpxor %xmm13, %xmm14, %xmm14 - vpshufd 
$57, %xmm0, %xmm0 - vpshufd $78, %xmm13, %xmm13 - vpslld $25, %xmm14, %xmm1 - vpsrld $7, %xmm14, %xmm14 - vpxor %xmm1, %xmm14, %xmm14 - vpblendw $3, %xmm5, %xmm4, %xmm1 - vpshufd $147, %xmm14, %xmm14 - vpaddd %xmm14, %xmm15, %xmm15 - vpxor %xmm15, %xmm0, %xmm0 - vpshufb %xmm7, %xmm0, %xmm0 - vpaddd %xmm0, %xmm13, %xmm13 - vpxor %xmm13, %xmm14, %xmm14 - vpslld $20, %xmm14, %xmm12 - vpsrld $12, %xmm14, %xmm14 - vpxor %xmm12, %xmm14, %xmm14 - vpsrldq $4, %xmm2, %xmm12 - vpblendw $60, %xmm12, %xmm1, %xmm1 - vpaddd %xmm1, %xmm15, %xmm15 - vpaddd %xmm14, %xmm15, %xmm15 - vpxor %xmm15, %xmm0, %xmm0 - vpblendw $12, %xmm4, %xmm3, %xmm1 - vpshufb %xmm6, %xmm0, %xmm0 - vpaddd %xmm0, %xmm13, %xmm13 - vpxor %xmm13, %xmm14, %xmm14 - vpshufd $147, %xmm0, %xmm0 - vpshufd $78, %xmm13, %xmm13 - vpslld $25, %xmm14, %xmm12 - vpsrld $7, %xmm14, %xmm14 - vpxor %xmm12, %xmm14, %xmm14 - vpsrldq $4, %xmm5, %xmm12 - vpblendw $48, %xmm12, %xmm1, %xmm1 - vpshufd $33, %xmm5, %xmm12 - vpshufd $57, %xmm14, %xmm14 - vpshufd $108, %xmm1, %xmm1 - vpblendw $51, %xmm12, %xmm10, %xmm12 - vpaddd %xmm15, %xmm1, %xmm15 - vpaddd %xmm14, %xmm15, %xmm15 - vpxor %xmm15, %xmm0, %xmm0 - vpaddd %xmm12, %xmm15, %xmm15 - vpshufb %xmm7, %xmm0, %xmm0 - vpaddd %xmm0, %xmm13, %xmm1 - vpxor %xmm1, %xmm14, %xmm14 - vpslld $20, %xmm14, %xmm13 - vpsrld $12, %xmm14, %xmm14 - vpxor %xmm13, %xmm14, %xmm14 - vpslldq $12, %xmm3, %xmm13 - vpaddd %xmm14, %xmm15, %xmm15 - vpxor %xmm15, %xmm0, %xmm0 - vpshufb %xmm6, %xmm0, %xmm0 - vpaddd %xmm0, %xmm1, %xmm1 - vpxor %xmm1, %xmm14, %xmm14 - vpshufd $57, %xmm0, %xmm0 - vpshufd $78, %xmm1, %xmm1 - vpslld $25, %xmm14, %xmm12 - vpsrld $7, %xmm14, %xmm14 - vpxor %xmm12, %xmm14, %xmm14 - vpblendw $51, %xmm5, %xmm4, %xmm12 - vpshufd $147, %xmm14, %xmm14 - vpblendw $192, %xmm13, %xmm12, %xmm12 - vpaddd %xmm12, %xmm15, %xmm15 - vpaddd %xmm14, %xmm15, %xmm15 - vpxor %xmm15, %xmm0, %xmm0 - vpsrldq $4, %xmm3, %xmm12 - vpshufb %xmm7, %xmm0, %xmm0 - vpaddd %xmm0, %xmm1, %xmm1 - vpxor %xmm1, %xmm14, %xmm14 - vpslld $20, %xmm14, %xmm13 - vpsrld $12, %xmm14, %xmm14 - vpxor %xmm13, %xmm14, %xmm14 - vpblendw $48, %xmm2, %xmm5, %xmm13 - vpblendw $3, %xmm12, %xmm13, %xmm13 - vpshufd $156, %xmm13, %xmm13 - vpaddd %xmm15, %xmm13, %xmm15 - vpaddd %xmm14, %xmm15, %xmm15 - vpxor %xmm15, %xmm0, %xmm0 - vpshufb %xmm6, %xmm0, %xmm0 - vpaddd %xmm0, %xmm1, %xmm1 - vpxor %xmm1, %xmm14, %xmm14 - vpshufd $147, %xmm0, %xmm0 - vpshufd $78, %xmm1, %xmm1 - vpslld $25, %xmm14, %xmm13 - vpsrld $7, %xmm14, %xmm14 - vpxor %xmm13, %xmm14, %xmm14 - vpunpcklqdq %xmm2, %xmm4, %xmm13 - vpshufd $57, %xmm14, %xmm14 - vpblendw $12, %xmm12, %xmm13, %xmm12 - vpshufd $180, %xmm12, %xmm12 - vpaddd %xmm15, %xmm12, %xmm15 - vpaddd %xmm14, %xmm15, %xmm15 - vpxor %xmm15, %xmm0, %xmm0 - vpshufb %xmm7, %xmm0, %xmm0 - vpaddd %xmm0, %xmm1, %xmm1 - vpxor %xmm1, %xmm14, %xmm14 - vpslld $20, %xmm14, %xmm12 - vpsrld $12, %xmm14, %xmm14 - vpxor %xmm12, %xmm14, %xmm14 - vpunpckhqdq %xmm9, %xmm4, %xmm12 - vpshufd $198, %xmm12, %xmm12 - vpaddd %xmm15, %xmm12, %xmm15 - vpaddd %xmm14, %xmm15, %xmm15 - vpxor %xmm15, %xmm0, %xmm0 - vpaddd %xmm15, %xmm8, %xmm15 - vpshufb %xmm6, %xmm0, %xmm0 - vpaddd %xmm0, %xmm1, %xmm1 - vpxor %xmm1, %xmm14, %xmm14 - vpshufd $57, %xmm0, %xmm0 - vpshufd $78, %xmm1, %xmm1 - vpslld $25, %xmm14, %xmm12 - vpsrld $7, %xmm14, %xmm14 - vpxor %xmm12, %xmm14, %xmm14 - vpsrldq $4, %xmm4, %xmm12 - vpshufd $147, %xmm14, %xmm14 - vpaddd %xmm14, %xmm15, %xmm15 - vpxor %xmm15, %xmm0, %xmm0 - vpshufb %xmm7, %xmm0, %xmm0 - vpaddd %xmm0, %xmm1, %xmm1 - vpxor %xmm1, 
%xmm14, %xmm14 - vpslld $20, %xmm14, %xmm8 - vpsrld $12, %xmm14, %xmm14 - vpxor %xmm14, %xmm8, %xmm14 - vpblendw $48, %xmm5, %xmm2, %xmm8 - vpblendw $3, %xmm12, %xmm8, %xmm8 - vpunpckhqdq %xmm5, %xmm4, %xmm12 - vpshufd $75, %xmm8, %xmm8 - vpblendw $60, %xmm10, %xmm12, %xmm10 - vpaddd %xmm15, %xmm8, %xmm15 - vpaddd %xmm14, %xmm15, %xmm15 - vpxor %xmm0, %xmm15, %xmm0 - vpshufd $45, %xmm10, %xmm10 - vpshufb %xmm6, %xmm0, %xmm0 - vpaddd %xmm15, %xmm10, %xmm15 - vpaddd %xmm0, %xmm1, %xmm1 - vpxor %xmm1, %xmm14, %xmm14 - vpshufd $147, %xmm0, %xmm0 - vpshufd $78, %xmm1, %xmm1 - vpslld $25, %xmm14, %xmm8 - vpsrld $7, %xmm14, %xmm14 - vpxor %xmm14, %xmm8, %xmm8 - vpshufd $57, %xmm8, %xmm8 - vpaddd %xmm8, %xmm15, %xmm15 - vpxor %xmm0, %xmm15, %xmm0 - vpshufb %xmm7, %xmm0, %xmm0 - vpaddd %xmm0, %xmm1, %xmm1 - vpxor %xmm8, %xmm1, %xmm8 - vpslld $20, %xmm8, %xmm10 - vpsrld $12, %xmm8, %xmm8 - vpxor %xmm8, %xmm10, %xmm10 - vpunpckldq %xmm3, %xmm4, %xmm8 - vpunpcklqdq %xmm9, %xmm8, %xmm9 - vpaddd %xmm9, %xmm15, %xmm9 - vpaddd %xmm10, %xmm9, %xmm9 - vpxor %xmm0, %xmm9, %xmm8 - vpshufb %xmm6, %xmm8, %xmm8 - vpaddd %xmm8, %xmm1, %xmm1 - vpxor %xmm1, %xmm10, %xmm10 - vpshufd $57, %xmm8, %xmm8 - vpshufd $78, %xmm1, %xmm1 - vpslld $25, %xmm10, %xmm12 - vpsrld $7, %xmm10, %xmm10 - vpxor %xmm10, %xmm12, %xmm10 - vpblendw $48, %xmm4, %xmm3, %xmm12 - vpshufd $147, %xmm10, %xmm0 - vpunpckhdq %xmm5, %xmm3, %xmm10 - vpshufd $78, %xmm12, %xmm12 - vpunpcklqdq %xmm4, %xmm10, %xmm10 - vpblendw $192, %xmm2, %xmm10, %xmm10 - vpshufhw $78, %xmm10, %xmm10 - vpaddd %xmm10, %xmm9, %xmm10 - vpaddd %xmm0, %xmm10, %xmm10 - vpxor %xmm8, %xmm10, %xmm8 - vpshufb %xmm7, %xmm8, %xmm8 - vpaddd %xmm8, %xmm1, %xmm1 - vpxor %xmm0, %xmm1, %xmm9 - vpslld $20, %xmm9, %xmm0 - vpsrld $12, %xmm9, %xmm9 - vpxor %xmm9, %xmm0, %xmm0 - vpunpckhdq %xmm5, %xmm4, %xmm9 - vpblendw $240, %xmm9, %xmm2, %xmm13 - vpshufd $39, %xmm13, %xmm13 - vpaddd %xmm10, %xmm13, %xmm10 - vpaddd %xmm0, %xmm10, %xmm10 - vpxor %xmm8, %xmm10, %xmm8 - vpblendw $12, %xmm4, %xmm2, %xmm13 - vpshufb %xmm6, %xmm8, %xmm8 - vpslldq $4, %xmm13, %xmm13 - vpblendw $15, %xmm5, %xmm13, %xmm13 - vpaddd %xmm8, %xmm1, %xmm1 - vpxor %xmm1, %xmm0, %xmm0 - vpaddd %xmm13, %xmm10, %xmm13 - vpshufd $147, %xmm8, %xmm8 - vpshufd $78, %xmm1, %xmm1 - vpslld $25, %xmm0, %xmm14 - vpsrld $7, %xmm0, %xmm0 - vpxor %xmm0, %xmm14, %xmm14 - vpshufd $57, %xmm14, %xmm14 - vpaddd %xmm14, %xmm13, %xmm13 - vpxor %xmm8, %xmm13, %xmm8 - vpaddd %xmm13, %xmm12, %xmm12 - vpshufb %xmm7, %xmm8, %xmm8 - vpaddd %xmm8, %xmm1, %xmm1 - vpxor %xmm14, %xmm1, %xmm14 - vpslld $20, %xmm14, %xmm10 - vpsrld $12, %xmm14, %xmm14 - vpxor %xmm14, %xmm10, %xmm10 - vpaddd %xmm10, %xmm12, %xmm12 - vpxor %xmm8, %xmm12, %xmm8 - vpshufb %xmm6, %xmm8, %xmm8 - vpaddd %xmm8, %xmm1, %xmm1 - vpxor %xmm1, %xmm10, %xmm0 - vpshufd $57, %xmm8, %xmm8 - vpshufd $78, %xmm1, %xmm1 - vpslld $25, %xmm0, %xmm10 - vpsrld $7, %xmm0, %xmm0 - vpxor %xmm0, %xmm10, %xmm10 - vpblendw $48, %xmm2, %xmm3, %xmm0 - vpblendw $15, %xmm11, %xmm0, %xmm0 - vpshufd $147, %xmm10, %xmm10 - vpshufd $114, %xmm0, %xmm0 - vpaddd %xmm12, %xmm0, %xmm0 - vpaddd %xmm10, %xmm0, %xmm0 - vpxor %xmm8, %xmm0, %xmm8 - vpshufb %xmm7, %xmm8, %xmm8 - vpaddd %xmm8, %xmm1, %xmm1 - vpxor %xmm10, %xmm1, %xmm10 - vpslld $20, %xmm10, %xmm11 - vpsrld $12, %xmm10, %xmm10 - vpxor %xmm10, %xmm11, %xmm10 - vpslldq $4, %xmm4, %xmm11 - vpblendw $192, %xmm11, %xmm3, %xmm3 - vpunpckldq %xmm5, %xmm4, %xmm4 - vpshufd $99, %xmm3, %xmm3 - vpaddd %xmm0, %xmm3, %xmm3 - vpaddd %xmm10, %xmm3, %xmm3 - vpxor %xmm8, 
%xmm3, %xmm11 - vpunpckldq %xmm5, %xmm2, %xmm0 - vpblendw $192, %xmm2, %xmm5, %xmm2 - vpshufb %xmm6, %xmm11, %xmm11 - vpunpckhqdq %xmm0, %xmm9, %xmm0 - vpblendw $15, %xmm4, %xmm2, %xmm4 - vpaddd %xmm11, %xmm1, %xmm1 - vpxor %xmm1, %xmm10, %xmm10 - vpshufd $147, %xmm11, %xmm11 - vpshufd $201, %xmm0, %xmm0 - vpslld $25, %xmm10, %xmm8 - vpsrld $7, %xmm10, %xmm10 - vpxor %xmm10, %xmm8, %xmm10 - vpshufd $78, %xmm1, %xmm1 - vpaddd %xmm3, %xmm0, %xmm0 - vpshufd $27, %xmm4, %xmm4 - vpshufd $57, %xmm10, %xmm10 - vpaddd %xmm10, %xmm0, %xmm0 - vpxor %xmm11, %xmm0, %xmm11 - vpaddd %xmm0, %xmm4, %xmm0 - vpshufb %xmm7, %xmm11, %xmm7 - vpaddd %xmm7, %xmm1, %xmm1 - vpxor %xmm10, %xmm1, %xmm10 - vpslld $20, %xmm10, %xmm8 - vpsrld $12, %xmm10, %xmm10 - vpxor %xmm10, %xmm8, %xmm8 - vpaddd %xmm8, %xmm0, %xmm0 - vpxor %xmm7, %xmm0, %xmm7 - vpshufb %xmm6, %xmm7, %xmm6 - vpaddd %xmm6, %xmm1, %xmm1 - vpxor %xmm1, %xmm8, %xmm8 - vpshufd $78, %xmm1, %xmm1 - vpshufd $57, %xmm6, %xmm6 - vpslld $25, %xmm8, %xmm2 - vpsrld $7, %xmm8, %xmm8 - vpxor %xmm8, %xmm2, %xmm8 - vpxor (%rdi), %xmm1, %xmm1 - vpshufd $147, %xmm8, %xmm8 - vpxor %xmm0, %xmm1, %xmm0 - vmovups %xmm0, (%rdi) - vpxor 16(%rdi), %xmm8, %xmm0 - vpxor %xmm6, %xmm0, %xmm6 - vmovups %xmm6, 16(%rdi) - addq $64, %rsi - decq %rdx - jnz .Lbeginofloop -.Lendofloop: - ret -ENDPROC(blake2s_compress_avx) diff --git a/src/crypto/blake2s-x86_64.S b/src/crypto/blake2s-x86_64.S new file mode 100644 index 0000000..e86afd3 --- /dev/null +++ b/src/crypto/blake2s-x86_64.S @@ -0,0 +1,588 @@ +/* + * Copyright (C) 2017 Jason A. Donenfeld . All Rights Reserved. + * Based on algorithms from Samuel Neves + */ + +#include + +.section .rodata.cst32.BLAKECONST, "aM", @progbits, 32 +.align 32 +IV: .octa 0xA54FF53A3C6EF372BB67AE856A09E667 + .octa 0x5BE0CD191F83D9AB9B05688C510E527F +.section .rodata.cst16.ROT16, "aM", @progbits, 16 +.align 16 +ROT16: .octa 0x0D0C0F0E09080B0A0504070601000302 +.section .rodata.cst16.ROR328, "aM", @progbits, 16 +.align 16 +ROR328: .octa 0x0C0F0E0D080B0A090407060500030201 + +.text +#ifdef CONFIG_AS_AVX +ENTRY(blake2s_compress_avx) + movl %ecx, %ecx + testq %rdx, %rdx + je .Lendofloop + .align 32 +.Lbeginofloop: + addq %rcx, 32(%rdi) + vmovdqu IV+16(%rip), %xmm1 + vmovdqu (%rsi), %xmm4 + vpxor 32(%rdi), %xmm1, %xmm1 + vmovdqu 16(%rsi), %xmm3 + vshufps $136, %xmm3, %xmm4, %xmm6 + vmovdqa ROT16(%rip), %xmm7 + vpaddd (%rdi), %xmm6, %xmm6 + vpaddd 16(%rdi), %xmm6, %xmm6 + vpxor %xmm6, %xmm1, %xmm1 + vmovdqu IV(%rip), %xmm8 + vpshufb %xmm7, %xmm1, %xmm1 + vmovdqu 48(%rsi), %xmm5 + vpaddd %xmm1, %xmm8, %xmm8 + vpxor 16(%rdi), %xmm8, %xmm9 + vmovdqu 32(%rsi), %xmm2 + vpblendw $12, %xmm3, %xmm5, %xmm13 + vshufps $221, %xmm5, %xmm2, %xmm12 + vpunpckhqdq %xmm2, %xmm4, %xmm14 + vpslld $20, %xmm9, %xmm0 + vpsrld $12, %xmm9, %xmm9 + vpxor %xmm0, %xmm9, %xmm0 + vshufps $221, %xmm3, %xmm4, %xmm9 + vpaddd %xmm9, %xmm6, %xmm9 + vpaddd %xmm0, %xmm9, %xmm9 + vpxor %xmm9, %xmm1, %xmm1 + vmovdqa ROR328(%rip), %xmm6 + vpshufb %xmm6, %xmm1, %xmm1 + vpaddd %xmm1, %xmm8, %xmm8 + vpxor %xmm8, %xmm0, %xmm0 + vpshufd $147, %xmm1, %xmm1 + vpshufd $78, %xmm8, %xmm8 + vpslld $25, %xmm0, %xmm10 + vpsrld $7, %xmm0, %xmm0 + vpxor %xmm10, %xmm0, %xmm0 + vshufps $136, %xmm5, %xmm2, %xmm10 + vpshufd $57, %xmm0, %xmm0 + vpaddd %xmm10, %xmm9, %xmm9 + vpaddd %xmm0, %xmm9, %xmm9 + vpxor %xmm9, %xmm1, %xmm1 + vpaddd %xmm12, %xmm9, %xmm9 + vpblendw $12, %xmm2, %xmm3, %xmm12 + vpshufb %xmm7, %xmm1, %xmm1 + vpaddd %xmm1, %xmm8, %xmm8 + vpxor %xmm8, %xmm0, %xmm10 + vpslld $20, %xmm10, %xmm0 + vpsrld $12, 
%xmm10, %xmm10 + vpxor %xmm0, %xmm10, %xmm0 + vpaddd %xmm0, %xmm9, %xmm9 + vpxor %xmm9, %xmm1, %xmm1 + vpshufb %xmm6, %xmm1, %xmm1 + vpaddd %xmm1, %xmm8, %xmm8 + vpxor %xmm8, %xmm0, %xmm0 + vpshufd $57, %xmm1, %xmm1 + vpshufd $78, %xmm8, %xmm8 + vpslld $25, %xmm0, %xmm10 + vpsrld $7, %xmm0, %xmm0 + vpxor %xmm10, %xmm0, %xmm0 + vpslldq $4, %xmm5, %xmm10 + vpblendw $240, %xmm10, %xmm12, %xmm12 + vpshufd $147, %xmm0, %xmm0 + vpshufd $147, %xmm12, %xmm12 + vpaddd %xmm9, %xmm12, %xmm12 + vpaddd %xmm0, %xmm12, %xmm12 + vpxor %xmm12, %xmm1, %xmm1 + vpshufb %xmm7, %xmm1, %xmm1 + vpaddd %xmm1, %xmm8, %xmm8 + vpxor %xmm8, %xmm0, %xmm11 + vpslld $20, %xmm11, %xmm9 + vpsrld $12, %xmm11, %xmm11 + vpxor %xmm9, %xmm11, %xmm0 + vpshufd $8, %xmm2, %xmm9 + vpblendw $192, %xmm5, %xmm3, %xmm11 + vpblendw $240, %xmm11, %xmm9, %xmm9 + vpshufd $177, %xmm9, %xmm9 + vpaddd %xmm12, %xmm9, %xmm9 + vpaddd %xmm0, %xmm9, %xmm11 + vpxor %xmm11, %xmm1, %xmm1 + vpshufb %xmm6, %xmm1, %xmm1 + vpaddd %xmm1, %xmm8, %xmm8 + vpxor %xmm8, %xmm0, %xmm9 + vpshufd $147, %xmm1, %xmm1 + vpshufd $78, %xmm8, %xmm8 + vpslld $25, %xmm9, %xmm0 + vpsrld $7, %xmm9, %xmm9 + vpxor %xmm0, %xmm9, %xmm0 + vpslldq $4, %xmm3, %xmm9 + vpblendw $48, %xmm9, %xmm2, %xmm9 + vpblendw $240, %xmm9, %xmm4, %xmm9 + vpshufd $57, %xmm0, %xmm0 + vpshufd $177, %xmm9, %xmm9 + vpaddd %xmm11, %xmm9, %xmm9 + vpaddd %xmm0, %xmm9, %xmm9 + vpxor %xmm9, %xmm1, %xmm1 + vpshufb %xmm7, %xmm1, %xmm1 + vpaddd %xmm1, %xmm8, %xmm11 + vpxor %xmm11, %xmm0, %xmm0 + vpslld $20, %xmm0, %xmm8 + vpsrld $12, %xmm0, %xmm0 + vpxor %xmm8, %xmm0, %xmm0 + vpunpckhdq %xmm3, %xmm4, %xmm8 + vpblendw $12, %xmm10, %xmm8, %xmm12 + vpshufd $177, %xmm12, %xmm12 + vpaddd %xmm9, %xmm12, %xmm9 + vpaddd %xmm0, %xmm9, %xmm9 + vpxor %xmm9, %xmm1, %xmm1 + vpshufb %xmm6, %xmm1, %xmm1 + vpaddd %xmm1, %xmm11, %xmm11 + vpxor %xmm11, %xmm0, %xmm0 + vpshufd $57, %xmm1, %xmm1 + vpshufd $78, %xmm11, %xmm11 + vpslld $25, %xmm0, %xmm12 + vpsrld $7, %xmm0, %xmm0 + vpxor %xmm12, %xmm0, %xmm0 + vpunpckhdq %xmm5, %xmm2, %xmm12 + vpshufd $147, %xmm0, %xmm0 + vpblendw $15, %xmm13, %xmm12, %xmm12 + vpslldq $8, %xmm5, %xmm13 + vpshufd $210, %xmm12, %xmm12 + vpaddd %xmm9, %xmm12, %xmm9 + vpaddd %xmm0, %xmm9, %xmm9 + vpxor %xmm9, %xmm1, %xmm1 + vpshufb %xmm7, %xmm1, %xmm1 + vpaddd %xmm1, %xmm11, %xmm11 + vpxor %xmm11, %xmm0, %xmm0 + vpslld $20, %xmm0, %xmm12 + vpsrld $12, %xmm0, %xmm0 + vpxor %xmm12, %xmm0, %xmm0 + vpunpckldq %xmm4, %xmm2, %xmm12 + vpblendw $240, %xmm4, %xmm12, %xmm12 + vpblendw $192, %xmm13, %xmm12, %xmm12 + vpsrldq $12, %xmm3, %xmm13 + vpaddd %xmm12, %xmm9, %xmm9 + vpaddd %xmm0, %xmm9, %xmm9 + vpxor %xmm9, %xmm1, %xmm1 + vpshufb %xmm6, %xmm1, %xmm1 + vpaddd %xmm1, %xmm11, %xmm11 + vpxor %xmm11, %xmm0, %xmm0 + vpshufd $147, %xmm1, %xmm1 + vpshufd $78, %xmm11, %xmm11 + vpslld $25, %xmm0, %xmm12 + vpsrld $7, %xmm0, %xmm0 + vpxor %xmm12, %xmm0, %xmm0 + vpblendw $60, %xmm2, %xmm4, %xmm12 + vpblendw $3, %xmm13, %xmm12, %xmm12 + vpshufd $57, %xmm0, %xmm0 + vpshufd $78, %xmm12, %xmm12 + vpaddd %xmm9, %xmm12, %xmm9 + vpaddd %xmm0, %xmm9, %xmm9 + vpxor %xmm9, %xmm1, %xmm1 + vpshufb %xmm7, %xmm1, %xmm1 + vpaddd %xmm1, %xmm11, %xmm11 + vpxor %xmm11, %xmm0, %xmm12 + vpslld $20, %xmm12, %xmm13 + vpsrld $12, %xmm12, %xmm0 + vpblendw $51, %xmm3, %xmm4, %xmm12 + vpxor %xmm13, %xmm0, %xmm0 + vpblendw $192, %xmm10, %xmm12, %xmm10 + vpslldq $8, %xmm2, %xmm12 + vpshufd $27, %xmm10, %xmm10 + vpaddd %xmm9, %xmm10, %xmm9 + vpaddd %xmm0, %xmm9, %xmm9 + vpxor %xmm9, %xmm1, %xmm1 + vpshufb %xmm6, %xmm1, %xmm1 + vpaddd %xmm1, 
%xmm11, %xmm11 + vpxor %xmm11, %xmm0, %xmm0 + vpshufd $57, %xmm1, %xmm1 + vpshufd $78, %xmm11, %xmm11 + vpslld $25, %xmm0, %xmm10 + vpsrld $7, %xmm0, %xmm0 + vpxor %xmm10, %xmm0, %xmm0 + vpunpckhdq %xmm2, %xmm8, %xmm10 + vpshufd $147, %xmm0, %xmm0 + vpblendw $12, %xmm5, %xmm10, %xmm10 + vpshufd $210, %xmm10, %xmm10 + vpaddd %xmm9, %xmm10, %xmm9 + vpaddd %xmm0, %xmm9, %xmm9 + vpxor %xmm9, %xmm1, %xmm1 + vpshufb %xmm7, %xmm1, %xmm1 + vpaddd %xmm1, %xmm11, %xmm11 + vpxor %xmm11, %xmm0, %xmm10 + vpslld $20, %xmm10, %xmm0 + vpsrld $12, %xmm10, %xmm10 + vpxor %xmm0, %xmm10, %xmm0 + vpblendw $12, %xmm4, %xmm5, %xmm10 + vpblendw $192, %xmm12, %xmm10, %xmm10 + vpunpckldq %xmm2, %xmm4, %xmm12 + vpshufd $135, %xmm10, %xmm10 + vpaddd %xmm9, %xmm10, %xmm9 + vpaddd %xmm0, %xmm9, %xmm9 + vpxor %xmm9, %xmm1, %xmm1 + vpshufb %xmm6, %xmm1, %xmm1 + vpaddd %xmm1, %xmm11, %xmm13 + vpxor %xmm13, %xmm0, %xmm0 + vpshufd $147, %xmm1, %xmm1 + vpshufd $78, %xmm13, %xmm13 + vpslld $25, %xmm0, %xmm10 + vpsrld $7, %xmm0, %xmm0 + vpxor %xmm10, %xmm0, %xmm0 + vpblendw $15, %xmm3, %xmm4, %xmm10 + vpblendw $192, %xmm5, %xmm10, %xmm10 + vpshufd $57, %xmm0, %xmm0 + vpshufd $198, %xmm10, %xmm10 + vpaddd %xmm9, %xmm10, %xmm10 + vpaddd %xmm0, %xmm10, %xmm10 + vpxor %xmm10, %xmm1, %xmm1 + vpshufb %xmm7, %xmm1, %xmm1 + vpaddd %xmm1, %xmm13, %xmm13 + vpxor %xmm13, %xmm0, %xmm9 + vpslld $20, %xmm9, %xmm0 + vpsrld $12, %xmm9, %xmm9 + vpxor %xmm0, %xmm9, %xmm0 + vpunpckhdq %xmm2, %xmm3, %xmm9 + vpunpcklqdq %xmm12, %xmm9, %xmm15 + vpunpcklqdq %xmm12, %xmm8, %xmm12 + vpblendw $15, %xmm5, %xmm8, %xmm8 + vpaddd %xmm15, %xmm10, %xmm15 + vpaddd %xmm0, %xmm15, %xmm15 + vpxor %xmm15, %xmm1, %xmm1 + vpshufd $141, %xmm8, %xmm8 + vpshufb %xmm6, %xmm1, %xmm1 + vpaddd %xmm1, %xmm13, %xmm13 + vpxor %xmm13, %xmm0, %xmm0 + vpshufd $57, %xmm1, %xmm1 + vpshufd $78, %xmm13, %xmm13 + vpslld $25, %xmm0, %xmm10 + vpsrld $7, %xmm0, %xmm0 + vpxor %xmm10, %xmm0, %xmm0 + vpunpcklqdq %xmm2, %xmm3, %xmm10 + vpshufd $147, %xmm0, %xmm0 + vpblendw $51, %xmm14, %xmm10, %xmm14 + vpshufd $135, %xmm14, %xmm14 + vpaddd %xmm15, %xmm14, %xmm14 + vpaddd %xmm0, %xmm14, %xmm14 + vpxor %xmm14, %xmm1, %xmm1 + vpunpcklqdq %xmm3, %xmm4, %xmm15 + vpshufb %xmm7, %xmm1, %xmm1 + vpaddd %xmm1, %xmm13, %xmm13 + vpxor %xmm13, %xmm0, %xmm0 + vpslld $20, %xmm0, %xmm11 + vpsrld $12, %xmm0, %xmm0 + vpxor %xmm11, %xmm0, %xmm0 + vpunpckhqdq %xmm5, %xmm3, %xmm11 + vpblendw $51, %xmm15, %xmm11, %xmm11 + vpunpckhqdq %xmm3, %xmm5, %xmm15 + vpaddd %xmm11, %xmm14, %xmm11 + vpaddd %xmm0, %xmm11, %xmm11 + vpxor %xmm11, %xmm1, %xmm1 + vpshufb %xmm6, %xmm1, %xmm1 + vpaddd %xmm1, %xmm13, %xmm13 + vpxor %xmm13, %xmm0, %xmm0 + vpshufd $147, %xmm1, %xmm1 + vpshufd $78, %xmm13, %xmm13 + vpslld $25, %xmm0, %xmm14 + vpsrld $7, %xmm0, %xmm0 + vpxor %xmm14, %xmm0, %xmm14 + vpunpckhqdq %xmm4, %xmm2, %xmm0 + vpshufd $57, %xmm14, %xmm14 + vpblendw $51, %xmm15, %xmm0, %xmm15 + vpaddd %xmm15, %xmm11, %xmm15 + vpaddd %xmm14, %xmm15, %xmm15 + vpxor %xmm15, %xmm1, %xmm1 + vpshufb %xmm7, %xmm1, %xmm1 + vpaddd %xmm1, %xmm13, %xmm13 + vpxor %xmm13, %xmm14, %xmm14 + vpslld $20, %xmm14, %xmm11 + vpsrld $12, %xmm14, %xmm14 + vpxor %xmm11, %xmm14, %xmm14 + vpblendw $3, %xmm2, %xmm4, %xmm11 + vpslldq $8, %xmm11, %xmm0 + vpblendw $15, %xmm5, %xmm0, %xmm0 + vpshufd $99, %xmm0, %xmm0 + vpaddd %xmm15, %xmm0, %xmm15 + vpaddd %xmm14, %xmm15, %xmm15 + vpxor %xmm15, %xmm1, %xmm0 + vpaddd %xmm12, %xmm15, %xmm15 + vpshufb %xmm6, %xmm0, %xmm0 + vpaddd %xmm0, %xmm13, %xmm13 + vpxor %xmm13, %xmm14, %xmm14 + vpshufd $57, %xmm0, %xmm0 + 
vpshufd $78, %xmm13, %xmm13 + vpslld $25, %xmm14, %xmm1 + vpsrld $7, %xmm14, %xmm14 + vpxor %xmm1, %xmm14, %xmm14 + vpblendw $3, %xmm5, %xmm4, %xmm1 + vpshufd $147, %xmm14, %xmm14 + vpaddd %xmm14, %xmm15, %xmm15 + vpxor %xmm15, %xmm0, %xmm0 + vpshufb %xmm7, %xmm0, %xmm0 + vpaddd %xmm0, %xmm13, %xmm13 + vpxor %xmm13, %xmm14, %xmm14 + vpslld $20, %xmm14, %xmm12 + vpsrld $12, %xmm14, %xmm14 + vpxor %xmm12, %xmm14, %xmm14 + vpsrldq $4, %xmm2, %xmm12 + vpblendw $60, %xmm12, %xmm1, %xmm1 + vpaddd %xmm1, %xmm15, %xmm15 + vpaddd %xmm14, %xmm15, %xmm15 + vpxor %xmm15, %xmm0, %xmm0 + vpblendw $12, %xmm4, %xmm3, %xmm1 + vpshufb %xmm6, %xmm0, %xmm0 + vpaddd %xmm0, %xmm13, %xmm13 + vpxor %xmm13, %xmm14, %xmm14 + vpshufd $147, %xmm0, %xmm0 + vpshufd $78, %xmm13, %xmm13 + vpslld $25, %xmm14, %xmm12 + vpsrld $7, %xmm14, %xmm14 + vpxor %xmm12, %xmm14, %xmm14 + vpsrldq $4, %xmm5, %xmm12 + vpblendw $48, %xmm12, %xmm1, %xmm1 + vpshufd $33, %xmm5, %xmm12 + vpshufd $57, %xmm14, %xmm14 + vpshufd $108, %xmm1, %xmm1 + vpblendw $51, %xmm12, %xmm10, %xmm12 + vpaddd %xmm15, %xmm1, %xmm15 + vpaddd %xmm14, %xmm15, %xmm15 + vpxor %xmm15, %xmm0, %xmm0 + vpaddd %xmm12, %xmm15, %xmm15 + vpshufb %xmm7, %xmm0, %xmm0 + vpaddd %xmm0, %xmm13, %xmm1 + vpxor %xmm1, %xmm14, %xmm14 + vpslld $20, %xmm14, %xmm13 + vpsrld $12, %xmm14, %xmm14 + vpxor %xmm13, %xmm14, %xmm14 + vpslldq $12, %xmm3, %xmm13 + vpaddd %xmm14, %xmm15, %xmm15 + vpxor %xmm15, %xmm0, %xmm0 + vpshufb %xmm6, %xmm0, %xmm0 + vpaddd %xmm0, %xmm1, %xmm1 + vpxor %xmm1, %xmm14, %xmm14 + vpshufd $57, %xmm0, %xmm0 + vpshufd $78, %xmm1, %xmm1 + vpslld $25, %xmm14, %xmm12 + vpsrld $7, %xmm14, %xmm14 + vpxor %xmm12, %xmm14, %xmm14 + vpblendw $51, %xmm5, %xmm4, %xmm12 + vpshufd $147, %xmm14, %xmm14 + vpblendw $192, %xmm13, %xmm12, %xmm12 + vpaddd %xmm12, %xmm15, %xmm15 + vpaddd %xmm14, %xmm15, %xmm15 + vpxor %xmm15, %xmm0, %xmm0 + vpsrldq $4, %xmm3, %xmm12 + vpshufb %xmm7, %xmm0, %xmm0 + vpaddd %xmm0, %xmm1, %xmm1 + vpxor %xmm1, %xmm14, %xmm14 + vpslld $20, %xmm14, %xmm13 + vpsrld $12, %xmm14, %xmm14 + vpxor %xmm13, %xmm14, %xmm14 + vpblendw $48, %xmm2, %xmm5, %xmm13 + vpblendw $3, %xmm12, %xmm13, %xmm13 + vpshufd $156, %xmm13, %xmm13 + vpaddd %xmm15, %xmm13, %xmm15 + vpaddd %xmm14, %xmm15, %xmm15 + vpxor %xmm15, %xmm0, %xmm0 + vpshufb %xmm6, %xmm0, %xmm0 + vpaddd %xmm0, %xmm1, %xmm1 + vpxor %xmm1, %xmm14, %xmm14 + vpshufd $147, %xmm0, %xmm0 + vpshufd $78, %xmm1, %xmm1 + vpslld $25, %xmm14, %xmm13 + vpsrld $7, %xmm14, %xmm14 + vpxor %xmm13, %xmm14, %xmm14 + vpunpcklqdq %xmm2, %xmm4, %xmm13 + vpshufd $57, %xmm14, %xmm14 + vpblendw $12, %xmm12, %xmm13, %xmm12 + vpshufd $180, %xmm12, %xmm12 + vpaddd %xmm15, %xmm12, %xmm15 + vpaddd %xmm14, %xmm15, %xmm15 + vpxor %xmm15, %xmm0, %xmm0 + vpshufb %xmm7, %xmm0, %xmm0 + vpaddd %xmm0, %xmm1, %xmm1 + vpxor %xmm1, %xmm14, %xmm14 + vpslld $20, %xmm14, %xmm12 + vpsrld $12, %xmm14, %xmm14 + vpxor %xmm12, %xmm14, %xmm14 + vpunpckhqdq %xmm9, %xmm4, %xmm12 + vpshufd $198, %xmm12, %xmm12 + vpaddd %xmm15, %xmm12, %xmm15 + vpaddd %xmm14, %xmm15, %xmm15 + vpxor %xmm15, %xmm0, %xmm0 + vpaddd %xmm15, %xmm8, %xmm15 + vpshufb %xmm6, %xmm0, %xmm0 + vpaddd %xmm0, %xmm1, %xmm1 + vpxor %xmm1, %xmm14, %xmm14 + vpshufd $57, %xmm0, %xmm0 + vpshufd $78, %xmm1, %xmm1 + vpslld $25, %xmm14, %xmm12 + vpsrld $7, %xmm14, %xmm14 + vpxor %xmm12, %xmm14, %xmm14 + vpsrldq $4, %xmm4, %xmm12 + vpshufd $147, %xmm14, %xmm14 + vpaddd %xmm14, %xmm15, %xmm15 + vpxor %xmm15, %xmm0, %xmm0 + vpshufb %xmm7, %xmm0, %xmm0 + vpaddd %xmm0, %xmm1, %xmm1 + vpxor %xmm1, %xmm14, %xmm14 + 
vpslld $20, %xmm14, %xmm8 + vpsrld $12, %xmm14, %xmm14 + vpxor %xmm14, %xmm8, %xmm14 + vpblendw $48, %xmm5, %xmm2, %xmm8 + vpblendw $3, %xmm12, %xmm8, %xmm8 + vpunpckhqdq %xmm5, %xmm4, %xmm12 + vpshufd $75, %xmm8, %xmm8 + vpblendw $60, %xmm10, %xmm12, %xmm10 + vpaddd %xmm15, %xmm8, %xmm15 + vpaddd %xmm14, %xmm15, %xmm15 + vpxor %xmm0, %xmm15, %xmm0 + vpshufd $45, %xmm10, %xmm10 + vpshufb %xmm6, %xmm0, %xmm0 + vpaddd %xmm15, %xmm10, %xmm15 + vpaddd %xmm0, %xmm1, %xmm1 + vpxor %xmm1, %xmm14, %xmm14 + vpshufd $147, %xmm0, %xmm0 + vpshufd $78, %xmm1, %xmm1 + vpslld $25, %xmm14, %xmm8 + vpsrld $7, %xmm14, %xmm14 + vpxor %xmm14, %xmm8, %xmm8 + vpshufd $57, %xmm8, %xmm8 + vpaddd %xmm8, %xmm15, %xmm15 + vpxor %xmm0, %xmm15, %xmm0 + vpshufb %xmm7, %xmm0, %xmm0 + vpaddd %xmm0, %xmm1, %xmm1 + vpxor %xmm8, %xmm1, %xmm8 + vpslld $20, %xmm8, %xmm10 + vpsrld $12, %xmm8, %xmm8 + vpxor %xmm8, %xmm10, %xmm10 + vpunpckldq %xmm3, %xmm4, %xmm8 + vpunpcklqdq %xmm9, %xmm8, %xmm9 + vpaddd %xmm9, %xmm15, %xmm9 + vpaddd %xmm10, %xmm9, %xmm9 + vpxor %xmm0, %xmm9, %xmm8 + vpshufb %xmm6, %xmm8, %xmm8 + vpaddd %xmm8, %xmm1, %xmm1 + vpxor %xmm1, %xmm10, %xmm10 + vpshufd $57, %xmm8, %xmm8 + vpshufd $78, %xmm1, %xmm1 + vpslld $25, %xmm10, %xmm12 + vpsrld $7, %xmm10, %xmm10 + vpxor %xmm10, %xmm12, %xmm10 + vpblendw $48, %xmm4, %xmm3, %xmm12 + vpshufd $147, %xmm10, %xmm0 + vpunpckhdq %xmm5, %xmm3, %xmm10 + vpshufd $78, %xmm12, %xmm12 + vpunpcklqdq %xmm4, %xmm10, %xmm10 + vpblendw $192, %xmm2, %xmm10, %xmm10 + vpshufhw $78, %xmm10, %xmm10 + vpaddd %xmm10, %xmm9, %xmm10 + vpaddd %xmm0, %xmm10, %xmm10 + vpxor %xmm8, %xmm10, %xmm8 + vpshufb %xmm7, %xmm8, %xmm8 + vpaddd %xmm8, %xmm1, %xmm1 + vpxor %xmm0, %xmm1, %xmm9 + vpslld $20, %xmm9, %xmm0 + vpsrld $12, %xmm9, %xmm9 + vpxor %xmm9, %xmm0, %xmm0 + vpunpckhdq %xmm5, %xmm4, %xmm9 + vpblendw $240, %xmm9, %xmm2, %xmm13 + vpshufd $39, %xmm13, %xmm13 + vpaddd %xmm10, %xmm13, %xmm10 + vpaddd %xmm0, %xmm10, %xmm10 + vpxor %xmm8, %xmm10, %xmm8 + vpblendw $12, %xmm4, %xmm2, %xmm13 + vpshufb %xmm6, %xmm8, %xmm8 + vpslldq $4, %xmm13, %xmm13 + vpblendw $15, %xmm5, %xmm13, %xmm13 + vpaddd %xmm8, %xmm1, %xmm1 + vpxor %xmm1, %xmm0, %xmm0 + vpaddd %xmm13, %xmm10, %xmm13 + vpshufd $147, %xmm8, %xmm8 + vpshufd $78, %xmm1, %xmm1 + vpslld $25, %xmm0, %xmm14 + vpsrld $7, %xmm0, %xmm0 + vpxor %xmm0, %xmm14, %xmm14 + vpshufd $57, %xmm14, %xmm14 + vpaddd %xmm14, %xmm13, %xmm13 + vpxor %xmm8, %xmm13, %xmm8 + vpaddd %xmm13, %xmm12, %xmm12 + vpshufb %xmm7, %xmm8, %xmm8 + vpaddd %xmm8, %xmm1, %xmm1 + vpxor %xmm14, %xmm1, %xmm14 + vpslld $20, %xmm14, %xmm10 + vpsrld $12, %xmm14, %xmm14 + vpxor %xmm14, %xmm10, %xmm10 + vpaddd %xmm10, %xmm12, %xmm12 + vpxor %xmm8, %xmm12, %xmm8 + vpshufb %xmm6, %xmm8, %xmm8 + vpaddd %xmm8, %xmm1, %xmm1 + vpxor %xmm1, %xmm10, %xmm0 + vpshufd $57, %xmm8, %xmm8 + vpshufd $78, %xmm1, %xmm1 + vpslld $25, %xmm0, %xmm10 + vpsrld $7, %xmm0, %xmm0 + vpxor %xmm0, %xmm10, %xmm10 + vpblendw $48, %xmm2, %xmm3, %xmm0 + vpblendw $15, %xmm11, %xmm0, %xmm0 + vpshufd $147, %xmm10, %xmm10 + vpshufd $114, %xmm0, %xmm0 + vpaddd %xmm12, %xmm0, %xmm0 + vpaddd %xmm10, %xmm0, %xmm0 + vpxor %xmm8, %xmm0, %xmm8 + vpshufb %xmm7, %xmm8, %xmm8 + vpaddd %xmm8, %xmm1, %xmm1 + vpxor %xmm10, %xmm1, %xmm10 + vpslld $20, %xmm10, %xmm11 + vpsrld $12, %xmm10, %xmm10 + vpxor %xmm10, %xmm11, %xmm10 + vpslldq $4, %xmm4, %xmm11 + vpblendw $192, %xmm11, %xmm3, %xmm3 + vpunpckldq %xmm5, %xmm4, %xmm4 + vpshufd $99, %xmm3, %xmm3 + vpaddd %xmm0, %xmm3, %xmm3 + vpaddd %xmm10, %xmm3, %xmm3 + vpxor %xmm8, %xmm3, %xmm11 + 
vpunpckldq %xmm5, %xmm2, %xmm0 + vpblendw $192, %xmm2, %xmm5, %xmm2 + vpshufb %xmm6, %xmm11, %xmm11 + vpunpckhqdq %xmm0, %xmm9, %xmm0 + vpblendw $15, %xmm4, %xmm2, %xmm4 + vpaddd %xmm11, %xmm1, %xmm1 + vpxor %xmm1, %xmm10, %xmm10 + vpshufd $147, %xmm11, %xmm11 + vpshufd $201, %xmm0, %xmm0 + vpslld $25, %xmm10, %xmm8 + vpsrld $7, %xmm10, %xmm10 + vpxor %xmm10, %xmm8, %xmm10 + vpshufd $78, %xmm1, %xmm1 + vpaddd %xmm3, %xmm0, %xmm0 + vpshufd $27, %xmm4, %xmm4 + vpshufd $57, %xmm10, %xmm10 + vpaddd %xmm10, %xmm0, %xmm0 + vpxor %xmm11, %xmm0, %xmm11 + vpaddd %xmm0, %xmm4, %xmm0 + vpshufb %xmm7, %xmm11, %xmm7 + vpaddd %xmm7, %xmm1, %xmm1 + vpxor %xmm10, %xmm1, %xmm10 + vpslld $20, %xmm10, %xmm8 + vpsrld $12, %xmm10, %xmm10 + vpxor %xmm10, %xmm8, %xmm8 + vpaddd %xmm8, %xmm0, %xmm0 + vpxor %xmm7, %xmm0, %xmm7 + vpshufb %xmm6, %xmm7, %xmm6 + vpaddd %xmm6, %xmm1, %xmm1 + vpxor %xmm1, %xmm8, %xmm8 + vpshufd $78, %xmm1, %xmm1 + vpshufd $57, %xmm6, %xmm6 + vpslld $25, %xmm8, %xmm2 + vpsrld $7, %xmm8, %xmm8 + vpxor %xmm8, %xmm2, %xmm8 + vpxor (%rdi), %xmm1, %xmm1 + vpshufd $147, %xmm8, %xmm8 + vpxor %xmm0, %xmm1, %xmm0 + vmovups %xmm0, (%rdi) + vpxor 16(%rdi), %xmm8, %xmm0 + vpxor %xmm6, %xmm0, %xmm6 + vmovups %xmm6, 16(%rdi) + addq $64, %rsi + decq %rdx + jnz .Lbeginofloop +.Lendofloop: + ret +ENDPROC(blake2s_compress_avx) +#endif /* CONFIG_AS_AVX */ diff --git a/src/crypto/chacha20-avx2-x86_64.S b/src/crypto/chacha20-avx2-x86_64.S deleted file mode 100644 index 48d6cc4..0000000 --- a/src/crypto/chacha20-avx2-x86_64.S +++ /dev/null @@ -1,446 +0,0 @@ -/* - * ChaCha20 256-bit cipher algorithm, RFC7539, x64 AVX2 functions - * - * Copyright (C) 2015 Martin Willi - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 2 of the License, or - * (at your option) any later version. - */ - -#include - -.section .rodata.cst32.ROT8, "aM", @progbits, 32 -.align 32 -ROT8: .octa 0x0e0d0c0f0a09080b0605040702010003 - .octa 0x0e0d0c0f0a09080b0605040702010003 -.section .rodata.cst32.ROT16, "aM", @progbits, 32 -.align 32 -ROT16: .octa 0x0d0c0f0e09080b0a0504070601000302 - .octa 0x0d0c0f0e09080b0a0504070601000302 -.section .rodata.cst32.CTRINC, "aM", @progbits, 32 -.align 32 -CTRINC: .octa 0x00000003000000020000000100000000 - .octa 0x00000007000000060000000500000004 - -.text - -ENTRY(chacha20_asm_8block_xor_avx2) - # %rdi: Input state matrix, s - # %rsi: 8 data blocks output, o - # %rdx: 8 data blocks input, i - - # This function encrypts eight consecutive ChaCha20 blocks by loading - # the state matrix in AVX registers eight times. As we need some - # scratch registers, we save the first four registers on the stack. The - # algorithm performs each operation on the corresponding word of each - # state matrix, hence requires no word shuffling. For final XORing step - # we transpose the matrix by interleaving 32-, 64- and then 128-bit - # words, which allows us to do XOR in AVX registers. 8/16-bit word - # rotation is done with the slightly better performing byte shuffling, - # 7/12-bit word rotation uses traditional shift+OR. 
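The eight-block routine above computes, in every 32-bit lane of the ymm registers, the standard ChaCha20 quarter round from RFC 7539. A minimal scalar C sketch of what a single lane does (helper names here are illustrative):

#include <stdint.h>

/*
 * RFC 7539 quarter round: the AVX2 code runs eight of these at once,
 * one ChaCha20 block per 32-bit ymm lane.  The 16- and 8-bit rotations
 * are the ones done with vpshufb above; the 12- and 7-bit rotations use
 * shift+or.  Reference sketch only.
 */
static inline uint32_t rotl32(uint32_t v, int c)
{
        return (v << c) | (v >> (32 - c));
}

static void chacha20_quarter_round(uint32_t x[16], int a, int b, int c, int d)
{
        x[a] += x[b]; x[d] = rotl32(x[d] ^ x[a], 16);
        x[c] += x[d]; x[b] = rotl32(x[b] ^ x[c], 12);
        x[a] += x[b]; x[d] = rotl32(x[d] ^ x[a], 8);
        x[c] += x[d]; x[b] = rotl32(x[b] ^ x[c], 7);
}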
- - vzeroupper - # 4 * 32 byte stack, 32-byte aligned - lea 8(%rsp),%r10 - and $~31, %rsp - sub $0x80, %rsp - - # x0..15[0-7] = s[0..15] - vpbroadcastd 0x00(%rdi),%ymm0 - vpbroadcastd 0x04(%rdi),%ymm1 - vpbroadcastd 0x08(%rdi),%ymm2 - vpbroadcastd 0x0c(%rdi),%ymm3 - vpbroadcastd 0x10(%rdi),%ymm4 - vpbroadcastd 0x14(%rdi),%ymm5 - vpbroadcastd 0x18(%rdi),%ymm6 - vpbroadcastd 0x1c(%rdi),%ymm7 - vpbroadcastd 0x20(%rdi),%ymm8 - vpbroadcastd 0x24(%rdi),%ymm9 - vpbroadcastd 0x28(%rdi),%ymm10 - vpbroadcastd 0x2c(%rdi),%ymm11 - vpbroadcastd 0x30(%rdi),%ymm12 - vpbroadcastd 0x34(%rdi),%ymm13 - vpbroadcastd 0x38(%rdi),%ymm14 - vpbroadcastd 0x3c(%rdi),%ymm15 - # x0..3 on stack - vmovdqa %ymm0,0x00(%rsp) - vmovdqa %ymm1,0x20(%rsp) - vmovdqa %ymm2,0x40(%rsp) - vmovdqa %ymm3,0x60(%rsp) - - vmovdqa CTRINC(%rip),%ymm1 - vmovdqa ROT8(%rip),%ymm2 - vmovdqa ROT16(%rip),%ymm3 - - # x12 += counter values 0-3 - vpaddd %ymm1,%ymm12,%ymm12 - - mov $10,%ecx - -.Ldoubleround8: - # x0 += x4, x12 = rotl32(x12 ^ x0, 16) - vpaddd 0x00(%rsp),%ymm4,%ymm0 - vmovdqa %ymm0,0x00(%rsp) - vpxor %ymm0,%ymm12,%ymm12 - vpshufb %ymm3,%ymm12,%ymm12 - # x1 += x5, x13 = rotl32(x13 ^ x1, 16) - vpaddd 0x20(%rsp),%ymm5,%ymm0 - vmovdqa %ymm0,0x20(%rsp) - vpxor %ymm0,%ymm13,%ymm13 - vpshufb %ymm3,%ymm13,%ymm13 - # x2 += x6, x14 = rotl32(x14 ^ x2, 16) - vpaddd 0x40(%rsp),%ymm6,%ymm0 - vmovdqa %ymm0,0x40(%rsp) - vpxor %ymm0,%ymm14,%ymm14 - vpshufb %ymm3,%ymm14,%ymm14 - # x3 += x7, x15 = rotl32(x15 ^ x3, 16) - vpaddd 0x60(%rsp),%ymm7,%ymm0 - vmovdqa %ymm0,0x60(%rsp) - vpxor %ymm0,%ymm15,%ymm15 - vpshufb %ymm3,%ymm15,%ymm15 - - # x8 += x12, x4 = rotl32(x4 ^ x8, 12) - vpaddd %ymm12,%ymm8,%ymm8 - vpxor %ymm8,%ymm4,%ymm4 - vpslld $12,%ymm4,%ymm0 - vpsrld $20,%ymm4,%ymm4 - vpor %ymm0,%ymm4,%ymm4 - # x9 += x13, x5 = rotl32(x5 ^ x9, 12) - vpaddd %ymm13,%ymm9,%ymm9 - vpxor %ymm9,%ymm5,%ymm5 - vpslld $12,%ymm5,%ymm0 - vpsrld $20,%ymm5,%ymm5 - vpor %ymm0,%ymm5,%ymm5 - # x10 += x14, x6 = rotl32(x6 ^ x10, 12) - vpaddd %ymm14,%ymm10,%ymm10 - vpxor %ymm10,%ymm6,%ymm6 - vpslld $12,%ymm6,%ymm0 - vpsrld $20,%ymm6,%ymm6 - vpor %ymm0,%ymm6,%ymm6 - # x11 += x15, x7 = rotl32(x7 ^ x11, 12) - vpaddd %ymm15,%ymm11,%ymm11 - vpxor %ymm11,%ymm7,%ymm7 - vpslld $12,%ymm7,%ymm0 - vpsrld $20,%ymm7,%ymm7 - vpor %ymm0,%ymm7,%ymm7 - - # x0 += x4, x12 = rotl32(x12 ^ x0, 8) - vpaddd 0x00(%rsp),%ymm4,%ymm0 - vmovdqa %ymm0,0x00(%rsp) - vpxor %ymm0,%ymm12,%ymm12 - vpshufb %ymm2,%ymm12,%ymm12 - # x1 += x5, x13 = rotl32(x13 ^ x1, 8) - vpaddd 0x20(%rsp),%ymm5,%ymm0 - vmovdqa %ymm0,0x20(%rsp) - vpxor %ymm0,%ymm13,%ymm13 - vpshufb %ymm2,%ymm13,%ymm13 - # x2 += x6, x14 = rotl32(x14 ^ x2, 8) - vpaddd 0x40(%rsp),%ymm6,%ymm0 - vmovdqa %ymm0,0x40(%rsp) - vpxor %ymm0,%ymm14,%ymm14 - vpshufb %ymm2,%ymm14,%ymm14 - # x3 += x7, x15 = rotl32(x15 ^ x3, 8) - vpaddd 0x60(%rsp),%ymm7,%ymm0 - vmovdqa %ymm0,0x60(%rsp) - vpxor %ymm0,%ymm15,%ymm15 - vpshufb %ymm2,%ymm15,%ymm15 - - # x8 += x12, x4 = rotl32(x4 ^ x8, 7) - vpaddd %ymm12,%ymm8,%ymm8 - vpxor %ymm8,%ymm4,%ymm4 - vpslld $7,%ymm4,%ymm0 - vpsrld $25,%ymm4,%ymm4 - vpor %ymm0,%ymm4,%ymm4 - # x9 += x13, x5 = rotl32(x5 ^ x9, 7) - vpaddd %ymm13,%ymm9,%ymm9 - vpxor %ymm9,%ymm5,%ymm5 - vpslld $7,%ymm5,%ymm0 - vpsrld $25,%ymm5,%ymm5 - vpor %ymm0,%ymm5,%ymm5 - # x10 += x14, x6 = rotl32(x6 ^ x10, 7) - vpaddd %ymm14,%ymm10,%ymm10 - vpxor %ymm10,%ymm6,%ymm6 - vpslld $7,%ymm6,%ymm0 - vpsrld $25,%ymm6,%ymm6 - vpor %ymm0,%ymm6,%ymm6 - # x11 += x15, x7 = rotl32(x7 ^ x11, 7) - vpaddd %ymm15,%ymm11,%ymm11 - vpxor %ymm11,%ymm7,%ymm7 - vpslld $7,%ymm7,%ymm0 - vpsrld 
$25,%ymm7,%ymm7 - vpor %ymm0,%ymm7,%ymm7 - - # x0 += x5, x15 = rotl32(x15 ^ x0, 16) - vpaddd 0x00(%rsp),%ymm5,%ymm0 - vmovdqa %ymm0,0x00(%rsp) - vpxor %ymm0,%ymm15,%ymm15 - vpshufb %ymm3,%ymm15,%ymm15 - # x1 += x6, x12 = rotl32(x12 ^ x1, 16)%ymm0 - vpaddd 0x20(%rsp),%ymm6,%ymm0 - vmovdqa %ymm0,0x20(%rsp) - vpxor %ymm0,%ymm12,%ymm12 - vpshufb %ymm3,%ymm12,%ymm12 - # x2 += x7, x13 = rotl32(x13 ^ x2, 16) - vpaddd 0x40(%rsp),%ymm7,%ymm0 - vmovdqa %ymm0,0x40(%rsp) - vpxor %ymm0,%ymm13,%ymm13 - vpshufb %ymm3,%ymm13,%ymm13 - # x3 += x4, x14 = rotl32(x14 ^ x3, 16) - vpaddd 0x60(%rsp),%ymm4,%ymm0 - vmovdqa %ymm0,0x60(%rsp) - vpxor %ymm0,%ymm14,%ymm14 - vpshufb %ymm3,%ymm14,%ymm14 - - # x10 += x15, x5 = rotl32(x5 ^ x10, 12) - vpaddd %ymm15,%ymm10,%ymm10 - vpxor %ymm10,%ymm5,%ymm5 - vpslld $12,%ymm5,%ymm0 - vpsrld $20,%ymm5,%ymm5 - vpor %ymm0,%ymm5,%ymm5 - # x11 += x12, x6 = rotl32(x6 ^ x11, 12) - vpaddd %ymm12,%ymm11,%ymm11 - vpxor %ymm11,%ymm6,%ymm6 - vpslld $12,%ymm6,%ymm0 - vpsrld $20,%ymm6,%ymm6 - vpor %ymm0,%ymm6,%ymm6 - # x8 += x13, x7 = rotl32(x7 ^ x8, 12) - vpaddd %ymm13,%ymm8,%ymm8 - vpxor %ymm8,%ymm7,%ymm7 - vpslld $12,%ymm7,%ymm0 - vpsrld $20,%ymm7,%ymm7 - vpor %ymm0,%ymm7,%ymm7 - # x9 += x14, x4 = rotl32(x4 ^ x9, 12) - vpaddd %ymm14,%ymm9,%ymm9 - vpxor %ymm9,%ymm4,%ymm4 - vpslld $12,%ymm4,%ymm0 - vpsrld $20,%ymm4,%ymm4 - vpor %ymm0,%ymm4,%ymm4 - - # x0 += x5, x15 = rotl32(x15 ^ x0, 8) - vpaddd 0x00(%rsp),%ymm5,%ymm0 - vmovdqa %ymm0,0x00(%rsp) - vpxor %ymm0,%ymm15,%ymm15 - vpshufb %ymm2,%ymm15,%ymm15 - # x1 += x6, x12 = rotl32(x12 ^ x1, 8) - vpaddd 0x20(%rsp),%ymm6,%ymm0 - vmovdqa %ymm0,0x20(%rsp) - vpxor %ymm0,%ymm12,%ymm12 - vpshufb %ymm2,%ymm12,%ymm12 - # x2 += x7, x13 = rotl32(x13 ^ x2, 8) - vpaddd 0x40(%rsp),%ymm7,%ymm0 - vmovdqa %ymm0,0x40(%rsp) - vpxor %ymm0,%ymm13,%ymm13 - vpshufb %ymm2,%ymm13,%ymm13 - # x3 += x4, x14 = rotl32(x14 ^ x3, 8) - vpaddd 0x60(%rsp),%ymm4,%ymm0 - vmovdqa %ymm0,0x60(%rsp) - vpxor %ymm0,%ymm14,%ymm14 - vpshufb %ymm2,%ymm14,%ymm14 - - # x10 += x15, x5 = rotl32(x5 ^ x10, 7) - vpaddd %ymm15,%ymm10,%ymm10 - vpxor %ymm10,%ymm5,%ymm5 - vpslld $7,%ymm5,%ymm0 - vpsrld $25,%ymm5,%ymm5 - vpor %ymm0,%ymm5,%ymm5 - # x11 += x12, x6 = rotl32(x6 ^ x11, 7) - vpaddd %ymm12,%ymm11,%ymm11 - vpxor %ymm11,%ymm6,%ymm6 - vpslld $7,%ymm6,%ymm0 - vpsrld $25,%ymm6,%ymm6 - vpor %ymm0,%ymm6,%ymm6 - # x8 += x13, x7 = rotl32(x7 ^ x8, 7) - vpaddd %ymm13,%ymm8,%ymm8 - vpxor %ymm8,%ymm7,%ymm7 - vpslld $7,%ymm7,%ymm0 - vpsrld $25,%ymm7,%ymm7 - vpor %ymm0,%ymm7,%ymm7 - # x9 += x14, x4 = rotl32(x4 ^ x9, 7) - vpaddd %ymm14,%ymm9,%ymm9 - vpxor %ymm9,%ymm4,%ymm4 - vpslld $7,%ymm4,%ymm0 - vpsrld $25,%ymm4,%ymm4 - vpor %ymm0,%ymm4,%ymm4 - - dec %ecx - jnz .Ldoubleround8 - - # x0..15[0-3] += s[0..15] - vpbroadcastd 0x00(%rdi),%ymm0 - vpaddd 0x00(%rsp),%ymm0,%ymm0 - vmovdqa %ymm0,0x00(%rsp) - vpbroadcastd 0x04(%rdi),%ymm0 - vpaddd 0x20(%rsp),%ymm0,%ymm0 - vmovdqa %ymm0,0x20(%rsp) - vpbroadcastd 0x08(%rdi),%ymm0 - vpaddd 0x40(%rsp),%ymm0,%ymm0 - vmovdqa %ymm0,0x40(%rsp) - vpbroadcastd 0x0c(%rdi),%ymm0 - vpaddd 0x60(%rsp),%ymm0,%ymm0 - vmovdqa %ymm0,0x60(%rsp) - vpbroadcastd 0x10(%rdi),%ymm0 - vpaddd %ymm0,%ymm4,%ymm4 - vpbroadcastd 0x14(%rdi),%ymm0 - vpaddd %ymm0,%ymm5,%ymm5 - vpbroadcastd 0x18(%rdi),%ymm0 - vpaddd %ymm0,%ymm6,%ymm6 - vpbroadcastd 0x1c(%rdi),%ymm0 - vpaddd %ymm0,%ymm7,%ymm7 - vpbroadcastd 0x20(%rdi),%ymm0 - vpaddd %ymm0,%ymm8,%ymm8 - vpbroadcastd 0x24(%rdi),%ymm0 - vpaddd %ymm0,%ymm9,%ymm9 - vpbroadcastd 0x28(%rdi),%ymm0 - vpaddd %ymm0,%ymm10,%ymm10 - vpbroadcastd 0x2c(%rdi),%ymm0 - 
vpaddd %ymm0,%ymm11,%ymm11 - vpbroadcastd 0x30(%rdi),%ymm0 - vpaddd %ymm0,%ymm12,%ymm12 - vpbroadcastd 0x34(%rdi),%ymm0 - vpaddd %ymm0,%ymm13,%ymm13 - vpbroadcastd 0x38(%rdi),%ymm0 - vpaddd %ymm0,%ymm14,%ymm14 - vpbroadcastd 0x3c(%rdi),%ymm0 - vpaddd %ymm0,%ymm15,%ymm15 - - # x12 += counter values 0-3 - vpaddd %ymm1,%ymm12,%ymm12 - - # interleave 32-bit words in state n, n+1 - vmovdqa 0x00(%rsp),%ymm0 - vmovdqa 0x20(%rsp),%ymm1 - vpunpckldq %ymm1,%ymm0,%ymm2 - vpunpckhdq %ymm1,%ymm0,%ymm1 - vmovdqa %ymm2,0x00(%rsp) - vmovdqa %ymm1,0x20(%rsp) - vmovdqa 0x40(%rsp),%ymm0 - vmovdqa 0x60(%rsp),%ymm1 - vpunpckldq %ymm1,%ymm0,%ymm2 - vpunpckhdq %ymm1,%ymm0,%ymm1 - vmovdqa %ymm2,0x40(%rsp) - vmovdqa %ymm1,0x60(%rsp) - vmovdqa %ymm4,%ymm0 - vpunpckldq %ymm5,%ymm0,%ymm4 - vpunpckhdq %ymm5,%ymm0,%ymm5 - vmovdqa %ymm6,%ymm0 - vpunpckldq %ymm7,%ymm0,%ymm6 - vpunpckhdq %ymm7,%ymm0,%ymm7 - vmovdqa %ymm8,%ymm0 - vpunpckldq %ymm9,%ymm0,%ymm8 - vpunpckhdq %ymm9,%ymm0,%ymm9 - vmovdqa %ymm10,%ymm0 - vpunpckldq %ymm11,%ymm0,%ymm10 - vpunpckhdq %ymm11,%ymm0,%ymm11 - vmovdqa %ymm12,%ymm0 - vpunpckldq %ymm13,%ymm0,%ymm12 - vpunpckhdq %ymm13,%ymm0,%ymm13 - vmovdqa %ymm14,%ymm0 - vpunpckldq %ymm15,%ymm0,%ymm14 - vpunpckhdq %ymm15,%ymm0,%ymm15 - - # interleave 64-bit words in state n, n+2 - vmovdqa 0x00(%rsp),%ymm0 - vmovdqa 0x40(%rsp),%ymm2 - vpunpcklqdq %ymm2,%ymm0,%ymm1 - vpunpckhqdq %ymm2,%ymm0,%ymm2 - vmovdqa %ymm1,0x00(%rsp) - vmovdqa %ymm2,0x40(%rsp) - vmovdqa 0x20(%rsp),%ymm0 - vmovdqa 0x60(%rsp),%ymm2 - vpunpcklqdq %ymm2,%ymm0,%ymm1 - vpunpckhqdq %ymm2,%ymm0,%ymm2 - vmovdqa %ymm1,0x20(%rsp) - vmovdqa %ymm2,0x60(%rsp) - vmovdqa %ymm4,%ymm0 - vpunpcklqdq %ymm6,%ymm0,%ymm4 - vpunpckhqdq %ymm6,%ymm0,%ymm6 - vmovdqa %ymm5,%ymm0 - vpunpcklqdq %ymm7,%ymm0,%ymm5 - vpunpckhqdq %ymm7,%ymm0,%ymm7 - vmovdqa %ymm8,%ymm0 - vpunpcklqdq %ymm10,%ymm0,%ymm8 - vpunpckhqdq %ymm10,%ymm0,%ymm10 - vmovdqa %ymm9,%ymm0 - vpunpcklqdq %ymm11,%ymm0,%ymm9 - vpunpckhqdq %ymm11,%ymm0,%ymm11 - vmovdqa %ymm12,%ymm0 - vpunpcklqdq %ymm14,%ymm0,%ymm12 - vpunpckhqdq %ymm14,%ymm0,%ymm14 - vmovdqa %ymm13,%ymm0 - vpunpcklqdq %ymm15,%ymm0,%ymm13 - vpunpckhqdq %ymm15,%ymm0,%ymm15 - - # interleave 128-bit words in state n, n+4 - vmovdqa 0x00(%rsp),%ymm0 - vperm2i128 $0x20,%ymm4,%ymm0,%ymm1 - vperm2i128 $0x31,%ymm4,%ymm0,%ymm4 - vmovdqa %ymm1,0x00(%rsp) - vmovdqa 0x20(%rsp),%ymm0 - vperm2i128 $0x20,%ymm5,%ymm0,%ymm1 - vperm2i128 $0x31,%ymm5,%ymm0,%ymm5 - vmovdqa %ymm1,0x20(%rsp) - vmovdqa 0x40(%rsp),%ymm0 - vperm2i128 $0x20,%ymm6,%ymm0,%ymm1 - vperm2i128 $0x31,%ymm6,%ymm0,%ymm6 - vmovdqa %ymm1,0x40(%rsp) - vmovdqa 0x60(%rsp),%ymm0 - vperm2i128 $0x20,%ymm7,%ymm0,%ymm1 - vperm2i128 $0x31,%ymm7,%ymm0,%ymm7 - vmovdqa %ymm1,0x60(%rsp) - vperm2i128 $0x20,%ymm12,%ymm8,%ymm0 - vperm2i128 $0x31,%ymm12,%ymm8,%ymm12 - vmovdqa %ymm0,%ymm8 - vperm2i128 $0x20,%ymm13,%ymm9,%ymm0 - vperm2i128 $0x31,%ymm13,%ymm9,%ymm13 - vmovdqa %ymm0,%ymm9 - vperm2i128 $0x20,%ymm14,%ymm10,%ymm0 - vperm2i128 $0x31,%ymm14,%ymm10,%ymm14 - vmovdqa %ymm0,%ymm10 - vperm2i128 $0x20,%ymm15,%ymm11,%ymm0 - vperm2i128 $0x31,%ymm15,%ymm11,%ymm15 - vmovdqa %ymm0,%ymm11 - - # xor with corresponding input, write to output - vmovdqa 0x00(%rsp),%ymm0 - vpxor 0x0000(%rdx),%ymm0,%ymm0 - vmovdqu %ymm0,0x0000(%rsi) - vmovdqa 0x20(%rsp),%ymm0 - vpxor 0x0080(%rdx),%ymm0,%ymm0 - vmovdqu %ymm0,0x0080(%rsi) - vmovdqa 0x40(%rsp),%ymm0 - vpxor 0x0040(%rdx),%ymm0,%ymm0 - vmovdqu %ymm0,0x0040(%rsi) - vmovdqa 0x60(%rsp),%ymm0 - vpxor 0x00c0(%rdx),%ymm0,%ymm0 - vmovdqu %ymm0,0x00c0(%rsi) - vpxor 
0x0100(%rdx),%ymm4,%ymm4 - vmovdqu %ymm4,0x0100(%rsi) - vpxor 0x0180(%rdx),%ymm5,%ymm5 - vmovdqu %ymm5,0x00180(%rsi) - vpxor 0x0140(%rdx),%ymm6,%ymm6 - vmovdqu %ymm6,0x0140(%rsi) - vpxor 0x01c0(%rdx),%ymm7,%ymm7 - vmovdqu %ymm7,0x01c0(%rsi) - vpxor 0x0020(%rdx),%ymm8,%ymm8 - vmovdqu %ymm8,0x0020(%rsi) - vpxor 0x00a0(%rdx),%ymm9,%ymm9 - vmovdqu %ymm9,0x00a0(%rsi) - vpxor 0x0060(%rdx),%ymm10,%ymm10 - vmovdqu %ymm10,0x0060(%rsi) - vpxor 0x00e0(%rdx),%ymm11,%ymm11 - vmovdqu %ymm11,0x00e0(%rsi) - vpxor 0x0120(%rdx),%ymm12,%ymm12 - vmovdqu %ymm12,0x0120(%rsi) - vpxor 0x01a0(%rdx),%ymm13,%ymm13 - vmovdqu %ymm13,0x01a0(%rsi) - vpxor 0x0160(%rdx),%ymm14,%ymm14 - vmovdqu %ymm14,0x0160(%rsi) - vpxor 0x01e0(%rdx),%ymm15,%ymm15 - vmovdqu %ymm15,0x01e0(%rsi) - - vzeroupper - lea -8(%r10),%rsp - ret -ENDPROC(chacha20_asm_8block_xor_avx2) diff --git a/src/crypto/chacha20-ssse3-x86_64.S b/src/crypto/chacha20-ssse3-x86_64.S deleted file mode 100644 index 483f79a..0000000 --- a/src/crypto/chacha20-ssse3-x86_64.S +++ /dev/null @@ -1,734 +0,0 @@ -/* - * ChaCha20 256-bit cipher algorithm, RFC7539, x64 SSSE3 functions - * - * Copyright (C) 2015 Martin Willi - * Copyright (C) 2017 Jason A. Donenfeld . All Rights Reserved. - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 2 of the License, or - * (at your option) any later version. - */ - -#include - -.section .rodata.cst16.ROT8, "aM", @progbits, 16 -.align 16 -ROT8:.octa 0x0e0d0c0f0a09080b0605040702010003 -.section .rodata.cst16.ROT16, "aM", @progbits, 16 -.align 16 -ROT16: .octa 0x0d0c0f0e09080b0a0504070601000302 -.section .rodata.cst16.CTRINC, "aM", @progbits, 16 -.align 16 -CTRINC: .octa 0x00000003000000020000000100000000 -.section .rodata.cst16.CHACONST, "aM", @progbits, 16 -.align 16 -CONST: .ascii "expand 32-byte k" - -.text - -ENTRY(chacha20_asm_block_xor_ssse3) - # %rdi: Input state matrix, s - # %rsi: 1 data block output, o - # %rdx: 1 data block input, i - - # This function encrypts one ChaCha20 block by loading the state matrix - # in four SSE registers. It performs matrix operation on four words in - # parallel, but requireds shuffling to rearrange the words after each - # round. 8/16-bit word rotation is done with the slightly better - # performing SSSE3 byte shuffling, 7/12-bit word rotation uses - # traditional shift+OR. 
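The same round structure can be written with the usual SSE intrinsics, which maps one-to-one onto the instructions below: paddd/pxor become _mm_add_epi32/_mm_xor_si128, the 16- and 8-bit rotations become pshufb byte shuffles, and pshufd rotates the rows between the column and diagonal halves. A rough sketch under those assumptions (function and macro names are illustrative):

#include <tmmintrin.h>  /* SSE2 + SSSE3 intrinsics */

/* rotate each 32-bit lane left by the constant c */
#define ROTL32X4(v, c) _mm_or_si128(_mm_slli_epi32((v), (c)), \
                                    _mm_srli_epi32((v), 32 - (c)))

/*
 * One double round over the row layout used below: x[0..3] hold the four
 * rows of the state.  Sketch only; the byte-shuffle constants mirror the
 * ROT16/ROT8 tables above.
 */
static void chacha20_double_round_ssse3(__m128i x[4])
{
        const __m128i rot16 = _mm_set_epi8(13, 12, 15, 14, 9, 8, 11, 10,
                                           5, 4, 7, 6, 1, 0, 3, 2);
        const __m128i rot8  = _mm_set_epi8(14, 13, 12, 15, 10, 9, 8, 11,
                                           6, 5, 4, 7, 2, 1, 0, 3);

        /* column round */
        x[0] = _mm_add_epi32(x[0], x[1]);
        x[3] = _mm_shuffle_epi8(_mm_xor_si128(x[3], x[0]), rot16);
        x[2] = _mm_add_epi32(x[2], x[3]);
        x[1] = ROTL32X4(_mm_xor_si128(x[1], x[2]), 12);
        x[0] = _mm_add_epi32(x[0], x[1]);
        x[3] = _mm_shuffle_epi8(_mm_xor_si128(x[3], x[0]), rot8);
        x[2] = _mm_add_epi32(x[2], x[3]);
        x[1] = ROTL32X4(_mm_xor_si128(x[1], x[2]), 7);

        /* rotate rows so the next four quarter rounds hit the diagonals */
        x[1] = _mm_shuffle_epi32(x[1], 0x39);
        x[2] = _mm_shuffle_epi32(x[2], 0x4e);
        x[3] = _mm_shuffle_epi32(x[3], 0x93);

        /* diagonal round */
        x[0] = _mm_add_epi32(x[0], x[1]);
        x[3] = _mm_shuffle_epi8(_mm_xor_si128(x[3], x[0]), rot16);
        x[2] = _mm_add_epi32(x[2], x[3]);
        x[1] = ROTL32X4(_mm_xor_si128(x[1], x[2]), 12);
        x[0] = _mm_add_epi32(x[0], x[1]);
        x[3] = _mm_shuffle_epi8(_mm_xor_si128(x[3], x[0]), rot8);
        x[2] = _mm_add_epi32(x[2], x[3]);
        x[1] = ROTL32X4(_mm_xor_si128(x[1], x[2]), 7);

        /* undo the row rotation */
        x[1] = _mm_shuffle_epi32(x[1], 0x93);
        x[2] = _mm_shuffle_epi32(x[2], 0x4e);
        x[3] = _mm_shuffle_epi32(x[3], 0x39);
}

Ten such double rounds, followed by adding the original state back in and XORing against the input, produce one 64-byte block, which is what the tail of the routine below does with the state copies saved in %xmm8..%xmm11.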
- - # x0..3 = s0..3 - movdqa 0x00(%rdi),%xmm0 - movdqa 0x10(%rdi),%xmm1 - movdqa 0x20(%rdi),%xmm2 - movdqa 0x30(%rdi),%xmm3 - movdqa %xmm0,%xmm8 - movdqa %xmm1,%xmm9 - movdqa %xmm2,%xmm10 - movdqa %xmm3,%xmm11 - - movdqa ROT8(%rip),%xmm4 - movdqa ROT16(%rip),%xmm5 - - mov $10,%ecx - -.Ldoubleround: - - # x0 += x1, x3 = rotl32(x3 ^ x0, 16) - paddd %xmm1,%xmm0 - pxor %xmm0,%xmm3 - pshufb %xmm5,%xmm3 - - # x2 += x3, x1 = rotl32(x1 ^ x2, 12) - paddd %xmm3,%xmm2 - pxor %xmm2,%xmm1 - movdqa %xmm1,%xmm6 - pslld $12,%xmm6 - psrld $20,%xmm1 - por %xmm6,%xmm1 - - # x0 += x1, x3 = rotl32(x3 ^ x0, 8) - paddd %xmm1,%xmm0 - pxor %xmm0,%xmm3 - pshufb %xmm4,%xmm3 - - # x2 += x3, x1 = rotl32(x1 ^ x2, 7) - paddd %xmm3,%xmm2 - pxor %xmm2,%xmm1 - movdqa %xmm1,%xmm7 - pslld $7,%xmm7 - psrld $25,%xmm1 - por %xmm7,%xmm1 - - # x1 = shuffle32(x1, MASK(0, 3, 2, 1)) - pshufd $0x39,%xmm1,%xmm1 - # x2 = shuffle32(x2, MASK(1, 0, 3, 2)) - pshufd $0x4e,%xmm2,%xmm2 - # x3 = shuffle32(x3, MASK(2, 1, 0, 3)) - pshufd $0x93,%xmm3,%xmm3 - - # x0 += x1, x3 = rotl32(x3 ^ x0, 16) - paddd %xmm1,%xmm0 - pxor %xmm0,%xmm3 - pshufb %xmm5,%xmm3 - - # x2 += x3, x1 = rotl32(x1 ^ x2, 12) - paddd %xmm3,%xmm2 - pxor %xmm2,%xmm1 - movdqa %xmm1,%xmm6 - pslld $12,%xmm6 - psrld $20,%xmm1 - por %xmm6,%xmm1 - - # x0 += x1, x3 = rotl32(x3 ^ x0, 8) - paddd %xmm1,%xmm0 - pxor %xmm0,%xmm3 - pshufb %xmm4,%xmm3 - - # x2 += x3, x1 = rotl32(x1 ^ x2, 7) - paddd %xmm3,%xmm2 - pxor %xmm2,%xmm1 - movdqa %xmm1,%xmm7 - pslld $7,%xmm7 - psrld $25,%xmm1 - por %xmm7,%xmm1 - - # x1 = shuffle32(x1, MASK(2, 1, 0, 3)) - pshufd $0x93,%xmm1,%xmm1 - # x2 = shuffle32(x2, MASK(1, 0, 3, 2)) - pshufd $0x4e,%xmm2,%xmm2 - # x3 = shuffle32(x3, MASK(0, 3, 2, 1)) - pshufd $0x39,%xmm3,%xmm3 - - dec %ecx - jnz .Ldoubleround - - # o0 = i0 ^ (x0 + s0) - movdqu 0x00(%rdx),%xmm4 - paddd %xmm8,%xmm0 - pxor %xmm4,%xmm0 - movdqu %xmm0,0x00(%rsi) - # o1 = i1 ^ (x1 + s1) - movdqu 0x10(%rdx),%xmm5 - paddd %xmm9,%xmm1 - pxor %xmm5,%xmm1 - movdqu %xmm1,0x10(%rsi) - # o2 = i2 ^ (x2 + s2) - movdqu 0x20(%rdx),%xmm6 - paddd %xmm10,%xmm2 - pxor %xmm6,%xmm2 - movdqu %xmm2,0x20(%rsi) - # o3 = i3 ^ (x3 + s3) - movdqu 0x30(%rdx),%xmm7 - paddd %xmm11,%xmm3 - pxor %xmm7,%xmm3 - movdqu %xmm3,0x30(%rsi) - - ret -ENDPROC(chacha20_asm_block_xor_ssse3) - -ENTRY(chacha20_asm_4block_xor_ssse3) - # %rdi: Input state matrix, s - # %rsi: 4 data blocks output, o - # %rdx: 4 data blocks input, i - - # This function encrypts four consecutive ChaCha20 blocks by loading the - # the state matrix in SSE registers four times. As we need some scratch - # registers, we save the first four registers on the stack. The - # algorithm performs each operation on the corresponding word of each - # state matrix, hence requires no word shuffling. For final XORing step - # we transpose the matrix by interleaving 32- and then 64-bit words, - # which allows us to do XOR in SSE registers. 8/16-bit word rotation is - # done with the slightly better performing SSSE3 byte shuffling, - # 7/12-bit word rotation uses traditional shift+OR. 
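Seen from C, the four-block layout described above keeps word i of block j in x[i][j], so each xmm register holds word i of all four blocks and the rounds proceed lane-wise with no shuffling; only the final transpose re-interleaves the words for the XOR step. A hedged sketch of that layout (illustrative names, rotl32 as in the earlier sketch):

#include <stdint.h>

/*
 * Scalar model of the four-block layout: word i of block j lives in
 * x[i][j], so one xmm register carries word i of all four blocks and a
 * quarter round is just the scalar code repeated per lane.  Sketch only.
 */
static inline uint32_t rotl32(uint32_t v, int c)
{
        return (v << c) | (v >> (32 - c));
}

static void chacha20_quarter_round_x4(uint32_t x[16][4], int a, int b, int c, int d)
{
        int j;

        for (j = 0; j < 4; ++j) {       /* one iteration per block == per lane */
                x[a][j] += x[b][j]; x[d][j] = rotl32(x[d][j] ^ x[a][j], 16);
                x[c][j] += x[d][j]; x[b][j] = rotl32(x[b][j] ^ x[c][j], 12);
                x[a][j] += x[b][j]; x[d][j] = rotl32(x[d][j] ^ x[a][j], 8);
                x[c][j] += x[d][j]; x[b][j] = rotl32(x[b][j] ^ x[c][j], 7);
        }
}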
- - lea 8(%rsp),%r10 - sub $0x80,%rsp - and $~63,%rsp - - # x0..15[0-3] = s0..3[0..3] - movq 0x00(%rdi),%xmm1 - pshufd $0x00,%xmm1,%xmm0 - pshufd $0x55,%xmm1,%xmm1 - movq 0x08(%rdi),%xmm3 - pshufd $0x00,%xmm3,%xmm2 - pshufd $0x55,%xmm3,%xmm3 - movq 0x10(%rdi),%xmm5 - pshufd $0x00,%xmm5,%xmm4 - pshufd $0x55,%xmm5,%xmm5 - movq 0x18(%rdi),%xmm7 - pshufd $0x00,%xmm7,%xmm6 - pshufd $0x55,%xmm7,%xmm7 - movq 0x20(%rdi),%xmm9 - pshufd $0x00,%xmm9,%xmm8 - pshufd $0x55,%xmm9,%xmm9 - movq 0x28(%rdi),%xmm11 - pshufd $0x00,%xmm11,%xmm10 - pshufd $0x55,%xmm11,%xmm11 - movq 0x30(%rdi),%xmm13 - pshufd $0x00,%xmm13,%xmm12 - pshufd $0x55,%xmm13,%xmm13 - movq 0x38(%rdi),%xmm15 - pshufd $0x00,%xmm15,%xmm14 - pshufd $0x55,%xmm15,%xmm15 - # x0..3 on stack - movdqa %xmm0,0x00(%rsp) - movdqa %xmm1,0x10(%rsp) - movdqa %xmm2,0x20(%rsp) - movdqa %xmm3,0x30(%rsp) - - movdqa CTRINC(%rip),%xmm1 - movdqa ROT8(%rip),%xmm2 - movdqa ROT16(%rip),%xmm3 - - # x12 += counter values 0-3 - paddd %xmm1,%xmm12 - - mov $10,%ecx - -.Ldoubleround4: - # x0 += x4, x12 = rotl32(x12 ^ x0, 16) - movdqa 0x00(%rsp),%xmm0 - paddd %xmm4,%xmm0 - movdqa %xmm0,0x00(%rsp) - pxor %xmm0,%xmm12 - pshufb %xmm3,%xmm12 - # x1 += x5, x13 = rotl32(x13 ^ x1, 16) - movdqa 0x10(%rsp),%xmm0 - paddd %xmm5,%xmm0 - movdqa %xmm0,0x10(%rsp) - pxor %xmm0,%xmm13 - pshufb %xmm3,%xmm13 - # x2 += x6, x14 = rotl32(x14 ^ x2, 16) - movdqa 0x20(%rsp),%xmm0 - paddd %xmm6,%xmm0 - movdqa %xmm0,0x20(%rsp) - pxor %xmm0,%xmm14 - pshufb %xmm3,%xmm14 - # x3 += x7, x15 = rotl32(x15 ^ x3, 16) - movdqa 0x30(%rsp),%xmm0 - paddd %xmm7,%xmm0 - movdqa %xmm0,0x30(%rsp) - pxor %xmm0,%xmm15 - pshufb %xmm3,%xmm15 - - # x8 += x12, x4 = rotl32(x4 ^ x8, 12) - paddd %xmm12,%xmm8 - pxor %xmm8,%xmm4 - movdqa %xmm4,%xmm0 - pslld $12,%xmm0 - psrld $20,%xmm4 - por %xmm0,%xmm4 - # x9 += x13, x5 = rotl32(x5 ^ x9, 12) - paddd %xmm13,%xmm9 - pxor %xmm9,%xmm5 - movdqa %xmm5,%xmm0 - pslld $12,%xmm0 - psrld $20,%xmm5 - por %xmm0,%xmm5 - # x10 += x14, x6 = rotl32(x6 ^ x10, 12) - paddd %xmm14,%xmm10 - pxor %xmm10,%xmm6 - movdqa %xmm6,%xmm0 - pslld $12,%xmm0 - psrld $20,%xmm6 - por %xmm0,%xmm6 - # x11 += x15, x7 = rotl32(x7 ^ x11, 12) - paddd %xmm15,%xmm11 - pxor %xmm11,%xmm7 - movdqa %xmm7,%xmm0 - pslld $12,%xmm0 - psrld $20,%xmm7 - por %xmm0,%xmm7 - - # x0 += x4, x12 = rotl32(x12 ^ x0, 8) - movdqa 0x00(%rsp),%xmm0 - paddd %xmm4,%xmm0 - movdqa %xmm0,0x00(%rsp) - pxor %xmm0,%xmm12 - pshufb %xmm2,%xmm12 - # x1 += x5, x13 = rotl32(x13 ^ x1, 8) - movdqa 0x10(%rsp),%xmm0 - paddd %xmm5,%xmm0 - movdqa %xmm0,0x10(%rsp) - pxor %xmm0,%xmm13 - pshufb %xmm2,%xmm13 - # x2 += x6, x14 = rotl32(x14 ^ x2, 8) - movdqa 0x20(%rsp),%xmm0 - paddd %xmm6,%xmm0 - movdqa %xmm0,0x20(%rsp) - pxor %xmm0,%xmm14 - pshufb %xmm2,%xmm14 - # x3 += x7, x15 = rotl32(x15 ^ x3, 8) - movdqa 0x30(%rsp),%xmm0 - paddd %xmm7,%xmm0 - movdqa %xmm0,0x30(%rsp) - pxor %xmm0,%xmm15 - pshufb %xmm2,%xmm15 - - # x8 += x12, x4 = rotl32(x4 ^ x8, 7) - paddd %xmm12,%xmm8 - pxor %xmm8,%xmm4 - movdqa %xmm4,%xmm0 - pslld $7,%xmm0 - psrld $25,%xmm4 - por %xmm0,%xmm4 - # x9 += x13, x5 = rotl32(x5 ^ x9, 7) - paddd %xmm13,%xmm9 - pxor %xmm9,%xmm5 - movdqa %xmm5,%xmm0 - pslld $7,%xmm0 - psrld $25,%xmm5 - por %xmm0,%xmm5 - # x10 += x14, x6 = rotl32(x6 ^ x10, 7) - paddd %xmm14,%xmm10 - pxor %xmm10,%xmm6 - movdqa %xmm6,%xmm0 - pslld $7,%xmm0 - psrld $25,%xmm6 - por %xmm0,%xmm6 - # x11 += x15, x7 = rotl32(x7 ^ x11, 7) - paddd %xmm15,%xmm11 - pxor %xmm11,%xmm7 - movdqa %xmm7,%xmm0 - pslld $7,%xmm0 - psrld $25,%xmm7 - por %xmm0,%xmm7 - - # x0 += x5, x15 = rotl32(x15 ^ x0, 16) - 
movdqa 0x00(%rsp),%xmm0 - paddd %xmm5,%xmm0 - movdqa %xmm0,0x00(%rsp) - pxor %xmm0,%xmm15 - pshufb %xmm3,%xmm15 - # x1 += x6, x12 = rotl32(x12 ^ x1, 16) - movdqa 0x10(%rsp),%xmm0 - paddd %xmm6,%xmm0 - movdqa %xmm0,0x10(%rsp) - pxor %xmm0,%xmm12 - pshufb %xmm3,%xmm12 - # x2 += x7, x13 = rotl32(x13 ^ x2, 16) - movdqa 0x20(%rsp),%xmm0 - paddd %xmm7,%xmm0 - movdqa %xmm0,0x20(%rsp) - pxor %xmm0,%xmm13 - pshufb %xmm3,%xmm13 - # x3 += x4, x14 = rotl32(x14 ^ x3, 16) - movdqa 0x30(%rsp),%xmm0 - paddd %xmm4,%xmm0 - movdqa %xmm0,0x30(%rsp) - pxor %xmm0,%xmm14 - pshufb %xmm3,%xmm14 - - # x10 += x15, x5 = rotl32(x5 ^ x10, 12) - paddd %xmm15,%xmm10 - pxor %xmm10,%xmm5 - movdqa %xmm5,%xmm0 - pslld $12,%xmm0 - psrld $20,%xmm5 - por %xmm0,%xmm5 - # x11 += x12, x6 = rotl32(x6 ^ x11, 12) - paddd %xmm12,%xmm11 - pxor %xmm11,%xmm6 - movdqa %xmm6,%xmm0 - pslld $12,%xmm0 - psrld $20,%xmm6 - por %xmm0,%xmm6 - # x8 += x13, x7 = rotl32(x7 ^ x8, 12) - paddd %xmm13,%xmm8 - pxor %xmm8,%xmm7 - movdqa %xmm7,%xmm0 - pslld $12,%xmm0 - psrld $20,%xmm7 - por %xmm0,%xmm7 - # x9 += x14, x4 = rotl32(x4 ^ x9, 12) - paddd %xmm14,%xmm9 - pxor %xmm9,%xmm4 - movdqa %xmm4,%xmm0 - pslld $12,%xmm0 - psrld $20,%xmm4 - por %xmm0,%xmm4 - - # x0 += x5, x15 = rotl32(x15 ^ x0, 8) - movdqa 0x00(%rsp),%xmm0 - paddd %xmm5,%xmm0 - movdqa %xmm0,0x00(%rsp) - pxor %xmm0,%xmm15 - pshufb %xmm2,%xmm15 - # x1 += x6, x12 = rotl32(x12 ^ x1, 8) - movdqa 0x10(%rsp),%xmm0 - paddd %xmm6,%xmm0 - movdqa %xmm0,0x10(%rsp) - pxor %xmm0,%xmm12 - pshufb %xmm2,%xmm12 - # x2 += x7, x13 = rotl32(x13 ^ x2, 8) - movdqa 0x20(%rsp),%xmm0 - paddd %xmm7,%xmm0 - movdqa %xmm0,0x20(%rsp) - pxor %xmm0,%xmm13 - pshufb %xmm2,%xmm13 - # x3 += x4, x14 = rotl32(x14 ^ x3, 8) - movdqa 0x30(%rsp),%xmm0 - paddd %xmm4,%xmm0 - movdqa %xmm0,0x30(%rsp) - pxor %xmm0,%xmm14 - pshufb %xmm2,%xmm14 - - # x10 += x15, x5 = rotl32(x5 ^ x10, 7) - paddd %xmm15,%xmm10 - pxor %xmm10,%xmm5 - movdqa %xmm5,%xmm0 - pslld $7,%xmm0 - psrld $25,%xmm5 - por %xmm0,%xmm5 - # x11 += x12, x6 = rotl32(x6 ^ x11, 7) - paddd %xmm12,%xmm11 - pxor %xmm11,%xmm6 - movdqa %xmm6,%xmm0 - pslld $7,%xmm0 - psrld $25,%xmm6 - por %xmm0,%xmm6 - # x8 += x13, x7 = rotl32(x7 ^ x8, 7) - paddd %xmm13,%xmm8 - pxor %xmm8,%xmm7 - movdqa %xmm7,%xmm0 - pslld $7,%xmm0 - psrld $25,%xmm7 - por %xmm0,%xmm7 - # x9 += x14, x4 = rotl32(x4 ^ x9, 7) - paddd %xmm14,%xmm9 - pxor %xmm9,%xmm4 - movdqa %xmm4,%xmm0 - pslld $7,%xmm0 - psrld $25,%xmm4 - por %xmm0,%xmm4 - - dec %ecx - jnz .Ldoubleround4 - - # x0[0-3] += s0[0] - # x1[0-3] += s0[1] - movq 0x00(%rdi),%xmm3 - pshufd $0x00,%xmm3,%xmm2 - pshufd $0x55,%xmm3,%xmm3 - paddd 0x00(%rsp),%xmm2 - movdqa %xmm2,0x00(%rsp) - paddd 0x10(%rsp),%xmm3 - movdqa %xmm3,0x10(%rsp) - # x2[0-3] += s0[2] - # x3[0-3] += s0[3] - movq 0x08(%rdi),%xmm3 - pshufd $0x00,%xmm3,%xmm2 - pshufd $0x55,%xmm3,%xmm3 - paddd 0x20(%rsp),%xmm2 - movdqa %xmm2,0x20(%rsp) - paddd 0x30(%rsp),%xmm3 - movdqa %xmm3,0x30(%rsp) - - # x4[0-3] += s1[0] - # x5[0-3] += s1[1] - movq 0x10(%rdi),%xmm3 - pshufd $0x00,%xmm3,%xmm2 - pshufd $0x55,%xmm3,%xmm3 - paddd %xmm2,%xmm4 - paddd %xmm3,%xmm5 - # x6[0-3] += s1[2] - # x7[0-3] += s1[3] - movq 0x18(%rdi),%xmm3 - pshufd $0x00,%xmm3,%xmm2 - pshufd $0x55,%xmm3,%xmm3 - paddd %xmm2,%xmm6 - paddd %xmm3,%xmm7 - - # x8[0-3] += s2[0] - # x9[0-3] += s2[1] - movq 0x20(%rdi),%xmm3 - pshufd $0x00,%xmm3,%xmm2 - pshufd $0x55,%xmm3,%xmm3 - paddd %xmm2,%xmm8 - paddd %xmm3,%xmm9 - # x10[0-3] += s2[2] - # x11[0-3] += s2[3] - movq 0x28(%rdi),%xmm3 - pshufd $0x00,%xmm3,%xmm2 - pshufd $0x55,%xmm3,%xmm3 - paddd %xmm2,%xmm10 
- paddd %xmm3,%xmm11 - - # x12[0-3] += s3[0] - # x13[0-3] += s3[1] - movq 0x30(%rdi),%xmm3 - pshufd $0x00,%xmm3,%xmm2 - pshufd $0x55,%xmm3,%xmm3 - paddd %xmm2,%xmm12 - paddd %xmm3,%xmm13 - # x14[0-3] += s3[2] - # x15[0-3] += s3[3] - movq 0x38(%rdi),%xmm3 - pshufd $0x00,%xmm3,%xmm2 - pshufd $0x55,%xmm3,%xmm3 - paddd %xmm2,%xmm14 - paddd %xmm3,%xmm15 - - # x12 += counter values 0-3 - paddd %xmm1,%xmm12 - - # interleave 32-bit words in state n, n+1 - movdqa 0x00(%rsp),%xmm0 - movdqa 0x10(%rsp),%xmm1 - movdqa %xmm0,%xmm2 - punpckldq %xmm1,%xmm2 - punpckhdq %xmm1,%xmm0 - movdqa %xmm2,0x00(%rsp) - movdqa %xmm0,0x10(%rsp) - movdqa 0x20(%rsp),%xmm0 - movdqa 0x30(%rsp),%xmm1 - movdqa %xmm0,%xmm2 - punpckldq %xmm1,%xmm2 - punpckhdq %xmm1,%xmm0 - movdqa %xmm2,0x20(%rsp) - movdqa %xmm0,0x30(%rsp) - movdqa %xmm4,%xmm0 - punpckldq %xmm5,%xmm4 - punpckhdq %xmm5,%xmm0 - movdqa %xmm0,%xmm5 - movdqa %xmm6,%xmm0 - punpckldq %xmm7,%xmm6 - punpckhdq %xmm7,%xmm0 - movdqa %xmm0,%xmm7 - movdqa %xmm8,%xmm0 - punpckldq %xmm9,%xmm8 - punpckhdq %xmm9,%xmm0 - movdqa %xmm0,%xmm9 - movdqa %xmm10,%xmm0 - punpckldq %xmm11,%xmm10 - punpckhdq %xmm11,%xmm0 - movdqa %xmm0,%xmm11 - movdqa %xmm12,%xmm0 - punpckldq %xmm13,%xmm12 - punpckhdq %xmm13,%xmm0 - movdqa %xmm0,%xmm13 - movdqa %xmm14,%xmm0 - punpckldq %xmm15,%xmm14 - punpckhdq %xmm15,%xmm0 - movdqa %xmm0,%xmm15 - - # interleave 64-bit words in state n, n+2 - movdqa 0x00(%rsp),%xmm0 - movdqa 0x20(%rsp),%xmm1 - movdqa %xmm0,%xmm2 - punpcklqdq %xmm1,%xmm2 - punpckhqdq %xmm1,%xmm0 - movdqa %xmm2,0x00(%rsp) - movdqa %xmm0,0x20(%rsp) - movdqa 0x10(%rsp),%xmm0 - movdqa 0x30(%rsp),%xmm1 - movdqa %xmm0,%xmm2 - punpcklqdq %xmm1,%xmm2 - punpckhqdq %xmm1,%xmm0 - movdqa %xmm2,0x10(%rsp) - movdqa %xmm0,0x30(%rsp) - movdqa %xmm4,%xmm0 - punpcklqdq %xmm6,%xmm4 - punpckhqdq %xmm6,%xmm0 - movdqa %xmm0,%xmm6 - movdqa %xmm5,%xmm0 - punpcklqdq %xmm7,%xmm5 - punpckhqdq %xmm7,%xmm0 - movdqa %xmm0,%xmm7 - movdqa %xmm8,%xmm0 - punpcklqdq %xmm10,%xmm8 - punpckhqdq %xmm10,%xmm0 - movdqa %xmm0,%xmm10 - movdqa %xmm9,%xmm0 - punpcklqdq %xmm11,%xmm9 - punpckhqdq %xmm11,%xmm0 - movdqa %xmm0,%xmm11 - movdqa %xmm12,%xmm0 - punpcklqdq %xmm14,%xmm12 - punpckhqdq %xmm14,%xmm0 - movdqa %xmm0,%xmm14 - movdqa %xmm13,%xmm0 - punpcklqdq %xmm15,%xmm13 - punpckhqdq %xmm15,%xmm0 - movdqa %xmm0,%xmm15 - - # xor with corresponding input, write to output - movdqa 0x00(%rsp),%xmm0 - movdqu 0x00(%rdx),%xmm1 - pxor %xmm1,%xmm0 - movdqu %xmm0,0x00(%rsi) - movdqa 0x10(%rsp),%xmm0 - movdqu 0x80(%rdx),%xmm1 - pxor %xmm1,%xmm0 - movdqu %xmm0,0x80(%rsi) - movdqa 0x20(%rsp),%xmm0 - movdqu 0x40(%rdx),%xmm1 - pxor %xmm1,%xmm0 - movdqu %xmm0,0x40(%rsi) - movdqa 0x30(%rsp),%xmm0 - movdqu 0xc0(%rdx),%xmm1 - pxor %xmm1,%xmm0 - movdqu %xmm0,0xc0(%rsi) - movdqu 0x10(%rdx),%xmm1 - pxor %xmm1,%xmm4 - movdqu %xmm4,0x10(%rsi) - movdqu 0x90(%rdx),%xmm1 - pxor %xmm1,%xmm5 - movdqu %xmm5,0x90(%rsi) - movdqu 0x50(%rdx),%xmm1 - pxor %xmm1,%xmm6 - movdqu %xmm6,0x50(%rsi) - movdqu 0xd0(%rdx),%xmm1 - pxor %xmm1,%xmm7 - movdqu %xmm7,0xd0(%rsi) - movdqu 0x20(%rdx),%xmm1 - pxor %xmm1,%xmm8 - movdqu %xmm8,0x20(%rsi) - movdqu 0xa0(%rdx),%xmm1 - pxor %xmm1,%xmm9 - movdqu %xmm9,0xa0(%rsi) - movdqu 0x60(%rdx),%xmm1 - pxor %xmm1,%xmm10 - movdqu %xmm10,0x60(%rsi) - movdqu 0xe0(%rdx),%xmm1 - pxor %xmm1,%xmm11 - movdqu %xmm11,0xe0(%rsi) - movdqu 0x30(%rdx),%xmm1 - pxor %xmm1,%xmm12 - movdqu %xmm12,0x30(%rsi) - movdqu 0xb0(%rdx),%xmm1 - pxor %xmm1,%xmm13 - movdqu %xmm13,0xb0(%rsi) - movdqu 0x70(%rdx),%xmm1 - pxor %xmm1,%xmm14 - movdqu %xmm14,0x70(%rsi) - movdqu 
0xf0(%rdx),%xmm1 - pxor %xmm1,%xmm15 - movdqu %xmm15,0xf0(%rsi) - - lea -8(%r10),%rsp - ret -ENDPROC(chacha20_asm_4block_xor_ssse3) - -ENTRY(hchacha20_asm_ssse3) - # %rdi: 32 byte output key, o - # %rsi: 16 byte nonce, n - # %rdx: 32 byte input key, i - - # x0 = constant - movdqa CONST(%rip),%xmm0 - # x1, x2 = i - movdqu 0x00(%rdx),%xmm1 - movdqu 0x10(%rdx),%xmm2 - # x3 = n - movdqu 0x00(%rsi),%xmm3 - - movdqa %xmm0,%xmm8 - movdqa %xmm1,%xmm9 - movdqa %xmm2,%xmm10 - movdqa %xmm3,%xmm11 - movdqa ROT8(%rip),%xmm4 - movdqa ROT16(%rip),%xmm5 - - mov $10,%ecx - -.Lhdoubleround: - - # x0 += x1, x3 = rotl32(x3 ^ x0, 16) - paddd %xmm1,%xmm0 - pxor %xmm0,%xmm3 - pshufb %xmm5,%xmm3 - - # x2 += x3, x1 = rotl32(x1 ^ x2, 12) - paddd %xmm3,%xmm2 - pxor %xmm2,%xmm1 - movdqa %xmm1,%xmm6 - pslld $12,%xmm6 - psrld $20,%xmm1 - por %xmm6,%xmm1 - - # x0 += x1, x3 = rotl32(x3 ^ x0, 8) - paddd %xmm1,%xmm0 - pxor %xmm0,%xmm3 - pshufb %xmm4,%xmm3 - - # x2 += x3, x1 = rotl32(x1 ^ x2, 7) - paddd %xmm3,%xmm2 - pxor %xmm2,%xmm1 - movdqa %xmm1,%xmm7 - pslld $7,%xmm7 - psrld $25,%xmm1 - por %xmm7,%xmm1 - - # x1 = shuffle32(x1, MASK(0, 3, 2, 1)) - pshufd $0x39,%xmm1,%xmm1 - # x2 = shuffle32(x2, MASK(1, 0, 3, 2)) - pshufd $0x4e,%xmm2,%xmm2 - # x3 = shuffle32(x3, MASK(2, 1, 0, 3)) - pshufd $0x93,%xmm3,%xmm3 - - # x0 += x1, x3 = rotl32(x3 ^ x0, 16) - paddd %xmm1,%xmm0 - pxor %xmm0,%xmm3 - pshufb %xmm5,%xmm3 - - # x2 += x3, x1 = rotl32(x1 ^ x2, 12) - paddd %xmm3,%xmm2 - pxor %xmm2,%xmm1 - movdqa %xmm1,%xmm6 - pslld $12,%xmm6 - psrld $20,%xmm1 - por %xmm6,%xmm1 - - # x0 += x1, x3 = rotl32(x3 ^ x0, 8) - paddd %xmm1,%xmm0 - pxor %xmm0,%xmm3 - pshufb %xmm4,%xmm3 - - # x2 += x3, x1 = rotl32(x1 ^ x2, 7) - paddd %xmm3,%xmm2 - pxor %xmm2,%xmm1 - movdqa %xmm1,%xmm7 - pslld $7,%xmm7 - psrld $25,%xmm1 - por %xmm7,%xmm1 - - # x1 = shuffle32(x1, MASK(2, 1, 0, 3)) - pshufd $0x93,%xmm1,%xmm1 - # x2 = shuffle32(x2, MASK(1, 0, 3, 2)) - pshufd $0x4e,%xmm2,%xmm2 - # x3 = shuffle32(x3, MASK(0, 3, 2, 1)) - pshufd $0x39,%xmm3,%xmm3 - - dec %ecx - jnz .Lhdoubleround - - # o0 = x0 - movdqu %xmm0,0x00(%rdi) - # o1 = x3 - movdqu %xmm3,0x10(%rdi) - ret -ENDPROC(hchacha20_asm_ssse3) diff --git a/src/crypto/chacha20-x86_64.S b/src/crypto/chacha20-x86_64.S new file mode 100644 index 0000000..64cbc83 --- /dev/null +++ b/src/crypto/chacha20-x86_64.S @@ -0,0 +1,2084 @@ +/* Copyright 2016 The OpenSSL Project Authors. All Rights Reserved. + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * - Redistributions of source code must retain copyright notices, + * this list of conditions and the following disclaimer. + * - Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials + * provided with the distribution. + * - Neither the name of the CRYPTOGAMS nor the names of its + * copyright holder and contributors may be used to endorse or + * promote products derived from this software without specific + * prior written permission. + * ALTERNATIVELY, provided that this notice is retained in full, this + * product may be distributed under the terms of the GNU General Public + * License (GPL), in which case the provisions of the GPL apply INSTEAD OF + * those given above. 
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +#include + +.section .rodata.cst16.Lzero, "aM", @progbits, 16 +.align 16 +.Lzero: +.long 0,0,0,0 +.section .rodata.cst16.Lone, "aM", @progbits, 16 +.align 16 +.Lone: +.long 1,0,0,0 +.section .rodata.cst16.Linc, "aM", @progbits, 16 +.align 16 +.Linc: +.long 0,1,2,3 +.section .rodata.cst16.Lfour, "aM", @progbits, 16 +.align 16 +.Lfour: +.long 4,4,4,4 +.section .rodata.cst32.Lincy, "aM", @progbits, 32 +.align 32 +.Lincy: +.long 0,2,4,6,1,3,5,7 +.section .rodata.cst32.Leight, "aM", @progbits, 32 +.align 32 +.Leight: +.long 8,8,8,8,8,8,8,8 +.section .rodata.cst16.Lrot16, "aM", @progbits, 16 +.align 16 +.Lrot16: +.byte 0x2,0x3,0x0,0x1, 0x6,0x7,0x4,0x5, 0xa,0xb,0x8,0x9, 0xe,0xf,0xc,0xd +.section .rodata.cst16.Lrot24, "aM", @progbits, 16 +.align 16 +.Lrot24: +.byte 0x3,0x0,0x1,0x2, 0x7,0x4,0x5,0x6, 0xb,0x8,0x9,0xa, 0xf,0xc,0xd,0xe +.section .rodata.cst16.Lsigma, "aM", @progbits, 16 +.align 16 +.Lsigma: +.byte 101,120,112,97,110,100,32,51,50,45,98,121,116,101,32,107,0 +.section .rodata.cst64.Lzeroz, "aM", @progbits, 64 +.align 64 +.Lzeroz: +.long 0,0,0,0, 1,0,0,0, 2,0,0,0, 3,0,0,0 +.section .rodata.cst64.Lfourz, "aM", @progbits, 64 +.align 64 +.Lfourz: +.long 4,0,0,0, 4,0,0,0, 4,0,0,0, 4,0,0,0 +.section .rodata.cst64.Lincz, "aM", @progbits, 64 +.align 64 +.Lincz: +.long 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15 +.section .rodata.cst64.Lsixteen, "aM", @progbits, 64 +.align 64 +.Lsixteen: +.long 16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16 + +.text + +#ifdef CONFIG_AS_SSSE3 +.align 32 +ENTRY(hchacha20_ssse3) + movdqa .Lsigma(%rip),%xmm0 + movdqu (%rdx),%xmm1 + movdqu 16(%rdx),%xmm2 + movdqu (%rsi),%xmm3 + movdqa .Lrot16(%rip),%xmm6 + movdqa .Lrot24(%rip),%xmm7 + movq $10,%r8 + .align 32 +.Loop_hssse3: + paddd %xmm1,%xmm0 + pxor %xmm0,%xmm3 + pshufb %xmm6,%xmm3 + paddd %xmm3,%xmm2 + pxor %xmm2,%xmm1 + movdqa %xmm1,%xmm4 + psrld $20,%xmm1 + pslld $12,%xmm4 + por %xmm4,%xmm1 + paddd %xmm1,%xmm0 + pxor %xmm0,%xmm3 + pshufb %xmm7,%xmm3 + paddd %xmm3,%xmm2 + pxor %xmm2,%xmm1 + movdqa %xmm1,%xmm4 + psrld $25,%xmm1 + pslld $7,%xmm4 + por %xmm4,%xmm1 + pshufd $78,%xmm2,%xmm2 + pshufd $57,%xmm1,%xmm1 + pshufd $147,%xmm3,%xmm3 + nop + paddd %xmm1,%xmm0 + pxor %xmm0,%xmm3 + pshufb %xmm6,%xmm3 + paddd %xmm3,%xmm2 + pxor %xmm2,%xmm1 + movdqa %xmm1,%xmm4 + psrld $20,%xmm1 + pslld $12,%xmm4 + por %xmm4,%xmm1 + paddd %xmm1,%xmm0 + pxor %xmm0,%xmm3 + pshufb %xmm7,%xmm3 + paddd %xmm3,%xmm2 + pxor %xmm2,%xmm1 + movdqa %xmm1,%xmm4 + psrld $25,%xmm1 + pslld $7,%xmm4 + por %xmm4,%xmm1 + pshufd $78,%xmm2,%xmm2 + pshufd $147,%xmm1,%xmm1 + pshufd $57,%xmm3,%xmm3 + decq %r8 + jnz .Loop_hssse3 + movdqu %xmm0,0(%rdi) + movdqu %xmm3,16(%rdi) + ret +ENDPROC(hchacha20_ssse3) + +.align 32 +ENTRY(chacha20_ssse3) +.Lchacha20_ssse3: + cmpq 
$0,%rdx + je .Lssse3_epilogue + leaq 8(%rsp),%r10 + + cmpq $128,%rdx + ja .Lchacha20_4x + +.Ldo_sse3_after_all: + subq $64+8,%rsp + andq $-32,%rsp + movdqa .Lsigma(%rip),%xmm0 + movdqu (%rcx),%xmm1 + movdqu 16(%rcx),%xmm2 + movdqu (%r8),%xmm3 + movdqa .Lrot16(%rip),%xmm6 + movdqa .Lrot24(%rip),%xmm7 + + movdqa %xmm0,0(%rsp) + movdqa %xmm1,16(%rsp) + movdqa %xmm2,32(%rsp) + movdqa %xmm3,48(%rsp) + movq $10,%r8 + jmp .Loop_ssse3 + +.align 32 +.Loop_outer_ssse3: + movdqa .Lone(%rip),%xmm3 + movdqa 0(%rsp),%xmm0 + movdqa 16(%rsp),%xmm1 + movdqa 32(%rsp),%xmm2 + paddd 48(%rsp),%xmm3 + movq $10,%r8 + movdqa %xmm3,48(%rsp) + jmp .Loop_ssse3 + +.align 32 +.Loop_ssse3: + paddd %xmm1,%xmm0 + pxor %xmm0,%xmm3 + pshufb %xmm6,%xmm3 + paddd %xmm3,%xmm2 + pxor %xmm2,%xmm1 + movdqa %xmm1,%xmm4 + psrld $20,%xmm1 + pslld $12,%xmm4 + por %xmm4,%xmm1 + paddd %xmm1,%xmm0 + pxor %xmm0,%xmm3 + pshufb %xmm7,%xmm3 + paddd %xmm3,%xmm2 + pxor %xmm2,%xmm1 + movdqa %xmm1,%xmm4 + psrld $25,%xmm1 + pslld $7,%xmm4 + por %xmm4,%xmm1 + pshufd $78,%xmm2,%xmm2 + pshufd $57,%xmm1,%xmm1 + pshufd $147,%xmm3,%xmm3 + nop + paddd %xmm1,%xmm0 + pxor %xmm0,%xmm3 + pshufb %xmm6,%xmm3 + paddd %xmm3,%xmm2 + pxor %xmm2,%xmm1 + movdqa %xmm1,%xmm4 + psrld $20,%xmm1 + pslld $12,%xmm4 + por %xmm4,%xmm1 + paddd %xmm1,%xmm0 + pxor %xmm0,%xmm3 + pshufb %xmm7,%xmm3 + paddd %xmm3,%xmm2 + pxor %xmm2,%xmm1 + movdqa %xmm1,%xmm4 + psrld $25,%xmm1 + pslld $7,%xmm4 + por %xmm4,%xmm1 + pshufd $78,%xmm2,%xmm2 + pshufd $147,%xmm1,%xmm1 + pshufd $57,%xmm3,%xmm3 + decq %r8 + jnz .Loop_ssse3 + paddd 0(%rsp),%xmm0 + paddd 16(%rsp),%xmm1 + paddd 32(%rsp),%xmm2 + paddd 48(%rsp),%xmm3 + + cmpq $64,%rdx + jb .Ltail_ssse3 + + movdqu 0(%rsi),%xmm4 + movdqu 16(%rsi),%xmm5 + pxor %xmm4,%xmm0 + movdqu 32(%rsi),%xmm4 + pxor %xmm5,%xmm1 + movdqu 48(%rsi),%xmm5 + leaq 64(%rsi),%rsi + pxor %xmm4,%xmm2 + pxor %xmm5,%xmm3 + + movdqu %xmm0,0(%rdi) + movdqu %xmm1,16(%rdi) + movdqu %xmm2,32(%rdi) + movdqu %xmm3,48(%rdi) + leaq 64(%rdi),%rdi + + subq $64,%rdx + jnz .Loop_outer_ssse3 + + jmp .Ldone_ssse3 + +.align 16 +.Ltail_ssse3: + movdqa %xmm0,0(%rsp) + movdqa %xmm1,16(%rsp) + movdqa %xmm2,32(%rsp) + movdqa %xmm3,48(%rsp) + xorq %r8,%r8 + +.Loop_tail_ssse3: + movzbl (%rsi,%r8,1),%eax + movzbl (%rsp,%r8,1),%ecx + leaq 1(%r8),%r8 + xorl %ecx,%eax + movb %al,-1(%rdi,%r8,1) + decq %rdx + jnz .Loop_tail_ssse3 + +.Ldone_ssse3: + leaq -8(%r10),%rsp + +.Lssse3_epilogue: + ret + +.align 32 +.Lchacha20_4x: + leaq 8(%rsp),%r10 + +.Lproceed4x: + subq $0x140+8,%rsp + andq $-32,%rsp + movdqa .Lsigma(%rip),%xmm11 + movdqu (%rcx),%xmm15 + movdqu 16(%rcx),%xmm7 + movdqu (%r8),%xmm3 + leaq 256(%rsp),%rcx + leaq .Lrot16(%rip),%r9 + leaq .Lrot24(%rip),%r11 + + pshufd $0x00,%xmm11,%xmm8 + pshufd $0x55,%xmm11,%xmm9 + movdqa %xmm8,64(%rsp) + pshufd $0xaa,%xmm11,%xmm10 + movdqa %xmm9,80(%rsp) + pshufd $0xff,%xmm11,%xmm11 + movdqa %xmm10,96(%rsp) + movdqa %xmm11,112(%rsp) + + pshufd $0x00,%xmm15,%xmm12 + pshufd $0x55,%xmm15,%xmm13 + movdqa %xmm12,128-256(%rcx) + pshufd $0xaa,%xmm15,%xmm14 + movdqa %xmm13,144-256(%rcx) + pshufd $0xff,%xmm15,%xmm15 + movdqa %xmm14,160-256(%rcx) + movdqa %xmm15,176-256(%rcx) + + pshufd $0x00,%xmm7,%xmm4 + pshufd $0x55,%xmm7,%xmm5 + movdqa %xmm4,192-256(%rcx) + pshufd $0xaa,%xmm7,%xmm6 + movdqa %xmm5,208-256(%rcx) + pshufd $0xff,%xmm7,%xmm7 + movdqa %xmm6,224-256(%rcx) + movdqa %xmm7,240-256(%rcx) + + pshufd $0x00,%xmm3,%xmm0 + pshufd $0x55,%xmm3,%xmm1 + paddd .Linc(%rip),%xmm0 + pshufd $0xaa,%xmm3,%xmm2 + movdqa %xmm1,272-256(%rcx) + pshufd $0xff,%xmm3,%xmm3 + movdqa 
%xmm2,288-256(%rcx) + movdqa %xmm3,304-256(%rcx) + + jmp .Loop_enter4x + +.align 32 +.Loop_outer4x: + movdqa 64(%rsp),%xmm8 + movdqa 80(%rsp),%xmm9 + movdqa 96(%rsp),%xmm10 + movdqa 112(%rsp),%xmm11 + movdqa 128-256(%rcx),%xmm12 + movdqa 144-256(%rcx),%xmm13 + movdqa 160-256(%rcx),%xmm14 + movdqa 176-256(%rcx),%xmm15 + movdqa 192-256(%rcx),%xmm4 + movdqa 208-256(%rcx),%xmm5 + movdqa 224-256(%rcx),%xmm6 + movdqa 240-256(%rcx),%xmm7 + movdqa 256-256(%rcx),%xmm0 + movdqa 272-256(%rcx),%xmm1 + movdqa 288-256(%rcx),%xmm2 + movdqa 304-256(%rcx),%xmm3 + paddd .Lfour(%rip),%xmm0 + +.Loop_enter4x: + movdqa %xmm6,32(%rsp) + movdqa %xmm7,48(%rsp) + movdqa (%r9),%xmm7 + movl $10,%eax + movdqa %xmm0,256-256(%rcx) + jmp .Loop4x + +.align 32 +.Loop4x: + paddd %xmm12,%xmm8 + paddd %xmm13,%xmm9 + pxor %xmm8,%xmm0 + pxor %xmm9,%xmm1 + pshufb %xmm7,%xmm0 + pshufb %xmm7,%xmm1 + paddd %xmm0,%xmm4 + paddd %xmm1,%xmm5 + pxor %xmm4,%xmm12 + pxor %xmm5,%xmm13 + movdqa %xmm12,%xmm6 + pslld $12,%xmm12 + psrld $20,%xmm6 + movdqa %xmm13,%xmm7 + pslld $12,%xmm13 + por %xmm6,%xmm12 + psrld $20,%xmm7 + movdqa (%r11),%xmm6 + por %xmm7,%xmm13 + paddd %xmm12,%xmm8 + paddd %xmm13,%xmm9 + pxor %xmm8,%xmm0 + pxor %xmm9,%xmm1 + pshufb %xmm6,%xmm0 + pshufb %xmm6,%xmm1 + paddd %xmm0,%xmm4 + paddd %xmm1,%xmm5 + pxor %xmm4,%xmm12 + pxor %xmm5,%xmm13 + movdqa %xmm12,%xmm7 + pslld $7,%xmm12 + psrld $25,%xmm7 + movdqa %xmm13,%xmm6 + pslld $7,%xmm13 + por %xmm7,%xmm12 + psrld $25,%xmm6 + movdqa (%r9),%xmm7 + por %xmm6,%xmm13 + movdqa %xmm4,0(%rsp) + movdqa %xmm5,16(%rsp) + movdqa 32(%rsp),%xmm4 + movdqa 48(%rsp),%xmm5 + paddd %xmm14,%xmm10 + paddd %xmm15,%xmm11 + pxor %xmm10,%xmm2 + pxor %xmm11,%xmm3 + pshufb %xmm7,%xmm2 + pshufb %xmm7,%xmm3 + paddd %xmm2,%xmm4 + paddd %xmm3,%xmm5 + pxor %xmm4,%xmm14 + pxor %xmm5,%xmm15 + movdqa %xmm14,%xmm6 + pslld $12,%xmm14 + psrld $20,%xmm6 + movdqa %xmm15,%xmm7 + pslld $12,%xmm15 + por %xmm6,%xmm14 + psrld $20,%xmm7 + movdqa (%r11),%xmm6 + por %xmm7,%xmm15 + paddd %xmm14,%xmm10 + paddd %xmm15,%xmm11 + pxor %xmm10,%xmm2 + pxor %xmm11,%xmm3 + pshufb %xmm6,%xmm2 + pshufb %xmm6,%xmm3 + paddd %xmm2,%xmm4 + paddd %xmm3,%xmm5 + pxor %xmm4,%xmm14 + pxor %xmm5,%xmm15 + movdqa %xmm14,%xmm7 + pslld $7,%xmm14 + psrld $25,%xmm7 + movdqa %xmm15,%xmm6 + pslld $7,%xmm15 + por %xmm7,%xmm14 + psrld $25,%xmm6 + movdqa (%r9),%xmm7 + por %xmm6,%xmm15 + paddd %xmm13,%xmm8 + paddd %xmm14,%xmm9 + pxor %xmm8,%xmm3 + pxor %xmm9,%xmm0 + pshufb %xmm7,%xmm3 + pshufb %xmm7,%xmm0 + paddd %xmm3,%xmm4 + paddd %xmm0,%xmm5 + pxor %xmm4,%xmm13 + pxor %xmm5,%xmm14 + movdqa %xmm13,%xmm6 + pslld $12,%xmm13 + psrld $20,%xmm6 + movdqa %xmm14,%xmm7 + pslld $12,%xmm14 + por %xmm6,%xmm13 + psrld $20,%xmm7 + movdqa (%r11),%xmm6 + por %xmm7,%xmm14 + paddd %xmm13,%xmm8 + paddd %xmm14,%xmm9 + pxor %xmm8,%xmm3 + pxor %xmm9,%xmm0 + pshufb %xmm6,%xmm3 + pshufb %xmm6,%xmm0 + paddd %xmm3,%xmm4 + paddd %xmm0,%xmm5 + pxor %xmm4,%xmm13 + pxor %xmm5,%xmm14 + movdqa %xmm13,%xmm7 + pslld $7,%xmm13 + psrld $25,%xmm7 + movdqa %xmm14,%xmm6 + pslld $7,%xmm14 + por %xmm7,%xmm13 + psrld $25,%xmm6 + movdqa (%r9),%xmm7 + por %xmm6,%xmm14 + movdqa %xmm4,32(%rsp) + movdqa %xmm5,48(%rsp) + movdqa 0(%rsp),%xmm4 + movdqa 16(%rsp),%xmm5 + paddd %xmm15,%xmm10 + paddd %xmm12,%xmm11 + pxor %xmm10,%xmm1 + pxor %xmm11,%xmm2 + pshufb %xmm7,%xmm1 + pshufb %xmm7,%xmm2 + paddd %xmm1,%xmm4 + paddd %xmm2,%xmm5 + pxor %xmm4,%xmm15 + pxor %xmm5,%xmm12 + movdqa %xmm15,%xmm6 + pslld $12,%xmm15 + psrld $20,%xmm6 + movdqa %xmm12,%xmm7 + pslld $12,%xmm12 + por %xmm6,%xmm15 + psrld 
$20,%xmm7 + movdqa (%r11),%xmm6 + por %xmm7,%xmm12 + paddd %xmm15,%xmm10 + paddd %xmm12,%xmm11 + pxor %xmm10,%xmm1 + pxor %xmm11,%xmm2 + pshufb %xmm6,%xmm1 + pshufb %xmm6,%xmm2 + paddd %xmm1,%xmm4 + paddd %xmm2,%xmm5 + pxor %xmm4,%xmm15 + pxor %xmm5,%xmm12 + movdqa %xmm15,%xmm7 + pslld $7,%xmm15 + psrld $25,%xmm7 + movdqa %xmm12,%xmm6 + pslld $7,%xmm12 + por %xmm7,%xmm15 + psrld $25,%xmm6 + movdqa (%r9),%xmm7 + por %xmm6,%xmm12 + decl %eax + jnz .Loop4x + + paddd 64(%rsp),%xmm8 + paddd 80(%rsp),%xmm9 + paddd 96(%rsp),%xmm10 + paddd 112(%rsp),%xmm11 + + movdqa %xmm8,%xmm6 + punpckldq %xmm9,%xmm8 + movdqa %xmm10,%xmm7 + punpckldq %xmm11,%xmm10 + punpckhdq %xmm9,%xmm6 + punpckhdq %xmm11,%xmm7 + movdqa %xmm8,%xmm9 + punpcklqdq %xmm10,%xmm8 + movdqa %xmm6,%xmm11 + punpcklqdq %xmm7,%xmm6 + punpckhqdq %xmm10,%xmm9 + punpckhqdq %xmm7,%xmm11 + paddd 128-256(%rcx),%xmm12 + paddd 144-256(%rcx),%xmm13 + paddd 160-256(%rcx),%xmm14 + paddd 176-256(%rcx),%xmm15 + + movdqa %xmm8,0(%rsp) + movdqa %xmm9,16(%rsp) + movdqa 32(%rsp),%xmm8 + movdqa 48(%rsp),%xmm9 + + movdqa %xmm12,%xmm10 + punpckldq %xmm13,%xmm12 + movdqa %xmm14,%xmm7 + punpckldq %xmm15,%xmm14 + punpckhdq %xmm13,%xmm10 + punpckhdq %xmm15,%xmm7 + movdqa %xmm12,%xmm13 + punpcklqdq %xmm14,%xmm12 + movdqa %xmm10,%xmm15 + punpcklqdq %xmm7,%xmm10 + punpckhqdq %xmm14,%xmm13 + punpckhqdq %xmm7,%xmm15 + paddd 192-256(%rcx),%xmm4 + paddd 208-256(%rcx),%xmm5 + paddd 224-256(%rcx),%xmm8 + paddd 240-256(%rcx),%xmm9 + + movdqa %xmm6,32(%rsp) + movdqa %xmm11,48(%rsp) + + movdqa %xmm4,%xmm14 + punpckldq %xmm5,%xmm4 + movdqa %xmm8,%xmm7 + punpckldq %xmm9,%xmm8 + punpckhdq %xmm5,%xmm14 + punpckhdq %xmm9,%xmm7 + movdqa %xmm4,%xmm5 + punpcklqdq %xmm8,%xmm4 + movdqa %xmm14,%xmm9 + punpcklqdq %xmm7,%xmm14 + punpckhqdq %xmm8,%xmm5 + punpckhqdq %xmm7,%xmm9 + paddd 256-256(%rcx),%xmm0 + paddd 272-256(%rcx),%xmm1 + paddd 288-256(%rcx),%xmm2 + paddd 304-256(%rcx),%xmm3 + + movdqa %xmm0,%xmm8 + punpckldq %xmm1,%xmm0 + movdqa %xmm2,%xmm7 + punpckldq %xmm3,%xmm2 + punpckhdq %xmm1,%xmm8 + punpckhdq %xmm3,%xmm7 + movdqa %xmm0,%xmm1 + punpcklqdq %xmm2,%xmm0 + movdqa %xmm8,%xmm3 + punpcklqdq %xmm7,%xmm8 + punpckhqdq %xmm2,%xmm1 + punpckhqdq %xmm7,%xmm3 + cmpq $256,%rdx + jb .Ltail4x + + movdqu 0(%rsi),%xmm6 + movdqu 16(%rsi),%xmm11 + movdqu 32(%rsi),%xmm2 + movdqu 48(%rsi),%xmm7 + pxor 0(%rsp),%xmm6 + pxor %xmm12,%xmm11 + pxor %xmm4,%xmm2 + pxor %xmm0,%xmm7 + + movdqu %xmm6,0(%rdi) + movdqu 64(%rsi),%xmm6 + movdqu %xmm11,16(%rdi) + movdqu 80(%rsi),%xmm11 + movdqu %xmm2,32(%rdi) + movdqu 96(%rsi),%xmm2 + movdqu %xmm7,48(%rdi) + movdqu 112(%rsi),%xmm7 + leaq 128(%rsi),%rsi + pxor 16(%rsp),%xmm6 + pxor %xmm13,%xmm11 + pxor %xmm5,%xmm2 + pxor %xmm1,%xmm7 + + movdqu %xmm6,64(%rdi) + movdqu 0(%rsi),%xmm6 + movdqu %xmm11,80(%rdi) + movdqu 16(%rsi),%xmm11 + movdqu %xmm2,96(%rdi) + movdqu 32(%rsi),%xmm2 + movdqu %xmm7,112(%rdi) + leaq 128(%rdi),%rdi + movdqu 48(%rsi),%xmm7 + pxor 32(%rsp),%xmm6 + pxor %xmm10,%xmm11 + pxor %xmm14,%xmm2 + pxor %xmm8,%xmm7 + + movdqu %xmm6,0(%rdi) + movdqu 64(%rsi),%xmm6 + movdqu %xmm11,16(%rdi) + movdqu 80(%rsi),%xmm11 + movdqu %xmm2,32(%rdi) + movdqu 96(%rsi),%xmm2 + movdqu %xmm7,48(%rdi) + movdqu 112(%rsi),%xmm7 + leaq 128(%rsi),%rsi + pxor 48(%rsp),%xmm6 + pxor %xmm15,%xmm11 + pxor %xmm9,%xmm2 + pxor %xmm3,%xmm7 + movdqu %xmm6,64(%rdi) + movdqu %xmm11,80(%rdi) + movdqu %xmm2,96(%rdi) + movdqu %xmm7,112(%rdi) + leaq 128(%rdi),%rdi + + subq $256,%rdx + jnz .Loop_outer4x + + jmp .Ldone4x + +.Ltail4x: + cmpq $192,%rdx + jae .L192_or_more4x + cmpq $128,%rdx 
+ jae .L128_or_more4x + cmpq $64,%rdx + jae .L64_or_more4x + + + xorq %r9,%r9 + + movdqa %xmm12,16(%rsp) + movdqa %xmm4,32(%rsp) + movdqa %xmm0,48(%rsp) + jmp .Loop_tail4x + +.align 32 +.L64_or_more4x: + movdqu 0(%rsi),%xmm6 + movdqu 16(%rsi),%xmm11 + movdqu 32(%rsi),%xmm2 + movdqu 48(%rsi),%xmm7 + pxor 0(%rsp),%xmm6 + pxor %xmm12,%xmm11 + pxor %xmm4,%xmm2 + pxor %xmm0,%xmm7 + movdqu %xmm6,0(%rdi) + movdqu %xmm11,16(%rdi) + movdqu %xmm2,32(%rdi) + movdqu %xmm7,48(%rdi) + je .Ldone4x + + movdqa 16(%rsp),%xmm6 + leaq 64(%rsi),%rsi + xorq %r9,%r9 + movdqa %xmm6,0(%rsp) + movdqa %xmm13,16(%rsp) + leaq 64(%rdi),%rdi + movdqa %xmm5,32(%rsp) + subq $64,%rdx + movdqa %xmm1,48(%rsp) + jmp .Loop_tail4x + +.align 32 +.L128_or_more4x: + movdqu 0(%rsi),%xmm6 + movdqu 16(%rsi),%xmm11 + movdqu 32(%rsi),%xmm2 + movdqu 48(%rsi),%xmm7 + pxor 0(%rsp),%xmm6 + pxor %xmm12,%xmm11 + pxor %xmm4,%xmm2 + pxor %xmm0,%xmm7 + + movdqu %xmm6,0(%rdi) + movdqu 64(%rsi),%xmm6 + movdqu %xmm11,16(%rdi) + movdqu 80(%rsi),%xmm11 + movdqu %xmm2,32(%rdi) + movdqu 96(%rsi),%xmm2 + movdqu %xmm7,48(%rdi) + movdqu 112(%rsi),%xmm7 + pxor 16(%rsp),%xmm6 + pxor %xmm13,%xmm11 + pxor %xmm5,%xmm2 + pxor %xmm1,%xmm7 + movdqu %xmm6,64(%rdi) + movdqu %xmm11,80(%rdi) + movdqu %xmm2,96(%rdi) + movdqu %xmm7,112(%rdi) + je .Ldone4x + + movdqa 32(%rsp),%xmm6 + leaq 128(%rsi),%rsi + xorq %r9,%r9 + movdqa %xmm6,0(%rsp) + movdqa %xmm10,16(%rsp) + leaq 128(%rdi),%rdi + movdqa %xmm14,32(%rsp) + subq $128,%rdx + movdqa %xmm8,48(%rsp) + jmp .Loop_tail4x + +.align 32 +.L192_or_more4x: + movdqu 0(%rsi),%xmm6 + movdqu 16(%rsi),%xmm11 + movdqu 32(%rsi),%xmm2 + movdqu 48(%rsi),%xmm7 + pxor 0(%rsp),%xmm6 + pxor %xmm12,%xmm11 + pxor %xmm4,%xmm2 + pxor %xmm0,%xmm7 + + movdqu %xmm6,0(%rdi) + movdqu 64(%rsi),%xmm6 + movdqu %xmm11,16(%rdi) + movdqu 80(%rsi),%xmm11 + movdqu %xmm2,32(%rdi) + movdqu 96(%rsi),%xmm2 + movdqu %xmm7,48(%rdi) + movdqu 112(%rsi),%xmm7 + leaq 128(%rsi),%rsi + pxor 16(%rsp),%xmm6 + pxor %xmm13,%xmm11 + pxor %xmm5,%xmm2 + pxor %xmm1,%xmm7 + + movdqu %xmm6,64(%rdi) + movdqu 0(%rsi),%xmm6 + movdqu %xmm11,80(%rdi) + movdqu 16(%rsi),%xmm11 + movdqu %xmm2,96(%rdi) + movdqu 32(%rsi),%xmm2 + movdqu %xmm7,112(%rdi) + leaq 128(%rdi),%rdi + movdqu 48(%rsi),%xmm7 + pxor 32(%rsp),%xmm6 + pxor %xmm10,%xmm11 + pxor %xmm14,%xmm2 + pxor %xmm8,%xmm7 + movdqu %xmm6,0(%rdi) + movdqu %xmm11,16(%rdi) + movdqu %xmm2,32(%rdi) + movdqu %xmm7,48(%rdi) + je .Ldone4x + + movdqa 48(%rsp),%xmm6 + leaq 64(%rsi),%rsi + xorq %r9,%r9 + movdqa %xmm6,0(%rsp) + movdqa %xmm15,16(%rsp) + leaq 64(%rdi),%rdi + movdqa %xmm9,32(%rsp) + subq $192,%rdx + movdqa %xmm3,48(%rsp) + +.Loop_tail4x: + movzbl (%rsi,%r9,1),%eax + movzbl (%rsp,%r9,1),%ecx + leaq 1(%r9),%r9 + xorl %ecx,%eax + movb %al,-1(%rdi,%r9,1) + decq %rdx + jnz .Loop_tail4x + +.Ldone4x: + leaq -8(%r10),%rsp + +.L4x_epilogue: + ret +ENDPROC(chacha20_ssse3) +#endif /* CONFIG_AS_SSSE3 */ + +#ifdef CONFIG_AS_AVX2 +.align 32 +ENTRY(chacha20_avx2) +.Lchacha20_avx2: + cmpq $0,%rdx + je .L8x_epilogue + leaq 8(%rsp),%r10 + + subq $0x280+8,%rsp + andq $-32,%rsp + vzeroupper + + vbroadcasti128 .Lsigma(%rip),%ymm11 + vbroadcasti128 (%rcx),%ymm3 + vbroadcasti128 16(%rcx),%ymm15 + vbroadcasti128 (%r8),%ymm7 + leaq 256(%rsp),%rcx + leaq 512(%rsp),%rax + leaq .Lrot16(%rip),%r9 + leaq .Lrot24(%rip),%r11 + + vpshufd $0x00,%ymm11,%ymm8 + vpshufd $0x55,%ymm11,%ymm9 + vmovdqa %ymm8,128-256(%rcx) + vpshufd $0xaa,%ymm11,%ymm10 + vmovdqa %ymm9,160-256(%rcx) + vpshufd $0xff,%ymm11,%ymm11 + vmovdqa %ymm10,192-256(%rcx) + vmovdqa 
%ymm11,224-256(%rcx) + + vpshufd $0x00,%ymm3,%ymm0 + vpshufd $0x55,%ymm3,%ymm1 + vmovdqa %ymm0,256-256(%rcx) + vpshufd $0xaa,%ymm3,%ymm2 + vmovdqa %ymm1,288-256(%rcx) + vpshufd $0xff,%ymm3,%ymm3 + vmovdqa %ymm2,320-256(%rcx) + vmovdqa %ymm3,352-256(%rcx) + + vpshufd $0x00,%ymm15,%ymm12 + vpshufd $0x55,%ymm15,%ymm13 + vmovdqa %ymm12,384-512(%rax) + vpshufd $0xaa,%ymm15,%ymm14 + vmovdqa %ymm13,416-512(%rax) + vpshufd $0xff,%ymm15,%ymm15 + vmovdqa %ymm14,448-512(%rax) + vmovdqa %ymm15,480-512(%rax) + + vpshufd $0x00,%ymm7,%ymm4 + vpshufd $0x55,%ymm7,%ymm5 + vpaddd .Lincy(%rip),%ymm4,%ymm4 + vpshufd $0xaa,%ymm7,%ymm6 + vmovdqa %ymm5,544-512(%rax) + vpshufd $0xff,%ymm7,%ymm7 + vmovdqa %ymm6,576-512(%rax) + vmovdqa %ymm7,608-512(%rax) + + jmp .Loop_enter8x + +.align 32 +.Loop_outer8x: + vmovdqa 128-256(%rcx),%ymm8 + vmovdqa 160-256(%rcx),%ymm9 + vmovdqa 192-256(%rcx),%ymm10 + vmovdqa 224-256(%rcx),%ymm11 + vmovdqa 256-256(%rcx),%ymm0 + vmovdqa 288-256(%rcx),%ymm1 + vmovdqa 320-256(%rcx),%ymm2 + vmovdqa 352-256(%rcx),%ymm3 + vmovdqa 384-512(%rax),%ymm12 + vmovdqa 416-512(%rax),%ymm13 + vmovdqa 448-512(%rax),%ymm14 + vmovdqa 480-512(%rax),%ymm15 + vmovdqa 512-512(%rax),%ymm4 + vmovdqa 544-512(%rax),%ymm5 + vmovdqa 576-512(%rax),%ymm6 + vmovdqa 608-512(%rax),%ymm7 + vpaddd .Leight(%rip),%ymm4,%ymm4 + +.Loop_enter8x: + vmovdqa %ymm14,64(%rsp) + vmovdqa %ymm15,96(%rsp) + vbroadcasti128 (%r9),%ymm15 + vmovdqa %ymm4,512-512(%rax) + movl $10,%eax + jmp .Loop8x + +.align 32 +.Loop8x: + vpaddd %ymm0,%ymm8,%ymm8 + vpxor %ymm4,%ymm8,%ymm4 + vpshufb %ymm15,%ymm4,%ymm4 + vpaddd %ymm1,%ymm9,%ymm9 + vpxor %ymm5,%ymm9,%ymm5 + vpshufb %ymm15,%ymm5,%ymm5 + vpaddd %ymm4,%ymm12,%ymm12 + vpxor %ymm0,%ymm12,%ymm0 + vpslld $12,%ymm0,%ymm14 + vpsrld $20,%ymm0,%ymm0 + vpor %ymm0,%ymm14,%ymm0 + vbroadcasti128 (%r11),%ymm14 + vpaddd %ymm5,%ymm13,%ymm13 + vpxor %ymm1,%ymm13,%ymm1 + vpslld $12,%ymm1,%ymm15 + vpsrld $20,%ymm1,%ymm1 + vpor %ymm1,%ymm15,%ymm1 + vpaddd %ymm0,%ymm8,%ymm8 + vpxor %ymm4,%ymm8,%ymm4 + vpshufb %ymm14,%ymm4,%ymm4 + vpaddd %ymm1,%ymm9,%ymm9 + vpxor %ymm5,%ymm9,%ymm5 + vpshufb %ymm14,%ymm5,%ymm5 + vpaddd %ymm4,%ymm12,%ymm12 + vpxor %ymm0,%ymm12,%ymm0 + vpslld $7,%ymm0,%ymm15 + vpsrld $25,%ymm0,%ymm0 + vpor %ymm0,%ymm15,%ymm0 + vbroadcasti128 (%r9),%ymm15 + vpaddd %ymm5,%ymm13,%ymm13 + vpxor %ymm1,%ymm13,%ymm1 + vpslld $7,%ymm1,%ymm14 + vpsrld $25,%ymm1,%ymm1 + vpor %ymm1,%ymm14,%ymm1 + vmovdqa %ymm12,0(%rsp) + vmovdqa %ymm13,32(%rsp) + vmovdqa 64(%rsp),%ymm12 + vmovdqa 96(%rsp),%ymm13 + vpaddd %ymm2,%ymm10,%ymm10 + vpxor %ymm6,%ymm10,%ymm6 + vpshufb %ymm15,%ymm6,%ymm6 + vpaddd %ymm3,%ymm11,%ymm11 + vpxor %ymm7,%ymm11,%ymm7 + vpshufb %ymm15,%ymm7,%ymm7 + vpaddd %ymm6,%ymm12,%ymm12 + vpxor %ymm2,%ymm12,%ymm2 + vpslld $12,%ymm2,%ymm14 + vpsrld $20,%ymm2,%ymm2 + vpor %ymm2,%ymm14,%ymm2 + vbroadcasti128 (%r11),%ymm14 + vpaddd %ymm7,%ymm13,%ymm13 + vpxor %ymm3,%ymm13,%ymm3 + vpslld $12,%ymm3,%ymm15 + vpsrld $20,%ymm3,%ymm3 + vpor %ymm3,%ymm15,%ymm3 + vpaddd %ymm2,%ymm10,%ymm10 + vpxor %ymm6,%ymm10,%ymm6 + vpshufb %ymm14,%ymm6,%ymm6 + vpaddd %ymm3,%ymm11,%ymm11 + vpxor %ymm7,%ymm11,%ymm7 + vpshufb %ymm14,%ymm7,%ymm7 + vpaddd %ymm6,%ymm12,%ymm12 + vpxor %ymm2,%ymm12,%ymm2 + vpslld $7,%ymm2,%ymm15 + vpsrld $25,%ymm2,%ymm2 + vpor %ymm2,%ymm15,%ymm2 + vbroadcasti128 (%r9),%ymm15 + vpaddd %ymm7,%ymm13,%ymm13 + vpxor %ymm3,%ymm13,%ymm3 + vpslld $7,%ymm3,%ymm14 + vpsrld $25,%ymm3,%ymm3 + vpor %ymm3,%ymm14,%ymm3 + vpaddd %ymm1,%ymm8,%ymm8 + vpxor %ymm7,%ymm8,%ymm7 + vpshufb %ymm15,%ymm7,%ymm7 + vpaddd 
%ymm2,%ymm9,%ymm9 + vpxor %ymm4,%ymm9,%ymm4 + vpshufb %ymm15,%ymm4,%ymm4 + vpaddd %ymm7,%ymm12,%ymm12 + vpxor %ymm1,%ymm12,%ymm1 + vpslld $12,%ymm1,%ymm14 + vpsrld $20,%ymm1,%ymm1 + vpor %ymm1,%ymm14,%ymm1 + vbroadcasti128 (%r11),%ymm14 + vpaddd %ymm4,%ymm13,%ymm13 + vpxor %ymm2,%ymm13,%ymm2 + vpslld $12,%ymm2,%ymm15 + vpsrld $20,%ymm2,%ymm2 + vpor %ymm2,%ymm15,%ymm2 + vpaddd %ymm1,%ymm8,%ymm8 + vpxor %ymm7,%ymm8,%ymm7 + vpshufb %ymm14,%ymm7,%ymm7 + vpaddd %ymm2,%ymm9,%ymm9 + vpxor %ymm4,%ymm9,%ymm4 + vpshufb %ymm14,%ymm4,%ymm4 + vpaddd %ymm7,%ymm12,%ymm12 + vpxor %ymm1,%ymm12,%ymm1 + vpslld $7,%ymm1,%ymm15 + vpsrld $25,%ymm1,%ymm1 + vpor %ymm1,%ymm15,%ymm1 + vbroadcasti128 (%r9),%ymm15 + vpaddd %ymm4,%ymm13,%ymm13 + vpxor %ymm2,%ymm13,%ymm2 + vpslld $7,%ymm2,%ymm14 + vpsrld $25,%ymm2,%ymm2 + vpor %ymm2,%ymm14,%ymm2 + vmovdqa %ymm12,64(%rsp) + vmovdqa %ymm13,96(%rsp) + vmovdqa 0(%rsp),%ymm12 + vmovdqa 32(%rsp),%ymm13 + vpaddd %ymm3,%ymm10,%ymm10 + vpxor %ymm5,%ymm10,%ymm5 + vpshufb %ymm15,%ymm5,%ymm5 + vpaddd %ymm0,%ymm11,%ymm11 + vpxor %ymm6,%ymm11,%ymm6 + vpshufb %ymm15,%ymm6,%ymm6 + vpaddd %ymm5,%ymm12,%ymm12 + vpxor %ymm3,%ymm12,%ymm3 + vpslld $12,%ymm3,%ymm14 + vpsrld $20,%ymm3,%ymm3 + vpor %ymm3,%ymm14,%ymm3 + vbroadcasti128 (%r11),%ymm14 + vpaddd %ymm6,%ymm13,%ymm13 + vpxor %ymm0,%ymm13,%ymm0 + vpslld $12,%ymm0,%ymm15 + vpsrld $20,%ymm0,%ymm0 + vpor %ymm0,%ymm15,%ymm0 + vpaddd %ymm3,%ymm10,%ymm10 + vpxor %ymm5,%ymm10,%ymm5 + vpshufb %ymm14,%ymm5,%ymm5 + vpaddd %ymm0,%ymm11,%ymm11 + vpxor %ymm6,%ymm11,%ymm6 + vpshufb %ymm14,%ymm6,%ymm6 + vpaddd %ymm5,%ymm12,%ymm12 + vpxor %ymm3,%ymm12,%ymm3 + vpslld $7,%ymm3,%ymm15 + vpsrld $25,%ymm3,%ymm3 + vpor %ymm3,%ymm15,%ymm3 + vbroadcasti128 (%r9),%ymm15 + vpaddd %ymm6,%ymm13,%ymm13 + vpxor %ymm0,%ymm13,%ymm0 + vpslld $7,%ymm0,%ymm14 + vpsrld $25,%ymm0,%ymm0 + vpor %ymm0,%ymm14,%ymm0 + decl %eax + jnz .Loop8x + + leaq 512(%rsp),%rax + vpaddd 128-256(%rcx),%ymm8,%ymm8 + vpaddd 160-256(%rcx),%ymm9,%ymm9 + vpaddd 192-256(%rcx),%ymm10,%ymm10 + vpaddd 224-256(%rcx),%ymm11,%ymm11 + + vpunpckldq %ymm9,%ymm8,%ymm14 + vpunpckldq %ymm11,%ymm10,%ymm15 + vpunpckhdq %ymm9,%ymm8,%ymm8 + vpunpckhdq %ymm11,%ymm10,%ymm10 + vpunpcklqdq %ymm15,%ymm14,%ymm9 + vpunpckhqdq %ymm15,%ymm14,%ymm14 + vpunpcklqdq %ymm10,%ymm8,%ymm11 + vpunpckhqdq %ymm10,%ymm8,%ymm8 + vpaddd 256-256(%rcx),%ymm0,%ymm0 + vpaddd 288-256(%rcx),%ymm1,%ymm1 + vpaddd 320-256(%rcx),%ymm2,%ymm2 + vpaddd 352-256(%rcx),%ymm3,%ymm3 + + vpunpckldq %ymm1,%ymm0,%ymm10 + vpunpckldq %ymm3,%ymm2,%ymm15 + vpunpckhdq %ymm1,%ymm0,%ymm0 + vpunpckhdq %ymm3,%ymm2,%ymm2 + vpunpcklqdq %ymm15,%ymm10,%ymm1 + vpunpckhqdq %ymm15,%ymm10,%ymm10 + vpunpcklqdq %ymm2,%ymm0,%ymm3 + vpunpckhqdq %ymm2,%ymm0,%ymm0 + vperm2i128 $0x20,%ymm1,%ymm9,%ymm15 + vperm2i128 $0x31,%ymm1,%ymm9,%ymm1 + vperm2i128 $0x20,%ymm10,%ymm14,%ymm9 + vperm2i128 $0x31,%ymm10,%ymm14,%ymm10 + vperm2i128 $0x20,%ymm3,%ymm11,%ymm14 + vperm2i128 $0x31,%ymm3,%ymm11,%ymm3 + vperm2i128 $0x20,%ymm0,%ymm8,%ymm11 + vperm2i128 $0x31,%ymm0,%ymm8,%ymm0 + vmovdqa %ymm15,0(%rsp) + vmovdqa %ymm9,32(%rsp) + vmovdqa 64(%rsp),%ymm15 + vmovdqa 96(%rsp),%ymm9 + + vpaddd 384-512(%rax),%ymm12,%ymm12 + vpaddd 416-512(%rax),%ymm13,%ymm13 + vpaddd 448-512(%rax),%ymm15,%ymm15 + vpaddd 480-512(%rax),%ymm9,%ymm9 + + vpunpckldq %ymm13,%ymm12,%ymm2 + vpunpckldq %ymm9,%ymm15,%ymm8 + vpunpckhdq %ymm13,%ymm12,%ymm12 + vpunpckhdq %ymm9,%ymm15,%ymm15 + vpunpcklqdq %ymm8,%ymm2,%ymm13 + vpunpckhqdq %ymm8,%ymm2,%ymm2 + vpunpcklqdq %ymm15,%ymm12,%ymm9 + vpunpckhqdq %ymm15,%ymm12,%ymm12 + 
vpaddd 512-512(%rax),%ymm4,%ymm4 + vpaddd 544-512(%rax),%ymm5,%ymm5 + vpaddd 576-512(%rax),%ymm6,%ymm6 + vpaddd 608-512(%rax),%ymm7,%ymm7 + + vpunpckldq %ymm5,%ymm4,%ymm15 + vpunpckldq %ymm7,%ymm6,%ymm8 + vpunpckhdq %ymm5,%ymm4,%ymm4 + vpunpckhdq %ymm7,%ymm6,%ymm6 + vpunpcklqdq %ymm8,%ymm15,%ymm5 + vpunpckhqdq %ymm8,%ymm15,%ymm15 + vpunpcklqdq %ymm6,%ymm4,%ymm7 + vpunpckhqdq %ymm6,%ymm4,%ymm4 + vperm2i128 $0x20,%ymm5,%ymm13,%ymm8 + vperm2i128 $0x31,%ymm5,%ymm13,%ymm5 + vperm2i128 $0x20,%ymm15,%ymm2,%ymm13 + vperm2i128 $0x31,%ymm15,%ymm2,%ymm15 + vperm2i128 $0x20,%ymm7,%ymm9,%ymm2 + vperm2i128 $0x31,%ymm7,%ymm9,%ymm7 + vperm2i128 $0x20,%ymm4,%ymm12,%ymm9 + vperm2i128 $0x31,%ymm4,%ymm12,%ymm4 + vmovdqa 0(%rsp),%ymm6 + vmovdqa 32(%rsp),%ymm12 + + cmpq $512,%rdx + jb .Ltail8x + + vpxor 0(%rsi),%ymm6,%ymm6 + vpxor 32(%rsi),%ymm8,%ymm8 + vpxor 64(%rsi),%ymm1,%ymm1 + vpxor 96(%rsi),%ymm5,%ymm5 + leaq 128(%rsi),%rsi + vmovdqu %ymm6,0(%rdi) + vmovdqu %ymm8,32(%rdi) + vmovdqu %ymm1,64(%rdi) + vmovdqu %ymm5,96(%rdi) + leaq 128(%rdi),%rdi + + vpxor 0(%rsi),%ymm12,%ymm12 + vpxor 32(%rsi),%ymm13,%ymm13 + vpxor 64(%rsi),%ymm10,%ymm10 + vpxor 96(%rsi),%ymm15,%ymm15 + leaq 128(%rsi),%rsi + vmovdqu %ymm12,0(%rdi) + vmovdqu %ymm13,32(%rdi) + vmovdqu %ymm10,64(%rdi) + vmovdqu %ymm15,96(%rdi) + leaq 128(%rdi),%rdi + + vpxor 0(%rsi),%ymm14,%ymm14 + vpxor 32(%rsi),%ymm2,%ymm2 + vpxor 64(%rsi),%ymm3,%ymm3 + vpxor 96(%rsi),%ymm7,%ymm7 + leaq 128(%rsi),%rsi + vmovdqu %ymm14,0(%rdi) + vmovdqu %ymm2,32(%rdi) + vmovdqu %ymm3,64(%rdi) + vmovdqu %ymm7,96(%rdi) + leaq 128(%rdi),%rdi + + vpxor 0(%rsi),%ymm11,%ymm11 + vpxor 32(%rsi),%ymm9,%ymm9 + vpxor 64(%rsi),%ymm0,%ymm0 + vpxor 96(%rsi),%ymm4,%ymm4 + leaq 128(%rsi),%rsi + vmovdqu %ymm11,0(%rdi) + vmovdqu %ymm9,32(%rdi) + vmovdqu %ymm0,64(%rdi) + vmovdqu %ymm4,96(%rdi) + leaq 128(%rdi),%rdi + + subq $512,%rdx + jnz .Loop_outer8x + + jmp .Ldone8x + +.Ltail8x: + cmpq $448,%rdx + jae .L448_or_more8x + cmpq $384,%rdx + jae .L384_or_more8x + cmpq $320,%rdx + jae .L320_or_more8x + cmpq $256,%rdx + jae .L256_or_more8x + cmpq $192,%rdx + jae .L192_or_more8x + cmpq $128,%rdx + jae .L128_or_more8x + cmpq $64,%rdx + jae .L64_or_more8x + + xorq %r9,%r9 + vmovdqa %ymm6,0(%rsp) + vmovdqa %ymm8,32(%rsp) + jmp .Loop_tail8x + +.align 32 +.L64_or_more8x: + vpxor 0(%rsi),%ymm6,%ymm6 + vpxor 32(%rsi),%ymm8,%ymm8 + vmovdqu %ymm6,0(%rdi) + vmovdqu %ymm8,32(%rdi) + je .Ldone8x + + leaq 64(%rsi),%rsi + xorq %r9,%r9 + vmovdqa %ymm1,0(%rsp) + leaq 64(%rdi),%rdi + subq $64,%rdx + vmovdqa %ymm5,32(%rsp) + jmp .Loop_tail8x + +.align 32 +.L128_or_more8x: + vpxor 0(%rsi),%ymm6,%ymm6 + vpxor 32(%rsi),%ymm8,%ymm8 + vpxor 64(%rsi),%ymm1,%ymm1 + vpxor 96(%rsi),%ymm5,%ymm5 + vmovdqu %ymm6,0(%rdi) + vmovdqu %ymm8,32(%rdi) + vmovdqu %ymm1,64(%rdi) + vmovdqu %ymm5,96(%rdi) + je .Ldone8x + + leaq 128(%rsi),%rsi + xorq %r9,%r9 + vmovdqa %ymm12,0(%rsp) + leaq 128(%rdi),%rdi + subq $128,%rdx + vmovdqa %ymm13,32(%rsp) + jmp .Loop_tail8x + +.align 32 +.L192_or_more8x: + vpxor 0(%rsi),%ymm6,%ymm6 + vpxor 32(%rsi),%ymm8,%ymm8 + vpxor 64(%rsi),%ymm1,%ymm1 + vpxor 96(%rsi),%ymm5,%ymm5 + vpxor 128(%rsi),%ymm12,%ymm12 + vpxor 160(%rsi),%ymm13,%ymm13 + vmovdqu %ymm6,0(%rdi) + vmovdqu %ymm8,32(%rdi) + vmovdqu %ymm1,64(%rdi) + vmovdqu %ymm5,96(%rdi) + vmovdqu %ymm12,128(%rdi) + vmovdqu %ymm13,160(%rdi) + je .Ldone8x + + leaq 192(%rsi),%rsi + xorq %r9,%r9 + vmovdqa %ymm10,0(%rsp) + leaq 192(%rdi),%rdi + subq $192,%rdx + vmovdqa %ymm15,32(%rsp) + jmp .Loop_tail8x + +.align 32 +.L256_or_more8x: + vpxor 0(%rsi),%ymm6,%ymm6 + 
vpxor 32(%rsi),%ymm8,%ymm8 + vpxor 64(%rsi),%ymm1,%ymm1 + vpxor 96(%rsi),%ymm5,%ymm5 + vpxor 128(%rsi),%ymm12,%ymm12 + vpxor 160(%rsi),%ymm13,%ymm13 + vpxor 192(%rsi),%ymm10,%ymm10 + vpxor 224(%rsi),%ymm15,%ymm15 + vmovdqu %ymm6,0(%rdi) + vmovdqu %ymm8,32(%rdi) + vmovdqu %ymm1,64(%rdi) + vmovdqu %ymm5,96(%rdi) + vmovdqu %ymm12,128(%rdi) + vmovdqu %ymm13,160(%rdi) + vmovdqu %ymm10,192(%rdi) + vmovdqu %ymm15,224(%rdi) + je .Ldone8x + + leaq 256(%rsi),%rsi + xorq %r9,%r9 + vmovdqa %ymm14,0(%rsp) + leaq 256(%rdi),%rdi + subq $256,%rdx + vmovdqa %ymm2,32(%rsp) + jmp .Loop_tail8x + +.align 32 +.L320_or_more8x: + vpxor 0(%rsi),%ymm6,%ymm6 + vpxor 32(%rsi),%ymm8,%ymm8 + vpxor 64(%rsi),%ymm1,%ymm1 + vpxor 96(%rsi),%ymm5,%ymm5 + vpxor 128(%rsi),%ymm12,%ymm12 + vpxor 160(%rsi),%ymm13,%ymm13 + vpxor 192(%rsi),%ymm10,%ymm10 + vpxor 224(%rsi),%ymm15,%ymm15 + vpxor 256(%rsi),%ymm14,%ymm14 + vpxor 288(%rsi),%ymm2,%ymm2 + vmovdqu %ymm6,0(%rdi) + vmovdqu %ymm8,32(%rdi) + vmovdqu %ymm1,64(%rdi) + vmovdqu %ymm5,96(%rdi) + vmovdqu %ymm12,128(%rdi) + vmovdqu %ymm13,160(%rdi) + vmovdqu %ymm10,192(%rdi) + vmovdqu %ymm15,224(%rdi) + vmovdqu %ymm14,256(%rdi) + vmovdqu %ymm2,288(%rdi) + je .Ldone8x + + leaq 320(%rsi),%rsi + xorq %r9,%r9 + vmovdqa %ymm3,0(%rsp) + leaq 320(%rdi),%rdi + subq $320,%rdx + vmovdqa %ymm7,32(%rsp) + jmp .Loop_tail8x + +.align 32 +.L384_or_more8x: + vpxor 0(%rsi),%ymm6,%ymm6 + vpxor 32(%rsi),%ymm8,%ymm8 + vpxor 64(%rsi),%ymm1,%ymm1 + vpxor 96(%rsi),%ymm5,%ymm5 + vpxor 128(%rsi),%ymm12,%ymm12 + vpxor 160(%rsi),%ymm13,%ymm13 + vpxor 192(%rsi),%ymm10,%ymm10 + vpxor 224(%rsi),%ymm15,%ymm15 + vpxor 256(%rsi),%ymm14,%ymm14 + vpxor 288(%rsi),%ymm2,%ymm2 + vpxor 320(%rsi),%ymm3,%ymm3 + vpxor 352(%rsi),%ymm7,%ymm7 + vmovdqu %ymm6,0(%rdi) + vmovdqu %ymm8,32(%rdi) + vmovdqu %ymm1,64(%rdi) + vmovdqu %ymm5,96(%rdi) + vmovdqu %ymm12,128(%rdi) + vmovdqu %ymm13,160(%rdi) + vmovdqu %ymm10,192(%rdi) + vmovdqu %ymm15,224(%rdi) + vmovdqu %ymm14,256(%rdi) + vmovdqu %ymm2,288(%rdi) + vmovdqu %ymm3,320(%rdi) + vmovdqu %ymm7,352(%rdi) + je .Ldone8x + + leaq 384(%rsi),%rsi + xorq %r9,%r9 + vmovdqa %ymm11,0(%rsp) + leaq 384(%rdi),%rdi + subq $384,%rdx + vmovdqa %ymm9,32(%rsp) + jmp .Loop_tail8x + +.align 32 +.L448_or_more8x: + vpxor 0(%rsi),%ymm6,%ymm6 + vpxor 32(%rsi),%ymm8,%ymm8 + vpxor 64(%rsi),%ymm1,%ymm1 + vpxor 96(%rsi),%ymm5,%ymm5 + vpxor 128(%rsi),%ymm12,%ymm12 + vpxor 160(%rsi),%ymm13,%ymm13 + vpxor 192(%rsi),%ymm10,%ymm10 + vpxor 224(%rsi),%ymm15,%ymm15 + vpxor 256(%rsi),%ymm14,%ymm14 + vpxor 288(%rsi),%ymm2,%ymm2 + vpxor 320(%rsi),%ymm3,%ymm3 + vpxor 352(%rsi),%ymm7,%ymm7 + vpxor 384(%rsi),%ymm11,%ymm11 + vpxor 416(%rsi),%ymm9,%ymm9 + vmovdqu %ymm6,0(%rdi) + vmovdqu %ymm8,32(%rdi) + vmovdqu %ymm1,64(%rdi) + vmovdqu %ymm5,96(%rdi) + vmovdqu %ymm12,128(%rdi) + vmovdqu %ymm13,160(%rdi) + vmovdqu %ymm10,192(%rdi) + vmovdqu %ymm15,224(%rdi) + vmovdqu %ymm14,256(%rdi) + vmovdqu %ymm2,288(%rdi) + vmovdqu %ymm3,320(%rdi) + vmovdqu %ymm7,352(%rdi) + vmovdqu %ymm11,384(%rdi) + vmovdqu %ymm9,416(%rdi) + je .Ldone8x + + leaq 448(%rsi),%rsi + xorq %r9,%r9 + vmovdqa %ymm0,0(%rsp) + leaq 448(%rdi),%rdi + subq $448,%rdx + vmovdqa %ymm4,32(%rsp) + +.Loop_tail8x: + movzbl (%rsi,%r9,1),%eax + movzbl (%rsp,%r9,1),%ecx + leaq 1(%r9),%r9 + xorl %ecx,%eax + movb %al,-1(%rdi,%r9,1) + decq %rdx + jnz .Loop_tail8x + +.Ldone8x: + vzeroall + leaq -8(%r10),%rsp + +.L8x_epilogue: + ret +ENDPROC(chacha20_avx2) +#endif /* CONFIG_AS_AVX2 */ + +#ifdef CONFIG_AS_AVX512 +.align 32 +ENTRY(chacha20_avx512) +.Lchacha20_avx512: + cmpq $0,%rdx + 
je .Lavx512_epilogue + leaq 8(%rsp),%r10 + + cmpq $512,%rdx + ja .Lchacha20_16x + + subq $64+8,%rsp + andq $-64,%rsp + vbroadcasti32x4 .Lsigma(%rip),%zmm0 + vbroadcasti32x4 (%rcx),%zmm1 + vbroadcasti32x4 16(%rcx),%zmm2 + vbroadcasti32x4 (%r8),%zmm3 + + vmovdqa32 %zmm0,%zmm16 + vmovdqa32 %zmm1,%zmm17 + vmovdqa32 %zmm2,%zmm18 + vpaddd .Lzeroz(%rip),%zmm3,%zmm3 + vmovdqa32 .Lfourz(%rip),%zmm20 + movq $10,%r8 + vmovdqa32 %zmm3,%zmm19 + jmp .Loop_avx512 + +.align 16 +.Loop_outer_avx512: + vmovdqa32 %zmm16,%zmm0 + vmovdqa32 %zmm17,%zmm1 + vmovdqa32 %zmm18,%zmm2 + vpaddd %zmm20,%zmm19,%zmm3 + movq $10,%r8 + vmovdqa32 %zmm3,%zmm19 + jmp .Loop_avx512 + +.align 32 +.Loop_avx512: + vpaddd %zmm1,%zmm0,%zmm0 + vpxord %zmm0,%zmm3,%zmm3 + vprold $16,%zmm3,%zmm3 + vpaddd %zmm3,%zmm2,%zmm2 + vpxord %zmm2,%zmm1,%zmm1 + vprold $12,%zmm1,%zmm1 + vpaddd %zmm1,%zmm0,%zmm0 + vpxord %zmm0,%zmm3,%zmm3 + vprold $8,%zmm3,%zmm3 + vpaddd %zmm3,%zmm2,%zmm2 + vpxord %zmm2,%zmm1,%zmm1 + vprold $7,%zmm1,%zmm1 + vpshufd $78,%zmm2,%zmm2 + vpshufd $57,%zmm1,%zmm1 + vpshufd $147,%zmm3,%zmm3 + vpaddd %zmm1,%zmm0,%zmm0 + vpxord %zmm0,%zmm3,%zmm3 + vprold $16,%zmm3,%zmm3 + vpaddd %zmm3,%zmm2,%zmm2 + vpxord %zmm2,%zmm1,%zmm1 + vprold $12,%zmm1,%zmm1 + vpaddd %zmm1,%zmm0,%zmm0 + vpxord %zmm0,%zmm3,%zmm3 + vprold $8,%zmm3,%zmm3 + vpaddd %zmm3,%zmm2,%zmm2 + vpxord %zmm2,%zmm1,%zmm1 + vprold $7,%zmm1,%zmm1 + vpshufd $78,%zmm2,%zmm2 + vpshufd $147,%zmm1,%zmm1 + vpshufd $57,%zmm3,%zmm3 + decq %r8 + jnz .Loop_avx512 + vpaddd %zmm16,%zmm0,%zmm0 + vpaddd %zmm17,%zmm1,%zmm1 + vpaddd %zmm18,%zmm2,%zmm2 + vpaddd %zmm19,%zmm3,%zmm3 + + subq $64,%rdx + jb .Ltail64_avx512 + + vpxor 0(%rsi),%xmm0,%xmm4 + vpxor 16(%rsi),%xmm1,%xmm5 + vpxor 32(%rsi),%xmm2,%xmm6 + vpxor 48(%rsi),%xmm3,%xmm7 + leaq 64(%rsi),%rsi + + vmovdqu %xmm4,0(%rdi) + vmovdqu %xmm5,16(%rdi) + vmovdqu %xmm6,32(%rdi) + vmovdqu %xmm7,48(%rdi) + leaq 64(%rdi),%rdi + + jz .Ldone_avx512 + + vextracti32x4 $1,%zmm0,%xmm4 + vextracti32x4 $1,%zmm1,%xmm5 + vextracti32x4 $1,%zmm2,%xmm6 + vextracti32x4 $1,%zmm3,%xmm7 + + subq $64,%rdx + jb .Ltail_avx512 + + vpxor 0(%rsi),%xmm4,%xmm4 + vpxor 16(%rsi),%xmm5,%xmm5 + vpxor 32(%rsi),%xmm6,%xmm6 + vpxor 48(%rsi),%xmm7,%xmm7 + leaq 64(%rsi),%rsi + + vmovdqu %xmm4,0(%rdi) + vmovdqu %xmm5,16(%rdi) + vmovdqu %xmm6,32(%rdi) + vmovdqu %xmm7,48(%rdi) + leaq 64(%rdi),%rdi + + jz .Ldone_avx512 + + vextracti32x4 $2,%zmm0,%xmm4 + vextracti32x4 $2,%zmm1,%xmm5 + vextracti32x4 $2,%zmm2,%xmm6 + vextracti32x4 $2,%zmm3,%xmm7 + + subq $64,%rdx + jb .Ltail_avx512 + + vpxor 0(%rsi),%xmm4,%xmm4 + vpxor 16(%rsi),%xmm5,%xmm5 + vpxor 32(%rsi),%xmm6,%xmm6 + vpxor 48(%rsi),%xmm7,%xmm7 + leaq 64(%rsi),%rsi + + vmovdqu %xmm4,0(%rdi) + vmovdqu %xmm5,16(%rdi) + vmovdqu %xmm6,32(%rdi) + vmovdqu %xmm7,48(%rdi) + leaq 64(%rdi),%rdi + + jz .Ldone_avx512 + + vextracti32x4 $3,%zmm0,%xmm4 + vextracti32x4 $3,%zmm1,%xmm5 + vextracti32x4 $3,%zmm2,%xmm6 + vextracti32x4 $3,%zmm3,%xmm7 + + subq $64,%rdx + jb .Ltail_avx512 + + vpxor 0(%rsi),%xmm4,%xmm4 + vpxor 16(%rsi),%xmm5,%xmm5 + vpxor 32(%rsi),%xmm6,%xmm6 + vpxor 48(%rsi),%xmm7,%xmm7 + leaq 64(%rsi),%rsi + + vmovdqu %xmm4,0(%rdi) + vmovdqu %xmm5,16(%rdi) + vmovdqu %xmm6,32(%rdi) + vmovdqu %xmm7,48(%rdi) + leaq 64(%rdi),%rdi + + jnz .Loop_outer_avx512 + + jmp .Ldone_avx512 + +.align 16 +.Ltail64_avx512: + vmovdqa %xmm0,0(%rsp) + vmovdqa %xmm1,16(%rsp) + vmovdqa %xmm2,32(%rsp) + vmovdqa %xmm3,48(%rsp) + addq $64,%rdx + jmp .Loop_tail_avx512 + +.align 16 +.Ltail_avx512: + vmovdqa %xmm4,0(%rsp) + vmovdqa %xmm5,16(%rsp) + vmovdqa 
%xmm6,32(%rsp) + vmovdqa %xmm7,48(%rsp) + addq $64,%rdx + +.Loop_tail_avx512: + movzbl (%rsi,%r8,1),%eax + movzbl (%rsp,%r8,1),%ecx + leaq 1(%r8),%r8 + xorl %ecx,%eax + movb %al,-1(%rdi,%r8,1) + decq %rdx + jnz .Loop_tail_avx512 + + vmovdqa32 %zmm16,0(%rsp) + +.Ldone_avx512: + vzeroall + leaq -8(%r10),%rsp + +.Lavx512_epilogue: + ret + +.align 32 +.Lchacha20_16x: + leaq 8(%rsp),%r10 + + subq $64+8,%rsp + andq $-64,%rsp + vzeroupper + + leaq .Lsigma(%rip),%r9 + vbroadcasti32x4 (%r9),%zmm3 + vbroadcasti32x4 (%rcx),%zmm7 + vbroadcasti32x4 16(%rcx),%zmm11 + vbroadcasti32x4 (%r8),%zmm15 + + vpshufd $0x00,%zmm3,%zmm0 + vpshufd $0x55,%zmm3,%zmm1 + vpshufd $0xaa,%zmm3,%zmm2 + vpshufd $0xff,%zmm3,%zmm3 + vmovdqa64 %zmm0,%zmm16 + vmovdqa64 %zmm1,%zmm17 + vmovdqa64 %zmm2,%zmm18 + vmovdqa64 %zmm3,%zmm19 + + vpshufd $0x00,%zmm7,%zmm4 + vpshufd $0x55,%zmm7,%zmm5 + vpshufd $0xaa,%zmm7,%zmm6 + vpshufd $0xff,%zmm7,%zmm7 + vmovdqa64 %zmm4,%zmm20 + vmovdqa64 %zmm5,%zmm21 + vmovdqa64 %zmm6,%zmm22 + vmovdqa64 %zmm7,%zmm23 + + vpshufd $0x00,%zmm11,%zmm8 + vpshufd $0x55,%zmm11,%zmm9 + vpshufd $0xaa,%zmm11,%zmm10 + vpshufd $0xff,%zmm11,%zmm11 + vmovdqa64 %zmm8,%zmm24 + vmovdqa64 %zmm9,%zmm25 + vmovdqa64 %zmm10,%zmm26 + vmovdqa64 %zmm11,%zmm27 + + vpshufd $0x00,%zmm15,%zmm12 + vpshufd $0x55,%zmm15,%zmm13 + vpshufd $0xaa,%zmm15,%zmm14 + vpshufd $0xff,%zmm15,%zmm15 + vpaddd .Lincz(%rip),%zmm12,%zmm12 + vmovdqa64 %zmm12,%zmm28 + vmovdqa64 %zmm13,%zmm29 + vmovdqa64 %zmm14,%zmm30 + vmovdqa64 %zmm15,%zmm31 + + movl $10,%eax + jmp .Loop16x + +.align 32 +.Loop_outer16x: + vpbroadcastd 0(%r9),%zmm0 + vpbroadcastd 4(%r9),%zmm1 + vpbroadcastd 8(%r9),%zmm2 + vpbroadcastd 12(%r9),%zmm3 + vpaddd .Lsixteen(%rip),%zmm28,%zmm28 + vmovdqa64 %zmm20,%zmm4 + vmovdqa64 %zmm21,%zmm5 + vmovdqa64 %zmm22,%zmm6 + vmovdqa64 %zmm23,%zmm7 + vmovdqa64 %zmm24,%zmm8 + vmovdqa64 %zmm25,%zmm9 + vmovdqa64 %zmm26,%zmm10 + vmovdqa64 %zmm27,%zmm11 + vmovdqa64 %zmm28,%zmm12 + vmovdqa64 %zmm29,%zmm13 + vmovdqa64 %zmm30,%zmm14 + vmovdqa64 %zmm31,%zmm15 + + vmovdqa64 %zmm0,%zmm16 + vmovdqa64 %zmm1,%zmm17 + vmovdqa64 %zmm2,%zmm18 + vmovdqa64 %zmm3,%zmm19 + + movl $10,%eax + jmp .Loop16x + +.align 32 +.Loop16x: + vpaddd %zmm4,%zmm0,%zmm0 + vpaddd %zmm5,%zmm1,%zmm1 + vpaddd %zmm6,%zmm2,%zmm2 + vpaddd %zmm7,%zmm3,%zmm3 + vpxord %zmm0,%zmm12,%zmm12 + vpxord %zmm1,%zmm13,%zmm13 + vpxord %zmm2,%zmm14,%zmm14 + vpxord %zmm3,%zmm15,%zmm15 + vprold $16,%zmm12,%zmm12 + vprold $16,%zmm13,%zmm13 + vprold $16,%zmm14,%zmm14 + vprold $16,%zmm15,%zmm15 + vpaddd %zmm12,%zmm8,%zmm8 + vpaddd %zmm13,%zmm9,%zmm9 + vpaddd %zmm14,%zmm10,%zmm10 + vpaddd %zmm15,%zmm11,%zmm11 + vpxord %zmm8,%zmm4,%zmm4 + vpxord %zmm9,%zmm5,%zmm5 + vpxord %zmm10,%zmm6,%zmm6 + vpxord %zmm11,%zmm7,%zmm7 + vprold $12,%zmm4,%zmm4 + vprold $12,%zmm5,%zmm5 + vprold $12,%zmm6,%zmm6 + vprold $12,%zmm7,%zmm7 + vpaddd %zmm4,%zmm0,%zmm0 + vpaddd %zmm5,%zmm1,%zmm1 + vpaddd %zmm6,%zmm2,%zmm2 + vpaddd %zmm7,%zmm3,%zmm3 + vpxord %zmm0,%zmm12,%zmm12 + vpxord %zmm1,%zmm13,%zmm13 + vpxord %zmm2,%zmm14,%zmm14 + vpxord %zmm3,%zmm15,%zmm15 + vprold $8,%zmm12,%zmm12 + vprold $8,%zmm13,%zmm13 + vprold $8,%zmm14,%zmm14 + vprold $8,%zmm15,%zmm15 + vpaddd %zmm12,%zmm8,%zmm8 + vpaddd %zmm13,%zmm9,%zmm9 + vpaddd %zmm14,%zmm10,%zmm10 + vpaddd %zmm15,%zmm11,%zmm11 + vpxord %zmm8,%zmm4,%zmm4 + vpxord %zmm9,%zmm5,%zmm5 + vpxord %zmm10,%zmm6,%zmm6 + vpxord %zmm11,%zmm7,%zmm7 + vprold $7,%zmm4,%zmm4 + vprold $7,%zmm5,%zmm5 + vprold $7,%zmm6,%zmm6 + vprold $7,%zmm7,%zmm7 + vpaddd %zmm5,%zmm0,%zmm0 + vpaddd %zmm6,%zmm1,%zmm1 + vpaddd 
%zmm7,%zmm2,%zmm2 + vpaddd %zmm4,%zmm3,%zmm3 + vpxord %zmm0,%zmm15,%zmm15 + vpxord %zmm1,%zmm12,%zmm12 + vpxord %zmm2,%zmm13,%zmm13 + vpxord %zmm3,%zmm14,%zmm14 + vprold $16,%zmm15,%zmm15 + vprold $16,%zmm12,%zmm12 + vprold $16,%zmm13,%zmm13 + vprold $16,%zmm14,%zmm14 + vpaddd %zmm15,%zmm10,%zmm10 + vpaddd %zmm12,%zmm11,%zmm11 + vpaddd %zmm13,%zmm8,%zmm8 + vpaddd %zmm14,%zmm9,%zmm9 + vpxord %zmm10,%zmm5,%zmm5 + vpxord %zmm11,%zmm6,%zmm6 + vpxord %zmm8,%zmm7,%zmm7 + vpxord %zmm9,%zmm4,%zmm4 + vprold $12,%zmm5,%zmm5 + vprold $12,%zmm6,%zmm6 + vprold $12,%zmm7,%zmm7 + vprold $12,%zmm4,%zmm4 + vpaddd %zmm5,%zmm0,%zmm0 + vpaddd %zmm6,%zmm1,%zmm1 + vpaddd %zmm7,%zmm2,%zmm2 + vpaddd %zmm4,%zmm3,%zmm3 + vpxord %zmm0,%zmm15,%zmm15 + vpxord %zmm1,%zmm12,%zmm12 + vpxord %zmm2,%zmm13,%zmm13 + vpxord %zmm3,%zmm14,%zmm14 + vprold $8,%zmm15,%zmm15 + vprold $8,%zmm12,%zmm12 + vprold $8,%zmm13,%zmm13 + vprold $8,%zmm14,%zmm14 + vpaddd %zmm15,%zmm10,%zmm10 + vpaddd %zmm12,%zmm11,%zmm11 + vpaddd %zmm13,%zmm8,%zmm8 + vpaddd %zmm14,%zmm9,%zmm9 + vpxord %zmm10,%zmm5,%zmm5 + vpxord %zmm11,%zmm6,%zmm6 + vpxord %zmm8,%zmm7,%zmm7 + vpxord %zmm9,%zmm4,%zmm4 + vprold $7,%zmm5,%zmm5 + vprold $7,%zmm6,%zmm6 + vprold $7,%zmm7,%zmm7 + vprold $7,%zmm4,%zmm4 + decl %eax + jnz .Loop16x + + vpaddd %zmm16,%zmm0,%zmm0 + vpaddd %zmm17,%zmm1,%zmm1 + vpaddd %zmm18,%zmm2,%zmm2 + vpaddd %zmm19,%zmm3,%zmm3 + + vpunpckldq %zmm1,%zmm0,%zmm18 + vpunpckldq %zmm3,%zmm2,%zmm19 + vpunpckhdq %zmm1,%zmm0,%zmm0 + vpunpckhdq %zmm3,%zmm2,%zmm2 + vpunpcklqdq %zmm19,%zmm18,%zmm1 + vpunpckhqdq %zmm19,%zmm18,%zmm18 + vpunpcklqdq %zmm2,%zmm0,%zmm3 + vpunpckhqdq %zmm2,%zmm0,%zmm0 + vpaddd %zmm20,%zmm4,%zmm4 + vpaddd %zmm21,%zmm5,%zmm5 + vpaddd %zmm22,%zmm6,%zmm6 + vpaddd %zmm23,%zmm7,%zmm7 + + vpunpckldq %zmm5,%zmm4,%zmm2 + vpunpckldq %zmm7,%zmm6,%zmm19 + vpunpckhdq %zmm5,%zmm4,%zmm4 + vpunpckhdq %zmm7,%zmm6,%zmm6 + vpunpcklqdq %zmm19,%zmm2,%zmm5 + vpunpckhqdq %zmm19,%zmm2,%zmm2 + vpunpcklqdq %zmm6,%zmm4,%zmm7 + vpunpckhqdq %zmm6,%zmm4,%zmm4 + vshufi32x4 $0x44,%zmm5,%zmm1,%zmm19 + vshufi32x4 $0xee,%zmm5,%zmm1,%zmm5 + vshufi32x4 $0x44,%zmm2,%zmm18,%zmm1 + vshufi32x4 $0xee,%zmm2,%zmm18,%zmm2 + vshufi32x4 $0x44,%zmm7,%zmm3,%zmm18 + vshufi32x4 $0xee,%zmm7,%zmm3,%zmm7 + vshufi32x4 $0x44,%zmm4,%zmm0,%zmm3 + vshufi32x4 $0xee,%zmm4,%zmm0,%zmm4 + vpaddd %zmm24,%zmm8,%zmm8 + vpaddd %zmm25,%zmm9,%zmm9 + vpaddd %zmm26,%zmm10,%zmm10 + vpaddd %zmm27,%zmm11,%zmm11 + + vpunpckldq %zmm9,%zmm8,%zmm6 + vpunpckldq %zmm11,%zmm10,%zmm0 + vpunpckhdq %zmm9,%zmm8,%zmm8 + vpunpckhdq %zmm11,%zmm10,%zmm10 + vpunpcklqdq %zmm0,%zmm6,%zmm9 + vpunpckhqdq %zmm0,%zmm6,%zmm6 + vpunpcklqdq %zmm10,%zmm8,%zmm11 + vpunpckhqdq %zmm10,%zmm8,%zmm8 + vpaddd %zmm28,%zmm12,%zmm12 + vpaddd %zmm29,%zmm13,%zmm13 + vpaddd %zmm30,%zmm14,%zmm14 + vpaddd %zmm31,%zmm15,%zmm15 + + vpunpckldq %zmm13,%zmm12,%zmm10 + vpunpckldq %zmm15,%zmm14,%zmm0 + vpunpckhdq %zmm13,%zmm12,%zmm12 + vpunpckhdq %zmm15,%zmm14,%zmm14 + vpunpcklqdq %zmm0,%zmm10,%zmm13 + vpunpckhqdq %zmm0,%zmm10,%zmm10 + vpunpcklqdq %zmm14,%zmm12,%zmm15 + vpunpckhqdq %zmm14,%zmm12,%zmm12 + vshufi32x4 $0x44,%zmm13,%zmm9,%zmm0 + vshufi32x4 $0xee,%zmm13,%zmm9,%zmm13 + vshufi32x4 $0x44,%zmm10,%zmm6,%zmm9 + vshufi32x4 $0xee,%zmm10,%zmm6,%zmm10 + vshufi32x4 $0x44,%zmm15,%zmm11,%zmm6 + vshufi32x4 $0xee,%zmm15,%zmm11,%zmm15 + vshufi32x4 $0x44,%zmm12,%zmm8,%zmm11 + vshufi32x4 $0xee,%zmm12,%zmm8,%zmm12 + vshufi32x4 $0x88,%zmm0,%zmm19,%zmm16 + vshufi32x4 $0xdd,%zmm0,%zmm19,%zmm19 + vshufi32x4 $0x88,%zmm13,%zmm5,%zmm0 + vshufi32x4 $0xdd,%zmm13,%zmm5,%zmm13 
+ vshufi32x4 $0x88,%zmm9,%zmm1,%zmm17 + vshufi32x4 $0xdd,%zmm9,%zmm1,%zmm1 + vshufi32x4 $0x88,%zmm10,%zmm2,%zmm9 + vshufi32x4 $0xdd,%zmm10,%zmm2,%zmm10 + vshufi32x4 $0x88,%zmm6,%zmm18,%zmm14 + vshufi32x4 $0xdd,%zmm6,%zmm18,%zmm18 + vshufi32x4 $0x88,%zmm15,%zmm7,%zmm6 + vshufi32x4 $0xdd,%zmm15,%zmm7,%zmm15 + vshufi32x4 $0x88,%zmm11,%zmm3,%zmm8 + vshufi32x4 $0xdd,%zmm11,%zmm3,%zmm3 + vshufi32x4 $0x88,%zmm12,%zmm4,%zmm11 + vshufi32x4 $0xdd,%zmm12,%zmm4,%zmm12 + cmpq $1024,%rdx + jb .Ltail16x + + vpxord 0(%rsi),%zmm16,%zmm16 + vpxord 64(%rsi),%zmm17,%zmm17 + vpxord 128(%rsi),%zmm14,%zmm14 + vpxord 192(%rsi),%zmm8,%zmm8 + vmovdqu32 %zmm16,0(%rdi) + vmovdqu32 %zmm17,64(%rdi) + vmovdqu32 %zmm14,128(%rdi) + vmovdqu32 %zmm8,192(%rdi) + + vpxord 256(%rsi),%zmm19,%zmm19 + vpxord 320(%rsi),%zmm1,%zmm1 + vpxord 384(%rsi),%zmm18,%zmm18 + vpxord 448(%rsi),%zmm3,%zmm3 + vmovdqu32 %zmm19,256(%rdi) + vmovdqu32 %zmm1,320(%rdi) + vmovdqu32 %zmm18,384(%rdi) + vmovdqu32 %zmm3,448(%rdi) + + vpxord 512(%rsi),%zmm0,%zmm0 + vpxord 576(%rsi),%zmm9,%zmm9 + vpxord 640(%rsi),%zmm6,%zmm6 + vpxord 704(%rsi),%zmm11,%zmm11 + vmovdqu32 %zmm0,512(%rdi) + vmovdqu32 %zmm9,576(%rdi) + vmovdqu32 %zmm6,640(%rdi) + vmovdqu32 %zmm11,704(%rdi) + + vpxord 768(%rsi),%zmm13,%zmm13 + vpxord 832(%rsi),%zmm10,%zmm10 + vpxord 896(%rsi),%zmm15,%zmm15 + vpxord 960(%rsi),%zmm12,%zmm12 + leaq 1024(%rsi),%rsi + vmovdqu32 %zmm13,768(%rdi) + vmovdqu32 %zmm10,832(%rdi) + vmovdqu32 %zmm15,896(%rdi) + vmovdqu32 %zmm12,960(%rdi) + leaq 1024(%rdi),%rdi + + subq $1024,%rdx + jnz .Loop_outer16x + + jmp .Ldone16x + +.align 32 +.Ltail16x: + xorq %r9,%r9 + subq %rsi,%rdi + cmpq $64,%rdx + jb .Less_than_64_16x + vpxord (%rsi),%zmm16,%zmm16 + vmovdqu32 %zmm16,(%rdi,%rsi,1) + je .Ldone16x + vmovdqa32 %zmm17,%zmm16 + leaq 64(%rsi),%rsi + + cmpq $128,%rdx + jb .Less_than_64_16x + vpxord (%rsi),%zmm17,%zmm17 + vmovdqu32 %zmm17,(%rdi,%rsi,1) + je .Ldone16x + vmovdqa32 %zmm14,%zmm16 + leaq 64(%rsi),%rsi + + cmpq $192,%rdx + jb .Less_than_64_16x + vpxord (%rsi),%zmm14,%zmm14 + vmovdqu32 %zmm14,(%rdi,%rsi,1) + je .Ldone16x + vmovdqa32 %zmm8,%zmm16 + leaq 64(%rsi),%rsi + + cmpq $256,%rdx + jb .Less_than_64_16x + vpxord (%rsi),%zmm8,%zmm8 + vmovdqu32 %zmm8,(%rdi,%rsi,1) + je .Ldone16x + vmovdqa32 %zmm19,%zmm16 + leaq 64(%rsi),%rsi + + cmpq $320,%rdx + jb .Less_than_64_16x + vpxord (%rsi),%zmm19,%zmm19 + vmovdqu32 %zmm19,(%rdi,%rsi,1) + je .Ldone16x + vmovdqa32 %zmm1,%zmm16 + leaq 64(%rsi),%rsi + + cmpq $384,%rdx + jb .Less_than_64_16x + vpxord (%rsi),%zmm1,%zmm1 + vmovdqu32 %zmm1,(%rdi,%rsi,1) + je .Ldone16x + vmovdqa32 %zmm18,%zmm16 + leaq 64(%rsi),%rsi + + cmpq $448,%rdx + jb .Less_than_64_16x + vpxord (%rsi),%zmm18,%zmm18 + vmovdqu32 %zmm18,(%rdi,%rsi,1) + je .Ldone16x + vmovdqa32 %zmm3,%zmm16 + leaq 64(%rsi),%rsi + + cmpq $512,%rdx + jb .Less_than_64_16x + vpxord (%rsi),%zmm3,%zmm3 + vmovdqu32 %zmm3,(%rdi,%rsi,1) + je .Ldone16x + vmovdqa32 %zmm0,%zmm16 + leaq 64(%rsi),%rsi + + cmpq $576,%rdx + jb .Less_than_64_16x + vpxord (%rsi),%zmm0,%zmm0 + vmovdqu32 %zmm0,(%rdi,%rsi,1) + je .Ldone16x + vmovdqa32 %zmm9,%zmm16 + leaq 64(%rsi),%rsi + + cmpq $640,%rdx + jb .Less_than_64_16x + vpxord (%rsi),%zmm9,%zmm9 + vmovdqu32 %zmm9,(%rdi,%rsi,1) + je .Ldone16x + vmovdqa32 %zmm6,%zmm16 + leaq 64(%rsi),%rsi + + cmpq $704,%rdx + jb .Less_than_64_16x + vpxord (%rsi),%zmm6,%zmm6 + vmovdqu32 %zmm6,(%rdi,%rsi,1) + je .Ldone16x + vmovdqa32 %zmm11,%zmm16 + leaq 64(%rsi),%rsi + + cmpq $768,%rdx + jb .Less_than_64_16x + vpxord (%rsi),%zmm11,%zmm11 + vmovdqu32 %zmm11,(%rdi,%rsi,1) + je 
.Ldone16x + vmovdqa32 %zmm13,%zmm16 + leaq 64(%rsi),%rsi + + cmpq $832,%rdx + jb .Less_than_64_16x + vpxord (%rsi),%zmm13,%zmm13 + vmovdqu32 %zmm13,(%rdi,%rsi,1) + je .Ldone16x + vmovdqa32 %zmm10,%zmm16 + leaq 64(%rsi),%rsi + + cmpq $896,%rdx + jb .Less_than_64_16x + vpxord (%rsi),%zmm10,%zmm10 + vmovdqu32 %zmm10,(%rdi,%rsi,1) + je .Ldone16x + vmovdqa32 %zmm15,%zmm16 + leaq 64(%rsi),%rsi + + cmpq $960,%rdx + jb .Less_than_64_16x + vpxord (%rsi),%zmm15,%zmm15 + vmovdqu32 %zmm15,(%rdi,%rsi,1) + je .Ldone16x + vmovdqa32 %zmm12,%zmm16 + leaq 64(%rsi),%rsi + +.Less_than_64_16x: + vmovdqa32 %zmm16,0(%rsp) + leaq (%rdi,%rsi,1),%rdi + andq $63,%rdx + +.Loop_tail16x: + movzbl (%rsi,%r9,1),%eax + movzbl (%rsp,%r9,1),%ecx + leaq 1(%r9),%r9 + xorl %ecx,%eax + movb %al,-1(%rdi,%r9,1) + decq %rdx + jnz .Loop_tail16x + + vpxord %zmm16,%zmm16,%zmm16 + vmovdqa32 %zmm16,0(%rsp) + +.Ldone16x: + vzeroall + leaq -8(%r10),%rsp + +.L16x_epilogue: + ret +ENDPROC(chacha20_avx512) +#endif /* CONFIG_AS_AVX512 */ diff --git a/src/crypto/chacha20poly1305.c b/src/crypto/chacha20poly1305.c index a9c3bf8..ac033f0 100644 --- a/src/crypto/chacha20poly1305.c +++ b/src/crypto/chacha20poly1305.c @@ -1,6 +1,34 @@ -/* - * Copyright (C) 2015-2017 Jason A. Donenfeld . All Rights Reserved. +/* Copyright (C) 2015-2017 Jason A. Donenfeld . All Rights Reserved. * Copyright 2015 Martin Willi. + * Copyright 2016 The OpenSSL Project Authors. All Rights Reserved. + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * - Redistributions of source code must retain copyright notices, + * this list of conditions and the following disclaimer. + * - Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials + * provided with the distribution. + * - Neither the name of the CRYPTOGAMS nor the names of its + * copyright holder and contributors may be used to endorse or + * promote products derived from this software without specific + * prior written permission. + * ALTERNATIVELY, provided that this notice is retained in full, this + * product may be distributed under the terms of the GNU General Public + * License (GPL), in which case the provisions of the GPL apply INSTEAD OF + * those given above. + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #include "chacha20poly1305.h" @@ -15,27 +43,39 @@ #if defined(CONFIG_X86_64) #include #include +asmlinkage void poly1305_init_x86_64(void *ctx, const unsigned char key[16]); +asmlinkage void poly1305_blocks_x86_64(void *ctx, const unsigned char *inp, size_t len, u32 padbit); +asmlinkage void poly1305_emit_x86_64(void *ctx, unsigned char mac[16], const u32 nonce[4]); #ifdef CONFIG_AS_SSSE3 -asmlinkage void hchacha20_asm_ssse3(u8 *derived_key, const u8 *nonce, const u8 *key); -asmlinkage void chacha20_asm_block_xor_ssse3(u32 *state, u8 *dst, const u8 *src); -asmlinkage void chacha20_asm_4block_xor_ssse3(u32 *state, u8 *dst, const u8 *src); +asmlinkage void hchacha20_ssse3(u8 *derived_key, const u8 *nonce, const u8 *key); +asmlinkage void chacha20_ssse3(unsigned char *out, const unsigned char *in, size_t len, const unsigned int key[8], const unsigned int counter[4]); #endif -#ifdef CONFIG_AS_AVX2 -asmlinkage void chacha20_asm_8block_xor_avx2(u32 *state, u8 *dst, const u8 *src); +#ifdef CONFIG_AS_AVX +asmlinkage void poly1305_emit_avx(void *ctx, u8 mac[16], const u32 nonce[4]); +asmlinkage void poly1305_blocks_avx(void *ctx, const u8 *inp, size_t len, u32 padbit); #endif -asmlinkage void poly1305_asm_block_sse2(u32 *h, const u8 *src, const u32 *r, unsigned int blocks); -asmlinkage void poly1305_asm_2block_sse2(u32 *h, const u8 *src, const u32 *r, unsigned int blocks, const u32 *u); #ifdef CONFIG_AS_AVX2 -asmlinkage void poly1305_asm_4block_avx2(u32 *h, const u8 *src, const u32 *r, unsigned int blocks, const u32 *u); +asmlinkage void chacha20_avx2(unsigned char *out, const unsigned char *in, size_t len, const unsigned int key[8], const unsigned int counter[4]); +asmlinkage void poly1305_blocks_avx2(void *ctx, const unsigned char *inp, size_t len, u32 padbit); #endif -static bool chacha20poly1305_use_avx2 __read_mostly; +#ifdef CONFIG_AS_AVX512 +asmlinkage void chacha20_avx512(unsigned char *out, const unsigned char *in, size_t len, const unsigned int key[8], const unsigned int counter[4]); +asmlinkage void poly1305_blocks_avx512(void *ctx, const unsigned char *inp, size_t len, u32 padbit); +#endif + static bool chacha20poly1305_use_ssse3 __read_mostly; -static bool chacha20poly1305_use_sse2 __read_mostly; +static bool chacha20poly1305_use_avx __read_mostly; +static bool chacha20poly1305_use_avx2 __read_mostly; +static bool chacha20poly1305_use_avx512 __read_mostly; + void chacha20poly1305_fpu_init(void) { - chacha20poly1305_use_sse2 = boot_cpu_has(X86_FEATURE_XMM2); chacha20poly1305_use_ssse3 = boot_cpu_has(X86_FEATURE_SSSE3); + chacha20poly1305_use_avx = boot_cpu_has(X86_FEATURE_AVX) && cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL); chacha20poly1305_use_avx2 = boot_cpu_has(X86_FEATURE_AVX) && boot_cpu_has(X86_FEATURE_AVX2) && cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL); +#ifndef COMPAT_CANNOT_USE_AVX512 + chacha20poly1305_use_avx512 = boot_cpu_has(X86_FEATURE_AVX) && boot_cpu_has(X86_FEATURE_AVX2) && boot_cpu_has(X86_FEATURE_AVX512F) && cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM | XFEATURE_MASK_ZMM_Hi256, NULL); +#endif } #elif IS_ENABLED(CONFIG_KERNEL_MODE_NEON) #include @@ -77,21 +117,6 @@ static inline u32 rotl32(u32 v, u8 n) return (v << n) | (v >> (sizeof(v) * 8 - n)); } -static inline u64 mlt(u64 a, u64 b) -{ - return a * b; -} - -static inline u32 sr(u64 v, u_char n) -{ - return v >> n; -} - -static inline u32 and(u32 v, u32 mask) -{ - return v & mask; -} - struct chacha20_ctx { u32 state[CHACHA20_BLOCK_SIZE / sizeof(u32)]; } 
__aligned(32); @@ -232,17 +257,13 @@ static void hchacha20_generic(u8 derived_key[CHACHA20POLY1305_KEYLEN], const u8 static inline void hchacha20(u8 derived_key[CHACHA20POLY1305_KEYLEN], const u8 nonce[16], const u8 key[CHACHA20POLY1305_KEYLEN], bool have_simd) { - if (!have_simd) - goto no_simd; - #if defined(CONFIG_X86_64) && defined(CONFIG_AS_SSSE3) - if (chacha20poly1305_use_ssse3) { - hchacha20_asm_ssse3(derived_key, nonce, key); + if (have_simd && chacha20poly1305_use_ssse3) { + hchacha20_ssse3(derived_key, nonce, key); return; } #endif -no_simd: hchacha20_generic(derived_key, nonce, key); } @@ -266,7 +287,7 @@ static void chacha20_keysetup(struct chacha20_ctx *ctx, const u8 key[CHACHA20_KE ctx->state[15] = le32_to_cpuvp(nonce + 4); } -static void chacha20_crypt(struct chacha20_ctx *ctx, u8 *dst, const u8 *src, unsigned int bytes, bool have_simd) +static void chacha20_crypt(struct chacha20_ctx *ctx, u8 *dst, const u8 *src, u32 bytes, bool have_simd) { u8 buf[CHACHA20_BLOCK_SIZE]; @@ -281,37 +302,23 @@ static void chacha20_crypt(struct chacha20_ctx *ctx, u8 *dst, const u8 *src, uns goto no_simd; #if defined(CONFIG_X86_64) +#ifdef CONFIG_AS_AVX512 + if (chacha20poly1305_use_avx512) { + chacha20_avx512(dst, src, bytes, &ctx->state[4], &ctx->state[12]); + ctx->state[12] += (bytes + 63) / 64; + return; + } +#endif #ifdef CONFIG_AS_AVX2 if (chacha20poly1305_use_avx2) { - while (bytes >= CHACHA20_BLOCK_SIZE * 8) { - chacha20_asm_8block_xor_avx2(ctx->state, dst, src); - bytes -= CHACHA20_BLOCK_SIZE * 8; - src += CHACHA20_BLOCK_SIZE * 8; - dst += CHACHA20_BLOCK_SIZE * 8; - ctx->state[12] += 8; - } + chacha20_avx2(dst, src, bytes, &ctx->state[4], &ctx->state[12]); + ctx->state[12] += (bytes + 63) / 64; + return; } #endif #ifdef CONFIG_AS_SSSE3 - while (bytes >= CHACHA20_BLOCK_SIZE * 4) { - chacha20_asm_4block_xor_ssse3(ctx->state, dst, src); - bytes -= CHACHA20_BLOCK_SIZE * 4; - src += CHACHA20_BLOCK_SIZE * 4; - dst += CHACHA20_BLOCK_SIZE * 4; - ctx->state[12] += 4; - } - while (bytes >= CHACHA20_BLOCK_SIZE) { - chacha20_asm_block_xor_ssse3(ctx->state, dst, src); - bytes -= CHACHA20_BLOCK_SIZE; - src += CHACHA20_BLOCK_SIZE; - dst += CHACHA20_BLOCK_SIZE; - ctx->state[12]++; - } - if (bytes) { - memcpy(buf, src, bytes); - chacha20_asm_block_xor_ssse3(ctx->state, buf, buf); - memcpy(dst, buf, bytes); - } + chacha20_ssse3(dst, src, bytes, &ctx->state[4], &ctx->state[12]); + ctx->state[12] += (bytes + 63) / 64; return; #endif #elif IS_ENABLED(CONFIG_KERNEL_MODE_NEON) @@ -352,261 +359,283 @@ no_simd: crypto_xor(dst, buf, bytes); } } +typedef void (*poly1305_blocks_f)(void *ctx, const u8 *inp, size_t len, u32 padbit); +typedef void (*poly1305_emit_f)(void *ctx, u8 mac[16], const u32 nonce[4]); struct poly1305_ctx { - /* key */ - u32 r[5]; - /* finalize key */ - u32 s[4]; - /* accumulator */ + u8 opaque[24 * sizeof(u64)]; + u32 nonce[4]; + u8 data[POLY1305_BLOCK_SIZE]; + size_t num; + struct { + poly1305_blocks_f blocks; + poly1305_emit_f emit; + } func; +} __aligned(8); + +#ifndef CONFIG_X86_64 +struct poly1305_internal { u32 h[5]; - /* partial buffer */ - u8 buf[POLY1305_BLOCK_SIZE]; - /* bytes used in partial buffer */ - unsigned int buflen; - /* derived key u set? */ - bool uset; - /* derived keys r^3, r^4 set? 
*/ - bool wset; - /* derived Poly1305 key r^2 */ - u32 u[5]; - /* derived Poly1305 key r^3 */ - u32 r3[5]; - /* derived Poly1305 key r^4 */ - u32 r4[5]; + u32 r[4]; }; -static void poly1305_init(struct poly1305_ctx *ctx, const u8 key[POLY1305_KEY_SIZE]) +static void poly1305_init_generic(void *ctx, const u8 key[16]) { - memset(ctx, 0, sizeof(struct poly1305_ctx)); + struct poly1305_internal *st = (struct poly1305_internal *)ctx; + + /* h = 0 */ + st->h[0] = 0; + st->h[1] = 0; + st->h[2] = 0; + st->h[3] = 0; + st->h[4] = 0; + /* r &= 0xffffffc0ffffffc0ffffffc0fffffff */ - ctx->r[0] = (le32_to_cpuvp(key + 0) >> 0) & 0x3ffffff; - ctx->r[1] = (get_unaligned_le32(key + 3) >> 2) & 0x3ffff03; - ctx->r[2] = (get_unaligned_le32(key + 6) >> 4) & 0x3ffc0ff; - ctx->r[3] = (get_unaligned_le32(key + 9) >> 6) & 0x3f03fff; - ctx->r[4] = (le32_to_cpuvp(key + 12) >> 8) & 0x00fffff; - ctx->s[0] = le32_to_cpuvp(key + 16); - ctx->s[1] = le32_to_cpuvp(key + 20); - ctx->s[2] = le32_to_cpuvp(key + 24); - ctx->s[3] = le32_to_cpuvp(key + 28); + st->r[0] = le32_to_cpuvp(&key[ 0]) & 0x0fffffff; + st->r[1] = le32_to_cpuvp(&key[ 4]) & 0x0ffffffc; + st->r[2] = le32_to_cpuvp(&key[ 8]) & 0x0ffffffc; + st->r[3] = le32_to_cpuvp(&key[12]) & 0x0ffffffc; } -static unsigned int poly1305_generic_blocks(struct poly1305_ctx *ctx, const u8 *src, unsigned int srclen, u32 hibit) +static void +poly1305_blocks_generic(void *ctx, const u8 *inp, size_t len, u32 padbit) { - u32 r0, r1, r2, r3, r4; - u32 s1, s2, s3, s4; - u32 h0, h1, h2, h3, h4; - u64 d0, d1, d2, d3, d4; - - r0 = ctx->r[0]; - r1 = ctx->r[1]; - r2 = ctx->r[2]; - r3 = ctx->r[3]; - r4 = ctx->r[4]; - - s1 = r1 * 5; - s2 = r2 * 5; - s3 = r3 * 5; - s4 = r4 * 5; - - h0 = ctx->h[0]; - h1 = ctx->h[1]; - h2 = ctx->h[2]; - h3 = ctx->h[3]; - h4 = ctx->h[4]; - - while (likely(srclen >= POLY1305_BLOCK_SIZE)) { +#define CONSTANT_TIME_CARRY(a,b) ((a ^ ((a ^ b) | ((a - b) ^ b))) >> (sizeof(a) * 8 - 1)) + struct poly1305_internal *st = (struct poly1305_internal *)ctx; + u32 r0, r1, r2, r3; + u32 s1, s2, s3; + u32 h0, h1, h2, h3, h4, c; + u64 d0, d1, d2, d3; + + r0 = st->r[0]; + r1 = st->r[1]; + r2 = st->r[2]; + r3 = st->r[3]; + + s1 = r1 + (r1 >> 2); + s2 = r2 + (r2 >> 2); + s3 = r3 + (r3 >> 2); + + h0 = st->h[0]; + h1 = st->h[1]; + h2 = st->h[2]; + h3 = st->h[3]; + h4 = st->h[4]; + + while (len >= POLY1305_BLOCK_SIZE) { /* h += m[i] */ - h0 += (le32_to_cpuvp(src + 0) >> 0) & 0x3ffffff; - h1 += (get_unaligned_le32(src + 3) >> 2) & 0x3ffffff; - h2 += (get_unaligned_le32(src + 6) >> 4) & 0x3ffffff; - h3 += (get_unaligned_le32(src + 9) >> 6) & 0x3ffffff; - h4 += (le32_to_cpuvp(src + 12) >> 8) | hibit; - - /* h *= r */ - d0 = mlt(h0, r0) + mlt(h1, s4) + mlt(h2, s3) + mlt(h3, s2) + mlt(h4, s1); - d1 = mlt(h0, r1) + mlt(h1, r0) + mlt(h2, s4) + mlt(h3, s3) + mlt(h4, s2); - d2 = mlt(h0, r2) + mlt(h1, r1) + mlt(h2, r0) + mlt(h3, s4) + mlt(h4, s3); - d3 = mlt(h0, r3) + mlt(h1, r2) + mlt(h2, r1) + mlt(h3, r0) + mlt(h4, s4); - d4 = mlt(h0, r4) + mlt(h1, r3) + mlt(h2, r2) + mlt(h3, r1) + mlt(h4, r0); - - /* (partial) h %= p */ - d1 += sr(d0, 26); h0 = and(d0, 0x3ffffff); - d2 += sr(d1, 26); h1 = and(d1, 0x3ffffff); - d3 += sr(d2, 26); h2 = and(d2, 0x3ffffff); - d4 += sr(d3, 26); h3 = and(d3, 0x3ffffff); - h0 += sr(d4, 26) * 5; h4 = and(d4, 0x3ffffff); - h1 += h0 >> 26; h0 = h0 & 0x3ffffff; - - src += POLY1305_BLOCK_SIZE; - srclen -= POLY1305_BLOCK_SIZE; + h0 = (u32)(d0 = (u64)h0 + le32_to_cpuvp(inp + 0)); + h1 = (u32)(d1 = (u64)h1 + (d0 >> 32) + le32_to_cpuvp(inp + 4)); + h2 = (u32)(d2 = (u64)h2 + (d1 
>> 32) + le32_to_cpuvp(inp + 8)); + h3 = (u32)(d3 = (u64)h3 + (d2 >> 32) + le32_to_cpuvp(inp + 12)); + h4 += (u32)(d3 >> 32) + padbit; + + /* h *= r "%" p, where "%" stands for "partial remainder" */ + d0 = ((u64)h0 * r0) + + ((u64)h1 * s3) + + ((u64)h2 * s2) + + ((u64)h3 * s1); + d1 = ((u64)h0 * r1) + + ((u64)h1 * r0) + + ((u64)h2 * s3) + + ((u64)h3 * s2) + + (h4 * s1); + d2 = ((u64)h0 * r2) + + ((u64)h1 * r1) + + ((u64)h2 * r0) + + ((u64)h3 * s3) + + (h4 * s2); + d3 = ((u64)h0 * r3) + + ((u64)h1 * r2) + + ((u64)h2 * r1) + + ((u64)h3 * r0) + + (h4 * s3); + h4 = (h4 * r0); + + /* last reduction step: */ + /* a) h4:h0 = h4<<128 + d3<<96 + d2<<64 + d1<<32 + d0 */ + h0 = (u32)d0; + h1 = (u32)(d1 += d0 >> 32); + h2 = (u32)(d2 += d1 >> 32); + h3 = (u32)(d3 += d2 >> 32); + h4 += (u32)(d3 >> 32); + /* b) (h4:h0 += (h4:h0>>130) * 5) %= 2^130 */ + c = (h4 >> 2) + (h4 & ~3U); + h4 &= 3; + h0 += c; + h1 += (c = CONSTANT_TIME_CARRY(h0,c)); + h2 += (c = CONSTANT_TIME_CARRY(h1,c)); + h3 += (c = CONSTANT_TIME_CARRY(h2,c)); + h4 += CONSTANT_TIME_CARRY(h3,c); + /* + * Occasional overflows to 3rd bit of h4 are taken care of + * "naturally". If after this point we end up at the top of + * this loop, then the overflow bit will be accounted for + * in next iteration. If we end up in poly1305_emit, then + * comparison to modulus below will still count as "carry + * into 131st bit", so that properly reduced value will be + * picked in conditional move. + */ + + inp += POLY1305_BLOCK_SIZE; + len -= POLY1305_BLOCK_SIZE; } - ctx->h[0] = h0; - ctx->h[1] = h1; - ctx->h[2] = h2; - ctx->h[3] = h3; - ctx->h[4] = h4; - - return srclen; + st->h[0] = h0; + st->h[1] = h1; + st->h[2] = h2; + st->h[3] = h3; + st->h[4] = h4; +#undef CONSTANT_TIME_CARRY } -#ifdef CONFIG_X86_64 -static void poly1305_simd_mult(u32 *a, const u32 *b) +static void poly1305_emit_generic(void *ctx, u8 mac[16], const u32 nonce[4]) { - u8 m[POLY1305_BLOCK_SIZE]; - - memset(m, 0, sizeof(m)); - /* The poly1305 block function adds a hi-bit to the accumulator which - * we don't need for key multiplication; compensate for it. 
- */ - a[4] -= 1U << 24; - poly1305_asm_block_sse2(a, m, b, 1); + struct poly1305_internal *st = (struct poly1305_internal *)ctx; + __le32 *omac = (__force __le32 *)mac; + u32 h0, h1, h2, h3, h4; + u32 g0, g1, g2, g3, g4; + u64 t; + u32 mask; + + h0 = st->h[0]; + h1 = st->h[1]; + h2 = st->h[2]; + h3 = st->h[3]; + h4 = st->h[4]; + + /* compare to modulus by computing h + -p */ + g0 = (u32)(t = (u64)h0 + 5); + g1 = (u32)(t = (u64)h1 + (t >> 32)); + g2 = (u32)(t = (u64)h2 + (t >> 32)); + g3 = (u32)(t = (u64)h3 + (t >> 32)); + g4 = h4 + (u32)(t >> 32); + + /* if there was carry into 131st bit, h3:h0 = g3:g0 */ + mask = 0 - (g4 >> 2); + g0 &= mask; + g1 &= mask; + g2 &= mask; + g3 &= mask; + mask = ~mask; + h0 = (h0 & mask) | g0; + h1 = (h1 & mask) | g1; + h2 = (h2 & mask) | g2; + h3 = (h3 & mask) | g3; + + /* mac = (h + nonce) % (2^128) */ + h0 = (u32)(t = (u64)h0 + nonce[0]); + h1 = (u32)(t = (u64)h1 + (t >> 32) + nonce[1]); + h2 = (u32)(t = (u64)h2 + (t >> 32) + nonce[2]); + h3 = (u32)(t = (u64)h3 + (t >> 32) + nonce[3]); + + omac[0] = cpu_to_le32(h0); + omac[1] = cpu_to_le32(h1); + omac[2] = cpu_to_le32(h2); + omac[3] = cpu_to_le32(h3); } +#endif /* !CONFIG_X86_64 */ -static unsigned int poly1305_simd_blocks(struct poly1305_ctx *ctx, const u8 *src, unsigned int srclen) +void poly1305_init(struct poly1305_ctx *ctx, const u8 key[POLY1305_KEY_SIZE], bool have_simd) { - unsigned int blocks; + ctx->nonce[0] = le32_to_cpuvp(&key[16]); + ctx->nonce[1] = le32_to_cpuvp(&key[20]); + ctx->nonce[2] = le32_to_cpuvp(&key[24]); + ctx->nonce[3] = le32_to_cpuvp(&key[28]); +#ifdef CONFIG_X86_64 + poly1305_init_x86_64(ctx->opaque, key); + ctx->func.blocks = poly1305_blocks_x86_64; + ctx->func.emit = poly1305_emit_x86_64; +#ifdef CONFIG_AS_AVX512 + if(chacha20poly1305_use_avx512 && have_simd) { + ctx->func.blocks = poly1305_blocks_avx512; + ctx->func.emit = poly1305_emit_avx; + } else +#endif #ifdef CONFIG_AS_AVX2 - if (chacha20poly1305_use_avx2 && srclen >= POLY1305_BLOCK_SIZE * 4) { - if (unlikely(!ctx->wset)) { - if (!ctx->uset) { - memcpy(ctx->u, ctx->r, sizeof(ctx->u)); - poly1305_simd_mult(ctx->u, ctx->r); - ctx->uset = true; - } - memcpy(ctx->r3, ctx->u, sizeof(ctx->u)); - poly1305_simd_mult(ctx->r3, ctx->r); - memcpy(ctx->r4, ctx->r3, sizeof(ctx->u)); - poly1305_simd_mult(ctx->r4, ctx->r); - ctx->wset = true; - } - blocks = srclen / (POLY1305_BLOCK_SIZE * 4); - poly1305_asm_4block_avx2(ctx->h, src, ctx->r, blocks, ctx->u); - src += POLY1305_BLOCK_SIZE * 4 * blocks; - srclen -= POLY1305_BLOCK_SIZE * 4 * blocks; - } + if (chacha20poly1305_use_avx2 && have_simd) { + ctx->func.blocks = poly1305_blocks_avx2; + ctx->func.emit = poly1305_emit_avx; + } else #endif - if (likely(srclen >= POLY1305_BLOCK_SIZE * 2)) { - if (unlikely(!ctx->uset)) { - memcpy(ctx->u, ctx->r, sizeof(ctx->u)); - poly1305_simd_mult(ctx->u, ctx->r); - ctx->uset = true; - } - blocks = srclen / (POLY1305_BLOCK_SIZE * 2); - poly1305_asm_2block_sse2(ctx->h, src, ctx->r, blocks, ctx->u); - src += POLY1305_BLOCK_SIZE * 2 * blocks; - srclen -= POLY1305_BLOCK_SIZE * 2 * blocks; - } - if (srclen >= POLY1305_BLOCK_SIZE) { - poly1305_asm_block_sse2(ctx->h, src, ctx->r, 1); - srclen -= POLY1305_BLOCK_SIZE; +#ifdef CONFIG_AS_AVX + if (chacha20poly1305_use_avx && have_simd) { + ctx->func.blocks = poly1305_blocks_avx; + ctx->func.emit = poly1305_emit_avx; } - return srclen; -} #endif +#else + poly1305_init_generic(ctx->opaque, key); +#endif + ctx->num = 0; +} -static void poly1305_update(struct poly1305_ctx *ctx, const u8 *src, unsigned int srclen, 
bool have_simd) +void poly1305_update(struct poly1305_ctx *ctx, const u8 *inp, size_t len) { - unsigned int bytes; - - if (unlikely(ctx->buflen)) { - bytes = min(srclen, POLY1305_BLOCK_SIZE - ctx->buflen); - memcpy(ctx->buf + ctx->buflen, src, bytes); - src += bytes; - srclen -= bytes; - ctx->buflen += bytes; - - if (ctx->buflen == POLY1305_BLOCK_SIZE) { #ifdef CONFIG_X86_64 - if (have_simd && chacha20poly1305_use_sse2) - poly1305_simd_blocks(ctx, ctx->buf, POLY1305_BLOCK_SIZE); - else + const poly1305_blocks_f blocks = ctx->func.blocks; +#else + const poly1305_blocks_f blocks = poly1305_blocks_generic; #endif - poly1305_generic_blocks(ctx, ctx->buf, POLY1305_BLOCK_SIZE, 1U << 24); - ctx->buflen = 0; + + const size_t num = ctx->num; + size_t rem;; + + if (num) { + rem = POLY1305_BLOCK_SIZE - num; + if (len >= rem) { + memcpy(ctx->data + num, inp, rem); + blocks(ctx->opaque, ctx->data, POLY1305_BLOCK_SIZE, 1); + inp += rem; + len -= rem; + } else { + /* Still not enough data to process a block. */ + memcpy(ctx->data + num, inp, len); + ctx->num = num + len; + return; } } - if (likely(srclen >= POLY1305_BLOCK_SIZE)) { -#ifdef CONFIG_X86_64 - if (have_simd && chacha20poly1305_use_sse2) - bytes = poly1305_simd_blocks(ctx, src, srclen); - else -#endif - bytes = poly1305_generic_blocks(ctx, src, srclen, 1U << 24); - src += srclen - bytes; - srclen = bytes; - } + rem = len % POLY1305_BLOCK_SIZE; + len -= rem; - if (unlikely(srclen)) { - ctx->buflen = srclen; - memcpy(ctx->buf, src, srclen); + if (len >= POLY1305_BLOCK_SIZE) { + blocks(ctx->opaque, inp, len, 1); + inp += len; } + + if (rem) + memcpy(ctx->data, inp, rem); + + ctx->num = rem; } -static void poly1305_finish(struct poly1305_ctx *ctx, u8 *dst) +void poly1305_finish(struct poly1305_ctx * ctx, u8 mac[16]) { - __le32 *mac = (__le32 *)dst; - u32 h0, h1, h2, h3, h4; - u32 g0, g1, g2, g3, g4; - u32 mask; - u64 f = 0; +#ifdef CONFIG_X86_64 + poly1305_blocks_f blocks = ctx->func.blocks; + poly1305_emit_f emit = ctx->func.emit; +#else + poly1305_blocks_f blocks = poly1305_blocks_generic; + poly1305_emit_f emit = poly1305_emit_generic; +#endif + size_t num = ctx->num; - if (unlikely(ctx->buflen)) { - ctx->buf[ctx->buflen++] = 1; - memset(ctx->buf + ctx->buflen, 0, POLY1305_BLOCK_SIZE - ctx->buflen); - poly1305_generic_blocks(ctx, ctx->buf, POLY1305_BLOCK_SIZE, 0); + if (num) { + ctx->data[num++] = 1; /* pad bit */ + while (num < POLY1305_BLOCK_SIZE) + ctx->data[num++] = 0; + blocks(ctx->opaque, ctx->data, POLY1305_BLOCK_SIZE, 0); } - /* fully carry h */ - h0 = ctx->h[0]; - h1 = ctx->h[1]; - h2 = ctx->h[2]; - h3 = ctx->h[3]; - h4 = ctx->h[4]; - - h2 += (h1 >> 26); h1 = h1 & 0x3ffffff; - h3 += (h2 >> 26); h2 = h2 & 0x3ffffff; - h4 += (h3 >> 26); h3 = h3 & 0x3ffffff; - h0 += (h4 >> 26) * 5; h4 = h4 & 0x3ffffff; - h1 += (h0 >> 26); h0 = h0 & 0x3ffffff; - - /* compute h + -p */ - g0 = h0 + 5; - g1 = h1 + (g0 >> 26); g0 &= 0x3ffffff; - g2 = h2 + (g1 >> 26); g1 &= 0x3ffffff; - g3 = h3 + (g2 >> 26); g2 &= 0x3ffffff; - g4 = h4 + (g3 >> 26) - (1U << 26); g3 &= 0x3ffffff; - - /* select h if h < p, or h + -p if h >= p */ - mask = (g4 >> ((sizeof(u32) * 8) - 1)) - 1; - g0 &= mask; - g1 &= mask; - g2 &= mask; - g3 &= mask; - g4 &= mask; - mask = ~mask; - h0 = (h0 & mask) | g0; - h1 = (h1 & mask) | g1; - h2 = (h2 & mask) | g2; - h3 = (h3 & mask) | g3; - h4 = (h4 & mask) | g4; - - /* h = h % (2^128) */ - h0 = (h0 >> 0) | (h1 << 26); - h1 = (h1 >> 6) | (h2 << 20); - h2 = (h2 >> 12) | (h3 << 14); - h3 = (h3 >> 18) | (h4 << 8); - - /* mac = (h + s) % (2^128) */ - 
f = (f >> 32) + h0 + ctx->s[0]; mac[0] = cpu_to_le32(f); - f = (f >> 32) + h1 + ctx->s[1]; mac[1] = cpu_to_le32(f); - f = (f >> 32) + h2 + ctx->s[2]; mac[2] = cpu_to_le32(f); - f = (f >> 32) + h3 + ctx->s[3]; mac[3] = cpu_to_le32(f); + emit(ctx->opaque, mac, ctx->nonce); + + /* zero out the state */ + memzero_explicit(ctx, sizeof(*ctx)); } + static const u8 pad0[16] = { 0 }; static struct crypto_alg chacha20_alg = { @@ -636,22 +665,22 @@ static inline void __chacha20poly1305_encrypt(u8 *dst, const u8 *src, const size chacha20_keysetup(&chacha20_state, key, (u8 *)&le_nonce); chacha20_crypt(&chacha20_state, block0, block0, sizeof(block0), have_simd); - poly1305_init(&poly1305_state, block0); + poly1305_init(&poly1305_state, block0, have_simd); memzero_explicit(block0, sizeof(block0)); - poly1305_update(&poly1305_state, ad, ad_len, have_simd); - poly1305_update(&poly1305_state, pad0, (0x10 - ad_len) & 0xf, have_simd); + poly1305_update(&poly1305_state, ad, ad_len); + poly1305_update(&poly1305_state, pad0, (0x10 - ad_len) & 0xf); chacha20_crypt(&chacha20_state, dst, src, src_len, have_simd); - poly1305_update(&poly1305_state, dst, src_len, have_simd); - poly1305_update(&poly1305_state, pad0, (0x10 - src_len) & 0xf, have_simd); + poly1305_update(&poly1305_state, dst, src_len); + poly1305_update(&poly1305_state, pad0, (0x10 - src_len) & 0xf); len = cpu_to_le64(ad_len); - poly1305_update(&poly1305_state, (u8 *)&len, sizeof(len), have_simd); + poly1305_update(&poly1305_state, (u8 *)&len, sizeof(len)); len = cpu_to_le64(src_len); - poly1305_update(&poly1305_state, (u8 *)&len, sizeof(len), have_simd); + poly1305_update(&poly1305_state, (u8 *)&len, sizeof(len)); poly1305_finish(&poly1305_state, dst + src_len); @@ -687,11 +716,11 @@ bool chacha20poly1305_encrypt_sg(struct scatterlist *dst, struct scatterlist *sr chacha20_keysetup(&chacha20_state, key, (u8 *)&le_nonce); chacha20_crypt(&chacha20_state, block0, block0, sizeof(block0), have_simd); - poly1305_init(&poly1305_state, block0); + poly1305_init(&poly1305_state, block0, have_simd); memzero_explicit(block0, sizeof(block0)); - poly1305_update(&poly1305_state, ad, ad_len, have_simd); - poly1305_update(&poly1305_state, pad0, (0x10 - ad_len) & 0xf, have_simd); + poly1305_update(&poly1305_state, ad, ad_len); + poly1305_update(&poly1305_state, pad0, (0x10 - ad_len) & 0xf); if (likely(src_len)) { blkcipher_walk_init(&walk, dst, src, src_len); @@ -700,25 +729,25 @@ bool chacha20poly1305_encrypt_sg(struct scatterlist *dst, struct scatterlist *sr size_t chunk_len = rounddown(walk.nbytes, CHACHA20_BLOCK_SIZE); chacha20_crypt(&chacha20_state, walk.dst.virt.addr, walk.src.virt.addr, chunk_len, have_simd); - poly1305_update(&poly1305_state, walk.dst.virt.addr, chunk_len, have_simd); + poly1305_update(&poly1305_state, walk.dst.virt.addr, chunk_len); ret = blkcipher_walk_done(&chacha20_desc, &walk, walk.nbytes % CHACHA20_BLOCK_SIZE); } if (walk.nbytes) { chacha20_crypt(&chacha20_state, walk.dst.virt.addr, walk.src.virt.addr, walk.nbytes, have_simd); - poly1305_update(&poly1305_state, walk.dst.virt.addr, walk.nbytes, have_simd); + poly1305_update(&poly1305_state, walk.dst.virt.addr, walk.nbytes); ret = blkcipher_walk_done(&chacha20_desc, &walk, 0); } } if (unlikely(ret)) goto err; - poly1305_update(&poly1305_state, pad0, (0x10 - src_len) & 0xf, have_simd); + poly1305_update(&poly1305_state, pad0, (0x10 - src_len) & 0xf); len = cpu_to_le64(ad_len); - poly1305_update(&poly1305_state, (u8 *)&len, sizeof(len), have_simd); + poly1305_update(&poly1305_state, (u8 *)&len, 
sizeof(len)); len = cpu_to_le64(src_len); - poly1305_update(&poly1305_state, (u8 *)&len, sizeof(len), have_simd); + poly1305_update(&poly1305_state, (u8 *)&len, sizeof(len)); poly1305_finish(&poly1305_state, mac); scatterwalk_map_and_copy(mac, dst, src_len, sizeof(mac), 1); @@ -749,21 +778,21 @@ static inline bool __chacha20poly1305_decrypt(u8 *dst, const u8 *src, const size chacha20_keysetup(&chacha20_state, key, (u8 *)&le_nonce); chacha20_crypt(&chacha20_state, block0, block0, sizeof(block0), have_simd); - poly1305_init(&poly1305_state, block0); + poly1305_init(&poly1305_state, block0, have_simd); memzero_explicit(block0, sizeof(block0)); - poly1305_update(&poly1305_state, ad, ad_len, have_simd); - poly1305_update(&poly1305_state, pad0, (0x10 - ad_len) & 0xf, have_simd); + poly1305_update(&poly1305_state, ad, ad_len); + poly1305_update(&poly1305_state, pad0, (0x10 - ad_len) & 0xf); dst_len = src_len - POLY1305_MAC_SIZE; - poly1305_update(&poly1305_state, src, dst_len, have_simd); - poly1305_update(&poly1305_state, pad0, (0x10 - dst_len) & 0xf, have_simd); + poly1305_update(&poly1305_state, src, dst_len); + poly1305_update(&poly1305_state, pad0, (0x10 - dst_len) & 0xf); len = cpu_to_le64(ad_len); - poly1305_update(&poly1305_state, (u8 *)&len, sizeof(len), have_simd); + poly1305_update(&poly1305_state, (u8 *)&len, sizeof(len)); len = cpu_to_le64(dst_len); - poly1305_update(&poly1305_state, (u8 *)&len, sizeof(len), have_simd); + poly1305_update(&poly1305_state, (u8 *)&len, sizeof(len)); poly1305_finish(&poly1305_state, mac); memzero_explicit(&poly1305_state, sizeof(poly1305_state)); @@ -811,11 +840,11 @@ bool chacha20poly1305_decrypt_sg(struct scatterlist *dst, struct scatterlist *sr chacha20_keysetup(&chacha20_state, key, (u8 *)&le_nonce); chacha20_crypt(&chacha20_state, block0, block0, sizeof(block0), have_simd); - poly1305_init(&poly1305_state, block0); + poly1305_init(&poly1305_state, block0, have_simd); memzero_explicit(block0, sizeof(block0)); - poly1305_update(&poly1305_state, ad, ad_len, have_simd); - poly1305_update(&poly1305_state, pad0, (0x10 - ad_len) & 0xf, have_simd); + poly1305_update(&poly1305_state, ad, ad_len); + poly1305_update(&poly1305_state, pad0, (0x10 - ad_len) & 0xf); dst_len = src_len - POLY1305_MAC_SIZE; if (likely(dst_len)) { @@ -824,12 +853,12 @@ bool chacha20poly1305_decrypt_sg(struct scatterlist *dst, struct scatterlist *sr while (walk.nbytes >= CHACHA20_BLOCK_SIZE) { size_t chunk_len = rounddown(walk.nbytes, CHACHA20_BLOCK_SIZE); - poly1305_update(&poly1305_state, walk.src.virt.addr, chunk_len, have_simd); + poly1305_update(&poly1305_state, walk.src.virt.addr, chunk_len); chacha20_crypt(&chacha20_state, walk.dst.virt.addr, walk.src.virt.addr, chunk_len, have_simd); ret = blkcipher_walk_done(&chacha20_desc, &walk, walk.nbytes % CHACHA20_BLOCK_SIZE); } if (walk.nbytes) { - poly1305_update(&poly1305_state, walk.src.virt.addr, walk.nbytes, have_simd); + poly1305_update(&poly1305_state, walk.src.virt.addr, walk.nbytes); chacha20_crypt(&chacha20_state, walk.dst.virt.addr, walk.src.virt.addr, walk.nbytes, have_simd); ret = blkcipher_walk_done(&chacha20_desc, &walk, 0); } @@ -837,13 +866,13 @@ bool chacha20poly1305_decrypt_sg(struct scatterlist *dst, struct scatterlist *sr if (unlikely(ret)) goto err; - poly1305_update(&poly1305_state, pad0, (0x10 - dst_len) & 0xf, have_simd); + poly1305_update(&poly1305_state, pad0, (0x10 - dst_len) & 0xf); len = cpu_to_le64(ad_len); - poly1305_update(&poly1305_state, (u8 *)&len, sizeof(len), have_simd); + 
poly1305_update(&poly1305_state, (u8 *)&len, sizeof(len)); len = cpu_to_le64(dst_len); - poly1305_update(&poly1305_state, (u8 *)&len, sizeof(len), have_simd); + poly1305_update(&poly1305_state, (u8 *)&len, sizeof(len)); poly1305_finish(&poly1305_state, computed_mac); memzero_explicit(&poly1305_state, sizeof(poly1305_state)); diff --git a/src/crypto/curve25519-avx-x86_64.S b/src/crypto/curve25519-avx-x86_64.S deleted file mode 100644 index b4851e5..0000000 --- a/src/crypto/curve25519-avx-x86_64.S +++ /dev/null @@ -1,3257 +0,0 @@ -/* - * Copyright (C) 2017 Jason A. Donenfeld . All Rights Reserved. - * Based on algorithms from Tung Chou - */ - -#include - -.data -.align 16 -curve25519_sandy2x_v0_0: .quad 0, 0 -curve25519_sandy2x_v1_0: .quad 1, 0 -curve25519_sandy2x_v2_1: .quad 2, 1 -curve25519_sandy2x_v9_0: .quad 9, 0 -curve25519_sandy2x_v9_9: .quad 9, 9 -curve25519_sandy2x_v19_19: .quad 19, 19 -curve25519_sandy2x_v38_1: .quad 38, 1 -curve25519_sandy2x_v38_38: .quad 38, 38 -curve25519_sandy2x_v121666_121666: .quad 121666, 121666 -curve25519_sandy2x_m25: .quad 33554431, 33554431 -curve25519_sandy2x_m26: .quad 67108863, 67108863 -curve25519_sandy2x_subc0: .quad 0x07FFFFDA, 0x03FFFFFE -curve25519_sandy2x_subc2: .quad 0x07FFFFFE, 0x03FFFFFE -curve25519_sandy2x_REDMASK51: .quad 0x0007FFFFFFFFFFFF - -.text -.align 32 -ENTRY(curve25519_sandy2x_fe51_mul) - push %rbp - mov %rsp,%rbp - sub $96,%rsp - and $-32,%rsp - movq %r11,0(%rsp) - movq %r12,8(%rsp) - movq %r13,16(%rsp) - movq %r14,24(%rsp) - movq %r15,32(%rsp) - movq %rbx,40(%rsp) - movq %rbp,48(%rsp) - movq %rdi,56(%rsp) - mov %rdx,%rcx - movq 24(%rsi),%rdx - imulq $19,%rdx,%rax - movq %rax,64(%rsp) - mulq 16(%rcx) - mov %rax,%r8 - mov %rdx,%r9 - movq 32(%rsi),%rdx - imulq $19,%rdx,%rax - movq %rax,72(%rsp) - mulq 8(%rcx) - add %rax,%r8 - adc %rdx,%r9 - movq 0(%rsi),%rax - mulq 0(%rcx) - add %rax,%r8 - adc %rdx,%r9 - movq 0(%rsi),%rax - mulq 8(%rcx) - mov %rax,%r10 - mov %rdx,%r11 - movq 0(%rsi),%rax - mulq 16(%rcx) - mov %rax,%r12 - mov %rdx,%r13 - movq 0(%rsi),%rax - mulq 24(%rcx) - mov %rax,%r14 - mov %rdx,%r15 - movq 0(%rsi),%rax - mulq 32(%rcx) - mov %rax,%rbx - mov %rdx,%rbp - movq 8(%rsi),%rax - mulq 0(%rcx) - add %rax,%r10 - adc %rdx,%r11 - movq 8(%rsi),%rax - mulq 8(%rcx) - add %rax,%r12 - adc %rdx,%r13 - movq 8(%rsi),%rax - mulq 16(%rcx) - add %rax,%r14 - adc %rdx,%r15 - movq 8(%rsi),%rax - mulq 24(%rcx) - add %rax,%rbx - adc %rdx,%rbp - movq 8(%rsi),%rdx - imulq $19,%rdx,%rax - mulq 32(%rcx) - add %rax,%r8 - adc %rdx,%r9 - movq 16(%rsi),%rax - mulq 0(%rcx) - add %rax,%r12 - adc %rdx,%r13 - movq 16(%rsi),%rax - mulq 8(%rcx) - add %rax,%r14 - adc %rdx,%r15 - movq 16(%rsi),%rax - mulq 16(%rcx) - add %rax,%rbx - adc %rdx,%rbp - movq 16(%rsi),%rdx - imulq $19,%rdx,%rax - mulq 24(%rcx) - add %rax,%r8 - adc %rdx,%r9 - movq 16(%rsi),%rdx - imulq $19,%rdx,%rax - mulq 32(%rcx) - add %rax,%r10 - adc %rdx,%r11 - movq 24(%rsi),%rax - mulq 0(%rcx) - add %rax,%r14 - adc %rdx,%r15 - movq 24(%rsi),%rax - mulq 8(%rcx) - add %rax,%rbx - adc %rdx,%rbp - movq 64(%rsp),%rax - mulq 24(%rcx) - add %rax,%r10 - adc %rdx,%r11 - movq 64(%rsp),%rax - mulq 32(%rcx) - add %rax,%r12 - adc %rdx,%r13 - movq 32(%rsi),%rax - mulq 0(%rcx) - add %rax,%rbx - adc %rdx,%rbp - movq 72(%rsp),%rax - mulq 16(%rcx) - add %rax,%r10 - adc %rdx,%r11 - movq 72(%rsp),%rax - mulq 24(%rcx) - add %rax,%r12 - adc %rdx,%r13 - movq 72(%rsp),%rax - mulq 32(%rcx) - add %rax,%r14 - adc %rdx,%r15 - movq curve25519_sandy2x_REDMASK51(%rip),%rsi - shld $13,%r8,%r9 - and %rsi,%r8 - shld 
$13,%r10,%r11 - and %rsi,%r10 - add %r9,%r10 - shld $13,%r12,%r13 - and %rsi,%r12 - add %r11,%r12 - shld $13,%r14,%r15 - and %rsi,%r14 - add %r13,%r14 - shld $13,%rbx,%rbp - and %rsi,%rbx - add %r15,%rbx - imulq $19,%rbp,%rdx - add %rdx,%r8 - mov %r8,%rdx - shr $51,%rdx - add %r10,%rdx - mov %rdx,%rcx - shr $51,%rdx - and %rsi,%r8 - add %r12,%rdx - mov %rdx,%r9 - shr $51,%rdx - and %rsi,%rcx - add %r14,%rdx - mov %rdx,%rax - shr $51,%rdx - and %rsi,%r9 - add %rbx,%rdx - mov %rdx,%r10 - shr $51,%rdx - and %rsi,%rax - imulq $19,%rdx,%rdx - add %rdx,%r8 - and %rsi,%r10 - movq %r8,0(%rdi) - movq %rcx,8(%rdi) - movq %r9,16(%rdi) - movq %rax,24(%rdi) - movq %r10,32(%rdi) - movq 0(%rsp),%r11 - movq 8(%rsp),%r12 - movq 16(%rsp),%r13 - movq 24(%rsp),%r14 - movq 32(%rsp),%r15 - movq 40(%rsp),%rbx - movq 48(%rsp),%rbp - leave - ret -ENDPROC(curve25519_sandy2x_fe51_mul) - -.align 32 -ENTRY(curve25519_sandy2x_fe51_nsquare) - push %rbp - mov %rsp,%rbp - sub $64,%rsp - and $-32,%rsp - movq %r11,0(%rsp) - movq %r12,8(%rsp) - movq %r13,16(%rsp) - movq %r14,24(%rsp) - movq %r15,32(%rsp) - movq %rbx,40(%rsp) - movq %rbp,48(%rsp) - movq 0(%rsi),%rcx - movq 8(%rsi),%r8 - movq 16(%rsi),%r9 - movq 24(%rsi),%rax - movq 32(%rsi),%rsi - movq %r9,16(%rdi) - movq %rax,24(%rdi) - movq %rsi,32(%rdi) - mov %rdx,%rsi - - .align 16 - .Lloop: - sub $1,%rsi - mov %rcx,%rax - mul %rcx - add %rcx,%rcx - mov %rax,%r9 - mov %rdx,%r10 - mov %rcx,%rax - mul %r8 - mov %rax,%r11 - mov %rdx,%r12 - mov %rcx,%rax - mulq 16(%rdi) - mov %rax,%r13 - mov %rdx,%r14 - mov %rcx,%rax - mulq 24(%rdi) - mov %rax,%r15 - mov %rdx,%rbx - mov %rcx,%rax - mulq 32(%rdi) - mov %rax,%rcx - mov %rdx,%rbp - mov %r8,%rax - mul %r8 - add %r8,%r8 - add %rax,%r13 - adc %rdx,%r14 - mov %r8,%rax - mulq 16(%rdi) - add %rax,%r15 - adc %rdx,%rbx - mov %r8,%rax - imulq $19, %r8,%r8 - mulq 24(%rdi) - add %rax,%rcx - adc %rdx,%rbp - mov %r8,%rax - mulq 32(%rdi) - add %rax,%r9 - adc %rdx,%r10 - movq 16(%rdi),%rax - mulq 16(%rdi) - add %rax,%rcx - adc %rdx,%rbp - shld $13,%rcx,%rbp - movq 16(%rdi),%rax - imulq $38, %rax,%rax - mulq 24(%rdi) - add %rax,%r9 - adc %rdx,%r10 - shld $13,%r9,%r10 - movq 16(%rdi),%rax - imulq $38, %rax,%rax - mulq 32(%rdi) - add %rax,%r11 - adc %rdx,%r12 - movq 24(%rdi),%rax - imulq $19, %rax,%rax - mulq 24(%rdi) - add %rax,%r11 - adc %rdx,%r12 - shld $13,%r11,%r12 - movq 24(%rdi),%rax - imulq $38, %rax,%rax - mulq 32(%rdi) - add %rax,%r13 - adc %rdx,%r14 - shld $13,%r13,%r14 - movq 32(%rdi),%rax - imulq $19, %rax,%rax - mulq 32(%rdi) - add %rax,%r15 - adc %rdx,%rbx - shld $13,%r15,%rbx - movq curve25519_sandy2x_REDMASK51(%rip),%rdx - and %rdx,%rcx - add %rbx,%rcx - and %rdx,%r9 - and %rdx,%r11 - add %r10,%r11 - and %rdx,%r13 - add %r12,%r13 - and %rdx,%r15 - add %r14,%r15 - imulq $19, %rbp,%rbp - lea (%r9,%rbp),%r9 - mov %r9,%rax - shr $51,%r9 - add %r11,%r9 - and %rdx,%rax - mov %r9,%r8 - shr $51,%r9 - add %r13,%r9 - and %rdx,%r8 - mov %r9,%r10 - shr $51,%r9 - add %r15,%r9 - and %rdx,%r10 - movq %r10,16(%rdi) - mov %r9,%r10 - shr $51,%r9 - add %rcx,%r9 - and %rdx,%r10 - movq %r10,24(%rdi) - mov %r9,%r10 - shr $51,%r9 - imulq $19, %r9,%r9 - lea (%rax,%r9),%rcx - and %rdx,%r10 - movq %r10,32(%rdi) - cmp $0,%rsi - jne .Lloop - - movq %rcx,0(%rdi) - movq %r8,8(%rdi) - movq 0(%rsp),%r11 - movq 8(%rsp),%r12 - movq 16(%rsp),%r13 - movq 24(%rsp),%r14 - movq 32(%rsp),%r15 - movq 40(%rsp),%rbx - movq 48(%rsp),%rbp - leave - ret -ENDPROC(curve25519_sandy2x_fe51_nsquare) - -.align 32 -ENTRY(curve25519_sandy2x_fe51_pack) - push %rbp - mov %rsp,%rbp - 
sub $32,%rsp - and $-32,%rsp - movq %r11,0(%rsp) - movq %r12,8(%rsp) - movq 0(%rsi),%rdx - movq 8(%rsi),%rcx - movq 16(%rsi),%r8 - movq 24(%rsi),%r9 - movq 32(%rsi),%rsi - movq curve25519_sandy2x_REDMASK51(%rip),%rax - lea -18(%rax),%r10 - mov $3,%r11 - - .align 16 - .Lreduceloop: - mov %rdx,%r12 - shr $51,%r12 - and %rax,%rdx - add %r12,%rcx - mov %rcx,%r12 - shr $51,%r12 - and %rax,%rcx - add %r12,%r8 - mov %r8,%r12 - shr $51,%r12 - and %rax,%r8 - add %r12,%r9 - mov %r9,%r12 - shr $51,%r12 - and %rax,%r9 - add %r12,%rsi - mov %rsi,%r12 - shr $51,%r12 - and %rax,%rsi - imulq $19, %r12,%r12 - add %r12,%rdx - sub $1,%r11 - ja .Lreduceloop - - mov $1,%r12 - cmp %r10,%rdx - cmovl %r11,%r12 - cmp %rax,%rcx - cmovne %r11,%r12 - cmp %rax,%r8 - cmovne %r11,%r12 - cmp %rax,%r9 - cmovne %r11,%r12 - cmp %rax,%rsi - cmovne %r11,%r12 - neg %r12 - and %r12,%rax - and %r12,%r10 - sub %r10,%rdx - sub %rax,%rcx - sub %rax,%r8 - sub %rax,%r9 - sub %rax,%rsi - mov %rdx,%rax - and $0xFF,%eax - movb %al,0(%rdi) - mov %rdx,%rax - shr $8,%rax - and $0xFF,%eax - movb %al,1(%rdi) - mov %rdx,%rax - shr $16,%rax - and $0xFF,%eax - movb %al,2(%rdi) - mov %rdx,%rax - shr $24,%rax - and $0xFF,%eax - movb %al,3(%rdi) - mov %rdx,%rax - shr $32,%rax - and $0xFF,%eax - movb %al,4(%rdi) - mov %rdx,%rax - shr $40,%rax - and $0xFF,%eax - movb %al,5(%rdi) - mov %rdx,%rdx - shr $48,%rdx - mov %rcx,%rax - shl $3,%rax - and $0xF8,%eax - xor %rdx,%rax - movb %al,6(%rdi) - mov %rcx,%rdx - shr $5,%rdx - and $0xFF,%edx - movb %dl,7(%rdi) - mov %rcx,%rdx - shr $13,%rdx - and $0xFF,%edx - movb %dl,8(%rdi) - mov %rcx,%rdx - shr $21,%rdx - and $0xFF,%edx - movb %dl,9(%rdi) - mov %rcx,%rdx - shr $29,%rdx - and $0xFF,%edx - movb %dl,10(%rdi) - mov %rcx,%rdx - shr $37,%rdx - and $0xFF,%edx - movb %dl,11(%rdi) - mov %rcx,%rdx - shr $45,%rdx - mov %r8,%rcx - shl $6,%rcx - and $0xC0,%ecx - xor %rdx,%rcx - movb %cl,12(%rdi) - mov %r8,%rdx - shr $2,%rdx - and $0xFF,%edx - movb %dl,13(%rdi) - mov %r8,%rdx - shr $10,%rdx - and $0xFF,%edx - movb %dl,14(%rdi) - mov %r8,%rdx - shr $18,%rdx - and $0xFF,%edx - movb %dl,15(%rdi) - mov %r8,%rdx - shr $26,%rdx - and $0xFF,%edx - movb %dl,16(%rdi) - mov %r8,%rdx - shr $34,%rdx - and $0xFF,%edx - movb %dl,17(%rdi) - mov %r8,%rdx - shr $42,%rdx - movb %dl,18(%rdi) - mov %r8,%rdx - shr $50,%rdx - mov %r9,%rcx - shl $1,%rcx - and $0xFE,%ecx - xor %rdx,%rcx - movb %cl,19(%rdi) - mov %r9,%rdx - shr $7,%rdx - and $0xFF,%edx - movb %dl,20(%rdi) - mov %r9,%rdx - shr $15,%rdx - and $0xFF,%edx - movb %dl,21(%rdi) - mov %r9,%rdx - shr $23,%rdx - and $0xFF,%edx - movb %dl,22(%rdi) - mov %r9,%rdx - shr $31,%rdx - and $0xFF,%edx - movb %dl,23(%rdi) - mov %r9,%rdx - shr $39,%rdx - and $0xFF,%edx - movb %dl,24(%rdi) - mov %r9,%rdx - shr $47,%rdx - mov %rsi,%rcx - shl $4,%rcx - and $0xF0,%ecx - xor %rdx,%rcx - movb %cl,25(%rdi) - mov %rsi,%rdx - shr $4,%rdx - and $0xFF,%edx - movb %dl,26(%rdi) - mov %rsi,%rdx - shr $12,%rdx - and $0xFF,%edx - movb %dl,27(%rdi) - mov %rsi,%rdx - shr $20,%rdx - and $0xFF,%edx - movb %dl,28(%rdi) - mov %rsi,%rdx - shr $28,%rdx - and $0xFF,%edx - movb %dl,29(%rdi) - mov %rsi,%rdx - shr $36,%rdx - and $0xFF,%edx - movb %dl,30(%rdi) - mov %rsi,%rsi - shr $44,%rsi - movb %sil,31(%rdi) - movq 0(%rsp),%r11 - movq 8(%rsp),%r12 - leave - ret -ENDPROC(curve25519_sandy2x_fe51_pack) - -.align 32 -ENTRY(curve25519_sandy2x_ladder) - push %rbp - mov %rsp,%rbp - sub $1856,%rsp - and $-32,%rsp - movq %r11,1824(%rsp) - movq %r12,1832(%rsp) - movq %r13,1840(%rsp) - movq %r14,1848(%rsp) - vmovdqa 
curve25519_sandy2x_v0_0(%rip),%xmm0 - vmovdqa curve25519_sandy2x_v1_0(%rip),%xmm1 - vmovdqu 0(%rdi),%xmm2 - vmovdqa %xmm2,0(%rsp) - vmovdqu 16(%rdi),%xmm2 - vmovdqa %xmm2,16(%rsp) - vmovdqu 32(%rdi),%xmm2 - vmovdqa %xmm2,32(%rsp) - vmovdqu 48(%rdi),%xmm2 - vmovdqa %xmm2,48(%rsp) - vmovdqu 64(%rdi),%xmm2 - vmovdqa %xmm2,64(%rsp) - vmovdqa %xmm1,80(%rsp) - vmovdqa %xmm0,96(%rsp) - vmovdqa %xmm0,112(%rsp) - vmovdqa %xmm0,128(%rsp) - vmovdqa %xmm0,144(%rsp) - vmovdqa %xmm1,%xmm0 - vpxor %xmm1,%xmm1,%xmm1 - vpxor %xmm2,%xmm2,%xmm2 - vpxor %xmm3,%xmm3,%xmm3 - vpxor %xmm4,%xmm4,%xmm4 - vpxor %xmm5,%xmm5,%xmm5 - vpxor %xmm6,%xmm6,%xmm6 - vpxor %xmm7,%xmm7,%xmm7 - vpxor %xmm8,%xmm8,%xmm8 - vpxor %xmm9,%xmm9,%xmm9 - vmovdqu 0(%rdi),%xmm10 - vmovdqa %xmm10,160(%rsp) - vmovdqu 16(%rdi),%xmm10 - vmovdqa %xmm10,176(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm10,%xmm10 - vmovdqa %xmm10,192(%rsp) - vmovdqu 32(%rdi),%xmm10 - vmovdqa %xmm10,208(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm10,%xmm10 - vmovdqa %xmm10,224(%rsp) - vmovdqu 48(%rdi),%xmm10 - vmovdqa %xmm10,240(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm10,%xmm10 - vmovdqa %xmm10,256(%rsp) - vmovdqu 64(%rdi),%xmm10 - vmovdqa %xmm10,272(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm10,%xmm10 - vmovdqa %xmm10,288(%rsp) - vmovdqu 8(%rdi),%xmm10 - vpmuludq curve25519_sandy2x_v2_1(%rip),%xmm10,%xmm10 - vmovdqa %xmm10,304(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm10,%xmm10 - vmovdqa %xmm10,320(%rsp) - vmovdqu 24(%rdi),%xmm10 - vpmuludq curve25519_sandy2x_v2_1(%rip),%xmm10,%xmm10 - vmovdqa %xmm10,336(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm10,%xmm10 - vmovdqa %xmm10,352(%rsp) - vmovdqu 40(%rdi),%xmm10 - vpmuludq curve25519_sandy2x_v2_1(%rip),%xmm10,%xmm10 - vmovdqa %xmm10,368(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm10,%xmm10 - vmovdqa %xmm10,384(%rsp) - vmovdqu 56(%rdi),%xmm10 - vpmuludq curve25519_sandy2x_v2_1(%rip),%xmm10,%xmm10 - vmovdqa %xmm10,400(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm10,%xmm10 - vmovdqa %xmm10,416(%rsp) - vmovdqu 0(%rdi),%xmm10 - vmovdqu 64(%rdi),%xmm11 - vblendps $12, %xmm11, %xmm10, %xmm10 - vpshufd $2,%xmm10,%xmm10 - vpmuludq curve25519_sandy2x_v38_1(%rip),%xmm10,%xmm10 - vmovdqa %xmm10,432(%rsp) - movq 0(%rsi),%rdx - movq 8(%rsi),%rcx - movq 16(%rsi),%r8 - movq 24(%rsi),%r9 - shrd $1,%rcx,%rdx - shrd $1,%r8,%rcx - shrd $1,%r9,%r8 - shr $1,%r9 - xorq 0(%rsi),%rdx - xorq 8(%rsi),%rcx - xorq 16(%rsi),%r8 - xorq 24(%rsi),%r9 - leaq 800(%rsp),%rsi - mov $64,%rax - - .align 16 - .Lladder_small_loop: - mov %rdx,%r10 - mov %rcx,%r11 - mov %r8,%r12 - mov %r9,%r13 - shr $1,%rdx - shr $1,%rcx - shr $1,%r8 - shr $1,%r9 - and $1,%r10d - and $1,%r11d - and $1,%r12d - and $1,%r13d - neg %r10 - neg %r11 - neg %r12 - neg %r13 - movl %r10d,0(%rsi) - movl %r11d,256(%rsi) - movl %r12d,512(%rsi) - movl %r13d,768(%rsi) - add $4,%rsi - sub $1,%rax - jne .Lladder_small_loop - mov $255,%rdx - add $760,%rsi - - .align 16 - .Lladder_loop: - sub $1,%rdx - vbroadcastss 0(%rsi),%xmm10 - sub $4,%rsi - vmovdqa 0(%rsp),%xmm11 - vmovdqa 80(%rsp),%xmm12 - vpxor %xmm11,%xmm0,%xmm13 - vpand %xmm10,%xmm13,%xmm13 - vpxor %xmm13,%xmm0,%xmm0 - vpxor %xmm13,%xmm11,%xmm11 - vpxor %xmm12,%xmm1,%xmm13 - vpand %xmm10,%xmm13,%xmm13 - vpxor %xmm13,%xmm1,%xmm1 - vpxor %xmm13,%xmm12,%xmm12 - vmovdqa 16(%rsp),%xmm13 - vmovdqa 96(%rsp),%xmm14 - vpxor %xmm13,%xmm2,%xmm15 - vpand %xmm10,%xmm15,%xmm15 - vpxor %xmm15,%xmm2,%xmm2 - vpxor %xmm15,%xmm13,%xmm13 - vpxor %xmm14,%xmm3,%xmm15 - vpand 
%xmm10,%xmm15,%xmm15 - vpxor %xmm15,%xmm3,%xmm3 - vpxor %xmm15,%xmm14,%xmm14 - vmovdqa %xmm13,0(%rsp) - vmovdqa %xmm14,16(%rsp) - vmovdqa 32(%rsp),%xmm13 - vmovdqa 112(%rsp),%xmm14 - vpxor %xmm13,%xmm4,%xmm15 - vpand %xmm10,%xmm15,%xmm15 - vpxor %xmm15,%xmm4,%xmm4 - vpxor %xmm15,%xmm13,%xmm13 - vpxor %xmm14,%xmm5,%xmm15 - vpand %xmm10,%xmm15,%xmm15 - vpxor %xmm15,%xmm5,%xmm5 - vpxor %xmm15,%xmm14,%xmm14 - vmovdqa %xmm13,32(%rsp) - vmovdqa %xmm14,80(%rsp) - vmovdqa 48(%rsp),%xmm13 - vmovdqa 128(%rsp),%xmm14 - vpxor %xmm13,%xmm6,%xmm15 - vpand %xmm10,%xmm15,%xmm15 - vpxor %xmm15,%xmm6,%xmm6 - vpxor %xmm15,%xmm13,%xmm13 - vpxor %xmm14,%xmm7,%xmm15 - vpand %xmm10,%xmm15,%xmm15 - vpxor %xmm15,%xmm7,%xmm7 - vpxor %xmm15,%xmm14,%xmm14 - vmovdqa %xmm13,48(%rsp) - vmovdqa %xmm14,96(%rsp) - vmovdqa 64(%rsp),%xmm13 - vmovdqa 144(%rsp),%xmm14 - vpxor %xmm13,%xmm8,%xmm15 - vpand %xmm10,%xmm15,%xmm15 - vpxor %xmm15,%xmm8,%xmm8 - vpxor %xmm15,%xmm13,%xmm13 - vpxor %xmm14,%xmm9,%xmm15 - vpand %xmm10,%xmm15,%xmm15 - vpxor %xmm15,%xmm9,%xmm9 - vpxor %xmm15,%xmm14,%xmm14 - vmovdqa %xmm13,64(%rsp) - vmovdqa %xmm14,112(%rsp) - vpaddq curve25519_sandy2x_subc0(%rip),%xmm11,%xmm10 - vpsubq %xmm12,%xmm10,%xmm10 - vpaddq %xmm12,%xmm11,%xmm11 - vpunpckhqdq %xmm10,%xmm11,%xmm12 - vpunpcklqdq %xmm10,%xmm11,%xmm10 - vpaddq %xmm1,%xmm0,%xmm11 - vpaddq curve25519_sandy2x_subc0(%rip),%xmm0,%xmm0 - vpsubq %xmm1,%xmm0,%xmm0 - vpunpckhqdq %xmm11,%xmm0,%xmm1 - vpunpcklqdq %xmm11,%xmm0,%xmm0 - vpmuludq %xmm0,%xmm10,%xmm11 - vpmuludq %xmm1,%xmm10,%xmm13 - vmovdqa %xmm1,128(%rsp) - vpaddq %xmm1,%xmm1,%xmm1 - vpmuludq %xmm0,%xmm12,%xmm14 - vmovdqa %xmm0,144(%rsp) - vpaddq %xmm14,%xmm13,%xmm13 - vpmuludq %xmm1,%xmm12,%xmm0 - vmovdqa %xmm1,448(%rsp) - vpaddq %xmm3,%xmm2,%xmm1 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm2,%xmm2 - vpsubq %xmm3,%xmm2,%xmm2 - vpunpckhqdq %xmm1,%xmm2,%xmm3 - vpunpcklqdq %xmm1,%xmm2,%xmm1 - vpmuludq %xmm1,%xmm10,%xmm2 - vpaddq %xmm2,%xmm0,%xmm0 - vpmuludq %xmm3,%xmm10,%xmm2 - vmovdqa %xmm3,464(%rsp) - vpaddq %xmm3,%xmm3,%xmm3 - vpmuludq %xmm1,%xmm12,%xmm14 - vmovdqa %xmm1,480(%rsp) - vpaddq %xmm14,%xmm2,%xmm2 - vpmuludq %xmm3,%xmm12,%xmm1 - vmovdqa %xmm3,496(%rsp) - vpaddq %xmm5,%xmm4,%xmm3 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm4,%xmm4 - vpsubq %xmm5,%xmm4,%xmm4 - vpunpckhqdq %xmm3,%xmm4,%xmm5 - vpunpcklqdq %xmm3,%xmm4,%xmm3 - vpmuludq %xmm3,%xmm10,%xmm4 - vpaddq %xmm4,%xmm1,%xmm1 - vpmuludq %xmm5,%xmm10,%xmm4 - vmovdqa %xmm5,512(%rsp) - vpaddq %xmm5,%xmm5,%xmm5 - vpmuludq %xmm3,%xmm12,%xmm14 - vmovdqa %xmm3,528(%rsp) - vpaddq %xmm14,%xmm4,%xmm4 - vpaddq %xmm7,%xmm6,%xmm3 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm6,%xmm6 - vpsubq %xmm7,%xmm6,%xmm6 - vpunpckhqdq %xmm3,%xmm6,%xmm7 - vpunpcklqdq %xmm3,%xmm6,%xmm3 - vpmuludq %xmm3,%xmm10,%xmm6 - vpmuludq %xmm5,%xmm12,%xmm14 - vmovdqa %xmm5,544(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm5,%xmm5 - vmovdqa %xmm5,560(%rsp) - vpaddq %xmm14,%xmm6,%xmm6 - vpmuludq %xmm7,%xmm10,%xmm5 - vmovdqa %xmm7,576(%rsp) - vpaddq %xmm7,%xmm7,%xmm7 - vpmuludq %xmm3,%xmm12,%xmm14 - vmovdqa %xmm3,592(%rsp) - vpaddq %xmm14,%xmm5,%xmm5 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 - vmovdqa %xmm3,608(%rsp) - vpaddq %xmm9,%xmm8,%xmm3 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm8,%xmm8 - vpsubq %xmm9,%xmm8,%xmm8 - vpunpckhqdq %xmm3,%xmm8,%xmm9 - vpunpcklqdq %xmm3,%xmm8,%xmm3 - vmovdqa %xmm3,624(%rsp) - vpmuludq %xmm7,%xmm12,%xmm8 - vmovdqa %xmm7,640(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm7,%xmm7 - vmovdqa %xmm7,656(%rsp) - vpmuludq 
%xmm3,%xmm10,%xmm7 - vpaddq %xmm7,%xmm8,%xmm8 - vpmuludq %xmm9,%xmm10,%xmm7 - vmovdqa %xmm9,672(%rsp) - vpaddq %xmm9,%xmm9,%xmm9 - vpmuludq %xmm3,%xmm12,%xmm10 - vpaddq %xmm10,%xmm7,%xmm7 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 - vmovdqa %xmm3,688(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm12,%xmm12 - vpmuludq %xmm9,%xmm12,%xmm3 - vmovdqa %xmm9,704(%rsp) - vpaddq %xmm3,%xmm11,%xmm11 - vmovdqa 0(%rsp),%xmm3 - vmovdqa 16(%rsp),%xmm9 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm10 - vpsubq %xmm9,%xmm10,%xmm10 - vpaddq %xmm9,%xmm3,%xmm3 - vpunpckhqdq %xmm10,%xmm3,%xmm9 - vpunpcklqdq %xmm10,%xmm3,%xmm3 - vpmuludq 144(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm0,%xmm0 - vpmuludq 128(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm2,%xmm2 - vpmuludq 480(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm1,%xmm1 - vpmuludq 464(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm4,%xmm4 - vpmuludq 528(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm6,%xmm6 - vpmuludq 512(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm5,%xmm5 - vpmuludq 592(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm8,%xmm8 - vpmuludq 576(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm7,%xmm7 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 - vpmuludq 624(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm11,%xmm11 - vpmuludq 672(%rsp),%xmm3,%xmm3 - vpaddq %xmm3,%xmm13,%xmm13 - vpmuludq 144(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm2,%xmm2 - vpmuludq 448(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm1,%xmm1 - vpmuludq 480(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm4,%xmm4 - vpmuludq 496(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm6,%xmm6 - vpmuludq 528(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm5,%xmm5 - vpmuludq 544(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm8,%xmm8 - vpmuludq 592(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm7,%xmm7 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm9,%xmm9 - vpmuludq 640(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm11,%xmm11 - vpmuludq 624(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm13,%xmm13 - vpmuludq 704(%rsp),%xmm9,%xmm9 - vpaddq %xmm9,%xmm0,%xmm0 - vmovdqa 32(%rsp),%xmm3 - vmovdqa 80(%rsp),%xmm9 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm10 - vpsubq %xmm9,%xmm10,%xmm10 - vpaddq %xmm9,%xmm3,%xmm3 - vpunpckhqdq %xmm10,%xmm3,%xmm9 - vpunpcklqdq %xmm10,%xmm3,%xmm3 - vpmuludq 144(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm1,%xmm1 - vpmuludq 128(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm4,%xmm4 - vpmuludq 480(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm6,%xmm6 - vpmuludq 464(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm5,%xmm5 - vpmuludq 528(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm8,%xmm8 - vpmuludq 512(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm7,%xmm7 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 - vpmuludq 592(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm11,%xmm11 - vpmuludq 576(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm13,%xmm13 - vpmuludq 624(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm0,%xmm0 - vpmuludq 672(%rsp),%xmm3,%xmm3 - vpaddq %xmm3,%xmm2,%xmm2 - vpmuludq 144(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm4,%xmm4 - vpmuludq 448(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm6,%xmm6 - vpmuludq 480(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm5,%xmm5 - vpmuludq 496(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm8,%xmm8 - vpmuludq 528(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm7,%xmm7 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm9,%xmm9 - vpmuludq 544(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm11,%xmm11 - vpmuludq 592(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm13,%xmm13 - vpmuludq 640(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm0,%xmm0 - vpmuludq 624(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm2,%xmm2 - vpmuludq 704(%rsp),%xmm9,%xmm9 - vpaddq %xmm9,%xmm1,%xmm1 - vmovdqa 
48(%rsp),%xmm3 - vmovdqa 96(%rsp),%xmm9 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm10 - vpsubq %xmm9,%xmm10,%xmm10 - vpaddq %xmm9,%xmm3,%xmm3 - vpunpckhqdq %xmm10,%xmm3,%xmm9 - vpunpcklqdq %xmm10,%xmm3,%xmm3 - vpmuludq 144(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm6,%xmm6 - vpmuludq 128(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm5,%xmm5 - vpmuludq 480(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm8,%xmm8 - vpmuludq 464(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm7,%xmm7 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 - vpmuludq 528(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm11,%xmm11 - vpmuludq 512(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm13,%xmm13 - vpmuludq 592(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm0,%xmm0 - vpmuludq 576(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm2,%xmm2 - vpmuludq 624(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm1,%xmm1 - vpmuludq 672(%rsp),%xmm3,%xmm3 - vpaddq %xmm3,%xmm4,%xmm4 - vpmuludq 144(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm5,%xmm5 - vpmuludq 448(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm8,%xmm8 - vpmuludq 480(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm7,%xmm7 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm9,%xmm9 - vpmuludq 496(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm11,%xmm11 - vpmuludq 528(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm13,%xmm13 - vpmuludq 544(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm0,%xmm0 - vpmuludq 592(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm2,%xmm2 - vpmuludq 640(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm1,%xmm1 - vpmuludq 624(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm4,%xmm4 - vpmuludq 704(%rsp),%xmm9,%xmm9 - vpaddq %xmm9,%xmm6,%xmm6 - vmovdqa 64(%rsp),%xmm3 - vmovdqa 112(%rsp),%xmm9 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm10 - vpsubq %xmm9,%xmm10,%xmm10 - vpaddq %xmm9,%xmm3,%xmm3 - vpunpckhqdq %xmm10,%xmm3,%xmm9 - vpunpcklqdq %xmm10,%xmm3,%xmm3 - vpmuludq 144(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm8,%xmm8 - vpmuludq 128(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm7,%xmm7 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 - vpmuludq 480(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm11,%xmm11 - vpmuludq 464(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm13,%xmm13 - vpmuludq 528(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm0,%xmm0 - vpmuludq 512(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm2,%xmm2 - vpmuludq 592(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm1,%xmm1 - vpmuludq 576(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm4,%xmm4 - vpmuludq 624(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm6,%xmm6 - vpmuludq 672(%rsp),%xmm3,%xmm3 - vpaddq %xmm3,%xmm5,%xmm5 - vpmuludq 144(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm7,%xmm7 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm9,%xmm9 - vpmuludq 448(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm11,%xmm11 - vpmuludq 480(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm13,%xmm13 - vpmuludq 496(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm0,%xmm0 - vpmuludq 528(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm2,%xmm2 - vpmuludq 544(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm1,%xmm1 - vpmuludq 592(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm4,%xmm4 - vpmuludq 640(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm6,%xmm6 - vpmuludq 624(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm5,%xmm5 - vpmuludq 704(%rsp),%xmm9,%xmm9 - vpaddq %xmm9,%xmm8,%xmm8 - vpsrlq $25,%xmm4,%xmm3 - vpaddq %xmm3,%xmm6,%xmm6 - vpand curve25519_sandy2x_m25(%rip),%xmm4,%xmm4 - vpsrlq $26,%xmm11,%xmm3 - vpaddq %xmm3,%xmm13,%xmm13 - vpand curve25519_sandy2x_m26(%rip),%xmm11,%xmm11 - vpsrlq $26,%xmm6,%xmm3 - vpaddq %xmm3,%xmm5,%xmm5 - vpand curve25519_sandy2x_m26(%rip),%xmm6,%xmm6 - vpsrlq $25,%xmm13,%xmm3 - vpaddq %xmm3,%xmm0,%xmm0 - vpand curve25519_sandy2x_m25(%rip),%xmm13,%xmm13 - vpsrlq $25,%xmm5,%xmm3 - 
vpaddq %xmm3,%xmm8,%xmm8 - vpand curve25519_sandy2x_m25(%rip),%xmm5,%xmm5 - vpsrlq $26,%xmm0,%xmm3 - vpaddq %xmm3,%xmm2,%xmm2 - vpand curve25519_sandy2x_m26(%rip),%xmm0,%xmm0 - vpsrlq $26,%xmm8,%xmm3 - vpaddq %xmm3,%xmm7,%xmm7 - vpand curve25519_sandy2x_m26(%rip),%xmm8,%xmm8 - vpsrlq $25,%xmm2,%xmm3 - vpaddq %xmm3,%xmm1,%xmm1 - vpand curve25519_sandy2x_m25(%rip),%xmm2,%xmm2 - vpsrlq $25,%xmm7,%xmm3 - vpsllq $4,%xmm3,%xmm9 - vpaddq %xmm3,%xmm11,%xmm11 - vpsllq $1,%xmm3,%xmm3 - vpaddq %xmm3,%xmm9,%xmm9 - vpaddq %xmm9,%xmm11,%xmm11 - vpand curve25519_sandy2x_m25(%rip),%xmm7,%xmm7 - vpsrlq $26,%xmm1,%xmm3 - vpaddq %xmm3,%xmm4,%xmm4 - vpand curve25519_sandy2x_m26(%rip),%xmm1,%xmm1 - vpsrlq $26,%xmm11,%xmm3 - vpaddq %xmm3,%xmm13,%xmm13 - vpand curve25519_sandy2x_m26(%rip),%xmm11,%xmm11 - vpsrlq $25,%xmm4,%xmm3 - vpaddq %xmm3,%xmm6,%xmm6 - vpand curve25519_sandy2x_m25(%rip),%xmm4,%xmm4 - vpunpcklqdq %xmm13,%xmm11,%xmm3 - vpunpckhqdq %xmm13,%xmm11,%xmm9 - vpaddq curve25519_sandy2x_subc0(%rip),%xmm9,%xmm10 - vpsubq %xmm3,%xmm10,%xmm10 - vpaddq %xmm9,%xmm3,%xmm3 - vpunpckhqdq %xmm3,%xmm10,%xmm9 - vpunpcklqdq %xmm3,%xmm10,%xmm10 - vpmuludq %xmm10,%xmm10,%xmm3 - vpaddq %xmm10,%xmm10,%xmm10 - vpmuludq %xmm9,%xmm10,%xmm11 - vpunpcklqdq %xmm2,%xmm0,%xmm12 - vpunpckhqdq %xmm2,%xmm0,%xmm0 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm0,%xmm2 - vpsubq %xmm12,%xmm2,%xmm2 - vpaddq %xmm0,%xmm12,%xmm12 - vpunpckhqdq %xmm12,%xmm2,%xmm0 - vpunpcklqdq %xmm12,%xmm2,%xmm2 - vpmuludq %xmm2,%xmm10,%xmm12 - vpaddq %xmm9,%xmm9,%xmm13 - vpmuludq %xmm13,%xmm9,%xmm9 - vpaddq %xmm9,%xmm12,%xmm12 - vpmuludq %xmm0,%xmm10,%xmm9 - vpmuludq %xmm2,%xmm13,%xmm14 - vpaddq %xmm14,%xmm9,%xmm9 - vpunpcklqdq %xmm4,%xmm1,%xmm14 - vpunpckhqdq %xmm4,%xmm1,%xmm1 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm1,%xmm4 - vpsubq %xmm14,%xmm4,%xmm4 - vpaddq %xmm1,%xmm14,%xmm14 - vpunpckhqdq %xmm14,%xmm4,%xmm1 - vpunpcklqdq %xmm14,%xmm4,%xmm4 - vmovdqa %xmm1,0(%rsp) - vpaddq %xmm1,%xmm1,%xmm1 - vmovdqa %xmm1,16(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 - vmovdqa %xmm1,32(%rsp) - vpmuludq %xmm4,%xmm10,%xmm1 - vpmuludq %xmm2,%xmm2,%xmm14 - vpaddq %xmm14,%xmm1,%xmm1 - vpmuludq 0(%rsp),%xmm10,%xmm14 - vpmuludq %xmm4,%xmm13,%xmm15 - vpaddq %xmm15,%xmm14,%xmm14 - vpunpcklqdq %xmm5,%xmm6,%xmm15 - vpunpckhqdq %xmm5,%xmm6,%xmm5 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm5,%xmm6 - vpsubq %xmm15,%xmm6,%xmm6 - vpaddq %xmm5,%xmm15,%xmm15 - vpunpckhqdq %xmm15,%xmm6,%xmm5 - vpunpcklqdq %xmm15,%xmm6,%xmm6 - vmovdqa %xmm6,48(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm6,%xmm6 - vmovdqa %xmm6,64(%rsp) - vmovdqa %xmm5,80(%rsp) - vpmuludq curve25519_sandy2x_v38_38(%rip),%xmm5,%xmm5 - vmovdqa %xmm5,96(%rsp) - vpmuludq 48(%rsp),%xmm10,%xmm5 - vpaddq %xmm0,%xmm0,%xmm6 - vpmuludq %xmm6,%xmm0,%xmm0 - vpaddq %xmm0,%xmm5,%xmm5 - vpmuludq 80(%rsp),%xmm10,%xmm0 - vpmuludq %xmm4,%xmm6,%xmm15 - vpaddq %xmm15,%xmm0,%xmm0 - vpmuludq %xmm6,%xmm13,%xmm15 - vpaddq %xmm15,%xmm1,%xmm1 - vpmuludq %xmm6,%xmm2,%xmm15 - vpaddq %xmm15,%xmm14,%xmm14 - vpunpcklqdq %xmm7,%xmm8,%xmm15 - vpunpckhqdq %xmm7,%xmm8,%xmm7 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm7,%xmm8 - vpsubq %xmm15,%xmm8,%xmm8 - vpaddq %xmm7,%xmm15,%xmm15 - vpunpckhqdq %xmm15,%xmm8,%xmm7 - vpunpcklqdq %xmm15,%xmm8,%xmm8 - vmovdqa %xmm8,112(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm8,%xmm8 - vmovdqa %xmm8,448(%rsp) - vpmuludq 112(%rsp),%xmm10,%xmm8 - vpmuludq %xmm7,%xmm10,%xmm10 - vpmuludq curve25519_sandy2x_v38_38(%rip),%xmm7,%xmm15 - vpmuludq %xmm15,%xmm7,%xmm7 - vpaddq %xmm7,%xmm8,%xmm8 
- vpmuludq %xmm15,%xmm13,%xmm7 - vpaddq %xmm7,%xmm3,%xmm3 - vpmuludq %xmm15,%xmm2,%xmm7 - vpaddq %xmm7,%xmm11,%xmm11 - vpmuludq 80(%rsp),%xmm13,%xmm7 - vpaddq %xmm7,%xmm7,%xmm7 - vpaddq %xmm7,%xmm8,%xmm8 - vpmuludq 16(%rsp),%xmm13,%xmm7 - vpaddq %xmm7,%xmm5,%xmm5 - vpmuludq 48(%rsp),%xmm13,%xmm7 - vpaddq %xmm7,%xmm0,%xmm0 - vpmuludq 112(%rsp),%xmm13,%xmm7 - vpaddq %xmm7,%xmm10,%xmm10 - vpmuludq %xmm15,%xmm6,%xmm7 - vpaddq %xmm7,%xmm12,%xmm12 - vpmuludq %xmm15,%xmm4,%xmm7 - vpaddq %xmm7,%xmm9,%xmm9 - vpaddq %xmm2,%xmm2,%xmm2 - vpmuludq %xmm4,%xmm2,%xmm7 - vpaddq %xmm7,%xmm5,%xmm5 - vpmuludq 448(%rsp),%xmm2,%xmm7 - vpaddq %xmm7,%xmm3,%xmm3 - vpmuludq 448(%rsp),%xmm6,%xmm7 - vpaddq %xmm7,%xmm11,%xmm11 - vpmuludq 0(%rsp),%xmm2,%xmm7 - vpaddq %xmm7,%xmm0,%xmm0 - vpmuludq 48(%rsp),%xmm2,%xmm7 - vpaddq %xmm7,%xmm8,%xmm8 - vpmuludq 80(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpmuludq 96(%rsp),%xmm4,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpmuludq %xmm4,%xmm4,%xmm2 - vpaddq %xmm2,%xmm8,%xmm8 - vpaddq %xmm4,%xmm4,%xmm2 - vpmuludq 448(%rsp),%xmm2,%xmm4 - vpaddq %xmm4,%xmm12,%xmm12 - vpmuludq 16(%rsp),%xmm15,%xmm4 - vpaddq %xmm4,%xmm1,%xmm1 - vpmuludq 48(%rsp),%xmm15,%xmm4 - vpaddq %xmm4,%xmm14,%xmm14 - vpmuludq 96(%rsp),%xmm6,%xmm4 - vpaddq %xmm4,%xmm3,%xmm3 - vmovdqa 16(%rsp),%xmm4 - vpmuludq 448(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm9,%xmm9 - vpmuludq 16(%rsp),%xmm6,%xmm4 - vpaddq %xmm4,%xmm8,%xmm8 - vpmuludq 48(%rsp),%xmm6,%xmm4 - vpaddq %xmm4,%xmm10,%xmm10 - vpmuludq 80(%rsp),%xmm15,%xmm4 - vpaddq %xmm4,%xmm4,%xmm4 - vpaddq %xmm4,%xmm5,%xmm5 - vpmuludq 112(%rsp),%xmm15,%xmm4 - vpaddq %xmm4,%xmm0,%xmm0 - vmovdqa 48(%rsp),%xmm4 - vpaddq %xmm4,%xmm4,%xmm4 - vpmuludq 448(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm1,%xmm1 - vmovdqa 80(%rsp),%xmm4 - vpaddq %xmm4,%xmm4,%xmm4 - vpmuludq 448(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm14,%xmm14 - vpmuludq 64(%rsp),%xmm2,%xmm4 - vpaddq %xmm4,%xmm3,%xmm3 - vmovdqa 16(%rsp),%xmm4 - vpmuludq 64(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm11,%xmm11 - vmovdqa 16(%rsp),%xmm4 - vpmuludq 96(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm12,%xmm12 - vmovdqa 48(%rsp),%xmm4 - vpmuludq 96(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm9,%xmm9 - vpmuludq 0(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vmovdqa 32(%rsp),%xmm2 - vpmuludq 0(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm3,%xmm3 - vmovdqa 64(%rsp),%xmm2 - vpmuludq 48(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vmovdqa 96(%rsp),%xmm2 - vpmuludq 80(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm1,%xmm1 - vmovdqa 448(%rsp),%xmm2 - vpmuludq 112(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vpsrlq $26,%xmm3,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpand curve25519_sandy2x_m26(%rip),%xmm3,%xmm3 - vpsrlq $25,%xmm14,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vpand curve25519_sandy2x_m25(%rip),%xmm14,%xmm14 - vpsrlq $25,%xmm11,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vpand curve25519_sandy2x_m25(%rip),%xmm11,%xmm11 - vpsrlq $26,%xmm5,%xmm2 - vpaddq %xmm2,%xmm0,%xmm0 - vpand curve25519_sandy2x_m26(%rip),%xmm5,%xmm5 - vpsrlq $26,%xmm12,%xmm2 - vpaddq %xmm2,%xmm9,%xmm9 - vpand curve25519_sandy2x_m26(%rip),%xmm12,%xmm12 - vpsrlq $25,%xmm0,%xmm2 - vpaddq %xmm2,%xmm8,%xmm8 - vpand curve25519_sandy2x_m25(%rip),%xmm0,%xmm0 - vpsrlq $25,%xmm9,%xmm2 - vpaddq %xmm2,%xmm1,%xmm1 - vpand curve25519_sandy2x_m25(%rip),%xmm9,%xmm9 - vpsrlq $26,%xmm8,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpand curve25519_sandy2x_m26(%rip),%xmm8,%xmm8 - vpsrlq $26,%xmm1,%xmm2 - vpaddq %xmm2,%xmm14,%xmm14 - vpand curve25519_sandy2x_m26(%rip),%xmm1,%xmm1 - vpsrlq $25,%xmm10,%xmm2 - vpsllq $4,%xmm2,%xmm4 - vpaddq 
%xmm2,%xmm3,%xmm3 - vpsllq $1,%xmm2,%xmm2 - vpaddq %xmm2,%xmm4,%xmm4 - vpaddq %xmm4,%xmm3,%xmm3 - vpand curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 - vpsrlq $25,%xmm14,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vpand curve25519_sandy2x_m25(%rip),%xmm14,%xmm14 - vpsrlq $26,%xmm3,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpand curve25519_sandy2x_m26(%rip),%xmm3,%xmm3 - vpunpckhqdq %xmm11,%xmm3,%xmm2 - vmovdqa %xmm2,0(%rsp) - vpshufd $0,%xmm3,%xmm2 - vpshufd $0,%xmm11,%xmm3 - vpmuludq 160(%rsp),%xmm2,%xmm4 - vpmuludq 432(%rsp),%xmm3,%xmm6 - vpaddq %xmm6,%xmm4,%xmm4 - vpmuludq 176(%rsp),%xmm2,%xmm6 - vpmuludq 304(%rsp),%xmm3,%xmm7 - vpaddq %xmm7,%xmm6,%xmm6 - vpmuludq 208(%rsp),%xmm2,%xmm7 - vpmuludq 336(%rsp),%xmm3,%xmm11 - vpaddq %xmm11,%xmm7,%xmm7 - vpmuludq 240(%rsp),%xmm2,%xmm11 - vpmuludq 368(%rsp),%xmm3,%xmm13 - vpaddq %xmm13,%xmm11,%xmm11 - vpmuludq 272(%rsp),%xmm2,%xmm2 - vpmuludq 400(%rsp),%xmm3,%xmm3 - vpaddq %xmm3,%xmm2,%xmm2 - vpunpckhqdq %xmm9,%xmm12,%xmm3 - vmovdqa %xmm3,16(%rsp) - vpshufd $0,%xmm12,%xmm3 - vpshufd $0,%xmm9,%xmm9 - vpmuludq 288(%rsp),%xmm3,%xmm12 - vpaddq %xmm12,%xmm4,%xmm4 - vpmuludq 416(%rsp),%xmm9,%xmm12 - vpaddq %xmm12,%xmm4,%xmm4 - vpmuludq 160(%rsp),%xmm3,%xmm12 - vpaddq %xmm12,%xmm6,%xmm6 - vpmuludq 432(%rsp),%xmm9,%xmm12 - vpaddq %xmm12,%xmm6,%xmm6 - vpmuludq 176(%rsp),%xmm3,%xmm12 - vpaddq %xmm12,%xmm7,%xmm7 - vpmuludq 304(%rsp),%xmm9,%xmm12 - vpaddq %xmm12,%xmm7,%xmm7 - vpmuludq 208(%rsp),%xmm3,%xmm12 - vpaddq %xmm12,%xmm11,%xmm11 - vpmuludq 336(%rsp),%xmm9,%xmm12 - vpaddq %xmm12,%xmm11,%xmm11 - vpmuludq 240(%rsp),%xmm3,%xmm3 - vpaddq %xmm3,%xmm2,%xmm2 - vpmuludq 368(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm2,%xmm2 - vpunpckhqdq %xmm14,%xmm1,%xmm3 - vmovdqa %xmm3,32(%rsp) - vpshufd $0,%xmm1,%xmm1 - vpshufd $0,%xmm14,%xmm3 - vpmuludq 256(%rsp),%xmm1,%xmm9 - vpaddq %xmm9,%xmm4,%xmm4 - vpmuludq 384(%rsp),%xmm3,%xmm9 - vpaddq %xmm9,%xmm4,%xmm4 - vpmuludq 288(%rsp),%xmm1,%xmm9 - vpaddq %xmm9,%xmm6,%xmm6 - vpmuludq 416(%rsp),%xmm3,%xmm9 - vpaddq %xmm9,%xmm6,%xmm6 - vpmuludq 160(%rsp),%xmm1,%xmm9 - vpaddq %xmm9,%xmm7,%xmm7 - vpmuludq 432(%rsp),%xmm3,%xmm9 - vpaddq %xmm9,%xmm7,%xmm7 - vpmuludq 176(%rsp),%xmm1,%xmm9 - vpaddq %xmm9,%xmm11,%xmm11 - vpmuludq 304(%rsp),%xmm3,%xmm9 - vpaddq %xmm9,%xmm11,%xmm11 - vpmuludq 208(%rsp),%xmm1,%xmm1 - vpaddq %xmm1,%xmm2,%xmm2 - vpmuludq 336(%rsp),%xmm3,%xmm1 - vpaddq %xmm1,%xmm2,%xmm2 - vpunpckhqdq %xmm0,%xmm5,%xmm1 - vmovdqa %xmm1,48(%rsp) - vpshufd $0,%xmm5,%xmm1 - vpshufd $0,%xmm0,%xmm0 - vpmuludq 224(%rsp),%xmm1,%xmm3 - vpaddq %xmm3,%xmm4,%xmm4 - vpmuludq 352(%rsp),%xmm0,%xmm3 - vpaddq %xmm3,%xmm4,%xmm4 - vpmuludq 256(%rsp),%xmm1,%xmm3 - vpaddq %xmm3,%xmm6,%xmm6 - vpmuludq 384(%rsp),%xmm0,%xmm3 - vpaddq %xmm3,%xmm6,%xmm6 - vpmuludq 288(%rsp),%xmm1,%xmm3 - vpaddq %xmm3,%xmm7,%xmm7 - vpmuludq 416(%rsp),%xmm0,%xmm3 - vpaddq %xmm3,%xmm7,%xmm7 - vpmuludq 160(%rsp),%xmm1,%xmm3 - vpaddq %xmm3,%xmm11,%xmm11 - vpmuludq 432(%rsp),%xmm0,%xmm3 - vpaddq %xmm3,%xmm11,%xmm11 - vpmuludq 176(%rsp),%xmm1,%xmm1 - vpaddq %xmm1,%xmm2,%xmm2 - vpmuludq 304(%rsp),%xmm0,%xmm0 - vpaddq %xmm0,%xmm2,%xmm2 - vpunpckhqdq %xmm10,%xmm8,%xmm0 - vmovdqa %xmm0,64(%rsp) - vpshufd $0,%xmm8,%xmm0 - vpshufd $0,%xmm10,%xmm1 - vpmuludq 192(%rsp),%xmm0,%xmm3 - vpaddq %xmm3,%xmm4,%xmm4 - vpmuludq 320(%rsp),%xmm1,%xmm3 - vpaddq %xmm3,%xmm4,%xmm4 - vpmuludq 224(%rsp),%xmm0,%xmm3 - vpaddq %xmm3,%xmm6,%xmm6 - vpmuludq 352(%rsp),%xmm1,%xmm3 - vpaddq %xmm3,%xmm6,%xmm6 - vpmuludq 256(%rsp),%xmm0,%xmm3 - vpaddq %xmm3,%xmm7,%xmm7 - vpmuludq 384(%rsp),%xmm1,%xmm3 - vpaddq 
%xmm3,%xmm7,%xmm7 - vpmuludq 288(%rsp),%xmm0,%xmm3 - vpaddq %xmm3,%xmm11,%xmm11 - vpmuludq 416(%rsp),%xmm1,%xmm3 - vpaddq %xmm3,%xmm11,%xmm11 - vpmuludq 160(%rsp),%xmm0,%xmm0 - vpaddq %xmm0,%xmm2,%xmm2 - vpmuludq 432(%rsp),%xmm1,%xmm0 - vpaddq %xmm0,%xmm2,%xmm2 - vmovdqa %xmm4,80(%rsp) - vmovdqa %xmm6,96(%rsp) - vmovdqa %xmm7,112(%rsp) - vmovdqa %xmm11,448(%rsp) - vmovdqa %xmm2,496(%rsp) - vmovdqa 144(%rsp),%xmm0 - vpmuludq %xmm0,%xmm0,%xmm1 - vpaddq %xmm0,%xmm0,%xmm0 - vmovdqa 128(%rsp),%xmm2 - vpmuludq %xmm2,%xmm0,%xmm3 - vmovdqa 480(%rsp),%xmm4 - vpmuludq %xmm4,%xmm0,%xmm5 - vmovdqa 464(%rsp),%xmm6 - vpmuludq %xmm6,%xmm0,%xmm7 - vmovdqa 528(%rsp),%xmm8 - vpmuludq %xmm8,%xmm0,%xmm9 - vpmuludq 512(%rsp),%xmm0,%xmm10 - vpmuludq 592(%rsp),%xmm0,%xmm11 - vpmuludq 576(%rsp),%xmm0,%xmm12 - vpmuludq 624(%rsp),%xmm0,%xmm13 - vmovdqa 672(%rsp),%xmm14 - vpmuludq %xmm14,%xmm0,%xmm0 - vpmuludq curve25519_sandy2x_v38_38(%rip),%xmm14,%xmm15 - vpmuludq %xmm15,%xmm14,%xmm14 - vpaddq %xmm14,%xmm13,%xmm13 - vpaddq %xmm6,%xmm6,%xmm14 - vpmuludq %xmm14,%xmm6,%xmm6 - vpaddq %xmm6,%xmm11,%xmm11 - vpaddq %xmm2,%xmm2,%xmm6 - vpmuludq %xmm6,%xmm2,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vpmuludq %xmm15,%xmm6,%xmm2 - vpaddq %xmm2,%xmm1,%xmm1 - vpmuludq %xmm15,%xmm4,%xmm2 - vpaddq %xmm2,%xmm3,%xmm3 - vpmuludq 544(%rsp),%xmm6,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpmuludq 592(%rsp),%xmm6,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vpmuludq 640(%rsp),%xmm6,%xmm2 - vpaddq %xmm2,%xmm13,%xmm13 - vpmuludq 624(%rsp),%xmm6,%xmm2 - vpaddq %xmm2,%xmm0,%xmm0 - vpmuludq %xmm4,%xmm6,%xmm2 - vpaddq %xmm2,%xmm7,%xmm7 - vpmuludq %xmm14,%xmm6,%xmm2 - vpaddq %xmm2,%xmm9,%xmm9 - vpmuludq %xmm8,%xmm6,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpmuludq %xmm15,%xmm14,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vpmuludq %xmm15,%xmm8,%xmm2 - vpaddq %xmm2,%xmm7,%xmm7 - vpmuludq %xmm4,%xmm4,%xmm2 - vpaddq %xmm2,%xmm9,%xmm9 - vpmuludq %xmm14,%xmm4,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpaddq %xmm4,%xmm4,%xmm2 - vpmuludq %xmm8,%xmm2,%xmm4 - vpaddq %xmm4,%xmm11,%xmm11 - vpmuludq 688(%rsp),%xmm2,%xmm4 - vpaddq %xmm4,%xmm1,%xmm1 - vpmuludq 688(%rsp),%xmm14,%xmm4 - vpaddq %xmm4,%xmm3,%xmm3 - vpmuludq 512(%rsp),%xmm2,%xmm4 - vpaddq %xmm4,%xmm12,%xmm12 - vpmuludq 592(%rsp),%xmm2,%xmm4 - vpaddq %xmm4,%xmm13,%xmm13 - vpmuludq 576(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm0,%xmm0 - vpmuludq 656(%rsp),%xmm8,%xmm2 - vpaddq %xmm2,%xmm3,%xmm3 - vpmuludq %xmm8,%xmm14,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vpmuludq %xmm8,%xmm8,%xmm2 - vpaddq %xmm2,%xmm13,%xmm13 - vpaddq %xmm8,%xmm8,%xmm2 - vpmuludq 688(%rsp),%xmm2,%xmm4 - vpaddq %xmm4,%xmm5,%xmm5 - vpmuludq 544(%rsp),%xmm15,%xmm4 - vpaddq %xmm4,%xmm9,%xmm9 - vpmuludq 592(%rsp),%xmm15,%xmm4 - vpaddq %xmm4,%xmm10,%xmm10 - vpmuludq 656(%rsp),%xmm14,%xmm4 - vpaddq %xmm4,%xmm1,%xmm1 - vmovdqa 544(%rsp),%xmm4 - vpmuludq 688(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm7,%xmm7 - vpmuludq 544(%rsp),%xmm14,%xmm4 - vpaddq %xmm4,%xmm13,%xmm13 - vpmuludq 592(%rsp),%xmm14,%xmm4 - vpaddq %xmm4,%xmm0,%xmm0 - vpmuludq 640(%rsp),%xmm15,%xmm4 - vpaddq %xmm4,%xmm11,%xmm11 - vpmuludq 624(%rsp),%xmm15,%xmm4 - vpaddq %xmm4,%xmm12,%xmm12 - vmovdqa 592(%rsp),%xmm4 - vpaddq %xmm4,%xmm4,%xmm4 - vpmuludq 688(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm9,%xmm9 - vpmuludq 608(%rsp),%xmm2,%xmm4 - vpaddq %xmm4,%xmm1,%xmm1 - vmovdqa 544(%rsp),%xmm4 - vpmuludq 608(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm3,%xmm3 - vmovdqa 544(%rsp),%xmm4 - vpmuludq 656(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm5,%xmm5 - vmovdqa 592(%rsp),%xmm4 - vpmuludq 656(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm7,%xmm7 - vmovdqa 
640(%rsp),%xmm4 - vpmuludq 688(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm10,%xmm10 - vpmuludq 512(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm0,%xmm0 - vmovdqa 560(%rsp),%xmm2 - vpmuludq 512(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm1,%xmm1 - vmovdqa 608(%rsp),%xmm2 - vpmuludq 592(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vmovdqa 656(%rsp),%xmm2 - vpmuludq 576(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm9,%xmm9 - vmovdqa 688(%rsp),%xmm2 - vpmuludq 624(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpsrlq $26,%xmm1,%xmm2 - vpaddq %xmm2,%xmm3,%xmm3 - vpand curve25519_sandy2x_m26(%rip),%xmm1,%xmm1 - vpsrlq $25,%xmm10,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpand curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 - vpsrlq $25,%xmm3,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vpand curve25519_sandy2x_m25(%rip),%xmm3,%xmm3 - vpsrlq $26,%xmm11,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vpand curve25519_sandy2x_m26(%rip),%xmm11,%xmm11 - vpsrlq $26,%xmm5,%xmm2 - vpaddq %xmm2,%xmm7,%xmm7 - vpand curve25519_sandy2x_m26(%rip),%xmm5,%xmm5 - vpsrlq $25,%xmm12,%xmm2 - vpaddq %xmm2,%xmm13,%xmm13 - vpand curve25519_sandy2x_m25(%rip),%xmm12,%xmm12 - vpsrlq $25,%xmm7,%xmm2 - vpaddq %xmm2,%xmm9,%xmm9 - vpand curve25519_sandy2x_m25(%rip),%xmm7,%xmm7 - vpsrlq $26,%xmm13,%xmm2 - vpaddq %xmm2,%xmm0,%xmm0 - vpand curve25519_sandy2x_m26(%rip),%xmm13,%xmm13 - vpsrlq $26,%xmm9,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpand curve25519_sandy2x_m26(%rip),%xmm9,%xmm9 - vpsrlq $25,%xmm0,%xmm2 - vpsllq $4,%xmm2,%xmm4 - vpaddq %xmm2,%xmm1,%xmm1 - vpsllq $1,%xmm2,%xmm2 - vpaddq %xmm2,%xmm4,%xmm4 - vpaddq %xmm4,%xmm1,%xmm1 - vpand curve25519_sandy2x_m25(%rip),%xmm0,%xmm0 - vpsrlq $25,%xmm10,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpand curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 - vpsrlq $26,%xmm1,%xmm2 - vpaddq %xmm2,%xmm3,%xmm3 - vpand curve25519_sandy2x_m26(%rip),%xmm1,%xmm1 - vpunpckhqdq %xmm3,%xmm1,%xmm2 - vpunpcklqdq %xmm3,%xmm1,%xmm1 - vmovdqa %xmm1,464(%rsp) - vpaddq curve25519_sandy2x_subc0(%rip),%xmm2,%xmm3 - vpsubq %xmm1,%xmm3,%xmm3 - vpunpckhqdq %xmm3,%xmm2,%xmm1 - vpunpcklqdq %xmm3,%xmm2,%xmm2 - vmovdqa %xmm2,480(%rsp) - vmovdqa %xmm1,512(%rsp) - vpsllq $1,%xmm1,%xmm1 - vmovdqa %xmm1,528(%rsp) - vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm3,%xmm3 - vmovdqa 80(%rsp),%xmm1 - vpunpcklqdq %xmm1,%xmm3,%xmm2 - vpunpckhqdq %xmm1,%xmm3,%xmm1 - vpunpckhqdq %xmm7,%xmm5,%xmm3 - vpunpcklqdq %xmm7,%xmm5,%xmm4 - vmovdqa %xmm4,544(%rsp) - vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm5 - vpsubq %xmm4,%xmm5,%xmm5 - vpunpckhqdq %xmm5,%xmm3,%xmm4 - vpunpcklqdq %xmm5,%xmm3,%xmm3 - vmovdqa %xmm3,560(%rsp) - vmovdqa %xmm4,576(%rsp) - vpsllq $1,%xmm4,%xmm4 - vmovdqa %xmm4,592(%rsp) - vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm5,%xmm5 - vmovdqa 96(%rsp),%xmm3 - vpunpcklqdq %xmm3,%xmm5,%xmm4 - vpunpckhqdq %xmm3,%xmm5,%xmm3 - vpunpckhqdq %xmm10,%xmm9,%xmm5 - vpunpcklqdq %xmm10,%xmm9,%xmm6 - vmovdqa %xmm6,608(%rsp) - vpaddq curve25519_sandy2x_subc2(%rip),%xmm5,%xmm7 - vpsubq %xmm6,%xmm7,%xmm7 - vpunpckhqdq %xmm7,%xmm5,%xmm6 - vpunpcklqdq %xmm7,%xmm5,%xmm5 - vmovdqa %xmm5,624(%rsp) - vmovdqa %xmm6,640(%rsp) - vpsllq $1,%xmm6,%xmm6 - vmovdqa %xmm6,656(%rsp) - vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm7,%xmm7 - vmovdqa 112(%rsp),%xmm5 - vpunpcklqdq %xmm5,%xmm7,%xmm6 - vpunpckhqdq %xmm5,%xmm7,%xmm5 - vpunpckhqdq %xmm12,%xmm11,%xmm7 - vpunpcklqdq %xmm12,%xmm11,%xmm8 - vmovdqa %xmm8,672(%rsp) - vpaddq curve25519_sandy2x_subc2(%rip),%xmm7,%xmm9 - vpsubq %xmm8,%xmm9,%xmm9 - vpunpckhqdq %xmm9,%xmm7,%xmm8 - vpunpcklqdq %xmm9,%xmm7,%xmm7 - vmovdqa %xmm7,688(%rsp) - 
vmovdqa %xmm8,704(%rsp) - vpsllq $1,%xmm8,%xmm8 - vmovdqa %xmm8,720(%rsp) - vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm9,%xmm9 - vmovdqa 448(%rsp),%xmm7 - vpunpcklqdq %xmm7,%xmm9,%xmm8 - vpunpckhqdq %xmm7,%xmm9,%xmm7 - vpunpckhqdq %xmm0,%xmm13,%xmm9 - vpunpcklqdq %xmm0,%xmm13,%xmm0 - vmovdqa %xmm0,448(%rsp) - vpaddq curve25519_sandy2x_subc2(%rip),%xmm9,%xmm10 - vpsubq %xmm0,%xmm10,%xmm10 - vpunpckhqdq %xmm10,%xmm9,%xmm0 - vpunpcklqdq %xmm10,%xmm9,%xmm9 - vmovdqa %xmm9,736(%rsp) - vmovdqa %xmm0,752(%rsp) - vpsllq $1,%xmm0,%xmm0 - vmovdqa %xmm0,768(%rsp) - vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm10,%xmm10 - vmovdqa 496(%rsp),%xmm0 - vpunpcklqdq %xmm0,%xmm10,%xmm9 - vpunpckhqdq %xmm0,%xmm10,%xmm0 - vpsrlq $26,%xmm2,%xmm10 - vpaddq %xmm10,%xmm1,%xmm1 - vpand curve25519_sandy2x_m26(%rip),%xmm2,%xmm2 - vpsrlq $25,%xmm5,%xmm10 - vpaddq %xmm10,%xmm8,%xmm8 - vpand curve25519_sandy2x_m25(%rip),%xmm5,%xmm5 - vpsrlq $25,%xmm1,%xmm10 - vpaddq %xmm10,%xmm4,%xmm4 - vpand curve25519_sandy2x_m25(%rip),%xmm1,%xmm1 - vpsrlq $26,%xmm8,%xmm10 - vpaddq %xmm10,%xmm7,%xmm7 - vpand curve25519_sandy2x_m26(%rip),%xmm8,%xmm8 - vpsrlq $26,%xmm4,%xmm10 - vpaddq %xmm10,%xmm3,%xmm3 - vpand curve25519_sandy2x_m26(%rip),%xmm4,%xmm4 - vpsrlq $25,%xmm7,%xmm10 - vpaddq %xmm10,%xmm9,%xmm9 - vpand curve25519_sandy2x_m25(%rip),%xmm7,%xmm7 - vpsrlq $25,%xmm3,%xmm10 - vpaddq %xmm10,%xmm6,%xmm6 - vpand curve25519_sandy2x_m25(%rip),%xmm3,%xmm3 - vpsrlq $26,%xmm9,%xmm10 - vpaddq %xmm10,%xmm0,%xmm0 - vpand curve25519_sandy2x_m26(%rip),%xmm9,%xmm9 - vpsrlq $26,%xmm6,%xmm10 - vpaddq %xmm10,%xmm5,%xmm5 - vpand curve25519_sandy2x_m26(%rip),%xmm6,%xmm6 - vpsrlq $25,%xmm0,%xmm10 - vpsllq $4,%xmm10,%xmm11 - vpaddq %xmm10,%xmm2,%xmm2 - vpsllq $1,%xmm10,%xmm10 - vpaddq %xmm10,%xmm11,%xmm11 - vpaddq %xmm11,%xmm2,%xmm2 - vpand curve25519_sandy2x_m25(%rip),%xmm0,%xmm0 - vpsrlq $25,%xmm5,%xmm10 - vpaddq %xmm10,%xmm8,%xmm8 - vpand curve25519_sandy2x_m25(%rip),%xmm5,%xmm5 - vpsrlq $26,%xmm2,%xmm10 - vpaddq %xmm10,%xmm1,%xmm1 - vpand curve25519_sandy2x_m26(%rip),%xmm2,%xmm2 - vpunpckhqdq %xmm1,%xmm2,%xmm10 - vmovdqa %xmm10,80(%rsp) - vpunpcklqdq %xmm1,%xmm2,%xmm1 - vpunpckhqdq %xmm3,%xmm4,%xmm2 - vmovdqa %xmm2,96(%rsp) - vpunpcklqdq %xmm3,%xmm4,%xmm2 - vpunpckhqdq %xmm5,%xmm6,%xmm3 - vmovdqa %xmm3,112(%rsp) - vpunpcklqdq %xmm5,%xmm6,%xmm3 - vpunpckhqdq %xmm7,%xmm8,%xmm4 - vmovdqa %xmm4,128(%rsp) - vpunpcklqdq %xmm7,%xmm8,%xmm4 - vpunpckhqdq %xmm0,%xmm9,%xmm5 - vmovdqa %xmm5,144(%rsp) - vpunpcklqdq %xmm0,%xmm9,%xmm0 - vmovdqa 464(%rsp),%xmm5 - vpaddq %xmm5,%xmm1,%xmm1 - vpunpcklqdq %xmm1,%xmm5,%xmm6 - vpunpckhqdq %xmm1,%xmm5,%xmm1 - vpmuludq 512(%rsp),%xmm6,%xmm5 - vpmuludq 480(%rsp),%xmm1,%xmm7 - vpaddq %xmm7,%xmm5,%xmm5 - vpmuludq 560(%rsp),%xmm6,%xmm7 - vpmuludq 528(%rsp),%xmm1,%xmm8 - vpaddq %xmm8,%xmm7,%xmm7 - vpmuludq 576(%rsp),%xmm6,%xmm8 - vpmuludq 560(%rsp),%xmm1,%xmm9 - vpaddq %xmm9,%xmm8,%xmm8 - vpmuludq 624(%rsp),%xmm6,%xmm9 - vpmuludq 592(%rsp),%xmm1,%xmm10 - vpaddq %xmm10,%xmm9,%xmm9 - vpmuludq 640(%rsp),%xmm6,%xmm10 - vpmuludq 624(%rsp),%xmm1,%xmm11 - vpaddq %xmm11,%xmm10,%xmm10 - vpmuludq 688(%rsp),%xmm6,%xmm11 - vpmuludq 656(%rsp),%xmm1,%xmm12 - vpaddq %xmm12,%xmm11,%xmm11 - vpmuludq 704(%rsp),%xmm6,%xmm12 - vpmuludq 688(%rsp),%xmm1,%xmm13 - vpaddq %xmm13,%xmm12,%xmm12 - vpmuludq 736(%rsp),%xmm6,%xmm13 - vpmuludq 720(%rsp),%xmm1,%xmm14 - vpaddq %xmm14,%xmm13,%xmm13 - vpmuludq 752(%rsp),%xmm6,%xmm14 - vpmuludq 736(%rsp),%xmm1,%xmm15 - vpaddq %xmm15,%xmm14,%xmm14 - vpmuludq 480(%rsp),%xmm6,%xmm6 - vpmuludq 
curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 - vpmuludq 768(%rsp),%xmm1,%xmm1 - vpaddq %xmm1,%xmm6,%xmm6 - vmovdqa 544(%rsp),%xmm1 - vpaddq %xmm1,%xmm2,%xmm2 - vpunpcklqdq %xmm2,%xmm1,%xmm15 - vpunpckhqdq %xmm2,%xmm1,%xmm1 - vpmuludq 480(%rsp),%xmm15,%xmm2 - vpaddq %xmm2,%xmm7,%xmm7 - vpmuludq 512(%rsp),%xmm15,%xmm2 - vpaddq %xmm2,%xmm8,%xmm8 - vpmuludq 560(%rsp),%xmm15,%xmm2 - vpaddq %xmm2,%xmm9,%xmm9 - vpmuludq 576(%rsp),%xmm15,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpmuludq 624(%rsp),%xmm15,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpmuludq 640(%rsp),%xmm15,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vpmuludq 688(%rsp),%xmm15,%xmm2 - vpaddq %xmm2,%xmm13,%xmm13 - vpmuludq 704(%rsp),%xmm15,%xmm2 - vpaddq %xmm2,%xmm14,%xmm14 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm15,%xmm15 - vpmuludq 736(%rsp),%xmm15,%xmm2 - vpaddq %xmm2,%xmm6,%xmm6 - vpmuludq 752(%rsp),%xmm15,%xmm15 - vpaddq %xmm15,%xmm5,%xmm5 - vpmuludq 480(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm8,%xmm8 - vpmuludq 528(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm9,%xmm9 - vpmuludq 560(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpmuludq 592(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpmuludq 624(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vpmuludq 656(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm13,%xmm13 - vpmuludq 688(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm14,%xmm14 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 - vpmuludq 720(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm6,%xmm6 - vpmuludq 736(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vpmuludq 768(%rsp),%xmm1,%xmm1 - vpaddq %xmm1,%xmm7,%xmm7 - vmovdqa 608(%rsp),%xmm1 - vpaddq %xmm1,%xmm3,%xmm3 - vpunpcklqdq %xmm3,%xmm1,%xmm2 - vpunpckhqdq %xmm3,%xmm1,%xmm1 - vpmuludq 480(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm9,%xmm9 - vpmuludq 512(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm10,%xmm10 - vpmuludq 560(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm11,%xmm11 - vpmuludq 576(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm12,%xmm12 - vpmuludq 624(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm13,%xmm13 - vpmuludq 640(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm14,%xmm14 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm2,%xmm2 - vpmuludq 688(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm6,%xmm6 - vpmuludq 704(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm5,%xmm5 - vpmuludq 736(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm7,%xmm7 - vpmuludq 752(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm8,%xmm8 - vpmuludq 480(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpmuludq 528(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpmuludq 560(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vpmuludq 592(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm13,%xmm13 - vpmuludq 624(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm14,%xmm14 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 - vpmuludq 656(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm6,%xmm6 - vpmuludq 688(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vpmuludq 720(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm7,%xmm7 - vpmuludq 736(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm8,%xmm8 - vpmuludq 768(%rsp),%xmm1,%xmm1 - vpaddq %xmm1,%xmm9,%xmm9 - vmovdqa 672(%rsp),%xmm1 - vpaddq %xmm1,%xmm4,%xmm4 - vpunpcklqdq %xmm4,%xmm1,%xmm2 - vpunpckhqdq %xmm4,%xmm1,%xmm1 - vpmuludq 480(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm11,%xmm11 - vpmuludq 512(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm12,%xmm12 - vpmuludq 560(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm13,%xmm13 - vpmuludq 576(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm14,%xmm14 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm2,%xmm2 - vpmuludq 624(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm6,%xmm6 - vpmuludq 640(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm5,%xmm5 - 
vpmuludq 688(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm7,%xmm7 - vpmuludq 704(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm8,%xmm8 - vpmuludq 736(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm9,%xmm9 - vpmuludq 752(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpmuludq 480(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vpmuludq 528(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm13,%xmm13 - vpmuludq 560(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm14,%xmm14 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 - vpmuludq 592(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm6,%xmm6 - vpmuludq 624(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vpmuludq 656(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm7,%xmm7 - vpmuludq 688(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm8,%xmm8 - vpmuludq 720(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm9,%xmm9 - vpmuludq 736(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpmuludq 768(%rsp),%xmm1,%xmm1 - vpaddq %xmm1,%xmm11,%xmm11 - vmovdqa 448(%rsp),%xmm1 - vpaddq %xmm1,%xmm0,%xmm0 - vpunpcklqdq %xmm0,%xmm1,%xmm2 - vpunpckhqdq %xmm0,%xmm1,%xmm0 - vpmuludq 480(%rsp),%xmm2,%xmm1 - vpaddq %xmm1,%xmm13,%xmm13 - vpmuludq 512(%rsp),%xmm2,%xmm1 - vpaddq %xmm1,%xmm14,%xmm14 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm2,%xmm2 - vpmuludq 560(%rsp),%xmm2,%xmm1 - vpaddq %xmm1,%xmm6,%xmm6 - vpmuludq 576(%rsp),%xmm2,%xmm1 - vpaddq %xmm1,%xmm5,%xmm5 - vpmuludq 624(%rsp),%xmm2,%xmm1 - vpaddq %xmm1,%xmm7,%xmm7 - vpmuludq 640(%rsp),%xmm2,%xmm1 - vpaddq %xmm1,%xmm8,%xmm8 - vpmuludq 688(%rsp),%xmm2,%xmm1 - vpaddq %xmm1,%xmm9,%xmm9 - vpmuludq 704(%rsp),%xmm2,%xmm1 - vpaddq %xmm1,%xmm10,%xmm10 - vpmuludq 736(%rsp),%xmm2,%xmm1 - vpaddq %xmm1,%xmm11,%xmm11 - vpmuludq 752(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vpmuludq 480(%rsp),%xmm0,%xmm1 - vpaddq %xmm1,%xmm14,%xmm14 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm0,%xmm0 - vpmuludq 528(%rsp),%xmm0,%xmm1 - vpaddq %xmm1,%xmm6,%xmm6 - vpmuludq 560(%rsp),%xmm0,%xmm1 - vpaddq %xmm1,%xmm5,%xmm5 - vpmuludq 592(%rsp),%xmm0,%xmm1 - vpaddq %xmm1,%xmm7,%xmm7 - vpmuludq 624(%rsp),%xmm0,%xmm1 - vpaddq %xmm1,%xmm8,%xmm8 - vpmuludq 656(%rsp),%xmm0,%xmm1 - vpaddq %xmm1,%xmm9,%xmm9 - vpmuludq 688(%rsp),%xmm0,%xmm1 - vpaddq %xmm1,%xmm10,%xmm10 - vpmuludq 720(%rsp),%xmm0,%xmm1 - vpaddq %xmm1,%xmm11,%xmm11 - vpmuludq 736(%rsp),%xmm0,%xmm1 - vpaddq %xmm1,%xmm12,%xmm12 - vpmuludq 768(%rsp),%xmm0,%xmm0 - vpaddq %xmm0,%xmm13,%xmm13 - vpsrlq $26,%xmm6,%xmm0 - vpaddq %xmm0,%xmm5,%xmm5 - vpand curve25519_sandy2x_m26(%rip),%xmm6,%xmm6 - vpsrlq $25,%xmm10,%xmm0 - vpaddq %xmm0,%xmm11,%xmm11 - vpand curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 - vpsrlq $25,%xmm5,%xmm0 - vpaddq %xmm0,%xmm7,%xmm7 - vpand curve25519_sandy2x_m25(%rip),%xmm5,%xmm5 - vpsrlq $26,%xmm11,%xmm0 - vpaddq %xmm0,%xmm12,%xmm12 - vpand curve25519_sandy2x_m26(%rip),%xmm11,%xmm11 - vpsrlq $26,%xmm7,%xmm0 - vpaddq %xmm0,%xmm8,%xmm8 - vpand curve25519_sandy2x_m26(%rip),%xmm7,%xmm7 - vpsrlq $25,%xmm12,%xmm0 - vpaddq %xmm0,%xmm13,%xmm13 - vpand curve25519_sandy2x_m25(%rip),%xmm12,%xmm12 - vpsrlq $25,%xmm8,%xmm0 - vpaddq %xmm0,%xmm9,%xmm9 - vpand curve25519_sandy2x_m25(%rip),%xmm8,%xmm8 - vpsrlq $26,%xmm13,%xmm0 - vpaddq %xmm0,%xmm14,%xmm14 - vpand curve25519_sandy2x_m26(%rip),%xmm13,%xmm13 - vpsrlq $26,%xmm9,%xmm0 - vpaddq %xmm0,%xmm10,%xmm10 - vpand curve25519_sandy2x_m26(%rip),%xmm9,%xmm9 - vpsrlq $25,%xmm14,%xmm0 - vpsllq $4,%xmm0,%xmm1 - vpaddq %xmm0,%xmm6,%xmm6 - vpsllq $1,%xmm0,%xmm0 - vpaddq %xmm0,%xmm1,%xmm1 - vpaddq %xmm1,%xmm6,%xmm6 - vpand curve25519_sandy2x_m25(%rip),%xmm14,%xmm14 - vpsrlq $25,%xmm10,%xmm0 - vpaddq %xmm0,%xmm11,%xmm11 - vpand 
curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 - vpsrlq $26,%xmm6,%xmm0 - vpaddq %xmm0,%xmm5,%xmm5 - vpand curve25519_sandy2x_m26(%rip),%xmm6,%xmm6 - vpunpckhqdq %xmm5,%xmm6,%xmm1 - vpunpcklqdq %xmm5,%xmm6,%xmm0 - vpunpckhqdq %xmm8,%xmm7,%xmm3 - vpunpcklqdq %xmm8,%xmm7,%xmm2 - vpunpckhqdq %xmm10,%xmm9,%xmm5 - vpunpcklqdq %xmm10,%xmm9,%xmm4 - vpunpckhqdq %xmm12,%xmm11,%xmm7 - vpunpcklqdq %xmm12,%xmm11,%xmm6 - vpunpckhqdq %xmm14,%xmm13,%xmm9 - vpunpcklqdq %xmm14,%xmm13,%xmm8 - cmp $0,%rdx - jne .Lladder_loop - vmovdqu %xmm1,160(%rdi) - vmovdqu %xmm0,80(%rdi) - vmovdqu %xmm3,176(%rdi) - vmovdqu %xmm2,96(%rdi) - vmovdqu %xmm5,192(%rdi) - vmovdqu %xmm4,112(%rdi) - vmovdqu %xmm7,208(%rdi) - vmovdqu %xmm6,128(%rdi) - vmovdqu %xmm9,224(%rdi) - vmovdqu %xmm8,144(%rdi) - movq 1824(%rsp),%r11 - movq 1832(%rsp),%r12 - movq 1840(%rsp),%r13 - movq 1848(%rsp),%r14 - leave - ret -ENDPROC(curve25519_sandy2x_ladder) - -.align 32 -ENTRY(curve25519_sandy2x_ladder_base) - push %rbp - mov %rsp,%rbp - sub $1568,%rsp - and $-32,%rsp - movq %r11,1536(%rsp) - movq %r12,1544(%rsp) - movq %r13,1552(%rsp) - vmovdqa curve25519_sandy2x_v0_0(%rip),%xmm0 - vmovdqa curve25519_sandy2x_v1_0(%rip),%xmm1 - vmovdqa curve25519_sandy2x_v9_0(%rip),%xmm2 - vmovdqa %xmm2,0(%rsp) - vmovdqa %xmm0,16(%rsp) - vmovdqa %xmm0,32(%rsp) - vmovdqa %xmm0,48(%rsp) - vmovdqa %xmm0,64(%rsp) - vmovdqa %xmm1,80(%rsp) - vmovdqa %xmm0,96(%rsp) - vmovdqa %xmm0,112(%rsp) - vmovdqa %xmm0,128(%rsp) - vmovdqa %xmm0,144(%rsp) - vmovdqa %xmm1,%xmm0 - vpxor %xmm1,%xmm1,%xmm1 - vpxor %xmm2,%xmm2,%xmm2 - vpxor %xmm3,%xmm3,%xmm3 - vpxor %xmm4,%xmm4,%xmm4 - vpxor %xmm5,%xmm5,%xmm5 - vpxor %xmm6,%xmm6,%xmm6 - vpxor %xmm7,%xmm7,%xmm7 - vpxor %xmm8,%xmm8,%xmm8 - vpxor %xmm9,%xmm9,%xmm9 - movq 0(%rsi),%rdx - movq 8(%rsi),%rcx - movq 16(%rsi),%r8 - movq 24(%rsi),%r9 - shrd $1,%rcx,%rdx - shrd $1,%r8,%rcx - shrd $1,%r9,%r8 - shr $1,%r9 - xorq 0(%rsi),%rdx - xorq 8(%rsi),%rcx - xorq 16(%rsi),%r8 - xorq 24(%rsi),%r9 - leaq 512(%rsp),%rsi - mov $64,%rax - - .align 16 - .Lladder_base_small_loop: - mov %rdx,%r10 - mov %rcx,%r11 - mov %r8,%r12 - mov %r9,%r13 - shr $1,%rdx - shr $1,%rcx - shr $1,%r8 - shr $1,%r9 - and $1,%r10d - and $1,%r11d - and $1,%r12d - and $1,%r13d - neg %r10 - neg %r11 - neg %r12 - neg %r13 - movl %r10d,0(%rsi) - movl %r11d,256(%rsi) - movl %r12d,512(%rsi) - movl %r13d,768(%rsi) - add $4,%rsi - sub $1,%rax - jne .Lladder_base_small_loop - mov $255,%rdx - add $760,%rsi - - .align 16 - .Lladder_base_loop: - sub $1,%rdx - vbroadcastss 0(%rsi),%xmm10 - sub $4,%rsi - vmovdqa 0(%rsp),%xmm11 - vmovdqa 80(%rsp),%xmm12 - vpxor %xmm11,%xmm0,%xmm13 - vpand %xmm10,%xmm13,%xmm13 - vpxor %xmm13,%xmm0,%xmm0 - vpxor %xmm13,%xmm11,%xmm11 - vpxor %xmm12,%xmm1,%xmm13 - vpand %xmm10,%xmm13,%xmm13 - vpxor %xmm13,%xmm1,%xmm1 - vpxor %xmm13,%xmm12,%xmm12 - vmovdqa 16(%rsp),%xmm13 - vmovdqa 96(%rsp),%xmm14 - vpxor %xmm13,%xmm2,%xmm15 - vpand %xmm10,%xmm15,%xmm15 - vpxor %xmm15,%xmm2,%xmm2 - vpxor %xmm15,%xmm13,%xmm13 - vpxor %xmm14,%xmm3,%xmm15 - vpand %xmm10,%xmm15,%xmm15 - vpxor %xmm15,%xmm3,%xmm3 - vpxor %xmm15,%xmm14,%xmm14 - vmovdqa %xmm13,0(%rsp) - vmovdqa %xmm14,16(%rsp) - vmovdqa 32(%rsp),%xmm13 - vmovdqa 112(%rsp),%xmm14 - vpxor %xmm13,%xmm4,%xmm15 - vpand %xmm10,%xmm15,%xmm15 - vpxor %xmm15,%xmm4,%xmm4 - vpxor %xmm15,%xmm13,%xmm13 - vpxor %xmm14,%xmm5,%xmm15 - vpand %xmm10,%xmm15,%xmm15 - vpxor %xmm15,%xmm5,%xmm5 - vpxor %xmm15,%xmm14,%xmm14 - vmovdqa %xmm13,32(%rsp) - vmovdqa %xmm14,80(%rsp) - vmovdqa 48(%rsp),%xmm13 - vmovdqa 128(%rsp),%xmm14 - vpxor 
%xmm13,%xmm6,%xmm15 - vpand %xmm10,%xmm15,%xmm15 - vpxor %xmm15,%xmm6,%xmm6 - vpxor %xmm15,%xmm13,%xmm13 - vpxor %xmm14,%xmm7,%xmm15 - vpand %xmm10,%xmm15,%xmm15 - vpxor %xmm15,%xmm7,%xmm7 - vpxor %xmm15,%xmm14,%xmm14 - vmovdqa %xmm13,48(%rsp) - vmovdqa %xmm14,96(%rsp) - vmovdqa 64(%rsp),%xmm13 - vmovdqa 144(%rsp),%xmm14 - vpxor %xmm13,%xmm8,%xmm15 - vpand %xmm10,%xmm15,%xmm15 - vpxor %xmm15,%xmm8,%xmm8 - vpxor %xmm15,%xmm13,%xmm13 - vpxor %xmm14,%xmm9,%xmm15 - vpand %xmm10,%xmm15,%xmm15 - vpxor %xmm15,%xmm9,%xmm9 - vpxor %xmm15,%xmm14,%xmm14 - vmovdqa %xmm13,64(%rsp) - vmovdqa %xmm14,112(%rsp) - vpaddq curve25519_sandy2x_subc0(%rip),%xmm11,%xmm10 - vpsubq %xmm12,%xmm10,%xmm10 - vpaddq %xmm12,%xmm11,%xmm11 - vpunpckhqdq %xmm10,%xmm11,%xmm12 - vpunpcklqdq %xmm10,%xmm11,%xmm10 - vpaddq %xmm1,%xmm0,%xmm11 - vpaddq curve25519_sandy2x_subc0(%rip),%xmm0,%xmm0 - vpsubq %xmm1,%xmm0,%xmm0 - vpunpckhqdq %xmm11,%xmm0,%xmm1 - vpunpcklqdq %xmm11,%xmm0,%xmm0 - vpmuludq %xmm0,%xmm10,%xmm11 - vpmuludq %xmm1,%xmm10,%xmm13 - vmovdqa %xmm1,128(%rsp) - vpaddq %xmm1,%xmm1,%xmm1 - vpmuludq %xmm0,%xmm12,%xmm14 - vmovdqa %xmm0,144(%rsp) - vpaddq %xmm14,%xmm13,%xmm13 - vpmuludq %xmm1,%xmm12,%xmm0 - vmovdqa %xmm1,160(%rsp) - vpaddq %xmm3,%xmm2,%xmm1 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm2,%xmm2 - vpsubq %xmm3,%xmm2,%xmm2 - vpunpckhqdq %xmm1,%xmm2,%xmm3 - vpunpcklqdq %xmm1,%xmm2,%xmm1 - vpmuludq %xmm1,%xmm10,%xmm2 - vpaddq %xmm2,%xmm0,%xmm0 - vpmuludq %xmm3,%xmm10,%xmm2 - vmovdqa %xmm3,176(%rsp) - vpaddq %xmm3,%xmm3,%xmm3 - vpmuludq %xmm1,%xmm12,%xmm14 - vmovdqa %xmm1,192(%rsp) - vpaddq %xmm14,%xmm2,%xmm2 - vpmuludq %xmm3,%xmm12,%xmm1 - vmovdqa %xmm3,208(%rsp) - vpaddq %xmm5,%xmm4,%xmm3 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm4,%xmm4 - vpsubq %xmm5,%xmm4,%xmm4 - vpunpckhqdq %xmm3,%xmm4,%xmm5 - vpunpcklqdq %xmm3,%xmm4,%xmm3 - vpmuludq %xmm3,%xmm10,%xmm4 - vpaddq %xmm4,%xmm1,%xmm1 - vpmuludq %xmm5,%xmm10,%xmm4 - vmovdqa %xmm5,224(%rsp) - vpaddq %xmm5,%xmm5,%xmm5 - vpmuludq %xmm3,%xmm12,%xmm14 - vmovdqa %xmm3,240(%rsp) - vpaddq %xmm14,%xmm4,%xmm4 - vpaddq %xmm7,%xmm6,%xmm3 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm6,%xmm6 - vpsubq %xmm7,%xmm6,%xmm6 - vpunpckhqdq %xmm3,%xmm6,%xmm7 - vpunpcklqdq %xmm3,%xmm6,%xmm3 - vpmuludq %xmm3,%xmm10,%xmm6 - vpmuludq %xmm5,%xmm12,%xmm14 - vmovdqa %xmm5,256(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm5,%xmm5 - vmovdqa %xmm5,272(%rsp) - vpaddq %xmm14,%xmm6,%xmm6 - vpmuludq %xmm7,%xmm10,%xmm5 - vmovdqa %xmm7,288(%rsp) - vpaddq %xmm7,%xmm7,%xmm7 - vpmuludq %xmm3,%xmm12,%xmm14 - vmovdqa %xmm3,304(%rsp) - vpaddq %xmm14,%xmm5,%xmm5 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 - vmovdqa %xmm3,320(%rsp) - vpaddq %xmm9,%xmm8,%xmm3 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm8,%xmm8 - vpsubq %xmm9,%xmm8,%xmm8 - vpunpckhqdq %xmm3,%xmm8,%xmm9 - vpunpcklqdq %xmm3,%xmm8,%xmm3 - vmovdqa %xmm3,336(%rsp) - vpmuludq %xmm7,%xmm12,%xmm8 - vmovdqa %xmm7,352(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm7,%xmm7 - vmovdqa %xmm7,368(%rsp) - vpmuludq %xmm3,%xmm10,%xmm7 - vpaddq %xmm7,%xmm8,%xmm8 - vpmuludq %xmm9,%xmm10,%xmm7 - vmovdqa %xmm9,384(%rsp) - vpaddq %xmm9,%xmm9,%xmm9 - vpmuludq %xmm3,%xmm12,%xmm10 - vpaddq %xmm10,%xmm7,%xmm7 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 - vmovdqa %xmm3,400(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm12,%xmm12 - vpmuludq %xmm9,%xmm12,%xmm3 - vmovdqa %xmm9,416(%rsp) - vpaddq %xmm3,%xmm11,%xmm11 - vmovdqa 0(%rsp),%xmm3 - vmovdqa 16(%rsp),%xmm9 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm10 - vpsubq 
%xmm9,%xmm10,%xmm10 - vpaddq %xmm9,%xmm3,%xmm3 - vpunpckhqdq %xmm10,%xmm3,%xmm9 - vpunpcklqdq %xmm10,%xmm3,%xmm3 - vpmuludq 144(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm0,%xmm0 - vpmuludq 128(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm2,%xmm2 - vpmuludq 192(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm1,%xmm1 - vpmuludq 176(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm4,%xmm4 - vpmuludq 240(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm6,%xmm6 - vpmuludq 224(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm5,%xmm5 - vpmuludq 304(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm8,%xmm8 - vpmuludq 288(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm7,%xmm7 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 - vpmuludq 336(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm11,%xmm11 - vpmuludq 384(%rsp),%xmm3,%xmm3 - vpaddq %xmm3,%xmm13,%xmm13 - vpmuludq 144(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm2,%xmm2 - vpmuludq 160(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm1,%xmm1 - vpmuludq 192(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm4,%xmm4 - vpmuludq 208(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm6,%xmm6 - vpmuludq 240(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm5,%xmm5 - vpmuludq 256(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm8,%xmm8 - vpmuludq 304(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm7,%xmm7 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm9,%xmm9 - vpmuludq 352(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm11,%xmm11 - vpmuludq 336(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm13,%xmm13 - vpmuludq 416(%rsp),%xmm9,%xmm9 - vpaddq %xmm9,%xmm0,%xmm0 - vmovdqa 32(%rsp),%xmm3 - vmovdqa 80(%rsp),%xmm9 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm10 - vpsubq %xmm9,%xmm10,%xmm10 - vpaddq %xmm9,%xmm3,%xmm3 - vpunpckhqdq %xmm10,%xmm3,%xmm9 - vpunpcklqdq %xmm10,%xmm3,%xmm3 - vpmuludq 144(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm1,%xmm1 - vpmuludq 128(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm4,%xmm4 - vpmuludq 192(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm6,%xmm6 - vpmuludq 176(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm5,%xmm5 - vpmuludq 240(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm8,%xmm8 - vpmuludq 224(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm7,%xmm7 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 - vpmuludq 304(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm11,%xmm11 - vpmuludq 288(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm13,%xmm13 - vpmuludq 336(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm0,%xmm0 - vpmuludq 384(%rsp),%xmm3,%xmm3 - vpaddq %xmm3,%xmm2,%xmm2 - vpmuludq 144(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm4,%xmm4 - vpmuludq 160(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm6,%xmm6 - vpmuludq 192(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm5,%xmm5 - vpmuludq 208(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm8,%xmm8 - vpmuludq 240(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm7,%xmm7 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm9,%xmm9 - vpmuludq 256(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm11,%xmm11 - vpmuludq 304(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm13,%xmm13 - vpmuludq 352(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm0,%xmm0 - vpmuludq 336(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm2,%xmm2 - vpmuludq 416(%rsp),%xmm9,%xmm9 - vpaddq %xmm9,%xmm1,%xmm1 - vmovdqa 48(%rsp),%xmm3 - vmovdqa 96(%rsp),%xmm9 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm10 - vpsubq %xmm9,%xmm10,%xmm10 - vpaddq %xmm9,%xmm3,%xmm3 - vpunpckhqdq %xmm10,%xmm3,%xmm9 - vpunpcklqdq %xmm10,%xmm3,%xmm3 - vpmuludq 144(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm6,%xmm6 - vpmuludq 128(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm5,%xmm5 - vpmuludq 192(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm8,%xmm8 - vpmuludq 176(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm7,%xmm7 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 - vpmuludq 
240(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm11,%xmm11 - vpmuludq 224(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm13,%xmm13 - vpmuludq 304(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm0,%xmm0 - vpmuludq 288(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm2,%xmm2 - vpmuludq 336(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm1,%xmm1 - vpmuludq 384(%rsp),%xmm3,%xmm3 - vpaddq %xmm3,%xmm4,%xmm4 - vpmuludq 144(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm5,%xmm5 - vpmuludq 160(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm8,%xmm8 - vpmuludq 192(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm7,%xmm7 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm9,%xmm9 - vpmuludq 208(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm11,%xmm11 - vpmuludq 240(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm13,%xmm13 - vpmuludq 256(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm0,%xmm0 - vpmuludq 304(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm2,%xmm2 - vpmuludq 352(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm1,%xmm1 - vpmuludq 336(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm4,%xmm4 - vpmuludq 416(%rsp),%xmm9,%xmm9 - vpaddq %xmm9,%xmm6,%xmm6 - vmovdqa 64(%rsp),%xmm3 - vmovdqa 112(%rsp),%xmm9 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm10 - vpsubq %xmm9,%xmm10,%xmm10 - vpaddq %xmm9,%xmm3,%xmm3 - vpunpckhqdq %xmm10,%xmm3,%xmm9 - vpunpcklqdq %xmm10,%xmm3,%xmm3 - vpmuludq 144(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm8,%xmm8 - vpmuludq 128(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm7,%xmm7 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 - vpmuludq 192(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm11,%xmm11 - vpmuludq 176(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm13,%xmm13 - vpmuludq 240(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm0,%xmm0 - vpmuludq 224(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm2,%xmm2 - vpmuludq 304(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm1,%xmm1 - vpmuludq 288(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm4,%xmm4 - vpmuludq 336(%rsp),%xmm3,%xmm10 - vpaddq %xmm10,%xmm6,%xmm6 - vpmuludq 384(%rsp),%xmm3,%xmm3 - vpaddq %xmm3,%xmm5,%xmm5 - vpmuludq 144(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm7,%xmm7 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm9,%xmm9 - vpmuludq 160(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm11,%xmm11 - vpmuludq 192(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm13,%xmm13 - vpmuludq 208(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm0,%xmm0 - vpmuludq 240(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm2,%xmm2 - vpmuludq 256(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm1,%xmm1 - vpmuludq 304(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm4,%xmm4 - vpmuludq 352(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm6,%xmm6 - vpmuludq 336(%rsp),%xmm9,%xmm3 - vpaddq %xmm3,%xmm5,%xmm5 - vpmuludq 416(%rsp),%xmm9,%xmm9 - vpaddq %xmm9,%xmm8,%xmm8 - vpsrlq $25,%xmm4,%xmm3 - vpaddq %xmm3,%xmm6,%xmm6 - vpand curve25519_sandy2x_m25(%rip),%xmm4,%xmm4 - vpsrlq $26,%xmm11,%xmm3 - vpaddq %xmm3,%xmm13,%xmm13 - vpand curve25519_sandy2x_m26(%rip),%xmm11,%xmm11 - vpsrlq $26,%xmm6,%xmm3 - vpaddq %xmm3,%xmm5,%xmm5 - vpand curve25519_sandy2x_m26(%rip),%xmm6,%xmm6 - vpsrlq $25,%xmm13,%xmm3 - vpaddq %xmm3,%xmm0,%xmm0 - vpand curve25519_sandy2x_m25(%rip),%xmm13,%xmm13 - vpsrlq $25,%xmm5,%xmm3 - vpaddq %xmm3,%xmm8,%xmm8 - vpand curve25519_sandy2x_m25(%rip),%xmm5,%xmm5 - vpsrlq $26,%xmm0,%xmm3 - vpaddq %xmm3,%xmm2,%xmm2 - vpand curve25519_sandy2x_m26(%rip),%xmm0,%xmm0 - vpsrlq $26,%xmm8,%xmm3 - vpaddq %xmm3,%xmm7,%xmm7 - vpand curve25519_sandy2x_m26(%rip),%xmm8,%xmm8 - vpsrlq $25,%xmm2,%xmm3 - vpaddq %xmm3,%xmm1,%xmm1 - vpand curve25519_sandy2x_m25(%rip),%xmm2,%xmm2 - vpsrlq $25,%xmm7,%xmm3 - vpsllq $4,%xmm3,%xmm9 - vpaddq %xmm3,%xmm11,%xmm11 - vpsllq $1,%xmm3,%xmm3 - vpaddq %xmm3,%xmm9,%xmm9 - vpaddq 
%xmm9,%xmm11,%xmm11 - vpand curve25519_sandy2x_m25(%rip),%xmm7,%xmm7 - vpsrlq $26,%xmm1,%xmm3 - vpaddq %xmm3,%xmm4,%xmm4 - vpand curve25519_sandy2x_m26(%rip),%xmm1,%xmm1 - vpsrlq $26,%xmm11,%xmm3 - vpaddq %xmm3,%xmm13,%xmm13 - vpand curve25519_sandy2x_m26(%rip),%xmm11,%xmm11 - vpsrlq $25,%xmm4,%xmm3 - vpaddq %xmm3,%xmm6,%xmm6 - vpand curve25519_sandy2x_m25(%rip),%xmm4,%xmm4 - vpunpcklqdq %xmm13,%xmm11,%xmm3 - vpunpckhqdq %xmm13,%xmm11,%xmm9 - vpaddq curve25519_sandy2x_subc0(%rip),%xmm9,%xmm10 - vpsubq %xmm3,%xmm10,%xmm10 - vpaddq %xmm9,%xmm3,%xmm3 - vpunpckhqdq %xmm3,%xmm10,%xmm9 - vpunpcklqdq %xmm3,%xmm10,%xmm10 - vpmuludq %xmm10,%xmm10,%xmm3 - vpaddq %xmm10,%xmm10,%xmm10 - vpmuludq %xmm9,%xmm10,%xmm11 - vpunpcklqdq %xmm2,%xmm0,%xmm12 - vpunpckhqdq %xmm2,%xmm0,%xmm0 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm0,%xmm2 - vpsubq %xmm12,%xmm2,%xmm2 - vpaddq %xmm0,%xmm12,%xmm12 - vpunpckhqdq %xmm12,%xmm2,%xmm0 - vpunpcklqdq %xmm12,%xmm2,%xmm2 - vpmuludq %xmm2,%xmm10,%xmm12 - vpaddq %xmm9,%xmm9,%xmm13 - vpmuludq %xmm13,%xmm9,%xmm9 - vpaddq %xmm9,%xmm12,%xmm12 - vpmuludq %xmm0,%xmm10,%xmm9 - vpmuludq %xmm2,%xmm13,%xmm14 - vpaddq %xmm14,%xmm9,%xmm9 - vpunpcklqdq %xmm4,%xmm1,%xmm14 - vpunpckhqdq %xmm4,%xmm1,%xmm1 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm1,%xmm4 - vpsubq %xmm14,%xmm4,%xmm4 - vpaddq %xmm1,%xmm14,%xmm14 - vpunpckhqdq %xmm14,%xmm4,%xmm1 - vpunpcklqdq %xmm14,%xmm4,%xmm4 - vmovdqa %xmm1,0(%rsp) - vpaddq %xmm1,%xmm1,%xmm1 - vmovdqa %xmm1,16(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 - vmovdqa %xmm1,32(%rsp) - vpmuludq %xmm4,%xmm10,%xmm1 - vpmuludq %xmm2,%xmm2,%xmm14 - vpaddq %xmm14,%xmm1,%xmm1 - vpmuludq 0(%rsp),%xmm10,%xmm14 - vpmuludq %xmm4,%xmm13,%xmm15 - vpaddq %xmm15,%xmm14,%xmm14 - vpunpcklqdq %xmm5,%xmm6,%xmm15 - vpunpckhqdq %xmm5,%xmm6,%xmm5 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm5,%xmm6 - vpsubq %xmm15,%xmm6,%xmm6 - vpaddq %xmm5,%xmm15,%xmm15 - vpunpckhqdq %xmm15,%xmm6,%xmm5 - vpunpcklqdq %xmm15,%xmm6,%xmm6 - vmovdqa %xmm6,48(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm6,%xmm6 - vmovdqa %xmm6,64(%rsp) - vmovdqa %xmm5,80(%rsp) - vpmuludq curve25519_sandy2x_v38_38(%rip),%xmm5,%xmm5 - vmovdqa %xmm5,96(%rsp) - vpmuludq 48(%rsp),%xmm10,%xmm5 - vpaddq %xmm0,%xmm0,%xmm6 - vpmuludq %xmm6,%xmm0,%xmm0 - vpaddq %xmm0,%xmm5,%xmm5 - vpmuludq 80(%rsp),%xmm10,%xmm0 - vpmuludq %xmm4,%xmm6,%xmm15 - vpaddq %xmm15,%xmm0,%xmm0 - vpmuludq %xmm6,%xmm13,%xmm15 - vpaddq %xmm15,%xmm1,%xmm1 - vpmuludq %xmm6,%xmm2,%xmm15 - vpaddq %xmm15,%xmm14,%xmm14 - vpunpcklqdq %xmm7,%xmm8,%xmm15 - vpunpckhqdq %xmm7,%xmm8,%xmm7 - vpaddq curve25519_sandy2x_subc2(%rip),%xmm7,%xmm8 - vpsubq %xmm15,%xmm8,%xmm8 - vpaddq %xmm7,%xmm15,%xmm15 - vpunpckhqdq %xmm15,%xmm8,%xmm7 - vpunpcklqdq %xmm15,%xmm8,%xmm8 - vmovdqa %xmm8,112(%rsp) - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm8,%xmm8 - vmovdqa %xmm8,160(%rsp) - vpmuludq 112(%rsp),%xmm10,%xmm8 - vpmuludq %xmm7,%xmm10,%xmm10 - vpmuludq curve25519_sandy2x_v38_38(%rip),%xmm7,%xmm15 - vpmuludq %xmm15,%xmm7,%xmm7 - vpaddq %xmm7,%xmm8,%xmm8 - vpmuludq %xmm15,%xmm13,%xmm7 - vpaddq %xmm7,%xmm3,%xmm3 - vpmuludq %xmm15,%xmm2,%xmm7 - vpaddq %xmm7,%xmm11,%xmm11 - vpmuludq 80(%rsp),%xmm13,%xmm7 - vpaddq %xmm7,%xmm7,%xmm7 - vpaddq %xmm7,%xmm8,%xmm8 - vpmuludq 16(%rsp),%xmm13,%xmm7 - vpaddq %xmm7,%xmm5,%xmm5 - vpmuludq 48(%rsp),%xmm13,%xmm7 - vpaddq %xmm7,%xmm0,%xmm0 - vpmuludq 112(%rsp),%xmm13,%xmm7 - vpaddq %xmm7,%xmm10,%xmm10 - vpmuludq %xmm15,%xmm6,%xmm7 - vpaddq %xmm7,%xmm12,%xmm12 - vpmuludq %xmm15,%xmm4,%xmm7 - vpaddq %xmm7,%xmm9,%xmm9 - vpaddq 
%xmm2,%xmm2,%xmm2 - vpmuludq %xmm4,%xmm2,%xmm7 - vpaddq %xmm7,%xmm5,%xmm5 - vpmuludq 160(%rsp),%xmm2,%xmm7 - vpaddq %xmm7,%xmm3,%xmm3 - vpmuludq 160(%rsp),%xmm6,%xmm7 - vpaddq %xmm7,%xmm11,%xmm11 - vpmuludq 0(%rsp),%xmm2,%xmm7 - vpaddq %xmm7,%xmm0,%xmm0 - vpmuludq 48(%rsp),%xmm2,%xmm7 - vpaddq %xmm7,%xmm8,%xmm8 - vpmuludq 80(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpmuludq 96(%rsp),%xmm4,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpmuludq %xmm4,%xmm4,%xmm2 - vpaddq %xmm2,%xmm8,%xmm8 - vpaddq %xmm4,%xmm4,%xmm2 - vpmuludq 160(%rsp),%xmm2,%xmm4 - vpaddq %xmm4,%xmm12,%xmm12 - vpmuludq 16(%rsp),%xmm15,%xmm4 - vpaddq %xmm4,%xmm1,%xmm1 - vpmuludq 48(%rsp),%xmm15,%xmm4 - vpaddq %xmm4,%xmm14,%xmm14 - vpmuludq 96(%rsp),%xmm6,%xmm4 - vpaddq %xmm4,%xmm3,%xmm3 - vmovdqa 16(%rsp),%xmm4 - vpmuludq 160(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm9,%xmm9 - vpmuludq 16(%rsp),%xmm6,%xmm4 - vpaddq %xmm4,%xmm8,%xmm8 - vpmuludq 48(%rsp),%xmm6,%xmm4 - vpaddq %xmm4,%xmm10,%xmm10 - vpmuludq 80(%rsp),%xmm15,%xmm4 - vpaddq %xmm4,%xmm4,%xmm4 - vpaddq %xmm4,%xmm5,%xmm5 - vpmuludq 112(%rsp),%xmm15,%xmm4 - vpaddq %xmm4,%xmm0,%xmm0 - vmovdqa 48(%rsp),%xmm4 - vpaddq %xmm4,%xmm4,%xmm4 - vpmuludq 160(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm1,%xmm1 - vmovdqa 80(%rsp),%xmm4 - vpaddq %xmm4,%xmm4,%xmm4 - vpmuludq 160(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm14,%xmm14 - vpmuludq 64(%rsp),%xmm2,%xmm4 - vpaddq %xmm4,%xmm3,%xmm3 - vmovdqa 16(%rsp),%xmm4 - vpmuludq 64(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm11,%xmm11 - vmovdqa 16(%rsp),%xmm4 - vpmuludq 96(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm12,%xmm12 - vmovdqa 48(%rsp),%xmm4 - vpmuludq 96(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm9,%xmm9 - vpmuludq 0(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vmovdqa 32(%rsp),%xmm2 - vpmuludq 0(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm3,%xmm3 - vmovdqa 64(%rsp),%xmm2 - vpmuludq 48(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vmovdqa 96(%rsp),%xmm2 - vpmuludq 80(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm1,%xmm1 - vmovdqa 160(%rsp),%xmm2 - vpmuludq 112(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vpsrlq $26,%xmm3,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpand curve25519_sandy2x_m26(%rip),%xmm3,%xmm3 - vpsrlq $25,%xmm14,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vpand curve25519_sandy2x_m25(%rip),%xmm14,%xmm14 - vpsrlq $25,%xmm11,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vpand curve25519_sandy2x_m25(%rip),%xmm11,%xmm11 - vpsrlq $26,%xmm5,%xmm2 - vpaddq %xmm2,%xmm0,%xmm0 - vpand curve25519_sandy2x_m26(%rip),%xmm5,%xmm5 - vpsrlq $26,%xmm12,%xmm2 - vpaddq %xmm2,%xmm9,%xmm9 - vpand curve25519_sandy2x_m26(%rip),%xmm12,%xmm12 - vpsrlq $25,%xmm0,%xmm2 - vpaddq %xmm2,%xmm8,%xmm8 - vpand curve25519_sandy2x_m25(%rip),%xmm0,%xmm0 - vpsrlq $25,%xmm9,%xmm2 - vpaddq %xmm2,%xmm1,%xmm1 - vpand curve25519_sandy2x_m25(%rip),%xmm9,%xmm9 - vpsrlq $26,%xmm8,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpand curve25519_sandy2x_m26(%rip),%xmm8,%xmm8 - vpsrlq $26,%xmm1,%xmm2 - vpaddq %xmm2,%xmm14,%xmm14 - vpand curve25519_sandy2x_m26(%rip),%xmm1,%xmm1 - vpsrlq $25,%xmm10,%xmm2 - vpsllq $4,%xmm2,%xmm4 - vpaddq %xmm2,%xmm3,%xmm3 - vpsllq $1,%xmm2,%xmm2 - vpaddq %xmm2,%xmm4,%xmm4 - vpaddq %xmm4,%xmm3,%xmm3 - vpand curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 - vpsrlq $25,%xmm14,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vpand curve25519_sandy2x_m25(%rip),%xmm14,%xmm14 - vpsrlq $26,%xmm3,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpand curve25519_sandy2x_m26(%rip),%xmm3,%xmm3 - vpunpckhqdq %xmm11,%xmm3,%xmm2 - vmovdqa %xmm2,0(%rsp) - vpunpcklqdq %xmm11,%xmm3,%xmm2 - vpmuludq curve25519_sandy2x_v9_9(%rip),%xmm2,%xmm2 - vmovdqa 
%xmm2,80(%rsp) - vpunpckhqdq %xmm9,%xmm12,%xmm2 - vmovdqa %xmm2,16(%rsp) - vpunpcklqdq %xmm9,%xmm12,%xmm2 - vpmuludq curve25519_sandy2x_v9_9(%rip),%xmm2,%xmm2 - vmovdqa %xmm2,96(%rsp) - vpunpckhqdq %xmm14,%xmm1,%xmm2 - vmovdqa %xmm2,32(%rsp) - vpunpcklqdq %xmm14,%xmm1,%xmm1 - vpmuludq curve25519_sandy2x_v9_9(%rip),%xmm1,%xmm1 - vmovdqa %xmm1,112(%rsp) - vpunpckhqdq %xmm0,%xmm5,%xmm1 - vmovdqa %xmm1,48(%rsp) - vpunpcklqdq %xmm0,%xmm5,%xmm0 - vpmuludq curve25519_sandy2x_v9_9(%rip),%xmm0,%xmm0 - vmovdqa %xmm0,160(%rsp) - vpunpckhqdq %xmm10,%xmm8,%xmm0 - vmovdqa %xmm0,64(%rsp) - vpunpcklqdq %xmm10,%xmm8,%xmm0 - vpmuludq curve25519_sandy2x_v9_9(%rip),%xmm0,%xmm0 - vmovdqa %xmm0,208(%rsp) - vmovdqa 144(%rsp),%xmm0 - vpmuludq %xmm0,%xmm0,%xmm1 - vpaddq %xmm0,%xmm0,%xmm0 - vmovdqa 128(%rsp),%xmm2 - vpmuludq %xmm2,%xmm0,%xmm3 - vmovdqa 192(%rsp),%xmm4 - vpmuludq %xmm4,%xmm0,%xmm5 - vmovdqa 176(%rsp),%xmm6 - vpmuludq %xmm6,%xmm0,%xmm7 - vmovdqa 240(%rsp),%xmm8 - vpmuludq %xmm8,%xmm0,%xmm9 - vpmuludq 224(%rsp),%xmm0,%xmm10 - vpmuludq 304(%rsp),%xmm0,%xmm11 - vpmuludq 288(%rsp),%xmm0,%xmm12 - vpmuludq 336(%rsp),%xmm0,%xmm13 - vmovdqa 384(%rsp),%xmm14 - vpmuludq %xmm14,%xmm0,%xmm0 - vpmuludq curve25519_sandy2x_v38_38(%rip),%xmm14,%xmm15 - vpmuludq %xmm15,%xmm14,%xmm14 - vpaddq %xmm14,%xmm13,%xmm13 - vpaddq %xmm6,%xmm6,%xmm14 - vpmuludq %xmm14,%xmm6,%xmm6 - vpaddq %xmm6,%xmm11,%xmm11 - vpaddq %xmm2,%xmm2,%xmm6 - vpmuludq %xmm6,%xmm2,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vpmuludq %xmm15,%xmm6,%xmm2 - vpaddq %xmm2,%xmm1,%xmm1 - vpmuludq %xmm15,%xmm4,%xmm2 - vpaddq %xmm2,%xmm3,%xmm3 - vpmuludq 256(%rsp),%xmm6,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpmuludq 304(%rsp),%xmm6,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vpmuludq 352(%rsp),%xmm6,%xmm2 - vpaddq %xmm2,%xmm13,%xmm13 - vpmuludq 336(%rsp),%xmm6,%xmm2 - vpaddq %xmm2,%xmm0,%xmm0 - vpmuludq %xmm4,%xmm6,%xmm2 - vpaddq %xmm2,%xmm7,%xmm7 - vpmuludq %xmm14,%xmm6,%xmm2 - vpaddq %xmm2,%xmm9,%xmm9 - vpmuludq %xmm8,%xmm6,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpmuludq %xmm15,%xmm14,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vpmuludq %xmm15,%xmm8,%xmm2 - vpaddq %xmm2,%xmm7,%xmm7 - vpmuludq %xmm4,%xmm4,%xmm2 - vpaddq %xmm2,%xmm9,%xmm9 - vpmuludq %xmm14,%xmm4,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpaddq %xmm4,%xmm4,%xmm2 - vpmuludq %xmm8,%xmm2,%xmm4 - vpaddq %xmm4,%xmm11,%xmm11 - vpmuludq 400(%rsp),%xmm2,%xmm4 - vpaddq %xmm4,%xmm1,%xmm1 - vpmuludq 400(%rsp),%xmm14,%xmm4 - vpaddq %xmm4,%xmm3,%xmm3 - vpmuludq 224(%rsp),%xmm2,%xmm4 - vpaddq %xmm4,%xmm12,%xmm12 - vpmuludq 304(%rsp),%xmm2,%xmm4 - vpaddq %xmm4,%xmm13,%xmm13 - vpmuludq 288(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm0,%xmm0 - vpmuludq 368(%rsp),%xmm8,%xmm2 - vpaddq %xmm2,%xmm3,%xmm3 - vpmuludq %xmm8,%xmm14,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vpmuludq %xmm8,%xmm8,%xmm2 - vpaddq %xmm2,%xmm13,%xmm13 - vpaddq %xmm8,%xmm8,%xmm2 - vpmuludq 400(%rsp),%xmm2,%xmm4 - vpaddq %xmm4,%xmm5,%xmm5 - vpmuludq 256(%rsp),%xmm15,%xmm4 - vpaddq %xmm4,%xmm9,%xmm9 - vpmuludq 304(%rsp),%xmm15,%xmm4 - vpaddq %xmm4,%xmm10,%xmm10 - vpmuludq 368(%rsp),%xmm14,%xmm4 - vpaddq %xmm4,%xmm1,%xmm1 - vmovdqa 256(%rsp),%xmm4 - vpmuludq 400(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm7,%xmm7 - vpmuludq 256(%rsp),%xmm14,%xmm4 - vpaddq %xmm4,%xmm13,%xmm13 - vpmuludq 304(%rsp),%xmm14,%xmm4 - vpaddq %xmm4,%xmm0,%xmm0 - vpmuludq 352(%rsp),%xmm15,%xmm4 - vpaddq %xmm4,%xmm11,%xmm11 - vpmuludq 336(%rsp),%xmm15,%xmm4 - vpaddq %xmm4,%xmm12,%xmm12 - vmovdqa 304(%rsp),%xmm4 - vpaddq %xmm4,%xmm4,%xmm4 - vpmuludq 400(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm9,%xmm9 - vpmuludq 
320(%rsp),%xmm2,%xmm4 - vpaddq %xmm4,%xmm1,%xmm1 - vmovdqa 256(%rsp),%xmm4 - vpmuludq 320(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm3,%xmm3 - vmovdqa 256(%rsp),%xmm4 - vpmuludq 368(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm5,%xmm5 - vmovdqa 304(%rsp),%xmm4 - vpmuludq 368(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm7,%xmm7 - vmovdqa 352(%rsp),%xmm4 - vpmuludq 400(%rsp),%xmm4,%xmm4 - vpaddq %xmm4,%xmm10,%xmm10 - vpmuludq 224(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm0,%xmm0 - vmovdqa 272(%rsp),%xmm2 - vpmuludq 224(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm1,%xmm1 - vmovdqa 320(%rsp),%xmm2 - vpmuludq 304(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vmovdqa 368(%rsp),%xmm2 - vpmuludq 288(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm9,%xmm9 - vmovdqa 400(%rsp),%xmm2 - vpmuludq 336(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpsrlq $26,%xmm1,%xmm2 - vpaddq %xmm2,%xmm3,%xmm3 - vpand curve25519_sandy2x_m26(%rip),%xmm1,%xmm1 - vpsrlq $25,%xmm10,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpand curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 - vpsrlq $25,%xmm3,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vpand curve25519_sandy2x_m25(%rip),%xmm3,%xmm3 - vpsrlq $26,%xmm11,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vpand curve25519_sandy2x_m26(%rip),%xmm11,%xmm11 - vpsrlq $26,%xmm5,%xmm2 - vpaddq %xmm2,%xmm7,%xmm7 - vpand curve25519_sandy2x_m26(%rip),%xmm5,%xmm5 - vpsrlq $25,%xmm12,%xmm2 - vpaddq %xmm2,%xmm13,%xmm13 - vpand curve25519_sandy2x_m25(%rip),%xmm12,%xmm12 - vpsrlq $25,%xmm7,%xmm2 - vpaddq %xmm2,%xmm9,%xmm9 - vpand curve25519_sandy2x_m25(%rip),%xmm7,%xmm7 - vpsrlq $26,%xmm13,%xmm2 - vpaddq %xmm2,%xmm0,%xmm0 - vpand curve25519_sandy2x_m26(%rip),%xmm13,%xmm13 - vpsrlq $26,%xmm9,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpand curve25519_sandy2x_m26(%rip),%xmm9,%xmm9 - vpsrlq $25,%xmm0,%xmm2 - vpsllq $4,%xmm2,%xmm4 - vpaddq %xmm2,%xmm1,%xmm1 - vpsllq $1,%xmm2,%xmm2 - vpaddq %xmm2,%xmm4,%xmm4 - vpaddq %xmm4,%xmm1,%xmm1 - vpand curve25519_sandy2x_m25(%rip),%xmm0,%xmm0 - vpsrlq $25,%xmm10,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpand curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 - vpsrlq $26,%xmm1,%xmm2 - vpaddq %xmm2,%xmm3,%xmm3 - vpand curve25519_sandy2x_m26(%rip),%xmm1,%xmm1 - vpunpckhqdq %xmm3,%xmm1,%xmm2 - vpunpcklqdq %xmm3,%xmm1,%xmm1 - vmovdqa %xmm1,176(%rsp) - vpaddq curve25519_sandy2x_subc0(%rip),%xmm2,%xmm3 - vpsubq %xmm1,%xmm3,%xmm3 - vpunpckhqdq %xmm3,%xmm2,%xmm1 - vpunpcklqdq %xmm3,%xmm2,%xmm2 - vmovdqa %xmm2,192(%rsp) - vmovdqa %xmm1,224(%rsp) - vpsllq $1,%xmm1,%xmm1 - vmovdqa %xmm1,240(%rsp) - vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm3,%xmm3 - vmovdqa 80(%rsp),%xmm1 - vpunpcklqdq %xmm1,%xmm3,%xmm2 - vpunpckhqdq %xmm1,%xmm3,%xmm1 - vpunpckhqdq %xmm7,%xmm5,%xmm3 - vpunpcklqdq %xmm7,%xmm5,%xmm4 - vmovdqa %xmm4,256(%rsp) - vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm5 - vpsubq %xmm4,%xmm5,%xmm5 - vpunpckhqdq %xmm5,%xmm3,%xmm4 - vpunpcklqdq %xmm5,%xmm3,%xmm3 - vmovdqa %xmm3,272(%rsp) - vmovdqa %xmm4,288(%rsp) - vpsllq $1,%xmm4,%xmm4 - vmovdqa %xmm4,304(%rsp) - vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm5,%xmm5 - vmovdqa 96(%rsp),%xmm3 - vpunpcklqdq %xmm3,%xmm5,%xmm4 - vpunpckhqdq %xmm3,%xmm5,%xmm3 - vpunpckhqdq %xmm10,%xmm9,%xmm5 - vpunpcklqdq %xmm10,%xmm9,%xmm6 - vmovdqa %xmm6,320(%rsp) - vpaddq curve25519_sandy2x_subc2(%rip),%xmm5,%xmm7 - vpsubq %xmm6,%xmm7,%xmm7 - vpunpckhqdq %xmm7,%xmm5,%xmm6 - vpunpcklqdq %xmm7,%xmm5,%xmm5 - vmovdqa %xmm5,336(%rsp) - vmovdqa %xmm6,352(%rsp) - vpsllq $1,%xmm6,%xmm6 - vmovdqa %xmm6,368(%rsp) - vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm7,%xmm7 - vmovdqa 112(%rsp),%xmm5 - vpunpcklqdq 
%xmm5,%xmm7,%xmm6 - vpunpckhqdq %xmm5,%xmm7,%xmm5 - vpunpckhqdq %xmm12,%xmm11,%xmm7 - vpunpcklqdq %xmm12,%xmm11,%xmm8 - vmovdqa %xmm8,384(%rsp) - vpaddq curve25519_sandy2x_subc2(%rip),%xmm7,%xmm9 - vpsubq %xmm8,%xmm9,%xmm9 - vpunpckhqdq %xmm9,%xmm7,%xmm8 - vpunpcklqdq %xmm9,%xmm7,%xmm7 - vmovdqa %xmm7,400(%rsp) - vmovdqa %xmm8,416(%rsp) - vpsllq $1,%xmm8,%xmm8 - vmovdqa %xmm8,432(%rsp) - vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm9,%xmm9 - vmovdqa 160(%rsp),%xmm7 - vpunpcklqdq %xmm7,%xmm9,%xmm8 - vpunpckhqdq %xmm7,%xmm9,%xmm7 - vpunpckhqdq %xmm0,%xmm13,%xmm9 - vpunpcklqdq %xmm0,%xmm13,%xmm0 - vmovdqa %xmm0,160(%rsp) - vpaddq curve25519_sandy2x_subc2(%rip),%xmm9,%xmm10 - vpsubq %xmm0,%xmm10,%xmm10 - vpunpckhqdq %xmm10,%xmm9,%xmm0 - vpunpcklqdq %xmm10,%xmm9,%xmm9 - vmovdqa %xmm9,448(%rsp) - vmovdqa %xmm0,464(%rsp) - vpsllq $1,%xmm0,%xmm0 - vmovdqa %xmm0,480(%rsp) - vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm10,%xmm10 - vmovdqa 208(%rsp),%xmm0 - vpunpcklqdq %xmm0,%xmm10,%xmm9 - vpunpckhqdq %xmm0,%xmm10,%xmm0 - vpsrlq $26,%xmm2,%xmm10 - vpaddq %xmm10,%xmm1,%xmm1 - vpand curve25519_sandy2x_m26(%rip),%xmm2,%xmm2 - vpsrlq $25,%xmm5,%xmm10 - vpaddq %xmm10,%xmm8,%xmm8 - vpand curve25519_sandy2x_m25(%rip),%xmm5,%xmm5 - vpsrlq $25,%xmm1,%xmm10 - vpaddq %xmm10,%xmm4,%xmm4 - vpand curve25519_sandy2x_m25(%rip),%xmm1,%xmm1 - vpsrlq $26,%xmm8,%xmm10 - vpaddq %xmm10,%xmm7,%xmm7 - vpand curve25519_sandy2x_m26(%rip),%xmm8,%xmm8 - vpsrlq $26,%xmm4,%xmm10 - vpaddq %xmm10,%xmm3,%xmm3 - vpand curve25519_sandy2x_m26(%rip),%xmm4,%xmm4 - vpsrlq $25,%xmm7,%xmm10 - vpaddq %xmm10,%xmm9,%xmm9 - vpand curve25519_sandy2x_m25(%rip),%xmm7,%xmm7 - vpsrlq $25,%xmm3,%xmm10 - vpaddq %xmm10,%xmm6,%xmm6 - vpand curve25519_sandy2x_m25(%rip),%xmm3,%xmm3 - vpsrlq $26,%xmm9,%xmm10 - vpaddq %xmm10,%xmm0,%xmm0 - vpand curve25519_sandy2x_m26(%rip),%xmm9,%xmm9 - vpsrlq $26,%xmm6,%xmm10 - vpaddq %xmm10,%xmm5,%xmm5 - vpand curve25519_sandy2x_m26(%rip),%xmm6,%xmm6 - vpsrlq $25,%xmm0,%xmm10 - vpsllq $4,%xmm10,%xmm11 - vpaddq %xmm10,%xmm2,%xmm2 - vpsllq $1,%xmm10,%xmm10 - vpaddq %xmm10,%xmm11,%xmm11 - vpaddq %xmm11,%xmm2,%xmm2 - vpand curve25519_sandy2x_m25(%rip),%xmm0,%xmm0 - vpsrlq $25,%xmm5,%xmm10 - vpaddq %xmm10,%xmm8,%xmm8 - vpand curve25519_sandy2x_m25(%rip),%xmm5,%xmm5 - vpsrlq $26,%xmm2,%xmm10 - vpaddq %xmm10,%xmm1,%xmm1 - vpand curve25519_sandy2x_m26(%rip),%xmm2,%xmm2 - vpunpckhqdq %xmm1,%xmm2,%xmm10 - vmovdqa %xmm10,80(%rsp) - vpunpcklqdq %xmm1,%xmm2,%xmm1 - vpunpckhqdq %xmm3,%xmm4,%xmm2 - vmovdqa %xmm2,96(%rsp) - vpunpcklqdq %xmm3,%xmm4,%xmm2 - vpunpckhqdq %xmm5,%xmm6,%xmm3 - vmovdqa %xmm3,112(%rsp) - vpunpcklqdq %xmm5,%xmm6,%xmm3 - vpunpckhqdq %xmm7,%xmm8,%xmm4 - vmovdqa %xmm4,128(%rsp) - vpunpcklqdq %xmm7,%xmm8,%xmm4 - vpunpckhqdq %xmm0,%xmm9,%xmm5 - vmovdqa %xmm5,144(%rsp) - vpunpcklqdq %xmm0,%xmm9,%xmm0 - vmovdqa 176(%rsp),%xmm5 - vpaddq %xmm5,%xmm1,%xmm1 - vpunpcklqdq %xmm1,%xmm5,%xmm6 - vpunpckhqdq %xmm1,%xmm5,%xmm1 - vpmuludq 224(%rsp),%xmm6,%xmm5 - vpmuludq 192(%rsp),%xmm1,%xmm7 - vpaddq %xmm7,%xmm5,%xmm5 - vpmuludq 272(%rsp),%xmm6,%xmm7 - vpmuludq 240(%rsp),%xmm1,%xmm8 - vpaddq %xmm8,%xmm7,%xmm7 - vpmuludq 288(%rsp),%xmm6,%xmm8 - vpmuludq 272(%rsp),%xmm1,%xmm9 - vpaddq %xmm9,%xmm8,%xmm8 - vpmuludq 336(%rsp),%xmm6,%xmm9 - vpmuludq 304(%rsp),%xmm1,%xmm10 - vpaddq %xmm10,%xmm9,%xmm9 - vpmuludq 352(%rsp),%xmm6,%xmm10 - vpmuludq 336(%rsp),%xmm1,%xmm11 - vpaddq %xmm11,%xmm10,%xmm10 - vpmuludq 400(%rsp),%xmm6,%xmm11 - vpmuludq 368(%rsp),%xmm1,%xmm12 - vpaddq %xmm12,%xmm11,%xmm11 - vpmuludq 
416(%rsp),%xmm6,%xmm12 - vpmuludq 400(%rsp),%xmm1,%xmm13 - vpaddq %xmm13,%xmm12,%xmm12 - vpmuludq 448(%rsp),%xmm6,%xmm13 - vpmuludq 432(%rsp),%xmm1,%xmm14 - vpaddq %xmm14,%xmm13,%xmm13 - vpmuludq 464(%rsp),%xmm6,%xmm14 - vpmuludq 448(%rsp),%xmm1,%xmm15 - vpaddq %xmm15,%xmm14,%xmm14 - vpmuludq 192(%rsp),%xmm6,%xmm6 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 - vpmuludq 480(%rsp),%xmm1,%xmm1 - vpaddq %xmm1,%xmm6,%xmm6 - vmovdqa 256(%rsp),%xmm1 - vpaddq %xmm1,%xmm2,%xmm2 - vpunpcklqdq %xmm2,%xmm1,%xmm15 - vpunpckhqdq %xmm2,%xmm1,%xmm1 - vpmuludq 192(%rsp),%xmm15,%xmm2 - vpaddq %xmm2,%xmm7,%xmm7 - vpmuludq 224(%rsp),%xmm15,%xmm2 - vpaddq %xmm2,%xmm8,%xmm8 - vpmuludq 272(%rsp),%xmm15,%xmm2 - vpaddq %xmm2,%xmm9,%xmm9 - vpmuludq 288(%rsp),%xmm15,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpmuludq 336(%rsp),%xmm15,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpmuludq 352(%rsp),%xmm15,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vpmuludq 400(%rsp),%xmm15,%xmm2 - vpaddq %xmm2,%xmm13,%xmm13 - vpmuludq 416(%rsp),%xmm15,%xmm2 - vpaddq %xmm2,%xmm14,%xmm14 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm15,%xmm15 - vpmuludq 448(%rsp),%xmm15,%xmm2 - vpaddq %xmm2,%xmm6,%xmm6 - vpmuludq 464(%rsp),%xmm15,%xmm15 - vpaddq %xmm15,%xmm5,%xmm5 - vpmuludq 192(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm8,%xmm8 - vpmuludq 240(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm9,%xmm9 - vpmuludq 272(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpmuludq 304(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpmuludq 336(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vpmuludq 368(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm13,%xmm13 - vpmuludq 400(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm14,%xmm14 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 - vpmuludq 432(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm6,%xmm6 - vpmuludq 448(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vpmuludq 480(%rsp),%xmm1,%xmm1 - vpaddq %xmm1,%xmm7,%xmm7 - vmovdqa 320(%rsp),%xmm1 - vpaddq %xmm1,%xmm3,%xmm3 - vpunpcklqdq %xmm3,%xmm1,%xmm2 - vpunpckhqdq %xmm3,%xmm1,%xmm1 - vpmuludq 192(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm9,%xmm9 - vpmuludq 224(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm10,%xmm10 - vpmuludq 272(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm11,%xmm11 - vpmuludq 288(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm12,%xmm12 - vpmuludq 336(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm13,%xmm13 - vpmuludq 352(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm14,%xmm14 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm2,%xmm2 - vpmuludq 400(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm6,%xmm6 - vpmuludq 416(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm5,%xmm5 - vpmuludq 448(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm7,%xmm7 - vpmuludq 464(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm8,%xmm8 - vpmuludq 192(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpmuludq 240(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm11,%xmm11 - vpmuludq 272(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vpmuludq 304(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm13,%xmm13 - vpmuludq 336(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm14,%xmm14 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 - vpmuludq 368(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm6,%xmm6 - vpmuludq 400(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vpmuludq 432(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm7,%xmm7 - vpmuludq 448(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm8,%xmm8 - vpmuludq 480(%rsp),%xmm1,%xmm1 - vpaddq %xmm1,%xmm9,%xmm9 - vmovdqa 384(%rsp),%xmm1 - vpaddq %xmm1,%xmm4,%xmm4 - vpunpcklqdq %xmm4,%xmm1,%xmm2 - vpunpckhqdq %xmm4,%xmm1,%xmm1 - vpmuludq 192(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm11,%xmm11 - vpmuludq 224(%rsp),%xmm2,%xmm3 - 
vpaddq %xmm3,%xmm12,%xmm12 - vpmuludq 272(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm13,%xmm13 - vpmuludq 288(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm14,%xmm14 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm2,%xmm2 - vpmuludq 336(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm6,%xmm6 - vpmuludq 352(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm5,%xmm5 - vpmuludq 400(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm7,%xmm7 - vpmuludq 416(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm8,%xmm8 - vpmuludq 448(%rsp),%xmm2,%xmm3 - vpaddq %xmm3,%xmm9,%xmm9 - vpmuludq 464(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpmuludq 192(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vpmuludq 240(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm13,%xmm13 - vpmuludq 272(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm14,%xmm14 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 - vpmuludq 304(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm6,%xmm6 - vpmuludq 336(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm5,%xmm5 - vpmuludq 368(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm7,%xmm7 - vpmuludq 400(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm8,%xmm8 - vpmuludq 432(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm9,%xmm9 - vpmuludq 448(%rsp),%xmm1,%xmm2 - vpaddq %xmm2,%xmm10,%xmm10 - vpmuludq 480(%rsp),%xmm1,%xmm1 - vpaddq %xmm1,%xmm11,%xmm11 - vmovdqa 160(%rsp),%xmm1 - vpaddq %xmm1,%xmm0,%xmm0 - vpunpcklqdq %xmm0,%xmm1,%xmm2 - vpunpckhqdq %xmm0,%xmm1,%xmm0 - vpmuludq 192(%rsp),%xmm2,%xmm1 - vpaddq %xmm1,%xmm13,%xmm13 - vpmuludq 224(%rsp),%xmm2,%xmm1 - vpaddq %xmm1,%xmm14,%xmm14 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm2,%xmm2 - vpmuludq 272(%rsp),%xmm2,%xmm1 - vpaddq %xmm1,%xmm6,%xmm6 - vpmuludq 288(%rsp),%xmm2,%xmm1 - vpaddq %xmm1,%xmm5,%xmm5 - vpmuludq 336(%rsp),%xmm2,%xmm1 - vpaddq %xmm1,%xmm7,%xmm7 - vpmuludq 352(%rsp),%xmm2,%xmm1 - vpaddq %xmm1,%xmm8,%xmm8 - vpmuludq 400(%rsp),%xmm2,%xmm1 - vpaddq %xmm1,%xmm9,%xmm9 - vpmuludq 416(%rsp),%xmm2,%xmm1 - vpaddq %xmm1,%xmm10,%xmm10 - vpmuludq 448(%rsp),%xmm2,%xmm1 - vpaddq %xmm1,%xmm11,%xmm11 - vpmuludq 464(%rsp),%xmm2,%xmm2 - vpaddq %xmm2,%xmm12,%xmm12 - vpmuludq 192(%rsp),%xmm0,%xmm1 - vpaddq %xmm1,%xmm14,%xmm14 - vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm0,%xmm0 - vpmuludq 240(%rsp),%xmm0,%xmm1 - vpaddq %xmm1,%xmm6,%xmm6 - vpmuludq 272(%rsp),%xmm0,%xmm1 - vpaddq %xmm1,%xmm5,%xmm5 - vpmuludq 304(%rsp),%xmm0,%xmm1 - vpaddq %xmm1,%xmm7,%xmm7 - vpmuludq 336(%rsp),%xmm0,%xmm1 - vpaddq %xmm1,%xmm8,%xmm8 - vpmuludq 368(%rsp),%xmm0,%xmm1 - vpaddq %xmm1,%xmm9,%xmm9 - vpmuludq 400(%rsp),%xmm0,%xmm1 - vpaddq %xmm1,%xmm10,%xmm10 - vpmuludq 432(%rsp),%xmm0,%xmm1 - vpaddq %xmm1,%xmm11,%xmm11 - vpmuludq 448(%rsp),%xmm0,%xmm1 - vpaddq %xmm1,%xmm12,%xmm12 - vpmuludq 480(%rsp),%xmm0,%xmm0 - vpaddq %xmm0,%xmm13,%xmm13 - vpsrlq $26,%xmm6,%xmm0 - vpaddq %xmm0,%xmm5,%xmm5 - vpand curve25519_sandy2x_m26(%rip),%xmm6,%xmm6 - vpsrlq $25,%xmm10,%xmm0 - vpaddq %xmm0,%xmm11,%xmm11 - vpand curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 - vpsrlq $25,%xmm5,%xmm0 - vpaddq %xmm0,%xmm7,%xmm7 - vpand curve25519_sandy2x_m25(%rip),%xmm5,%xmm5 - vpsrlq $26,%xmm11,%xmm0 - vpaddq %xmm0,%xmm12,%xmm12 - vpand curve25519_sandy2x_m26(%rip),%xmm11,%xmm11 - vpsrlq $26,%xmm7,%xmm0 - vpaddq %xmm0,%xmm8,%xmm8 - vpand curve25519_sandy2x_m26(%rip),%xmm7,%xmm7 - vpsrlq $25,%xmm12,%xmm0 - vpaddq %xmm0,%xmm13,%xmm13 - vpand curve25519_sandy2x_m25(%rip),%xmm12,%xmm12 - vpsrlq $25,%xmm8,%xmm0 - vpaddq %xmm0,%xmm9,%xmm9 - vpand curve25519_sandy2x_m25(%rip),%xmm8,%xmm8 - vpsrlq $26,%xmm13,%xmm0 - vpaddq %xmm0,%xmm14,%xmm14 - vpand curve25519_sandy2x_m26(%rip),%xmm13,%xmm13 - vpsrlq $26,%xmm9,%xmm0 - vpaddq 
%xmm0,%xmm10,%xmm10 - vpand curve25519_sandy2x_m26(%rip),%xmm9,%xmm9 - vpsrlq $25,%xmm14,%xmm0 - vpsllq $4,%xmm0,%xmm1 - vpaddq %xmm0,%xmm6,%xmm6 - vpsllq $1,%xmm0,%xmm0 - vpaddq %xmm0,%xmm1,%xmm1 - vpaddq %xmm1,%xmm6,%xmm6 - vpand curve25519_sandy2x_m25(%rip),%xmm14,%xmm14 - vpsrlq $25,%xmm10,%xmm0 - vpaddq %xmm0,%xmm11,%xmm11 - vpand curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 - vpsrlq $26,%xmm6,%xmm0 - vpaddq %xmm0,%xmm5,%xmm5 - vpand curve25519_sandy2x_m26(%rip),%xmm6,%xmm6 - vpunpckhqdq %xmm5,%xmm6,%xmm1 - vpunpcklqdq %xmm5,%xmm6,%xmm0 - vpunpckhqdq %xmm8,%xmm7,%xmm3 - vpunpcklqdq %xmm8,%xmm7,%xmm2 - vpunpckhqdq %xmm10,%xmm9,%xmm5 - vpunpcklqdq %xmm10,%xmm9,%xmm4 - vpunpckhqdq %xmm12,%xmm11,%xmm7 - vpunpcklqdq %xmm12,%xmm11,%xmm6 - vpunpckhqdq %xmm14,%xmm13,%xmm9 - vpunpcklqdq %xmm14,%xmm13,%xmm8 - cmp $0,%rdx - jne .Lladder_base_loop - vmovdqu %xmm1,80(%rdi) - vmovdqu %xmm0,0(%rdi) - vmovdqu %xmm3,96(%rdi) - vmovdqu %xmm2,16(%rdi) - vmovdqu %xmm5,112(%rdi) - vmovdqu %xmm4,32(%rdi) - vmovdqu %xmm7,128(%rdi) - vmovdqu %xmm6,48(%rdi) - vmovdqu %xmm9,144(%rdi) - vmovdqu %xmm8,64(%rdi) - movq 1536(%rsp),%r11 - movq 1544(%rsp),%r12 - movq 1552(%rsp),%r13 - leave - ret -ENDPROC(curve25519_sandy2x_ladder_base) diff --git a/src/crypto/curve25519-x86_64.S b/src/crypto/curve25519-x86_64.S new file mode 100644 index 0000000..2407ff4 --- /dev/null +++ b/src/crypto/curve25519-x86_64.S @@ -0,0 +1,3259 @@ +/* + * Copyright (C) 2017 Jason A. Donenfeld . All Rights Reserved. + * Based on algorithms from Tung Chou + */ + +#include + +.data +.align 16 +curve25519_sandy2x_v0_0: .quad 0, 0 +curve25519_sandy2x_v1_0: .quad 1, 0 +curve25519_sandy2x_v2_1: .quad 2, 1 +curve25519_sandy2x_v9_0: .quad 9, 0 +curve25519_sandy2x_v9_9: .quad 9, 9 +curve25519_sandy2x_v19_19: .quad 19, 19 +curve25519_sandy2x_v38_1: .quad 38, 1 +curve25519_sandy2x_v38_38: .quad 38, 38 +curve25519_sandy2x_v121666_121666: .quad 121666, 121666 +curve25519_sandy2x_m25: .quad 33554431, 33554431 +curve25519_sandy2x_m26: .quad 67108863, 67108863 +curve25519_sandy2x_subc0: .quad 0x07FFFFDA, 0x03FFFFFE +curve25519_sandy2x_subc2: .quad 0x07FFFFFE, 0x03FFFFFE +curve25519_sandy2x_REDMASK51: .quad 0x0007FFFFFFFFFFFF + +.text +.align 32 +#ifdef CONFIG_AS_AVX +ENTRY(curve25519_sandy2x_fe51_mul) + push %rbp + mov %rsp,%rbp + sub $96,%rsp + and $-32,%rsp + movq %r11,0(%rsp) + movq %r12,8(%rsp) + movq %r13,16(%rsp) + movq %r14,24(%rsp) + movq %r15,32(%rsp) + movq %rbx,40(%rsp) + movq %rbp,48(%rsp) + movq %rdi,56(%rsp) + mov %rdx,%rcx + movq 24(%rsi),%rdx + imulq $19,%rdx,%rax + movq %rax,64(%rsp) + mulq 16(%rcx) + mov %rax,%r8 + mov %rdx,%r9 + movq 32(%rsi),%rdx + imulq $19,%rdx,%rax + movq %rax,72(%rsp) + mulq 8(%rcx) + add %rax,%r8 + adc %rdx,%r9 + movq 0(%rsi),%rax + mulq 0(%rcx) + add %rax,%r8 + adc %rdx,%r9 + movq 0(%rsi),%rax + mulq 8(%rcx) + mov %rax,%r10 + mov %rdx,%r11 + movq 0(%rsi),%rax + mulq 16(%rcx) + mov %rax,%r12 + mov %rdx,%r13 + movq 0(%rsi),%rax + mulq 24(%rcx) + mov %rax,%r14 + mov %rdx,%r15 + movq 0(%rsi),%rax + mulq 32(%rcx) + mov %rax,%rbx + mov %rdx,%rbp + movq 8(%rsi),%rax + mulq 0(%rcx) + add %rax,%r10 + adc %rdx,%r11 + movq 8(%rsi),%rax + mulq 8(%rcx) + add %rax,%r12 + adc %rdx,%r13 + movq 8(%rsi),%rax + mulq 16(%rcx) + add %rax,%r14 + adc %rdx,%r15 + movq 8(%rsi),%rax + mulq 24(%rcx) + add %rax,%rbx + adc %rdx,%rbp + movq 8(%rsi),%rdx + imulq $19,%rdx,%rax + mulq 32(%rcx) + add %rax,%r8 + adc %rdx,%r9 + movq 16(%rsi),%rax + mulq 0(%rcx) + add %rax,%r12 + adc %rdx,%r13 + movq 16(%rsi),%rax + mulq 8(%rcx) + add %rax,%r14 + adc 
%rdx,%r15 + movq 16(%rsi),%rax + mulq 16(%rcx) + add %rax,%rbx + adc %rdx,%rbp + movq 16(%rsi),%rdx + imulq $19,%rdx,%rax + mulq 24(%rcx) + add %rax,%r8 + adc %rdx,%r9 + movq 16(%rsi),%rdx + imulq $19,%rdx,%rax + mulq 32(%rcx) + add %rax,%r10 + adc %rdx,%r11 + movq 24(%rsi),%rax + mulq 0(%rcx) + add %rax,%r14 + adc %rdx,%r15 + movq 24(%rsi),%rax + mulq 8(%rcx) + add %rax,%rbx + adc %rdx,%rbp + movq 64(%rsp),%rax + mulq 24(%rcx) + add %rax,%r10 + adc %rdx,%r11 + movq 64(%rsp),%rax + mulq 32(%rcx) + add %rax,%r12 + adc %rdx,%r13 + movq 32(%rsi),%rax + mulq 0(%rcx) + add %rax,%rbx + adc %rdx,%rbp + movq 72(%rsp),%rax + mulq 16(%rcx) + add %rax,%r10 + adc %rdx,%r11 + movq 72(%rsp),%rax + mulq 24(%rcx) + add %rax,%r12 + adc %rdx,%r13 + movq 72(%rsp),%rax + mulq 32(%rcx) + add %rax,%r14 + adc %rdx,%r15 + movq curve25519_sandy2x_REDMASK51(%rip),%rsi + shld $13,%r8,%r9 + and %rsi,%r8 + shld $13,%r10,%r11 + and %rsi,%r10 + add %r9,%r10 + shld $13,%r12,%r13 + and %rsi,%r12 + add %r11,%r12 + shld $13,%r14,%r15 + and %rsi,%r14 + add %r13,%r14 + shld $13,%rbx,%rbp + and %rsi,%rbx + add %r15,%rbx + imulq $19,%rbp,%rdx + add %rdx,%r8 + mov %r8,%rdx + shr $51,%rdx + add %r10,%rdx + mov %rdx,%rcx + shr $51,%rdx + and %rsi,%r8 + add %r12,%rdx + mov %rdx,%r9 + shr $51,%rdx + and %rsi,%rcx + add %r14,%rdx + mov %rdx,%rax + shr $51,%rdx + and %rsi,%r9 + add %rbx,%rdx + mov %rdx,%r10 + shr $51,%rdx + and %rsi,%rax + imulq $19,%rdx,%rdx + add %rdx,%r8 + and %rsi,%r10 + movq %r8,0(%rdi) + movq %rcx,8(%rdi) + movq %r9,16(%rdi) + movq %rax,24(%rdi) + movq %r10,32(%rdi) + movq 0(%rsp),%r11 + movq 8(%rsp),%r12 + movq 16(%rsp),%r13 + movq 24(%rsp),%r14 + movq 32(%rsp),%r15 + movq 40(%rsp),%rbx + movq 48(%rsp),%rbp + leave + ret +ENDPROC(curve25519_sandy2x_fe51_mul) + +.align 32 +ENTRY(curve25519_sandy2x_fe51_nsquare) + push %rbp + mov %rsp,%rbp + sub $64,%rsp + and $-32,%rsp + movq %r11,0(%rsp) + movq %r12,8(%rsp) + movq %r13,16(%rsp) + movq %r14,24(%rsp) + movq %r15,32(%rsp) + movq %rbx,40(%rsp) + movq %rbp,48(%rsp) + movq 0(%rsi),%rcx + movq 8(%rsi),%r8 + movq 16(%rsi),%r9 + movq 24(%rsi),%rax + movq 32(%rsi),%rsi + movq %r9,16(%rdi) + movq %rax,24(%rdi) + movq %rsi,32(%rdi) + mov %rdx,%rsi + + .align 16 + .Lloop: + sub $1,%rsi + mov %rcx,%rax + mul %rcx + add %rcx,%rcx + mov %rax,%r9 + mov %rdx,%r10 + mov %rcx,%rax + mul %r8 + mov %rax,%r11 + mov %rdx,%r12 + mov %rcx,%rax + mulq 16(%rdi) + mov %rax,%r13 + mov %rdx,%r14 + mov %rcx,%rax + mulq 24(%rdi) + mov %rax,%r15 + mov %rdx,%rbx + mov %rcx,%rax + mulq 32(%rdi) + mov %rax,%rcx + mov %rdx,%rbp + mov %r8,%rax + mul %r8 + add %r8,%r8 + add %rax,%r13 + adc %rdx,%r14 + mov %r8,%rax + mulq 16(%rdi) + add %rax,%r15 + adc %rdx,%rbx + mov %r8,%rax + imulq $19, %r8,%r8 + mulq 24(%rdi) + add %rax,%rcx + adc %rdx,%rbp + mov %r8,%rax + mulq 32(%rdi) + add %rax,%r9 + adc %rdx,%r10 + movq 16(%rdi),%rax + mulq 16(%rdi) + add %rax,%rcx + adc %rdx,%rbp + shld $13,%rcx,%rbp + movq 16(%rdi),%rax + imulq $38, %rax,%rax + mulq 24(%rdi) + add %rax,%r9 + adc %rdx,%r10 + shld $13,%r9,%r10 + movq 16(%rdi),%rax + imulq $38, %rax,%rax + mulq 32(%rdi) + add %rax,%r11 + adc %rdx,%r12 + movq 24(%rdi),%rax + imulq $19, %rax,%rax + mulq 24(%rdi) + add %rax,%r11 + adc %rdx,%r12 + shld $13,%r11,%r12 + movq 24(%rdi),%rax + imulq $38, %rax,%rax + mulq 32(%rdi) + add %rax,%r13 + adc %rdx,%r14 + shld $13,%r13,%r14 + movq 32(%rdi),%rax + imulq $19, %rax,%rax + mulq 32(%rdi) + add %rax,%r15 + adc %rdx,%rbx + shld $13,%r15,%rbx + movq curve25519_sandy2x_REDMASK51(%rip),%rdx + and %rdx,%rcx + add 
%rbx,%rcx + and %rdx,%r9 + and %rdx,%r11 + add %r10,%r11 + and %rdx,%r13 + add %r12,%r13 + and %rdx,%r15 + add %r14,%r15 + imulq $19, %rbp,%rbp + lea (%r9,%rbp),%r9 + mov %r9,%rax + shr $51,%r9 + add %r11,%r9 + and %rdx,%rax + mov %r9,%r8 + shr $51,%r9 + add %r13,%r9 + and %rdx,%r8 + mov %r9,%r10 + shr $51,%r9 + add %r15,%r9 + and %rdx,%r10 + movq %r10,16(%rdi) + mov %r9,%r10 + shr $51,%r9 + add %rcx,%r9 + and %rdx,%r10 + movq %r10,24(%rdi) + mov %r9,%r10 + shr $51,%r9 + imulq $19, %r9,%r9 + lea (%rax,%r9),%rcx + and %rdx,%r10 + movq %r10,32(%rdi) + cmp $0,%rsi + jne .Lloop + + movq %rcx,0(%rdi) + movq %r8,8(%rdi) + movq 0(%rsp),%r11 + movq 8(%rsp),%r12 + movq 16(%rsp),%r13 + movq 24(%rsp),%r14 + movq 32(%rsp),%r15 + movq 40(%rsp),%rbx + movq 48(%rsp),%rbp + leave + ret +ENDPROC(curve25519_sandy2x_fe51_nsquare) + +.align 32 +ENTRY(curve25519_sandy2x_fe51_pack) + push %rbp + mov %rsp,%rbp + sub $32,%rsp + and $-32,%rsp + movq %r11,0(%rsp) + movq %r12,8(%rsp) + movq 0(%rsi),%rdx + movq 8(%rsi),%rcx + movq 16(%rsi),%r8 + movq 24(%rsi),%r9 + movq 32(%rsi),%rsi + movq curve25519_sandy2x_REDMASK51(%rip),%rax + lea -18(%rax),%r10 + mov $3,%r11 + + .align 16 + .Lreduceloop: + mov %rdx,%r12 + shr $51,%r12 + and %rax,%rdx + add %r12,%rcx + mov %rcx,%r12 + shr $51,%r12 + and %rax,%rcx + add %r12,%r8 + mov %r8,%r12 + shr $51,%r12 + and %rax,%r8 + add %r12,%r9 + mov %r9,%r12 + shr $51,%r12 + and %rax,%r9 + add %r12,%rsi + mov %rsi,%r12 + shr $51,%r12 + and %rax,%rsi + imulq $19, %r12,%r12 + add %r12,%rdx + sub $1,%r11 + ja .Lreduceloop + + mov $1,%r12 + cmp %r10,%rdx + cmovl %r11,%r12 + cmp %rax,%rcx + cmovne %r11,%r12 + cmp %rax,%r8 + cmovne %r11,%r12 + cmp %rax,%r9 + cmovne %r11,%r12 + cmp %rax,%rsi + cmovne %r11,%r12 + neg %r12 + and %r12,%rax + and %r12,%r10 + sub %r10,%rdx + sub %rax,%rcx + sub %rax,%r8 + sub %rax,%r9 + sub %rax,%rsi + mov %rdx,%rax + and $0xFF,%eax + movb %al,0(%rdi) + mov %rdx,%rax + shr $8,%rax + and $0xFF,%eax + movb %al,1(%rdi) + mov %rdx,%rax + shr $16,%rax + and $0xFF,%eax + movb %al,2(%rdi) + mov %rdx,%rax + shr $24,%rax + and $0xFF,%eax + movb %al,3(%rdi) + mov %rdx,%rax + shr $32,%rax + and $0xFF,%eax + movb %al,4(%rdi) + mov %rdx,%rax + shr $40,%rax + and $0xFF,%eax + movb %al,5(%rdi) + mov %rdx,%rdx + shr $48,%rdx + mov %rcx,%rax + shl $3,%rax + and $0xF8,%eax + xor %rdx,%rax + movb %al,6(%rdi) + mov %rcx,%rdx + shr $5,%rdx + and $0xFF,%edx + movb %dl,7(%rdi) + mov %rcx,%rdx + shr $13,%rdx + and $0xFF,%edx + movb %dl,8(%rdi) + mov %rcx,%rdx + shr $21,%rdx + and $0xFF,%edx + movb %dl,9(%rdi) + mov %rcx,%rdx + shr $29,%rdx + and $0xFF,%edx + movb %dl,10(%rdi) + mov %rcx,%rdx + shr $37,%rdx + and $0xFF,%edx + movb %dl,11(%rdi) + mov %rcx,%rdx + shr $45,%rdx + mov %r8,%rcx + shl $6,%rcx + and $0xC0,%ecx + xor %rdx,%rcx + movb %cl,12(%rdi) + mov %r8,%rdx + shr $2,%rdx + and $0xFF,%edx + movb %dl,13(%rdi) + mov %r8,%rdx + shr $10,%rdx + and $0xFF,%edx + movb %dl,14(%rdi) + mov %r8,%rdx + shr $18,%rdx + and $0xFF,%edx + movb %dl,15(%rdi) + mov %r8,%rdx + shr $26,%rdx + and $0xFF,%edx + movb %dl,16(%rdi) + mov %r8,%rdx + shr $34,%rdx + and $0xFF,%edx + movb %dl,17(%rdi) + mov %r8,%rdx + shr $42,%rdx + movb %dl,18(%rdi) + mov %r8,%rdx + shr $50,%rdx + mov %r9,%rcx + shl $1,%rcx + and $0xFE,%ecx + xor %rdx,%rcx + movb %cl,19(%rdi) + mov %r9,%rdx + shr $7,%rdx + and $0xFF,%edx + movb %dl,20(%rdi) + mov %r9,%rdx + shr $15,%rdx + and $0xFF,%edx + movb %dl,21(%rdi) + mov %r9,%rdx + shr $23,%rdx + and $0xFF,%edx + movb %dl,22(%rdi) + mov %r9,%rdx + shr $31,%rdx + and $0xFF,%edx + 
movb %dl,23(%rdi) + mov %r9,%rdx + shr $39,%rdx + and $0xFF,%edx + movb %dl,24(%rdi) + mov %r9,%rdx + shr $47,%rdx + mov %rsi,%rcx + shl $4,%rcx + and $0xF0,%ecx + xor %rdx,%rcx + movb %cl,25(%rdi) + mov %rsi,%rdx + shr $4,%rdx + and $0xFF,%edx + movb %dl,26(%rdi) + mov %rsi,%rdx + shr $12,%rdx + and $0xFF,%edx + movb %dl,27(%rdi) + mov %rsi,%rdx + shr $20,%rdx + and $0xFF,%edx + movb %dl,28(%rdi) + mov %rsi,%rdx + shr $28,%rdx + and $0xFF,%edx + movb %dl,29(%rdi) + mov %rsi,%rdx + shr $36,%rdx + and $0xFF,%edx + movb %dl,30(%rdi) + mov %rsi,%rsi + shr $44,%rsi + movb %sil,31(%rdi) + movq 0(%rsp),%r11 + movq 8(%rsp),%r12 + leave + ret +ENDPROC(curve25519_sandy2x_fe51_pack) + +.align 32 +ENTRY(curve25519_sandy2x_ladder) + push %rbp + mov %rsp,%rbp + sub $1856,%rsp + and $-32,%rsp + movq %r11,1824(%rsp) + movq %r12,1832(%rsp) + movq %r13,1840(%rsp) + movq %r14,1848(%rsp) + vmovdqa curve25519_sandy2x_v0_0(%rip),%xmm0 + vmovdqa curve25519_sandy2x_v1_0(%rip),%xmm1 + vmovdqu 0(%rdi),%xmm2 + vmovdqa %xmm2,0(%rsp) + vmovdqu 16(%rdi),%xmm2 + vmovdqa %xmm2,16(%rsp) + vmovdqu 32(%rdi),%xmm2 + vmovdqa %xmm2,32(%rsp) + vmovdqu 48(%rdi),%xmm2 + vmovdqa %xmm2,48(%rsp) + vmovdqu 64(%rdi),%xmm2 + vmovdqa %xmm2,64(%rsp) + vmovdqa %xmm1,80(%rsp) + vmovdqa %xmm0,96(%rsp) + vmovdqa %xmm0,112(%rsp) + vmovdqa %xmm0,128(%rsp) + vmovdqa %xmm0,144(%rsp) + vmovdqa %xmm1,%xmm0 + vpxor %xmm1,%xmm1,%xmm1 + vpxor %xmm2,%xmm2,%xmm2 + vpxor %xmm3,%xmm3,%xmm3 + vpxor %xmm4,%xmm4,%xmm4 + vpxor %xmm5,%xmm5,%xmm5 + vpxor %xmm6,%xmm6,%xmm6 + vpxor %xmm7,%xmm7,%xmm7 + vpxor %xmm8,%xmm8,%xmm8 + vpxor %xmm9,%xmm9,%xmm9 + vmovdqu 0(%rdi),%xmm10 + vmovdqa %xmm10,160(%rsp) + vmovdqu 16(%rdi),%xmm10 + vmovdqa %xmm10,176(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm10,%xmm10 + vmovdqa %xmm10,192(%rsp) + vmovdqu 32(%rdi),%xmm10 + vmovdqa %xmm10,208(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm10,%xmm10 + vmovdqa %xmm10,224(%rsp) + vmovdqu 48(%rdi),%xmm10 + vmovdqa %xmm10,240(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm10,%xmm10 + vmovdqa %xmm10,256(%rsp) + vmovdqu 64(%rdi),%xmm10 + vmovdqa %xmm10,272(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm10,%xmm10 + vmovdqa %xmm10,288(%rsp) + vmovdqu 8(%rdi),%xmm10 + vpmuludq curve25519_sandy2x_v2_1(%rip),%xmm10,%xmm10 + vmovdqa %xmm10,304(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm10,%xmm10 + vmovdqa %xmm10,320(%rsp) + vmovdqu 24(%rdi),%xmm10 + vpmuludq curve25519_sandy2x_v2_1(%rip),%xmm10,%xmm10 + vmovdqa %xmm10,336(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm10,%xmm10 + vmovdqa %xmm10,352(%rsp) + vmovdqu 40(%rdi),%xmm10 + vpmuludq curve25519_sandy2x_v2_1(%rip),%xmm10,%xmm10 + vmovdqa %xmm10,368(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm10,%xmm10 + vmovdqa %xmm10,384(%rsp) + vmovdqu 56(%rdi),%xmm10 + vpmuludq curve25519_sandy2x_v2_1(%rip),%xmm10,%xmm10 + vmovdqa %xmm10,400(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm10,%xmm10 + vmovdqa %xmm10,416(%rsp) + vmovdqu 0(%rdi),%xmm10 + vmovdqu 64(%rdi),%xmm11 + vblendps $12, %xmm11, %xmm10, %xmm10 + vpshufd $2,%xmm10,%xmm10 + vpmuludq curve25519_sandy2x_v38_1(%rip),%xmm10,%xmm10 + vmovdqa %xmm10,432(%rsp) + movq 0(%rsi),%rdx + movq 8(%rsi),%rcx + movq 16(%rsi),%r8 + movq 24(%rsi),%r9 + shrd $1,%rcx,%rdx + shrd $1,%r8,%rcx + shrd $1,%r9,%r8 + shr $1,%r9 + xorq 0(%rsi),%rdx + xorq 8(%rsi),%rcx + xorq 16(%rsi),%r8 + xorq 24(%rsi),%r9 + leaq 800(%rsp),%rsi + mov $64,%rax + + .align 16 + .Lladder_small_loop: + mov %rdx,%r10 + mov %rcx,%r11 + mov %r8,%r12 + mov %r9,%r13 + shr $1,%rdx + 
shr $1,%rcx + shr $1,%r8 + shr $1,%r9 + and $1,%r10d + and $1,%r11d + and $1,%r12d + and $1,%r13d + neg %r10 + neg %r11 + neg %r12 + neg %r13 + movl %r10d,0(%rsi) + movl %r11d,256(%rsi) + movl %r12d,512(%rsi) + movl %r13d,768(%rsi) + add $4,%rsi + sub $1,%rax + jne .Lladder_small_loop + mov $255,%rdx + add $760,%rsi + + .align 16 + .Lladder_loop: + sub $1,%rdx + vbroadcastss 0(%rsi),%xmm10 + sub $4,%rsi + vmovdqa 0(%rsp),%xmm11 + vmovdqa 80(%rsp),%xmm12 + vpxor %xmm11,%xmm0,%xmm13 + vpand %xmm10,%xmm13,%xmm13 + vpxor %xmm13,%xmm0,%xmm0 + vpxor %xmm13,%xmm11,%xmm11 + vpxor %xmm12,%xmm1,%xmm13 + vpand %xmm10,%xmm13,%xmm13 + vpxor %xmm13,%xmm1,%xmm1 + vpxor %xmm13,%xmm12,%xmm12 + vmovdqa 16(%rsp),%xmm13 + vmovdqa 96(%rsp),%xmm14 + vpxor %xmm13,%xmm2,%xmm15 + vpand %xmm10,%xmm15,%xmm15 + vpxor %xmm15,%xmm2,%xmm2 + vpxor %xmm15,%xmm13,%xmm13 + vpxor %xmm14,%xmm3,%xmm15 + vpand %xmm10,%xmm15,%xmm15 + vpxor %xmm15,%xmm3,%xmm3 + vpxor %xmm15,%xmm14,%xmm14 + vmovdqa %xmm13,0(%rsp) + vmovdqa %xmm14,16(%rsp) + vmovdqa 32(%rsp),%xmm13 + vmovdqa 112(%rsp),%xmm14 + vpxor %xmm13,%xmm4,%xmm15 + vpand %xmm10,%xmm15,%xmm15 + vpxor %xmm15,%xmm4,%xmm4 + vpxor %xmm15,%xmm13,%xmm13 + vpxor %xmm14,%xmm5,%xmm15 + vpand %xmm10,%xmm15,%xmm15 + vpxor %xmm15,%xmm5,%xmm5 + vpxor %xmm15,%xmm14,%xmm14 + vmovdqa %xmm13,32(%rsp) + vmovdqa %xmm14,80(%rsp) + vmovdqa 48(%rsp),%xmm13 + vmovdqa 128(%rsp),%xmm14 + vpxor %xmm13,%xmm6,%xmm15 + vpand %xmm10,%xmm15,%xmm15 + vpxor %xmm15,%xmm6,%xmm6 + vpxor %xmm15,%xmm13,%xmm13 + vpxor %xmm14,%xmm7,%xmm15 + vpand %xmm10,%xmm15,%xmm15 + vpxor %xmm15,%xmm7,%xmm7 + vpxor %xmm15,%xmm14,%xmm14 + vmovdqa %xmm13,48(%rsp) + vmovdqa %xmm14,96(%rsp) + vmovdqa 64(%rsp),%xmm13 + vmovdqa 144(%rsp),%xmm14 + vpxor %xmm13,%xmm8,%xmm15 + vpand %xmm10,%xmm15,%xmm15 + vpxor %xmm15,%xmm8,%xmm8 + vpxor %xmm15,%xmm13,%xmm13 + vpxor %xmm14,%xmm9,%xmm15 + vpand %xmm10,%xmm15,%xmm15 + vpxor %xmm15,%xmm9,%xmm9 + vpxor %xmm15,%xmm14,%xmm14 + vmovdqa %xmm13,64(%rsp) + vmovdqa %xmm14,112(%rsp) + vpaddq curve25519_sandy2x_subc0(%rip),%xmm11,%xmm10 + vpsubq %xmm12,%xmm10,%xmm10 + vpaddq %xmm12,%xmm11,%xmm11 + vpunpckhqdq %xmm10,%xmm11,%xmm12 + vpunpcklqdq %xmm10,%xmm11,%xmm10 + vpaddq %xmm1,%xmm0,%xmm11 + vpaddq curve25519_sandy2x_subc0(%rip),%xmm0,%xmm0 + vpsubq %xmm1,%xmm0,%xmm0 + vpunpckhqdq %xmm11,%xmm0,%xmm1 + vpunpcklqdq %xmm11,%xmm0,%xmm0 + vpmuludq %xmm0,%xmm10,%xmm11 + vpmuludq %xmm1,%xmm10,%xmm13 + vmovdqa %xmm1,128(%rsp) + vpaddq %xmm1,%xmm1,%xmm1 + vpmuludq %xmm0,%xmm12,%xmm14 + vmovdqa %xmm0,144(%rsp) + vpaddq %xmm14,%xmm13,%xmm13 + vpmuludq %xmm1,%xmm12,%xmm0 + vmovdqa %xmm1,448(%rsp) + vpaddq %xmm3,%xmm2,%xmm1 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm2,%xmm2 + vpsubq %xmm3,%xmm2,%xmm2 + vpunpckhqdq %xmm1,%xmm2,%xmm3 + vpunpcklqdq %xmm1,%xmm2,%xmm1 + vpmuludq %xmm1,%xmm10,%xmm2 + vpaddq %xmm2,%xmm0,%xmm0 + vpmuludq %xmm3,%xmm10,%xmm2 + vmovdqa %xmm3,464(%rsp) + vpaddq %xmm3,%xmm3,%xmm3 + vpmuludq %xmm1,%xmm12,%xmm14 + vmovdqa %xmm1,480(%rsp) + vpaddq %xmm14,%xmm2,%xmm2 + vpmuludq %xmm3,%xmm12,%xmm1 + vmovdqa %xmm3,496(%rsp) + vpaddq %xmm5,%xmm4,%xmm3 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm4,%xmm4 + vpsubq %xmm5,%xmm4,%xmm4 + vpunpckhqdq %xmm3,%xmm4,%xmm5 + vpunpcklqdq %xmm3,%xmm4,%xmm3 + vpmuludq %xmm3,%xmm10,%xmm4 + vpaddq %xmm4,%xmm1,%xmm1 + vpmuludq %xmm5,%xmm10,%xmm4 + vmovdqa %xmm5,512(%rsp) + vpaddq %xmm5,%xmm5,%xmm5 + vpmuludq %xmm3,%xmm12,%xmm14 + vmovdqa %xmm3,528(%rsp) + vpaddq %xmm14,%xmm4,%xmm4 + vpaddq %xmm7,%xmm6,%xmm3 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm6,%xmm6 + 
vpsubq %xmm7,%xmm6,%xmm6 + vpunpckhqdq %xmm3,%xmm6,%xmm7 + vpunpcklqdq %xmm3,%xmm6,%xmm3 + vpmuludq %xmm3,%xmm10,%xmm6 + vpmuludq %xmm5,%xmm12,%xmm14 + vmovdqa %xmm5,544(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm5,%xmm5 + vmovdqa %xmm5,560(%rsp) + vpaddq %xmm14,%xmm6,%xmm6 + vpmuludq %xmm7,%xmm10,%xmm5 + vmovdqa %xmm7,576(%rsp) + vpaddq %xmm7,%xmm7,%xmm7 + vpmuludq %xmm3,%xmm12,%xmm14 + vmovdqa %xmm3,592(%rsp) + vpaddq %xmm14,%xmm5,%xmm5 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 + vmovdqa %xmm3,608(%rsp) + vpaddq %xmm9,%xmm8,%xmm3 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm8,%xmm8 + vpsubq %xmm9,%xmm8,%xmm8 + vpunpckhqdq %xmm3,%xmm8,%xmm9 + vpunpcklqdq %xmm3,%xmm8,%xmm3 + vmovdqa %xmm3,624(%rsp) + vpmuludq %xmm7,%xmm12,%xmm8 + vmovdqa %xmm7,640(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm7,%xmm7 + vmovdqa %xmm7,656(%rsp) + vpmuludq %xmm3,%xmm10,%xmm7 + vpaddq %xmm7,%xmm8,%xmm8 + vpmuludq %xmm9,%xmm10,%xmm7 + vmovdqa %xmm9,672(%rsp) + vpaddq %xmm9,%xmm9,%xmm9 + vpmuludq %xmm3,%xmm12,%xmm10 + vpaddq %xmm10,%xmm7,%xmm7 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 + vmovdqa %xmm3,688(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm12,%xmm12 + vpmuludq %xmm9,%xmm12,%xmm3 + vmovdqa %xmm9,704(%rsp) + vpaddq %xmm3,%xmm11,%xmm11 + vmovdqa 0(%rsp),%xmm3 + vmovdqa 16(%rsp),%xmm9 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm10 + vpsubq %xmm9,%xmm10,%xmm10 + vpaddq %xmm9,%xmm3,%xmm3 + vpunpckhqdq %xmm10,%xmm3,%xmm9 + vpunpcklqdq %xmm10,%xmm3,%xmm3 + vpmuludq 144(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm0,%xmm0 + vpmuludq 128(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm2,%xmm2 + vpmuludq 480(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm1,%xmm1 + vpmuludq 464(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm4,%xmm4 + vpmuludq 528(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm6,%xmm6 + vpmuludq 512(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm5,%xmm5 + vpmuludq 592(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm8,%xmm8 + vpmuludq 576(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm7,%xmm7 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 + vpmuludq 624(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm11,%xmm11 + vpmuludq 672(%rsp),%xmm3,%xmm3 + vpaddq %xmm3,%xmm13,%xmm13 + vpmuludq 144(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm2,%xmm2 + vpmuludq 448(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm1,%xmm1 + vpmuludq 480(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm4,%xmm4 + vpmuludq 496(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm6,%xmm6 + vpmuludq 528(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm5,%xmm5 + vpmuludq 544(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm8,%xmm8 + vpmuludq 592(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm7,%xmm7 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm9,%xmm9 + vpmuludq 640(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm11,%xmm11 + vpmuludq 624(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm13,%xmm13 + vpmuludq 704(%rsp),%xmm9,%xmm9 + vpaddq %xmm9,%xmm0,%xmm0 + vmovdqa 32(%rsp),%xmm3 + vmovdqa 80(%rsp),%xmm9 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm10 + vpsubq %xmm9,%xmm10,%xmm10 + vpaddq %xmm9,%xmm3,%xmm3 + vpunpckhqdq %xmm10,%xmm3,%xmm9 + vpunpcklqdq %xmm10,%xmm3,%xmm3 + vpmuludq 144(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm1,%xmm1 + vpmuludq 128(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm4,%xmm4 + vpmuludq 480(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm6,%xmm6 + vpmuludq 464(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm5,%xmm5 + vpmuludq 528(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm8,%xmm8 + vpmuludq 512(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm7,%xmm7 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 + vpmuludq 592(%rsp),%xmm3,%xmm10 + vpaddq 
%xmm10,%xmm11,%xmm11 + vpmuludq 576(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm13,%xmm13 + vpmuludq 624(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm0,%xmm0 + vpmuludq 672(%rsp),%xmm3,%xmm3 + vpaddq %xmm3,%xmm2,%xmm2 + vpmuludq 144(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm4,%xmm4 + vpmuludq 448(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm6,%xmm6 + vpmuludq 480(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm5,%xmm5 + vpmuludq 496(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm8,%xmm8 + vpmuludq 528(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm7,%xmm7 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm9,%xmm9 + vpmuludq 544(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm11,%xmm11 + vpmuludq 592(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm13,%xmm13 + vpmuludq 640(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm0,%xmm0 + vpmuludq 624(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm2,%xmm2 + vpmuludq 704(%rsp),%xmm9,%xmm9 + vpaddq %xmm9,%xmm1,%xmm1 + vmovdqa 48(%rsp),%xmm3 + vmovdqa 96(%rsp),%xmm9 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm10 + vpsubq %xmm9,%xmm10,%xmm10 + vpaddq %xmm9,%xmm3,%xmm3 + vpunpckhqdq %xmm10,%xmm3,%xmm9 + vpunpcklqdq %xmm10,%xmm3,%xmm3 + vpmuludq 144(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm6,%xmm6 + vpmuludq 128(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm5,%xmm5 + vpmuludq 480(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm8,%xmm8 + vpmuludq 464(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm7,%xmm7 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 + vpmuludq 528(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm11,%xmm11 + vpmuludq 512(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm13,%xmm13 + vpmuludq 592(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm0,%xmm0 + vpmuludq 576(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm2,%xmm2 + vpmuludq 624(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm1,%xmm1 + vpmuludq 672(%rsp),%xmm3,%xmm3 + vpaddq %xmm3,%xmm4,%xmm4 + vpmuludq 144(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm5,%xmm5 + vpmuludq 448(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm8,%xmm8 + vpmuludq 480(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm7,%xmm7 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm9,%xmm9 + vpmuludq 496(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm11,%xmm11 + vpmuludq 528(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm13,%xmm13 + vpmuludq 544(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm0,%xmm0 + vpmuludq 592(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm2,%xmm2 + vpmuludq 640(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm1,%xmm1 + vpmuludq 624(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm4,%xmm4 + vpmuludq 704(%rsp),%xmm9,%xmm9 + vpaddq %xmm9,%xmm6,%xmm6 + vmovdqa 64(%rsp),%xmm3 + vmovdqa 112(%rsp),%xmm9 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm10 + vpsubq %xmm9,%xmm10,%xmm10 + vpaddq %xmm9,%xmm3,%xmm3 + vpunpckhqdq %xmm10,%xmm3,%xmm9 + vpunpcklqdq %xmm10,%xmm3,%xmm3 + vpmuludq 144(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm8,%xmm8 + vpmuludq 128(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm7,%xmm7 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 + vpmuludq 480(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm11,%xmm11 + vpmuludq 464(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm13,%xmm13 + vpmuludq 528(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm0,%xmm0 + vpmuludq 512(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm2,%xmm2 + vpmuludq 592(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm1,%xmm1 + vpmuludq 576(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm4,%xmm4 + vpmuludq 624(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm6,%xmm6 + vpmuludq 672(%rsp),%xmm3,%xmm3 + vpaddq %xmm3,%xmm5,%xmm5 + vpmuludq 144(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm7,%xmm7 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm9,%xmm9 + vpmuludq 448(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm11,%xmm11 + vpmuludq 480(%rsp),%xmm9,%xmm3 + vpaddq 
%xmm3,%xmm13,%xmm13 + vpmuludq 496(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm0,%xmm0 + vpmuludq 528(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm2,%xmm2 + vpmuludq 544(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm1,%xmm1 + vpmuludq 592(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm4,%xmm4 + vpmuludq 640(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm6,%xmm6 + vpmuludq 624(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm5,%xmm5 + vpmuludq 704(%rsp),%xmm9,%xmm9 + vpaddq %xmm9,%xmm8,%xmm8 + vpsrlq $25,%xmm4,%xmm3 + vpaddq %xmm3,%xmm6,%xmm6 + vpand curve25519_sandy2x_m25(%rip),%xmm4,%xmm4 + vpsrlq $26,%xmm11,%xmm3 + vpaddq %xmm3,%xmm13,%xmm13 + vpand curve25519_sandy2x_m26(%rip),%xmm11,%xmm11 + vpsrlq $26,%xmm6,%xmm3 + vpaddq %xmm3,%xmm5,%xmm5 + vpand curve25519_sandy2x_m26(%rip),%xmm6,%xmm6 + vpsrlq $25,%xmm13,%xmm3 + vpaddq %xmm3,%xmm0,%xmm0 + vpand curve25519_sandy2x_m25(%rip),%xmm13,%xmm13 + vpsrlq $25,%xmm5,%xmm3 + vpaddq %xmm3,%xmm8,%xmm8 + vpand curve25519_sandy2x_m25(%rip),%xmm5,%xmm5 + vpsrlq $26,%xmm0,%xmm3 + vpaddq %xmm3,%xmm2,%xmm2 + vpand curve25519_sandy2x_m26(%rip),%xmm0,%xmm0 + vpsrlq $26,%xmm8,%xmm3 + vpaddq %xmm3,%xmm7,%xmm7 + vpand curve25519_sandy2x_m26(%rip),%xmm8,%xmm8 + vpsrlq $25,%xmm2,%xmm3 + vpaddq %xmm3,%xmm1,%xmm1 + vpand curve25519_sandy2x_m25(%rip),%xmm2,%xmm2 + vpsrlq $25,%xmm7,%xmm3 + vpsllq $4,%xmm3,%xmm9 + vpaddq %xmm3,%xmm11,%xmm11 + vpsllq $1,%xmm3,%xmm3 + vpaddq %xmm3,%xmm9,%xmm9 + vpaddq %xmm9,%xmm11,%xmm11 + vpand curve25519_sandy2x_m25(%rip),%xmm7,%xmm7 + vpsrlq $26,%xmm1,%xmm3 + vpaddq %xmm3,%xmm4,%xmm4 + vpand curve25519_sandy2x_m26(%rip),%xmm1,%xmm1 + vpsrlq $26,%xmm11,%xmm3 + vpaddq %xmm3,%xmm13,%xmm13 + vpand curve25519_sandy2x_m26(%rip),%xmm11,%xmm11 + vpsrlq $25,%xmm4,%xmm3 + vpaddq %xmm3,%xmm6,%xmm6 + vpand curve25519_sandy2x_m25(%rip),%xmm4,%xmm4 + vpunpcklqdq %xmm13,%xmm11,%xmm3 + vpunpckhqdq %xmm13,%xmm11,%xmm9 + vpaddq curve25519_sandy2x_subc0(%rip),%xmm9,%xmm10 + vpsubq %xmm3,%xmm10,%xmm10 + vpaddq %xmm9,%xmm3,%xmm3 + vpunpckhqdq %xmm3,%xmm10,%xmm9 + vpunpcklqdq %xmm3,%xmm10,%xmm10 + vpmuludq %xmm10,%xmm10,%xmm3 + vpaddq %xmm10,%xmm10,%xmm10 + vpmuludq %xmm9,%xmm10,%xmm11 + vpunpcklqdq %xmm2,%xmm0,%xmm12 + vpunpckhqdq %xmm2,%xmm0,%xmm0 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm0,%xmm2 + vpsubq %xmm12,%xmm2,%xmm2 + vpaddq %xmm0,%xmm12,%xmm12 + vpunpckhqdq %xmm12,%xmm2,%xmm0 + vpunpcklqdq %xmm12,%xmm2,%xmm2 + vpmuludq %xmm2,%xmm10,%xmm12 + vpaddq %xmm9,%xmm9,%xmm13 + vpmuludq %xmm13,%xmm9,%xmm9 + vpaddq %xmm9,%xmm12,%xmm12 + vpmuludq %xmm0,%xmm10,%xmm9 + vpmuludq %xmm2,%xmm13,%xmm14 + vpaddq %xmm14,%xmm9,%xmm9 + vpunpcklqdq %xmm4,%xmm1,%xmm14 + vpunpckhqdq %xmm4,%xmm1,%xmm1 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm1,%xmm4 + vpsubq %xmm14,%xmm4,%xmm4 + vpaddq %xmm1,%xmm14,%xmm14 + vpunpckhqdq %xmm14,%xmm4,%xmm1 + vpunpcklqdq %xmm14,%xmm4,%xmm4 + vmovdqa %xmm1,0(%rsp) + vpaddq %xmm1,%xmm1,%xmm1 + vmovdqa %xmm1,16(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 + vmovdqa %xmm1,32(%rsp) + vpmuludq %xmm4,%xmm10,%xmm1 + vpmuludq %xmm2,%xmm2,%xmm14 + vpaddq %xmm14,%xmm1,%xmm1 + vpmuludq 0(%rsp),%xmm10,%xmm14 + vpmuludq %xmm4,%xmm13,%xmm15 + vpaddq %xmm15,%xmm14,%xmm14 + vpunpcklqdq %xmm5,%xmm6,%xmm15 + vpunpckhqdq %xmm5,%xmm6,%xmm5 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm5,%xmm6 + vpsubq %xmm15,%xmm6,%xmm6 + vpaddq %xmm5,%xmm15,%xmm15 + vpunpckhqdq %xmm15,%xmm6,%xmm5 + vpunpcklqdq %xmm15,%xmm6,%xmm6 + vmovdqa %xmm6,48(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm6,%xmm6 + vmovdqa %xmm6,64(%rsp) + vmovdqa %xmm5,80(%rsp) + vpmuludq 
curve25519_sandy2x_v38_38(%rip),%xmm5,%xmm5 + vmovdqa %xmm5,96(%rsp) + vpmuludq 48(%rsp),%xmm10,%xmm5 + vpaddq %xmm0,%xmm0,%xmm6 + vpmuludq %xmm6,%xmm0,%xmm0 + vpaddq %xmm0,%xmm5,%xmm5 + vpmuludq 80(%rsp),%xmm10,%xmm0 + vpmuludq %xmm4,%xmm6,%xmm15 + vpaddq %xmm15,%xmm0,%xmm0 + vpmuludq %xmm6,%xmm13,%xmm15 + vpaddq %xmm15,%xmm1,%xmm1 + vpmuludq %xmm6,%xmm2,%xmm15 + vpaddq %xmm15,%xmm14,%xmm14 + vpunpcklqdq %xmm7,%xmm8,%xmm15 + vpunpckhqdq %xmm7,%xmm8,%xmm7 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm7,%xmm8 + vpsubq %xmm15,%xmm8,%xmm8 + vpaddq %xmm7,%xmm15,%xmm15 + vpunpckhqdq %xmm15,%xmm8,%xmm7 + vpunpcklqdq %xmm15,%xmm8,%xmm8 + vmovdqa %xmm8,112(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm8,%xmm8 + vmovdqa %xmm8,448(%rsp) + vpmuludq 112(%rsp),%xmm10,%xmm8 + vpmuludq %xmm7,%xmm10,%xmm10 + vpmuludq curve25519_sandy2x_v38_38(%rip),%xmm7,%xmm15 + vpmuludq %xmm15,%xmm7,%xmm7 + vpaddq %xmm7,%xmm8,%xmm8 + vpmuludq %xmm15,%xmm13,%xmm7 + vpaddq %xmm7,%xmm3,%xmm3 + vpmuludq %xmm15,%xmm2,%xmm7 + vpaddq %xmm7,%xmm11,%xmm11 + vpmuludq 80(%rsp),%xmm13,%xmm7 + vpaddq %xmm7,%xmm7,%xmm7 + vpaddq %xmm7,%xmm8,%xmm8 + vpmuludq 16(%rsp),%xmm13,%xmm7 + vpaddq %xmm7,%xmm5,%xmm5 + vpmuludq 48(%rsp),%xmm13,%xmm7 + vpaddq %xmm7,%xmm0,%xmm0 + vpmuludq 112(%rsp),%xmm13,%xmm7 + vpaddq %xmm7,%xmm10,%xmm10 + vpmuludq %xmm15,%xmm6,%xmm7 + vpaddq %xmm7,%xmm12,%xmm12 + vpmuludq %xmm15,%xmm4,%xmm7 + vpaddq %xmm7,%xmm9,%xmm9 + vpaddq %xmm2,%xmm2,%xmm2 + vpmuludq %xmm4,%xmm2,%xmm7 + vpaddq %xmm7,%xmm5,%xmm5 + vpmuludq 448(%rsp),%xmm2,%xmm7 + vpaddq %xmm7,%xmm3,%xmm3 + vpmuludq 448(%rsp),%xmm6,%xmm7 + vpaddq %xmm7,%xmm11,%xmm11 + vpmuludq 0(%rsp),%xmm2,%xmm7 + vpaddq %xmm7,%xmm0,%xmm0 + vpmuludq 48(%rsp),%xmm2,%xmm7 + vpaddq %xmm7,%xmm8,%xmm8 + vpmuludq 80(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpmuludq 96(%rsp),%xmm4,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpmuludq %xmm4,%xmm4,%xmm2 + vpaddq %xmm2,%xmm8,%xmm8 + vpaddq %xmm4,%xmm4,%xmm2 + vpmuludq 448(%rsp),%xmm2,%xmm4 + vpaddq %xmm4,%xmm12,%xmm12 + vpmuludq 16(%rsp),%xmm15,%xmm4 + vpaddq %xmm4,%xmm1,%xmm1 + vpmuludq 48(%rsp),%xmm15,%xmm4 + vpaddq %xmm4,%xmm14,%xmm14 + vpmuludq 96(%rsp),%xmm6,%xmm4 + vpaddq %xmm4,%xmm3,%xmm3 + vmovdqa 16(%rsp),%xmm4 + vpmuludq 448(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm9,%xmm9 + vpmuludq 16(%rsp),%xmm6,%xmm4 + vpaddq %xmm4,%xmm8,%xmm8 + vpmuludq 48(%rsp),%xmm6,%xmm4 + vpaddq %xmm4,%xmm10,%xmm10 + vpmuludq 80(%rsp),%xmm15,%xmm4 + vpaddq %xmm4,%xmm4,%xmm4 + vpaddq %xmm4,%xmm5,%xmm5 + vpmuludq 112(%rsp),%xmm15,%xmm4 + vpaddq %xmm4,%xmm0,%xmm0 + vmovdqa 48(%rsp),%xmm4 + vpaddq %xmm4,%xmm4,%xmm4 + vpmuludq 448(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm1,%xmm1 + vmovdqa 80(%rsp),%xmm4 + vpaddq %xmm4,%xmm4,%xmm4 + vpmuludq 448(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm14,%xmm14 + vpmuludq 64(%rsp),%xmm2,%xmm4 + vpaddq %xmm4,%xmm3,%xmm3 + vmovdqa 16(%rsp),%xmm4 + vpmuludq 64(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm11,%xmm11 + vmovdqa 16(%rsp),%xmm4 + vpmuludq 96(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm12,%xmm12 + vmovdqa 48(%rsp),%xmm4 + vpmuludq 96(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm9,%xmm9 + vpmuludq 0(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vmovdqa 32(%rsp),%xmm2 + vpmuludq 0(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm3,%xmm3 + vmovdqa 64(%rsp),%xmm2 + vpmuludq 48(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vmovdqa 96(%rsp),%xmm2 + vpmuludq 80(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm1,%xmm1 + vmovdqa 448(%rsp),%xmm2 + vpmuludq 112(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vpsrlq $26,%xmm3,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpand 
curve25519_sandy2x_m26(%rip),%xmm3,%xmm3 + vpsrlq $25,%xmm14,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vpand curve25519_sandy2x_m25(%rip),%xmm14,%xmm14 + vpsrlq $25,%xmm11,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vpand curve25519_sandy2x_m25(%rip),%xmm11,%xmm11 + vpsrlq $26,%xmm5,%xmm2 + vpaddq %xmm2,%xmm0,%xmm0 + vpand curve25519_sandy2x_m26(%rip),%xmm5,%xmm5 + vpsrlq $26,%xmm12,%xmm2 + vpaddq %xmm2,%xmm9,%xmm9 + vpand curve25519_sandy2x_m26(%rip),%xmm12,%xmm12 + vpsrlq $25,%xmm0,%xmm2 + vpaddq %xmm2,%xmm8,%xmm8 + vpand curve25519_sandy2x_m25(%rip),%xmm0,%xmm0 + vpsrlq $25,%xmm9,%xmm2 + vpaddq %xmm2,%xmm1,%xmm1 + vpand curve25519_sandy2x_m25(%rip),%xmm9,%xmm9 + vpsrlq $26,%xmm8,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpand curve25519_sandy2x_m26(%rip),%xmm8,%xmm8 + vpsrlq $26,%xmm1,%xmm2 + vpaddq %xmm2,%xmm14,%xmm14 + vpand curve25519_sandy2x_m26(%rip),%xmm1,%xmm1 + vpsrlq $25,%xmm10,%xmm2 + vpsllq $4,%xmm2,%xmm4 + vpaddq %xmm2,%xmm3,%xmm3 + vpsllq $1,%xmm2,%xmm2 + vpaddq %xmm2,%xmm4,%xmm4 + vpaddq %xmm4,%xmm3,%xmm3 + vpand curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 + vpsrlq $25,%xmm14,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vpand curve25519_sandy2x_m25(%rip),%xmm14,%xmm14 + vpsrlq $26,%xmm3,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpand curve25519_sandy2x_m26(%rip),%xmm3,%xmm3 + vpunpckhqdq %xmm11,%xmm3,%xmm2 + vmovdqa %xmm2,0(%rsp) + vpshufd $0,%xmm3,%xmm2 + vpshufd $0,%xmm11,%xmm3 + vpmuludq 160(%rsp),%xmm2,%xmm4 + vpmuludq 432(%rsp),%xmm3,%xmm6 + vpaddq %xmm6,%xmm4,%xmm4 + vpmuludq 176(%rsp),%xmm2,%xmm6 + vpmuludq 304(%rsp),%xmm3,%xmm7 + vpaddq %xmm7,%xmm6,%xmm6 + vpmuludq 208(%rsp),%xmm2,%xmm7 + vpmuludq 336(%rsp),%xmm3,%xmm11 + vpaddq %xmm11,%xmm7,%xmm7 + vpmuludq 240(%rsp),%xmm2,%xmm11 + vpmuludq 368(%rsp),%xmm3,%xmm13 + vpaddq %xmm13,%xmm11,%xmm11 + vpmuludq 272(%rsp),%xmm2,%xmm2 + vpmuludq 400(%rsp),%xmm3,%xmm3 + vpaddq %xmm3,%xmm2,%xmm2 + vpunpckhqdq %xmm9,%xmm12,%xmm3 + vmovdqa %xmm3,16(%rsp) + vpshufd $0,%xmm12,%xmm3 + vpshufd $0,%xmm9,%xmm9 + vpmuludq 288(%rsp),%xmm3,%xmm12 + vpaddq %xmm12,%xmm4,%xmm4 + vpmuludq 416(%rsp),%xmm9,%xmm12 + vpaddq %xmm12,%xmm4,%xmm4 + vpmuludq 160(%rsp),%xmm3,%xmm12 + vpaddq %xmm12,%xmm6,%xmm6 + vpmuludq 432(%rsp),%xmm9,%xmm12 + vpaddq %xmm12,%xmm6,%xmm6 + vpmuludq 176(%rsp),%xmm3,%xmm12 + vpaddq %xmm12,%xmm7,%xmm7 + vpmuludq 304(%rsp),%xmm9,%xmm12 + vpaddq %xmm12,%xmm7,%xmm7 + vpmuludq 208(%rsp),%xmm3,%xmm12 + vpaddq %xmm12,%xmm11,%xmm11 + vpmuludq 336(%rsp),%xmm9,%xmm12 + vpaddq %xmm12,%xmm11,%xmm11 + vpmuludq 240(%rsp),%xmm3,%xmm3 + vpaddq %xmm3,%xmm2,%xmm2 + vpmuludq 368(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm2,%xmm2 + vpunpckhqdq %xmm14,%xmm1,%xmm3 + vmovdqa %xmm3,32(%rsp) + vpshufd $0,%xmm1,%xmm1 + vpshufd $0,%xmm14,%xmm3 + vpmuludq 256(%rsp),%xmm1,%xmm9 + vpaddq %xmm9,%xmm4,%xmm4 + vpmuludq 384(%rsp),%xmm3,%xmm9 + vpaddq %xmm9,%xmm4,%xmm4 + vpmuludq 288(%rsp),%xmm1,%xmm9 + vpaddq %xmm9,%xmm6,%xmm6 + vpmuludq 416(%rsp),%xmm3,%xmm9 + vpaddq %xmm9,%xmm6,%xmm6 + vpmuludq 160(%rsp),%xmm1,%xmm9 + vpaddq %xmm9,%xmm7,%xmm7 + vpmuludq 432(%rsp),%xmm3,%xmm9 + vpaddq %xmm9,%xmm7,%xmm7 + vpmuludq 176(%rsp),%xmm1,%xmm9 + vpaddq %xmm9,%xmm11,%xmm11 + vpmuludq 304(%rsp),%xmm3,%xmm9 + vpaddq %xmm9,%xmm11,%xmm11 + vpmuludq 208(%rsp),%xmm1,%xmm1 + vpaddq %xmm1,%xmm2,%xmm2 + vpmuludq 336(%rsp),%xmm3,%xmm1 + vpaddq %xmm1,%xmm2,%xmm2 + vpunpckhqdq %xmm0,%xmm5,%xmm1 + vmovdqa %xmm1,48(%rsp) + vpshufd $0,%xmm5,%xmm1 + vpshufd $0,%xmm0,%xmm0 + vpmuludq 224(%rsp),%xmm1,%xmm3 + vpaddq %xmm3,%xmm4,%xmm4 + vpmuludq 352(%rsp),%xmm0,%xmm3 + vpaddq %xmm3,%xmm4,%xmm4 + vpmuludq 
256(%rsp),%xmm1,%xmm3 + vpaddq %xmm3,%xmm6,%xmm6 + vpmuludq 384(%rsp),%xmm0,%xmm3 + vpaddq %xmm3,%xmm6,%xmm6 + vpmuludq 288(%rsp),%xmm1,%xmm3 + vpaddq %xmm3,%xmm7,%xmm7 + vpmuludq 416(%rsp),%xmm0,%xmm3 + vpaddq %xmm3,%xmm7,%xmm7 + vpmuludq 160(%rsp),%xmm1,%xmm3 + vpaddq %xmm3,%xmm11,%xmm11 + vpmuludq 432(%rsp),%xmm0,%xmm3 + vpaddq %xmm3,%xmm11,%xmm11 + vpmuludq 176(%rsp),%xmm1,%xmm1 + vpaddq %xmm1,%xmm2,%xmm2 + vpmuludq 304(%rsp),%xmm0,%xmm0 + vpaddq %xmm0,%xmm2,%xmm2 + vpunpckhqdq %xmm10,%xmm8,%xmm0 + vmovdqa %xmm0,64(%rsp) + vpshufd $0,%xmm8,%xmm0 + vpshufd $0,%xmm10,%xmm1 + vpmuludq 192(%rsp),%xmm0,%xmm3 + vpaddq %xmm3,%xmm4,%xmm4 + vpmuludq 320(%rsp),%xmm1,%xmm3 + vpaddq %xmm3,%xmm4,%xmm4 + vpmuludq 224(%rsp),%xmm0,%xmm3 + vpaddq %xmm3,%xmm6,%xmm6 + vpmuludq 352(%rsp),%xmm1,%xmm3 + vpaddq %xmm3,%xmm6,%xmm6 + vpmuludq 256(%rsp),%xmm0,%xmm3 + vpaddq %xmm3,%xmm7,%xmm7 + vpmuludq 384(%rsp),%xmm1,%xmm3 + vpaddq %xmm3,%xmm7,%xmm7 + vpmuludq 288(%rsp),%xmm0,%xmm3 + vpaddq %xmm3,%xmm11,%xmm11 + vpmuludq 416(%rsp),%xmm1,%xmm3 + vpaddq %xmm3,%xmm11,%xmm11 + vpmuludq 160(%rsp),%xmm0,%xmm0 + vpaddq %xmm0,%xmm2,%xmm2 + vpmuludq 432(%rsp),%xmm1,%xmm0 + vpaddq %xmm0,%xmm2,%xmm2 + vmovdqa %xmm4,80(%rsp) + vmovdqa %xmm6,96(%rsp) + vmovdqa %xmm7,112(%rsp) + vmovdqa %xmm11,448(%rsp) + vmovdqa %xmm2,496(%rsp) + vmovdqa 144(%rsp),%xmm0 + vpmuludq %xmm0,%xmm0,%xmm1 + vpaddq %xmm0,%xmm0,%xmm0 + vmovdqa 128(%rsp),%xmm2 + vpmuludq %xmm2,%xmm0,%xmm3 + vmovdqa 480(%rsp),%xmm4 + vpmuludq %xmm4,%xmm0,%xmm5 + vmovdqa 464(%rsp),%xmm6 + vpmuludq %xmm6,%xmm0,%xmm7 + vmovdqa 528(%rsp),%xmm8 + vpmuludq %xmm8,%xmm0,%xmm9 + vpmuludq 512(%rsp),%xmm0,%xmm10 + vpmuludq 592(%rsp),%xmm0,%xmm11 + vpmuludq 576(%rsp),%xmm0,%xmm12 + vpmuludq 624(%rsp),%xmm0,%xmm13 + vmovdqa 672(%rsp),%xmm14 + vpmuludq %xmm14,%xmm0,%xmm0 + vpmuludq curve25519_sandy2x_v38_38(%rip),%xmm14,%xmm15 + vpmuludq %xmm15,%xmm14,%xmm14 + vpaddq %xmm14,%xmm13,%xmm13 + vpaddq %xmm6,%xmm6,%xmm14 + vpmuludq %xmm14,%xmm6,%xmm6 + vpaddq %xmm6,%xmm11,%xmm11 + vpaddq %xmm2,%xmm2,%xmm6 + vpmuludq %xmm6,%xmm2,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vpmuludq %xmm15,%xmm6,%xmm2 + vpaddq %xmm2,%xmm1,%xmm1 + vpmuludq %xmm15,%xmm4,%xmm2 + vpaddq %xmm2,%xmm3,%xmm3 + vpmuludq 544(%rsp),%xmm6,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpmuludq 592(%rsp),%xmm6,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vpmuludq 640(%rsp),%xmm6,%xmm2 + vpaddq %xmm2,%xmm13,%xmm13 + vpmuludq 624(%rsp),%xmm6,%xmm2 + vpaddq %xmm2,%xmm0,%xmm0 + vpmuludq %xmm4,%xmm6,%xmm2 + vpaddq %xmm2,%xmm7,%xmm7 + vpmuludq %xmm14,%xmm6,%xmm2 + vpaddq %xmm2,%xmm9,%xmm9 + vpmuludq %xmm8,%xmm6,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpmuludq %xmm15,%xmm14,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vpmuludq %xmm15,%xmm8,%xmm2 + vpaddq %xmm2,%xmm7,%xmm7 + vpmuludq %xmm4,%xmm4,%xmm2 + vpaddq %xmm2,%xmm9,%xmm9 + vpmuludq %xmm14,%xmm4,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpaddq %xmm4,%xmm4,%xmm2 + vpmuludq %xmm8,%xmm2,%xmm4 + vpaddq %xmm4,%xmm11,%xmm11 + vpmuludq 688(%rsp),%xmm2,%xmm4 + vpaddq %xmm4,%xmm1,%xmm1 + vpmuludq 688(%rsp),%xmm14,%xmm4 + vpaddq %xmm4,%xmm3,%xmm3 + vpmuludq 512(%rsp),%xmm2,%xmm4 + vpaddq %xmm4,%xmm12,%xmm12 + vpmuludq 592(%rsp),%xmm2,%xmm4 + vpaddq %xmm4,%xmm13,%xmm13 + vpmuludq 576(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm0,%xmm0 + vpmuludq 656(%rsp),%xmm8,%xmm2 + vpaddq %xmm2,%xmm3,%xmm3 + vpmuludq %xmm8,%xmm14,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vpmuludq %xmm8,%xmm8,%xmm2 + vpaddq %xmm2,%xmm13,%xmm13 + vpaddq %xmm8,%xmm8,%xmm2 + vpmuludq 688(%rsp),%xmm2,%xmm4 + vpaddq %xmm4,%xmm5,%xmm5 + vpmuludq 544(%rsp),%xmm15,%xmm4 + 
vpaddq %xmm4,%xmm9,%xmm9 + vpmuludq 592(%rsp),%xmm15,%xmm4 + vpaddq %xmm4,%xmm10,%xmm10 + vpmuludq 656(%rsp),%xmm14,%xmm4 + vpaddq %xmm4,%xmm1,%xmm1 + vmovdqa 544(%rsp),%xmm4 + vpmuludq 688(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm7,%xmm7 + vpmuludq 544(%rsp),%xmm14,%xmm4 + vpaddq %xmm4,%xmm13,%xmm13 + vpmuludq 592(%rsp),%xmm14,%xmm4 + vpaddq %xmm4,%xmm0,%xmm0 + vpmuludq 640(%rsp),%xmm15,%xmm4 + vpaddq %xmm4,%xmm11,%xmm11 + vpmuludq 624(%rsp),%xmm15,%xmm4 + vpaddq %xmm4,%xmm12,%xmm12 + vmovdqa 592(%rsp),%xmm4 + vpaddq %xmm4,%xmm4,%xmm4 + vpmuludq 688(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm9,%xmm9 + vpmuludq 608(%rsp),%xmm2,%xmm4 + vpaddq %xmm4,%xmm1,%xmm1 + vmovdqa 544(%rsp),%xmm4 + vpmuludq 608(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm3,%xmm3 + vmovdqa 544(%rsp),%xmm4 + vpmuludq 656(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm5,%xmm5 + vmovdqa 592(%rsp),%xmm4 + vpmuludq 656(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm7,%xmm7 + vmovdqa 640(%rsp),%xmm4 + vpmuludq 688(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm10,%xmm10 + vpmuludq 512(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm0,%xmm0 + vmovdqa 560(%rsp),%xmm2 + vpmuludq 512(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm1,%xmm1 + vmovdqa 608(%rsp),%xmm2 + vpmuludq 592(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vmovdqa 656(%rsp),%xmm2 + vpmuludq 576(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm9,%xmm9 + vmovdqa 688(%rsp),%xmm2 + vpmuludq 624(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpsrlq $26,%xmm1,%xmm2 + vpaddq %xmm2,%xmm3,%xmm3 + vpand curve25519_sandy2x_m26(%rip),%xmm1,%xmm1 + vpsrlq $25,%xmm10,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpand curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 + vpsrlq $25,%xmm3,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vpand curve25519_sandy2x_m25(%rip),%xmm3,%xmm3 + vpsrlq $26,%xmm11,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vpand curve25519_sandy2x_m26(%rip),%xmm11,%xmm11 + vpsrlq $26,%xmm5,%xmm2 + vpaddq %xmm2,%xmm7,%xmm7 + vpand curve25519_sandy2x_m26(%rip),%xmm5,%xmm5 + vpsrlq $25,%xmm12,%xmm2 + vpaddq %xmm2,%xmm13,%xmm13 + vpand curve25519_sandy2x_m25(%rip),%xmm12,%xmm12 + vpsrlq $25,%xmm7,%xmm2 + vpaddq %xmm2,%xmm9,%xmm9 + vpand curve25519_sandy2x_m25(%rip),%xmm7,%xmm7 + vpsrlq $26,%xmm13,%xmm2 + vpaddq %xmm2,%xmm0,%xmm0 + vpand curve25519_sandy2x_m26(%rip),%xmm13,%xmm13 + vpsrlq $26,%xmm9,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpand curve25519_sandy2x_m26(%rip),%xmm9,%xmm9 + vpsrlq $25,%xmm0,%xmm2 + vpsllq $4,%xmm2,%xmm4 + vpaddq %xmm2,%xmm1,%xmm1 + vpsllq $1,%xmm2,%xmm2 + vpaddq %xmm2,%xmm4,%xmm4 + vpaddq %xmm4,%xmm1,%xmm1 + vpand curve25519_sandy2x_m25(%rip),%xmm0,%xmm0 + vpsrlq $25,%xmm10,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpand curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 + vpsrlq $26,%xmm1,%xmm2 + vpaddq %xmm2,%xmm3,%xmm3 + vpand curve25519_sandy2x_m26(%rip),%xmm1,%xmm1 + vpunpckhqdq %xmm3,%xmm1,%xmm2 + vpunpcklqdq %xmm3,%xmm1,%xmm1 + vmovdqa %xmm1,464(%rsp) + vpaddq curve25519_sandy2x_subc0(%rip),%xmm2,%xmm3 + vpsubq %xmm1,%xmm3,%xmm3 + vpunpckhqdq %xmm3,%xmm2,%xmm1 + vpunpcklqdq %xmm3,%xmm2,%xmm2 + vmovdqa %xmm2,480(%rsp) + vmovdqa %xmm1,512(%rsp) + vpsllq $1,%xmm1,%xmm1 + vmovdqa %xmm1,528(%rsp) + vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm3,%xmm3 + vmovdqa 80(%rsp),%xmm1 + vpunpcklqdq %xmm1,%xmm3,%xmm2 + vpunpckhqdq %xmm1,%xmm3,%xmm1 + vpunpckhqdq %xmm7,%xmm5,%xmm3 + vpunpcklqdq %xmm7,%xmm5,%xmm4 + vmovdqa %xmm4,544(%rsp) + vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm5 + vpsubq %xmm4,%xmm5,%xmm5 + vpunpckhqdq %xmm5,%xmm3,%xmm4 + vpunpcklqdq %xmm5,%xmm3,%xmm3 + vmovdqa %xmm3,560(%rsp) + vmovdqa %xmm4,576(%rsp) + vpsllq $1,%xmm4,%xmm4 + vmovdqa 
%xmm4,592(%rsp) + vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm5,%xmm5 + vmovdqa 96(%rsp),%xmm3 + vpunpcklqdq %xmm3,%xmm5,%xmm4 + vpunpckhqdq %xmm3,%xmm5,%xmm3 + vpunpckhqdq %xmm10,%xmm9,%xmm5 + vpunpcklqdq %xmm10,%xmm9,%xmm6 + vmovdqa %xmm6,608(%rsp) + vpaddq curve25519_sandy2x_subc2(%rip),%xmm5,%xmm7 + vpsubq %xmm6,%xmm7,%xmm7 + vpunpckhqdq %xmm7,%xmm5,%xmm6 + vpunpcklqdq %xmm7,%xmm5,%xmm5 + vmovdqa %xmm5,624(%rsp) + vmovdqa %xmm6,640(%rsp) + vpsllq $1,%xmm6,%xmm6 + vmovdqa %xmm6,656(%rsp) + vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm7,%xmm7 + vmovdqa 112(%rsp),%xmm5 + vpunpcklqdq %xmm5,%xmm7,%xmm6 + vpunpckhqdq %xmm5,%xmm7,%xmm5 + vpunpckhqdq %xmm12,%xmm11,%xmm7 + vpunpcklqdq %xmm12,%xmm11,%xmm8 + vmovdqa %xmm8,672(%rsp) + vpaddq curve25519_sandy2x_subc2(%rip),%xmm7,%xmm9 + vpsubq %xmm8,%xmm9,%xmm9 + vpunpckhqdq %xmm9,%xmm7,%xmm8 + vpunpcklqdq %xmm9,%xmm7,%xmm7 + vmovdqa %xmm7,688(%rsp) + vmovdqa %xmm8,704(%rsp) + vpsllq $1,%xmm8,%xmm8 + vmovdqa %xmm8,720(%rsp) + vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm9,%xmm9 + vmovdqa 448(%rsp),%xmm7 + vpunpcklqdq %xmm7,%xmm9,%xmm8 + vpunpckhqdq %xmm7,%xmm9,%xmm7 + vpunpckhqdq %xmm0,%xmm13,%xmm9 + vpunpcklqdq %xmm0,%xmm13,%xmm0 + vmovdqa %xmm0,448(%rsp) + vpaddq curve25519_sandy2x_subc2(%rip),%xmm9,%xmm10 + vpsubq %xmm0,%xmm10,%xmm10 + vpunpckhqdq %xmm10,%xmm9,%xmm0 + vpunpcklqdq %xmm10,%xmm9,%xmm9 + vmovdqa %xmm9,736(%rsp) + vmovdqa %xmm0,752(%rsp) + vpsllq $1,%xmm0,%xmm0 + vmovdqa %xmm0,768(%rsp) + vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm10,%xmm10 + vmovdqa 496(%rsp),%xmm0 + vpunpcklqdq %xmm0,%xmm10,%xmm9 + vpunpckhqdq %xmm0,%xmm10,%xmm0 + vpsrlq $26,%xmm2,%xmm10 + vpaddq %xmm10,%xmm1,%xmm1 + vpand curve25519_sandy2x_m26(%rip),%xmm2,%xmm2 + vpsrlq $25,%xmm5,%xmm10 + vpaddq %xmm10,%xmm8,%xmm8 + vpand curve25519_sandy2x_m25(%rip),%xmm5,%xmm5 + vpsrlq $25,%xmm1,%xmm10 + vpaddq %xmm10,%xmm4,%xmm4 + vpand curve25519_sandy2x_m25(%rip),%xmm1,%xmm1 + vpsrlq $26,%xmm8,%xmm10 + vpaddq %xmm10,%xmm7,%xmm7 + vpand curve25519_sandy2x_m26(%rip),%xmm8,%xmm8 + vpsrlq $26,%xmm4,%xmm10 + vpaddq %xmm10,%xmm3,%xmm3 + vpand curve25519_sandy2x_m26(%rip),%xmm4,%xmm4 + vpsrlq $25,%xmm7,%xmm10 + vpaddq %xmm10,%xmm9,%xmm9 + vpand curve25519_sandy2x_m25(%rip),%xmm7,%xmm7 + vpsrlq $25,%xmm3,%xmm10 + vpaddq %xmm10,%xmm6,%xmm6 + vpand curve25519_sandy2x_m25(%rip),%xmm3,%xmm3 + vpsrlq $26,%xmm9,%xmm10 + vpaddq %xmm10,%xmm0,%xmm0 + vpand curve25519_sandy2x_m26(%rip),%xmm9,%xmm9 + vpsrlq $26,%xmm6,%xmm10 + vpaddq %xmm10,%xmm5,%xmm5 + vpand curve25519_sandy2x_m26(%rip),%xmm6,%xmm6 + vpsrlq $25,%xmm0,%xmm10 + vpsllq $4,%xmm10,%xmm11 + vpaddq %xmm10,%xmm2,%xmm2 + vpsllq $1,%xmm10,%xmm10 + vpaddq %xmm10,%xmm11,%xmm11 + vpaddq %xmm11,%xmm2,%xmm2 + vpand curve25519_sandy2x_m25(%rip),%xmm0,%xmm0 + vpsrlq $25,%xmm5,%xmm10 + vpaddq %xmm10,%xmm8,%xmm8 + vpand curve25519_sandy2x_m25(%rip),%xmm5,%xmm5 + vpsrlq $26,%xmm2,%xmm10 + vpaddq %xmm10,%xmm1,%xmm1 + vpand curve25519_sandy2x_m26(%rip),%xmm2,%xmm2 + vpunpckhqdq %xmm1,%xmm2,%xmm10 + vmovdqa %xmm10,80(%rsp) + vpunpcklqdq %xmm1,%xmm2,%xmm1 + vpunpckhqdq %xmm3,%xmm4,%xmm2 + vmovdqa %xmm2,96(%rsp) + vpunpcklqdq %xmm3,%xmm4,%xmm2 + vpunpckhqdq %xmm5,%xmm6,%xmm3 + vmovdqa %xmm3,112(%rsp) + vpunpcklqdq %xmm5,%xmm6,%xmm3 + vpunpckhqdq %xmm7,%xmm8,%xmm4 + vmovdqa %xmm4,128(%rsp) + vpunpcklqdq %xmm7,%xmm8,%xmm4 + vpunpckhqdq %xmm0,%xmm9,%xmm5 + vmovdqa %xmm5,144(%rsp) + vpunpcklqdq %xmm0,%xmm9,%xmm0 + vmovdqa 464(%rsp),%xmm5 + vpaddq %xmm5,%xmm1,%xmm1 + vpunpcklqdq %xmm1,%xmm5,%xmm6 + vpunpckhqdq 
%xmm1,%xmm5,%xmm1 + vpmuludq 512(%rsp),%xmm6,%xmm5 + vpmuludq 480(%rsp),%xmm1,%xmm7 + vpaddq %xmm7,%xmm5,%xmm5 + vpmuludq 560(%rsp),%xmm6,%xmm7 + vpmuludq 528(%rsp),%xmm1,%xmm8 + vpaddq %xmm8,%xmm7,%xmm7 + vpmuludq 576(%rsp),%xmm6,%xmm8 + vpmuludq 560(%rsp),%xmm1,%xmm9 + vpaddq %xmm9,%xmm8,%xmm8 + vpmuludq 624(%rsp),%xmm6,%xmm9 + vpmuludq 592(%rsp),%xmm1,%xmm10 + vpaddq %xmm10,%xmm9,%xmm9 + vpmuludq 640(%rsp),%xmm6,%xmm10 + vpmuludq 624(%rsp),%xmm1,%xmm11 + vpaddq %xmm11,%xmm10,%xmm10 + vpmuludq 688(%rsp),%xmm6,%xmm11 + vpmuludq 656(%rsp),%xmm1,%xmm12 + vpaddq %xmm12,%xmm11,%xmm11 + vpmuludq 704(%rsp),%xmm6,%xmm12 + vpmuludq 688(%rsp),%xmm1,%xmm13 + vpaddq %xmm13,%xmm12,%xmm12 + vpmuludq 736(%rsp),%xmm6,%xmm13 + vpmuludq 720(%rsp),%xmm1,%xmm14 + vpaddq %xmm14,%xmm13,%xmm13 + vpmuludq 752(%rsp),%xmm6,%xmm14 + vpmuludq 736(%rsp),%xmm1,%xmm15 + vpaddq %xmm15,%xmm14,%xmm14 + vpmuludq 480(%rsp),%xmm6,%xmm6 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 + vpmuludq 768(%rsp),%xmm1,%xmm1 + vpaddq %xmm1,%xmm6,%xmm6 + vmovdqa 544(%rsp),%xmm1 + vpaddq %xmm1,%xmm2,%xmm2 + vpunpcklqdq %xmm2,%xmm1,%xmm15 + vpunpckhqdq %xmm2,%xmm1,%xmm1 + vpmuludq 480(%rsp),%xmm15,%xmm2 + vpaddq %xmm2,%xmm7,%xmm7 + vpmuludq 512(%rsp),%xmm15,%xmm2 + vpaddq %xmm2,%xmm8,%xmm8 + vpmuludq 560(%rsp),%xmm15,%xmm2 + vpaddq %xmm2,%xmm9,%xmm9 + vpmuludq 576(%rsp),%xmm15,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpmuludq 624(%rsp),%xmm15,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpmuludq 640(%rsp),%xmm15,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vpmuludq 688(%rsp),%xmm15,%xmm2 + vpaddq %xmm2,%xmm13,%xmm13 + vpmuludq 704(%rsp),%xmm15,%xmm2 + vpaddq %xmm2,%xmm14,%xmm14 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm15,%xmm15 + vpmuludq 736(%rsp),%xmm15,%xmm2 + vpaddq %xmm2,%xmm6,%xmm6 + vpmuludq 752(%rsp),%xmm15,%xmm15 + vpaddq %xmm15,%xmm5,%xmm5 + vpmuludq 480(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm8,%xmm8 + vpmuludq 528(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm9,%xmm9 + vpmuludq 560(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpmuludq 592(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpmuludq 624(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vpmuludq 656(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm13,%xmm13 + vpmuludq 688(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm14,%xmm14 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 + vpmuludq 720(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm6,%xmm6 + vpmuludq 736(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vpmuludq 768(%rsp),%xmm1,%xmm1 + vpaddq %xmm1,%xmm7,%xmm7 + vmovdqa 608(%rsp),%xmm1 + vpaddq %xmm1,%xmm3,%xmm3 + vpunpcklqdq %xmm3,%xmm1,%xmm2 + vpunpckhqdq %xmm3,%xmm1,%xmm1 + vpmuludq 480(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm9,%xmm9 + vpmuludq 512(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm10,%xmm10 + vpmuludq 560(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm11,%xmm11 + vpmuludq 576(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm12,%xmm12 + vpmuludq 624(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm13,%xmm13 + vpmuludq 640(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm14,%xmm14 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm2,%xmm2 + vpmuludq 688(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm6,%xmm6 + vpmuludq 704(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm5,%xmm5 + vpmuludq 736(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm7,%xmm7 + vpmuludq 752(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm8,%xmm8 + vpmuludq 480(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpmuludq 528(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpmuludq 560(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vpmuludq 592(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm13,%xmm13 + vpmuludq 624(%rsp),%xmm1,%xmm2 
+ vpaddq %xmm2,%xmm14,%xmm14 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 + vpmuludq 656(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm6,%xmm6 + vpmuludq 688(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vpmuludq 720(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm7,%xmm7 + vpmuludq 736(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm8,%xmm8 + vpmuludq 768(%rsp),%xmm1,%xmm1 + vpaddq %xmm1,%xmm9,%xmm9 + vmovdqa 672(%rsp),%xmm1 + vpaddq %xmm1,%xmm4,%xmm4 + vpunpcklqdq %xmm4,%xmm1,%xmm2 + vpunpckhqdq %xmm4,%xmm1,%xmm1 + vpmuludq 480(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm11,%xmm11 + vpmuludq 512(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm12,%xmm12 + vpmuludq 560(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm13,%xmm13 + vpmuludq 576(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm14,%xmm14 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm2,%xmm2 + vpmuludq 624(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm6,%xmm6 + vpmuludq 640(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm5,%xmm5 + vpmuludq 688(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm7,%xmm7 + vpmuludq 704(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm8,%xmm8 + vpmuludq 736(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm9,%xmm9 + vpmuludq 752(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpmuludq 480(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vpmuludq 528(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm13,%xmm13 + vpmuludq 560(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm14,%xmm14 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 + vpmuludq 592(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm6,%xmm6 + vpmuludq 624(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vpmuludq 656(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm7,%xmm7 + vpmuludq 688(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm8,%xmm8 + vpmuludq 720(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm9,%xmm9 + vpmuludq 736(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpmuludq 768(%rsp),%xmm1,%xmm1 + vpaddq %xmm1,%xmm11,%xmm11 + vmovdqa 448(%rsp),%xmm1 + vpaddq %xmm1,%xmm0,%xmm0 + vpunpcklqdq %xmm0,%xmm1,%xmm2 + vpunpckhqdq %xmm0,%xmm1,%xmm0 + vpmuludq 480(%rsp),%xmm2,%xmm1 + vpaddq %xmm1,%xmm13,%xmm13 + vpmuludq 512(%rsp),%xmm2,%xmm1 + vpaddq %xmm1,%xmm14,%xmm14 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm2,%xmm2 + vpmuludq 560(%rsp),%xmm2,%xmm1 + vpaddq %xmm1,%xmm6,%xmm6 + vpmuludq 576(%rsp),%xmm2,%xmm1 + vpaddq %xmm1,%xmm5,%xmm5 + vpmuludq 624(%rsp),%xmm2,%xmm1 + vpaddq %xmm1,%xmm7,%xmm7 + vpmuludq 640(%rsp),%xmm2,%xmm1 + vpaddq %xmm1,%xmm8,%xmm8 + vpmuludq 688(%rsp),%xmm2,%xmm1 + vpaddq %xmm1,%xmm9,%xmm9 + vpmuludq 704(%rsp),%xmm2,%xmm1 + vpaddq %xmm1,%xmm10,%xmm10 + vpmuludq 736(%rsp),%xmm2,%xmm1 + vpaddq %xmm1,%xmm11,%xmm11 + vpmuludq 752(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vpmuludq 480(%rsp),%xmm0,%xmm1 + vpaddq %xmm1,%xmm14,%xmm14 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm0,%xmm0 + vpmuludq 528(%rsp),%xmm0,%xmm1 + vpaddq %xmm1,%xmm6,%xmm6 + vpmuludq 560(%rsp),%xmm0,%xmm1 + vpaddq %xmm1,%xmm5,%xmm5 + vpmuludq 592(%rsp),%xmm0,%xmm1 + vpaddq %xmm1,%xmm7,%xmm7 + vpmuludq 624(%rsp),%xmm0,%xmm1 + vpaddq %xmm1,%xmm8,%xmm8 + vpmuludq 656(%rsp),%xmm0,%xmm1 + vpaddq %xmm1,%xmm9,%xmm9 + vpmuludq 688(%rsp),%xmm0,%xmm1 + vpaddq %xmm1,%xmm10,%xmm10 + vpmuludq 720(%rsp),%xmm0,%xmm1 + vpaddq %xmm1,%xmm11,%xmm11 + vpmuludq 736(%rsp),%xmm0,%xmm1 + vpaddq %xmm1,%xmm12,%xmm12 + vpmuludq 768(%rsp),%xmm0,%xmm0 + vpaddq %xmm0,%xmm13,%xmm13 + vpsrlq $26,%xmm6,%xmm0 + vpaddq %xmm0,%xmm5,%xmm5 + vpand curve25519_sandy2x_m26(%rip),%xmm6,%xmm6 + vpsrlq $25,%xmm10,%xmm0 + vpaddq %xmm0,%xmm11,%xmm11 + vpand curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 + vpsrlq $25,%xmm5,%xmm0 + vpaddq %xmm0,%xmm7,%xmm7 + vpand 
curve25519_sandy2x_m25(%rip),%xmm5,%xmm5 + vpsrlq $26,%xmm11,%xmm0 + vpaddq %xmm0,%xmm12,%xmm12 + vpand curve25519_sandy2x_m26(%rip),%xmm11,%xmm11 + vpsrlq $26,%xmm7,%xmm0 + vpaddq %xmm0,%xmm8,%xmm8 + vpand curve25519_sandy2x_m26(%rip),%xmm7,%xmm7 + vpsrlq $25,%xmm12,%xmm0 + vpaddq %xmm0,%xmm13,%xmm13 + vpand curve25519_sandy2x_m25(%rip),%xmm12,%xmm12 + vpsrlq $25,%xmm8,%xmm0 + vpaddq %xmm0,%xmm9,%xmm9 + vpand curve25519_sandy2x_m25(%rip),%xmm8,%xmm8 + vpsrlq $26,%xmm13,%xmm0 + vpaddq %xmm0,%xmm14,%xmm14 + vpand curve25519_sandy2x_m26(%rip),%xmm13,%xmm13 + vpsrlq $26,%xmm9,%xmm0 + vpaddq %xmm0,%xmm10,%xmm10 + vpand curve25519_sandy2x_m26(%rip),%xmm9,%xmm9 + vpsrlq $25,%xmm14,%xmm0 + vpsllq $4,%xmm0,%xmm1 + vpaddq %xmm0,%xmm6,%xmm6 + vpsllq $1,%xmm0,%xmm0 + vpaddq %xmm0,%xmm1,%xmm1 + vpaddq %xmm1,%xmm6,%xmm6 + vpand curve25519_sandy2x_m25(%rip),%xmm14,%xmm14 + vpsrlq $25,%xmm10,%xmm0 + vpaddq %xmm0,%xmm11,%xmm11 + vpand curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 + vpsrlq $26,%xmm6,%xmm0 + vpaddq %xmm0,%xmm5,%xmm5 + vpand curve25519_sandy2x_m26(%rip),%xmm6,%xmm6 + vpunpckhqdq %xmm5,%xmm6,%xmm1 + vpunpcklqdq %xmm5,%xmm6,%xmm0 + vpunpckhqdq %xmm8,%xmm7,%xmm3 + vpunpcklqdq %xmm8,%xmm7,%xmm2 + vpunpckhqdq %xmm10,%xmm9,%xmm5 + vpunpcklqdq %xmm10,%xmm9,%xmm4 + vpunpckhqdq %xmm12,%xmm11,%xmm7 + vpunpcklqdq %xmm12,%xmm11,%xmm6 + vpunpckhqdq %xmm14,%xmm13,%xmm9 + vpunpcklqdq %xmm14,%xmm13,%xmm8 + cmp $0,%rdx + jne .Lladder_loop + vmovdqu %xmm1,160(%rdi) + vmovdqu %xmm0,80(%rdi) + vmovdqu %xmm3,176(%rdi) + vmovdqu %xmm2,96(%rdi) + vmovdqu %xmm5,192(%rdi) + vmovdqu %xmm4,112(%rdi) + vmovdqu %xmm7,208(%rdi) + vmovdqu %xmm6,128(%rdi) + vmovdqu %xmm9,224(%rdi) + vmovdqu %xmm8,144(%rdi) + movq 1824(%rsp),%r11 + movq 1832(%rsp),%r12 + movq 1840(%rsp),%r13 + movq 1848(%rsp),%r14 + leave + ret +ENDPROC(curve25519_sandy2x_ladder) + +.align 32 +ENTRY(curve25519_sandy2x_ladder_base) + push %rbp + mov %rsp,%rbp + sub $1568,%rsp + and $-32,%rsp + movq %r11,1536(%rsp) + movq %r12,1544(%rsp) + movq %r13,1552(%rsp) + vmovdqa curve25519_sandy2x_v0_0(%rip),%xmm0 + vmovdqa curve25519_sandy2x_v1_0(%rip),%xmm1 + vmovdqa curve25519_sandy2x_v9_0(%rip),%xmm2 + vmovdqa %xmm2,0(%rsp) + vmovdqa %xmm0,16(%rsp) + vmovdqa %xmm0,32(%rsp) + vmovdqa %xmm0,48(%rsp) + vmovdqa %xmm0,64(%rsp) + vmovdqa %xmm1,80(%rsp) + vmovdqa %xmm0,96(%rsp) + vmovdqa %xmm0,112(%rsp) + vmovdqa %xmm0,128(%rsp) + vmovdqa %xmm0,144(%rsp) + vmovdqa %xmm1,%xmm0 + vpxor %xmm1,%xmm1,%xmm1 + vpxor %xmm2,%xmm2,%xmm2 + vpxor %xmm3,%xmm3,%xmm3 + vpxor %xmm4,%xmm4,%xmm4 + vpxor %xmm5,%xmm5,%xmm5 + vpxor %xmm6,%xmm6,%xmm6 + vpxor %xmm7,%xmm7,%xmm7 + vpxor %xmm8,%xmm8,%xmm8 + vpxor %xmm9,%xmm9,%xmm9 + movq 0(%rsi),%rdx + movq 8(%rsi),%rcx + movq 16(%rsi),%r8 + movq 24(%rsi),%r9 + shrd $1,%rcx,%rdx + shrd $1,%r8,%rcx + shrd $1,%r9,%r8 + shr $1,%r9 + xorq 0(%rsi),%rdx + xorq 8(%rsi),%rcx + xorq 16(%rsi),%r8 + xorq 24(%rsi),%r9 + leaq 512(%rsp),%rsi + mov $64,%rax + + .align 16 + .Lladder_base_small_loop: + mov %rdx,%r10 + mov %rcx,%r11 + mov %r8,%r12 + mov %r9,%r13 + shr $1,%rdx + shr $1,%rcx + shr $1,%r8 + shr $1,%r9 + and $1,%r10d + and $1,%r11d + and $1,%r12d + and $1,%r13d + neg %r10 + neg %r11 + neg %r12 + neg %r13 + movl %r10d,0(%rsi) + movl %r11d,256(%rsi) + movl %r12d,512(%rsi) + movl %r13d,768(%rsi) + add $4,%rsi + sub $1,%rax + jne .Lladder_base_small_loop + mov $255,%rdx + add $760,%rsi + + .align 16 + .Lladder_base_loop: + sub $1,%rdx + vbroadcastss 0(%rsi),%xmm10 + sub $4,%rsi + vmovdqa 0(%rsp),%xmm11 + vmovdqa 80(%rsp),%xmm12 + vpxor %xmm11,%xmm0,%xmm13 + 
vpand %xmm10,%xmm13,%xmm13 + vpxor %xmm13,%xmm0,%xmm0 + vpxor %xmm13,%xmm11,%xmm11 + vpxor %xmm12,%xmm1,%xmm13 + vpand %xmm10,%xmm13,%xmm13 + vpxor %xmm13,%xmm1,%xmm1 + vpxor %xmm13,%xmm12,%xmm12 + vmovdqa 16(%rsp),%xmm13 + vmovdqa 96(%rsp),%xmm14 + vpxor %xmm13,%xmm2,%xmm15 + vpand %xmm10,%xmm15,%xmm15 + vpxor %xmm15,%xmm2,%xmm2 + vpxor %xmm15,%xmm13,%xmm13 + vpxor %xmm14,%xmm3,%xmm15 + vpand %xmm10,%xmm15,%xmm15 + vpxor %xmm15,%xmm3,%xmm3 + vpxor %xmm15,%xmm14,%xmm14 + vmovdqa %xmm13,0(%rsp) + vmovdqa %xmm14,16(%rsp) + vmovdqa 32(%rsp),%xmm13 + vmovdqa 112(%rsp),%xmm14 + vpxor %xmm13,%xmm4,%xmm15 + vpand %xmm10,%xmm15,%xmm15 + vpxor %xmm15,%xmm4,%xmm4 + vpxor %xmm15,%xmm13,%xmm13 + vpxor %xmm14,%xmm5,%xmm15 + vpand %xmm10,%xmm15,%xmm15 + vpxor %xmm15,%xmm5,%xmm5 + vpxor %xmm15,%xmm14,%xmm14 + vmovdqa %xmm13,32(%rsp) + vmovdqa %xmm14,80(%rsp) + vmovdqa 48(%rsp),%xmm13 + vmovdqa 128(%rsp),%xmm14 + vpxor %xmm13,%xmm6,%xmm15 + vpand %xmm10,%xmm15,%xmm15 + vpxor %xmm15,%xmm6,%xmm6 + vpxor %xmm15,%xmm13,%xmm13 + vpxor %xmm14,%xmm7,%xmm15 + vpand %xmm10,%xmm15,%xmm15 + vpxor %xmm15,%xmm7,%xmm7 + vpxor %xmm15,%xmm14,%xmm14 + vmovdqa %xmm13,48(%rsp) + vmovdqa %xmm14,96(%rsp) + vmovdqa 64(%rsp),%xmm13 + vmovdqa 144(%rsp),%xmm14 + vpxor %xmm13,%xmm8,%xmm15 + vpand %xmm10,%xmm15,%xmm15 + vpxor %xmm15,%xmm8,%xmm8 + vpxor %xmm15,%xmm13,%xmm13 + vpxor %xmm14,%xmm9,%xmm15 + vpand %xmm10,%xmm15,%xmm15 + vpxor %xmm15,%xmm9,%xmm9 + vpxor %xmm15,%xmm14,%xmm14 + vmovdqa %xmm13,64(%rsp) + vmovdqa %xmm14,112(%rsp) + vpaddq curve25519_sandy2x_subc0(%rip),%xmm11,%xmm10 + vpsubq %xmm12,%xmm10,%xmm10 + vpaddq %xmm12,%xmm11,%xmm11 + vpunpckhqdq %xmm10,%xmm11,%xmm12 + vpunpcklqdq %xmm10,%xmm11,%xmm10 + vpaddq %xmm1,%xmm0,%xmm11 + vpaddq curve25519_sandy2x_subc0(%rip),%xmm0,%xmm0 + vpsubq %xmm1,%xmm0,%xmm0 + vpunpckhqdq %xmm11,%xmm0,%xmm1 + vpunpcklqdq %xmm11,%xmm0,%xmm0 + vpmuludq %xmm0,%xmm10,%xmm11 + vpmuludq %xmm1,%xmm10,%xmm13 + vmovdqa %xmm1,128(%rsp) + vpaddq %xmm1,%xmm1,%xmm1 + vpmuludq %xmm0,%xmm12,%xmm14 + vmovdqa %xmm0,144(%rsp) + vpaddq %xmm14,%xmm13,%xmm13 + vpmuludq %xmm1,%xmm12,%xmm0 + vmovdqa %xmm1,160(%rsp) + vpaddq %xmm3,%xmm2,%xmm1 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm2,%xmm2 + vpsubq %xmm3,%xmm2,%xmm2 + vpunpckhqdq %xmm1,%xmm2,%xmm3 + vpunpcklqdq %xmm1,%xmm2,%xmm1 + vpmuludq %xmm1,%xmm10,%xmm2 + vpaddq %xmm2,%xmm0,%xmm0 + vpmuludq %xmm3,%xmm10,%xmm2 + vmovdqa %xmm3,176(%rsp) + vpaddq %xmm3,%xmm3,%xmm3 + vpmuludq %xmm1,%xmm12,%xmm14 + vmovdqa %xmm1,192(%rsp) + vpaddq %xmm14,%xmm2,%xmm2 + vpmuludq %xmm3,%xmm12,%xmm1 + vmovdqa %xmm3,208(%rsp) + vpaddq %xmm5,%xmm4,%xmm3 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm4,%xmm4 + vpsubq %xmm5,%xmm4,%xmm4 + vpunpckhqdq %xmm3,%xmm4,%xmm5 + vpunpcklqdq %xmm3,%xmm4,%xmm3 + vpmuludq %xmm3,%xmm10,%xmm4 + vpaddq %xmm4,%xmm1,%xmm1 + vpmuludq %xmm5,%xmm10,%xmm4 + vmovdqa %xmm5,224(%rsp) + vpaddq %xmm5,%xmm5,%xmm5 + vpmuludq %xmm3,%xmm12,%xmm14 + vmovdqa %xmm3,240(%rsp) + vpaddq %xmm14,%xmm4,%xmm4 + vpaddq %xmm7,%xmm6,%xmm3 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm6,%xmm6 + vpsubq %xmm7,%xmm6,%xmm6 + vpunpckhqdq %xmm3,%xmm6,%xmm7 + vpunpcklqdq %xmm3,%xmm6,%xmm3 + vpmuludq %xmm3,%xmm10,%xmm6 + vpmuludq %xmm5,%xmm12,%xmm14 + vmovdqa %xmm5,256(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm5,%xmm5 + vmovdqa %xmm5,272(%rsp) + vpaddq %xmm14,%xmm6,%xmm6 + vpmuludq %xmm7,%xmm10,%xmm5 + vmovdqa %xmm7,288(%rsp) + vpaddq %xmm7,%xmm7,%xmm7 + vpmuludq %xmm3,%xmm12,%xmm14 + vmovdqa %xmm3,304(%rsp) + vpaddq %xmm14,%xmm5,%xmm5 + vpmuludq 
curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 + vmovdqa %xmm3,320(%rsp) + vpaddq %xmm9,%xmm8,%xmm3 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm8,%xmm8 + vpsubq %xmm9,%xmm8,%xmm8 + vpunpckhqdq %xmm3,%xmm8,%xmm9 + vpunpcklqdq %xmm3,%xmm8,%xmm3 + vmovdqa %xmm3,336(%rsp) + vpmuludq %xmm7,%xmm12,%xmm8 + vmovdqa %xmm7,352(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm7,%xmm7 + vmovdqa %xmm7,368(%rsp) + vpmuludq %xmm3,%xmm10,%xmm7 + vpaddq %xmm7,%xmm8,%xmm8 + vpmuludq %xmm9,%xmm10,%xmm7 + vmovdqa %xmm9,384(%rsp) + vpaddq %xmm9,%xmm9,%xmm9 + vpmuludq %xmm3,%xmm12,%xmm10 + vpaddq %xmm10,%xmm7,%xmm7 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 + vmovdqa %xmm3,400(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm12,%xmm12 + vpmuludq %xmm9,%xmm12,%xmm3 + vmovdqa %xmm9,416(%rsp) + vpaddq %xmm3,%xmm11,%xmm11 + vmovdqa 0(%rsp),%xmm3 + vmovdqa 16(%rsp),%xmm9 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm10 + vpsubq %xmm9,%xmm10,%xmm10 + vpaddq %xmm9,%xmm3,%xmm3 + vpunpckhqdq %xmm10,%xmm3,%xmm9 + vpunpcklqdq %xmm10,%xmm3,%xmm3 + vpmuludq 144(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm0,%xmm0 + vpmuludq 128(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm2,%xmm2 + vpmuludq 192(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm1,%xmm1 + vpmuludq 176(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm4,%xmm4 + vpmuludq 240(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm6,%xmm6 + vpmuludq 224(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm5,%xmm5 + vpmuludq 304(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm8,%xmm8 + vpmuludq 288(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm7,%xmm7 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 + vpmuludq 336(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm11,%xmm11 + vpmuludq 384(%rsp),%xmm3,%xmm3 + vpaddq %xmm3,%xmm13,%xmm13 + vpmuludq 144(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm2,%xmm2 + vpmuludq 160(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm1,%xmm1 + vpmuludq 192(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm4,%xmm4 + vpmuludq 208(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm6,%xmm6 + vpmuludq 240(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm5,%xmm5 + vpmuludq 256(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm8,%xmm8 + vpmuludq 304(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm7,%xmm7 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm9,%xmm9 + vpmuludq 352(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm11,%xmm11 + vpmuludq 336(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm13,%xmm13 + vpmuludq 416(%rsp),%xmm9,%xmm9 + vpaddq %xmm9,%xmm0,%xmm0 + vmovdqa 32(%rsp),%xmm3 + vmovdqa 80(%rsp),%xmm9 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm10 + vpsubq %xmm9,%xmm10,%xmm10 + vpaddq %xmm9,%xmm3,%xmm3 + vpunpckhqdq %xmm10,%xmm3,%xmm9 + vpunpcklqdq %xmm10,%xmm3,%xmm3 + vpmuludq 144(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm1,%xmm1 + vpmuludq 128(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm4,%xmm4 + vpmuludq 192(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm6,%xmm6 + vpmuludq 176(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm5,%xmm5 + vpmuludq 240(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm8,%xmm8 + vpmuludq 224(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm7,%xmm7 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 + vpmuludq 304(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm11,%xmm11 + vpmuludq 288(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm13,%xmm13 + vpmuludq 336(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm0,%xmm0 + vpmuludq 384(%rsp),%xmm3,%xmm3 + vpaddq %xmm3,%xmm2,%xmm2 + vpmuludq 144(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm4,%xmm4 + vpmuludq 160(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm6,%xmm6 + vpmuludq 192(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm5,%xmm5 + vpmuludq 208(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm8,%xmm8 + vpmuludq 
240(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm7,%xmm7 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm9,%xmm9 + vpmuludq 256(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm11,%xmm11 + vpmuludq 304(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm13,%xmm13 + vpmuludq 352(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm0,%xmm0 + vpmuludq 336(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm2,%xmm2 + vpmuludq 416(%rsp),%xmm9,%xmm9 + vpaddq %xmm9,%xmm1,%xmm1 + vmovdqa 48(%rsp),%xmm3 + vmovdqa 96(%rsp),%xmm9 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm10 + vpsubq %xmm9,%xmm10,%xmm10 + vpaddq %xmm9,%xmm3,%xmm3 + vpunpckhqdq %xmm10,%xmm3,%xmm9 + vpunpcklqdq %xmm10,%xmm3,%xmm3 + vpmuludq 144(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm6,%xmm6 + vpmuludq 128(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm5,%xmm5 + vpmuludq 192(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm8,%xmm8 + vpmuludq 176(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm7,%xmm7 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 + vpmuludq 240(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm11,%xmm11 + vpmuludq 224(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm13,%xmm13 + vpmuludq 304(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm0,%xmm0 + vpmuludq 288(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm2,%xmm2 + vpmuludq 336(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm1,%xmm1 + vpmuludq 384(%rsp),%xmm3,%xmm3 + vpaddq %xmm3,%xmm4,%xmm4 + vpmuludq 144(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm5,%xmm5 + vpmuludq 160(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm8,%xmm8 + vpmuludq 192(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm7,%xmm7 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm9,%xmm9 + vpmuludq 208(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm11,%xmm11 + vpmuludq 240(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm13,%xmm13 + vpmuludq 256(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm0,%xmm0 + vpmuludq 304(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm2,%xmm2 + vpmuludq 352(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm1,%xmm1 + vpmuludq 336(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm4,%xmm4 + vpmuludq 416(%rsp),%xmm9,%xmm9 + vpaddq %xmm9,%xmm6,%xmm6 + vmovdqa 64(%rsp),%xmm3 + vmovdqa 112(%rsp),%xmm9 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm10 + vpsubq %xmm9,%xmm10,%xmm10 + vpaddq %xmm9,%xmm3,%xmm3 + vpunpckhqdq %xmm10,%xmm3,%xmm9 + vpunpcklqdq %xmm10,%xmm3,%xmm3 + vpmuludq 144(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm8,%xmm8 + vpmuludq 128(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm7,%xmm7 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm3,%xmm3 + vpmuludq 192(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm11,%xmm11 + vpmuludq 176(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm13,%xmm13 + vpmuludq 240(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm0,%xmm0 + vpmuludq 224(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm2,%xmm2 + vpmuludq 304(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm1,%xmm1 + vpmuludq 288(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm4,%xmm4 + vpmuludq 336(%rsp),%xmm3,%xmm10 + vpaddq %xmm10,%xmm6,%xmm6 + vpmuludq 384(%rsp),%xmm3,%xmm3 + vpaddq %xmm3,%xmm5,%xmm5 + vpmuludq 144(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm7,%xmm7 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm9,%xmm9 + vpmuludq 160(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm11,%xmm11 + vpmuludq 192(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm13,%xmm13 + vpmuludq 208(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm0,%xmm0 + vpmuludq 240(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm2,%xmm2 + vpmuludq 256(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm1,%xmm1 + vpmuludq 304(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm4,%xmm4 + vpmuludq 352(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm6,%xmm6 + vpmuludq 336(%rsp),%xmm9,%xmm3 + vpaddq %xmm3,%xmm5,%xmm5 + vpmuludq 416(%rsp),%xmm9,%xmm9 + vpaddq %xmm9,%xmm8,%xmm8 + vpsrlq 
$25,%xmm4,%xmm3 + vpaddq %xmm3,%xmm6,%xmm6 + vpand curve25519_sandy2x_m25(%rip),%xmm4,%xmm4 + vpsrlq $26,%xmm11,%xmm3 + vpaddq %xmm3,%xmm13,%xmm13 + vpand curve25519_sandy2x_m26(%rip),%xmm11,%xmm11 + vpsrlq $26,%xmm6,%xmm3 + vpaddq %xmm3,%xmm5,%xmm5 + vpand curve25519_sandy2x_m26(%rip),%xmm6,%xmm6 + vpsrlq $25,%xmm13,%xmm3 + vpaddq %xmm3,%xmm0,%xmm0 + vpand curve25519_sandy2x_m25(%rip),%xmm13,%xmm13 + vpsrlq $25,%xmm5,%xmm3 + vpaddq %xmm3,%xmm8,%xmm8 + vpand curve25519_sandy2x_m25(%rip),%xmm5,%xmm5 + vpsrlq $26,%xmm0,%xmm3 + vpaddq %xmm3,%xmm2,%xmm2 + vpand curve25519_sandy2x_m26(%rip),%xmm0,%xmm0 + vpsrlq $26,%xmm8,%xmm3 + vpaddq %xmm3,%xmm7,%xmm7 + vpand curve25519_sandy2x_m26(%rip),%xmm8,%xmm8 + vpsrlq $25,%xmm2,%xmm3 + vpaddq %xmm3,%xmm1,%xmm1 + vpand curve25519_sandy2x_m25(%rip),%xmm2,%xmm2 + vpsrlq $25,%xmm7,%xmm3 + vpsllq $4,%xmm3,%xmm9 + vpaddq %xmm3,%xmm11,%xmm11 + vpsllq $1,%xmm3,%xmm3 + vpaddq %xmm3,%xmm9,%xmm9 + vpaddq %xmm9,%xmm11,%xmm11 + vpand curve25519_sandy2x_m25(%rip),%xmm7,%xmm7 + vpsrlq $26,%xmm1,%xmm3 + vpaddq %xmm3,%xmm4,%xmm4 + vpand curve25519_sandy2x_m26(%rip),%xmm1,%xmm1 + vpsrlq $26,%xmm11,%xmm3 + vpaddq %xmm3,%xmm13,%xmm13 + vpand curve25519_sandy2x_m26(%rip),%xmm11,%xmm11 + vpsrlq $25,%xmm4,%xmm3 + vpaddq %xmm3,%xmm6,%xmm6 + vpand curve25519_sandy2x_m25(%rip),%xmm4,%xmm4 + vpunpcklqdq %xmm13,%xmm11,%xmm3 + vpunpckhqdq %xmm13,%xmm11,%xmm9 + vpaddq curve25519_sandy2x_subc0(%rip),%xmm9,%xmm10 + vpsubq %xmm3,%xmm10,%xmm10 + vpaddq %xmm9,%xmm3,%xmm3 + vpunpckhqdq %xmm3,%xmm10,%xmm9 + vpunpcklqdq %xmm3,%xmm10,%xmm10 + vpmuludq %xmm10,%xmm10,%xmm3 + vpaddq %xmm10,%xmm10,%xmm10 + vpmuludq %xmm9,%xmm10,%xmm11 + vpunpcklqdq %xmm2,%xmm0,%xmm12 + vpunpckhqdq %xmm2,%xmm0,%xmm0 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm0,%xmm2 + vpsubq %xmm12,%xmm2,%xmm2 + vpaddq %xmm0,%xmm12,%xmm12 + vpunpckhqdq %xmm12,%xmm2,%xmm0 + vpunpcklqdq %xmm12,%xmm2,%xmm2 + vpmuludq %xmm2,%xmm10,%xmm12 + vpaddq %xmm9,%xmm9,%xmm13 + vpmuludq %xmm13,%xmm9,%xmm9 + vpaddq %xmm9,%xmm12,%xmm12 + vpmuludq %xmm0,%xmm10,%xmm9 + vpmuludq %xmm2,%xmm13,%xmm14 + vpaddq %xmm14,%xmm9,%xmm9 + vpunpcklqdq %xmm4,%xmm1,%xmm14 + vpunpckhqdq %xmm4,%xmm1,%xmm1 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm1,%xmm4 + vpsubq %xmm14,%xmm4,%xmm4 + vpaddq %xmm1,%xmm14,%xmm14 + vpunpckhqdq %xmm14,%xmm4,%xmm1 + vpunpcklqdq %xmm14,%xmm4,%xmm4 + vmovdqa %xmm1,0(%rsp) + vpaddq %xmm1,%xmm1,%xmm1 + vmovdqa %xmm1,16(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 + vmovdqa %xmm1,32(%rsp) + vpmuludq %xmm4,%xmm10,%xmm1 + vpmuludq %xmm2,%xmm2,%xmm14 + vpaddq %xmm14,%xmm1,%xmm1 + vpmuludq 0(%rsp),%xmm10,%xmm14 + vpmuludq %xmm4,%xmm13,%xmm15 + vpaddq %xmm15,%xmm14,%xmm14 + vpunpcklqdq %xmm5,%xmm6,%xmm15 + vpunpckhqdq %xmm5,%xmm6,%xmm5 + vpaddq curve25519_sandy2x_subc2(%rip),%xmm5,%xmm6 + vpsubq %xmm15,%xmm6,%xmm6 + vpaddq %xmm5,%xmm15,%xmm15 + vpunpckhqdq %xmm15,%xmm6,%xmm5 + vpunpcklqdq %xmm15,%xmm6,%xmm6 + vmovdqa %xmm6,48(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm6,%xmm6 + vmovdqa %xmm6,64(%rsp) + vmovdqa %xmm5,80(%rsp) + vpmuludq curve25519_sandy2x_v38_38(%rip),%xmm5,%xmm5 + vmovdqa %xmm5,96(%rsp) + vpmuludq 48(%rsp),%xmm10,%xmm5 + vpaddq %xmm0,%xmm0,%xmm6 + vpmuludq %xmm6,%xmm0,%xmm0 + vpaddq %xmm0,%xmm5,%xmm5 + vpmuludq 80(%rsp),%xmm10,%xmm0 + vpmuludq %xmm4,%xmm6,%xmm15 + vpaddq %xmm15,%xmm0,%xmm0 + vpmuludq %xmm6,%xmm13,%xmm15 + vpaddq %xmm15,%xmm1,%xmm1 + vpmuludq %xmm6,%xmm2,%xmm15 + vpaddq %xmm15,%xmm14,%xmm14 + vpunpcklqdq %xmm7,%xmm8,%xmm15 + vpunpckhqdq %xmm7,%xmm8,%xmm7 + vpaddq 
curve25519_sandy2x_subc2(%rip),%xmm7,%xmm8 + vpsubq %xmm15,%xmm8,%xmm8 + vpaddq %xmm7,%xmm15,%xmm15 + vpunpckhqdq %xmm15,%xmm8,%xmm7 + vpunpcklqdq %xmm15,%xmm8,%xmm8 + vmovdqa %xmm8,112(%rsp) + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm8,%xmm8 + vmovdqa %xmm8,160(%rsp) + vpmuludq 112(%rsp),%xmm10,%xmm8 + vpmuludq %xmm7,%xmm10,%xmm10 + vpmuludq curve25519_sandy2x_v38_38(%rip),%xmm7,%xmm15 + vpmuludq %xmm15,%xmm7,%xmm7 + vpaddq %xmm7,%xmm8,%xmm8 + vpmuludq %xmm15,%xmm13,%xmm7 + vpaddq %xmm7,%xmm3,%xmm3 + vpmuludq %xmm15,%xmm2,%xmm7 + vpaddq %xmm7,%xmm11,%xmm11 + vpmuludq 80(%rsp),%xmm13,%xmm7 + vpaddq %xmm7,%xmm7,%xmm7 + vpaddq %xmm7,%xmm8,%xmm8 + vpmuludq 16(%rsp),%xmm13,%xmm7 + vpaddq %xmm7,%xmm5,%xmm5 + vpmuludq 48(%rsp),%xmm13,%xmm7 + vpaddq %xmm7,%xmm0,%xmm0 + vpmuludq 112(%rsp),%xmm13,%xmm7 + vpaddq %xmm7,%xmm10,%xmm10 + vpmuludq %xmm15,%xmm6,%xmm7 + vpaddq %xmm7,%xmm12,%xmm12 + vpmuludq %xmm15,%xmm4,%xmm7 + vpaddq %xmm7,%xmm9,%xmm9 + vpaddq %xmm2,%xmm2,%xmm2 + vpmuludq %xmm4,%xmm2,%xmm7 + vpaddq %xmm7,%xmm5,%xmm5 + vpmuludq 160(%rsp),%xmm2,%xmm7 + vpaddq %xmm7,%xmm3,%xmm3 + vpmuludq 160(%rsp),%xmm6,%xmm7 + vpaddq %xmm7,%xmm11,%xmm11 + vpmuludq 0(%rsp),%xmm2,%xmm7 + vpaddq %xmm7,%xmm0,%xmm0 + vpmuludq 48(%rsp),%xmm2,%xmm7 + vpaddq %xmm7,%xmm8,%xmm8 + vpmuludq 80(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpmuludq 96(%rsp),%xmm4,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpmuludq %xmm4,%xmm4,%xmm2 + vpaddq %xmm2,%xmm8,%xmm8 + vpaddq %xmm4,%xmm4,%xmm2 + vpmuludq 160(%rsp),%xmm2,%xmm4 + vpaddq %xmm4,%xmm12,%xmm12 + vpmuludq 16(%rsp),%xmm15,%xmm4 + vpaddq %xmm4,%xmm1,%xmm1 + vpmuludq 48(%rsp),%xmm15,%xmm4 + vpaddq %xmm4,%xmm14,%xmm14 + vpmuludq 96(%rsp),%xmm6,%xmm4 + vpaddq %xmm4,%xmm3,%xmm3 + vmovdqa 16(%rsp),%xmm4 + vpmuludq 160(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm9,%xmm9 + vpmuludq 16(%rsp),%xmm6,%xmm4 + vpaddq %xmm4,%xmm8,%xmm8 + vpmuludq 48(%rsp),%xmm6,%xmm4 + vpaddq %xmm4,%xmm10,%xmm10 + vpmuludq 80(%rsp),%xmm15,%xmm4 + vpaddq %xmm4,%xmm4,%xmm4 + vpaddq %xmm4,%xmm5,%xmm5 + vpmuludq 112(%rsp),%xmm15,%xmm4 + vpaddq %xmm4,%xmm0,%xmm0 + vmovdqa 48(%rsp),%xmm4 + vpaddq %xmm4,%xmm4,%xmm4 + vpmuludq 160(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm1,%xmm1 + vmovdqa 80(%rsp),%xmm4 + vpaddq %xmm4,%xmm4,%xmm4 + vpmuludq 160(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm14,%xmm14 + vpmuludq 64(%rsp),%xmm2,%xmm4 + vpaddq %xmm4,%xmm3,%xmm3 + vmovdqa 16(%rsp),%xmm4 + vpmuludq 64(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm11,%xmm11 + vmovdqa 16(%rsp),%xmm4 + vpmuludq 96(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm12,%xmm12 + vmovdqa 48(%rsp),%xmm4 + vpmuludq 96(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm9,%xmm9 + vpmuludq 0(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vmovdqa 32(%rsp),%xmm2 + vpmuludq 0(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm3,%xmm3 + vmovdqa 64(%rsp),%xmm2 + vpmuludq 48(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vmovdqa 96(%rsp),%xmm2 + vpmuludq 80(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm1,%xmm1 + vmovdqa 160(%rsp),%xmm2 + vpmuludq 112(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vpsrlq $26,%xmm3,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpand curve25519_sandy2x_m26(%rip),%xmm3,%xmm3 + vpsrlq $25,%xmm14,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vpand curve25519_sandy2x_m25(%rip),%xmm14,%xmm14 + vpsrlq $25,%xmm11,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vpand curve25519_sandy2x_m25(%rip),%xmm11,%xmm11 + vpsrlq $26,%xmm5,%xmm2 + vpaddq %xmm2,%xmm0,%xmm0 + vpand curve25519_sandy2x_m26(%rip),%xmm5,%xmm5 + vpsrlq $26,%xmm12,%xmm2 + vpaddq %xmm2,%xmm9,%xmm9 + vpand curve25519_sandy2x_m26(%rip),%xmm12,%xmm12 + vpsrlq 
$25,%xmm0,%xmm2 + vpaddq %xmm2,%xmm8,%xmm8 + vpand curve25519_sandy2x_m25(%rip),%xmm0,%xmm0 + vpsrlq $25,%xmm9,%xmm2 + vpaddq %xmm2,%xmm1,%xmm1 + vpand curve25519_sandy2x_m25(%rip),%xmm9,%xmm9 + vpsrlq $26,%xmm8,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpand curve25519_sandy2x_m26(%rip),%xmm8,%xmm8 + vpsrlq $26,%xmm1,%xmm2 + vpaddq %xmm2,%xmm14,%xmm14 + vpand curve25519_sandy2x_m26(%rip),%xmm1,%xmm1 + vpsrlq $25,%xmm10,%xmm2 + vpsllq $4,%xmm2,%xmm4 + vpaddq %xmm2,%xmm3,%xmm3 + vpsllq $1,%xmm2,%xmm2 + vpaddq %xmm2,%xmm4,%xmm4 + vpaddq %xmm4,%xmm3,%xmm3 + vpand curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 + vpsrlq $25,%xmm14,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vpand curve25519_sandy2x_m25(%rip),%xmm14,%xmm14 + vpsrlq $26,%xmm3,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpand curve25519_sandy2x_m26(%rip),%xmm3,%xmm3 + vpunpckhqdq %xmm11,%xmm3,%xmm2 + vmovdqa %xmm2,0(%rsp) + vpunpcklqdq %xmm11,%xmm3,%xmm2 + vpmuludq curve25519_sandy2x_v9_9(%rip),%xmm2,%xmm2 + vmovdqa %xmm2,80(%rsp) + vpunpckhqdq %xmm9,%xmm12,%xmm2 + vmovdqa %xmm2,16(%rsp) + vpunpcklqdq %xmm9,%xmm12,%xmm2 + vpmuludq curve25519_sandy2x_v9_9(%rip),%xmm2,%xmm2 + vmovdqa %xmm2,96(%rsp) + vpunpckhqdq %xmm14,%xmm1,%xmm2 + vmovdqa %xmm2,32(%rsp) + vpunpcklqdq %xmm14,%xmm1,%xmm1 + vpmuludq curve25519_sandy2x_v9_9(%rip),%xmm1,%xmm1 + vmovdqa %xmm1,112(%rsp) + vpunpckhqdq %xmm0,%xmm5,%xmm1 + vmovdqa %xmm1,48(%rsp) + vpunpcklqdq %xmm0,%xmm5,%xmm0 + vpmuludq curve25519_sandy2x_v9_9(%rip),%xmm0,%xmm0 + vmovdqa %xmm0,160(%rsp) + vpunpckhqdq %xmm10,%xmm8,%xmm0 + vmovdqa %xmm0,64(%rsp) + vpunpcklqdq %xmm10,%xmm8,%xmm0 + vpmuludq curve25519_sandy2x_v9_9(%rip),%xmm0,%xmm0 + vmovdqa %xmm0,208(%rsp) + vmovdqa 144(%rsp),%xmm0 + vpmuludq %xmm0,%xmm0,%xmm1 + vpaddq %xmm0,%xmm0,%xmm0 + vmovdqa 128(%rsp),%xmm2 + vpmuludq %xmm2,%xmm0,%xmm3 + vmovdqa 192(%rsp),%xmm4 + vpmuludq %xmm4,%xmm0,%xmm5 + vmovdqa 176(%rsp),%xmm6 + vpmuludq %xmm6,%xmm0,%xmm7 + vmovdqa 240(%rsp),%xmm8 + vpmuludq %xmm8,%xmm0,%xmm9 + vpmuludq 224(%rsp),%xmm0,%xmm10 + vpmuludq 304(%rsp),%xmm0,%xmm11 + vpmuludq 288(%rsp),%xmm0,%xmm12 + vpmuludq 336(%rsp),%xmm0,%xmm13 + vmovdqa 384(%rsp),%xmm14 + vpmuludq %xmm14,%xmm0,%xmm0 + vpmuludq curve25519_sandy2x_v38_38(%rip),%xmm14,%xmm15 + vpmuludq %xmm15,%xmm14,%xmm14 + vpaddq %xmm14,%xmm13,%xmm13 + vpaddq %xmm6,%xmm6,%xmm14 + vpmuludq %xmm14,%xmm6,%xmm6 + vpaddq %xmm6,%xmm11,%xmm11 + vpaddq %xmm2,%xmm2,%xmm6 + vpmuludq %xmm6,%xmm2,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vpmuludq %xmm15,%xmm6,%xmm2 + vpaddq %xmm2,%xmm1,%xmm1 + vpmuludq %xmm15,%xmm4,%xmm2 + vpaddq %xmm2,%xmm3,%xmm3 + vpmuludq 256(%rsp),%xmm6,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpmuludq 304(%rsp),%xmm6,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vpmuludq 352(%rsp),%xmm6,%xmm2 + vpaddq %xmm2,%xmm13,%xmm13 + vpmuludq 336(%rsp),%xmm6,%xmm2 + vpaddq %xmm2,%xmm0,%xmm0 + vpmuludq %xmm4,%xmm6,%xmm2 + vpaddq %xmm2,%xmm7,%xmm7 + vpmuludq %xmm14,%xmm6,%xmm2 + vpaddq %xmm2,%xmm9,%xmm9 + vpmuludq %xmm8,%xmm6,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpmuludq %xmm15,%xmm14,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vpmuludq %xmm15,%xmm8,%xmm2 + vpaddq %xmm2,%xmm7,%xmm7 + vpmuludq %xmm4,%xmm4,%xmm2 + vpaddq %xmm2,%xmm9,%xmm9 + vpmuludq %xmm14,%xmm4,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpaddq %xmm4,%xmm4,%xmm2 + vpmuludq %xmm8,%xmm2,%xmm4 + vpaddq %xmm4,%xmm11,%xmm11 + vpmuludq 400(%rsp),%xmm2,%xmm4 + vpaddq %xmm4,%xmm1,%xmm1 + vpmuludq 400(%rsp),%xmm14,%xmm4 + vpaddq %xmm4,%xmm3,%xmm3 + vpmuludq 224(%rsp),%xmm2,%xmm4 + vpaddq %xmm4,%xmm12,%xmm12 + vpmuludq 304(%rsp),%xmm2,%xmm4 + vpaddq %xmm4,%xmm13,%xmm13 + vpmuludq 
288(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm0,%xmm0 + vpmuludq 368(%rsp),%xmm8,%xmm2 + vpaddq %xmm2,%xmm3,%xmm3 + vpmuludq %xmm8,%xmm14,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vpmuludq %xmm8,%xmm8,%xmm2 + vpaddq %xmm2,%xmm13,%xmm13 + vpaddq %xmm8,%xmm8,%xmm2 + vpmuludq 400(%rsp),%xmm2,%xmm4 + vpaddq %xmm4,%xmm5,%xmm5 + vpmuludq 256(%rsp),%xmm15,%xmm4 + vpaddq %xmm4,%xmm9,%xmm9 + vpmuludq 304(%rsp),%xmm15,%xmm4 + vpaddq %xmm4,%xmm10,%xmm10 + vpmuludq 368(%rsp),%xmm14,%xmm4 + vpaddq %xmm4,%xmm1,%xmm1 + vmovdqa 256(%rsp),%xmm4 + vpmuludq 400(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm7,%xmm7 + vpmuludq 256(%rsp),%xmm14,%xmm4 + vpaddq %xmm4,%xmm13,%xmm13 + vpmuludq 304(%rsp),%xmm14,%xmm4 + vpaddq %xmm4,%xmm0,%xmm0 + vpmuludq 352(%rsp),%xmm15,%xmm4 + vpaddq %xmm4,%xmm11,%xmm11 + vpmuludq 336(%rsp),%xmm15,%xmm4 + vpaddq %xmm4,%xmm12,%xmm12 + vmovdqa 304(%rsp),%xmm4 + vpaddq %xmm4,%xmm4,%xmm4 + vpmuludq 400(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm9,%xmm9 + vpmuludq 320(%rsp),%xmm2,%xmm4 + vpaddq %xmm4,%xmm1,%xmm1 + vmovdqa 256(%rsp),%xmm4 + vpmuludq 320(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm3,%xmm3 + vmovdqa 256(%rsp),%xmm4 + vpmuludq 368(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm5,%xmm5 + vmovdqa 304(%rsp),%xmm4 + vpmuludq 368(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm7,%xmm7 + vmovdqa 352(%rsp),%xmm4 + vpmuludq 400(%rsp),%xmm4,%xmm4 + vpaddq %xmm4,%xmm10,%xmm10 + vpmuludq 224(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm0,%xmm0 + vmovdqa 272(%rsp),%xmm2 + vpmuludq 224(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm1,%xmm1 + vmovdqa 320(%rsp),%xmm2 + vpmuludq 304(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vmovdqa 368(%rsp),%xmm2 + vpmuludq 288(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm9,%xmm9 + vmovdqa 400(%rsp),%xmm2 + vpmuludq 336(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpsrlq $26,%xmm1,%xmm2 + vpaddq %xmm2,%xmm3,%xmm3 + vpand curve25519_sandy2x_m26(%rip),%xmm1,%xmm1 + vpsrlq $25,%xmm10,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpand curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 + vpsrlq $25,%xmm3,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vpand curve25519_sandy2x_m25(%rip),%xmm3,%xmm3 + vpsrlq $26,%xmm11,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vpand curve25519_sandy2x_m26(%rip),%xmm11,%xmm11 + vpsrlq $26,%xmm5,%xmm2 + vpaddq %xmm2,%xmm7,%xmm7 + vpand curve25519_sandy2x_m26(%rip),%xmm5,%xmm5 + vpsrlq $25,%xmm12,%xmm2 + vpaddq %xmm2,%xmm13,%xmm13 + vpand curve25519_sandy2x_m25(%rip),%xmm12,%xmm12 + vpsrlq $25,%xmm7,%xmm2 + vpaddq %xmm2,%xmm9,%xmm9 + vpand curve25519_sandy2x_m25(%rip),%xmm7,%xmm7 + vpsrlq $26,%xmm13,%xmm2 + vpaddq %xmm2,%xmm0,%xmm0 + vpand curve25519_sandy2x_m26(%rip),%xmm13,%xmm13 + vpsrlq $26,%xmm9,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpand curve25519_sandy2x_m26(%rip),%xmm9,%xmm9 + vpsrlq $25,%xmm0,%xmm2 + vpsllq $4,%xmm2,%xmm4 + vpaddq %xmm2,%xmm1,%xmm1 + vpsllq $1,%xmm2,%xmm2 + vpaddq %xmm2,%xmm4,%xmm4 + vpaddq %xmm4,%xmm1,%xmm1 + vpand curve25519_sandy2x_m25(%rip),%xmm0,%xmm0 + vpsrlq $25,%xmm10,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpand curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 + vpsrlq $26,%xmm1,%xmm2 + vpaddq %xmm2,%xmm3,%xmm3 + vpand curve25519_sandy2x_m26(%rip),%xmm1,%xmm1 + vpunpckhqdq %xmm3,%xmm1,%xmm2 + vpunpcklqdq %xmm3,%xmm1,%xmm1 + vmovdqa %xmm1,176(%rsp) + vpaddq curve25519_sandy2x_subc0(%rip),%xmm2,%xmm3 + vpsubq %xmm1,%xmm3,%xmm3 + vpunpckhqdq %xmm3,%xmm2,%xmm1 + vpunpcklqdq %xmm3,%xmm2,%xmm2 + vmovdqa %xmm2,192(%rsp) + vmovdqa %xmm1,224(%rsp) + vpsllq $1,%xmm1,%xmm1 + vmovdqa %xmm1,240(%rsp) + vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm3,%xmm3 + vmovdqa 80(%rsp),%xmm1 + vpunpcklqdq %xmm1,%xmm3,%xmm2 + 
vpunpckhqdq %xmm1,%xmm3,%xmm1 + vpunpckhqdq %xmm7,%xmm5,%xmm3 + vpunpcklqdq %xmm7,%xmm5,%xmm4 + vmovdqa %xmm4,256(%rsp) + vpaddq curve25519_sandy2x_subc2(%rip),%xmm3,%xmm5 + vpsubq %xmm4,%xmm5,%xmm5 + vpunpckhqdq %xmm5,%xmm3,%xmm4 + vpunpcklqdq %xmm5,%xmm3,%xmm3 + vmovdqa %xmm3,272(%rsp) + vmovdqa %xmm4,288(%rsp) + vpsllq $1,%xmm4,%xmm4 + vmovdqa %xmm4,304(%rsp) + vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm5,%xmm5 + vmovdqa 96(%rsp),%xmm3 + vpunpcklqdq %xmm3,%xmm5,%xmm4 + vpunpckhqdq %xmm3,%xmm5,%xmm3 + vpunpckhqdq %xmm10,%xmm9,%xmm5 + vpunpcklqdq %xmm10,%xmm9,%xmm6 + vmovdqa %xmm6,320(%rsp) + vpaddq curve25519_sandy2x_subc2(%rip),%xmm5,%xmm7 + vpsubq %xmm6,%xmm7,%xmm7 + vpunpckhqdq %xmm7,%xmm5,%xmm6 + vpunpcklqdq %xmm7,%xmm5,%xmm5 + vmovdqa %xmm5,336(%rsp) + vmovdqa %xmm6,352(%rsp) + vpsllq $1,%xmm6,%xmm6 + vmovdqa %xmm6,368(%rsp) + vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm7,%xmm7 + vmovdqa 112(%rsp),%xmm5 + vpunpcklqdq %xmm5,%xmm7,%xmm6 + vpunpckhqdq %xmm5,%xmm7,%xmm5 + vpunpckhqdq %xmm12,%xmm11,%xmm7 + vpunpcklqdq %xmm12,%xmm11,%xmm8 + vmovdqa %xmm8,384(%rsp) + vpaddq curve25519_sandy2x_subc2(%rip),%xmm7,%xmm9 + vpsubq %xmm8,%xmm9,%xmm9 + vpunpckhqdq %xmm9,%xmm7,%xmm8 + vpunpcklqdq %xmm9,%xmm7,%xmm7 + vmovdqa %xmm7,400(%rsp) + vmovdqa %xmm8,416(%rsp) + vpsllq $1,%xmm8,%xmm8 + vmovdqa %xmm8,432(%rsp) + vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm9,%xmm9 + vmovdqa 160(%rsp),%xmm7 + vpunpcklqdq %xmm7,%xmm9,%xmm8 + vpunpckhqdq %xmm7,%xmm9,%xmm7 + vpunpckhqdq %xmm0,%xmm13,%xmm9 + vpunpcklqdq %xmm0,%xmm13,%xmm0 + vmovdqa %xmm0,160(%rsp) + vpaddq curve25519_sandy2x_subc2(%rip),%xmm9,%xmm10 + vpsubq %xmm0,%xmm10,%xmm10 + vpunpckhqdq %xmm10,%xmm9,%xmm0 + vpunpcklqdq %xmm10,%xmm9,%xmm9 + vmovdqa %xmm9,448(%rsp) + vmovdqa %xmm0,464(%rsp) + vpsllq $1,%xmm0,%xmm0 + vmovdqa %xmm0,480(%rsp) + vpmuludq curve25519_sandy2x_v121666_121666(%rip),%xmm10,%xmm10 + vmovdqa 208(%rsp),%xmm0 + vpunpcklqdq %xmm0,%xmm10,%xmm9 + vpunpckhqdq %xmm0,%xmm10,%xmm0 + vpsrlq $26,%xmm2,%xmm10 + vpaddq %xmm10,%xmm1,%xmm1 + vpand curve25519_sandy2x_m26(%rip),%xmm2,%xmm2 + vpsrlq $25,%xmm5,%xmm10 + vpaddq %xmm10,%xmm8,%xmm8 + vpand curve25519_sandy2x_m25(%rip),%xmm5,%xmm5 + vpsrlq $25,%xmm1,%xmm10 + vpaddq %xmm10,%xmm4,%xmm4 + vpand curve25519_sandy2x_m25(%rip),%xmm1,%xmm1 + vpsrlq $26,%xmm8,%xmm10 + vpaddq %xmm10,%xmm7,%xmm7 + vpand curve25519_sandy2x_m26(%rip),%xmm8,%xmm8 + vpsrlq $26,%xmm4,%xmm10 + vpaddq %xmm10,%xmm3,%xmm3 + vpand curve25519_sandy2x_m26(%rip),%xmm4,%xmm4 + vpsrlq $25,%xmm7,%xmm10 + vpaddq %xmm10,%xmm9,%xmm9 + vpand curve25519_sandy2x_m25(%rip),%xmm7,%xmm7 + vpsrlq $25,%xmm3,%xmm10 + vpaddq %xmm10,%xmm6,%xmm6 + vpand curve25519_sandy2x_m25(%rip),%xmm3,%xmm3 + vpsrlq $26,%xmm9,%xmm10 + vpaddq %xmm10,%xmm0,%xmm0 + vpand curve25519_sandy2x_m26(%rip),%xmm9,%xmm9 + vpsrlq $26,%xmm6,%xmm10 + vpaddq %xmm10,%xmm5,%xmm5 + vpand curve25519_sandy2x_m26(%rip),%xmm6,%xmm6 + vpsrlq $25,%xmm0,%xmm10 + vpsllq $4,%xmm10,%xmm11 + vpaddq %xmm10,%xmm2,%xmm2 + vpsllq $1,%xmm10,%xmm10 + vpaddq %xmm10,%xmm11,%xmm11 + vpaddq %xmm11,%xmm2,%xmm2 + vpand curve25519_sandy2x_m25(%rip),%xmm0,%xmm0 + vpsrlq $25,%xmm5,%xmm10 + vpaddq %xmm10,%xmm8,%xmm8 + vpand curve25519_sandy2x_m25(%rip),%xmm5,%xmm5 + vpsrlq $26,%xmm2,%xmm10 + vpaddq %xmm10,%xmm1,%xmm1 + vpand curve25519_sandy2x_m26(%rip),%xmm2,%xmm2 + vpunpckhqdq %xmm1,%xmm2,%xmm10 + vmovdqa %xmm10,80(%rsp) + vpunpcklqdq %xmm1,%xmm2,%xmm1 + vpunpckhqdq %xmm3,%xmm4,%xmm2 + vmovdqa %xmm2,96(%rsp) + vpunpcklqdq %xmm3,%xmm4,%xmm2 + vpunpckhqdq 
%xmm5,%xmm6,%xmm3 + vmovdqa %xmm3,112(%rsp) + vpunpcklqdq %xmm5,%xmm6,%xmm3 + vpunpckhqdq %xmm7,%xmm8,%xmm4 + vmovdqa %xmm4,128(%rsp) + vpunpcklqdq %xmm7,%xmm8,%xmm4 + vpunpckhqdq %xmm0,%xmm9,%xmm5 + vmovdqa %xmm5,144(%rsp) + vpunpcklqdq %xmm0,%xmm9,%xmm0 + vmovdqa 176(%rsp),%xmm5 + vpaddq %xmm5,%xmm1,%xmm1 + vpunpcklqdq %xmm1,%xmm5,%xmm6 + vpunpckhqdq %xmm1,%xmm5,%xmm1 + vpmuludq 224(%rsp),%xmm6,%xmm5 + vpmuludq 192(%rsp),%xmm1,%xmm7 + vpaddq %xmm7,%xmm5,%xmm5 + vpmuludq 272(%rsp),%xmm6,%xmm7 + vpmuludq 240(%rsp),%xmm1,%xmm8 + vpaddq %xmm8,%xmm7,%xmm7 + vpmuludq 288(%rsp),%xmm6,%xmm8 + vpmuludq 272(%rsp),%xmm1,%xmm9 + vpaddq %xmm9,%xmm8,%xmm8 + vpmuludq 336(%rsp),%xmm6,%xmm9 + vpmuludq 304(%rsp),%xmm1,%xmm10 + vpaddq %xmm10,%xmm9,%xmm9 + vpmuludq 352(%rsp),%xmm6,%xmm10 + vpmuludq 336(%rsp),%xmm1,%xmm11 + vpaddq %xmm11,%xmm10,%xmm10 + vpmuludq 400(%rsp),%xmm6,%xmm11 + vpmuludq 368(%rsp),%xmm1,%xmm12 + vpaddq %xmm12,%xmm11,%xmm11 + vpmuludq 416(%rsp),%xmm6,%xmm12 + vpmuludq 400(%rsp),%xmm1,%xmm13 + vpaddq %xmm13,%xmm12,%xmm12 + vpmuludq 448(%rsp),%xmm6,%xmm13 + vpmuludq 432(%rsp),%xmm1,%xmm14 + vpaddq %xmm14,%xmm13,%xmm13 + vpmuludq 464(%rsp),%xmm6,%xmm14 + vpmuludq 448(%rsp),%xmm1,%xmm15 + vpaddq %xmm15,%xmm14,%xmm14 + vpmuludq 192(%rsp),%xmm6,%xmm6 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 + vpmuludq 480(%rsp),%xmm1,%xmm1 + vpaddq %xmm1,%xmm6,%xmm6 + vmovdqa 256(%rsp),%xmm1 + vpaddq %xmm1,%xmm2,%xmm2 + vpunpcklqdq %xmm2,%xmm1,%xmm15 + vpunpckhqdq %xmm2,%xmm1,%xmm1 + vpmuludq 192(%rsp),%xmm15,%xmm2 + vpaddq %xmm2,%xmm7,%xmm7 + vpmuludq 224(%rsp),%xmm15,%xmm2 + vpaddq %xmm2,%xmm8,%xmm8 + vpmuludq 272(%rsp),%xmm15,%xmm2 + vpaddq %xmm2,%xmm9,%xmm9 + vpmuludq 288(%rsp),%xmm15,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpmuludq 336(%rsp),%xmm15,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpmuludq 352(%rsp),%xmm15,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vpmuludq 400(%rsp),%xmm15,%xmm2 + vpaddq %xmm2,%xmm13,%xmm13 + vpmuludq 416(%rsp),%xmm15,%xmm2 + vpaddq %xmm2,%xmm14,%xmm14 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm15,%xmm15 + vpmuludq 448(%rsp),%xmm15,%xmm2 + vpaddq %xmm2,%xmm6,%xmm6 + vpmuludq 464(%rsp),%xmm15,%xmm15 + vpaddq %xmm15,%xmm5,%xmm5 + vpmuludq 192(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm8,%xmm8 + vpmuludq 240(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm9,%xmm9 + vpmuludq 272(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpmuludq 304(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpmuludq 336(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vpmuludq 368(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm13,%xmm13 + vpmuludq 400(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm14,%xmm14 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 + vpmuludq 432(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm6,%xmm6 + vpmuludq 448(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vpmuludq 480(%rsp),%xmm1,%xmm1 + vpaddq %xmm1,%xmm7,%xmm7 + vmovdqa 320(%rsp),%xmm1 + vpaddq %xmm1,%xmm3,%xmm3 + vpunpcklqdq %xmm3,%xmm1,%xmm2 + vpunpckhqdq %xmm3,%xmm1,%xmm1 + vpmuludq 192(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm9,%xmm9 + vpmuludq 224(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm10,%xmm10 + vpmuludq 272(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm11,%xmm11 + vpmuludq 288(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm12,%xmm12 + vpmuludq 336(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm13,%xmm13 + vpmuludq 352(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm14,%xmm14 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm2,%xmm2 + vpmuludq 400(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm6,%xmm6 + vpmuludq 416(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm5,%xmm5 + vpmuludq 448(%rsp),%xmm2,%xmm3 + vpaddq 
%xmm3,%xmm7,%xmm7 + vpmuludq 464(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm8,%xmm8 + vpmuludq 192(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpmuludq 240(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpmuludq 272(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vpmuludq 304(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm13,%xmm13 + vpmuludq 336(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm14,%xmm14 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 + vpmuludq 368(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm6,%xmm6 + vpmuludq 400(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vpmuludq 432(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm7,%xmm7 + vpmuludq 448(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm8,%xmm8 + vpmuludq 480(%rsp),%xmm1,%xmm1 + vpaddq %xmm1,%xmm9,%xmm9 + vmovdqa 384(%rsp),%xmm1 + vpaddq %xmm1,%xmm4,%xmm4 + vpunpcklqdq %xmm4,%xmm1,%xmm2 + vpunpckhqdq %xmm4,%xmm1,%xmm1 + vpmuludq 192(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm11,%xmm11 + vpmuludq 224(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm12,%xmm12 + vpmuludq 272(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm13,%xmm13 + vpmuludq 288(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm14,%xmm14 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm2,%xmm2 + vpmuludq 336(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm6,%xmm6 + vpmuludq 352(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm5,%xmm5 + vpmuludq 400(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm7,%xmm7 + vpmuludq 416(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm8,%xmm8 + vpmuludq 448(%rsp),%xmm2,%xmm3 + vpaddq %xmm3,%xmm9,%xmm9 + vpmuludq 464(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpmuludq 192(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vpmuludq 240(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm13,%xmm13 + vpmuludq 272(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm14,%xmm14 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm1,%xmm1 + vpmuludq 304(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm6,%xmm6 + vpmuludq 336(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm5,%xmm5 + vpmuludq 368(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm7,%xmm7 + vpmuludq 400(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm8,%xmm8 + vpmuludq 432(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm9,%xmm9 + vpmuludq 448(%rsp),%xmm1,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + vpmuludq 480(%rsp),%xmm1,%xmm1 + vpaddq %xmm1,%xmm11,%xmm11 + vmovdqa 160(%rsp),%xmm1 + vpaddq %xmm1,%xmm0,%xmm0 + vpunpcklqdq %xmm0,%xmm1,%xmm2 + vpunpckhqdq %xmm0,%xmm1,%xmm0 + vpmuludq 192(%rsp),%xmm2,%xmm1 + vpaddq %xmm1,%xmm13,%xmm13 + vpmuludq 224(%rsp),%xmm2,%xmm1 + vpaddq %xmm1,%xmm14,%xmm14 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm2,%xmm2 + vpmuludq 272(%rsp),%xmm2,%xmm1 + vpaddq %xmm1,%xmm6,%xmm6 + vpmuludq 288(%rsp),%xmm2,%xmm1 + vpaddq %xmm1,%xmm5,%xmm5 + vpmuludq 336(%rsp),%xmm2,%xmm1 + vpaddq %xmm1,%xmm7,%xmm7 + vpmuludq 352(%rsp),%xmm2,%xmm1 + vpaddq %xmm1,%xmm8,%xmm8 + vpmuludq 400(%rsp),%xmm2,%xmm1 + vpaddq %xmm1,%xmm9,%xmm9 + vpmuludq 416(%rsp),%xmm2,%xmm1 + vpaddq %xmm1,%xmm10,%xmm10 + vpmuludq 448(%rsp),%xmm2,%xmm1 + vpaddq %xmm1,%xmm11,%xmm11 + vpmuludq 464(%rsp),%xmm2,%xmm2 + vpaddq %xmm2,%xmm12,%xmm12 + vpmuludq 192(%rsp),%xmm0,%xmm1 + vpaddq %xmm1,%xmm14,%xmm14 + vpmuludq curve25519_sandy2x_v19_19(%rip),%xmm0,%xmm0 + vpmuludq 240(%rsp),%xmm0,%xmm1 + vpaddq %xmm1,%xmm6,%xmm6 + vpmuludq 272(%rsp),%xmm0,%xmm1 + vpaddq %xmm1,%xmm5,%xmm5 + vpmuludq 304(%rsp),%xmm0,%xmm1 + vpaddq %xmm1,%xmm7,%xmm7 + vpmuludq 336(%rsp),%xmm0,%xmm1 + vpaddq %xmm1,%xmm8,%xmm8 + vpmuludq 368(%rsp),%xmm0,%xmm1 + vpaddq %xmm1,%xmm9,%xmm9 + vpmuludq 400(%rsp),%xmm0,%xmm1 + vpaddq %xmm1,%xmm10,%xmm10 + vpmuludq 432(%rsp),%xmm0,%xmm1 + vpaddq %xmm1,%xmm11,%xmm11 + vpmuludq 448(%rsp),%xmm0,%xmm1 + vpaddq 
%xmm1,%xmm12,%xmm12 + vpmuludq 480(%rsp),%xmm0,%xmm0 + vpaddq %xmm0,%xmm13,%xmm13 + vpsrlq $26,%xmm6,%xmm0 + vpaddq %xmm0,%xmm5,%xmm5 + vpand curve25519_sandy2x_m26(%rip),%xmm6,%xmm6 + vpsrlq $25,%xmm10,%xmm0 + vpaddq %xmm0,%xmm11,%xmm11 + vpand curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 + vpsrlq $25,%xmm5,%xmm0 + vpaddq %xmm0,%xmm7,%xmm7 + vpand curve25519_sandy2x_m25(%rip),%xmm5,%xmm5 + vpsrlq $26,%xmm11,%xmm0 + vpaddq %xmm0,%xmm12,%xmm12 + vpand curve25519_sandy2x_m26(%rip),%xmm11,%xmm11 + vpsrlq $26,%xmm7,%xmm0 + vpaddq %xmm0,%xmm8,%xmm8 + vpand curve25519_sandy2x_m26(%rip),%xmm7,%xmm7 + vpsrlq $25,%xmm12,%xmm0 + vpaddq %xmm0,%xmm13,%xmm13 + vpand curve25519_sandy2x_m25(%rip),%xmm12,%xmm12 + vpsrlq $25,%xmm8,%xmm0 + vpaddq %xmm0,%xmm9,%xmm9 + vpand curve25519_sandy2x_m25(%rip),%xmm8,%xmm8 + vpsrlq $26,%xmm13,%xmm0 + vpaddq %xmm0,%xmm14,%xmm14 + vpand curve25519_sandy2x_m26(%rip),%xmm13,%xmm13 + vpsrlq $26,%xmm9,%xmm0 + vpaddq %xmm0,%xmm10,%xmm10 + vpand curve25519_sandy2x_m26(%rip),%xmm9,%xmm9 + vpsrlq $25,%xmm14,%xmm0 + vpsllq $4,%xmm0,%xmm1 + vpaddq %xmm0,%xmm6,%xmm6 + vpsllq $1,%xmm0,%xmm0 + vpaddq %xmm0,%xmm1,%xmm1 + vpaddq %xmm1,%xmm6,%xmm6 + vpand curve25519_sandy2x_m25(%rip),%xmm14,%xmm14 + vpsrlq $25,%xmm10,%xmm0 + vpaddq %xmm0,%xmm11,%xmm11 + vpand curve25519_sandy2x_m25(%rip),%xmm10,%xmm10 + vpsrlq $26,%xmm6,%xmm0 + vpaddq %xmm0,%xmm5,%xmm5 + vpand curve25519_sandy2x_m26(%rip),%xmm6,%xmm6 + vpunpckhqdq %xmm5,%xmm6,%xmm1 + vpunpcklqdq %xmm5,%xmm6,%xmm0 + vpunpckhqdq %xmm8,%xmm7,%xmm3 + vpunpcklqdq %xmm8,%xmm7,%xmm2 + vpunpckhqdq %xmm10,%xmm9,%xmm5 + vpunpcklqdq %xmm10,%xmm9,%xmm4 + vpunpckhqdq %xmm12,%xmm11,%xmm7 + vpunpcklqdq %xmm12,%xmm11,%xmm6 + vpunpckhqdq %xmm14,%xmm13,%xmm9 + vpunpcklqdq %xmm14,%xmm13,%xmm8 + cmp $0,%rdx + jne .Lladder_base_loop + vmovdqu %xmm1,80(%rdi) + vmovdqu %xmm0,0(%rdi) + vmovdqu %xmm3,96(%rdi) + vmovdqu %xmm2,16(%rdi) + vmovdqu %xmm5,112(%rdi) + vmovdqu %xmm4,32(%rdi) + vmovdqu %xmm7,128(%rdi) + vmovdqu %xmm6,48(%rdi) + vmovdqu %xmm9,144(%rdi) + vmovdqu %xmm8,64(%rdi) + movq 1536(%rsp),%r11 + movq 1544(%rsp),%r12 + movq 1552(%rsp),%r13 + leave + ret +ENDPROC(curve25519_sandy2x_ladder_base) +#endif /* CONFIG_AS_AVX */ diff --git a/src/crypto/poly1305-avx2-x86_64.S b/src/crypto/poly1305-avx2-x86_64.S deleted file mode 100644 index 35d958f..0000000 --- a/src/crypto/poly1305-avx2-x86_64.S +++ /dev/null @@ -1,387 +0,0 @@ -/* - * Poly1305 authenticator algorithm, RFC7539, x64 AVX2 functions - * - * Copyright (C) 2015 Martin Willi - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 2 of the License, or - * (at your option) any later version. 
- */ - -#include - -.section .rodata.cst32.ANMASK, "aM", @progbits, 32 -.align 32 -ANMASK: .octa 0x0000000003ffffff0000000003ffffff - .octa 0x0000000003ffffff0000000003ffffff -.section .rodata.cst32.ORMASK, "aM", @progbits, 32 -.align 32 -ORMASK: .octa 0x00000000010000000000000001000000 - .octa 0x00000000010000000000000001000000 - -.text - -#define h0 0x00(%rdi) -#define h1 0x04(%rdi) -#define h2 0x08(%rdi) -#define h3 0x0c(%rdi) -#define h4 0x10(%rdi) -#define r0 0x00(%rdx) -#define r1 0x04(%rdx) -#define r2 0x08(%rdx) -#define r3 0x0c(%rdx) -#define r4 0x10(%rdx) -#define u0 0x00(%r8) -#define u1 0x04(%r8) -#define u2 0x08(%r8) -#define u3 0x0c(%r8) -#define u4 0x10(%r8) -#define w0 0x14(%r8) -#define w1 0x18(%r8) -#define w2 0x1c(%r8) -#define w3 0x20(%r8) -#define w4 0x24(%r8) -#define y0 0x28(%r8) -#define y1 0x2c(%r8) -#define y2 0x30(%r8) -#define y3 0x34(%r8) -#define y4 0x38(%r8) -#define m %rsi -#define hc0 %ymm0 -#define hc1 %ymm1 -#define hc2 %ymm2 -#define hc3 %ymm3 -#define hc4 %ymm4 -#define hc0x %xmm0 -#define hc1x %xmm1 -#define hc2x %xmm2 -#define hc3x %xmm3 -#define hc4x %xmm4 -#define t1 %ymm5 -#define t2 %ymm6 -#define t1x %xmm5 -#define t2x %xmm6 -#define ruwy0 %ymm7 -#define ruwy1 %ymm8 -#define ruwy2 %ymm9 -#define ruwy3 %ymm10 -#define ruwy4 %ymm11 -#define ruwy0x %xmm7 -#define ruwy1x %xmm8 -#define ruwy2x %xmm9 -#define ruwy3x %xmm10 -#define ruwy4x %xmm11 -#define svxz1 %ymm12 -#define svxz2 %ymm13 -#define svxz3 %ymm14 -#define svxz4 %ymm15 -#define d0 %r9 -#define d1 %r10 -#define d2 %r11 -#define d3 %r12 -#define d4 %r13 - -ENTRY(poly1305_asm_4block_avx2) - # %rdi: Accumulator h[5] - # %rsi: 64 byte input block m - # %rdx: Poly1305 key r[5] - # %rcx: Quadblock count - # %r8: Poly1305 derived key r^2 u[5], r^3 w[5], r^4 y[5], - - # This four-block variant uses loop unrolled block processing. 
It - # requires 4 Poly1305 keys: r, r^2, r^3 and r^4: - # h = (h + m) * r => h = (h + m1) * r^4 + m2 * r^3 + m3 * r^2 + m4 * r - - vzeroupper - push %rbx - push %r12 - push %r13 - - # combine r0,u0,w0,y0 - vmovd y0,ruwy0x - vmovd w0,t1x - vpunpcklqdq t1,ruwy0,ruwy0 - vmovd u0,t1x - vmovd r0,t2x - vpunpcklqdq t2,t1,t1 - vperm2i128 $0x20,t1,ruwy0,ruwy0 - - # combine r1,u1,w1,y1 and s1=r1*5,v1=u1*5,x1=w1*5,z1=y1*5 - vmovd y1,ruwy1x - vmovd w1,t1x - vpunpcklqdq t1,ruwy1,ruwy1 - vmovd u1,t1x - vmovd r1,t2x - vpunpcklqdq t2,t1,t1 - vperm2i128 $0x20,t1,ruwy1,ruwy1 - vpslld $2,ruwy1,svxz1 - vpaddd ruwy1,svxz1,svxz1 - - # combine r2,u2,w2,y2 and s2=r2*5,v2=u2*5,x2=w2*5,z2=y2*5 - vmovd y2,ruwy2x - vmovd w2,t1x - vpunpcklqdq t1,ruwy2,ruwy2 - vmovd u2,t1x - vmovd r2,t2x - vpunpcklqdq t2,t1,t1 - vperm2i128 $0x20,t1,ruwy2,ruwy2 - vpslld $2,ruwy2,svxz2 - vpaddd ruwy2,svxz2,svxz2 - - # combine r3,u3,w3,y3 and s3=r3*5,v3=u3*5,x3=w3*5,z3=y3*5 - vmovd y3,ruwy3x - vmovd w3,t1x - vpunpcklqdq t1,ruwy3,ruwy3 - vmovd u3,t1x - vmovd r3,t2x - vpunpcklqdq t2,t1,t1 - vperm2i128 $0x20,t1,ruwy3,ruwy3 - vpslld $2,ruwy3,svxz3 - vpaddd ruwy3,svxz3,svxz3 - - # combine r4,u4,w4,y4 and s4=r4*5,v4=u4*5,x4=w4*5,z4=y4*5 - vmovd y4,ruwy4x - vmovd w4,t1x - vpunpcklqdq t1,ruwy4,ruwy4 - vmovd u4,t1x - vmovd r4,t2x - vpunpcklqdq t2,t1,t1 - vperm2i128 $0x20,t1,ruwy4,ruwy4 - vpslld $2,ruwy4,svxz4 - vpaddd ruwy4,svxz4,svxz4 - -.Ldoblock4: - # hc0 = [m[48-51] & 0x3ffffff, m[32-35] & 0x3ffffff, - # m[16-19] & 0x3ffffff, m[ 0- 3] & 0x3ffffff + h0] - vmovd 0x00(m),hc0x - vmovd 0x10(m),t1x - vpunpcklqdq t1,hc0,hc0 - vmovd 0x20(m),t1x - vmovd 0x30(m),t2x - vpunpcklqdq t2,t1,t1 - vperm2i128 $0x20,t1,hc0,hc0 - vpand ANMASK(%rip),hc0,hc0 - vmovd h0,t1x - vpaddd t1,hc0,hc0 - # hc1 = [(m[51-54] >> 2) & 0x3ffffff, (m[35-38] >> 2) & 0x3ffffff, - # (m[19-22] >> 2) & 0x3ffffff, (m[ 3- 6] >> 2) & 0x3ffffff + h1] - vmovd 0x03(m),hc1x - vmovd 0x13(m),t1x - vpunpcklqdq t1,hc1,hc1 - vmovd 0x23(m),t1x - vmovd 0x33(m),t2x - vpunpcklqdq t2,t1,t1 - vperm2i128 $0x20,t1,hc1,hc1 - vpsrld $2,hc1,hc1 - vpand ANMASK(%rip),hc1,hc1 - vmovd h1,t1x - vpaddd t1,hc1,hc1 - # hc2 = [(m[54-57] >> 4) & 0x3ffffff, (m[38-41] >> 4) & 0x3ffffff, - # (m[22-25] >> 4) & 0x3ffffff, (m[ 6- 9] >> 4) & 0x3ffffff + h2] - vmovd 0x06(m),hc2x - vmovd 0x16(m),t1x - vpunpcklqdq t1,hc2,hc2 - vmovd 0x26(m),t1x - vmovd 0x36(m),t2x - vpunpcklqdq t2,t1,t1 - vperm2i128 $0x20,t1,hc2,hc2 - vpsrld $4,hc2,hc2 - vpand ANMASK(%rip),hc2,hc2 - vmovd h2,t1x - vpaddd t1,hc2,hc2 - # hc3 = [(m[57-60] >> 6) & 0x3ffffff, (m[41-44] >> 6) & 0x3ffffff, - # (m[25-28] >> 6) & 0x3ffffff, (m[ 9-12] >> 6) & 0x3ffffff + h3] - vmovd 0x09(m),hc3x - vmovd 0x19(m),t1x - vpunpcklqdq t1,hc3,hc3 - vmovd 0x29(m),t1x - vmovd 0x39(m),t2x - vpunpcklqdq t2,t1,t1 - vperm2i128 $0x20,t1,hc3,hc3 - vpsrld $6,hc3,hc3 - vpand ANMASK(%rip),hc3,hc3 - vmovd h3,t1x - vpaddd t1,hc3,hc3 - # hc4 = [(m[60-63] >> 8) | (1<<24), (m[44-47] >> 8) | (1<<24), - # (m[28-31] >> 8) | (1<<24), (m[12-15] >> 8) | (1<<24) + h4] - vmovd 0x0c(m),hc4x - vmovd 0x1c(m),t1x - vpunpcklqdq t1,hc4,hc4 - vmovd 0x2c(m),t1x - vmovd 0x3c(m),t2x - vpunpcklqdq t2,t1,t1 - vperm2i128 $0x20,t1,hc4,hc4 - vpsrld $8,hc4,hc4 - vpor ORMASK(%rip),hc4,hc4 - vmovd h4,t1x - vpaddd t1,hc4,hc4 - - # t1 = [ hc0[3] * r0, hc0[2] * u0, hc0[1] * w0, hc0[0] * y0 ] - vpmuludq hc0,ruwy0,t1 - # t1 += [ hc1[3] * s4, hc1[2] * v4, hc1[1] * x4, hc1[0] * z4 ] - vpmuludq hc1,svxz4,t2 - vpaddq t2,t1,t1 - # t1 += [ hc2[3] * s3, hc2[2] * v3, hc2[1] * x3, hc2[0] * z3 ] - vpmuludq hc2,svxz3,t2 - vpaddq 
t2,t1,t1 - # t1 += [ hc3[3] * s2, hc3[2] * v2, hc3[1] * x2, hc3[0] * z2 ] - vpmuludq hc3,svxz2,t2 - vpaddq t2,t1,t1 - # t1 += [ hc4[3] * s1, hc4[2] * v1, hc4[1] * x1, hc4[0] * z1 ] - vpmuludq hc4,svxz1,t2 - vpaddq t2,t1,t1 - # d0 = t1[0] + t1[1] + t[2] + t[3] - vpermq $0xee,t1,t2 - vpaddq t2,t1,t1 - vpsrldq $8,t1,t2 - vpaddq t2,t1,t1 - vmovq t1x,d0 - - # t1 = [ hc0[3] * r1, hc0[2] * u1,hc0[1] * w1, hc0[0] * y1 ] - vpmuludq hc0,ruwy1,t1 - # t1 += [ hc1[3] * r0, hc1[2] * u0, hc1[1] * w0, hc1[0] * y0 ] - vpmuludq hc1,ruwy0,t2 - vpaddq t2,t1,t1 - # t1 += [ hc2[3] * s4, hc2[2] * v4, hc2[1] * x4, hc2[0] * z4 ] - vpmuludq hc2,svxz4,t2 - vpaddq t2,t1,t1 - # t1 += [ hc3[3] * s3, hc3[2] * v3, hc3[1] * x3, hc3[0] * z3 ] - vpmuludq hc3,svxz3,t2 - vpaddq t2,t1,t1 - # t1 += [ hc4[3] * s2, hc4[2] * v2, hc4[1] * x2, hc4[0] * z2 ] - vpmuludq hc4,svxz2,t2 - vpaddq t2,t1,t1 - # d1 = t1[0] + t1[1] + t1[3] + t1[4] - vpermq $0xee,t1,t2 - vpaddq t2,t1,t1 - vpsrldq $8,t1,t2 - vpaddq t2,t1,t1 - vmovq t1x,d1 - - # t1 = [ hc0[3] * r2, hc0[2] * u2, hc0[1] * w2, hc0[0] * y2 ] - vpmuludq hc0,ruwy2,t1 - # t1 += [ hc1[3] * r1, hc1[2] * u1, hc1[1] * w1, hc1[0] * y1 ] - vpmuludq hc1,ruwy1,t2 - vpaddq t2,t1,t1 - # t1 += [ hc2[3] * r0, hc2[2] * u0, hc2[1] * w0, hc2[0] * y0 ] - vpmuludq hc2,ruwy0,t2 - vpaddq t2,t1,t1 - # t1 += [ hc3[3] * s4, hc3[2] * v4, hc3[1] * x4, hc3[0] * z4 ] - vpmuludq hc3,svxz4,t2 - vpaddq t2,t1,t1 - # t1 += [ hc4[3] * s3, hc4[2] * v3, hc4[1] * x3, hc4[0] * z3 ] - vpmuludq hc4,svxz3,t2 - vpaddq t2,t1,t1 - # d2 = t1[0] + t1[1] + t1[2] + t1[3] - vpermq $0xee,t1,t2 - vpaddq t2,t1,t1 - vpsrldq $8,t1,t2 - vpaddq t2,t1,t1 - vmovq t1x,d2 - - # t1 = [ hc0[3] * r3, hc0[2] * u3, hc0[1] * w3, hc0[0] * y3 ] - vpmuludq hc0,ruwy3,t1 - # t1 += [ hc1[3] * r2, hc1[2] * u2, hc1[1] * w2, hc1[0] * y2 ] - vpmuludq hc1,ruwy2,t2 - vpaddq t2,t1,t1 - # t1 += [ hc2[3] * r1, hc2[2] * u1, hc2[1] * w1, hc2[0] * y1 ] - vpmuludq hc2,ruwy1,t2 - vpaddq t2,t1,t1 - # t1 += [ hc3[3] * r0, hc3[2] * u0, hc3[1] * w0, hc3[0] * y0 ] - vpmuludq hc3,ruwy0,t2 - vpaddq t2,t1,t1 - # t1 += [ hc4[3] * s4, hc4[2] * v4, hc4[1] * x4, hc4[0] * z4 ] - vpmuludq hc4,svxz4,t2 - vpaddq t2,t1,t1 - # d3 = t1[0] + t1[1] + t1[2] + t1[3] - vpermq $0xee,t1,t2 - vpaddq t2,t1,t1 - vpsrldq $8,t1,t2 - vpaddq t2,t1,t1 - vmovq t1x,d3 - - # t1 = [ hc0[3] * r4, hc0[2] * u4, hc0[1] * w4, hc0[0] * y4 ] - vpmuludq hc0,ruwy4,t1 - # t1 += [ hc1[3] * r3, hc1[2] * u3, hc1[1] * w3, hc1[0] * y3 ] - vpmuludq hc1,ruwy3,t2 - vpaddq t2,t1,t1 - # t1 += [ hc2[3] * r2, hc2[2] * u2, hc2[1] * w2, hc2[0] * y2 ] - vpmuludq hc2,ruwy2,t2 - vpaddq t2,t1,t1 - # t1 += [ hc3[3] * r1, hc3[2] * u1, hc3[1] * w1, hc3[0] * y1 ] - vpmuludq hc3,ruwy1,t2 - vpaddq t2,t1,t1 - # t1 += [ hc4[3] * r0, hc4[2] * u0, hc4[1] * w0, hc4[0] * y0 ] - vpmuludq hc4,ruwy0,t2 - vpaddq t2,t1,t1 - # d4 = t1[0] + t1[1] + t1[2] + t1[3] - vpermq $0xee,t1,t2 - vpaddq t2,t1,t1 - vpsrldq $8,t1,t2 - vpaddq t2,t1,t1 - vmovq t1x,d4 - - # d1 += d0 >> 26 - mov d0,%rax - shr $26,%rax - add %rax,d1 - # h0 = d0 & 0x3ffffff - mov d0,%rbx - and $0x3ffffff,%ebx - - # d2 += d1 >> 26 - mov d1,%rax - shr $26,%rax - add %rax,d2 - # h1 = d1 & 0x3ffffff - mov d1,%rax - and $0x3ffffff,%eax - mov %eax,h1 - - # d3 += d2 >> 26 - mov d2,%rax - shr $26,%rax - add %rax,d3 - # h2 = d2 & 0x3ffffff - mov d2,%rax - and $0x3ffffff,%eax - mov %eax,h2 - - # d4 += d3 >> 26 - mov d3,%rax - shr $26,%rax - add %rax,d4 - # h3 = d3 & 0x3ffffff - mov d3,%rax - and $0x3ffffff,%eax - mov %eax,h3 - - # h0 += (d4 >> 26) * 5 - mov d4,%rax - shr $26,%rax - lea 
(%eax,%eax,4),%eax - add %eax,%ebx - # h4 = d4 & 0x3ffffff - mov d4,%rax - and $0x3ffffff,%eax - mov %eax,h4 - - # h1 += h0 >> 26 - mov %ebx,%eax - shr $26,%eax - add %eax,h1 - # h0 = h0 & 0x3ffffff - andl $0x3ffffff,%ebx - mov %ebx,h0 - - add $0x40,m - dec %rcx - jnz .Ldoblock4 - - vzeroupper - pop %r13 - pop %r12 - pop %rbx - ret -ENDPROC(poly1305_asm_4block_avx2) diff --git a/src/crypto/poly1305-sse2-x86_64.S b/src/crypto/poly1305-sse2-x86_64.S deleted file mode 100644 index 2a3a4e6..0000000 --- a/src/crypto/poly1305-sse2-x86_64.S +++ /dev/null @@ -1,583 +0,0 @@ -/* - * Poly1305 authenticator algorithm, RFC7539, x64 SSE2 functions - * - * Copyright (C) 2015 Martin Willi - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 2 of the License, or - * (at your option) any later version. - */ - -#include - -.section .rodata.cst16.ANMASK, "aM", @progbits, 16 -.align 16 -ANMASK: .octa 0x0000000003ffffff0000000003ffffff -.section .rodata.cst16.ORMASK, "aM", @progbits, 16 -.align 16 -ORMASK: .octa 0x00000000010000000000000001000000 - -.text - -#define h0 0x00(%rdi) -#define h1 0x04(%rdi) -#define h2 0x08(%rdi) -#define h3 0x0c(%rdi) -#define h4 0x10(%rdi) -#define r0 0x00(%rdx) -#define r1 0x04(%rdx) -#define r2 0x08(%rdx) -#define r3 0x0c(%rdx) -#define r4 0x10(%rdx) -#define s1 0x00(%rsp) -#define s2 0x04(%rsp) -#define s3 0x08(%rsp) -#define s4 0x0c(%rsp) -#define m %rsi -#define h01 %xmm0 -#define h23 %xmm1 -#define h44 %xmm2 -#define t1 %xmm3 -#define t2 %xmm4 -#define t3 %xmm5 -#define t4 %xmm6 -#define mask %xmm7 -#define d0 %r8 -#define d1 %r9 -#define d2 %r10 -#define d3 %r11 -#define d4 %r12 - -ENTRY(poly1305_asm_block_sse2) - # %rdi: Accumulator h[5] - # %rsi: 16 byte input block m - # %rdx: Poly1305 key r[5] - # %rcx: Block count - - # This single block variant tries to improve performance by doing two - # multiplications in parallel using SSE instructions. There is quite - # some quardword packing involved, hence the speedup is marginal. 
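
The h = (h + m) * r recurrence described in the comments above, and its unrolled forms using the precomputed powers r^2, r^3, r^4 so that several multiplications can proceed in parallel, is easier to follow in scalar form. The following is a rough C sketch of a single 16-byte block step in the same 5 x 26-bit limb representation these routines use, with s[i] = 5*r[i] folding the high limbs back in. It is illustrative only, not part of the patch, and assumes a little-endian host such as x86_64.

#include <stdint.h>
#include <string.h>

/* Illustrative scalar equivalent of one Poly1305 block step:
 * h = (h + m + 2^128) * r mod 2^130 - 5, in 5 x 26-bit limbs.
 */
static void poly1305_block_sketch(uint32_t h[5], const uint32_t r[5],
                                  const uint8_t m[16])
{
	const uint32_t s1 = r[1] * 5, s2 = r[2] * 5, s3 = r[3] * 5, s4 = r[4] * 5;
	uint64_t d0, d1, d2, d3, d4;
	uint32_t w[4];

	memcpy(w, m, 16);	/* little-endian load, like the unaligned movd/vmovd loads above */

	/* h += m, split into 26-bit limbs (the ANMASK masks), plus the 2^128 pad bit (ORMASK) */
	h[0] +=   w[0]                         & 0x3ffffff;
	h[1] += ((w[0] >> 26) | (w[1] <<  6))  & 0x3ffffff;
	h[2] += ((w[1] >> 20) | (w[2] << 12))  & 0x3ffffff;
	h[3] += ((w[2] >> 14) | (w[3] << 18))  & 0x3ffffff;
	h[4] +=  (w[3] >>  8) | (1U << 24);

	/* h *= r; the wrap past 2^130 is folded in via s[i] = 5*r[i] */
	d0 = (uint64_t)h[0]*r[0] + (uint64_t)h[1]*s4   + (uint64_t)h[2]*s3   + (uint64_t)h[3]*s2   + (uint64_t)h[4]*s1;
	d1 = (uint64_t)h[0]*r[1] + (uint64_t)h[1]*r[0] + (uint64_t)h[2]*s4   + (uint64_t)h[3]*s3   + (uint64_t)h[4]*s2;
	d2 = (uint64_t)h[0]*r[2] + (uint64_t)h[1]*r[1] + (uint64_t)h[2]*r[0] + (uint64_t)h[3]*s4   + (uint64_t)h[4]*s3;
	d3 = (uint64_t)h[0]*r[3] + (uint64_t)h[1]*r[2] + (uint64_t)h[2]*r[1] + (uint64_t)h[3]*r[0] + (uint64_t)h[4]*s4;
	d4 = (uint64_t)h[0]*r[4] + (uint64_t)h[1]*r[3] + (uint64_t)h[2]*r[2] + (uint64_t)h[3]*r[1] + (uint64_t)h[4]*r[0];

	/* partial carry propagation, matching the d0..d4 -> h0..h4 tail of the routines above */
	d1 += d0 >> 26;				h[0] = d0 & 0x3ffffff;
	d2 += d1 >> 26;				h[1] = d1 & 0x3ffffff;
	d3 += d2 >> 26;				h[2] = d2 & 0x3ffffff;
	d4 += d3 >> 26;				h[3] = d3 & 0x3ffffff;
	h[0] += (uint32_t)(d4 >> 26) * 5;	h[4] = d4 & 0x3ffffff;
	h[1] += h[0] >> 26;			h[0] &= 0x3ffffff;
}

The two-block and four-block variants run this same step on several message blocks at once, using r^2 (and r^3, r^4) exactly as the comment above states, so that the per-block multiplications become independent SIMD lanes.
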
- - push %rbx - push %r12 - sub $0x10,%rsp - - # s1..s4 = r1..r4 * 5 - mov r1,%eax - lea (%eax,%eax,4),%eax - mov %eax,s1 - mov r2,%eax - lea (%eax,%eax,4),%eax - mov %eax,s2 - mov r3,%eax - lea (%eax,%eax,4),%eax - mov %eax,s3 - mov r4,%eax - lea (%eax,%eax,4),%eax - mov %eax,s4 - - movdqa ANMASK(%rip),mask - -.Ldoblock: - # h01 = [0, h1, 0, h0] - # h23 = [0, h3, 0, h2] - # h44 = [0, h4, 0, h4] - movd h0,h01 - movd h1,t1 - movd h2,h23 - movd h3,t2 - movd h4,h44 - punpcklqdq t1,h01 - punpcklqdq t2,h23 - punpcklqdq h44,h44 - - # h01 += [ (m[3-6] >> 2) & 0x3ffffff, m[0-3] & 0x3ffffff ] - movd 0x00(m),t1 - movd 0x03(m),t2 - psrld $2,t2 - punpcklqdq t2,t1 - pand mask,t1 - paddd t1,h01 - # h23 += [ (m[9-12] >> 6) & 0x3ffffff, (m[6-9] >> 4) & 0x3ffffff ] - movd 0x06(m),t1 - movd 0x09(m),t2 - psrld $4,t1 - psrld $6,t2 - punpcklqdq t2,t1 - pand mask,t1 - paddd t1,h23 - # h44 += [ (m[12-15] >> 8) | (1 << 24), (m[12-15] >> 8) | (1 << 24) ] - mov 0x0c(m),%eax - shr $8,%eax - or $0x01000000,%eax - movd %eax,t1 - pshufd $0xc4,t1,t1 - paddd t1,h44 - - # t1[0] = h0 * r0 + h2 * s3 - # t1[1] = h1 * s4 + h3 * s2 - movd r0,t1 - movd s4,t2 - punpcklqdq t2,t1 - pmuludq h01,t1 - movd s3,t2 - movd s2,t3 - punpcklqdq t3,t2 - pmuludq h23,t2 - paddq t2,t1 - # t2[0] = h0 * r1 + h2 * s4 - # t2[1] = h1 * r0 + h3 * s3 - movd r1,t2 - movd r0,t3 - punpcklqdq t3,t2 - pmuludq h01,t2 - movd s4,t3 - movd s3,t4 - punpcklqdq t4,t3 - pmuludq h23,t3 - paddq t3,t2 - # t3[0] = h4 * s1 - # t3[1] = h4 * s2 - movd s1,t3 - movd s2,t4 - punpcklqdq t4,t3 - pmuludq h44,t3 - # d0 = t1[0] + t1[1] + t3[0] - # d1 = t2[0] + t2[1] + t3[1] - movdqa t1,t4 - punpcklqdq t2,t4 - punpckhqdq t2,t1 - paddq t4,t1 - paddq t3,t1 - movq t1,d0 - psrldq $8,t1 - movq t1,d1 - - # t1[0] = h0 * r2 + h2 * r0 - # t1[1] = h1 * r1 + h3 * s4 - movd r2,t1 - movd r1,t2 - punpcklqdq t2,t1 - pmuludq h01,t1 - movd r0,t2 - movd s4,t3 - punpcklqdq t3,t2 - pmuludq h23,t2 - paddq t2,t1 - # t2[0] = h0 * r3 + h2 * r1 - # t2[1] = h1 * r2 + h3 * r0 - movd r3,t2 - movd r2,t3 - punpcklqdq t3,t2 - pmuludq h01,t2 - movd r1,t3 - movd r0,t4 - punpcklqdq t4,t3 - pmuludq h23,t3 - paddq t3,t2 - # t3[0] = h4 * s3 - # t3[1] = h4 * s4 - movd s3,t3 - movd s4,t4 - punpcklqdq t4,t3 - pmuludq h44,t3 - # d2 = t1[0] + t1[1] + t3[0] - # d3 = t2[0] + t2[1] + t3[1] - movdqa t1,t4 - punpcklqdq t2,t4 - punpckhqdq t2,t1 - paddq t4,t1 - paddq t3,t1 - movq t1,d2 - psrldq $8,t1 - movq t1,d3 - - # t1[0] = h0 * r4 + h2 * r2 - # t1[1] = h1 * r3 + h3 * r1 - movd r4,t1 - movd r3,t2 - punpcklqdq t2,t1 - pmuludq h01,t1 - movd r2,t2 - movd r1,t3 - punpcklqdq t3,t2 - pmuludq h23,t2 - paddq t2,t1 - # t3[0] = h4 * r0 - movd r0,t3 - pmuludq h44,t3 - # d4 = t1[0] + t1[1] + t3[0] - movdqa t1,t4 - psrldq $8,t4 - paddq t4,t1 - paddq t3,t1 - movq t1,d4 - - # d1 += d0 >> 26 - mov d0,%rax - shr $26,%rax - add %rax,d1 - # h0 = d0 & 0x3ffffff - mov d0,%rbx - and $0x3ffffff,%ebx - - # d2 += d1 >> 26 - mov d1,%rax - shr $26,%rax - add %rax,d2 - # h1 = d1 & 0x3ffffff - mov d1,%rax - and $0x3ffffff,%eax - mov %eax,h1 - - # d3 += d2 >> 26 - mov d2,%rax - shr $26,%rax - add %rax,d3 - # h2 = d2 & 0x3ffffff - mov d2,%rax - and $0x3ffffff,%eax - mov %eax,h2 - - # d4 += d3 >> 26 - mov d3,%rax - shr $26,%rax - add %rax,d4 - # h3 = d3 & 0x3ffffff - mov d3,%rax - and $0x3ffffff,%eax - mov %eax,h3 - - # h0 += (d4 >> 26) * 5 - mov d4,%rax - shr $26,%rax - lea (%eax,%eax,4),%eax - add %eax,%ebx - # h4 = d4 & 0x3ffffff - mov d4,%rax - and $0x3ffffff,%eax - mov %eax,h4 - - # h1 += h0 >> 26 - mov %ebx,%eax - shr $26,%eax - add %eax,h1 - # h0 = 
h0 & 0x3ffffff - andl $0x3ffffff,%ebx - mov %ebx,h0 - - add $0x10,m - dec %rcx - jnz .Ldoblock - - add $0x10,%rsp - pop %r12 - pop %rbx - ret -ENDPROC(poly1305_asm_block_sse2) - - -#define u0 0x00(%r8) -#define u1 0x04(%r8) -#define u2 0x08(%r8) -#define u3 0x0c(%r8) -#define u4 0x10(%r8) -#define hc0 %xmm0 -#define hc1 %xmm1 -#define hc2 %xmm2 -#define hc3 %xmm5 -#define hc4 %xmm6 -#define ru0 %xmm7 -#define ru1 %xmm8 -#define ru2 %xmm9 -#define ru3 %xmm10 -#define ru4 %xmm11 -#define sv1 %xmm12 -#define sv2 %xmm13 -#define sv3 %xmm14 -#define sv4 %xmm15 -#undef d0 -#define d0 %r13 - -ENTRY(poly1305_asm_2block_sse2) - # %rdi: Accumulator h[5] - # %rsi: 16 byte input block m - # %rdx: Poly1305 key r[5] - # %rcx: Doubleblock count - # %r8: Poly1305 derived key r^2 u[5] - - # This two-block variant further improves performance by using loop - # unrolled block processing. This is more straight forward and does - # less byte shuffling, but requires a second Poly1305 key r^2: - # h = (h + m) * r => h = (h + m1) * r^2 + m2 * r - - push %rbx - push %r12 - push %r13 - - # combine r0,u0 - movd u0,ru0 - movd r0,t1 - punpcklqdq t1,ru0 - - # combine r1,u1 and s1=r1*5,v1=u1*5 - movd u1,ru1 - movd r1,t1 - punpcklqdq t1,ru1 - movdqa ru1,sv1 - pslld $2,sv1 - paddd ru1,sv1 - - # combine r2,u2 and s2=r2*5,v2=u2*5 - movd u2,ru2 - movd r2,t1 - punpcklqdq t1,ru2 - movdqa ru2,sv2 - pslld $2,sv2 - paddd ru2,sv2 - - # combine r3,u3 and s3=r3*5,v3=u3*5 - movd u3,ru3 - movd r3,t1 - punpcklqdq t1,ru3 - movdqa ru3,sv3 - pslld $2,sv3 - paddd ru3,sv3 - - # combine r4,u4 and s4=r4*5,v4=u4*5 - movd u4,ru4 - movd r4,t1 - punpcklqdq t1,ru4 - movdqa ru4,sv4 - pslld $2,sv4 - paddd ru4,sv4 - -.Ldoblock2: - # hc0 = [ m[16-19] & 0x3ffffff, h0 + m[0-3] & 0x3ffffff ] - movd 0x00(m),hc0 - movd 0x10(m),t1 - punpcklqdq t1,hc0 - pand ANMASK(%rip),hc0 - movd h0,t1 - paddd t1,hc0 - # hc1 = [ (m[19-22] >> 2) & 0x3ffffff, h1 + (m[3-6] >> 2) & 0x3ffffff ] - movd 0x03(m),hc1 - movd 0x13(m),t1 - punpcklqdq t1,hc1 - psrld $2,hc1 - pand ANMASK(%rip),hc1 - movd h1,t1 - paddd t1,hc1 - # hc2 = [ (m[22-25] >> 4) & 0x3ffffff, h2 + (m[6-9] >> 4) & 0x3ffffff ] - movd 0x06(m),hc2 - movd 0x16(m),t1 - punpcklqdq t1,hc2 - psrld $4,hc2 - pand ANMASK(%rip),hc2 - movd h2,t1 - paddd t1,hc2 - # hc3 = [ (m[25-28] >> 6) & 0x3ffffff, h3 + (m[9-12] >> 6) & 0x3ffffff ] - movd 0x09(m),hc3 - movd 0x19(m),t1 - punpcklqdq t1,hc3 - psrld $6,hc3 - pand ANMASK(%rip),hc3 - movd h3,t1 - paddd t1,hc3 - # hc4 = [ (m[28-31] >> 8) | (1<<24), h4 + (m[12-15] >> 8) | (1<<24) ] - movd 0x0c(m),hc4 - movd 0x1c(m),t1 - punpcklqdq t1,hc4 - psrld $8,hc4 - por ORMASK(%rip),hc4 - movd h4,t1 - paddd t1,hc4 - - # t1 = [ hc0[1] * r0, hc0[0] * u0 ] - movdqa ru0,t1 - pmuludq hc0,t1 - # t1 += [ hc1[1] * s4, hc1[0] * v4 ] - movdqa sv4,t2 - pmuludq hc1,t2 - paddq t2,t1 - # t1 += [ hc2[1] * s3, hc2[0] * v3 ] - movdqa sv3,t2 - pmuludq hc2,t2 - paddq t2,t1 - # t1 += [ hc3[1] * s2, hc3[0] * v2 ] - movdqa sv2,t2 - pmuludq hc3,t2 - paddq t2,t1 - # t1 += [ hc4[1] * s1, hc4[0] * v1 ] - movdqa sv1,t2 - pmuludq hc4,t2 - paddq t2,t1 - # d0 = t1[0] + t1[1] - movdqa t1,t2 - psrldq $8,t2 - paddq t2,t1 - movq t1,d0 - - # t1 = [ hc0[1] * r1, hc0[0] * u1 ] - movdqa ru1,t1 - pmuludq hc0,t1 - # t1 += [ hc1[1] * r0, hc1[0] * u0 ] - movdqa ru0,t2 - pmuludq hc1,t2 - paddq t2,t1 - # t1 += [ hc2[1] * s4, hc2[0] * v4 ] - movdqa sv4,t2 - pmuludq hc2,t2 - paddq t2,t1 - # t1 += [ hc3[1] * s3, hc3[0] * v3 ] - movdqa sv3,t2 - pmuludq hc3,t2 - paddq t2,t1 - # t1 += [ hc4[1] * s2, hc4[0] * v2 ] - movdqa sv2,t2 - pmuludq 
hc4,t2 - paddq t2,t1 - # d1 = t1[0] + t1[1] - movdqa t1,t2 - psrldq $8,t2 - paddq t2,t1 - movq t1,d1 - - # t1 = [ hc0[1] * r2, hc0[0] * u2 ] - movdqa ru2,t1 - pmuludq hc0,t1 - # t1 += [ hc1[1] * r1, hc1[0] * u1 ] - movdqa ru1,t2 - pmuludq hc1,t2 - paddq t2,t1 - # t1 += [ hc2[1] * r0, hc2[0] * u0 ] - movdqa ru0,t2 - pmuludq hc2,t2 - paddq t2,t1 - # t1 += [ hc3[1] * s4, hc3[0] * v4 ] - movdqa sv4,t2 - pmuludq hc3,t2 - paddq t2,t1 - # t1 += [ hc4[1] * s3, hc4[0] * v3 ] - movdqa sv3,t2 - pmuludq hc4,t2 - paddq t2,t1 - # d2 = t1[0] + t1[1] - movdqa t1,t2 - psrldq $8,t2 - paddq t2,t1 - movq t1,d2 - - # t1 = [ hc0[1] * r3, hc0[0] * u3 ] - movdqa ru3,t1 - pmuludq hc0,t1 - # t1 += [ hc1[1] * r2, hc1[0] * u2 ] - movdqa ru2,t2 - pmuludq hc1,t2 - paddq t2,t1 - # t1 += [ hc2[1] * r1, hc2[0] * u1 ] - movdqa ru1,t2 - pmuludq hc2,t2 - paddq t2,t1 - # t1 += [ hc3[1] * r0, hc3[0] * u0 ] - movdqa ru0,t2 - pmuludq hc3,t2 - paddq t2,t1 - # t1 += [ hc4[1] * s4, hc4[0] * v4 ] - movdqa sv4,t2 - pmuludq hc4,t2 - paddq t2,t1 - # d3 = t1[0] + t1[1] - movdqa t1,t2 - psrldq $8,t2 - paddq t2,t1 - movq t1,d3 - - # t1 = [ hc0[1] * r4, hc0[0] * u4 ] - movdqa ru4,t1 - pmuludq hc0,t1 - # t1 += [ hc1[1] * r3, hc1[0] * u3 ] - movdqa ru3,t2 - pmuludq hc1,t2 - paddq t2,t1 - # t1 += [ hc2[1] * r2, hc2[0] * u2 ] - movdqa ru2,t2 - pmuludq hc2,t2 - paddq t2,t1 - # t1 += [ hc3[1] * r1, hc3[0] * u1 ] - movdqa ru1,t2 - pmuludq hc3,t2 - paddq t2,t1 - # t1 += [ hc4[1] * r0, hc4[0] * u0 ] - movdqa ru0,t2 - pmuludq hc4,t2 - paddq t2,t1 - # d4 = t1[0] + t1[1] - movdqa t1,t2 - psrldq $8,t2 - paddq t2,t1 - movq t1,d4 - - # d1 += d0 >> 26 - mov d0,%rax - shr $26,%rax - add %rax,d1 - # h0 = d0 & 0x3ffffff - mov d0,%rbx - and $0x3ffffff,%ebx - - # d2 += d1 >> 26 - mov d1,%rax - shr $26,%rax - add %rax,d2 - # h1 = d1 & 0x3ffffff - mov d1,%rax - and $0x3ffffff,%eax - mov %eax,h1 - - # d3 += d2 >> 26 - mov d2,%rax - shr $26,%rax - add %rax,d3 - # h2 = d2 & 0x3ffffff - mov d2,%rax - and $0x3ffffff,%eax - mov %eax,h2 - - # d4 += d3 >> 26 - mov d3,%rax - shr $26,%rax - add %rax,d4 - # h3 = d3 & 0x3ffffff - mov d3,%rax - and $0x3ffffff,%eax - mov %eax,h3 - - # h0 += (d4 >> 26) * 5 - mov d4,%rax - shr $26,%rax - lea (%eax,%eax,4),%eax - add %eax,%ebx - # h4 = d4 & 0x3ffffff - mov d4,%rax - and $0x3ffffff,%eax - mov %eax,h4 - - # h1 += h0 >> 26 - mov %ebx,%eax - shr $26,%eax - add %eax,h1 - # h0 = h0 & 0x3ffffff - andl $0x3ffffff,%ebx - mov %ebx,h0 - - add $0x20,m - dec %rcx - jnz .Ldoblock2 - - pop %r13 - pop %r12 - pop %rbx - ret -ENDPROC(poly1305_asm_2block_sse2) diff --git a/src/crypto/poly1305-x86_64.S b/src/crypto/poly1305-x86_64.S new file mode 100644 index 0000000..418f661 --- /dev/null +++ b/src/crypto/poly1305-x86_64.S @@ -0,0 +1,2814 @@ +/* Copyright 2016 The OpenSSL Project Authors. All Rights Reserved. + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * - Redistributions of source code must retain copyright notices, + * this list of conditions and the following disclaimer. + * - Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials + * provided with the distribution. + * - Neither the name of the CRYPTOGAMS nor the names of its + * copyright holder and contributors may be used to endorse or + * promote products derived from this software without specific + * prior written permission. 
+ * ALTERNATIVELY, provided that this notice is retained in full, this + * product may be distributed under the terms of the GNU General Public + * License (GPL), in which case the provisions of the GPL apply INSTEAD OF + * those given above. + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +#include + +.section .rodata.cst32.Lmask24, "aM", @progbits, 32 +.align 32 +.Lconst: +.Lmask24: +.long 0x0ffffff,0,0x0ffffff,0,0x0ffffff,0,0x0ffffff,0 +.section .rodata.cst32.L129, "aM", @progbits, 32 +.align 32 +.L129: +.long 16777216,0,16777216,0,16777216,0,16777216,0 +.section .rodata.cst32.Lmask26, "aM", @progbits, 32 +.align 32 +.Lmask26: +.long 0x3ffffff,0,0x3ffffff,0,0x3ffffff,0,0x3ffffff,0 +.section .rodata.cst32.Lpermd_avx2, "aM", @progbits, 32 +.align 32 +.Lpermd_avx2: +.long 2,2,2,3,2,0,2,1 +.section .rodata.cst64.Lpermd_avx512, "aM", @progbits, 64 +.align 64 +.Lpermd_avx512: +.long 0,0,0,1, 0,2,0,3, 0,4,0,5, 0,6,0,7 + +.text + +.align 32 +ENTRY(poly1305_init_x86_64) + xorq %rax,%rax + movq %rax,0(%rdi) + movq %rax,8(%rdi) + movq %rax,16(%rdi) + + cmpq $0,%rsi + je .Lno_key + + movq $0x0ffffffc0fffffff,%rax + movq $0x0ffffffc0ffffffc,%rcx + andq 0(%rsi),%rax + andq 8(%rsi),%rcx + movq %rax,24(%rdi) + movq %rcx,32(%rdi) + movl $1,%eax +.Lno_key: + ret +ENDPROC(poly1305_init_x86_64) + +.align 32 +ENTRY(poly1305_blocks_x86_64) +.Lblocks: + shrq $4,%rdx + jz .Lno_data + + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + +.Lblocks_body: + + movq %rdx,%r15 + + movq 24(%rdi),%r11 + movq 32(%rdi),%r13 + + movq 0(%rdi),%r14 + movq 8(%rdi),%rbx + movq 16(%rdi),%rbp + + movq %r13,%r12 + shrq $2,%r13 + movq %r12,%rax + addq %r12,%r13 + jmp .Loop + +.align 32 +.Loop: + addq 0(%rsi),%r14 + adcq 8(%rsi),%rbx + leaq 16(%rsi),%rsi + adcq %rcx,%rbp + mulq %r14 + movq %rax,%r9 + movq %r11,%rax + movq %rdx,%r10 + + mulq %r14 + movq %rax,%r14 + movq %r11,%rax + movq %rdx,%r8 + + mulq %rbx + addq %rax,%r9 + movq %r13,%rax + adcq %rdx,%r10 + + mulq %rbx + movq %rbp,%rbx + addq %rax,%r14 + adcq %rdx,%r8 + + imulq %r13,%rbx + addq %rbx,%r9 + movq %r8,%rbx + adcq $0,%r10 + + imulq %r11,%rbp + addq %r9,%rbx + movq $-4,%rax + adcq %rbp,%r10 + + andq %r10,%rax + movq %r10,%rbp + shrq $2,%r10 + andq $3,%rbp + addq %r10,%rax + addq %rax,%r14 + adcq $0,%rbx + adcq $0,%rbp + movq %r12,%rax + decq %r15 + jnz .Loop + + movq %r14,0(%rdi) + movq %rbx,8(%rdi) + movq %rbp,16(%rdi) + + movq 0(%rsp),%r15 + movq 8(%rsp),%r14 + movq 16(%rsp),%r13 + movq 24(%rsp),%r12 + movq 32(%rsp),%rbp + movq 40(%rsp),%rbx + leaq 48(%rsp),%rsp + +.Lno_data: +.Lblocks_epilogue: + ret +ENDPROC(poly1305_blocks_x86_64) + +.align 32 +ENTRY(poly1305_emit_x86_64) +.Lemit: + movq 0(%rdi),%r8 + movq 8(%rdi),%r9 + movq 16(%rdi),%r10 + + movq %r8,%rax + addq $5,%r8 + movq %r9,%rcx + adcq 
$0,%r9 + adcq $0,%r10 + shrq $2,%r10 + cmovnzq %r8,%rax + cmovnzq %r9,%rcx + + addq 0(%rdx),%rax + adcq 8(%rdx),%rcx + movq %rax,0(%rsi) + movq %rcx,8(%rsi) + + ret +ENDPROC(poly1305_emit_x86_64) + +.macro __poly1305_block + mulq %r14 + movq %rax,%r9 + movq %r11,%rax + movq %rdx,%r10 + + mulq %r14 + movq %rax,%r14 + movq %r11,%rax + movq %rdx,%r8 + + mulq %rbx + addq %rax,%r9 + movq %r13,%rax + adcq %rdx,%r10 + + mulq %rbx + movq %rbp,%rbx + addq %rax,%r14 + adcq %rdx,%r8 + + imulq %r13,%rbx + addq %rbx,%r9 + movq %r8,%rbx + adcq $0,%r10 + + imulq %r11,%rbp + addq %r9,%rbx + movq $-4,%rax + adcq %rbp,%r10 + + andq %r10,%rax + movq %r10,%rbp + shrq $2,%r10 + andq $3,%rbp + addq %r10,%rax + addq %rax,%r14 + adcq $0,%rbx + adcq $0,%rbp +.endm + +.macro __poly1305_init_avx + movq %r11,%r14 + movq %r12,%rbx + xorq %rbp,%rbp + + leaq 48+64(%rdi),%rdi + + movq %r12,%rax + __poly1305_block + + movl $0x3ffffff,%eax + movl $0x3ffffff,%edx + movq %r14,%r8 + andl %r14d,%eax + movq %r11,%r9 + andl %r11d,%edx + movl %eax,-64(%rdi) + shrq $26,%r8 + movl %edx,-60(%rdi) + shrq $26,%r9 + + movl $0x3ffffff,%eax + movl $0x3ffffff,%edx + andl %r8d,%eax + andl %r9d,%edx + movl %eax,-48(%rdi) + leal (%rax,%rax,4),%eax + movl %edx,-44(%rdi) + leal (%rdx,%rdx,4),%edx + movl %eax,-32(%rdi) + shrq $26,%r8 + movl %edx,-28(%rdi) + shrq $26,%r9 + + movq %rbx,%rax + movq %r12,%rdx + shlq $12,%rax + shlq $12,%rdx + orq %r8,%rax + orq %r9,%rdx + andl $0x3ffffff,%eax + andl $0x3ffffff,%edx + movl %eax,-16(%rdi) + leal (%rax,%rax,4),%eax + movl %edx,-12(%rdi) + leal (%rdx,%rdx,4),%edx + movl %eax,0(%rdi) + movq %rbx,%r8 + movl %edx,4(%rdi) + movq %r12,%r9 + + movl $0x3ffffff,%eax + movl $0x3ffffff,%edx + shrq $14,%r8 + shrq $14,%r9 + andl %r8d,%eax + andl %r9d,%edx + movl %eax,16(%rdi) + leal (%rax,%rax,4),%eax + movl %edx,20(%rdi) + leal (%rdx,%rdx,4),%edx + movl %eax,32(%rdi) + shrq $26,%r8 + movl %edx,36(%rdi) + shrq $26,%r9 + + movq %rbp,%rax + shlq $24,%rax + orq %rax,%r8 + movl %r8d,48(%rdi) + leaq (%r8,%r8,4),%r8 + movl %r9d,52(%rdi) + leaq (%r9,%r9,4),%r9 + movl %r8d,64(%rdi) + movl %r9d,68(%rdi) + + movq %r12,%rax + __poly1305_block + + movl $0x3ffffff,%eax + movq %r14,%r8 + andl %r14d,%eax + shrq $26,%r8 + movl %eax,-52(%rdi) + + movl $0x3ffffff,%edx + andl %r8d,%edx + movl %edx,-36(%rdi) + leal (%rdx,%rdx,4),%edx + shrq $26,%r8 + movl %edx,-20(%rdi) + + movq %rbx,%rax + shlq $12,%rax + orq %r8,%rax + andl $0x3ffffff,%eax + movl %eax,-4(%rdi) + leal (%rax,%rax,4),%eax + movq %rbx,%r8 + movl %eax,12(%rdi) + + movl $0x3ffffff,%edx + shrq $14,%r8 + andl %r8d,%edx + movl %edx,28(%rdi) + leal (%rdx,%rdx,4),%edx + shrq $26,%r8 + movl %edx,44(%rdi) + + movq %rbp,%rax + shlq $24,%rax + orq %rax,%r8 + movl %r8d,60(%rdi) + leaq (%r8,%r8,4),%r8 + movl %r8d,76(%rdi) + + movq %r12,%rax + __poly1305_block + + movl $0x3ffffff,%eax + movq %r14,%r8 + andl %r14d,%eax + shrq $26,%r8 + movl %eax,-56(%rdi) + + movl $0x3ffffff,%edx + andl %r8d,%edx + movl %edx,-40(%rdi) + leal (%rdx,%rdx,4),%edx + shrq $26,%r8 + movl %edx,-24(%rdi) + + movq %rbx,%rax + shlq $12,%rax + orq %r8,%rax + andl $0x3ffffff,%eax + movl %eax,-8(%rdi) + leal (%rax,%rax,4),%eax + movq %rbx,%r8 + movl %eax,8(%rdi) + + movl $0x3ffffff,%edx + shrq $14,%r8 + andl %r8d,%edx + movl %edx,24(%rdi) + leal (%rdx,%rdx,4),%edx + shrq $26,%r8 + movl %edx,40(%rdi) + + movq %rbp,%rax + shlq $24,%rax + orq %rax,%r8 + movl %r8d,56(%rdi) + leaq (%r8,%r8,4),%r8 + movl %r8d,72(%rdi) + + leaq -48-64(%rdi),%rdi +.endm + +#ifdef CONFIG_AS_AVX +.align 32 +ENTRY(poly1305_blocks_avx) + + 
movl 20(%rdi),%r8d + cmpq $128,%rdx + jae .Lblocks_avx + testl %r8d,%r8d + jz .Lblocks + +.Lblocks_avx: + andq $-16,%rdx + jz .Lno_data_avx + + vzeroupper + + testl %r8d,%r8d + jz .Lbase2_64_avx + + testq $31,%rdx + jz .Leven_avx + + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + +.Lblocks_avx_body: + + movq %rdx,%r15 + + movq 0(%rdi),%r8 + movq 8(%rdi),%r9 + movl 16(%rdi),%ebp + + movq 24(%rdi),%r11 + movq 32(%rdi),%r13 + + + movl %r8d,%r14d + andq $-2147483648,%r8 + movq %r9,%r12 + movl %r9d,%ebx + andq $-2147483648,%r9 + + shrq $6,%r8 + shlq $52,%r12 + addq %r8,%r14 + shrq $12,%rbx + shrq $18,%r9 + addq %r12,%r14 + adcq %r9,%rbx + + movq %rbp,%r8 + shlq $40,%r8 + shrq $24,%rbp + addq %r8,%rbx + adcq $0,%rbp + + movq $-4,%r9 + movq %rbp,%r8 + andq %rbp,%r9 + shrq $2,%r8 + andq $3,%rbp + addq %r9,%r8 + addq %r8,%r14 + adcq $0,%rbx + adcq $0,%rbp + + movq %r13,%r12 + movq %r13,%rax + shrq $2,%r13 + addq %r12,%r13 + + addq 0(%rsi),%r14 + adcq 8(%rsi),%rbx + leaq 16(%rsi),%rsi + adcq %rcx,%rbp + + __poly1305_block + + testq %rcx,%rcx + jz .Lstore_base2_64_avx + + + movq %r14,%rax + movq %r14,%rdx + shrq $52,%r14 + movq %rbx,%r11 + movq %rbx,%r12 + shrq $26,%rdx + andq $0x3ffffff,%rax + shlq $12,%r11 + andq $0x3ffffff,%rdx + shrq $14,%rbx + orq %r11,%r14 + shlq $24,%rbp + andq $0x3ffffff,%r14 + shrq $40,%r12 + andq $0x3ffffff,%rbx + orq %r12,%rbp + + subq $16,%r15 + jz .Lstore_base2_26_avx + + vmovd %eax,%xmm0 + vmovd %edx,%xmm1 + vmovd %r14d,%xmm2 + vmovd %ebx,%xmm3 + vmovd %ebp,%xmm4 + jmp .Lproceed_avx + +.align 32 +.Lstore_base2_64_avx: + movq %r14,0(%rdi) + movq %rbx,8(%rdi) + movq %rbp,16(%rdi) + jmp .Ldone_avx + +.align 16 +.Lstore_base2_26_avx: + movl %eax,0(%rdi) + movl %edx,4(%rdi) + movl %r14d,8(%rdi) + movl %ebx,12(%rdi) + movl %ebp,16(%rdi) +.align 16 +.Ldone_avx: + movq 0(%rsp),%r15 + movq 8(%rsp),%r14 + movq 16(%rsp),%r13 + movq 24(%rsp),%r12 + movq 32(%rsp),%rbp + movq 40(%rsp),%rbx + leaq 48(%rsp),%rsp + +.Lno_data_avx: +.Lblocks_avx_epilogue: + ret + +.align 32 +.Lbase2_64_avx: + + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + +.Lbase2_64_avx_body: + + movq %rdx,%r15 + + movq 24(%rdi),%r11 + movq 32(%rdi),%r13 + + movq 0(%rdi),%r14 + movq 8(%rdi),%rbx + movl 16(%rdi),%ebp + + movq %r13,%r12 + movq %r13,%rax + shrq $2,%r13 + addq %r12,%r13 + + testq $31,%rdx + jz .Linit_avx + + addq 0(%rsi),%r14 + adcq 8(%rsi),%rbx + leaq 16(%rsi),%rsi + adcq %rcx,%rbp + subq $16,%r15 + + __poly1305_block + +.Linit_avx: + + movq %r14,%rax + movq %r14,%rdx + shrq $52,%r14 + movq %rbx,%r8 + movq %rbx,%r9 + shrq $26,%rdx + andq $0x3ffffff,%rax + shlq $12,%r8 + andq $0x3ffffff,%rdx + shrq $14,%rbx + orq %r8,%r14 + shlq $24,%rbp + andq $0x3ffffff,%r14 + shrq $40,%r9 + andq $0x3ffffff,%rbx + orq %r9,%rbp + + vmovd %eax,%xmm0 + vmovd %edx,%xmm1 + vmovd %r14d,%xmm2 + vmovd %ebx,%xmm3 + vmovd %ebp,%xmm4 + movl $1,20(%rdi) + + __poly1305_init_avx + +.Lproceed_avx: + movq %r15,%rdx + + movq 0(%rsp),%r15 + movq 8(%rsp),%r14 + movq 16(%rsp),%r13 + movq 24(%rsp),%r12 + movq 32(%rsp),%rbp + movq 40(%rsp),%rbx + leaq 48(%rsp),%rax + leaq 48(%rsp),%rsp + +.Lbase2_64_avx_epilogue: + jmp .Ldo_avx + + +.align 32 +.Leven_avx: + vmovd 0(%rdi),%xmm0 + vmovd 4(%rdi),%xmm1 + vmovd 8(%rdi),%xmm2 + vmovd 12(%rdi),%xmm3 + vmovd 16(%rdi),%xmm4 + +.Ldo_avx: + leaq 8(%rsp),%r10 + andq $-32,%rsp + subq $8,%rsp + leaq -88(%rsp),%r11 + subq $0x178,%rsp + subq $64,%rdx + leaq -32(%rsi),%rax + cmovcq %rax,%rsi + + vmovdqu 48(%rdi),%xmm14 + leaq 112(%rdi),%rdi + leaq 
.Lconst(%rip),%rcx + + vmovdqu 32(%rsi),%xmm5 + vmovdqu 48(%rsi),%xmm6 + vmovdqa 64(%rcx),%xmm15 + + vpsrldq $6,%xmm5,%xmm7 + vpsrldq $6,%xmm6,%xmm8 + vpunpckhqdq %xmm6,%xmm5,%xmm9 + vpunpcklqdq %xmm6,%xmm5,%xmm5 + vpunpcklqdq %xmm8,%xmm7,%xmm8 + + vpsrlq $40,%xmm9,%xmm9 + vpsrlq $26,%xmm5,%xmm6 + vpand %xmm15,%xmm5,%xmm5 + vpsrlq $4,%xmm8,%xmm7 + vpand %xmm15,%xmm6,%xmm6 + vpsrlq $30,%xmm8,%xmm8 + vpand %xmm15,%xmm7,%xmm7 + vpand %xmm15,%xmm8,%xmm8 + vpor 32(%rcx),%xmm9,%xmm9 + + jbe .Lskip_loop_avx + + + vmovdqu -48(%rdi),%xmm11 + vmovdqu -32(%rdi),%xmm12 + vpshufd $0xEE,%xmm14,%xmm13 + vpshufd $0x44,%xmm14,%xmm10 + vmovdqa %xmm13,-144(%r11) + vmovdqa %xmm10,0(%rsp) + vpshufd $0xEE,%xmm11,%xmm14 + vmovdqu -16(%rdi),%xmm10 + vpshufd $0x44,%xmm11,%xmm11 + vmovdqa %xmm14,-128(%r11) + vmovdqa %xmm11,16(%rsp) + vpshufd $0xEE,%xmm12,%xmm13 + vmovdqu 0(%rdi),%xmm11 + vpshufd $0x44,%xmm12,%xmm12 + vmovdqa %xmm13,-112(%r11) + vmovdqa %xmm12,32(%rsp) + vpshufd $0xEE,%xmm10,%xmm14 + vmovdqu 16(%rdi),%xmm12 + vpshufd $0x44,%xmm10,%xmm10 + vmovdqa %xmm14,-96(%r11) + vmovdqa %xmm10,48(%rsp) + vpshufd $0xEE,%xmm11,%xmm13 + vmovdqu 32(%rdi),%xmm10 + vpshufd $0x44,%xmm11,%xmm11 + vmovdqa %xmm13,-80(%r11) + vmovdqa %xmm11,64(%rsp) + vpshufd $0xEE,%xmm12,%xmm14 + vmovdqu 48(%rdi),%xmm11 + vpshufd $0x44,%xmm12,%xmm12 + vmovdqa %xmm14,-64(%r11) + vmovdqa %xmm12,80(%rsp) + vpshufd $0xEE,%xmm10,%xmm13 + vmovdqu 64(%rdi),%xmm12 + vpshufd $0x44,%xmm10,%xmm10 + vmovdqa %xmm13,-48(%r11) + vmovdqa %xmm10,96(%rsp) + vpshufd $0xEE,%xmm11,%xmm14 + vpshufd $0x44,%xmm11,%xmm11 + vmovdqa %xmm14,-32(%r11) + vmovdqa %xmm11,112(%rsp) + vpshufd $0xEE,%xmm12,%xmm13 + vmovdqa 0(%rsp),%xmm14 + vpshufd $0x44,%xmm12,%xmm12 + vmovdqa %xmm13,-16(%r11) + vmovdqa %xmm12,128(%rsp) + + jmp .Loop_avx + +.align 32 +.Loop_avx: + + vpmuludq %xmm5,%xmm14,%xmm10 + vpmuludq %xmm6,%xmm14,%xmm11 + vmovdqa %xmm2,32(%r11) + vpmuludq %xmm7,%xmm14,%xmm12 + vmovdqa 16(%rsp),%xmm2 + vpmuludq %xmm8,%xmm14,%xmm13 + vpmuludq %xmm9,%xmm14,%xmm14 + + vmovdqa %xmm0,0(%r11) + vpmuludq 32(%rsp),%xmm9,%xmm0 + vmovdqa %xmm1,16(%r11) + vpmuludq %xmm8,%xmm2,%xmm1 + vpaddq %xmm0,%xmm10,%xmm10 + vpaddq %xmm1,%xmm14,%xmm14 + vmovdqa %xmm3,48(%r11) + vpmuludq %xmm7,%xmm2,%xmm0 + vpmuludq %xmm6,%xmm2,%xmm1 + vpaddq %xmm0,%xmm13,%xmm13 + vmovdqa 48(%rsp),%xmm3 + vpaddq %xmm1,%xmm12,%xmm12 + vmovdqa %xmm4,64(%r11) + vpmuludq %xmm5,%xmm2,%xmm2 + vpmuludq %xmm7,%xmm3,%xmm0 + vpaddq %xmm2,%xmm11,%xmm11 + + vmovdqa 64(%rsp),%xmm4 + vpaddq %xmm0,%xmm14,%xmm14 + vpmuludq %xmm6,%xmm3,%xmm1 + vpmuludq %xmm5,%xmm3,%xmm3 + vpaddq %xmm1,%xmm13,%xmm13 + vmovdqa 80(%rsp),%xmm2 + vpaddq %xmm3,%xmm12,%xmm12 + vpmuludq %xmm9,%xmm4,%xmm0 + vpmuludq %xmm8,%xmm4,%xmm4 + vpaddq %xmm0,%xmm11,%xmm11 + vmovdqa 96(%rsp),%xmm3 + vpaddq %xmm4,%xmm10,%xmm10 + + vmovdqa 128(%rsp),%xmm4 + vpmuludq %xmm6,%xmm2,%xmm1 + vpmuludq %xmm5,%xmm2,%xmm2 + vpaddq %xmm1,%xmm14,%xmm14 + vpaddq %xmm2,%xmm13,%xmm13 + vpmuludq %xmm9,%xmm3,%xmm0 + vpmuludq %xmm8,%xmm3,%xmm1 + vpaddq %xmm0,%xmm12,%xmm12 + vmovdqu 0(%rsi),%xmm0 + vpaddq %xmm1,%xmm11,%xmm11 + vpmuludq %xmm7,%xmm3,%xmm3 + vpmuludq %xmm7,%xmm4,%xmm7 + vpaddq %xmm3,%xmm10,%xmm10 + + vmovdqu 16(%rsi),%xmm1 + vpaddq %xmm7,%xmm11,%xmm11 + vpmuludq %xmm8,%xmm4,%xmm8 + vpmuludq %xmm9,%xmm4,%xmm9 + vpsrldq $6,%xmm0,%xmm2 + vpaddq %xmm8,%xmm12,%xmm12 + vpaddq %xmm9,%xmm13,%xmm13 + vpsrldq $6,%xmm1,%xmm3 + vpmuludq 112(%rsp),%xmm5,%xmm9 + vpmuludq %xmm6,%xmm4,%xmm5 + vpunpckhqdq %xmm1,%xmm0,%xmm4 + vpaddq %xmm9,%xmm14,%xmm14 + vmovdqa -144(%r11),%xmm9 + vpaddq 
%xmm5,%xmm10,%xmm10 + + vpunpcklqdq %xmm1,%xmm0,%xmm0 + vpunpcklqdq %xmm3,%xmm2,%xmm3 + + + vpsrldq $5,%xmm4,%xmm4 + vpsrlq $26,%xmm0,%xmm1 + vpand %xmm15,%xmm0,%xmm0 + vpsrlq $4,%xmm3,%xmm2 + vpand %xmm15,%xmm1,%xmm1 + vpand 0(%rcx),%xmm4,%xmm4 + vpsrlq $30,%xmm3,%xmm3 + vpand %xmm15,%xmm2,%xmm2 + vpand %xmm15,%xmm3,%xmm3 + vpor 32(%rcx),%xmm4,%xmm4 + + vpaddq 0(%r11),%xmm0,%xmm0 + vpaddq 16(%r11),%xmm1,%xmm1 + vpaddq 32(%r11),%xmm2,%xmm2 + vpaddq 48(%r11),%xmm3,%xmm3 + vpaddq 64(%r11),%xmm4,%xmm4 + + leaq 32(%rsi),%rax + leaq 64(%rsi),%rsi + subq $64,%rdx + cmovcq %rax,%rsi + + vpmuludq %xmm0,%xmm9,%xmm5 + vpmuludq %xmm1,%xmm9,%xmm6 + vpaddq %xmm5,%xmm10,%xmm10 + vpaddq %xmm6,%xmm11,%xmm11 + vmovdqa -128(%r11),%xmm7 + vpmuludq %xmm2,%xmm9,%xmm5 + vpmuludq %xmm3,%xmm9,%xmm6 + vpaddq %xmm5,%xmm12,%xmm12 + vpaddq %xmm6,%xmm13,%xmm13 + vpmuludq %xmm4,%xmm9,%xmm9 + vpmuludq -112(%r11),%xmm4,%xmm5 + vpaddq %xmm9,%xmm14,%xmm14 + + vpaddq %xmm5,%xmm10,%xmm10 + vpmuludq %xmm2,%xmm7,%xmm6 + vpmuludq %xmm3,%xmm7,%xmm5 + vpaddq %xmm6,%xmm13,%xmm13 + vmovdqa -96(%r11),%xmm8 + vpaddq %xmm5,%xmm14,%xmm14 + vpmuludq %xmm1,%xmm7,%xmm6 + vpmuludq %xmm0,%xmm7,%xmm7 + vpaddq %xmm6,%xmm12,%xmm12 + vpaddq %xmm7,%xmm11,%xmm11 + + vmovdqa -80(%r11),%xmm9 + vpmuludq %xmm2,%xmm8,%xmm5 + vpmuludq %xmm1,%xmm8,%xmm6 + vpaddq %xmm5,%xmm14,%xmm14 + vpaddq %xmm6,%xmm13,%xmm13 + vmovdqa -64(%r11),%xmm7 + vpmuludq %xmm0,%xmm8,%xmm8 + vpmuludq %xmm4,%xmm9,%xmm5 + vpaddq %xmm8,%xmm12,%xmm12 + vpaddq %xmm5,%xmm11,%xmm11 + vmovdqa -48(%r11),%xmm8 + vpmuludq %xmm3,%xmm9,%xmm9 + vpmuludq %xmm1,%xmm7,%xmm6 + vpaddq %xmm9,%xmm10,%xmm10 + + vmovdqa -16(%r11),%xmm9 + vpaddq %xmm6,%xmm14,%xmm14 + vpmuludq %xmm0,%xmm7,%xmm7 + vpmuludq %xmm4,%xmm8,%xmm5 + vpaddq %xmm7,%xmm13,%xmm13 + vpaddq %xmm5,%xmm12,%xmm12 + vmovdqu 32(%rsi),%xmm5 + vpmuludq %xmm3,%xmm8,%xmm7 + vpmuludq %xmm2,%xmm8,%xmm8 + vpaddq %xmm7,%xmm11,%xmm11 + vmovdqu 48(%rsi),%xmm6 + vpaddq %xmm8,%xmm10,%xmm10 + + vpmuludq %xmm2,%xmm9,%xmm2 + vpmuludq %xmm3,%xmm9,%xmm3 + vpsrldq $6,%xmm5,%xmm7 + vpaddq %xmm2,%xmm11,%xmm11 + vpmuludq %xmm4,%xmm9,%xmm4 + vpsrldq $6,%xmm6,%xmm8 + vpaddq %xmm3,%xmm12,%xmm2 + vpaddq %xmm4,%xmm13,%xmm3 + vpmuludq -32(%r11),%xmm0,%xmm4 + vpmuludq %xmm1,%xmm9,%xmm0 + vpunpckhqdq %xmm6,%xmm5,%xmm9 + vpaddq %xmm4,%xmm14,%xmm4 + vpaddq %xmm0,%xmm10,%xmm0 + + vpunpcklqdq %xmm6,%xmm5,%xmm5 + vpunpcklqdq %xmm8,%xmm7,%xmm8 + + + vpsrldq $5,%xmm9,%xmm9 + vpsrlq $26,%xmm5,%xmm6 + vmovdqa 0(%rsp),%xmm14 + vpand %xmm15,%xmm5,%xmm5 + vpsrlq $4,%xmm8,%xmm7 + vpand %xmm15,%xmm6,%xmm6 + vpand 0(%rcx),%xmm9,%xmm9 + vpsrlq $30,%xmm8,%xmm8 + vpand %xmm15,%xmm7,%xmm7 + vpand %xmm15,%xmm8,%xmm8 + vpor 32(%rcx),%xmm9,%xmm9 + + vpsrlq $26,%xmm3,%xmm13 + vpand %xmm15,%xmm3,%xmm3 + vpaddq %xmm13,%xmm4,%xmm4 + + vpsrlq $26,%xmm0,%xmm10 + vpand %xmm15,%xmm0,%xmm0 + vpaddq %xmm10,%xmm11,%xmm1 + + vpsrlq $26,%xmm4,%xmm10 + vpand %xmm15,%xmm4,%xmm4 + + vpsrlq $26,%xmm1,%xmm11 + vpand %xmm15,%xmm1,%xmm1 + vpaddq %xmm11,%xmm2,%xmm2 + + vpaddq %xmm10,%xmm0,%xmm0 + vpsllq $2,%xmm10,%xmm10 + vpaddq %xmm10,%xmm0,%xmm0 + + vpsrlq $26,%xmm2,%xmm12 + vpand %xmm15,%xmm2,%xmm2 + vpaddq %xmm12,%xmm3,%xmm3 + + vpsrlq $26,%xmm0,%xmm10 + vpand %xmm15,%xmm0,%xmm0 + vpaddq %xmm10,%xmm1,%xmm1 + + vpsrlq $26,%xmm3,%xmm13 + vpand %xmm15,%xmm3,%xmm3 + vpaddq %xmm13,%xmm4,%xmm4 + + ja .Loop_avx + +.Lskip_loop_avx: + vpshufd $0x10,%xmm14,%xmm14 + addq $32,%rdx + jnz .Long_tail_avx + + vpaddq %xmm2,%xmm7,%xmm7 + vpaddq %xmm0,%xmm5,%xmm5 + vpaddq %xmm1,%xmm6,%xmm6 + vpaddq %xmm3,%xmm8,%xmm8 + 
vpaddq %xmm4,%xmm9,%xmm9 + +.Long_tail_avx: + vmovdqa %xmm2,32(%r11) + vmovdqa %xmm0,0(%r11) + vmovdqa %xmm1,16(%r11) + vmovdqa %xmm3,48(%r11) + vmovdqa %xmm4,64(%r11) + + vpmuludq %xmm7,%xmm14,%xmm12 + vpmuludq %xmm5,%xmm14,%xmm10 + vpshufd $0x10,-48(%rdi),%xmm2 + vpmuludq %xmm6,%xmm14,%xmm11 + vpmuludq %xmm8,%xmm14,%xmm13 + vpmuludq %xmm9,%xmm14,%xmm14 + + vpmuludq %xmm8,%xmm2,%xmm0 + vpaddq %xmm0,%xmm14,%xmm14 + vpshufd $0x10,-32(%rdi),%xmm3 + vpmuludq %xmm7,%xmm2,%xmm1 + vpaddq %xmm1,%xmm13,%xmm13 + vpshufd $0x10,-16(%rdi),%xmm4 + vpmuludq %xmm6,%xmm2,%xmm0 + vpaddq %xmm0,%xmm12,%xmm12 + vpmuludq %xmm5,%xmm2,%xmm2 + vpaddq %xmm2,%xmm11,%xmm11 + vpmuludq %xmm9,%xmm3,%xmm3 + vpaddq %xmm3,%xmm10,%xmm10 + + vpshufd $0x10,0(%rdi),%xmm2 + vpmuludq %xmm7,%xmm4,%xmm1 + vpaddq %xmm1,%xmm14,%xmm14 + vpmuludq %xmm6,%xmm4,%xmm0 + vpaddq %xmm0,%xmm13,%xmm13 + vpshufd $0x10,16(%rdi),%xmm3 + vpmuludq %xmm5,%xmm4,%xmm4 + vpaddq %xmm4,%xmm12,%xmm12 + vpmuludq %xmm9,%xmm2,%xmm1 + vpaddq %xmm1,%xmm11,%xmm11 + vpshufd $0x10,32(%rdi),%xmm4 + vpmuludq %xmm8,%xmm2,%xmm2 + vpaddq %xmm2,%xmm10,%xmm10 + + vpmuludq %xmm6,%xmm3,%xmm0 + vpaddq %xmm0,%xmm14,%xmm14 + vpmuludq %xmm5,%xmm3,%xmm3 + vpaddq %xmm3,%xmm13,%xmm13 + vpshufd $0x10,48(%rdi),%xmm2 + vpmuludq %xmm9,%xmm4,%xmm1 + vpaddq %xmm1,%xmm12,%xmm12 + vpshufd $0x10,64(%rdi),%xmm3 + vpmuludq %xmm8,%xmm4,%xmm0 + vpaddq %xmm0,%xmm11,%xmm11 + vpmuludq %xmm7,%xmm4,%xmm4 + vpaddq %xmm4,%xmm10,%xmm10 + + vpmuludq %xmm5,%xmm2,%xmm2 + vpaddq %xmm2,%xmm14,%xmm14 + vpmuludq %xmm9,%xmm3,%xmm1 + vpaddq %xmm1,%xmm13,%xmm13 + vpmuludq %xmm8,%xmm3,%xmm0 + vpaddq %xmm0,%xmm12,%xmm12 + vpmuludq %xmm7,%xmm3,%xmm1 + vpaddq %xmm1,%xmm11,%xmm11 + vpmuludq %xmm6,%xmm3,%xmm3 + vpaddq %xmm3,%xmm10,%xmm10 + + jz .Lshort_tail_avx + + vmovdqu 0(%rsi),%xmm0 + vmovdqu 16(%rsi),%xmm1 + + vpsrldq $6,%xmm0,%xmm2 + vpsrldq $6,%xmm1,%xmm3 + vpunpckhqdq %xmm1,%xmm0,%xmm4 + vpunpcklqdq %xmm1,%xmm0,%xmm0 + vpunpcklqdq %xmm3,%xmm2,%xmm3 + + vpsrlq $40,%xmm4,%xmm4 + vpsrlq $26,%xmm0,%xmm1 + vpand %xmm15,%xmm0,%xmm0 + vpsrlq $4,%xmm3,%xmm2 + vpand %xmm15,%xmm1,%xmm1 + vpsrlq $30,%xmm3,%xmm3 + vpand %xmm15,%xmm2,%xmm2 + vpand %xmm15,%xmm3,%xmm3 + vpor 32(%rcx),%xmm4,%xmm4 + + vpshufd $0x32,-64(%rdi),%xmm9 + vpaddq 0(%r11),%xmm0,%xmm0 + vpaddq 16(%r11),%xmm1,%xmm1 + vpaddq 32(%r11),%xmm2,%xmm2 + vpaddq 48(%r11),%xmm3,%xmm3 + vpaddq 64(%r11),%xmm4,%xmm4 + + vpmuludq %xmm0,%xmm9,%xmm5 + vpaddq %xmm5,%xmm10,%xmm10 + vpmuludq %xmm1,%xmm9,%xmm6 + vpaddq %xmm6,%xmm11,%xmm11 + vpmuludq %xmm2,%xmm9,%xmm5 + vpaddq %xmm5,%xmm12,%xmm12 + vpshufd $0x32,-48(%rdi),%xmm7 + vpmuludq %xmm3,%xmm9,%xmm6 + vpaddq %xmm6,%xmm13,%xmm13 + vpmuludq %xmm4,%xmm9,%xmm9 + vpaddq %xmm9,%xmm14,%xmm14 + + vpmuludq %xmm3,%xmm7,%xmm5 + vpaddq %xmm5,%xmm14,%xmm14 + vpshufd $0x32,-32(%rdi),%xmm8 + vpmuludq %xmm2,%xmm7,%xmm6 + vpaddq %xmm6,%xmm13,%xmm13 + vpshufd $0x32,-16(%rdi),%xmm9 + vpmuludq %xmm1,%xmm7,%xmm5 + vpaddq %xmm5,%xmm12,%xmm12 + vpmuludq %xmm0,%xmm7,%xmm7 + vpaddq %xmm7,%xmm11,%xmm11 + vpmuludq %xmm4,%xmm8,%xmm8 + vpaddq %xmm8,%xmm10,%xmm10 + + vpshufd $0x32,0(%rdi),%xmm7 + vpmuludq %xmm2,%xmm9,%xmm6 + vpaddq %xmm6,%xmm14,%xmm14 + vpmuludq %xmm1,%xmm9,%xmm5 + vpaddq %xmm5,%xmm13,%xmm13 + vpshufd $0x32,16(%rdi),%xmm8 + vpmuludq %xmm0,%xmm9,%xmm9 + vpaddq %xmm9,%xmm12,%xmm12 + vpmuludq %xmm4,%xmm7,%xmm6 + vpaddq %xmm6,%xmm11,%xmm11 + vpshufd $0x32,32(%rdi),%xmm9 + vpmuludq %xmm3,%xmm7,%xmm7 + vpaddq %xmm7,%xmm10,%xmm10 + + vpmuludq %xmm1,%xmm8,%xmm5 + vpaddq %xmm5,%xmm14,%xmm14 + vpmuludq %xmm0,%xmm8,%xmm8 + vpaddq 
%xmm8,%xmm13,%xmm13 + vpshufd $0x32,48(%rdi),%xmm7 + vpmuludq %xmm4,%xmm9,%xmm6 + vpaddq %xmm6,%xmm12,%xmm12 + vpshufd $0x32,64(%rdi),%xmm8 + vpmuludq %xmm3,%xmm9,%xmm5 + vpaddq %xmm5,%xmm11,%xmm11 + vpmuludq %xmm2,%xmm9,%xmm9 + vpaddq %xmm9,%xmm10,%xmm10 + + vpmuludq %xmm0,%xmm7,%xmm7 + vpaddq %xmm7,%xmm14,%xmm14 + vpmuludq %xmm4,%xmm8,%xmm6 + vpaddq %xmm6,%xmm13,%xmm13 + vpmuludq %xmm3,%xmm8,%xmm5 + vpaddq %xmm5,%xmm12,%xmm12 + vpmuludq %xmm2,%xmm8,%xmm6 + vpaddq %xmm6,%xmm11,%xmm11 + vpmuludq %xmm1,%xmm8,%xmm8 + vpaddq %xmm8,%xmm10,%xmm10 + +.Lshort_tail_avx: + + vpsrldq $8,%xmm14,%xmm9 + vpsrldq $8,%xmm13,%xmm8 + vpsrldq $8,%xmm11,%xmm6 + vpsrldq $8,%xmm10,%xmm5 + vpsrldq $8,%xmm12,%xmm7 + vpaddq %xmm8,%xmm13,%xmm13 + vpaddq %xmm9,%xmm14,%xmm14 + vpaddq %xmm5,%xmm10,%xmm10 + vpaddq %xmm6,%xmm11,%xmm11 + vpaddq %xmm7,%xmm12,%xmm12 + + vpsrlq $26,%xmm13,%xmm3 + vpand %xmm15,%xmm13,%xmm13 + vpaddq %xmm3,%xmm14,%xmm14 + + vpsrlq $26,%xmm10,%xmm0 + vpand %xmm15,%xmm10,%xmm10 + vpaddq %xmm0,%xmm11,%xmm11 + + vpsrlq $26,%xmm14,%xmm4 + vpand %xmm15,%xmm14,%xmm14 + + vpsrlq $26,%xmm11,%xmm1 + vpand %xmm15,%xmm11,%xmm11 + vpaddq %xmm1,%xmm12,%xmm12 + + vpaddq %xmm4,%xmm10,%xmm10 + vpsllq $2,%xmm4,%xmm4 + vpaddq %xmm4,%xmm10,%xmm10 + + vpsrlq $26,%xmm12,%xmm2 + vpand %xmm15,%xmm12,%xmm12 + vpaddq %xmm2,%xmm13,%xmm13 + + vpsrlq $26,%xmm10,%xmm0 + vpand %xmm15,%xmm10,%xmm10 + vpaddq %xmm0,%xmm11,%xmm11 + + vpsrlq $26,%xmm13,%xmm3 + vpand %xmm15,%xmm13,%xmm13 + vpaddq %xmm3,%xmm14,%xmm14 + + vmovd %xmm10,-112(%rdi) + vmovd %xmm11,-108(%rdi) + vmovd %xmm12,-104(%rdi) + vmovd %xmm13,-100(%rdi) + vmovd %xmm14,-96(%rdi) + leaq -8(%r10),%rsp + + vzeroupper + ret +ENDPROC(poly1305_blocks_avx) + +.align 32 +ENTRY(poly1305_emit_avx) + cmpl $0,20(%rdi) + je .Lemit + + movl 0(%rdi),%eax + movl 4(%rdi),%ecx + movl 8(%rdi),%r8d + movl 12(%rdi),%r11d + movl 16(%rdi),%r10d + + shlq $26,%rcx + movq %r8,%r9 + shlq $52,%r8 + addq %rcx,%rax + shrq $12,%r9 + addq %rax,%r8 + adcq $0,%r9 + + shlq $14,%r11 + movq %r10,%rax + shrq $24,%r10 + addq %r11,%r9 + shlq $40,%rax + addq %rax,%r9 + adcq $0,%r10 + + movq %r10,%rax + movq %r10,%rcx + andq $3,%r10 + shrq $2,%rax + andq $-4,%rcx + addq %rcx,%rax + addq %rax,%r8 + adcq $0,%r9 + adcq $0,%r10 + + movq %r8,%rax + addq $5,%r8 + movq %r9,%rcx + adcq $0,%r9 + adcq $0,%r10 + shrq $2,%r10 + cmovnzq %r8,%rax + cmovnzq %r9,%rcx + + addq 0(%rdx),%rax + adcq 8(%rdx),%rcx + movq %rax,0(%rsi) + movq %rcx,8(%rsi) + + ret +ENDPROC(poly1305_emit_avx) +#endif /* CONFIG_AS_AVX */ + +#ifdef CONFIG_AS_AVX2 +.align 32 +ENTRY(poly1305_blocks_avx2) + + movl 20(%rdi),%r8d + cmpq $128,%rdx + jae .Lblocks_avx2 + testl %r8d,%r8d + jz .Lblocks + +.Lblocks_avx2: + andq $-16,%rdx + jz .Lno_data_avx2 + + vzeroupper + + testl %r8d,%r8d + jz .Lbase2_64_avx2 + + testq $63,%rdx + jz .Leven_avx2 + + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + +.Lblocks_avx2_body: + + movq %rdx,%r15 + + movq 0(%rdi),%r8 + movq 8(%rdi),%r9 + movl 16(%rdi),%ebp + + movq 24(%rdi),%r11 + movq 32(%rdi),%r13 + + + movl %r8d,%r14d + andq $-2147483648,%r8 + movq %r9,%r12 + movl %r9d,%ebx + andq $-2147483648,%r9 + + shrq $6,%r8 + shlq $52,%r12 + addq %r8,%r14 + shrq $12,%rbx + shrq $18,%r9 + addq %r12,%r14 + adcq %r9,%rbx + + movq %rbp,%r8 + shlq $40,%r8 + shrq $24,%rbp + addq %r8,%rbx + adcq $0,%rbp + + movq $-4,%r9 + movq %rbp,%r8 + andq %rbp,%r9 + shrq $2,%r8 + andq $3,%rbp + addq %r9,%r8 + addq %r8,%r14 + adcq $0,%rbx + adcq $0,%rbp + + movq %r13,%r12 + movq %r13,%rax + shrq $2,%r13 + addq 
%r12,%r13 + +.Lbase2_26_pre_avx2: + addq 0(%rsi),%r14 + adcq 8(%rsi),%rbx + leaq 16(%rsi),%rsi + adcq %rcx,%rbp + subq $16,%r15 + + __poly1305_block + movq %r12,%rax + + testq $63,%r15 + jnz .Lbase2_26_pre_avx2 + + testq %rcx,%rcx + jz .Lstore_base2_64_avx2 + + + movq %r14,%rax + movq %r14,%rdx + shrq $52,%r14 + movq %rbx,%r11 + movq %rbx,%r12 + shrq $26,%rdx + andq $0x3ffffff,%rax + shlq $12,%r11 + andq $0x3ffffff,%rdx + shrq $14,%rbx + orq %r11,%r14 + shlq $24,%rbp + andq $0x3ffffff,%r14 + shrq $40,%r12 + andq $0x3ffffff,%rbx + orq %r12,%rbp + + testq %r15,%r15 + jz .Lstore_base2_26_avx2 + + vmovd %eax,%xmm0 + vmovd %edx,%xmm1 + vmovd %r14d,%xmm2 + vmovd %ebx,%xmm3 + vmovd %ebp,%xmm4 + jmp .Lproceed_avx2 + +.align 32 +.Lstore_base2_64_avx2: + movq %r14,0(%rdi) + movq %rbx,8(%rdi) + movq %rbp,16(%rdi) + jmp .Ldone_avx2 + +.align 16 +.Lstore_base2_26_avx2: + movl %eax,0(%rdi) + movl %edx,4(%rdi) + movl %r14d,8(%rdi) + movl %ebx,12(%rdi) + movl %ebp,16(%rdi) +.align 16 +.Ldone_avx2: + movq 0(%rsp),%r15 + movq 8(%rsp),%r14 + movq 16(%rsp),%r13 + movq 24(%rsp),%r12 + movq 32(%rsp),%rbp + movq 40(%rsp),%rbx + leaq 48(%rsp),%rsp + +.Lno_data_avx2: +.Lblocks_avx2_epilogue: + ret + + +.align 32 +.Lbase2_64_avx2: + + + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + +.Lbase2_64_avx2_body: + + movq %rdx,%r15 + + movq 24(%rdi),%r11 + movq 32(%rdi),%r13 + + movq 0(%rdi),%r14 + movq 8(%rdi),%rbx + movl 16(%rdi),%ebp + + movq %r13,%r12 + movq %r13,%rax + shrq $2,%r13 + addq %r12,%r13 + + testq $63,%rdx + jz .Linit_avx2 + +.Lbase2_64_pre_avx2: + addq 0(%rsi),%r14 + adcq 8(%rsi),%rbx + leaq 16(%rsi),%rsi + adcq %rcx,%rbp + subq $16,%r15 + + __poly1305_block + movq %r12,%rax + + testq $63,%r15 + jnz .Lbase2_64_pre_avx2 + +.Linit_avx2: + + movq %r14,%rax + movq %r14,%rdx + shrq $52,%r14 + movq %rbx,%r8 + movq %rbx,%r9 + shrq $26,%rdx + andq $0x3ffffff,%rax + shlq $12,%r8 + andq $0x3ffffff,%rdx + shrq $14,%rbx + orq %r8,%r14 + shlq $24,%rbp + andq $0x3ffffff,%r14 + shrq $40,%r9 + andq $0x3ffffff,%rbx + orq %r9,%rbp + + vmovd %eax,%xmm0 + vmovd %edx,%xmm1 + vmovd %r14d,%xmm2 + vmovd %ebx,%xmm3 + vmovd %ebp,%xmm4 + movl $1,20(%rdi) + + __poly1305_init_avx + +.Lproceed_avx2: + movq %r15,%rdx + + movq 0(%rsp),%r15 + movq 8(%rsp),%r14 + movq 16(%rsp),%r13 + movq 24(%rsp),%r12 + movq 32(%rsp),%rbp + movq 40(%rsp),%rbx + leaq 48(%rsp),%rax + leaq 48(%rsp),%rsp + +.Lbase2_64_avx2_epilogue: + jmp .Ldo_avx2 + + +.align 32 +.Leven_avx2: + + vmovd 0(%rdi),%xmm0 + vmovd 4(%rdi),%xmm1 + vmovd 8(%rdi),%xmm2 + vmovd 12(%rdi),%xmm3 + vmovd 16(%rdi),%xmm4 + +.Ldo_avx2: + leaq 8(%rsp),%r10 + subq $0x128,%rsp + leaq .Lconst(%rip),%rcx + leaq 48+64(%rdi),%rdi + vmovdqa 96(%rcx),%ymm7 + + + vmovdqu -64(%rdi),%xmm9 + andq $-512,%rsp + vmovdqu -48(%rdi),%xmm10 + vmovdqu -32(%rdi),%xmm6 + vmovdqu -16(%rdi),%xmm11 + vmovdqu 0(%rdi),%xmm12 + vmovdqu 16(%rdi),%xmm13 + leaq 144(%rsp),%rax + vmovdqu 32(%rdi),%xmm14 + vpermd %ymm9,%ymm7,%ymm9 + vmovdqu 48(%rdi),%xmm15 + vpermd %ymm10,%ymm7,%ymm10 + vmovdqu 64(%rdi),%xmm5 + vpermd %ymm6,%ymm7,%ymm6 + vmovdqa %ymm9,0(%rsp) + vpermd %ymm11,%ymm7,%ymm11 + vmovdqa %ymm10,32-144(%rax) + vpermd %ymm12,%ymm7,%ymm12 + vmovdqa %ymm6,64-144(%rax) + vpermd %ymm13,%ymm7,%ymm13 + vmovdqa %ymm11,96-144(%rax) + vpermd %ymm14,%ymm7,%ymm14 + vmovdqa %ymm12,128-144(%rax) + vpermd %ymm15,%ymm7,%ymm15 + vmovdqa %ymm13,160-144(%rax) + vpermd %ymm5,%ymm7,%ymm5 + vmovdqa %ymm14,192-144(%rax) + vmovdqa %ymm15,224-144(%rax) + vmovdqa %ymm5,256-144(%rax) + vmovdqa 64(%rcx),%ymm5 + + + 
+ vmovdqu 0(%rsi),%xmm7 + vmovdqu 16(%rsi),%xmm8 + vinserti128 $1,32(%rsi),%ymm7,%ymm7 + vinserti128 $1,48(%rsi),%ymm8,%ymm8 + leaq 64(%rsi),%rsi + + vpsrldq $6,%ymm7,%ymm9 + vpsrldq $6,%ymm8,%ymm10 + vpunpckhqdq %ymm8,%ymm7,%ymm6 + vpunpcklqdq %ymm10,%ymm9,%ymm9 + vpunpcklqdq %ymm8,%ymm7,%ymm7 + + vpsrlq $30,%ymm9,%ymm10 + vpsrlq $4,%ymm9,%ymm9 + vpsrlq $26,%ymm7,%ymm8 + vpsrlq $40,%ymm6,%ymm6 + vpand %ymm5,%ymm9,%ymm9 + vpand %ymm5,%ymm7,%ymm7 + vpand %ymm5,%ymm8,%ymm8 + vpand %ymm5,%ymm10,%ymm10 + vpor 32(%rcx),%ymm6,%ymm6 + + vpaddq %ymm2,%ymm9,%ymm2 + subq $64,%rdx + jz .Ltail_avx2 + jmp .Loop_avx2 + +.align 32 +.Loop_avx2: + + vpaddq %ymm0,%ymm7,%ymm0 + vmovdqa 0(%rsp),%ymm7 + vpaddq %ymm1,%ymm8,%ymm1 + vmovdqa 32(%rsp),%ymm8 + vpaddq %ymm3,%ymm10,%ymm3 + vmovdqa 96(%rsp),%ymm9 + vpaddq %ymm4,%ymm6,%ymm4 + vmovdqa 48(%rax),%ymm10 + vmovdqa 112(%rax),%ymm5 + + vpmuludq %ymm2,%ymm7,%ymm13 + vpmuludq %ymm2,%ymm8,%ymm14 + vpmuludq %ymm2,%ymm9,%ymm15 + vpmuludq %ymm2,%ymm10,%ymm11 + vpmuludq %ymm2,%ymm5,%ymm12 + + vpmuludq %ymm0,%ymm8,%ymm6 + vpmuludq %ymm1,%ymm8,%ymm2 + vpaddq %ymm6,%ymm12,%ymm12 + vpaddq %ymm2,%ymm13,%ymm13 + vpmuludq %ymm3,%ymm8,%ymm6 + vpmuludq 64(%rsp),%ymm4,%ymm2 + vpaddq %ymm6,%ymm15,%ymm15 + vpaddq %ymm2,%ymm11,%ymm11 + vmovdqa -16(%rax),%ymm8 + + vpmuludq %ymm0,%ymm7,%ymm6 + vpmuludq %ymm1,%ymm7,%ymm2 + vpaddq %ymm6,%ymm11,%ymm11 + vpaddq %ymm2,%ymm12,%ymm12 + vpmuludq %ymm3,%ymm7,%ymm6 + vpmuludq %ymm4,%ymm7,%ymm2 + vmovdqu 0(%rsi),%xmm7 + vpaddq %ymm6,%ymm14,%ymm14 + vpaddq %ymm2,%ymm15,%ymm15 + vinserti128 $1,32(%rsi),%ymm7,%ymm7 + + vpmuludq %ymm3,%ymm8,%ymm6 + vpmuludq %ymm4,%ymm8,%ymm2 + vmovdqu 16(%rsi),%xmm8 + vpaddq %ymm6,%ymm11,%ymm11 + vpaddq %ymm2,%ymm12,%ymm12 + vmovdqa 16(%rax),%ymm2 + vpmuludq %ymm1,%ymm9,%ymm6 + vpmuludq %ymm0,%ymm9,%ymm9 + vpaddq %ymm6,%ymm14,%ymm14 + vpaddq %ymm9,%ymm13,%ymm13 + vinserti128 $1,48(%rsi),%ymm8,%ymm8 + leaq 64(%rsi),%rsi + + vpmuludq %ymm1,%ymm2,%ymm6 + vpmuludq %ymm0,%ymm2,%ymm2 + vpsrldq $6,%ymm7,%ymm9 + vpaddq %ymm6,%ymm15,%ymm15 + vpaddq %ymm2,%ymm14,%ymm14 + vpmuludq %ymm3,%ymm10,%ymm6 + vpmuludq %ymm4,%ymm10,%ymm2 + vpsrldq $6,%ymm8,%ymm10 + vpaddq %ymm6,%ymm12,%ymm12 + vpaddq %ymm2,%ymm13,%ymm13 + vpunpckhqdq %ymm8,%ymm7,%ymm6 + + vpmuludq %ymm3,%ymm5,%ymm3 + vpmuludq %ymm4,%ymm5,%ymm4 + vpunpcklqdq %ymm8,%ymm7,%ymm7 + vpaddq %ymm3,%ymm13,%ymm2 + vpaddq %ymm4,%ymm14,%ymm3 + vpunpcklqdq %ymm10,%ymm9,%ymm10 + vpmuludq 80(%rax),%ymm0,%ymm4 + vpmuludq %ymm1,%ymm5,%ymm0 + vmovdqa 64(%rcx),%ymm5 + vpaddq %ymm4,%ymm15,%ymm4 + vpaddq %ymm0,%ymm11,%ymm0 + + vpsrlq $26,%ymm3,%ymm14 + vpand %ymm5,%ymm3,%ymm3 + vpaddq %ymm14,%ymm4,%ymm4 + + vpsrlq $26,%ymm0,%ymm11 + vpand %ymm5,%ymm0,%ymm0 + vpaddq %ymm11,%ymm12,%ymm1 + + vpsrlq $26,%ymm4,%ymm15 + vpand %ymm5,%ymm4,%ymm4 + + vpsrlq $4,%ymm10,%ymm9 + + vpsrlq $26,%ymm1,%ymm12 + vpand %ymm5,%ymm1,%ymm1 + vpaddq %ymm12,%ymm2,%ymm2 + + vpaddq %ymm15,%ymm0,%ymm0 + vpsllq $2,%ymm15,%ymm15 + vpaddq %ymm15,%ymm0,%ymm0 + + vpand %ymm5,%ymm9,%ymm9 + vpsrlq $26,%ymm7,%ymm8 + + vpsrlq $26,%ymm2,%ymm13 + vpand %ymm5,%ymm2,%ymm2 + vpaddq %ymm13,%ymm3,%ymm3 + + vpaddq %ymm9,%ymm2,%ymm2 + vpsrlq $30,%ymm10,%ymm10 + + vpsrlq $26,%ymm0,%ymm11 + vpand %ymm5,%ymm0,%ymm0 + vpaddq %ymm11,%ymm1,%ymm1 + + vpsrlq $40,%ymm6,%ymm6 + + vpsrlq $26,%ymm3,%ymm14 + vpand %ymm5,%ymm3,%ymm3 + vpaddq %ymm14,%ymm4,%ymm4 + + vpand %ymm5,%ymm7,%ymm7 + vpand %ymm5,%ymm8,%ymm8 + vpand %ymm5,%ymm10,%ymm10 + vpor 32(%rcx),%ymm6,%ymm6 + + subq $64,%rdx + jnz .Loop_avx2 + +.byte 0x66,0x90 +.Ltail_avx2: + + vpaddq 
%ymm0,%ymm7,%ymm0 + vmovdqu 4(%rsp),%ymm7 + vpaddq %ymm1,%ymm8,%ymm1 + vmovdqu 36(%rsp),%ymm8 + vpaddq %ymm3,%ymm10,%ymm3 + vmovdqu 100(%rsp),%ymm9 + vpaddq %ymm4,%ymm6,%ymm4 + vmovdqu 52(%rax),%ymm10 + vmovdqu 116(%rax),%ymm5 + + vpmuludq %ymm2,%ymm7,%ymm13 + vpmuludq %ymm2,%ymm8,%ymm14 + vpmuludq %ymm2,%ymm9,%ymm15 + vpmuludq %ymm2,%ymm10,%ymm11 + vpmuludq %ymm2,%ymm5,%ymm12 + + vpmuludq %ymm0,%ymm8,%ymm6 + vpmuludq %ymm1,%ymm8,%ymm2 + vpaddq %ymm6,%ymm12,%ymm12 + vpaddq %ymm2,%ymm13,%ymm13 + vpmuludq %ymm3,%ymm8,%ymm6 + vpmuludq 68(%rsp),%ymm4,%ymm2 + vpaddq %ymm6,%ymm15,%ymm15 + vpaddq %ymm2,%ymm11,%ymm11 + + vpmuludq %ymm0,%ymm7,%ymm6 + vpmuludq %ymm1,%ymm7,%ymm2 + vpaddq %ymm6,%ymm11,%ymm11 + vmovdqu -12(%rax),%ymm8 + vpaddq %ymm2,%ymm12,%ymm12 + vpmuludq %ymm3,%ymm7,%ymm6 + vpmuludq %ymm4,%ymm7,%ymm2 + vpaddq %ymm6,%ymm14,%ymm14 + vpaddq %ymm2,%ymm15,%ymm15 + + vpmuludq %ymm3,%ymm8,%ymm6 + vpmuludq %ymm4,%ymm8,%ymm2 + vpaddq %ymm6,%ymm11,%ymm11 + vpaddq %ymm2,%ymm12,%ymm12 + vmovdqu 20(%rax),%ymm2 + vpmuludq %ymm1,%ymm9,%ymm6 + vpmuludq %ymm0,%ymm9,%ymm9 + vpaddq %ymm6,%ymm14,%ymm14 + vpaddq %ymm9,%ymm13,%ymm13 + + vpmuludq %ymm1,%ymm2,%ymm6 + vpmuludq %ymm0,%ymm2,%ymm2 + vpaddq %ymm6,%ymm15,%ymm15 + vpaddq %ymm2,%ymm14,%ymm14 + vpmuludq %ymm3,%ymm10,%ymm6 + vpmuludq %ymm4,%ymm10,%ymm2 + vpaddq %ymm6,%ymm12,%ymm12 + vpaddq %ymm2,%ymm13,%ymm13 + + vpmuludq %ymm3,%ymm5,%ymm3 + vpmuludq %ymm4,%ymm5,%ymm4 + vpaddq %ymm3,%ymm13,%ymm2 + vpaddq %ymm4,%ymm14,%ymm3 + vpmuludq 84(%rax),%ymm0,%ymm4 + vpmuludq %ymm1,%ymm5,%ymm0 + vmovdqa 64(%rcx),%ymm5 + vpaddq %ymm4,%ymm15,%ymm4 + vpaddq %ymm0,%ymm11,%ymm0 + + vpsrldq $8,%ymm12,%ymm8 + vpsrldq $8,%ymm2,%ymm9 + vpsrldq $8,%ymm3,%ymm10 + vpsrldq $8,%ymm4,%ymm6 + vpsrldq $8,%ymm0,%ymm7 + vpaddq %ymm8,%ymm12,%ymm12 + vpaddq %ymm9,%ymm2,%ymm2 + vpaddq %ymm10,%ymm3,%ymm3 + vpaddq %ymm6,%ymm4,%ymm4 + vpaddq %ymm7,%ymm0,%ymm0 + + vpermq $0x2,%ymm3,%ymm10 + vpermq $0x2,%ymm4,%ymm6 + vpermq $0x2,%ymm0,%ymm7 + vpermq $0x2,%ymm12,%ymm8 + vpermq $0x2,%ymm2,%ymm9 + vpaddq %ymm10,%ymm3,%ymm3 + vpaddq %ymm6,%ymm4,%ymm4 + vpaddq %ymm7,%ymm0,%ymm0 + vpaddq %ymm8,%ymm12,%ymm12 + vpaddq %ymm9,%ymm2,%ymm2 + + vpsrlq $26,%ymm3,%ymm14 + vpand %ymm5,%ymm3,%ymm3 + vpaddq %ymm14,%ymm4,%ymm4 + + vpsrlq $26,%ymm0,%ymm11 + vpand %ymm5,%ymm0,%ymm0 + vpaddq %ymm11,%ymm12,%ymm1 + + vpsrlq $26,%ymm4,%ymm15 + vpand %ymm5,%ymm4,%ymm4 + + vpsrlq $26,%ymm1,%ymm12 + vpand %ymm5,%ymm1,%ymm1 + vpaddq %ymm12,%ymm2,%ymm2 + + vpaddq %ymm15,%ymm0,%ymm0 + vpsllq $2,%ymm15,%ymm15 + vpaddq %ymm15,%ymm0,%ymm0 + + vpsrlq $26,%ymm2,%ymm13 + vpand %ymm5,%ymm2,%ymm2 + vpaddq %ymm13,%ymm3,%ymm3 + + vpsrlq $26,%ymm0,%ymm11 + vpand %ymm5,%ymm0,%ymm0 + vpaddq %ymm11,%ymm1,%ymm1 + + vpsrlq $26,%ymm3,%ymm14 + vpand %ymm5,%ymm3,%ymm3 + vpaddq %ymm14,%ymm4,%ymm4 + + vmovd %xmm0,-112(%rdi) + vmovd %xmm1,-108(%rdi) + vmovd %xmm2,-104(%rdi) + vmovd %xmm3,-100(%rdi) + vmovd %xmm4,-96(%rdi) + leaq -8(%r10),%rsp + + vzeroupper + ret + +ENDPROC(poly1305_blocks_avx2) +#endif /* CONFIG_AS_AVX2 */ + +#ifdef CONFIG_AS_AVX512 +.align 32 +ENTRY(poly1305_blocks_avx512) + + movl 20(%rdi),%r8d + cmpq $128,%rdx + jae .Lblocks_avx2_512 + testl %r8d,%r8d + jz .Lblocks + +.Lblocks_avx2_512: + andq $-16,%rdx + jz .Lno_data_avx2_512 + + vzeroupper + + testl %r8d,%r8d + jz .Lbase2_64_avx2_512 + + testq $63,%rdx + jz .Leven_avx2_512 + + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + +.Lblocks_avx2_body_512: + + movq %rdx,%r15 + + movq 0(%rdi),%r8 + movq 8(%rdi),%r9 + movl 16(%rdi),%ebp + + movq 
24(%rdi),%r11 + movq 32(%rdi),%r13 + + + movl %r8d,%r14d + andq $-2147483648,%r8 + movq %r9,%r12 + movl %r9d,%ebx + andq $-2147483648,%r9 + + shrq $6,%r8 + shlq $52,%r12 + addq %r8,%r14 + shrq $12,%rbx + shrq $18,%r9 + addq %r12,%r14 + adcq %r9,%rbx + + movq %rbp,%r8 + shlq $40,%r8 + shrq $24,%rbp + addq %r8,%rbx + adcq $0,%rbp + + movq $-4,%r9 + movq %rbp,%r8 + andq %rbp,%r9 + shrq $2,%r8 + andq $3,%rbp + addq %r9,%r8 + addq %r8,%r14 + adcq $0,%rbx + adcq $0,%rbp + + movq %r13,%r12 + movq %r13,%rax + shrq $2,%r13 + addq %r12,%r13 + +.Lbase2_26_pre_avx2_512: + addq 0(%rsi),%r14 + adcq 8(%rsi),%rbx + leaq 16(%rsi),%rsi + adcq %rcx,%rbp + subq $16,%r15 + + __poly1305_block + movq %r12,%rax + + testq $63,%r15 + jnz .Lbase2_26_pre_avx2_512 + + testq %rcx,%rcx + jz .Lstore_base2_64_avx2_512 + + + movq %r14,%rax + movq %r14,%rdx + shrq $52,%r14 + movq %rbx,%r11 + movq %rbx,%r12 + shrq $26,%rdx + andq $0x3ffffff,%rax + shlq $12,%r11 + andq $0x3ffffff,%rdx + shrq $14,%rbx + orq %r11,%r14 + shlq $24,%rbp + andq $0x3ffffff,%r14 + shrq $40,%r12 + andq $0x3ffffff,%rbx + orq %r12,%rbp + + testq %r15,%r15 + jz .Lstore_base2_26_avx2_512 + + vmovd %eax,%xmm0 + vmovd %edx,%xmm1 + vmovd %r14d,%xmm2 + vmovd %ebx,%xmm3 + vmovd %ebp,%xmm4 + jmp .Lproceed_avx2_512 + +.align 32 +.Lstore_base2_64_avx2_512: + movq %r14,0(%rdi) + movq %rbx,8(%rdi) + movq %rbp,16(%rdi) + jmp .Ldone_avx2_512 + +.align 16 +.Lstore_base2_26_avx2_512: + movl %eax,0(%rdi) + movl %edx,4(%rdi) + movl %r14d,8(%rdi) + movl %ebx,12(%rdi) + movl %ebp,16(%rdi) +.align 16 +.Ldone_avx2_512: + movq 0(%rsp),%r15 + movq 8(%rsp),%r14 + movq 16(%rsp),%r13 + movq 24(%rsp),%r12 + movq 32(%rsp),%rbp + movq 40(%rsp),%rbx + leaq 48(%rsp),%rsp + +.Lno_data_avx2_512: +.Lblocks_avx2_epilogue_512: + ret + + +.align 32 +.Lbase2_64_avx2_512: + + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + +.Lbase2_64_avx2_body_512: + + movq %rdx,%r15 + + movq 24(%rdi),%r11 + movq 32(%rdi),%r13 + + movq 0(%rdi),%r14 + movq 8(%rdi),%rbx + movl 16(%rdi),%ebp + + movq %r13,%r12 + movq %r13,%rax + shrq $2,%r13 + addq %r12,%r13 + + testq $63,%rdx + jz .Linit_avx2_512 + +.Lbase2_64_pre_avx2_512: + addq 0(%rsi),%r14 + adcq 8(%rsi),%rbx + leaq 16(%rsi),%rsi + adcq %rcx,%rbp + subq $16,%r15 + + __poly1305_block + movq %r12,%rax + + testq $63,%r15 + jnz .Lbase2_64_pre_avx2_512 + +.Linit_avx2_512: + + movq %r14,%rax + movq %r14,%rdx + shrq $52,%r14 + movq %rbx,%r8 + movq %rbx,%r9 + shrq $26,%rdx + andq $0x3ffffff,%rax + shlq $12,%r8 + andq $0x3ffffff,%rdx + shrq $14,%rbx + orq %r8,%r14 + shlq $24,%rbp + andq $0x3ffffff,%r14 + shrq $40,%r9 + andq $0x3ffffff,%rbx + orq %r9,%rbp + + vmovd %eax,%xmm0 + vmovd %edx,%xmm1 + vmovd %r14d,%xmm2 + vmovd %ebx,%xmm3 + vmovd %ebp,%xmm4 + movl $1,20(%rdi) + + __poly1305_init_avx + +.Lproceed_avx2_512: + movq %r15,%rdx + + movq 0(%rsp),%r15 + movq 8(%rsp),%r14 + movq 16(%rsp),%r13 + movq 24(%rsp),%r12 + movq 32(%rsp),%rbp + movq 40(%rsp),%rbx + leaq 48(%rsp),%rax + leaq 48(%rsp),%rsp + +.Lbase2_64_avx2_epilogue_512: + jmp .Ldo_avx2_512 + + +.align 32 +.Leven_avx2_512: + + vmovd 0(%rdi),%xmm0 + vmovd 4(%rdi),%xmm1 + vmovd 8(%rdi),%xmm2 + vmovd 12(%rdi),%xmm3 + vmovd 16(%rdi),%xmm4 + +.Ldo_avx2_512: + cmpq $512,%rdx + jae .Lblocks_avx512 +.Lskip_avx512: + leaq 8(%rsp),%r10 + + subq $0x128,%rsp + leaq .Lconst(%rip),%rcx + leaq 48+64(%rdi),%rdi + vmovdqa 96(%rcx),%ymm7 + + + vmovdqu -64(%rdi),%xmm9 + andq $-512,%rsp + vmovdqu -48(%rdi),%xmm10 + vmovdqu -32(%rdi),%xmm6 + vmovdqu -16(%rdi),%xmm11 + vmovdqu 0(%rdi),%xmm12 + 
vmovdqu 16(%rdi),%xmm13 + leaq 144(%rsp),%rax + vmovdqu 32(%rdi),%xmm14 + vpermd %ymm9,%ymm7,%ymm9 + vmovdqu 48(%rdi),%xmm15 + vpermd %ymm10,%ymm7,%ymm10 + vmovdqu 64(%rdi),%xmm5 + vpermd %ymm6,%ymm7,%ymm6 + vmovdqa %ymm9,0(%rsp) + vpermd %ymm11,%ymm7,%ymm11 + vmovdqa %ymm10,32-144(%rax) + vpermd %ymm12,%ymm7,%ymm12 + vmovdqa %ymm6,64-144(%rax) + vpermd %ymm13,%ymm7,%ymm13 + vmovdqa %ymm11,96-144(%rax) + vpermd %ymm14,%ymm7,%ymm14 + vmovdqa %ymm12,128-144(%rax) + vpermd %ymm15,%ymm7,%ymm15 + vmovdqa %ymm13,160-144(%rax) + vpermd %ymm5,%ymm7,%ymm5 + vmovdqa %ymm14,192-144(%rax) + vmovdqa %ymm15,224-144(%rax) + vmovdqa %ymm5,256-144(%rax) + vmovdqa 64(%rcx),%ymm5 + + + + vmovdqu 0(%rsi),%xmm7 + vmovdqu 16(%rsi),%xmm8 + vinserti128 $1,32(%rsi),%ymm7,%ymm7 + vinserti128 $1,48(%rsi),%ymm8,%ymm8 + leaq 64(%rsi),%rsi + + vpsrldq $6,%ymm7,%ymm9 + vpsrldq $6,%ymm8,%ymm10 + vpunpckhqdq %ymm8,%ymm7,%ymm6 + vpunpcklqdq %ymm10,%ymm9,%ymm9 + vpunpcklqdq %ymm8,%ymm7,%ymm7 + + vpsrlq $30,%ymm9,%ymm10 + vpsrlq $4,%ymm9,%ymm9 + vpsrlq $26,%ymm7,%ymm8 + vpsrlq $40,%ymm6,%ymm6 + vpand %ymm5,%ymm9,%ymm9 + vpand %ymm5,%ymm7,%ymm7 + vpand %ymm5,%ymm8,%ymm8 + vpand %ymm5,%ymm10,%ymm10 + vpor 32(%rcx),%ymm6,%ymm6 + + vpaddq %ymm2,%ymm9,%ymm2 + subq $64,%rdx + jz .Ltail_avx2_512 + jmp .Loop_avx2_512 + +.align 32 +.Loop_avx2_512: + + vpaddq %ymm0,%ymm7,%ymm0 + vmovdqa 0(%rsp),%ymm7 + vpaddq %ymm1,%ymm8,%ymm1 + vmovdqa 32(%rsp),%ymm8 + vpaddq %ymm3,%ymm10,%ymm3 + vmovdqa 96(%rsp),%ymm9 + vpaddq %ymm4,%ymm6,%ymm4 + vmovdqa 48(%rax),%ymm10 + vmovdqa 112(%rax),%ymm5 + + vpmuludq %ymm2,%ymm7,%ymm13 + vpmuludq %ymm2,%ymm8,%ymm14 + vpmuludq %ymm2,%ymm9,%ymm15 + vpmuludq %ymm2,%ymm10,%ymm11 + vpmuludq %ymm2,%ymm5,%ymm12 + + vpmuludq %ymm0,%ymm8,%ymm6 + vpmuludq %ymm1,%ymm8,%ymm2 + vpaddq %ymm6,%ymm12,%ymm12 + vpaddq %ymm2,%ymm13,%ymm13 + vpmuludq %ymm3,%ymm8,%ymm6 + vpmuludq 64(%rsp),%ymm4,%ymm2 + vpaddq %ymm6,%ymm15,%ymm15 + vpaddq %ymm2,%ymm11,%ymm11 + vmovdqa -16(%rax),%ymm8 + + vpmuludq %ymm0,%ymm7,%ymm6 + vpmuludq %ymm1,%ymm7,%ymm2 + vpaddq %ymm6,%ymm11,%ymm11 + vpaddq %ymm2,%ymm12,%ymm12 + vpmuludq %ymm3,%ymm7,%ymm6 + vpmuludq %ymm4,%ymm7,%ymm2 + vmovdqu 0(%rsi),%xmm7 + vpaddq %ymm6,%ymm14,%ymm14 + vpaddq %ymm2,%ymm15,%ymm15 + vinserti128 $1,32(%rsi),%ymm7,%ymm7 + + vpmuludq %ymm3,%ymm8,%ymm6 + vpmuludq %ymm4,%ymm8,%ymm2 + vmovdqu 16(%rsi),%xmm8 + vpaddq %ymm6,%ymm11,%ymm11 + vpaddq %ymm2,%ymm12,%ymm12 + vmovdqa 16(%rax),%ymm2 + vpmuludq %ymm1,%ymm9,%ymm6 + vpmuludq %ymm0,%ymm9,%ymm9 + vpaddq %ymm6,%ymm14,%ymm14 + vpaddq %ymm9,%ymm13,%ymm13 + vinserti128 $1,48(%rsi),%ymm8,%ymm8 + leaq 64(%rsi),%rsi + + vpmuludq %ymm1,%ymm2,%ymm6 + vpmuludq %ymm0,%ymm2,%ymm2 + vpsrldq $6,%ymm7,%ymm9 + vpaddq %ymm6,%ymm15,%ymm15 + vpaddq %ymm2,%ymm14,%ymm14 + vpmuludq %ymm3,%ymm10,%ymm6 + vpmuludq %ymm4,%ymm10,%ymm2 + vpsrldq $6,%ymm8,%ymm10 + vpaddq %ymm6,%ymm12,%ymm12 + vpaddq %ymm2,%ymm13,%ymm13 + vpunpckhqdq %ymm8,%ymm7,%ymm6 + + vpmuludq %ymm3,%ymm5,%ymm3 + vpmuludq %ymm4,%ymm5,%ymm4 + vpunpcklqdq %ymm8,%ymm7,%ymm7 + vpaddq %ymm3,%ymm13,%ymm2 + vpaddq %ymm4,%ymm14,%ymm3 + vpunpcklqdq %ymm10,%ymm9,%ymm10 + vpmuludq 80(%rax),%ymm0,%ymm4 + vpmuludq %ymm1,%ymm5,%ymm0 + vmovdqa 64(%rcx),%ymm5 + vpaddq %ymm4,%ymm15,%ymm4 + vpaddq %ymm0,%ymm11,%ymm0 + + vpsrlq $26,%ymm3,%ymm14 + vpand %ymm5,%ymm3,%ymm3 + vpaddq %ymm14,%ymm4,%ymm4 + + vpsrlq $26,%ymm0,%ymm11 + vpand %ymm5,%ymm0,%ymm0 + vpaddq %ymm11,%ymm12,%ymm1 + + vpsrlq $26,%ymm4,%ymm15 + vpand %ymm5,%ymm4,%ymm4 + + vpsrlq $4,%ymm10,%ymm9 + + vpsrlq $26,%ymm1,%ymm12 + vpand 
%ymm5,%ymm1,%ymm1 + vpaddq %ymm12,%ymm2,%ymm2 + + vpaddq %ymm15,%ymm0,%ymm0 + vpsllq $2,%ymm15,%ymm15 + vpaddq %ymm15,%ymm0,%ymm0 + + vpand %ymm5,%ymm9,%ymm9 + vpsrlq $26,%ymm7,%ymm8 + + vpsrlq $26,%ymm2,%ymm13 + vpand %ymm5,%ymm2,%ymm2 + vpaddq %ymm13,%ymm3,%ymm3 + + vpaddq %ymm9,%ymm2,%ymm2 + vpsrlq $30,%ymm10,%ymm10 + + vpsrlq $26,%ymm0,%ymm11 + vpand %ymm5,%ymm0,%ymm0 + vpaddq %ymm11,%ymm1,%ymm1 + + vpsrlq $40,%ymm6,%ymm6 + + vpsrlq $26,%ymm3,%ymm14 + vpand %ymm5,%ymm3,%ymm3 + vpaddq %ymm14,%ymm4,%ymm4 + + vpand %ymm5,%ymm7,%ymm7 + vpand %ymm5,%ymm8,%ymm8 + vpand %ymm5,%ymm10,%ymm10 + vpor 32(%rcx),%ymm6,%ymm6 + + subq $64,%rdx + jnz .Loop_avx2_512 + +.byte 0x66,0x90 +.Ltail_avx2_512: + + vpaddq %ymm0,%ymm7,%ymm0 + vmovdqu 4(%rsp),%ymm7 + vpaddq %ymm1,%ymm8,%ymm1 + vmovdqu 36(%rsp),%ymm8 + vpaddq %ymm3,%ymm10,%ymm3 + vmovdqu 100(%rsp),%ymm9 + vpaddq %ymm4,%ymm6,%ymm4 + vmovdqu 52(%rax),%ymm10 + vmovdqu 116(%rax),%ymm5 + + vpmuludq %ymm2,%ymm7,%ymm13 + vpmuludq %ymm2,%ymm8,%ymm14 + vpmuludq %ymm2,%ymm9,%ymm15 + vpmuludq %ymm2,%ymm10,%ymm11 + vpmuludq %ymm2,%ymm5,%ymm12 + + vpmuludq %ymm0,%ymm8,%ymm6 + vpmuludq %ymm1,%ymm8,%ymm2 + vpaddq %ymm6,%ymm12,%ymm12 + vpaddq %ymm2,%ymm13,%ymm13 + vpmuludq %ymm3,%ymm8,%ymm6 + vpmuludq 68(%rsp),%ymm4,%ymm2 + vpaddq %ymm6,%ymm15,%ymm15 + vpaddq %ymm2,%ymm11,%ymm11 + + vpmuludq %ymm0,%ymm7,%ymm6 + vpmuludq %ymm1,%ymm7,%ymm2 + vpaddq %ymm6,%ymm11,%ymm11 + vmovdqu -12(%rax),%ymm8 + vpaddq %ymm2,%ymm12,%ymm12 + vpmuludq %ymm3,%ymm7,%ymm6 + vpmuludq %ymm4,%ymm7,%ymm2 + vpaddq %ymm6,%ymm14,%ymm14 + vpaddq %ymm2,%ymm15,%ymm15 + + vpmuludq %ymm3,%ymm8,%ymm6 + vpmuludq %ymm4,%ymm8,%ymm2 + vpaddq %ymm6,%ymm11,%ymm11 + vpaddq %ymm2,%ymm12,%ymm12 + vmovdqu 20(%rax),%ymm2 + vpmuludq %ymm1,%ymm9,%ymm6 + vpmuludq %ymm0,%ymm9,%ymm9 + vpaddq %ymm6,%ymm14,%ymm14 + vpaddq %ymm9,%ymm13,%ymm13 + + vpmuludq %ymm1,%ymm2,%ymm6 + vpmuludq %ymm0,%ymm2,%ymm2 + vpaddq %ymm6,%ymm15,%ymm15 + vpaddq %ymm2,%ymm14,%ymm14 + vpmuludq %ymm3,%ymm10,%ymm6 + vpmuludq %ymm4,%ymm10,%ymm2 + vpaddq %ymm6,%ymm12,%ymm12 + vpaddq %ymm2,%ymm13,%ymm13 + + vpmuludq %ymm3,%ymm5,%ymm3 + vpmuludq %ymm4,%ymm5,%ymm4 + vpaddq %ymm3,%ymm13,%ymm2 + vpaddq %ymm4,%ymm14,%ymm3 + vpmuludq 84(%rax),%ymm0,%ymm4 + vpmuludq %ymm1,%ymm5,%ymm0 + vmovdqa 64(%rcx),%ymm5 + vpaddq %ymm4,%ymm15,%ymm4 + vpaddq %ymm0,%ymm11,%ymm0 + + vpsrldq $8,%ymm12,%ymm8 + vpsrldq $8,%ymm2,%ymm9 + vpsrldq $8,%ymm3,%ymm10 + vpsrldq $8,%ymm4,%ymm6 + vpsrldq $8,%ymm0,%ymm7 + vpaddq %ymm8,%ymm12,%ymm12 + vpaddq %ymm9,%ymm2,%ymm2 + vpaddq %ymm10,%ymm3,%ymm3 + vpaddq %ymm6,%ymm4,%ymm4 + vpaddq %ymm7,%ymm0,%ymm0 + + vpermq $0x2,%ymm3,%ymm10 + vpermq $0x2,%ymm4,%ymm6 + vpermq $0x2,%ymm0,%ymm7 + vpermq $0x2,%ymm12,%ymm8 + vpermq $0x2,%ymm2,%ymm9 + vpaddq %ymm10,%ymm3,%ymm3 + vpaddq %ymm6,%ymm4,%ymm4 + vpaddq %ymm7,%ymm0,%ymm0 + vpaddq %ymm8,%ymm12,%ymm12 + vpaddq %ymm9,%ymm2,%ymm2 + + vpsrlq $26,%ymm3,%ymm14 + vpand %ymm5,%ymm3,%ymm3 + vpaddq %ymm14,%ymm4,%ymm4 + + vpsrlq $26,%ymm0,%ymm11 + vpand %ymm5,%ymm0,%ymm0 + vpaddq %ymm11,%ymm12,%ymm1 + + vpsrlq $26,%ymm4,%ymm15 + vpand %ymm5,%ymm4,%ymm4 + + vpsrlq $26,%ymm1,%ymm12 + vpand %ymm5,%ymm1,%ymm1 + vpaddq %ymm12,%ymm2,%ymm2 + + vpaddq %ymm15,%ymm0,%ymm0 + vpsllq $2,%ymm15,%ymm15 + vpaddq %ymm15,%ymm0,%ymm0 + + vpsrlq $26,%ymm2,%ymm13 + vpand %ymm5,%ymm2,%ymm2 + vpaddq %ymm13,%ymm3,%ymm3 + + vpsrlq $26,%ymm0,%ymm11 + vpand %ymm5,%ymm0,%ymm0 + vpaddq %ymm11,%ymm1,%ymm1 + + vpsrlq $26,%ymm3,%ymm14 + vpand %ymm5,%ymm3,%ymm3 + vpaddq %ymm14,%ymm4,%ymm4 + + vmovd %xmm0,-112(%rdi) + vmovd 
%xmm1,-108(%rdi) + vmovd %xmm2,-104(%rdi) + vmovd %xmm3,-100(%rdi) + vmovd %xmm4,-96(%rdi) + leaq -8(%r10),%rsp + + vzeroupper + ret + +.Lblocks_avx512: + + vzeroupper + leaq 8(%rsp),%r10 + + subq $0x128,%rsp + leaq .Lconst(%rip),%rcx + leaq 48+64(%rdi),%rdi + vmovdqa 96(%rcx),%ymm9 + + + vmovdqu32 -64(%rdi),%xmm16 + andq $-512,%rsp + vmovdqu32 -48(%rdi),%xmm17 + vmovdqu32 -32(%rdi),%xmm21 + vmovdqu32 -16(%rdi),%xmm18 + vmovdqu32 0(%rdi),%xmm22 + vmovdqu32 16(%rdi),%xmm19 + vmovdqu32 32(%rdi),%xmm23 + vmovdqu32 48(%rdi),%xmm20 + vmovdqu32 64(%rdi),%xmm24 + vpermd %ymm16,%ymm9,%ymm16 + vmovdqa64 64(%rcx),%ymm5 + vpermd %ymm17,%ymm9,%ymm17 + vpermd %ymm21,%ymm9,%ymm21 + vpermd %ymm18,%ymm9,%ymm18 + vmovdqa32 %ymm16,0(%rsp) + vpsrlq $32,%ymm16,%ymm7 + vpermd %ymm22,%ymm9,%ymm22 + vmovdqa32 %ymm17,32(%rsp) + vpsrlq $32,%ymm17,%ymm8 + vpermd %ymm19,%ymm9,%ymm19 + vmovdqa32 %ymm21,64(%rsp) + vpermd %ymm23,%ymm9,%ymm23 + vpermd %ymm20,%ymm9,%ymm20 + vmovdqa32 %ymm18,96(%rsp) + vpermd %ymm24,%ymm9,%ymm24 + vmovdqa32 %ymm22,128(%rsp) + vmovdqa32 %ymm19,160(%rsp) + vmovdqa32 %ymm23,192(%rsp) + vmovdqa32 %ymm20,224(%rsp) + vmovdqa32 %ymm24,256(%rsp) + + vpmuludq %ymm7,%ymm16,%ymm11 + vpmuludq %ymm7,%ymm17,%ymm12 + vpmuludq %ymm7,%ymm18,%ymm13 + vpmuludq %ymm7,%ymm19,%ymm14 + vpmuludq %ymm7,%ymm20,%ymm15 + vpsrlq $32,%ymm18,%ymm9 + + vpmuludq %ymm8,%ymm24,%ymm25 + vpmuludq %ymm8,%ymm16,%ymm26 + vpmuludq %ymm8,%ymm17,%ymm27 + vpmuludq %ymm8,%ymm18,%ymm28 + vpmuludq %ymm8,%ymm19,%ymm29 + vpsrlq $32,%ymm19,%ymm10 + vpaddq %ymm25,%ymm11,%ymm11 + vpaddq %ymm26,%ymm12,%ymm12 + vpaddq %ymm27,%ymm13,%ymm13 + vpaddq %ymm28,%ymm14,%ymm14 + vpaddq %ymm29,%ymm15,%ymm15 + + vpmuludq %ymm9,%ymm23,%ymm25 + vpmuludq %ymm9,%ymm24,%ymm26 + vpmuludq %ymm9,%ymm17,%ymm28 + vpmuludq %ymm9,%ymm18,%ymm29 + vpmuludq %ymm9,%ymm16,%ymm27 + vpsrlq $32,%ymm20,%ymm6 + vpaddq %ymm25,%ymm11,%ymm11 + vpaddq %ymm26,%ymm12,%ymm12 + vpaddq %ymm28,%ymm14,%ymm14 + vpaddq %ymm29,%ymm15,%ymm15 + vpaddq %ymm27,%ymm13,%ymm13 + + vpmuludq %ymm10,%ymm22,%ymm25 + vpmuludq %ymm10,%ymm16,%ymm28 + vpmuludq %ymm10,%ymm17,%ymm29 + vpmuludq %ymm10,%ymm23,%ymm26 + vpmuludq %ymm10,%ymm24,%ymm27 + vpaddq %ymm25,%ymm11,%ymm11 + vpaddq %ymm28,%ymm14,%ymm14 + vpaddq %ymm29,%ymm15,%ymm15 + vpaddq %ymm26,%ymm12,%ymm12 + vpaddq %ymm27,%ymm13,%ymm13 + + vpmuludq %ymm6,%ymm24,%ymm28 + vpmuludq %ymm6,%ymm16,%ymm29 + vpmuludq %ymm6,%ymm21,%ymm25 + vpmuludq %ymm6,%ymm22,%ymm26 + vpmuludq %ymm6,%ymm23,%ymm27 + vpaddq %ymm28,%ymm14,%ymm14 + vpaddq %ymm29,%ymm15,%ymm15 + vpaddq %ymm25,%ymm11,%ymm11 + vpaddq %ymm26,%ymm12,%ymm12 + vpaddq %ymm27,%ymm13,%ymm13 + + vmovdqu64 0(%rsi),%zmm10 + vmovdqu64 64(%rsi),%zmm6 + leaq 128(%rsi),%rsi + + vpsrlq $26,%ymm14,%ymm28 + vpandq %ymm5,%ymm14,%ymm14 + vpaddq %ymm28,%ymm15,%ymm15 + + vpsrlq $26,%ymm11,%ymm25 + vpandq %ymm5,%ymm11,%ymm11 + vpaddq %ymm25,%ymm12,%ymm12 + + vpsrlq $26,%ymm15,%ymm29 + vpandq %ymm5,%ymm15,%ymm15 + + vpsrlq $26,%ymm12,%ymm26 + vpandq %ymm5,%ymm12,%ymm12 + vpaddq %ymm26,%ymm13,%ymm13 + + vpaddq %ymm29,%ymm11,%ymm11 + vpsllq $2,%ymm29,%ymm29 + vpaddq %ymm29,%ymm11,%ymm11 + + vpsrlq $26,%ymm13,%ymm27 + vpandq %ymm5,%ymm13,%ymm13 + vpaddq %ymm27,%ymm14,%ymm14 + + vpsrlq $26,%ymm11,%ymm25 + vpandq %ymm5,%ymm11,%ymm11 + vpaddq %ymm25,%ymm12,%ymm12 + + vpsrlq $26,%ymm14,%ymm28 + vpandq %ymm5,%ymm14,%ymm14 + vpaddq %ymm28,%ymm15,%ymm15 + + vpunpcklqdq %zmm6,%zmm10,%zmm7 + vpunpckhqdq %zmm6,%zmm10,%zmm6 + + vmovdqa32 128(%rcx),%zmm25 + movl $0x7777,%eax + kmovw %eax,%k1 + + vpermd %zmm16,%zmm25,%zmm16 + vpermd 
%zmm17,%zmm25,%zmm17 + vpermd %zmm18,%zmm25,%zmm18 + vpermd %zmm19,%zmm25,%zmm19 + vpermd %zmm20,%zmm25,%zmm20 + + vpermd %zmm11,%zmm25,%zmm16{%k1} + vpermd %zmm12,%zmm25,%zmm17{%k1} + vpermd %zmm13,%zmm25,%zmm18{%k1} + vpermd %zmm14,%zmm25,%zmm19{%k1} + vpermd %zmm15,%zmm25,%zmm20{%k1} + + vpslld $2,%zmm17,%zmm21 + vpslld $2,%zmm18,%zmm22 + vpslld $2,%zmm19,%zmm23 + vpslld $2,%zmm20,%zmm24 + vpaddd %zmm17,%zmm21,%zmm21 + vpaddd %zmm18,%zmm22,%zmm22 + vpaddd %zmm19,%zmm23,%zmm23 + vpaddd %zmm20,%zmm24,%zmm24 + + vpbroadcastq %xmm5,%zmm5 + vpbroadcastq 32(%rcx),%zmm30 + + vpsrlq $52,%zmm7,%zmm9 + vpsllq $12,%zmm6,%zmm10 + vporq %zmm10,%zmm9,%zmm9 + vpsrlq $26,%zmm7,%zmm8 + vpsrlq $14,%zmm6,%zmm10 + vpsrlq $40,%zmm6,%zmm6 + vpandq %zmm5,%zmm9,%zmm9 + vpandq %zmm5,%zmm7,%zmm7 + + vpaddq %zmm2,%zmm9,%zmm2 + subq $192,%rdx + jbe .Ltail_avx512 + +.align 32 +.Loop_avx512: + + vpmuludq %zmm2,%zmm17,%zmm14 + vpaddq %zmm0,%zmm7,%zmm0 + vpmuludq %zmm2,%zmm18,%zmm15 + vpandq %zmm5,%zmm8,%zmm8 + vpmuludq %zmm2,%zmm23,%zmm11 + vpandq %zmm5,%zmm10,%zmm10 + vpmuludq %zmm2,%zmm24,%zmm12 + vporq %zmm30,%zmm6,%zmm6 + vpmuludq %zmm2,%zmm16,%zmm13 + vpaddq %zmm1,%zmm8,%zmm1 + vpaddq %zmm3,%zmm10,%zmm3 + vpaddq %zmm4,%zmm6,%zmm4 + + vmovdqu64 0(%rsi),%zmm10 + vmovdqu64 64(%rsi),%zmm6 + leaq 128(%rsi),%rsi + vpmuludq %zmm0,%zmm19,%zmm28 + vpmuludq %zmm0,%zmm20,%zmm29 + vpmuludq %zmm0,%zmm16,%zmm25 + vpmuludq %zmm0,%zmm17,%zmm26 + vpaddq %zmm28,%zmm14,%zmm14 + vpaddq %zmm29,%zmm15,%zmm15 + vpaddq %zmm25,%zmm11,%zmm11 + vpaddq %zmm26,%zmm12,%zmm12 + + vpmuludq %zmm1,%zmm18,%zmm28 + vpmuludq %zmm1,%zmm19,%zmm29 + vpmuludq %zmm1,%zmm24,%zmm25 + vpmuludq %zmm0,%zmm18,%zmm27 + vpaddq %zmm28,%zmm14,%zmm14 + vpaddq %zmm29,%zmm15,%zmm15 + vpaddq %zmm25,%zmm11,%zmm11 + vpaddq %zmm27,%zmm13,%zmm13 + + vpunpcklqdq %zmm6,%zmm10,%zmm7 + vpunpckhqdq %zmm6,%zmm10,%zmm6 + + vpmuludq %zmm3,%zmm16,%zmm28 + vpmuludq %zmm3,%zmm17,%zmm29 + vpmuludq %zmm1,%zmm16,%zmm26 + vpmuludq %zmm1,%zmm17,%zmm27 + vpaddq %zmm28,%zmm14,%zmm14 + vpaddq %zmm29,%zmm15,%zmm15 + vpaddq %zmm26,%zmm12,%zmm12 + vpaddq %zmm27,%zmm13,%zmm13 + + vpmuludq %zmm4,%zmm24,%zmm28 + vpmuludq %zmm4,%zmm16,%zmm29 + vpmuludq %zmm3,%zmm22,%zmm25 + vpmuludq %zmm3,%zmm23,%zmm26 + vpaddq %zmm28,%zmm14,%zmm14 + vpmuludq %zmm3,%zmm24,%zmm27 + vpaddq %zmm29,%zmm15,%zmm15 + vpaddq %zmm25,%zmm11,%zmm11 + vpaddq %zmm26,%zmm12,%zmm12 + vpaddq %zmm27,%zmm13,%zmm13 + + vpmuludq %zmm4,%zmm21,%zmm25 + vpmuludq %zmm4,%zmm22,%zmm26 + vpmuludq %zmm4,%zmm23,%zmm27 + vpaddq %zmm25,%zmm11,%zmm0 + vpaddq %zmm26,%zmm12,%zmm1 + vpaddq %zmm27,%zmm13,%zmm2 + + vpsrlq $52,%zmm7,%zmm9 + vpsllq $12,%zmm6,%zmm10 + + vpsrlq $26,%zmm14,%zmm3 + vpandq %zmm5,%zmm14,%zmm14 + vpaddq %zmm3,%zmm15,%zmm4 + + vporq %zmm10,%zmm9,%zmm9 + + vpsrlq $26,%zmm0,%zmm11 + vpandq %zmm5,%zmm0,%zmm0 + vpaddq %zmm11,%zmm1,%zmm1 + + vpandq %zmm5,%zmm9,%zmm9 + + vpsrlq $26,%zmm4,%zmm15 + vpandq %zmm5,%zmm4,%zmm4 + + vpsrlq $26,%zmm1,%zmm12 + vpandq %zmm5,%zmm1,%zmm1 + vpaddq %zmm12,%zmm2,%zmm2 + + vpaddq %zmm15,%zmm0,%zmm0 + vpsllq $2,%zmm15,%zmm15 + vpaddq %zmm15,%zmm0,%zmm0 + + vpaddq %zmm9,%zmm2,%zmm2 + vpsrlq $26,%zmm7,%zmm8 + + vpsrlq $26,%zmm2,%zmm13 + vpandq %zmm5,%zmm2,%zmm2 + vpaddq %zmm13,%zmm14,%zmm3 + + vpsrlq $14,%zmm6,%zmm10 + + vpsrlq $26,%zmm0,%zmm11 + vpandq %zmm5,%zmm0,%zmm0 + vpaddq %zmm11,%zmm1,%zmm1 + + vpsrlq $40,%zmm6,%zmm6 + + vpsrlq $26,%zmm3,%zmm14 + vpandq %zmm5,%zmm3,%zmm3 + vpaddq %zmm14,%zmm4,%zmm4 + + vpandq %zmm5,%zmm7,%zmm7 + + subq $128,%rdx + ja .Loop_avx512 + +.Ltail_avx512: + + vpsrlq 
$32,%zmm16,%zmm16 + vpsrlq $32,%zmm17,%zmm17 + vpsrlq $32,%zmm18,%zmm18 + vpsrlq $32,%zmm23,%zmm23 + vpsrlq $32,%zmm24,%zmm24 + vpsrlq $32,%zmm19,%zmm19 + vpsrlq $32,%zmm20,%zmm20 + vpsrlq $32,%zmm21,%zmm21 + vpsrlq $32,%zmm22,%zmm22 + + leaq (%rsi,%rdx,1),%rsi + + vpaddq %zmm0,%zmm7,%zmm0 + + vpmuludq %zmm2,%zmm17,%zmm14 + vpmuludq %zmm2,%zmm18,%zmm15 + vpmuludq %zmm2,%zmm23,%zmm11 + vpandq %zmm5,%zmm8,%zmm8 + vpmuludq %zmm2,%zmm24,%zmm12 + vpandq %zmm5,%zmm10,%zmm10 + vpmuludq %zmm2,%zmm16,%zmm13 + vporq %zmm30,%zmm6,%zmm6 + vpaddq %zmm1,%zmm8,%zmm1 + vpaddq %zmm3,%zmm10,%zmm3 + vpaddq %zmm4,%zmm6,%zmm4 + + vmovdqu64 0(%rsi),%xmm7 + vpmuludq %zmm0,%zmm19,%zmm28 + vpmuludq %zmm0,%zmm20,%zmm29 + vpmuludq %zmm0,%zmm16,%zmm25 + vpmuludq %zmm0,%zmm17,%zmm26 + vpaddq %zmm28,%zmm14,%zmm14 + vpaddq %zmm29,%zmm15,%zmm15 + vpaddq %zmm25,%zmm11,%zmm11 + vpaddq %zmm26,%zmm12,%zmm12 + + vmovdqu64 16(%rsi),%xmm8 + vpmuludq %zmm1,%zmm18,%zmm28 + vpmuludq %zmm1,%zmm19,%zmm29 + vpmuludq %zmm1,%zmm24,%zmm25 + vpmuludq %zmm0,%zmm18,%zmm27 + vpaddq %zmm28,%zmm14,%zmm14 + vpaddq %zmm29,%zmm15,%zmm15 + vpaddq %zmm25,%zmm11,%zmm11 + vpaddq %zmm27,%zmm13,%zmm13 + + vinserti64x2 $1,32(%rsi),%zmm7,%zmm7 + vpmuludq %zmm3,%zmm16,%zmm28 + vpmuludq %zmm3,%zmm17,%zmm29 + vpmuludq %zmm1,%zmm16,%zmm26 + vpmuludq %zmm1,%zmm17,%zmm27 + vpaddq %zmm28,%zmm14,%zmm14 + vpaddq %zmm29,%zmm15,%zmm15 + vpaddq %zmm26,%zmm12,%zmm12 + vpaddq %zmm27,%zmm13,%zmm13 + + vinserti64x2 $1,48(%rsi),%zmm8,%zmm8 + vpmuludq %zmm4,%zmm24,%zmm28 + vpmuludq %zmm4,%zmm16,%zmm29 + vpmuludq %zmm3,%zmm22,%zmm25 + vpmuludq %zmm3,%zmm23,%zmm26 + vpmuludq %zmm3,%zmm24,%zmm27 + vpaddq %zmm28,%zmm14,%zmm3 + vpaddq %zmm29,%zmm15,%zmm15 + vpaddq %zmm25,%zmm11,%zmm11 + vpaddq %zmm26,%zmm12,%zmm12 + vpaddq %zmm27,%zmm13,%zmm13 + + vpmuludq %zmm4,%zmm21,%zmm25 + vpmuludq %zmm4,%zmm22,%zmm26 + vpmuludq %zmm4,%zmm23,%zmm27 + vpaddq %zmm25,%zmm11,%zmm0 + vpaddq %zmm26,%zmm12,%zmm1 + vpaddq %zmm27,%zmm13,%zmm2 + + movl $1,%eax + vpsrldq $8,%zmm3,%zmm14 + vpsrldq $8,%zmm15,%zmm4 + vpsrldq $8,%zmm0,%zmm11 + vpsrldq $8,%zmm1,%zmm12 + vpsrldq $8,%zmm2,%zmm13 + vpaddq %zmm14,%zmm3,%zmm3 + vpaddq %zmm15,%zmm4,%zmm4 + vpaddq %zmm11,%zmm0,%zmm0 + vpaddq %zmm12,%zmm1,%zmm1 + vpaddq %zmm13,%zmm2,%zmm2 + + kmovw %eax,%k3 + vpermq $0x2,%zmm3,%zmm14 + vpermq $0x2,%zmm4,%zmm15 + vpermq $0x2,%zmm0,%zmm11 + vpermq $0x2,%zmm1,%zmm12 + vpermq $0x2,%zmm2,%zmm13 + vpaddq %zmm14,%zmm3,%zmm3 + vpaddq %zmm15,%zmm4,%zmm4 + vpaddq %zmm11,%zmm0,%zmm0 + vpaddq %zmm12,%zmm1,%zmm1 + vpaddq %zmm13,%zmm2,%zmm2 + + vextracti64x4 $0x1,%zmm3,%ymm14 + vextracti64x4 $0x1,%zmm4,%ymm15 + vextracti64x4 $0x1,%zmm0,%ymm11 + vextracti64x4 $0x1,%zmm1,%ymm12 + vextracti64x4 $0x1,%zmm2,%ymm13 + vpaddq %zmm14,%zmm3,%zmm3{%k3}{z} + vpaddq %zmm15,%zmm4,%zmm4{%k3}{z} + vpaddq %zmm11,%zmm0,%zmm0{%k3}{z} + vpaddq %zmm12,%zmm1,%zmm1{%k3}{z} + vpaddq %zmm13,%zmm2,%zmm2{%k3}{z} + + + + vpsrlq $26,%ymm3,%ymm14 + vpandq %ymm5,%ymm3,%ymm3 + vpsrldq $6,%ymm7,%ymm9 + vpsrldq $6,%ymm8,%ymm10 + vpunpckhqdq %ymm8,%ymm7,%ymm6 + vpaddq %ymm14,%ymm4,%ymm4 + + vpsrlq $26,%ymm0,%ymm11 + vpandq %ymm5,%ymm0,%ymm0 + vpunpcklqdq %ymm10,%ymm9,%ymm9 + vpunpcklqdq %ymm8,%ymm7,%ymm7 + vpaddq %ymm11,%ymm1,%ymm1 + + vpsrlq $26,%ymm4,%ymm15 + vpandq %ymm5,%ymm4,%ymm4 + + vpsrlq $26,%ymm1,%ymm12 + vpandq %ymm5,%ymm1,%ymm1 + vpsrlq $30,%ymm9,%ymm10 + vpsrlq $4,%ymm9,%ymm9 + vpaddq %ymm12,%ymm2,%ymm2 + + vpaddq %ymm15,%ymm0,%ymm0 + vpsllq $2,%ymm15,%ymm15 + vpsrlq $26,%ymm7,%ymm8 + vpsrlq $40,%ymm6,%ymm6 + vpaddq %ymm15,%ymm0,%ymm0 + + vpsrlq 
$26,%ymm2,%ymm13 + vpandq %ymm5,%ymm2,%ymm2 + vpandq %ymm5,%ymm9,%ymm9 + vpandq %ymm5,%ymm7,%ymm7 + vpaddq %ymm13,%ymm3,%ymm3 + + vpsrlq $26,%ymm0,%ymm11 + vpandq %ymm5,%ymm0,%ymm0 + vpaddq %ymm2,%ymm9,%ymm2 + vpandq %ymm5,%ymm8,%ymm8 + vpaddq %ymm11,%ymm1,%ymm1 + + vpsrlq $26,%ymm3,%ymm14 + vpandq %ymm5,%ymm3,%ymm3 + vpandq %ymm5,%ymm10,%ymm10 + vporq %ymm30,%ymm6,%ymm6 + vpaddq %ymm14,%ymm4,%ymm4 + + leaq 144(%rsp),%rax + addq $64,%rdx + jnz .Ltail_avx2_512 + + vpsubq %ymm9,%ymm2,%ymm2 + vmovd %xmm0,-112(%rdi) + vmovd %xmm1,-108(%rdi) + vmovd %xmm2,-104(%rdi) + vmovd %xmm3,-100(%rdi) + vmovd %xmm4,-96(%rdi) + vzeroall + leaq -8(%r10),%rsp + + ret + +ENDPROC(poly1305_blocks_avx512) +#endif /* CONFIG_AS_AVX512 */
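Editor's note: the deleted SSE2 two-block path states its trick as "h = (h + m) * r => h = (h + m1) * r^2 + m2 * r", and the imported OpenSSL AVX/AVX2/AVX512 paths rely on the same algebra with higher powers of r (computed by __poly1305_init_avx) to process several blocks per SIMD step. The sketch below is a hypothetical toy check of that identity, not part of the patch: a small prime stands in for 2^130 - 5, plain integers stand in for the padded 16-byte blocks, r clamping is omitted, and it assumes a compiler with unsigned __int128. The real code additionally keeps h in five 26-bit limbs (hence the 0x3ffffff masks) and folds the top carry back in via 2^130 ≡ 5 mod p (the lea (%reg,%reg,4) "times 5" steps).

/*
 * Hypothetical toy check of the block-interleaving identity, NOT part of
 * this patch: over any modulus,
 *   ((h + m1)*r + m2)*r == (h + m1)*r^2 + m2*r,
 * so two (or four, eight, ...) blocks can be absorbed per step once r^2
 * (r^4, ...) is precomputed. A small prime stands in for 2^130 - 5 and
 * plain integers stand in for the padded 16-byte blocks; r clamping and
 * the 26-bit limb representation are omitted.
 */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define P 1000000007ULL /* toy prime in place of 2^130 - 5 */

/* (a * b) mod P; assumes unsigned __int128 support (gcc/clang) */
static uint64_t mulmod(uint64_t a, uint64_t b)
{
	return (uint64_t)((unsigned __int128)a * b % P);
}

int main(void)
{
	uint64_t r = 123456789, r2 = mulmod(r, r);
	uint64_t m[4] = { 11, 22, 33, 44 };	/* toy "blocks" */
	uint64_t h1 = 0, h2 = 0;
	int i;

	/* one block at a time: h = (h + m[i]) * r */
	for (i = 0; i < 4; ++i)
		h1 = mulmod(h1 + m[i], r);

	/* two blocks at a time: h = (h + m[i]) * r^2 + m[i+1] * r */
	for (i = 0; i < 4; i += 2)
		h2 = (mulmod(h2 + m[i], r2) + mulmod(m[i + 1], r)) % P;

	assert(h1 == h2);	/* both schedules give the same result */
	printf("h = %llu\n", (unsigned long long)h1);
	return 0;
}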