From d73986b308e2385201c02ca197260d1adc481f03 Mon Sep 17 00:00:00 2001
From: Vsevolod Stakhov
Date: Mon, 12 Oct 2015 15:24:31 +0100
Subject: [PATCH] Add Sandy2x implementation by Tung Chou for curve25519

---
 src/CMakeLists.txt                      |    2 +-
 src/libcryptobox/CMakeLists.txt         |    2 +
 src/libcryptobox/curve25519/avx.S       | 1953 +++++++++++++++++++++++
 src/libcryptobox/curve25519/avx.c       |  207 +++
 src/libcryptobox/curve25519/constants.S |   32 +
 5 files changed, 2195 insertions(+), 1 deletion(-)
 create mode 100644 src/libcryptobox/curve25519/avx.S
 create mode 100644 src/libcryptobox/curve25519/avx.c
 create mode 100644 src/libcryptobox/curve25519/constants.S

diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 668893a1f..17ff6ebe5 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -85,7 +85,7 @@ SET(PLUGINSSRC	plugins/surbl.c
 	plugins/chartable.c
 	plugins/fuzzy_check.c
 	plugins/spf.c
-	plugins/dkim_check.c libutil/unix-std.h libserver/rspamd_control.c libserver/rspamd_control.h libcryptobox/curve25519/curve25519.c)
+	plugins/dkim_check.c libutil/unix-std.h libserver/rspamd_control.c libserver/rspamd_control.h libcryptobox/curve25519/curve25519.c libcryptobox/curve25519/avx.c)

 SET(MODULES_LIST surbl regexp chartable fuzzy_check spf dkim)
 SET(WORKERS_LIST normal controller smtp_proxy fuzzy lua http_proxy)
diff --git a/src/libcryptobox/CMakeLists.txt b/src/libcryptobox/CMakeLists.txt
index 5a172532a..224a9deb9 100644
--- a/src/libcryptobox/CMakeLists.txt
+++ b/src/libcryptobox/CMakeLists.txt
@@ -60,6 +60,8 @@ ENDIF(HAVE_AVX2)
 IF(HAVE_AVX)
 	SET(CHACHASRC ${CHACHASRC} ${CMAKE_CURRENT_SOURCE_DIR}/chacha20/avx.S)
 	SET(POLYSRC ${POLYSRC} ${CMAKE_CURRENT_SOURCE_DIR}/poly1305/avx.S)
+	SET(CURVESRC ${CURVESRC} ${CMAKE_CURRENT_SOURCE_DIR}/curve25519/avx.S
+		${CMAKE_CURRENT_SOURCE_DIR}/curve25519/avx.c)
 ENDIF(HAVE_AVX)
 IF(HAVE_SSE2)
 	SET(CHACHASRC ${CHACHASRC} ${CMAKE_CURRENT_SOURCE_DIR}/chacha20/sse2.S)
diff --git a/src/libcryptobox/curve25519/avx.S b/src/libcryptobox/curve25519/avx.S
new file mode 100644
index 000000000..9a49f1c8a
--- /dev/null
+++ b/src/libcryptobox/curve25519/avx.S
@@ -0,0 +1,1953 @@
+#include "../macro.S"
+#include "constants.S"
+
+.p2align 5
+GLOBAL_HIDDEN_FN_EXT ladder_avx,2,0
+ladder_avx_local:
+mov %rsp,%r11
+and $31,%r11
+add $1856,%r11
+sub %r11,%rsp
+movq %r11,1824(%rsp)
+movq %r12,1832(%rsp)
+movq %r13,1840(%rsp)
+movq %r14,1848(%rsp)
+movdqa v0_0(%rip),%xmm0
+movdqa v1_0(%rip),%xmm1
+movdqu 0(%rdi),%xmm2
+movdqa %xmm2,0(%rsp)
+movdqu 16(%rdi),%xmm2
+movdqa %xmm2,16(%rsp)
+movdqu 32(%rdi),%xmm2
+movdqa %xmm2,32(%rsp)
+movdqu 48(%rdi),%xmm2
+movdqa %xmm2,48(%rsp)
+movdqu 64(%rdi),%xmm2
+movdqa %xmm2,64(%rsp)
+movdqa %xmm1,80(%rsp)
+movdqa %xmm0,96(%rsp)
+movdqa %xmm0,112(%rsp)
+movdqa %xmm0,128(%rsp)
+movdqa %xmm0,144(%rsp)
+movdqa %xmm1,%xmm0
+pxor %xmm1,%xmm1
+pxor %xmm2,%xmm2
+pxor %xmm3,%xmm3
+pxor %xmm4,%xmm4
+pxor %xmm5,%xmm5
+pxor %xmm6,%xmm6
+pxor %xmm7,%xmm7
+pxor %xmm8,%xmm8
+pxor %xmm9,%xmm9
+movdqu 0(%rdi),%xmm10
+movdqa %xmm10,160(%rsp)
+movdqu 16(%rdi),%xmm10
+movdqa %xmm10,176(%rsp)
+pmuludq v19_19(%rip),%xmm10
+movdqa %xmm10,192(%rsp)
+movdqu 32(%rdi),%xmm10
+movdqa %xmm10,208(%rsp)
+pmuludq v19_19(%rip),%xmm10
+movdqa %xmm10,224(%rsp)
+movdqu 48(%rdi),%xmm10
+movdqa %xmm10,240(%rsp)
+pmuludq v19_19(%rip),%xmm10
+movdqa %xmm10,256(%rsp)
+movdqu 64(%rdi),%xmm10
+movdqa %xmm10,272(%rsp)
+pmuludq v19_19(%rip),%xmm10
+movdqa %xmm10,288(%rsp)
+movdqu 8(%rdi),%xmm10
+pmuludq v2_1(%rip),%xmm10
+movdqa %xmm10,304(%rsp)
+pmuludq v19_19(%rip),%xmm10
+movdqa %xmm10,320(%rsp)
+movdqu 24(%rdi),%xmm10
+pmuludq v2_1(%rip),%xmm10
+movdqa %xmm10,336(%rsp)
+pmuludq v19_19(%rip),%xmm10
+movdqa %xmm10,352(%rsp)
+movdqu 40(%rdi),%xmm10
+pmuludq v2_1(%rip),%xmm10
+movdqa %xmm10,368(%rsp)
+pmuludq v19_19(%rip),%xmm10
+movdqa %xmm10,384(%rsp)
+movdqu 56(%rdi),%xmm10
+pmuludq v2_1(%rip),%xmm10
+movdqa %xmm10,400(%rsp)
+pmuludq v19_19(%rip),%xmm10
+movdqa %xmm10,416(%rsp)
+movdqu 0(%rdi),%xmm10
+movdqu 64(%rdi),%xmm11
+blendps $12, %xmm11, %xmm10
+pshufd $2,%xmm10,%xmm10
+pmuludq v38_1(%rip),%xmm10
+movdqa %xmm10,432(%rsp)
+movq 0(%rsi),%rdx
+movq 8(%rsi),%rcx
+movq 16(%rsi),%r8
+movq 24(%rsi),%r9
+shrd $1,%rcx,%rdx
+shrd $1,%r8,%rcx
+shrd $1,%r9,%r8
+shr $1,%r9
+xorq 0(%rsi),%rdx
+xorq 8(%rsi),%rcx
+xorq 16(%rsi),%r8
+xorq 24(%rsi),%r9
+leaq 800(%rsp),%rsi
+mov $64,%rax
+._small_loop:
+mov %rdx,%r10
+mov %rcx,%r11
+mov %r8,%r12
+mov %r9,%r13
+shr $1,%rdx
+shr $1,%rcx
+shr $1,%r8
+shr $1,%r9
+and $1,%r10d
+and $1,%r11d
+and $1,%r12d
+and $1,%r13d
+neg %r10
+neg %r11
+neg %r12
+neg %r13
+movl %r10d,0(%rsi)
+movl %r11d,256(%rsi)
+movl %r12d,512(%rsi)
+movl %r13d,768(%rsi)
+add $4,%rsi
+sub $1,%rax
+jne ._small_loop
+mov $255,%rdx
+add $760,%rsi
+._loop_ladder:
+sub $1,%rdx
+vbroadcastss 0(%rsi),%xmm10
+sub $4,%rsi
+movdqa 0(%rsp),%xmm11
+movdqa 80(%rsp),%xmm12
+vpxor %xmm11,%xmm0,%xmm13
+pand %xmm10,%xmm13
+pxor %xmm13,%xmm0
+pxor %xmm13,%xmm11
+vpxor %xmm12,%xmm1,%xmm13
+pand %xmm10,%xmm13
+pxor %xmm13,%xmm1
+pxor %xmm13,%xmm12
+movdqa 16(%rsp),%xmm13
+movdqa 96(%rsp),%xmm14
+vpxor %xmm13,%xmm2,%xmm15
+pand %xmm10,%xmm15
+pxor %xmm15,%xmm2
+pxor %xmm15,%xmm13
+vpxor %xmm14,%xmm3,%xmm15
+pand %xmm10,%xmm15
+pxor %xmm15,%xmm3
+pxor %xmm15,%xmm14
+movdqa %xmm13,0(%rsp)
+movdqa %xmm14,16(%rsp)
+movdqa 32(%rsp),%xmm13
+movdqa 112(%rsp),%xmm14
+vpxor %xmm13,%xmm4,%xmm15
+pand %xmm10,%xmm15
+pxor %xmm15,%xmm4
+pxor %xmm15,%xmm13
+vpxor %xmm14,%xmm5,%xmm15
+pand %xmm10,%xmm15
+pxor %xmm15,%xmm5
+pxor %xmm15,%xmm14
+movdqa %xmm13,32(%rsp)
+movdqa %xmm14,80(%rsp)
+movdqa 48(%rsp),%xmm13
+movdqa 128(%rsp),%xmm14
+vpxor %xmm13,%xmm6,%xmm15
+pand %xmm10,%xmm15
+pxor %xmm15,%xmm6
+pxor %xmm15,%xmm13
+vpxor %xmm14,%xmm7,%xmm15
+pand %xmm10,%xmm15
+pxor %xmm15,%xmm7
+pxor %xmm15,%xmm14
+movdqa %xmm13,48(%rsp)
+movdqa %xmm14,96(%rsp)
+movdqa 64(%rsp),%xmm13
+movdqa 144(%rsp),%xmm14
+vpxor %xmm13,%xmm8,%xmm15
+pand %xmm10,%xmm15
+pxor %xmm15,%xmm8
+pxor %xmm15,%xmm13
+vpxor %xmm14,%xmm9,%xmm15
+pand %xmm10,%xmm15
+pxor %xmm15,%xmm9
+pxor %xmm15,%xmm14
+movdqa %xmm13,64(%rsp)
+movdqa %xmm14,112(%rsp)
+vpaddq subc0(%rip),%xmm11,%xmm10
+psubq %xmm12,%xmm10
+paddq %xmm12,%xmm11
+vpunpckhqdq %xmm10,%xmm11,%xmm12
+vpunpcklqdq %xmm10,%xmm11,%xmm10
+vpaddq %xmm1,%xmm0,%xmm11
+paddq subc0(%rip),%xmm0
+psubq %xmm1,%xmm0
+vpunpckhqdq %xmm11,%xmm0,%xmm1
+vpunpcklqdq %xmm11,%xmm0,%xmm0
+vpmuludq %xmm0,%xmm10,%xmm11
+vpmuludq %xmm1,%xmm10,%xmm13
+movdqa %xmm1,128(%rsp)
+paddq %xmm1,%xmm1
+vpmuludq %xmm0,%xmm12,%xmm14
+movdqa %xmm0,144(%rsp)
+paddq %xmm14,%xmm13
+vpmuludq %xmm1,%xmm12,%xmm0
+movdqa %xmm1,448(%rsp)
+vpaddq %xmm3,%xmm2,%xmm1
+paddq subc2(%rip),%xmm2
+psubq %xmm3,%xmm2
+vpunpckhqdq %xmm1,%xmm2,%xmm3
+vpunpcklqdq %xmm1,%xmm2,%xmm1
+vpmuludq %xmm1,%xmm10,%xmm2
+paddq %xmm2,%xmm0
+vpmuludq %xmm3,%xmm10,%xmm2
+movdqa %xmm3,464(%rsp)
+paddq %xmm3,%xmm3
+vpmuludq %xmm1,%xmm12,%xmm14
+movdqa %xmm1,480(%rsp)
+paddq %xmm14,%xmm2
+vpmuludq %xmm3,%xmm12,%xmm1
+movdqa %xmm3,496(%rsp)
+vpaddq %xmm5,%xmm4,%xmm3
+paddq subc2(%rip),%xmm4
+psubq %xmm5,%xmm4
+vpunpckhqdq %xmm3,%xmm4,%xmm5
+vpunpcklqdq %xmm3,%xmm4,%xmm3
+vpmuludq %xmm3,%xmm10,%xmm4
+paddq %xmm4,%xmm1
+vpmuludq %xmm5,%xmm10,%xmm4
+movdqa %xmm5,512(%rsp)
+paddq %xmm5,%xmm5
+vpmuludq %xmm3,%xmm12,%xmm14
+movdqa %xmm3,528(%rsp)
+paddq %xmm14,%xmm4
+vpaddq %xmm7,%xmm6,%xmm3
+paddq subc2(%rip),%xmm6
+psubq %xmm7,%xmm6
+vpunpckhqdq %xmm3,%xmm6,%xmm7
+vpunpcklqdq %xmm3,%xmm6,%xmm3
+vpmuludq %xmm3,%xmm10,%xmm6
+vpmuludq %xmm5,%xmm12,%xmm14
+movdqa %xmm5,544(%rsp)
+pmuludq v19_19(%rip),%xmm5
+movdqa %xmm5,560(%rsp)
+paddq %xmm14,%xmm6
+vpmuludq %xmm7,%xmm10,%xmm5
+movdqa %xmm7,576(%rsp)
+paddq %xmm7,%xmm7
+vpmuludq %xmm3,%xmm12,%xmm14
+movdqa %xmm3,592(%rsp)
+paddq %xmm14,%xmm5
+pmuludq v19_19(%rip),%xmm3
+movdqa %xmm3,608(%rsp)
+vpaddq %xmm9,%xmm8,%xmm3
+paddq subc2(%rip),%xmm8
+psubq %xmm9,%xmm8
+vpunpckhqdq %xmm3,%xmm8,%xmm9
+vpunpcklqdq %xmm3,%xmm8,%xmm3
+movdqa %xmm3,624(%rsp)
+vpmuludq %xmm7,%xmm12,%xmm8
+movdqa %xmm7,640(%rsp)
+pmuludq v19_19(%rip),%xmm7
+movdqa %xmm7,656(%rsp)
+vpmuludq %xmm3,%xmm10,%xmm7
+paddq %xmm7,%xmm8
+vpmuludq %xmm9,%xmm10,%xmm7
+movdqa %xmm9,672(%rsp)
+paddq %xmm9,%xmm9
+vpmuludq %xmm3,%xmm12,%xmm10
+paddq %xmm10,%xmm7
+pmuludq v19_19(%rip),%xmm3
+movdqa %xmm3,688(%rsp)
+pmuludq v19_19(%rip),%xmm12
+vpmuludq %xmm9,%xmm12,%xmm3
+movdqa %xmm9,704(%rsp)
+paddq %xmm3,%xmm11
+movdqa 0(%rsp),%xmm3
+movdqa 16(%rsp),%xmm9
+vpaddq subc2(%rip),%xmm3,%xmm10
+psubq %xmm9,%xmm10
+paddq %xmm9,%xmm3
+vpunpckhqdq %xmm10,%xmm3,%xmm9
+vpunpcklqdq %xmm10,%xmm3,%xmm3
+vpmuludq 144(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm0
+vpmuludq 128(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm2
+vpmuludq 480(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm1
+vpmuludq 464(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm4
+vpmuludq 528(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm6
+vpmuludq 512(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm5
+vpmuludq 592(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm8
+vpmuludq 576(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm7
+pmuludq v19_19(%rip),%xmm3
+vpmuludq 624(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm11
+pmuludq 672(%rsp),%xmm3
+paddq %xmm3,%xmm13
+vpmuludq 144(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm2
+vpmuludq 448(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm1
+vpmuludq 480(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm4
+vpmuludq 496(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm6
+vpmuludq 528(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm5
+vpmuludq 544(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm8
+vpmuludq 592(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm7
+pmuludq v19_19(%rip),%xmm9
+vpmuludq 640(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm11
+vpmuludq 624(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm13
+pmuludq 704(%rsp),%xmm9
+paddq %xmm9,%xmm0
+movdqa 32(%rsp),%xmm3
+movdqa 80(%rsp),%xmm9
+vpaddq subc2(%rip),%xmm3,%xmm10
+psubq %xmm9,%xmm10
+paddq %xmm9,%xmm3
+vpunpckhqdq %xmm10,%xmm3,%xmm9
+vpunpcklqdq %xmm10,%xmm3,%xmm3
+vpmuludq 144(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm1
+vpmuludq 128(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm4
+vpmuludq 480(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm6
+vpmuludq 464(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm5
+vpmuludq 528(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm8
+vpmuludq 512(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm7
+pmuludq v19_19(%rip),%xmm3
+vpmuludq 592(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm11
+vpmuludq 576(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm13
+vpmuludq 624(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm0
+pmuludq 672(%rsp),%xmm3
+paddq %xmm3,%xmm2
+vpmuludq 144(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm4
+vpmuludq 448(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm6
+vpmuludq 480(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm5
+vpmuludq 496(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm8
+vpmuludq 528(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm7
+pmuludq v19_19(%rip),%xmm9
+vpmuludq 544(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm11
+vpmuludq 592(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm13
+vpmuludq 640(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm0
+vpmuludq 624(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm2
+pmuludq 704(%rsp),%xmm9
+paddq %xmm9,%xmm1
+movdqa 48(%rsp),%xmm3
+movdqa 96(%rsp),%xmm9
+vpaddq subc2(%rip),%xmm3,%xmm10
+psubq %xmm9,%xmm10
+paddq %xmm9,%xmm3
+vpunpckhqdq %xmm10,%xmm3,%xmm9
+vpunpcklqdq %xmm10,%xmm3,%xmm3
+vpmuludq 144(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm6
+vpmuludq 128(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm5
+vpmuludq 480(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm8
+vpmuludq 464(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm7
+pmuludq v19_19(%rip),%xmm3
+vpmuludq 528(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm11
+vpmuludq 512(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm13
+vpmuludq 592(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm0
+vpmuludq 576(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm2
+vpmuludq 624(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm1
+pmuludq 672(%rsp),%xmm3
+paddq %xmm3,%xmm4
+vpmuludq 144(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm5
+vpmuludq 448(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm8
+vpmuludq 480(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm7
+pmuludq v19_19(%rip),%xmm9
+vpmuludq 496(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm11
+vpmuludq 528(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm13
+vpmuludq 544(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm0
+vpmuludq 592(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm2
+vpmuludq 640(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm1
+vpmuludq 624(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm4
+pmuludq 704(%rsp),%xmm9
+paddq %xmm9,%xmm6
+movdqa 64(%rsp),%xmm3
+movdqa 112(%rsp),%xmm9
+vpaddq subc2(%rip),%xmm3,%xmm10
+psubq %xmm9,%xmm10
+paddq %xmm9,%xmm3
+vpunpckhqdq %xmm10,%xmm3,%xmm9
+vpunpcklqdq %xmm10,%xmm3,%xmm3
+vpmuludq 144(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm8
+vpmuludq 128(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm7
+pmuludq v19_19(%rip),%xmm3
+vpmuludq 480(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm11
+vpmuludq 464(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm13
+vpmuludq 528(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm0
+vpmuludq 512(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm2
+vpmuludq 592(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm1
+vpmuludq 576(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm4
+vpmuludq 624(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm6
+pmuludq 672(%rsp),%xmm3
+paddq %xmm3,%xmm5
+vpmuludq 144(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm7
+pmuludq v19_19(%rip),%xmm9
+vpmuludq 448(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm11
+vpmuludq 480(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm13
+vpmuludq 496(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm0
+vpmuludq 528(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm2
+vpmuludq 544(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm1
+vpmuludq 592(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm4
+vpmuludq 640(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm6
+vpmuludq 624(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm5
+pmuludq 704(%rsp),%xmm9
+paddq %xmm9,%xmm8
+vpsrlq $25,%xmm4,%xmm3
+paddq %xmm3,%xmm6
+pand m25(%rip),%xmm4
+vpsrlq $26,%xmm11,%xmm3
+paddq %xmm3,%xmm13
+pand m26(%rip),%xmm11
+vpsrlq $26,%xmm6,%xmm3
+paddq %xmm3,%xmm5
+pand m26(%rip),%xmm6
+vpsrlq $25,%xmm13,%xmm3
+paddq %xmm3,%xmm0
+pand m25(%rip),%xmm13
+vpsrlq $25,%xmm5,%xmm3
+paddq %xmm3,%xmm8
+pand m25(%rip),%xmm5
+vpsrlq $26,%xmm0,%xmm3
+paddq %xmm3,%xmm2
+pand m26(%rip),%xmm0
+vpsrlq $26,%xmm8,%xmm3
+paddq %xmm3,%xmm7
+pand m26(%rip),%xmm8
+vpsrlq $25,%xmm2,%xmm3
+paddq %xmm3,%xmm1
+pand m25(%rip),%xmm2
+vpsrlq $25,%xmm7,%xmm3
+vpsllq $4,%xmm3,%xmm9
+paddq %xmm3,%xmm11
+psllq $1,%xmm3
+paddq %xmm3,%xmm9
+paddq %xmm9,%xmm11
+pand m25(%rip),%xmm7
+vpsrlq $26,%xmm1,%xmm3
+paddq %xmm3,%xmm4
+pand m26(%rip),%xmm1
+vpsrlq $26,%xmm11,%xmm3
+paddq %xmm3,%xmm13
+pand m26(%rip),%xmm11
+vpsrlq $25,%xmm4,%xmm3
+paddq %xmm3,%xmm6
+pand m25(%rip),%xmm4
+vpunpcklqdq %xmm13,%xmm11,%xmm3
+vpunpckhqdq %xmm13,%xmm11,%xmm9
+vpaddq subc0(%rip),%xmm9,%xmm10
+psubq %xmm3,%xmm10
+paddq %xmm9,%xmm3
+vpunpckhqdq %xmm3,%xmm10,%xmm9
+punpcklqdq %xmm3,%xmm10
+vpmuludq %xmm10,%xmm10,%xmm3
+paddq %xmm10,%xmm10
+vpmuludq %xmm9,%xmm10,%xmm11
+vpunpcklqdq %xmm2,%xmm0,%xmm12
+vpunpckhqdq %xmm2,%xmm0,%xmm0
+vpaddq subc2(%rip),%xmm0,%xmm2
+psubq %xmm12,%xmm2
+paddq %xmm0,%xmm12
+vpunpckhqdq %xmm12,%xmm2,%xmm0
+punpcklqdq %xmm12,%xmm2
+vpmuludq %xmm2,%xmm10,%xmm12
+vpaddq %xmm9,%xmm9,%xmm13
+vpmuludq %xmm13,%xmm9,%xmm9
+paddq %xmm9,%xmm12
+vpmuludq %xmm0,%xmm10,%xmm9
+vpmuludq %xmm2,%xmm13,%xmm14
+paddq %xmm14,%xmm9
+vpunpcklqdq %xmm4,%xmm1,%xmm14
+vpunpckhqdq %xmm4,%xmm1,%xmm1
+vpaddq subc2(%rip),%xmm1,%xmm4
+psubq %xmm14,%xmm4
+paddq %xmm1,%xmm14
+vpunpckhqdq %xmm14,%xmm4,%xmm1
+punpcklqdq %xmm14,%xmm4
+movdqa %xmm1,0(%rsp)
+paddq %xmm1,%xmm1
+movdqa %xmm1,16(%rsp)
+pmuludq v19_19(%rip),%xmm1
+movdqa %xmm1,32(%rsp)
+vpmuludq %xmm4,%xmm10,%xmm1
+vpmuludq %xmm2,%xmm2,%xmm14
+paddq %xmm14,%xmm1
+vpmuludq 0(%rsp),%xmm10,%xmm14
+vpmuludq %xmm4,%xmm13,%xmm15
+paddq %xmm15,%xmm14
+vpunpcklqdq %xmm5,%xmm6,%xmm15
+vpunpckhqdq %xmm5,%xmm6,%xmm5
+vpaddq subc2(%rip),%xmm5,%xmm6
+psubq %xmm15,%xmm6
+paddq %xmm5,%xmm15
+vpunpckhqdq %xmm15,%xmm6,%xmm5
+punpcklqdq %xmm15,%xmm6
+movdqa %xmm6,48(%rsp)
+pmuludq v19_19(%rip),%xmm6
+movdqa %xmm6,64(%rsp)
+movdqa %xmm5,80(%rsp)
+pmuludq v38_38(%rip),%xmm5
+movdqa %xmm5,96(%rsp)
+vpmuludq 48(%rsp),%xmm10,%xmm5
+vpaddq %xmm0,%xmm0,%xmm6
+vpmuludq %xmm6,%xmm0,%xmm0
+paddq %xmm0,%xmm5
+vpmuludq 80(%rsp),%xmm10,%xmm0
+vpmuludq %xmm4,%xmm6,%xmm15
+paddq %xmm15,%xmm0
+vpmuludq %xmm6,%xmm13,%xmm15
+paddq %xmm15,%xmm1
+vpmuludq %xmm6,%xmm2,%xmm15
+paddq %xmm15,%xmm14
+vpunpcklqdq %xmm7,%xmm8,%xmm15
+vpunpckhqdq %xmm7,%xmm8,%xmm7
+vpaddq subc2(%rip),%xmm7,%xmm8
+psubq %xmm15,%xmm8
+paddq %xmm7,%xmm15
+vpunpckhqdq %xmm15,%xmm8,%xmm7
+punpcklqdq %xmm15,%xmm8
+movdqa %xmm8,112(%rsp)
+pmuludq v19_19(%rip),%xmm8
+movdqa %xmm8,448(%rsp)
+vpmuludq 112(%rsp),%xmm10,%xmm8
+vpmuludq %xmm7,%xmm10,%xmm10
+vpmuludq v38_38(%rip),%xmm7,%xmm15
+vpmuludq %xmm15,%xmm7,%xmm7
+paddq %xmm7,%xmm8
+vpmuludq %xmm15,%xmm13,%xmm7
+paddq %xmm7,%xmm3
+vpmuludq %xmm15,%xmm2,%xmm7
+paddq %xmm7,%xmm11
+vpmuludq 80(%rsp),%xmm13,%xmm7
+paddq %xmm7,%xmm7
+paddq %xmm7,%xmm8
+vpmuludq 16(%rsp),%xmm13,%xmm7
+paddq %xmm7,%xmm5
+vpmuludq 48(%rsp),%xmm13,%xmm7
+paddq %xmm7,%xmm0
+vpmuludq 112(%rsp),%xmm13,%xmm7
+paddq %xmm7,%xmm10
+vpmuludq %xmm15,%xmm6,%xmm7
+paddq %xmm7,%xmm12
+vpmuludq %xmm15,%xmm4,%xmm7
+paddq %xmm7,%xmm9
+vpaddq %xmm2,%xmm2,%xmm2
+vpmuludq %xmm4,%xmm2,%xmm7
+paddq %xmm7,%xmm5
+vpmuludq 448(%rsp),%xmm2,%xmm7
+paddq %xmm7,%xmm3
+vpmuludq 448(%rsp),%xmm6,%xmm7
+paddq %xmm7,%xmm11
+vpmuludq 0(%rsp),%xmm2,%xmm7
+paddq %xmm7,%xmm0
+vpmuludq 48(%rsp),%xmm2,%xmm7
+paddq %xmm7,%xmm8
+vpmuludq 80(%rsp),%xmm2,%xmm2
+paddq %xmm2,%xmm10
+vpmuludq 96(%rsp),%xmm4,%xmm2
+paddq %xmm2,%xmm11
+vpmuludq %xmm4,%xmm4,%xmm2
+paddq %xmm2,%xmm8
+vpaddq %xmm4,%xmm4,%xmm2
+vpmuludq 448(%rsp),%xmm2,%xmm4
+paddq %xmm4,%xmm12
+vpmuludq 16(%rsp),%xmm15,%xmm4
+paddq %xmm4,%xmm1
+vpmuludq 48(%rsp),%xmm15,%xmm4
+paddq %xmm4,%xmm14
+vpmuludq 96(%rsp),%xmm6,%xmm4
+paddq %xmm4,%xmm3
+movdqa 16(%rsp),%xmm4
+pmuludq 448(%rsp),%xmm4
+paddq %xmm4,%xmm9
+vpmuludq 16(%rsp),%xmm6,%xmm4
+paddq %xmm4,%xmm8
+vpmuludq 48(%rsp),%xmm6,%xmm4
+paddq %xmm4,%xmm10
+vpmuludq 80(%rsp),%xmm15,%xmm4
+paddq %xmm4,%xmm4
+paddq %xmm4,%xmm5
+vpmuludq 112(%rsp),%xmm15,%xmm4
+paddq %xmm4,%xmm0
+movdqa 48(%rsp),%xmm4
+paddq %xmm4,%xmm4
+pmuludq 448(%rsp),%xmm4
+paddq %xmm4,%xmm1
+movdqa 80(%rsp),%xmm4
+paddq %xmm4,%xmm4
+pmuludq 448(%rsp),%xmm4
+paddq %xmm4,%xmm14
+vpmuludq 64(%rsp),%xmm2,%xmm4
+paddq %xmm4,%xmm3
+movdqa 16(%rsp),%xmm4
+pmuludq 64(%rsp),%xmm4
+paddq %xmm4,%xmm11
+movdqa 16(%rsp),%xmm4
+pmuludq 96(%rsp),%xmm4
+paddq %xmm4,%xmm12
+movdqa 48(%rsp),%xmm4
+pmuludq 96(%rsp),%xmm4
+paddq %xmm4,%xmm9
+vpmuludq 0(%rsp),%xmm2,%xmm2
+paddq %xmm2,%xmm10
+movdqa 32(%rsp),%xmm2
+pmuludq 0(%rsp),%xmm2
+paddq %xmm2,%xmm3
+movdqa 64(%rsp),%xmm2
+pmuludq 48(%rsp),%xmm2
+paddq %xmm2,%xmm12
+movdqa 96(%rsp),%xmm2
+pmuludq 80(%rsp),%xmm2
+paddq %xmm2,%xmm1
+movdqa 448(%rsp),%xmm2
+pmuludq 112(%rsp),%xmm2
+paddq %xmm2,%xmm5
+vpsrlq $26,%xmm3,%xmm2
+paddq %xmm2,%xmm11
+pand m26(%rip),%xmm3
+vpsrlq $25,%xmm14,%xmm2
+paddq %xmm2,%xmm5
+pand m25(%rip),%xmm14
+vpsrlq $25,%xmm11,%xmm2
+paddq %xmm2,%xmm12
+pand m25(%rip),%xmm11
+vpsrlq $26,%xmm5,%xmm2
+paddq %xmm2,%xmm0
+pand m26(%rip),%xmm5
+vpsrlq $26,%xmm12,%xmm2
+paddq %xmm2,%xmm9
+pand m26(%rip),%xmm12
+vpsrlq $25,%xmm0,%xmm2
+paddq %xmm2,%xmm8
+pand m25(%rip),%xmm0
+vpsrlq $25,%xmm9,%xmm2
+paddq %xmm2,%xmm1
+pand m25(%rip),%xmm9
+vpsrlq $26,%xmm8,%xmm2
+paddq %xmm2,%xmm10
+pand m26(%rip),%xmm8
+vpsrlq $26,%xmm1,%xmm2
+paddq %xmm2,%xmm14
+pand m26(%rip),%xmm1
+vpsrlq $25,%xmm10,%xmm2
+vpsllq $4,%xmm2,%xmm4
+paddq %xmm2,%xmm3
+psllq $1,%xmm2
+paddq %xmm2,%xmm4
+paddq %xmm4,%xmm3
+pand m25(%rip),%xmm10
+vpsrlq $25,%xmm14,%xmm2
+paddq %xmm2,%xmm5
+pand m25(%rip),%xmm14
+vpsrlq $26,%xmm3,%xmm2
+paddq %xmm2,%xmm11
+pand m26(%rip),%xmm3
+vpunpckhqdq %xmm11,%xmm3,%xmm2
+movdqa %xmm2,0(%rsp)
+pshufd $0,%xmm3,%xmm2
+pshufd $0,%xmm11,%xmm3
+vpmuludq 160(%rsp),%xmm2,%xmm4
+vpmuludq 432(%rsp),%xmm3,%xmm6
+paddq %xmm6,%xmm4
+vpmuludq 176(%rsp),%xmm2,%xmm6
+vpmuludq 304(%rsp),%xmm3,%xmm7
+paddq %xmm7,%xmm6
+vpmuludq 208(%rsp),%xmm2,%xmm7
+vpmuludq 336(%rsp),%xmm3,%xmm11
+paddq %xmm11,%xmm7
+vpmuludq 240(%rsp),%xmm2,%xmm11
+vpmuludq 368(%rsp),%xmm3,%xmm13
+paddq %xmm13,%xmm11
+vpmuludq 272(%rsp),%xmm2,%xmm2
+vpmuludq 400(%rsp),%xmm3,%xmm3
+paddq %xmm3,%xmm2
+vpunpckhqdq %xmm9,%xmm12,%xmm3
+movdqa %xmm3,16(%rsp)
+pshufd $0,%xmm12,%xmm3
+pshufd $0,%xmm9,%xmm9
+vpmuludq 288(%rsp),%xmm3,%xmm12
+paddq %xmm12,%xmm4
+vpmuludq 416(%rsp),%xmm9,%xmm12
+paddq %xmm12,%xmm4
+vpmuludq 160(%rsp),%xmm3,%xmm12
+paddq %xmm12,%xmm6
+vpmuludq 432(%rsp),%xmm9,%xmm12
+paddq %xmm12,%xmm6
+vpmuludq 176(%rsp),%xmm3,%xmm12
+paddq %xmm12,%xmm7
+vpmuludq 304(%rsp),%xmm9,%xmm12
+paddq %xmm12,%xmm7
+vpmuludq 208(%rsp),%xmm3,%xmm12
+paddq %xmm12,%xmm11
+vpmuludq 336(%rsp),%xmm9,%xmm12
+paddq %xmm12,%xmm11
+vpmuludq 240(%rsp),%xmm3,%xmm3
+paddq %xmm3,%xmm2
+vpmuludq 368(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm2
+vpunpckhqdq %xmm14,%xmm1,%xmm3
+movdqa %xmm3,32(%rsp)
+pshufd $0,%xmm1,%xmm1
+pshufd $0,%xmm14,%xmm3
+vpmuludq 256(%rsp),%xmm1,%xmm9
+paddq %xmm9,%xmm4
+vpmuludq 384(%rsp),%xmm3,%xmm9
+paddq %xmm9,%xmm4
+vpmuludq 288(%rsp),%xmm1,%xmm9
+paddq %xmm9,%xmm6
+vpmuludq 416(%rsp),%xmm3,%xmm9
+paddq %xmm9,%xmm6
+vpmuludq 160(%rsp),%xmm1,%xmm9
+paddq %xmm9,%xmm7
+vpmuludq 432(%rsp),%xmm3,%xmm9
+paddq %xmm9,%xmm7
+vpmuludq 176(%rsp),%xmm1,%xmm9
+paddq %xmm9,%xmm11
+vpmuludq 304(%rsp),%xmm3,%xmm9
+paddq %xmm9,%xmm11
+vpmuludq 208(%rsp),%xmm1,%xmm1
+paddq %xmm1,%xmm2
+vpmuludq 336(%rsp),%xmm3,%xmm1
+paddq %xmm1,%xmm2
+vpunpckhqdq %xmm0,%xmm5,%xmm1
+movdqa %xmm1,48(%rsp)
+pshufd $0,%xmm5,%xmm1
+pshufd $0,%xmm0,%xmm0
+vpmuludq 224(%rsp),%xmm1,%xmm3
+paddq %xmm3,%xmm4
+vpmuludq 352(%rsp),%xmm0,%xmm3
+paddq %xmm3,%xmm4
+vpmuludq 256(%rsp),%xmm1,%xmm3
+paddq %xmm3,%xmm6
+vpmuludq 384(%rsp),%xmm0,%xmm3
+paddq %xmm3,%xmm6
+vpmuludq 288(%rsp),%xmm1,%xmm3
+paddq %xmm3,%xmm7
+vpmuludq 416(%rsp),%xmm0,%xmm3
+paddq %xmm3,%xmm7
+vpmuludq 160(%rsp),%xmm1,%xmm3
+paddq %xmm3,%xmm11
+vpmuludq 432(%rsp),%xmm0,%xmm3
+paddq %xmm3,%xmm11
+vpmuludq 176(%rsp),%xmm1,%xmm1
+paddq %xmm1,%xmm2
+vpmuludq 304(%rsp),%xmm0,%xmm0
+paddq %xmm0,%xmm2
+vpunpckhqdq %xmm10,%xmm8,%xmm0
+movdqa %xmm0,64(%rsp)
+pshufd $0,%xmm8,%xmm0
+pshufd $0,%xmm10,%xmm1
+vpmuludq 192(%rsp),%xmm0,%xmm3
+paddq %xmm3,%xmm4
+vpmuludq 320(%rsp),%xmm1,%xmm3
+paddq %xmm3,%xmm4
+vpmuludq 224(%rsp),%xmm0,%xmm3
+paddq %xmm3,%xmm6
+vpmuludq 352(%rsp),%xmm1,%xmm3
+paddq %xmm3,%xmm6
+vpmuludq 256(%rsp),%xmm0,%xmm3
+paddq %xmm3,%xmm7
+vpmuludq 384(%rsp),%xmm1,%xmm3
+paddq %xmm3,%xmm7
+vpmuludq 288(%rsp),%xmm0,%xmm3
+paddq %xmm3,%xmm11
+vpmuludq 416(%rsp),%xmm1,%xmm3
+paddq %xmm3,%xmm11
+vpmuludq 160(%rsp),%xmm0,%xmm0
+paddq %xmm0,%xmm2
+vpmuludq 432(%rsp),%xmm1,%xmm0
+paddq %xmm0,%xmm2
+movdqa %xmm4,80(%rsp)
+movdqa %xmm6,96(%rsp)
+movdqa %xmm7,112(%rsp)
+movdqa %xmm11,448(%rsp)
+movdqa %xmm2,496(%rsp)
+movdqa 144(%rsp),%xmm0
+vpmuludq %xmm0,%xmm0,%xmm1
+paddq %xmm0,%xmm0
+movdqa 128(%rsp),%xmm2
+vpmuludq %xmm2,%xmm0,%xmm3
+movdqa 480(%rsp),%xmm4
+vpmuludq %xmm4,%xmm0,%xmm5
+movdqa 464(%rsp),%xmm6
+vpmuludq %xmm6,%xmm0,%xmm7
+movdqa 528(%rsp),%xmm8
+vpmuludq %xmm8,%xmm0,%xmm9
+vpmuludq 512(%rsp),%xmm0,%xmm10
+vpmuludq 592(%rsp),%xmm0,%xmm11
+vpmuludq 576(%rsp),%xmm0,%xmm12
+vpmuludq 624(%rsp),%xmm0,%xmm13
+movdqa 672(%rsp),%xmm14
+vpmuludq %xmm14,%xmm0,%xmm0
+vpmuludq v38_38(%rip),%xmm14,%xmm15
+vpmuludq %xmm15,%xmm14,%xmm14
+paddq %xmm14,%xmm13
+vpaddq %xmm6,%xmm6,%xmm14
+vpmuludq %xmm14,%xmm6,%xmm6
+paddq %xmm6,%xmm11
+vpaddq %xmm2,%xmm2,%xmm6
+vpmuludq %xmm6,%xmm2,%xmm2
+paddq %xmm2,%xmm5
+vpmuludq %xmm15,%xmm6,%xmm2
+paddq %xmm2,%xmm1
+vpmuludq %xmm15,%xmm4,%xmm2
+paddq %xmm2,%xmm3
+vpmuludq 544(%rsp),%xmm6,%xmm2
+paddq %xmm2,%xmm11
+vpmuludq 592(%rsp),%xmm6,%xmm2
+paddq %xmm2,%xmm12
+vpmuludq 640(%rsp),%xmm6,%xmm2
+paddq %xmm2,%xmm13
+vpmuludq 624(%rsp),%xmm6,%xmm2
+paddq %xmm2,%xmm0
+vpmuludq %xmm4,%xmm6,%xmm2
+paddq %xmm2,%xmm7
+vpmuludq %xmm14,%xmm6,%xmm2
+paddq %xmm2,%xmm9
+vpmuludq %xmm8,%xmm6,%xmm2
+paddq %xmm2,%xmm10
+vpmuludq %xmm15,%xmm14,%xmm2
+paddq %xmm2,%xmm5
+vpmuludq %xmm15,%xmm8,%xmm2
+paddq %xmm2,%xmm7
+vpmuludq %xmm4,%xmm4,%xmm2
+paddq %xmm2,%xmm9
+vpmuludq %xmm14,%xmm4,%xmm2
+paddq %xmm2,%xmm10
+vpaddq %xmm4,%xmm4,%xmm2
+vpmuludq %xmm8,%xmm2,%xmm4
+paddq %xmm4,%xmm11
+vpmuludq 688(%rsp),%xmm2,%xmm4
+paddq %xmm4,%xmm1
+vpmuludq 688(%rsp),%xmm14,%xmm4
+paddq %xmm4,%xmm3
+vpmuludq 512(%rsp),%xmm2,%xmm4
+paddq %xmm4,%xmm12
+vpmuludq 592(%rsp),%xmm2,%xmm4
+paddq %xmm4,%xmm13
+vpmuludq 576(%rsp),%xmm2,%xmm2
+paddq %xmm2,%xmm0
+vpmuludq 656(%rsp),%xmm8,%xmm2
+paddq %xmm2,%xmm3
+vpmuludq %xmm8,%xmm14,%xmm2
+paddq %xmm2,%xmm12
+vpmuludq %xmm8,%xmm8,%xmm2
+paddq %xmm2,%xmm13
+vpaddq %xmm8,%xmm8,%xmm2
+vpmuludq 688(%rsp),%xmm2,%xmm4
+paddq %xmm4,%xmm5
+vpmuludq 544(%rsp),%xmm15,%xmm4
+paddq %xmm4,%xmm9
+vpmuludq 592(%rsp),%xmm15,%xmm4
+paddq %xmm4,%xmm10
+vpmuludq 656(%rsp),%xmm14,%xmm4
+paddq %xmm4,%xmm1
+movdqa 544(%rsp),%xmm4
+pmuludq 688(%rsp),%xmm4
+paddq %xmm4,%xmm7
+vpmuludq 544(%rsp),%xmm14,%xmm4
+paddq %xmm4,%xmm13
+vpmuludq 592(%rsp),%xmm14,%xmm4
+paddq %xmm4,%xmm0
+vpmuludq 640(%rsp),%xmm15,%xmm4
+paddq %xmm4,%xmm11
+vpmuludq 624(%rsp),%xmm15,%xmm4
+paddq %xmm4,%xmm12
+movdqa 592(%rsp),%xmm4
+paddq %xmm4,%xmm4
+pmuludq 688(%rsp),%xmm4
+paddq %xmm4,%xmm9
+vpmuludq 608(%rsp),%xmm2,%xmm4
+paddq %xmm4,%xmm1
+movdqa 544(%rsp),%xmm4
+pmuludq 608(%rsp),%xmm4
+paddq %xmm4,%xmm3
+movdqa 544(%rsp),%xmm4
+pmuludq 656(%rsp),%xmm4
+paddq %xmm4,%xmm5
+movdqa 592(%rsp),%xmm4
+pmuludq 656(%rsp),%xmm4
+paddq %xmm4,%xmm7
+movdqa 640(%rsp),%xmm4
+pmuludq 688(%rsp),%xmm4
+paddq %xmm4,%xmm10
+vpmuludq 512(%rsp),%xmm2,%xmm2
+paddq %xmm2,%xmm0
+movdqa 560(%rsp),%xmm2
+pmuludq 512(%rsp),%xmm2
+paddq %xmm2,%xmm1
+movdqa 608(%rsp),%xmm2
+pmuludq 592(%rsp),%xmm2
+paddq %xmm2,%xmm5
+movdqa 656(%rsp),%xmm2
+pmuludq 576(%rsp),%xmm2
+paddq %xmm2,%xmm9
+movdqa 688(%rsp),%xmm2
+pmuludq 624(%rsp),%xmm2
+paddq %xmm2,%xmm11
+vpsrlq $26,%xmm1,%xmm2
+paddq %xmm2,%xmm3
+pand m26(%rip),%xmm1
+vpsrlq $25,%xmm10,%xmm2
+paddq %xmm2,%xmm11
+pand m25(%rip),%xmm10
+vpsrlq $25,%xmm3,%xmm2
+paddq %xmm2,%xmm5
+pand m25(%rip),%xmm3
+vpsrlq $26,%xmm11,%xmm2
+paddq %xmm2,%xmm12
+pand m26(%rip),%xmm11
+vpsrlq $26,%xmm5,%xmm2
+paddq %xmm2,%xmm7
+pand m26(%rip),%xmm5
+vpsrlq $25,%xmm12,%xmm2
+paddq %xmm2,%xmm13
+pand m25(%rip),%xmm12
+vpsrlq $25,%xmm7,%xmm2
+paddq %xmm2,%xmm9
+pand m25(%rip),%xmm7
+vpsrlq $26,%xmm13,%xmm2
+paddq %xmm2,%xmm0
+pand m26(%rip),%xmm13
+vpsrlq $26,%xmm9,%xmm2
+paddq %xmm2,%xmm10
+pand m26(%rip),%xmm9
+vpsrlq $25,%xmm0,%xmm2
+vpsllq $4,%xmm2,%xmm4
+paddq %xmm2,%xmm1
+psllq $1,%xmm2
+paddq %xmm2,%xmm4
+paddq %xmm4,%xmm1
+pand m25(%rip),%xmm0
+vpsrlq $25,%xmm10,%xmm2
+paddq %xmm2,%xmm11
+pand m25(%rip),%xmm10
+vpsrlq $26,%xmm1,%xmm2
+paddq %xmm2,%xmm3
+pand m26(%rip),%xmm1
+vpunpckhqdq %xmm3,%xmm1,%xmm2
+vpunpcklqdq %xmm3,%xmm1,%xmm1
+movdqa %xmm1,464(%rsp)
+vpaddq subc0(%rip),%xmm2,%xmm3
+psubq %xmm1,%xmm3
+vpunpckhqdq %xmm3,%xmm2,%xmm1
+vpunpcklqdq %xmm3,%xmm2,%xmm2
+movdqa %xmm2,480(%rsp)
+movdqa %xmm1,512(%rsp)
+psllq $1,%xmm1
+movdqa %xmm1,528(%rsp)
+pmuludq v121666_121666(%rip),%xmm3
+movdqa 80(%rsp),%xmm1
+vpunpcklqdq %xmm1,%xmm3,%xmm2
+vpunpckhqdq %xmm1,%xmm3,%xmm1
+vpunpckhqdq %xmm7,%xmm5,%xmm3
+vpunpcklqdq %xmm7,%xmm5,%xmm4
+movdqa %xmm4,544(%rsp)
+vpaddq subc2(%rip),%xmm3,%xmm5
+psubq %xmm4,%xmm5
+vpunpckhqdq %xmm5,%xmm3,%xmm4
+vpunpcklqdq %xmm5,%xmm3,%xmm3
+movdqa %xmm3,560(%rsp)
+movdqa %xmm4,576(%rsp)
+psllq $1,%xmm4
+movdqa %xmm4,592(%rsp)
+pmuludq v121666_121666(%rip),%xmm5
+movdqa 96(%rsp),%xmm3
+vpunpcklqdq %xmm3,%xmm5,%xmm4
+vpunpckhqdq %xmm3,%xmm5,%xmm3
+vpunpckhqdq %xmm10,%xmm9,%xmm5
+vpunpcklqdq %xmm10,%xmm9,%xmm6
+movdqa %xmm6,608(%rsp)
+vpaddq subc2(%rip),%xmm5,%xmm7
+psubq %xmm6,%xmm7
+vpunpckhqdq %xmm7,%xmm5,%xmm6
+vpunpcklqdq %xmm7,%xmm5,%xmm5
+movdqa %xmm5,624(%rsp)
+movdqa %xmm6,640(%rsp)
+psllq $1,%xmm6
+movdqa %xmm6,656(%rsp)
+pmuludq v121666_121666(%rip),%xmm7
+movdqa 112(%rsp),%xmm5
+vpunpcklqdq %xmm5,%xmm7,%xmm6
+vpunpckhqdq %xmm5,%xmm7,%xmm5
+vpunpckhqdq %xmm12,%xmm11,%xmm7
+vpunpcklqdq %xmm12,%xmm11,%xmm8
+movdqa %xmm8,672(%rsp)
+vpaddq subc2(%rip),%xmm7,%xmm9
+psubq %xmm8,%xmm9
+vpunpckhqdq %xmm9,%xmm7,%xmm8
+vpunpcklqdq %xmm9,%xmm7,%xmm7
+movdqa %xmm7,688(%rsp)
+movdqa %xmm8,704(%rsp)
+psllq $1,%xmm8
+movdqa %xmm8,720(%rsp)
+pmuludq v121666_121666(%rip),%xmm9
+movdqa 448(%rsp),%xmm7
+vpunpcklqdq %xmm7,%xmm9,%xmm8
+vpunpckhqdq %xmm7,%xmm9,%xmm7
+vpunpckhqdq %xmm0,%xmm13,%xmm9
+vpunpcklqdq %xmm0,%xmm13,%xmm0
+movdqa %xmm0,448(%rsp)
+vpaddq subc2(%rip),%xmm9,%xmm10
+psubq %xmm0,%xmm10
+vpunpckhqdq %xmm10,%xmm9,%xmm0
+vpunpcklqdq %xmm10,%xmm9,%xmm9
+movdqa %xmm9,736(%rsp)
+movdqa %xmm0,752(%rsp)
+psllq $1,%xmm0
+movdqa %xmm0,768(%rsp)
+pmuludq v121666_121666(%rip),%xmm10
+movdqa 496(%rsp),%xmm0
+vpunpcklqdq %xmm0,%xmm10,%xmm9
+vpunpckhqdq %xmm0,%xmm10,%xmm0
+vpsrlq $26,%xmm2,%xmm10
+paddq %xmm10,%xmm1
+pand m26(%rip),%xmm2
+vpsrlq $25,%xmm5,%xmm10
+paddq %xmm10,%xmm8
+pand m25(%rip),%xmm5
+vpsrlq $25,%xmm1,%xmm10
+paddq %xmm10,%xmm4
+pand m25(%rip),%xmm1
+vpsrlq $26,%xmm8,%xmm10
+paddq %xmm10,%xmm7
+pand m26(%rip),%xmm8
+vpsrlq $26,%xmm4,%xmm10
+paddq %xmm10,%xmm3
+pand m26(%rip),%xmm4
+vpsrlq $25,%xmm7,%xmm10
+paddq %xmm10,%xmm9
+pand m25(%rip),%xmm7
+vpsrlq $25,%xmm3,%xmm10
+paddq %xmm10,%xmm6
+pand m25(%rip),%xmm3
+vpsrlq $26,%xmm9,%xmm10
+paddq %xmm10,%xmm0
+pand m26(%rip),%xmm9
+vpsrlq $26,%xmm6,%xmm10
+paddq %xmm10,%xmm5
+pand m26(%rip),%xmm6
+vpsrlq $25,%xmm0,%xmm10
+vpsllq $4,%xmm10,%xmm11
+paddq %xmm10,%xmm2
+psllq $1,%xmm10
+paddq %xmm10,%xmm11
+paddq %xmm11,%xmm2
+pand m25(%rip),%xmm0
+vpsrlq $25,%xmm5,%xmm10
+paddq %xmm10,%xmm8
+pand m25(%rip),%xmm5
+vpsrlq $26,%xmm2,%xmm10
+paddq %xmm10,%xmm1
+pand m26(%rip),%xmm2
+vpunpckhqdq %xmm1,%xmm2,%xmm10
+movdqa %xmm10,80(%rsp)
+vpunpcklqdq %xmm1,%xmm2,%xmm1
+vpunpckhqdq %xmm3,%xmm4,%xmm2
+movdqa %xmm2,96(%rsp)
+vpunpcklqdq %xmm3,%xmm4,%xmm2
+vpunpckhqdq %xmm5,%xmm6,%xmm3
+movdqa %xmm3,112(%rsp)
+vpunpcklqdq %xmm5,%xmm6,%xmm3
+vpunpckhqdq %xmm7,%xmm8,%xmm4
+movdqa %xmm4,128(%rsp)
+vpunpcklqdq %xmm7,%xmm8,%xmm4
+vpunpckhqdq %xmm0,%xmm9,%xmm5
+movdqa %xmm5,144(%rsp)
+vpunpcklqdq %xmm0,%xmm9,%xmm0
+movdqa 464(%rsp),%xmm5
+paddq %xmm5,%xmm1
+vpunpcklqdq %xmm1,%xmm5,%xmm6
+vpunpckhqdq %xmm1,%xmm5,%xmm1
+vpmuludq 512(%rsp),%xmm6,%xmm5
+vpmuludq 480(%rsp),%xmm1,%xmm7
+paddq %xmm7,%xmm5
+vpmuludq 560(%rsp),%xmm6,%xmm7
+vpmuludq 528(%rsp),%xmm1,%xmm8
+paddq %xmm8,%xmm7
+vpmuludq 576(%rsp),%xmm6,%xmm8
+vpmuludq 560(%rsp),%xmm1,%xmm9
+paddq %xmm9,%xmm8
+vpmuludq 624(%rsp),%xmm6,%xmm9
+vpmuludq 592(%rsp),%xmm1,%xmm10
+paddq %xmm10,%xmm9
+vpmuludq 640(%rsp),%xmm6,%xmm10
+vpmuludq 624(%rsp),%xmm1,%xmm11
+paddq %xmm11,%xmm10
+vpmuludq 688(%rsp),%xmm6,%xmm11
+vpmuludq 656(%rsp),%xmm1,%xmm12
+paddq %xmm12,%xmm11
+vpmuludq 704(%rsp),%xmm6,%xmm12
+vpmuludq 688(%rsp),%xmm1,%xmm13
+paddq %xmm13,%xmm12
+vpmuludq 736(%rsp),%xmm6,%xmm13
+vpmuludq 720(%rsp),%xmm1,%xmm14
+paddq %xmm14,%xmm13
+vpmuludq 752(%rsp),%xmm6,%xmm14
+vpmuludq 736(%rsp),%xmm1,%xmm15
+paddq %xmm15,%xmm14
+vpmuludq 480(%rsp),%xmm6,%xmm6
+pmuludq v19_19(%rip),%xmm1
+vpmuludq 768(%rsp),%xmm1,%xmm1
+paddq %xmm1,%xmm6
+movdqa 544(%rsp),%xmm1
+paddq %xmm1,%xmm2
+vpunpcklqdq %xmm2,%xmm1,%xmm15
+vpunpckhqdq %xmm2,%xmm1,%xmm1
+vpmuludq 480(%rsp),%xmm15,%xmm2
+paddq %xmm2,%xmm7
+vpmuludq 512(%rsp),%xmm15,%xmm2
+paddq %xmm2,%xmm8
+vpmuludq 560(%rsp),%xmm15,%xmm2
+paddq %xmm2,%xmm9
+vpmuludq 576(%rsp),%xmm15,%xmm2
+paddq %xmm2,%xmm10
+vpmuludq 624(%rsp),%xmm15,%xmm2
+paddq %xmm2,%xmm11
+vpmuludq 640(%rsp),%xmm15,%xmm2
+paddq %xmm2,%xmm12
+vpmuludq 688(%rsp),%xmm15,%xmm2
+paddq %xmm2,%xmm13
+vpmuludq 704(%rsp),%xmm15,%xmm2
+paddq %xmm2,%xmm14
+pmuludq v19_19(%rip),%xmm15
+vpmuludq 736(%rsp),%xmm15,%xmm2
+paddq %xmm2,%xmm6
+pmuludq 752(%rsp),%xmm15
+paddq %xmm15,%xmm5
+vpmuludq 480(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm8
+vpmuludq 528(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm9
+vpmuludq 560(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm10
+vpmuludq 592(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm11
+vpmuludq 624(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm12
+vpmuludq 656(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm13
+vpmuludq 688(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm14
+pmuludq v19_19(%rip),%xmm1
+vpmuludq 720(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm6
+vpmuludq 736(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm5
+pmuludq 768(%rsp),%xmm1
+paddq %xmm1,%xmm7
+movdqa 608(%rsp),%xmm1
+paddq %xmm1,%xmm3
+vpunpcklqdq %xmm3,%xmm1,%xmm2
+vpunpckhqdq %xmm3,%xmm1,%xmm1
+vpmuludq 480(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm9
+vpmuludq 512(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm10
+vpmuludq 560(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm11
+vpmuludq 576(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm12
+vpmuludq 624(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm13
+vpmuludq 640(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm14
+pmuludq v19_19(%rip),%xmm2
+vpmuludq 688(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm6
+vpmuludq 704(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm5
+vpmuludq 736(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm7
+pmuludq 752(%rsp),%xmm2
+paddq %xmm2,%xmm8
+vpmuludq 480(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm10
+vpmuludq 528(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm11
+vpmuludq 560(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm12
+vpmuludq 592(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm13
+vpmuludq 624(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm14
+pmuludq v19_19(%rip),%xmm1
+vpmuludq 656(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm6
+vpmuludq 688(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm5
+vpmuludq 720(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm7
+vpmuludq 736(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm8
+pmuludq 768(%rsp),%xmm1
+paddq %xmm1,%xmm9
+movdqa 672(%rsp),%xmm1
+paddq %xmm1,%xmm4
+vpunpcklqdq %xmm4,%xmm1,%xmm2
+vpunpckhqdq %xmm4,%xmm1,%xmm1
+vpmuludq 480(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm11
+vpmuludq 512(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm12
+vpmuludq 560(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm13
+vpmuludq 576(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm14
+pmuludq v19_19(%rip),%xmm2
+vpmuludq 624(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm6
+vpmuludq 640(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm5
+vpmuludq 688(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm7
+vpmuludq 704(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm8
+vpmuludq 736(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm9
+pmuludq 752(%rsp),%xmm2
+paddq %xmm2,%xmm10
+vpmuludq 480(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm12
+vpmuludq 528(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm13
+vpmuludq 560(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm14
+pmuludq v19_19(%rip),%xmm1
+vpmuludq 592(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm6
+vpmuludq 624(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm5
+vpmuludq 656(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm7
+vpmuludq 688(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm8
+vpmuludq 720(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm9
+vpmuludq 736(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm10
+pmuludq 768(%rsp),%xmm1
+paddq %xmm1,%xmm11
+movdqa 448(%rsp),%xmm1
+paddq %xmm1,%xmm0
+vpunpcklqdq %xmm0,%xmm1,%xmm2
+vpunpckhqdq %xmm0,%xmm1,%xmm0
+vpmuludq 480(%rsp),%xmm2,%xmm1
+paddq %xmm1,%xmm13
+vpmuludq 512(%rsp),%xmm2,%xmm1
+paddq %xmm1,%xmm14
+pmuludq v19_19(%rip),%xmm2
+vpmuludq 560(%rsp),%xmm2,%xmm1
+paddq %xmm1,%xmm6
+vpmuludq 576(%rsp),%xmm2,%xmm1
+paddq %xmm1,%xmm5
+vpmuludq 624(%rsp),%xmm2,%xmm1
+paddq %xmm1,%xmm7
+vpmuludq 640(%rsp),%xmm2,%xmm1
+paddq %xmm1,%xmm8
+vpmuludq 688(%rsp),%xmm2,%xmm1
+paddq %xmm1,%xmm9
+vpmuludq 704(%rsp),%xmm2,%xmm1
+paddq %xmm1,%xmm10
+vpmuludq 736(%rsp),%xmm2,%xmm1
+paddq %xmm1,%xmm11
+pmuludq 752(%rsp),%xmm2
+paddq %xmm2,%xmm12
+vpmuludq 480(%rsp),%xmm0,%xmm1
+paddq %xmm1,%xmm14
+pmuludq v19_19(%rip),%xmm0
+vpmuludq 528(%rsp),%xmm0,%xmm1
+paddq %xmm1,%xmm6
+vpmuludq 560(%rsp),%xmm0,%xmm1
+paddq %xmm1,%xmm5
+vpmuludq 592(%rsp),%xmm0,%xmm1
+paddq %xmm1,%xmm7
+vpmuludq 624(%rsp),%xmm0,%xmm1
+paddq %xmm1,%xmm8
+vpmuludq 656(%rsp),%xmm0,%xmm1
+paddq %xmm1,%xmm9
+vpmuludq 688(%rsp),%xmm0,%xmm1
+paddq %xmm1,%xmm10
+vpmuludq 720(%rsp),%xmm0,%xmm1
+paddq %xmm1,%xmm11
+vpmuludq 736(%rsp),%xmm0,%xmm1
+paddq %xmm1,%xmm12
+pmuludq 768(%rsp),%xmm0
+paddq %xmm0,%xmm13
+vpsrlq $26,%xmm6,%xmm0
+paddq %xmm0,%xmm5
+pand m26(%rip),%xmm6
+vpsrlq $25,%xmm10,%xmm0
+paddq %xmm0,%xmm11
+pand m25(%rip),%xmm10
+vpsrlq $25,%xmm5,%xmm0
+paddq %xmm0,%xmm7
+pand m25(%rip),%xmm5
+vpsrlq $26,%xmm11,%xmm0
+paddq %xmm0,%xmm12
+pand m26(%rip),%xmm11
+vpsrlq $26,%xmm7,%xmm0
+paddq %xmm0,%xmm8
+pand m26(%rip),%xmm7
+vpsrlq $25,%xmm12,%xmm0
+paddq %xmm0,%xmm13
+pand m25(%rip),%xmm12
+vpsrlq $25,%xmm8,%xmm0
+paddq %xmm0,%xmm9
+pand m25(%rip),%xmm8
+vpsrlq $26,%xmm13,%xmm0
+paddq %xmm0,%xmm14
+pand m26(%rip),%xmm13
+vpsrlq $26,%xmm9,%xmm0
+paddq %xmm0,%xmm10
+pand m26(%rip),%xmm9
+vpsrlq $25,%xmm14,%xmm0
+vpsllq $4,%xmm0,%xmm1
+paddq %xmm0,%xmm6
+psllq $1,%xmm0
+paddq %xmm0,%xmm1
+paddq %xmm1,%xmm6
+pand m25(%rip),%xmm14
+vpsrlq $25,%xmm10,%xmm0
+paddq %xmm0,%xmm11
+pand m25(%rip),%xmm10
+vpsrlq $26,%xmm6,%xmm0
+paddq %xmm0,%xmm5
+pand m26(%rip),%xmm6
+vpunpckhqdq %xmm5,%xmm6,%xmm1
+vpunpcklqdq %xmm5,%xmm6,%xmm0
+vpunpckhqdq %xmm8,%xmm7,%xmm3
+vpunpcklqdq %xmm8,%xmm7,%xmm2
+vpunpckhqdq %xmm10,%xmm9,%xmm5
+vpunpcklqdq %xmm10,%xmm9,%xmm4
+vpunpckhqdq %xmm12,%xmm11,%xmm7
+vpunpcklqdq %xmm12,%xmm11,%xmm6
+vpunpckhqdq %xmm14,%xmm13,%xmm9
+vpunpcklqdq %xmm14,%xmm13,%xmm8
+cmp $0,%rdx
+jne ._loop_ladder
+movdqu %xmm1,160(%rdi)
+movdqu %xmm0,80(%rdi)
+movdqu %xmm3,176(%rdi)
+movdqu %xmm2,96(%rdi)
+movdqu %xmm5,192(%rdi)
+movdqu %xmm4,112(%rdi)
+movdqu %xmm7,208(%rdi)
+movdqu %xmm6,128(%rdi)
+movdqu %xmm9,224(%rdi)
+movdqu %xmm8,144(%rdi)
+movq 1824(%rsp),%r11
+movq 1832(%rsp),%r12
+movq 1840(%rsp),%r13
+movq 1848(%rsp),%r14
+add %r11,%rsp
+ret
+FN_END ladder_avx
+
+.p2align 5
+GLOBAL_HIDDEN_FN_EXT fe51_pack_avx,2,0
+fe51_pack_avx_local:
+mov %rsp,%r11
+and $31,%r11
+add $32,%r11
+sub %r11,%rsp
+movq %r11,0(%rsp)
+movq %r12,8(%rsp)
+movq 0(%rsi),%rdx
+movq 8(%rsi),%rcx
+movq 16(%rsi),%r8
+movq 24(%rsi),%r9
+movq 32(%rsi),%rsi
+movq REDMASK51(%rip),%rax
+lea -18(%rax),%r10
+mov $3,%r11
+._reduceloop:
+mov %rdx,%r12
+shr $51,%r12
+and %rax,%rdx
+add %r12,%rcx
+mov %rcx,%r12
+shr $51,%r12
+and %rax,%rcx
+add %r12,%r8
+mov %r8,%r12
+shr $51,%r12
+and %rax,%r8
+add %r12,%r9
+mov %r9,%r12
+shr $51,%r12
+and %rax,%r9
+add %r12,%rsi
+mov %rsi,%r12
+shr $51,%r12
+and %rax,%rsi
+imulq $19, %r12,%r12
+add %r12,%rdx
+sub $1,%r11
+ja ._reduceloop
+mov $1,%r12
+cmp %r10,%rdx
+cmovl %r11,%r12
+cmp %rax,%rcx
+cmovne %r11,%r12
+cmp %rax,%r8
+cmovne %r11,%r12
+cmp %rax,%r9
+cmovne %r11,%r12
+cmp %rax,%rsi
+cmovne %r11,%r12
+neg %r12
+and %r12,%rax
+and %r12,%r10
+sub %r10,%rdx
+sub %rax,%rcx
+sub %rax,%r8
+sub %rax,%r9
+sub %rax,%rsi
+mov %rdx,%rax
+and $0xFF,%eax
+movb %al,0(%rdi)
+mov %rdx,%rax
+shr $8,%rax
+and $0xFF,%eax
+movb %al,1(%rdi)
+mov %rdx,%rax
+shr $16,%rax
+and $0xFF,%eax
+movb %al,2(%rdi)
+mov %rdx,%rax
+shr $24,%rax
+and $0xFF,%eax
+movb %al,3(%rdi)
+mov %rdx,%rax
+shr $32,%rax
+and $0xFF,%eax
+movb %al,4(%rdi)
+mov %rdx,%rax
+shr $40,%rax
+and $0xFF,%eax
+movb %al,5(%rdi)
+mov %rdx,%rdx
+shr $48,%rdx
+mov %rcx,%rax
+shl $3,%rax
+and $0xF8,%eax
+xor %rdx,%rax
+movb %al,6(%rdi)
+mov %rcx,%rdx
+shr $5,%rdx
+and $0xFF,%edx
+movb %dl,7(%rdi)
+mov %rcx,%rdx
+shr $13,%rdx
+and $0xFF,%edx
+movb %dl,8(%rdi)
+mov %rcx,%rdx
+shr $21,%rdx
+and $0xFF,%edx
+movb %dl,9(%rdi)
+mov %rcx,%rdx
+shr $29,%rdx
+and $0xFF,%edx
+movb %dl,10(%rdi)
+mov %rcx,%rdx
+shr $37,%rdx
+and $0xFF,%edx
+movb %dl,11(%rdi)
+mov %rcx,%rdx
+shr $45,%rdx
+mov %r8,%rcx
+shl $6,%rcx
+and $0xC0,%ecx
+xor %rdx,%rcx
+movb %cl,12(%rdi)
+mov %r8,%rdx
+shr $2,%rdx
+and $0xFF,%edx
+movb %dl,13(%rdi)
+mov %r8,%rdx
+shr $10,%rdx
+and $0xFF,%edx
+movb %dl,14(%rdi)
+mov %r8,%rdx
+shr $18,%rdx
+and $0xFF,%edx
+movb %dl,15(%rdi)
+mov %r8,%rdx
+shr $26,%rdx
+and $0xFF,%edx
+movb %dl,16(%rdi)
+mov %r8,%rdx
+shr $34,%rdx
+and $0xFF,%edx
+movb %dl,17(%rdi)
+mov %r8,%rdx
+shr $42,%rdx
+movb %dl,18(%rdi)
+mov %r8,%rdx
+shr $50,%rdx
+mov %r9,%rcx
+shl $1,%rcx
+and $0xFE,%ecx
+xor %rdx,%rcx
+movb %cl,19(%rdi)
+mov %r9,%rdx
+shr $7,%rdx
+and $0xFF,%edx
+movb %dl,20(%rdi)
+mov %r9,%rdx
+shr $15,%rdx
+and $0xFF,%edx
+movb %dl,21(%rdi)
+mov %r9,%rdx
+shr $23,%rdx
+and $0xFF,%edx
+movb %dl,22(%rdi)
+mov %r9,%rdx
+shr $31,%rdx
+and $0xFF,%edx
+movb %dl,23(%rdi)
+mov %r9,%rdx
+shr $39,%rdx
+and $0xFF,%edx
+movb %dl,24(%rdi)
+mov %r9,%rdx
+shr $47,%rdx
+mov %rsi,%rcx
+shl $4,%rcx
+and $0xF0,%ecx
+xor %rdx,%rcx
+movb %cl,25(%rdi)
+mov %rsi,%rdx
+shr $4,%rdx
+and $0xFF,%edx
+movb %dl,26(%rdi)
+mov %rsi,%rdx
+shr $12,%rdx
+and $0xFF,%edx
+movb %dl,27(%rdi)
+mov %rsi,%rdx
+shr $20,%rdx
+and $0xFF,%edx
+movb %dl,28(%rdi)
+mov %rsi,%rdx
+shr $28,%rdx
+and $0xFF,%edx
+movb %dl,29(%rdi)
+mov %rsi,%rdx
+shr $36,%rdx
+and $0xFF,%edx
+movb %dl,30(%rdi)
+mov %rsi,%rsi
+shr $44,%rsi
+movb %sil,31(%rdi)
+movq 0(%rsp),%r11
+movq 8(%rsp),%r12
+add %r11,%rsp
+ret
+FN_END fe51_pack_avx
+
+.p2align 5
+GLOBAL_HIDDEN_FN_EXT fe51_mul_avx,3,0
+fe51_mul_avx_local:
+mov %rsp,%r11
+and $31,%r11
+add $96,%r11
+sub %r11,%rsp
+movq %r11,0(%rsp)
+movq %r12,8(%rsp)
+movq %r13,16(%rsp)
+movq %r14,24(%rsp)
+movq %r15,32(%rsp)
+movq %rbx,40(%rsp)
+movq %rbp,48(%rsp)
+movq %rdi,56(%rsp)
+mov %rdx,%rcx
+movq 24(%rsi),%rdx
+imulq $19,%rdx,%rax
+movq %rax,64(%rsp)
+mulq 16(%rcx)
+mov %rax,%r8
+mov %rdx,%r9
+movq 32(%rsi),%rdx
+imulq $19,%rdx,%rax
+movq %rax,72(%rsp)
+mulq 8(%rcx)
+add %rax,%r8
+adc %rdx,%r9
+movq 0(%rsi),%rax
+mulq 0(%rcx)
+add %rax,%r8
+adc %rdx,%r9
+movq 0(%rsi),%rax
+mulq 8(%rcx)
+mov %rax,%r10
+mov %rdx,%r11
+movq 0(%rsi),%rax
+mulq 16(%rcx)
+mov %rax,%r12
+mov %rdx,%r13
+movq 0(%rsi),%rax
+mulq 24(%rcx)
+mov %rax,%r14
+mov %rdx,%r15
+movq 0(%rsi),%rax
+mulq 32(%rcx)
+mov %rax,%rbx
+mov %rdx,%rbp
+movq 8(%rsi),%rax
+mulq 0(%rcx)
+add %rax,%r10
+adc %rdx,%r11
+movq 8(%rsi),%rax
+mulq 8(%rcx)
+add %rax,%r12
+adc %rdx,%r13
+movq 8(%rsi),%rax
+mulq 16(%rcx)
+add %rax,%r14
+adc %rdx,%r15
+movq 8(%rsi),%rax
+mulq 24(%rcx)
+add %rax,%rbx
+adc %rdx,%rbp
+movq 8(%rsi),%rdx
+imulq $19,%rdx,%rax
+mulq 32(%rcx)
+add %rax,%r8
+adc %rdx,%r9
+movq 16(%rsi),%rax
+mulq 0(%rcx)
+add %rax,%r12
+adc %rdx,%r13
+movq 16(%rsi),%rax
+mulq 8(%rcx)
+add %rax,%r14
+adc %rdx,%r15
+movq 16(%rsi),%rax
+mulq 16(%rcx)
+add %rax,%rbx
+adc %rdx,%rbp
+movq 16(%rsi),%rdx
+imulq $19,%rdx,%rax
+mulq 24(%rcx)
+add %rax,%r8
+adc %rdx,%r9
+movq 16(%rsi),%rdx
+imulq $19,%rdx,%rax
+mulq 32(%rcx)
+add %rax,%r10
+adc %rdx,%r11
+movq 24(%rsi),%rax
+mulq 0(%rcx)
+add %rax,%r14
+adc %rdx,%r15
+movq 24(%rsi),%rax
+mulq 8(%rcx)
+add %rax,%rbx
+adc %rdx,%rbp
+movq 64(%rsp),%rax
+mulq 24(%rcx)
+add %rax,%r10
+adc %rdx,%r11
+movq 64(%rsp),%rax
+mulq 32(%rcx)
+add %rax,%r12
+adc %rdx,%r13
+movq 32(%rsi),%rax
+mulq 0(%rcx)
+add %rax,%rbx
+adc %rdx,%rbp
+movq 72(%rsp),%rax
+mulq 16(%rcx)
+add %rax,%r10
+adc %rdx,%r11
+movq 72(%rsp),%rax
+mulq 24(%rcx)
+add %rax,%r12
+adc %rdx,%r13
+movq 72(%rsp),%rax
+mulq 32(%rcx)
+add %rax,%r14
+adc %rdx,%r15
+movq REDMASK51(%rip),%rsi
+shld $13,%r8,%r9
+and %rsi,%r8
+shld $13,%r10,%r11
+and %rsi,%r10
+add %r9,%r10
+shld $13,%r12,%r13
+and %rsi,%r12
+add %r11,%r12
+shld $13,%r14,%r15
+and %rsi,%r14
+add %r13,%r14
+shld $13,%rbx,%rbp
+and %rsi,%rbx
+add %r15,%rbx
+imulq $19,%rbp,%rdx
+add %rdx,%r8
+mov %r8,%rdx
+shr $51,%rdx
+add %r10,%rdx
+mov %rdx,%rcx
+shr $51,%rdx
+and %rsi,%r8
+add %r12,%rdx
+mov %rdx,%r9
+shr $51,%rdx
+and %rsi,%rcx
+add %r14,%rdx
+mov %rdx,%rax
+shr $51,%rdx
+and %rsi,%r9
+add %rbx,%rdx
+mov %rdx,%r10
+shr $51,%rdx
+and %rsi,%rax
+imulq $19,%rdx,%rdx
+add %rdx,%r8
+and %rsi,%r10
+movq %r8,0(%rdi)
+movq %rcx,8(%rdi)
+movq %r9,16(%rdi)
+movq %rax,24(%rdi)
+movq %r10,32(%rdi)
+movq 0(%rsp),%r11
+movq 8(%rsp),%r12
+movq 16(%rsp),%r13
+movq 24(%rsp),%r14
+movq 32(%rsp),%r15
+movq 40(%rsp),%rbx
+movq 48(%rsp),%rbp
+add %r11,%rsp
+mov %rdi,%rax
+mov %rsi,%rdx
+ret
+FN_END fe51_mul_avx
+
+.p2align 5
+GLOBAL_HIDDEN_FN_EXT fe51_nsquare_avx,4,0
+fe51_nsquare_avx_local:
+mov %rsp,%r11
+and $31,%r11
+add $64,%r11
+sub %r11,%rsp
+movq %r11,0(%rsp)
+movq %r12,8(%rsp)
+movq %r13,16(%rsp)
+movq %r14,24(%rsp)
+movq %r15,32(%rsp)
+movq %rbx,40(%rsp)
+movq %rbp,48(%rsp)
+movq 0(%rsi),%rcx
+movq 8(%rsi),%r8
+movq 16(%rsi),%r9
+movq 24(%rsi),%rax
+movq 32(%rsi),%rsi
+movq %r9,16(%rdi)
+movq %rax,24(%rdi)
+movq %rsi,32(%rdi)
+mov %rdx,%rsi
+._loop_nsquare:
+sub $1,%rsi
+mov %rcx,%rax
+mul %rcx
+add %rcx,%rcx
+mov %rax,%r9
+mov %rdx,%r10
+mov %rcx,%rax
+mul %r8
+mov %rax,%r11
+mov %rdx,%r12
+mov %rcx,%rax
+mulq 16(%rdi)
+mov %rax,%r13
+mov %rdx,%r14
+mov %rcx,%rax
+mulq 24(%rdi)
+mov %rax,%r15
+mov %rdx,%rbx
+mov %rcx,%rax
+mulq 32(%rdi)
+mov %rax,%rcx
+mov %rdx,%rbp
+mov %r8,%rax
+mul %r8
+add %r8,%r8
+add %rax,%r13
+adc %rdx,%r14
+mov %r8,%rax
+mulq 16(%rdi)
+add %rax,%r15
+adc %rdx,%rbx
+mov %r8,%rax
+imulq $19, %r8,%r8
+mulq 24(%rdi)
+add %rax,%rcx
+adc %rdx,%rbp
+mov %r8,%rax
+mulq 32(%rdi)
+add %rax,%r9
+adc %rdx,%r10
+movq 16(%rdi),%rax
+mulq 16(%rdi)
+add %rax,%rcx
+adc %rdx,%rbp
+shld $13,%rcx,%rbp
+movq 16(%rdi),%rax
+imulq $38, %rax,%rax
+mulq 24(%rdi)
+add %rax,%r9
+adc %rdx,%r10
+shld $13,%r9,%r10
+movq 16(%rdi),%rax
+imulq $38, %rax,%rax
+mulq 32(%rdi)
+add %rax,%r11
+adc %rdx,%r12
+movq 24(%rdi),%rax
+imulq $19, %rax,%rax
+mulq 24(%rdi)
+add %rax,%r11
+adc %rdx,%r12
+shld $13,%r11,%r12
+movq 24(%rdi),%rax
+imulq $38, %rax,%rax
+mulq 32(%rdi)
+add %rax,%r13
+adc %rdx,%r14
+shld $13,%r13,%r14
+movq 32(%rdi),%rax
+imulq $19, %rax,%rax
+mulq 32(%rdi)
+add %rax,%r15
+adc %rdx,%rbx
+shld $13,%r15,%rbx
+movq REDMASK51(%rip),%rdx
+and %rdx,%rcx
+add %rbx,%rcx
+and %rdx,%r9
+and %rdx,%r11
+add %r10,%r11
+and %rdx,%r13
+add %r12,%r13
+and %rdx,%r15
+add %r14,%r15
+imulq $19, %rbp,%rbp
+lea (%r9,%rbp),%r9
+mov %r9,%rax
+shr $51,%r9
+add %r11,%r9
+and %rdx,%rax
+mov %r9,%r8
+shr $51,%r9
+add %r13,%r9
+and %rdx,%r8
+mov %r9,%r10
+shr $51,%r9
+add %r15,%r9
+and %rdx,%r10
+movq %r10,16(%rdi)
+mov %r9,%r10
+shr $51,%r9
+add %rcx,%r9
+and %rdx,%r10
+movq %r10,24(%rdi)
+mov %r9,%r10
+shr $51,%r9
+imulq $19, %r9,%r9
+lea (%rax,%r9),%rcx
+and %rdx,%r10
+movq %r10,32(%rdi)
+cmp $0,%rsi
+jne ._loop_nsquare
+movq %rcx,0(%rdi)
+movq %r8,8(%rdi)
+movq 0(%rsp),%r11
+movq 8(%rsp),%r12
+movq 16(%rsp),%r13
+movq 24(%rsp),%r14
+movq 32(%rsp),%r15
+movq 40(%rsp),%rbx
+movq 48(%rsp),%rbp
+add %r11,%rsp
+ret
+FN_END fe51_nsquare_avx
\ No newline at end of file
diff --git a/src/libcryptobox/curve25519/avx.c b/src/libcryptobox/curve25519/avx.c
new file mode 100644
index 000000000..615b48111
--- /dev/null
+++ b/src/libcryptobox/curve25519/avx.c
@@ -0,0 +1,207 @@
+/*
+ * Copyright (c) 2015, Tung Chou
+ * Copyright (c) 2015, Vsevolod Stakhov
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *       * Redistributions of source code must retain the above copyright
+ *         notice, this list of conditions and the following disclaimer.
+ *       * Redistributions in binary form must reproduce the above copyright
+ *         notice, this list of conditions and the following disclaimer in the
+ *         documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY AUTHOR ''AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL AUTHOR BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "cryptobox.h"
+#include "curve25519.h"
+#include "platform_config.h"
+
+typedef struct {
+	guint64 v[5];
+} fe51;
+typedef guint64 fe[10];
+
+extern void ladder_avx (fe *var, const guchar *p);
+extern void fe51_mul_avx (fe51 *a, const fe51 *b, const fe51 *c);
+extern void fe51_pack_avx (guchar *out, const fe51 *var);
+extern void fe51_nsquare_avx (fe51 *a, const fe51 *b, gint n);
+
+
+static guint64 load_3 (const unsigned char *in)
+{
+	guint64 result;
+	result = (guint64) in[0];
+	result |= ((guint64) in[1]) << 8;
+	result |= ((guint64) in[2]) << 16;
+	return result;
+}
+
+static guint64 load_4 (const unsigned char *in)
+{
+	guint64 result;
+	result = (guint64) in[0];
+	result |= ((guint64) in[1]) << 8;
+	result |= ((guint64) in[2]) << 16;
+	result |= ((guint64) in[3]) << 24;
+	return result;
+}
+
+void fe_frombytes (fe h, const unsigned char *s)
+{
+	guint64 h0 = load_4 (s);
+	guint64 h1 = load_3 (s + 4) << 6;
+	guint64 h2 = load_3 (s + 7) << 5;
+	guint64 h3 = load_3 (s + 10) << 3;
+	guint64 h4 = load_3 (s + 13) << 2;
+	guint64 h5 = load_4 (s + 16);
+	guint64 h6 = load_3 (s + 20) << 7;
+	guint64 h7 = load_3 (s + 23) << 5;
+	guint64 h8 = load_3 (s + 26) << 4;
+	guint64 h9 = load_3 (s + 29) << 2;
+	guint64 carry0;
+	guint64 carry1;
+	guint64 carry2;
+	guint64 carry3;
+	guint64 carry4;
+	guint64 carry5;
+	guint64 carry6;
+	guint64 carry7;
+	guint64 carry8;
+	guint64 carry9;
+
+	carry9 = h9 >> 25;
+	h0 += carry9 * 19;
+	h9 &= 0x1FFFFFF;
+	carry1 = h1 >> 25;
+	h2 += carry1;
+	h1 &= 0x1FFFFFF;
+	carry3 = h3 >> 25;
+	h4 += carry3;
+	h3 &= 0x1FFFFFF;
+	carry5 = h5 >> 25;
+	h6 += carry5;
+	h5 &= 0x1FFFFFF;
+	carry7 = h7 >> 25;
+	h8 += carry7;
+	h7 &= 0x1FFFFFF;
+
+	carry0 = h0 >> 26;
+	h1 += carry0;
+	h0 &= 0x3FFFFFF;
+	carry2 = h2 >> 26;
+	h3 += carry2;
+	h2 &= 0x3FFFFFF;
+	carry4 = h4 >> 26;
+	h5 += carry4;
+	h4 &= 0x3FFFFFF;
+	carry6 = h6 >> 26;
+	h7 += carry6;
+	h6 &= 0x3FFFFFF;
+	carry8 = h8 >> 26;
+	h9 += carry8;
+	h8 &= 0x3FFFFFF;
+
+	h[0] = h0;
+	h[1] = h1;
+	h[2] = h2;
+	h[3] = h3;
+	h[4] = h4;
+	h[5] = h5;
+	h[6] = h6;
+	h[7] = h7;
+	h[8] = h8;
+	h[9] = h9;
+}
+
+#define fe51_square(x, y) fe51_nsquare_avx(x, y, 1)
+
+void fe51_invert (fe51 *r, const fe51 *x)
+{
+	fe51 z2;
+	fe51 z9;
+	fe51 z11;
+	fe51 z2_5_0;
+	fe51 z2_10_0;
+	fe51 z2_20_0;
+	fe51 z2_50_0;
+	fe51 z2_100_0;
+	fe51 t;
+
+	/* 2 */ fe51_square (&z2, x);
+	/* 4 */ fe51_square (&t, &z2);
+	/* 8 */ fe51_square (&t, &t);
+	/* 9 */ fe51_mul_avx (&z9, &t, x);
+	/* 11 */ fe51_mul_avx (&z11, &z9, &z2);
+	/* 22 */ fe51_square (&t, &z11);
+	/* 2^5 - 2^0 = 31 */ fe51_mul_avx (&z2_5_0, &t, &z9);
+
+	/* 2^10 - 2^5 */ fe51_nsquare_avx (&t, &z2_5_0, 5);
+	/* 2^10 - 2^0 */ fe51_mul_avx (&z2_10_0, &t, &z2_5_0);
+
+	/* 2^20 - 2^10 */ fe51_nsquare_avx (&t, &z2_10_0, 10);
+	/* 2^20 - 2^0 */ fe51_mul_avx (&z2_20_0, &t, &z2_10_0);
+
+	/* 2^40 - 2^20 */ fe51_nsquare_avx (&t, &z2_20_0, 20);
+	/* 2^40 - 2^0 */ fe51_mul_avx (&t, &t, &z2_20_0);
+
+	/* 2^50 - 2^10 */ fe51_nsquare_avx (&t, &t, 10);
+	/* 2^50 - 2^0 */ fe51_mul_avx (&z2_50_0, &t, &z2_10_0);
+
+	/* 2^100 - 2^50 */ fe51_nsquare_avx (&t, &z2_50_0, 50);
+	/* 2^100 - 2^0 */ fe51_mul_avx (&z2_100_0, &t, &z2_50_0);
+
+	/* 2^200 - 2^100 */ fe51_nsquare_avx (&t, &z2_100_0, 100);
+	/* 2^200 - 2^0 */ fe51_mul_avx (&t, &t, &z2_100_0);
+
+	/* 2^250 - 2^50 */ fe51_nsquare_avx (&t, &t, 50);
+	/* 2^250 - 2^0 */ fe51_mul_avx (&t, &t, &z2_50_0);
+
+	/* 2^255 - 2^5 */ fe51_nsquare_avx (&t, &t, 5);
+	/* 2^255 - 21 */ fe51_mul_avx (r, &t, &z11);
+}
+
+#define x1 var[0]
+#define x2 var[1]
+#define z2 var[2]
+
+void
+scalarmult_avx (unsigned char *q,
+		const unsigned char *n,
+		const unsigned char *p)
+{
+	fe var[3];
+	fe51 x_51;
+	fe51 z_51;
+
+	fe_frombytes (x1, p);
+
+	ladder_avx (var, n);
+
+	z_51.v[0] = (z2[1] << 26) + z2[0];
+	z_51.v[1] = (z2[3] << 26) + z2[2];
+	z_51.v[2] = (z2[5] << 26) + z2[4];
+	z_51.v[3] = (z2[7] << 26) + z2[6];
+	z_51.v[4] = (z2[9] << 26) + z2[8];
+
+	x_51.v[0] = (x2[1] << 26) + x2[0];
+	x_51.v[1] = (x2[3] << 26) + x2[2];
+	x_51.v[2] = (x2[5] << 26) + x2[4];
+	x_51.v[3] = (x2[7] << 26) + x2[6];
+	x_51.v[4] = (x2[9] << 26) + x2[8];
+
+	fe51_invert (&z_51, &z_51);
+	fe51_mul_avx (&x_51, &x_51, &z_51);
+	fe51_pack_avx (q, &x_51);
+}
diff --git a/src/libcryptobox/curve25519/constants.S b/src/libcryptobox/curve25519/constants.S
new file mode 100644
index 000000000..054837d01
--- /dev/null
+++ b/src/libcryptobox/curve25519/constants.S
@@ -0,0 +1,32 @@
+SECTION_RODATA
+
+.globl v0_0
+.globl v1_0
+.globl v2_1
+.globl v19_19
+.globl v38_1
+.globl v38_38
+.globl v121666_121666
+.globl m25
+.globl m26
+.globl subc0
+.globl subc2
+
+.globl REDMASK51
+
+.p2align 4
+
+v0_0: .quad 0, 0
+v1_0: .quad 1, 0
+v2_1: .quad 2, 1
+v19_19: .quad 19, 19
+v38_1: .quad 38, 1
+v38_38: .quad 38, 38
+v121666_121666: .quad 121666, 121666
+m25: .quad 33554431, 33554431
+m26: .quad 67108863, 67108863
+
+subc0: .quad 0x07FFFFDA, 0x03FFFFFE
+subc2: .quad 0x07FFFFFE, 0x03FFFFFE
+
+REDMASK51: .quad 0x0007FFFFFFFFFFFF
-- 
2.39.5
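
Reviewer note (not part of the patch): the sketch below shows how the new
scalarmult_avx() entry point would typically be driven. It is a hypothetical
harness, not code from this patch: the x25519_clamp() and example() names and
the secret-key handling are mine, and it assumes the caller performs the
standard X25519 scalar clamping, as the wrapper in
libcryptobox/curve25519/curve25519.c is expected to do (scalarmult_avx itself
does not clamp; ladder_avx consumes the scalar bits as given).

	#include <string.h>

	typedef unsigned char guchar;	/* stand-in for the glib typedef used in avx.c */

	/* provided by avx.c in this patch */
	extern void scalarmult_avx (guchar *q, const guchar *n, const guchar *p);

	static void
	x25519_clamp (guchar k[32])
	{
		k[0] &= 248;	/* clear low 3 bits: force a multiple of the cofactor 8 */
		k[31] &= 127;	/* clear bit 255 */
		k[31] |= 64;	/* set bit 254: fixed top bit, matches the 255-iteration ladder */
	}

	static void
	example (const guchar secret[32], guchar pub[32])
	{
		static const guchar basepoint[32] = { 9 };	/* standard curve25519 base point */
		guchar n[32];

		memcpy (n, secret, sizeof (n));
		x25519_clamp (n);
		/* pub = n * basepoint; a shared secret is the same call with the
		 * peer's public key in place of basepoint */
		scalarmult_avx (pub, n, basepoint);
	}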