author     Vsevolod Stakhov <vsevolod@highsecure.ru>   2016-01-22 14:05:11 +0000
committer  Vsevolod Stakhov <vsevolod@highsecure.ru>   2016-01-22 14:05:11 +0000
commit     8ebb57a8352974d1236582ceabab44806fa8e93e (patch)
tree       e959143c77828d5ff638fed673534a63a681e70d /src/libcryptobox
parent     e0e7601c62ed7f06b4cc47288a29e77f87a49d59 (diff)
Faster curve25519 key generation via Edwards isomorphism
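
The change targets key generation: a Curve25519 public key is pk = X25519(clamp(sk), B), where B is the point with x-coordinate 9 on y^2 = x^3 + 486662*x^2 + x over GF(2^255 - 19). Because B never varies, each backend can expose a dedicated fixed-base entry point instead of routing the constant basepoint through the generic scalarmult. A minimal sketch of the resulting call-site change (the full context is in the cryptobox.c hunk below; guchar and the declarations come from curve25519.h):

/* before: generic ladder against the constant base point (x = 9) */
curve25519 (pk, sk, curve25519_basepoint);

/* after: dedicated fixed-base ladder, same result */
curve25519_base (pk, sk);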
Diffstat (limited to 'src/libcryptobox')
-rw-r--r--  src/libcryptobox/cryptobox.c                        |    2
-rw-r--r--  src/libcryptobox/curve25519/avx.S                   | 1278
-rw-r--r--  src/libcryptobox/curve25519/avx.c                   |   41
-rw-r--r--  src/libcryptobox/curve25519/constants.S             |    4
-rw-r--r--  src/libcryptobox/curve25519/curve25519-donna-c64.c  |    8
-rw-r--r--  src/libcryptobox/curve25519/curve25519-donna.c      |    8
-rw-r--r--  src/libcryptobox/curve25519/curve25519.c            |   14
-rw-r--r--  src/libcryptobox/curve25519/curve25519.h            |    2
-rw-r--r--  src/libcryptobox/curve25519/ref.c                   |    7
9 files changed, 1360 insertions, 4 deletions
diff --git a/src/libcryptobox/cryptobox.c b/src/libcryptobox/cryptobox.c
index 038c94adb..37b8704df 100644
--- a/src/libcryptobox/cryptobox.c
+++ b/src/libcryptobox/cryptobox.c
@@ -255,7 +255,7 @@ rspamd_cryptobox_keypair (rspamd_pk_t pk, rspamd_sk_t sk)
sk[31] &= 127;
sk[31] |= 64;
- curve25519 (pk, sk, curve25519_basepoint);
+ curve25519_base (pk, sk);
}
else {
#ifndef HAVE_USABLE_OPENSSL
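
The surviving context lines in the hunk above are the tail of the standard X25519 scalar clamping. A self-contained sketch of the full clamp; the first mask is an assumption, sitting just above the quoted hunk in the original file:

/* X25519 scalar clamping (the sk[0] mask is assumed from context) */
static void clamp_scalar (unsigned char sk[32])
{
    sk[0]  &= 248;   /* clear low 3 bits: scalar is a multiple of the cofactor 8 */
    sk[31] &= 127;   /* clear bit 255 */
    sk[31] |= 64;    /* set bit 254 so the ladder always runs full length */
}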
diff --git a/src/libcryptobox/curve25519/avx.S b/src/libcryptobox/curve25519/avx.S
index 9a49f1c8a..c671cd3e8 100644
--- a/src/libcryptobox/curve25519/avx.S
+++ b/src/libcryptobox/curve25519/avx.S
@@ -1422,6 +1422,1282 @@ ret
FN_END ladder_avx
.p2align 5
+GLOBAL_HIDDEN_FN_EXT ladder_base_avx,2,0
+ladder_base_avx_local:
+mov %rsp,%r11
+and $31,%r11
+add $1568,%r11
+sub %r11,%rsp
+movq %r11,1536(%rsp)
+movq %r12,1544(%rsp)
+movq %r13,1552(%rsp)
+movdqa v0_0(%rip),%xmm0
+movdqa v1_0(%rip),%xmm1
+movdqa v9_0(%rip),%xmm2
+movdqa %xmm2,0(%rsp)
+movdqa %xmm0,16(%rsp)
+movdqa %xmm0,32(%rsp)
+movdqa %xmm0,48(%rsp)
+movdqa %xmm0,64(%rsp)
+movdqa %xmm1,80(%rsp)
+movdqa %xmm0,96(%rsp)
+movdqa %xmm0,112(%rsp)
+movdqa %xmm0,128(%rsp)
+movdqa %xmm0,144(%rsp)
+movdqa %xmm1,%xmm0
+pxor %xmm1,%xmm1
+pxor %xmm2,%xmm2
+pxor %xmm3,%xmm3
+pxor %xmm4,%xmm4
+pxor %xmm5,%xmm5
+pxor %xmm6,%xmm6
+pxor %xmm7,%xmm7
+pxor %xmm8,%xmm8
+pxor %xmm9,%xmm9
+movq 0(%rsi),%rdx
+movq 8(%rsi),%rcx
+movq 16(%rsi),%r8
+movq 24(%rsi),%r9
+shrd $1,%rcx,%rdx
+shrd $1,%r8,%rcx
+shrd $1,%r9,%r8
+shr $1,%r9
+xorq 0(%rsi),%rdx
+xorq 8(%rsi),%rcx
+xorq 16(%rsi),%r8
+xorq 24(%rsi),%r9
+leaq 512(%rsp),%rsi
+mov $64,%rax
+._ladder_base_small_loop:
+mov %rdx,%r10
+mov %rcx,%r11
+mov %r8,%r12
+mov %r9,%r13
+shr $1,%rdx
+shr $1,%rcx
+shr $1,%r8
+shr $1,%r9
+and $1,%r10d
+and $1,%r11d
+and $1,%r12d
+and $1,%r13d
+neg %r10
+neg %r11
+neg %r12
+neg %r13
+movl %r10d,0(%rsi)
+movl %r11d,256(%rsi)
+movl %r12d,512(%rsi)
+movl %r13d,768(%rsi)
+add $4,%rsi
+sub $1,%rax
+jne ._ladder_base_small_loop
+mov $255,%rdx
+add $760,%rsi
+._ladder_base_loop:
+sub $1,%rdx
+vbroadcastss 0(%rsi),%xmm10
+sub $4,%rsi
+movdqa 0(%rsp),%xmm11
+movdqa 80(%rsp),%xmm12
+vpxor %xmm11,%xmm0,%xmm13
+pand %xmm10,%xmm13
+pxor %xmm13,%xmm0
+pxor %xmm13,%xmm11
+vpxor %xmm12,%xmm1,%xmm13
+pand %xmm10,%xmm13
+pxor %xmm13,%xmm1
+pxor %xmm13,%xmm12
+movdqa 16(%rsp),%xmm13
+movdqa 96(%rsp),%xmm14
+vpxor %xmm13,%xmm2,%xmm15
+pand %xmm10,%xmm15
+pxor %xmm15,%xmm2
+pxor %xmm15,%xmm13
+vpxor %xmm14,%xmm3,%xmm15
+pand %xmm10,%xmm15
+pxor %xmm15,%xmm3
+pxor %xmm15,%xmm14
+movdqa %xmm13,0(%rsp)
+movdqa %xmm14,16(%rsp)
+movdqa 32(%rsp),%xmm13
+movdqa 112(%rsp),%xmm14
+vpxor %xmm13,%xmm4,%xmm15
+pand %xmm10,%xmm15
+pxor %xmm15,%xmm4
+pxor %xmm15,%xmm13
+vpxor %xmm14,%xmm5,%xmm15
+pand %xmm10,%xmm15
+pxor %xmm15,%xmm5
+pxor %xmm15,%xmm14
+movdqa %xmm13,32(%rsp)
+movdqa %xmm14,80(%rsp)
+movdqa 48(%rsp),%xmm13
+movdqa 128(%rsp),%xmm14
+vpxor %xmm13,%xmm6,%xmm15
+pand %xmm10,%xmm15
+pxor %xmm15,%xmm6
+pxor %xmm15,%xmm13
+vpxor %xmm14,%xmm7,%xmm15
+pand %xmm10,%xmm15
+pxor %xmm15,%xmm7
+pxor %xmm15,%xmm14
+movdqa %xmm13,48(%rsp)
+movdqa %xmm14,96(%rsp)
+movdqa 64(%rsp),%xmm13
+movdqa 144(%rsp),%xmm14
+vpxor %xmm13,%xmm8,%xmm15
+pand %xmm10,%xmm15
+pxor %xmm15,%xmm8
+pxor %xmm15,%xmm13
+vpxor %xmm14,%xmm9,%xmm15
+pand %xmm10,%xmm15
+pxor %xmm15,%xmm9
+pxor %xmm15,%xmm14
+movdqa %xmm13,64(%rsp)
+movdqa %xmm14,112(%rsp)
+vpaddq subc0(%rip),%xmm11,%xmm10
+psubq %xmm12,%xmm10
+paddq %xmm12,%xmm11
+vpunpckhqdq %xmm10,%xmm11,%xmm12
+vpunpcklqdq %xmm10,%xmm11,%xmm10
+vpaddq %xmm1,%xmm0,%xmm11
+paddq subc0(%rip),%xmm0
+psubq %xmm1,%xmm0
+vpunpckhqdq %xmm11,%xmm0,%xmm1
+vpunpcklqdq %xmm11,%xmm0,%xmm0
+vpmuludq %xmm0,%xmm10,%xmm11
+vpmuludq %xmm1,%xmm10,%xmm13
+movdqa %xmm1,128(%rsp)
+paddq %xmm1,%xmm1
+vpmuludq %xmm0,%xmm12,%xmm14
+movdqa %xmm0,144(%rsp)
+paddq %xmm14,%xmm13
+vpmuludq %xmm1,%xmm12,%xmm0
+movdqa %xmm1,160(%rsp)
+vpaddq %xmm3,%xmm2,%xmm1
+paddq subc2(%rip),%xmm2
+psubq %xmm3,%xmm2
+vpunpckhqdq %xmm1,%xmm2,%xmm3
+vpunpcklqdq %xmm1,%xmm2,%xmm1
+vpmuludq %xmm1,%xmm10,%xmm2
+paddq %xmm2,%xmm0
+vpmuludq %xmm3,%xmm10,%xmm2
+movdqa %xmm3,176(%rsp)
+paddq %xmm3,%xmm3
+vpmuludq %xmm1,%xmm12,%xmm14
+movdqa %xmm1,192(%rsp)
+paddq %xmm14,%xmm2
+vpmuludq %xmm3,%xmm12,%xmm1
+movdqa %xmm3,208(%rsp)
+vpaddq %xmm5,%xmm4,%xmm3
+paddq subc2(%rip),%xmm4
+psubq %xmm5,%xmm4
+vpunpckhqdq %xmm3,%xmm4,%xmm5
+vpunpcklqdq %xmm3,%xmm4,%xmm3
+vpmuludq %xmm3,%xmm10,%xmm4
+paddq %xmm4,%xmm1
+vpmuludq %xmm5,%xmm10,%xmm4
+movdqa %xmm5,224(%rsp)
+paddq %xmm5,%xmm5
+vpmuludq %xmm3,%xmm12,%xmm14
+movdqa %xmm3,240(%rsp)
+paddq %xmm14,%xmm4
+vpaddq %xmm7,%xmm6,%xmm3
+paddq subc2(%rip),%xmm6
+psubq %xmm7,%xmm6
+vpunpckhqdq %xmm3,%xmm6,%xmm7
+vpunpcklqdq %xmm3,%xmm6,%xmm3
+vpmuludq %xmm3,%xmm10,%xmm6
+vpmuludq %xmm5,%xmm12,%xmm14
+movdqa %xmm5,256(%rsp)
+pmuludq v19_19(%rip),%xmm5
+movdqa %xmm5,272(%rsp)
+paddq %xmm14,%xmm6
+vpmuludq %xmm7,%xmm10,%xmm5
+movdqa %xmm7,288(%rsp)
+paddq %xmm7,%xmm7
+vpmuludq %xmm3,%xmm12,%xmm14
+movdqa %xmm3,304(%rsp)
+paddq %xmm14,%xmm5
+pmuludq v19_19(%rip),%xmm3
+movdqa %xmm3,320(%rsp)
+vpaddq %xmm9,%xmm8,%xmm3
+paddq subc2(%rip),%xmm8
+psubq %xmm9,%xmm8
+vpunpckhqdq %xmm3,%xmm8,%xmm9
+vpunpcklqdq %xmm3,%xmm8,%xmm3
+movdqa %xmm3,336(%rsp)
+vpmuludq %xmm7,%xmm12,%xmm8
+movdqa %xmm7,352(%rsp)
+pmuludq v19_19(%rip),%xmm7
+movdqa %xmm7,368(%rsp)
+vpmuludq %xmm3,%xmm10,%xmm7
+paddq %xmm7,%xmm8
+vpmuludq %xmm9,%xmm10,%xmm7
+movdqa %xmm9,384(%rsp)
+paddq %xmm9,%xmm9
+vpmuludq %xmm3,%xmm12,%xmm10
+paddq %xmm10,%xmm7
+pmuludq v19_19(%rip),%xmm3
+movdqa %xmm3,400(%rsp)
+pmuludq v19_19(%rip),%xmm12
+vpmuludq %xmm9,%xmm12,%xmm3
+movdqa %xmm9,416(%rsp)
+paddq %xmm3,%xmm11
+movdqa 0(%rsp),%xmm3
+movdqa 16(%rsp),%xmm9
+vpaddq subc2(%rip),%xmm3,%xmm10
+psubq %xmm9,%xmm10
+paddq %xmm9,%xmm3
+vpunpckhqdq %xmm10,%xmm3,%xmm9
+vpunpcklqdq %xmm10,%xmm3,%xmm3
+vpmuludq 144(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm0
+vpmuludq 128(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm2
+vpmuludq 192(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm1
+vpmuludq 176(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm4
+vpmuludq 240(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm6
+vpmuludq 224(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm5
+vpmuludq 304(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm8
+vpmuludq 288(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm7
+pmuludq v19_19(%rip),%xmm3
+vpmuludq 336(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm11
+pmuludq 384(%rsp),%xmm3
+paddq %xmm3,%xmm13
+vpmuludq 144(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm2
+vpmuludq 160(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm1
+vpmuludq 192(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm4
+vpmuludq 208(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm6
+vpmuludq 240(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm5
+vpmuludq 256(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm8
+vpmuludq 304(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm7
+pmuludq v19_19(%rip),%xmm9
+vpmuludq 352(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm11
+vpmuludq 336(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm13
+pmuludq 416(%rsp),%xmm9
+paddq %xmm9,%xmm0
+movdqa 32(%rsp),%xmm3
+movdqa 80(%rsp),%xmm9
+vpaddq subc2(%rip),%xmm3,%xmm10
+psubq %xmm9,%xmm10
+paddq %xmm9,%xmm3
+vpunpckhqdq %xmm10,%xmm3,%xmm9
+vpunpcklqdq %xmm10,%xmm3,%xmm3
+vpmuludq 144(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm1
+vpmuludq 128(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm4
+vpmuludq 192(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm6
+vpmuludq 176(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm5
+vpmuludq 240(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm8
+vpmuludq 224(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm7
+pmuludq v19_19(%rip),%xmm3
+vpmuludq 304(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm11
+vpmuludq 288(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm13
+vpmuludq 336(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm0
+pmuludq 384(%rsp),%xmm3
+paddq %xmm3,%xmm2
+vpmuludq 144(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm4
+vpmuludq 160(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm6
+vpmuludq 192(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm5
+vpmuludq 208(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm8
+vpmuludq 240(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm7
+pmuludq v19_19(%rip),%xmm9
+vpmuludq 256(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm11
+vpmuludq 304(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm13
+vpmuludq 352(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm0
+vpmuludq 336(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm2
+pmuludq 416(%rsp),%xmm9
+paddq %xmm9,%xmm1
+movdqa 48(%rsp),%xmm3
+movdqa 96(%rsp),%xmm9
+vpaddq subc2(%rip),%xmm3,%xmm10
+psubq %xmm9,%xmm10
+paddq %xmm9,%xmm3
+vpunpckhqdq %xmm10,%xmm3,%xmm9
+vpunpcklqdq %xmm10,%xmm3,%xmm3
+vpmuludq 144(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm6
+vpmuludq 128(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm5
+vpmuludq 192(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm8
+vpmuludq 176(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm7
+pmuludq v19_19(%rip),%xmm3
+vpmuludq 240(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm11
+vpmuludq 224(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm13
+vpmuludq 304(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm0
+vpmuludq 288(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm2
+vpmuludq 336(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm1
+pmuludq 384(%rsp),%xmm3
+paddq %xmm3,%xmm4
+vpmuludq 144(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm5
+vpmuludq 160(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm8
+vpmuludq 192(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm7
+pmuludq v19_19(%rip),%xmm9
+vpmuludq 208(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm11
+vpmuludq 240(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm13
+vpmuludq 256(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm0
+vpmuludq 304(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm2
+vpmuludq 352(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm1
+vpmuludq 336(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm4
+pmuludq 416(%rsp),%xmm9
+paddq %xmm9,%xmm6
+movdqa 64(%rsp),%xmm3
+movdqa 112(%rsp),%xmm9
+vpaddq subc2(%rip),%xmm3,%xmm10
+psubq %xmm9,%xmm10
+paddq %xmm9,%xmm3
+vpunpckhqdq %xmm10,%xmm3,%xmm9
+vpunpcklqdq %xmm10,%xmm3,%xmm3
+vpmuludq 144(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm8
+vpmuludq 128(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm7
+pmuludq v19_19(%rip),%xmm3
+vpmuludq 192(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm11
+vpmuludq 176(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm13
+vpmuludq 240(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm0
+vpmuludq 224(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm2
+vpmuludq 304(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm1
+vpmuludq 288(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm4
+vpmuludq 336(%rsp),%xmm3,%xmm10
+paddq %xmm10,%xmm6
+pmuludq 384(%rsp),%xmm3
+paddq %xmm3,%xmm5
+vpmuludq 144(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm7
+pmuludq v19_19(%rip),%xmm9
+vpmuludq 160(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm11
+vpmuludq 192(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm13
+vpmuludq 208(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm0
+vpmuludq 240(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm2
+vpmuludq 256(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm1
+vpmuludq 304(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm4
+vpmuludq 352(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm6
+vpmuludq 336(%rsp),%xmm9,%xmm3
+paddq %xmm3,%xmm5
+pmuludq 416(%rsp),%xmm9
+paddq %xmm9,%xmm8
+vpsrlq $25,%xmm4,%xmm3
+paddq %xmm3,%xmm6
+pand m25(%rip),%xmm4
+vpsrlq $26,%xmm11,%xmm3
+paddq %xmm3,%xmm13
+pand m26(%rip),%xmm11
+vpsrlq $26,%xmm6,%xmm3
+paddq %xmm3,%xmm5
+pand m26(%rip),%xmm6
+vpsrlq $25,%xmm13,%xmm3
+paddq %xmm3,%xmm0
+pand m25(%rip),%xmm13
+vpsrlq $25,%xmm5,%xmm3
+paddq %xmm3,%xmm8
+pand m25(%rip),%xmm5
+vpsrlq $26,%xmm0,%xmm3
+paddq %xmm3,%xmm2
+pand m26(%rip),%xmm0
+vpsrlq $26,%xmm8,%xmm3
+paddq %xmm3,%xmm7
+pand m26(%rip),%xmm8
+vpsrlq $25,%xmm2,%xmm3
+paddq %xmm3,%xmm1
+pand m25(%rip),%xmm2
+vpsrlq $25,%xmm7,%xmm3
+vpsllq $4,%xmm3,%xmm9
+paddq %xmm3,%xmm11
+psllq $1,%xmm3
+paddq %xmm3,%xmm9
+paddq %xmm9,%xmm11
+pand m25(%rip),%xmm7
+vpsrlq $26,%xmm1,%xmm3
+paddq %xmm3,%xmm4
+pand m26(%rip),%xmm1
+vpsrlq $26,%xmm11,%xmm3
+paddq %xmm3,%xmm13
+pand m26(%rip),%xmm11
+vpsrlq $25,%xmm4,%xmm3
+paddq %xmm3,%xmm6
+pand m25(%rip),%xmm4
+vpunpcklqdq %xmm13,%xmm11,%xmm3
+vpunpckhqdq %xmm13,%xmm11,%xmm9
+vpaddq subc0(%rip),%xmm9,%xmm10
+psubq %xmm3,%xmm10
+paddq %xmm9,%xmm3
+vpunpckhqdq %xmm3,%xmm10,%xmm9
+punpcklqdq %xmm3,%xmm10
+vpmuludq %xmm10,%xmm10,%xmm3
+paddq %xmm10,%xmm10
+vpmuludq %xmm9,%xmm10,%xmm11
+vpunpcklqdq %xmm2,%xmm0,%xmm12
+vpunpckhqdq %xmm2,%xmm0,%xmm0
+vpaddq subc2(%rip),%xmm0,%xmm2
+psubq %xmm12,%xmm2
+paddq %xmm0,%xmm12
+vpunpckhqdq %xmm12,%xmm2,%xmm0
+punpcklqdq %xmm12,%xmm2
+vpmuludq %xmm2,%xmm10,%xmm12
+vpaddq %xmm9,%xmm9,%xmm13
+vpmuludq %xmm13,%xmm9,%xmm9
+paddq %xmm9,%xmm12
+vpmuludq %xmm0,%xmm10,%xmm9
+vpmuludq %xmm2,%xmm13,%xmm14
+paddq %xmm14,%xmm9
+vpunpcklqdq %xmm4,%xmm1,%xmm14
+vpunpckhqdq %xmm4,%xmm1,%xmm1
+vpaddq subc2(%rip),%xmm1,%xmm4
+psubq %xmm14,%xmm4
+paddq %xmm1,%xmm14
+vpunpckhqdq %xmm14,%xmm4,%xmm1
+punpcklqdq %xmm14,%xmm4
+movdqa %xmm1,0(%rsp)
+paddq %xmm1,%xmm1
+movdqa %xmm1,16(%rsp)
+pmuludq v19_19(%rip),%xmm1
+movdqa %xmm1,32(%rsp)
+vpmuludq %xmm4,%xmm10,%xmm1
+vpmuludq %xmm2,%xmm2,%xmm14
+paddq %xmm14,%xmm1
+vpmuludq 0(%rsp),%xmm10,%xmm14
+vpmuludq %xmm4,%xmm13,%xmm15
+paddq %xmm15,%xmm14
+vpunpcklqdq %xmm5,%xmm6,%xmm15
+vpunpckhqdq %xmm5,%xmm6,%xmm5
+vpaddq subc2(%rip),%xmm5,%xmm6
+psubq %xmm15,%xmm6
+paddq %xmm5,%xmm15
+vpunpckhqdq %xmm15,%xmm6,%xmm5
+punpcklqdq %xmm15,%xmm6
+movdqa %xmm6,48(%rsp)
+pmuludq v19_19(%rip),%xmm6
+movdqa %xmm6,64(%rsp)
+movdqa %xmm5,80(%rsp)
+pmuludq v38_38(%rip),%xmm5
+movdqa %xmm5,96(%rsp)
+vpmuludq 48(%rsp),%xmm10,%xmm5
+vpaddq %xmm0,%xmm0,%xmm6
+vpmuludq %xmm6,%xmm0,%xmm0
+paddq %xmm0,%xmm5
+vpmuludq 80(%rsp),%xmm10,%xmm0
+vpmuludq %xmm4,%xmm6,%xmm15
+paddq %xmm15,%xmm0
+vpmuludq %xmm6,%xmm13,%xmm15
+paddq %xmm15,%xmm1
+vpmuludq %xmm6,%xmm2,%xmm15
+paddq %xmm15,%xmm14
+vpunpcklqdq %xmm7,%xmm8,%xmm15
+vpunpckhqdq %xmm7,%xmm8,%xmm7
+vpaddq subc2(%rip),%xmm7,%xmm8
+psubq %xmm15,%xmm8
+paddq %xmm7,%xmm15
+vpunpckhqdq %xmm15,%xmm8,%xmm7
+punpcklqdq %xmm15,%xmm8
+movdqa %xmm8,112(%rsp)
+pmuludq v19_19(%rip),%xmm8
+movdqa %xmm8,160(%rsp)
+vpmuludq 112(%rsp),%xmm10,%xmm8
+vpmuludq %xmm7,%xmm10,%xmm10
+vpmuludq v38_38(%rip),%xmm7,%xmm15
+vpmuludq %xmm15,%xmm7,%xmm7
+paddq %xmm7,%xmm8
+vpmuludq %xmm15,%xmm13,%xmm7
+paddq %xmm7,%xmm3
+vpmuludq %xmm15,%xmm2,%xmm7
+paddq %xmm7,%xmm11
+vpmuludq 80(%rsp),%xmm13,%xmm7
+paddq %xmm7,%xmm7
+paddq %xmm7,%xmm8
+vpmuludq 16(%rsp),%xmm13,%xmm7
+paddq %xmm7,%xmm5
+vpmuludq 48(%rsp),%xmm13,%xmm7
+paddq %xmm7,%xmm0
+vpmuludq 112(%rsp),%xmm13,%xmm7
+paddq %xmm7,%xmm10
+vpmuludq %xmm15,%xmm6,%xmm7
+paddq %xmm7,%xmm12
+vpmuludq %xmm15,%xmm4,%xmm7
+paddq %xmm7,%xmm9
+vpaddq %xmm2,%xmm2,%xmm2
+vpmuludq %xmm4,%xmm2,%xmm7
+paddq %xmm7,%xmm5
+vpmuludq 160(%rsp),%xmm2,%xmm7
+paddq %xmm7,%xmm3
+vpmuludq 160(%rsp),%xmm6,%xmm7
+paddq %xmm7,%xmm11
+vpmuludq 0(%rsp),%xmm2,%xmm7
+paddq %xmm7,%xmm0
+vpmuludq 48(%rsp),%xmm2,%xmm7
+paddq %xmm7,%xmm8
+vpmuludq 80(%rsp),%xmm2,%xmm2
+paddq %xmm2,%xmm10
+vpmuludq 96(%rsp),%xmm4,%xmm2
+paddq %xmm2,%xmm11
+vpmuludq %xmm4,%xmm4,%xmm2
+paddq %xmm2,%xmm8
+vpaddq %xmm4,%xmm4,%xmm2
+vpmuludq 160(%rsp),%xmm2,%xmm4
+paddq %xmm4,%xmm12
+vpmuludq 16(%rsp),%xmm15,%xmm4
+paddq %xmm4,%xmm1
+vpmuludq 48(%rsp),%xmm15,%xmm4
+paddq %xmm4,%xmm14
+vpmuludq 96(%rsp),%xmm6,%xmm4
+paddq %xmm4,%xmm3
+movdqa 16(%rsp),%xmm4
+pmuludq 160(%rsp),%xmm4
+paddq %xmm4,%xmm9
+vpmuludq 16(%rsp),%xmm6,%xmm4
+paddq %xmm4,%xmm8
+vpmuludq 48(%rsp),%xmm6,%xmm4
+paddq %xmm4,%xmm10
+vpmuludq 80(%rsp),%xmm15,%xmm4
+paddq %xmm4,%xmm4
+paddq %xmm4,%xmm5
+vpmuludq 112(%rsp),%xmm15,%xmm4
+paddq %xmm4,%xmm0
+movdqa 48(%rsp),%xmm4
+paddq %xmm4,%xmm4
+pmuludq 160(%rsp),%xmm4
+paddq %xmm4,%xmm1
+movdqa 80(%rsp),%xmm4
+paddq %xmm4,%xmm4
+pmuludq 160(%rsp),%xmm4
+paddq %xmm4,%xmm14
+vpmuludq 64(%rsp),%xmm2,%xmm4
+paddq %xmm4,%xmm3
+movdqa 16(%rsp),%xmm4
+pmuludq 64(%rsp),%xmm4
+paddq %xmm4,%xmm11
+movdqa 16(%rsp),%xmm4
+pmuludq 96(%rsp),%xmm4
+paddq %xmm4,%xmm12
+movdqa 48(%rsp),%xmm4
+pmuludq 96(%rsp),%xmm4
+paddq %xmm4,%xmm9
+vpmuludq 0(%rsp),%xmm2,%xmm2
+paddq %xmm2,%xmm10
+movdqa 32(%rsp),%xmm2
+pmuludq 0(%rsp),%xmm2
+paddq %xmm2,%xmm3
+movdqa 64(%rsp),%xmm2
+pmuludq 48(%rsp),%xmm2
+paddq %xmm2,%xmm12
+movdqa 96(%rsp),%xmm2
+pmuludq 80(%rsp),%xmm2
+paddq %xmm2,%xmm1
+movdqa 160(%rsp),%xmm2
+pmuludq 112(%rsp),%xmm2
+paddq %xmm2,%xmm5
+vpsrlq $26,%xmm3,%xmm2
+paddq %xmm2,%xmm11
+pand m26(%rip),%xmm3
+vpsrlq $25,%xmm14,%xmm2
+paddq %xmm2,%xmm5
+pand m25(%rip),%xmm14
+vpsrlq $25,%xmm11,%xmm2
+paddq %xmm2,%xmm12
+pand m25(%rip),%xmm11
+vpsrlq $26,%xmm5,%xmm2
+paddq %xmm2,%xmm0
+pand m26(%rip),%xmm5
+vpsrlq $26,%xmm12,%xmm2
+paddq %xmm2,%xmm9
+pand m26(%rip),%xmm12
+vpsrlq $25,%xmm0,%xmm2
+paddq %xmm2,%xmm8
+pand m25(%rip),%xmm0
+vpsrlq $25,%xmm9,%xmm2
+paddq %xmm2,%xmm1
+pand m25(%rip),%xmm9
+vpsrlq $26,%xmm8,%xmm2
+paddq %xmm2,%xmm10
+pand m26(%rip),%xmm8
+vpsrlq $26,%xmm1,%xmm2
+paddq %xmm2,%xmm14
+pand m26(%rip),%xmm1
+vpsrlq $25,%xmm10,%xmm2
+vpsllq $4,%xmm2,%xmm4
+paddq %xmm2,%xmm3
+psllq $1,%xmm2
+paddq %xmm2,%xmm4
+paddq %xmm4,%xmm3
+pand m25(%rip),%xmm10
+vpsrlq $25,%xmm14,%xmm2
+paddq %xmm2,%xmm5
+pand m25(%rip),%xmm14
+vpsrlq $26,%xmm3,%xmm2
+paddq %xmm2,%xmm11
+pand m26(%rip),%xmm3
+vpunpckhqdq %xmm11,%xmm3,%xmm2
+movdqa %xmm2,0(%rsp)
+vpunpcklqdq %xmm11,%xmm3,%xmm2
+pmuludq v9_9(%rip),%xmm2
+movdqa %xmm2,80(%rsp)
+vpunpckhqdq %xmm9,%xmm12,%xmm2
+movdqa %xmm2,16(%rsp)
+vpunpcklqdq %xmm9,%xmm12,%xmm2
+pmuludq v9_9(%rip),%xmm2
+movdqa %xmm2,96(%rsp)
+vpunpckhqdq %xmm14,%xmm1,%xmm2
+movdqa %xmm2,32(%rsp)
+vpunpcklqdq %xmm14,%xmm1,%xmm1
+pmuludq v9_9(%rip),%xmm1
+movdqa %xmm1,112(%rsp)
+vpunpckhqdq %xmm0,%xmm5,%xmm1
+movdqa %xmm1,48(%rsp)
+vpunpcklqdq %xmm0,%xmm5,%xmm0
+pmuludq v9_9(%rip),%xmm0
+movdqa %xmm0,160(%rsp)
+vpunpckhqdq %xmm10,%xmm8,%xmm0
+movdqa %xmm0,64(%rsp)
+vpunpcklqdq %xmm10,%xmm8,%xmm0
+pmuludq v9_9(%rip),%xmm0
+movdqa %xmm0,208(%rsp)
+movdqa 144(%rsp),%xmm0
+vpmuludq %xmm0,%xmm0,%xmm1
+paddq %xmm0,%xmm0
+movdqa 128(%rsp),%xmm2
+vpmuludq %xmm2,%xmm0,%xmm3
+movdqa 192(%rsp),%xmm4
+vpmuludq %xmm4,%xmm0,%xmm5
+movdqa 176(%rsp),%xmm6
+vpmuludq %xmm6,%xmm0,%xmm7
+movdqa 240(%rsp),%xmm8
+vpmuludq %xmm8,%xmm0,%xmm9
+vpmuludq 224(%rsp),%xmm0,%xmm10
+vpmuludq 304(%rsp),%xmm0,%xmm11
+vpmuludq 288(%rsp),%xmm0,%xmm12
+vpmuludq 336(%rsp),%xmm0,%xmm13
+movdqa 384(%rsp),%xmm14
+vpmuludq %xmm14,%xmm0,%xmm0
+vpmuludq v38_38(%rip),%xmm14,%xmm15
+vpmuludq %xmm15,%xmm14,%xmm14
+paddq %xmm14,%xmm13
+vpaddq %xmm6,%xmm6,%xmm14
+vpmuludq %xmm14,%xmm6,%xmm6
+paddq %xmm6,%xmm11
+vpaddq %xmm2,%xmm2,%xmm6
+vpmuludq %xmm6,%xmm2,%xmm2
+paddq %xmm2,%xmm5
+vpmuludq %xmm15,%xmm6,%xmm2
+paddq %xmm2,%xmm1
+vpmuludq %xmm15,%xmm4,%xmm2
+paddq %xmm2,%xmm3
+vpmuludq 256(%rsp),%xmm6,%xmm2
+paddq %xmm2,%xmm11
+vpmuludq 304(%rsp),%xmm6,%xmm2
+paddq %xmm2,%xmm12
+vpmuludq 352(%rsp),%xmm6,%xmm2
+paddq %xmm2,%xmm13
+vpmuludq 336(%rsp),%xmm6,%xmm2
+paddq %xmm2,%xmm0
+vpmuludq %xmm4,%xmm6,%xmm2
+paddq %xmm2,%xmm7
+vpmuludq %xmm14,%xmm6,%xmm2
+paddq %xmm2,%xmm9
+vpmuludq %xmm8,%xmm6,%xmm2
+paddq %xmm2,%xmm10
+vpmuludq %xmm15,%xmm14,%xmm2
+paddq %xmm2,%xmm5
+vpmuludq %xmm15,%xmm8,%xmm2
+paddq %xmm2,%xmm7
+vpmuludq %xmm4,%xmm4,%xmm2
+paddq %xmm2,%xmm9
+vpmuludq %xmm14,%xmm4,%xmm2
+paddq %xmm2,%xmm10
+vpaddq %xmm4,%xmm4,%xmm2
+vpmuludq %xmm8,%xmm2,%xmm4
+paddq %xmm4,%xmm11
+vpmuludq 400(%rsp),%xmm2,%xmm4
+paddq %xmm4,%xmm1
+vpmuludq 400(%rsp),%xmm14,%xmm4
+paddq %xmm4,%xmm3
+vpmuludq 224(%rsp),%xmm2,%xmm4
+paddq %xmm4,%xmm12
+vpmuludq 304(%rsp),%xmm2,%xmm4
+paddq %xmm4,%xmm13
+vpmuludq 288(%rsp),%xmm2,%xmm2
+paddq %xmm2,%xmm0
+vpmuludq 368(%rsp),%xmm8,%xmm2
+paddq %xmm2,%xmm3
+vpmuludq %xmm8,%xmm14,%xmm2
+paddq %xmm2,%xmm12
+vpmuludq %xmm8,%xmm8,%xmm2
+paddq %xmm2,%xmm13
+vpaddq %xmm8,%xmm8,%xmm2
+vpmuludq 400(%rsp),%xmm2,%xmm4
+paddq %xmm4,%xmm5
+vpmuludq 256(%rsp),%xmm15,%xmm4
+paddq %xmm4,%xmm9
+vpmuludq 304(%rsp),%xmm15,%xmm4
+paddq %xmm4,%xmm10
+vpmuludq 368(%rsp),%xmm14,%xmm4
+paddq %xmm4,%xmm1
+movdqa 256(%rsp),%xmm4
+pmuludq 400(%rsp),%xmm4
+paddq %xmm4,%xmm7
+vpmuludq 256(%rsp),%xmm14,%xmm4
+paddq %xmm4,%xmm13
+vpmuludq 304(%rsp),%xmm14,%xmm4
+paddq %xmm4,%xmm0
+vpmuludq 352(%rsp),%xmm15,%xmm4
+paddq %xmm4,%xmm11
+vpmuludq 336(%rsp),%xmm15,%xmm4
+paddq %xmm4,%xmm12
+movdqa 304(%rsp),%xmm4
+paddq %xmm4,%xmm4
+pmuludq 400(%rsp),%xmm4
+paddq %xmm4,%xmm9
+vpmuludq 320(%rsp),%xmm2,%xmm4
+paddq %xmm4,%xmm1
+movdqa 256(%rsp),%xmm4
+pmuludq 320(%rsp),%xmm4
+paddq %xmm4,%xmm3
+movdqa 256(%rsp),%xmm4
+pmuludq 368(%rsp),%xmm4
+paddq %xmm4,%xmm5
+movdqa 304(%rsp),%xmm4
+pmuludq 368(%rsp),%xmm4
+paddq %xmm4,%xmm7
+movdqa 352(%rsp),%xmm4
+pmuludq 400(%rsp),%xmm4
+paddq %xmm4,%xmm10
+vpmuludq 224(%rsp),%xmm2,%xmm2
+paddq %xmm2,%xmm0
+movdqa 272(%rsp),%xmm2
+pmuludq 224(%rsp),%xmm2
+paddq %xmm2,%xmm1
+movdqa 320(%rsp),%xmm2
+pmuludq 304(%rsp),%xmm2
+paddq %xmm2,%xmm5
+movdqa 368(%rsp),%xmm2
+pmuludq 288(%rsp),%xmm2
+paddq %xmm2,%xmm9
+movdqa 400(%rsp),%xmm2
+pmuludq 336(%rsp),%xmm2
+paddq %xmm2,%xmm11
+vpsrlq $26,%xmm1,%xmm2
+paddq %xmm2,%xmm3
+pand m26(%rip),%xmm1
+vpsrlq $25,%xmm10,%xmm2
+paddq %xmm2,%xmm11
+pand m25(%rip),%xmm10
+vpsrlq $25,%xmm3,%xmm2
+paddq %xmm2,%xmm5
+pand m25(%rip),%xmm3
+vpsrlq $26,%xmm11,%xmm2
+paddq %xmm2,%xmm12
+pand m26(%rip),%xmm11
+vpsrlq $26,%xmm5,%xmm2
+paddq %xmm2,%xmm7
+pand m26(%rip),%xmm5
+vpsrlq $25,%xmm12,%xmm2
+paddq %xmm2,%xmm13
+pand m25(%rip),%xmm12
+vpsrlq $25,%xmm7,%xmm2
+paddq %xmm2,%xmm9
+pand m25(%rip),%xmm7
+vpsrlq $26,%xmm13,%xmm2
+paddq %xmm2,%xmm0
+pand m26(%rip),%xmm13
+vpsrlq $26,%xmm9,%xmm2
+paddq %xmm2,%xmm10
+pand m26(%rip),%xmm9
+vpsrlq $25,%xmm0,%xmm2
+vpsllq $4,%xmm2,%xmm4
+paddq %xmm2,%xmm1
+psllq $1,%xmm2
+paddq %xmm2,%xmm4
+paddq %xmm4,%xmm1
+pand m25(%rip),%xmm0
+vpsrlq $25,%xmm10,%xmm2
+paddq %xmm2,%xmm11
+pand m25(%rip),%xmm10
+vpsrlq $26,%xmm1,%xmm2
+paddq %xmm2,%xmm3
+pand m26(%rip),%xmm1
+vpunpckhqdq %xmm3,%xmm1,%xmm2
+vpunpcklqdq %xmm3,%xmm1,%xmm1
+movdqa %xmm1,176(%rsp)
+vpaddq subc0(%rip),%xmm2,%xmm3
+psubq %xmm1,%xmm3
+vpunpckhqdq %xmm3,%xmm2,%xmm1
+vpunpcklqdq %xmm3,%xmm2,%xmm2
+movdqa %xmm2,192(%rsp)
+movdqa %xmm1,224(%rsp)
+psllq $1,%xmm1
+movdqa %xmm1,240(%rsp)
+pmuludq v121666_121666(%rip),%xmm3
+movdqa 80(%rsp),%xmm1
+vpunpcklqdq %xmm1,%xmm3,%xmm2
+vpunpckhqdq %xmm1,%xmm3,%xmm1
+vpunpckhqdq %xmm7,%xmm5,%xmm3
+vpunpcklqdq %xmm7,%xmm5,%xmm4
+movdqa %xmm4,256(%rsp)
+vpaddq subc2(%rip),%xmm3,%xmm5
+psubq %xmm4,%xmm5
+vpunpckhqdq %xmm5,%xmm3,%xmm4
+vpunpcklqdq %xmm5,%xmm3,%xmm3
+movdqa %xmm3,272(%rsp)
+movdqa %xmm4,288(%rsp)
+psllq $1,%xmm4
+movdqa %xmm4,304(%rsp)
+pmuludq v121666_121666(%rip),%xmm5
+movdqa 96(%rsp),%xmm3
+vpunpcklqdq %xmm3,%xmm5,%xmm4
+vpunpckhqdq %xmm3,%xmm5,%xmm3
+vpunpckhqdq %xmm10,%xmm9,%xmm5
+vpunpcklqdq %xmm10,%xmm9,%xmm6
+movdqa %xmm6,320(%rsp)
+vpaddq subc2(%rip),%xmm5,%xmm7
+psubq %xmm6,%xmm7
+vpunpckhqdq %xmm7,%xmm5,%xmm6
+vpunpcklqdq %xmm7,%xmm5,%xmm5
+movdqa %xmm5,336(%rsp)
+movdqa %xmm6,352(%rsp)
+psllq $1,%xmm6
+movdqa %xmm6,368(%rsp)
+pmuludq v121666_121666(%rip),%xmm7
+movdqa 112(%rsp),%xmm5
+vpunpcklqdq %xmm5,%xmm7,%xmm6
+vpunpckhqdq %xmm5,%xmm7,%xmm5
+vpunpckhqdq %xmm12,%xmm11,%xmm7
+vpunpcklqdq %xmm12,%xmm11,%xmm8
+movdqa %xmm8,384(%rsp)
+vpaddq subc2(%rip),%xmm7,%xmm9
+psubq %xmm8,%xmm9
+vpunpckhqdq %xmm9,%xmm7,%xmm8
+vpunpcklqdq %xmm9,%xmm7,%xmm7
+movdqa %xmm7,400(%rsp)
+movdqa %xmm8,416(%rsp)
+psllq $1,%xmm8
+movdqa %xmm8,432(%rsp)
+pmuludq v121666_121666(%rip),%xmm9
+movdqa 160(%rsp),%xmm7
+vpunpcklqdq %xmm7,%xmm9,%xmm8
+vpunpckhqdq %xmm7,%xmm9,%xmm7
+vpunpckhqdq %xmm0,%xmm13,%xmm9
+vpunpcklqdq %xmm0,%xmm13,%xmm0
+movdqa %xmm0,160(%rsp)
+vpaddq subc2(%rip),%xmm9,%xmm10
+psubq %xmm0,%xmm10
+vpunpckhqdq %xmm10,%xmm9,%xmm0
+vpunpcklqdq %xmm10,%xmm9,%xmm9
+movdqa %xmm9,448(%rsp)
+movdqa %xmm0,464(%rsp)
+psllq $1,%xmm0
+movdqa %xmm0,480(%rsp)
+pmuludq v121666_121666(%rip),%xmm10
+movdqa 208(%rsp),%xmm0
+vpunpcklqdq %xmm0,%xmm10,%xmm9
+vpunpckhqdq %xmm0,%xmm10,%xmm0
+vpsrlq $26,%xmm2,%xmm10
+paddq %xmm10,%xmm1
+pand m26(%rip),%xmm2
+vpsrlq $25,%xmm5,%xmm10
+paddq %xmm10,%xmm8
+pand m25(%rip),%xmm5
+vpsrlq $25,%xmm1,%xmm10
+paddq %xmm10,%xmm4
+pand m25(%rip),%xmm1
+vpsrlq $26,%xmm8,%xmm10
+paddq %xmm10,%xmm7
+pand m26(%rip),%xmm8
+vpsrlq $26,%xmm4,%xmm10
+paddq %xmm10,%xmm3
+pand m26(%rip),%xmm4
+vpsrlq $25,%xmm7,%xmm10
+paddq %xmm10,%xmm9
+pand m25(%rip),%xmm7
+vpsrlq $25,%xmm3,%xmm10
+paddq %xmm10,%xmm6
+pand m25(%rip),%xmm3
+vpsrlq $26,%xmm9,%xmm10
+paddq %xmm10,%xmm0
+pand m26(%rip),%xmm9
+vpsrlq $26,%xmm6,%xmm10
+paddq %xmm10,%xmm5
+pand m26(%rip),%xmm6
+vpsrlq $25,%xmm0,%xmm10
+vpsllq $4,%xmm10,%xmm11
+paddq %xmm10,%xmm2
+psllq $1,%xmm10
+paddq %xmm10,%xmm11
+paddq %xmm11,%xmm2
+pand m25(%rip),%xmm0
+vpsrlq $25,%xmm5,%xmm10
+paddq %xmm10,%xmm8
+pand m25(%rip),%xmm5
+vpsrlq $26,%xmm2,%xmm10
+paddq %xmm10,%xmm1
+pand m26(%rip),%xmm2
+vpunpckhqdq %xmm1,%xmm2,%xmm10
+movdqa %xmm10,80(%rsp)
+vpunpcklqdq %xmm1,%xmm2,%xmm1
+vpunpckhqdq %xmm3,%xmm4,%xmm2
+movdqa %xmm2,96(%rsp)
+vpunpcklqdq %xmm3,%xmm4,%xmm2
+vpunpckhqdq %xmm5,%xmm6,%xmm3
+movdqa %xmm3,112(%rsp)
+vpunpcklqdq %xmm5,%xmm6,%xmm3
+vpunpckhqdq %xmm7,%xmm8,%xmm4
+movdqa %xmm4,128(%rsp)
+vpunpcklqdq %xmm7,%xmm8,%xmm4
+vpunpckhqdq %xmm0,%xmm9,%xmm5
+movdqa %xmm5,144(%rsp)
+vpunpcklqdq %xmm0,%xmm9,%xmm0
+movdqa 176(%rsp),%xmm5
+paddq %xmm5,%xmm1
+vpunpcklqdq %xmm1,%xmm5,%xmm6
+vpunpckhqdq %xmm1,%xmm5,%xmm1
+vpmuludq 224(%rsp),%xmm6,%xmm5
+vpmuludq 192(%rsp),%xmm1,%xmm7
+paddq %xmm7,%xmm5
+vpmuludq 272(%rsp),%xmm6,%xmm7
+vpmuludq 240(%rsp),%xmm1,%xmm8
+paddq %xmm8,%xmm7
+vpmuludq 288(%rsp),%xmm6,%xmm8
+vpmuludq 272(%rsp),%xmm1,%xmm9
+paddq %xmm9,%xmm8
+vpmuludq 336(%rsp),%xmm6,%xmm9
+vpmuludq 304(%rsp),%xmm1,%xmm10
+paddq %xmm10,%xmm9
+vpmuludq 352(%rsp),%xmm6,%xmm10
+vpmuludq 336(%rsp),%xmm1,%xmm11
+paddq %xmm11,%xmm10
+vpmuludq 400(%rsp),%xmm6,%xmm11
+vpmuludq 368(%rsp),%xmm1,%xmm12
+paddq %xmm12,%xmm11
+vpmuludq 416(%rsp),%xmm6,%xmm12
+vpmuludq 400(%rsp),%xmm1,%xmm13
+paddq %xmm13,%xmm12
+vpmuludq 448(%rsp),%xmm6,%xmm13
+vpmuludq 432(%rsp),%xmm1,%xmm14
+paddq %xmm14,%xmm13
+vpmuludq 464(%rsp),%xmm6,%xmm14
+vpmuludq 448(%rsp),%xmm1,%xmm15
+paddq %xmm15,%xmm14
+vpmuludq 192(%rsp),%xmm6,%xmm6
+pmuludq v19_19(%rip),%xmm1
+vpmuludq 480(%rsp),%xmm1,%xmm1
+paddq %xmm1,%xmm6
+movdqa 256(%rsp),%xmm1
+paddq %xmm1,%xmm2
+vpunpcklqdq %xmm2,%xmm1,%xmm15
+vpunpckhqdq %xmm2,%xmm1,%xmm1
+vpmuludq 192(%rsp),%xmm15,%xmm2
+paddq %xmm2,%xmm7
+vpmuludq 224(%rsp),%xmm15,%xmm2
+paddq %xmm2,%xmm8
+vpmuludq 272(%rsp),%xmm15,%xmm2
+paddq %xmm2,%xmm9
+vpmuludq 288(%rsp),%xmm15,%xmm2
+paddq %xmm2,%xmm10
+vpmuludq 336(%rsp),%xmm15,%xmm2
+paddq %xmm2,%xmm11
+vpmuludq 352(%rsp),%xmm15,%xmm2
+paddq %xmm2,%xmm12
+vpmuludq 400(%rsp),%xmm15,%xmm2
+paddq %xmm2,%xmm13
+vpmuludq 416(%rsp),%xmm15,%xmm2
+paddq %xmm2,%xmm14
+pmuludq v19_19(%rip),%xmm15
+vpmuludq 448(%rsp),%xmm15,%xmm2
+paddq %xmm2,%xmm6
+pmuludq 464(%rsp),%xmm15
+paddq %xmm15,%xmm5
+vpmuludq 192(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm8
+vpmuludq 240(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm9
+vpmuludq 272(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm10
+vpmuludq 304(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm11
+vpmuludq 336(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm12
+vpmuludq 368(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm13
+vpmuludq 400(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm14
+pmuludq v19_19(%rip),%xmm1
+vpmuludq 432(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm6
+vpmuludq 448(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm5
+pmuludq 480(%rsp),%xmm1
+paddq %xmm1,%xmm7
+movdqa 320(%rsp),%xmm1
+paddq %xmm1,%xmm3
+vpunpcklqdq %xmm3,%xmm1,%xmm2
+vpunpckhqdq %xmm3,%xmm1,%xmm1
+vpmuludq 192(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm9
+vpmuludq 224(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm10
+vpmuludq 272(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm11
+vpmuludq 288(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm12
+vpmuludq 336(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm13
+vpmuludq 352(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm14
+pmuludq v19_19(%rip),%xmm2
+vpmuludq 400(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm6
+vpmuludq 416(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm5
+vpmuludq 448(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm7
+pmuludq 464(%rsp),%xmm2
+paddq %xmm2,%xmm8
+vpmuludq 192(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm10
+vpmuludq 240(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm11
+vpmuludq 272(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm12
+vpmuludq 304(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm13
+vpmuludq 336(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm14
+pmuludq v19_19(%rip),%xmm1
+vpmuludq 368(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm6
+vpmuludq 400(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm5
+vpmuludq 432(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm7
+vpmuludq 448(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm8
+pmuludq 480(%rsp),%xmm1
+paddq %xmm1,%xmm9
+movdqa 384(%rsp),%xmm1
+paddq %xmm1,%xmm4
+vpunpcklqdq %xmm4,%xmm1,%xmm2
+vpunpckhqdq %xmm4,%xmm1,%xmm1
+vpmuludq 192(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm11
+vpmuludq 224(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm12
+vpmuludq 272(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm13
+vpmuludq 288(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm14
+pmuludq v19_19(%rip),%xmm2
+vpmuludq 336(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm6
+vpmuludq 352(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm5
+vpmuludq 400(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm7
+vpmuludq 416(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm8
+vpmuludq 448(%rsp),%xmm2,%xmm3
+paddq %xmm3,%xmm9
+pmuludq 464(%rsp),%xmm2
+paddq %xmm2,%xmm10
+vpmuludq 192(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm12
+vpmuludq 240(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm13
+vpmuludq 272(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm14
+pmuludq v19_19(%rip),%xmm1
+vpmuludq 304(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm6
+vpmuludq 336(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm5
+vpmuludq 368(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm7
+vpmuludq 400(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm8
+vpmuludq 432(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm9
+vpmuludq 448(%rsp),%xmm1,%xmm2
+paddq %xmm2,%xmm10
+pmuludq 480(%rsp),%xmm1
+paddq %xmm1,%xmm11
+movdqa 160(%rsp),%xmm1
+paddq %xmm1,%xmm0
+vpunpcklqdq %xmm0,%xmm1,%xmm2
+vpunpckhqdq %xmm0,%xmm1,%xmm0
+vpmuludq 192(%rsp),%xmm2,%xmm1
+paddq %xmm1,%xmm13
+vpmuludq 224(%rsp),%xmm2,%xmm1
+paddq %xmm1,%xmm14
+pmuludq v19_19(%rip),%xmm2
+vpmuludq 272(%rsp),%xmm2,%xmm1
+paddq %xmm1,%xmm6
+vpmuludq 288(%rsp),%xmm2,%xmm1
+paddq %xmm1,%xmm5
+vpmuludq 336(%rsp),%xmm2,%xmm1
+paddq %xmm1,%xmm7
+vpmuludq 352(%rsp),%xmm2,%xmm1
+paddq %xmm1,%xmm8
+vpmuludq 400(%rsp),%xmm2,%xmm1
+paddq %xmm1,%xmm9
+vpmuludq 416(%rsp),%xmm2,%xmm1
+paddq %xmm1,%xmm10
+vpmuludq 448(%rsp),%xmm2,%xmm1
+paddq %xmm1,%xmm11
+pmuludq 464(%rsp),%xmm2
+paddq %xmm2,%xmm12
+vpmuludq 192(%rsp),%xmm0,%xmm1
+paddq %xmm1,%xmm14
+pmuludq v19_19(%rip),%xmm0
+vpmuludq 240(%rsp),%xmm0,%xmm1
+paddq %xmm1,%xmm6
+vpmuludq 272(%rsp),%xmm0,%xmm1
+paddq %xmm1,%xmm5
+vpmuludq 304(%rsp),%xmm0,%xmm1
+paddq %xmm1,%xmm7
+vpmuludq 336(%rsp),%xmm0,%xmm1
+paddq %xmm1,%xmm8
+vpmuludq 368(%rsp),%xmm0,%xmm1
+paddq %xmm1,%xmm9
+vpmuludq 400(%rsp),%xmm0,%xmm1
+paddq %xmm1,%xmm10
+vpmuludq 432(%rsp),%xmm0,%xmm1
+paddq %xmm1,%xmm11
+vpmuludq 448(%rsp),%xmm0,%xmm1
+paddq %xmm1,%xmm12
+pmuludq 480(%rsp),%xmm0
+paddq %xmm0,%xmm13
+vpsrlq $26,%xmm6,%xmm0
+paddq %xmm0,%xmm5
+pand m26(%rip),%xmm6
+vpsrlq $25,%xmm10,%xmm0
+paddq %xmm0,%xmm11
+pand m25(%rip),%xmm10
+vpsrlq $25,%xmm5,%xmm0
+paddq %xmm0,%xmm7
+pand m25(%rip),%xmm5
+vpsrlq $26,%xmm11,%xmm0
+paddq %xmm0,%xmm12
+pand m26(%rip),%xmm11
+vpsrlq $26,%xmm7,%xmm0
+paddq %xmm0,%xmm8
+pand m26(%rip),%xmm7
+vpsrlq $25,%xmm12,%xmm0
+paddq %xmm0,%xmm13
+pand m25(%rip),%xmm12
+vpsrlq $25,%xmm8,%xmm0
+paddq %xmm0,%xmm9
+pand m25(%rip),%xmm8
+vpsrlq $26,%xmm13,%xmm0
+paddq %xmm0,%xmm14
+pand m26(%rip),%xmm13
+vpsrlq $26,%xmm9,%xmm0
+paddq %xmm0,%xmm10
+pand m26(%rip),%xmm9
+vpsrlq $25,%xmm14,%xmm0
+vpsllq $4,%xmm0,%xmm1
+paddq %xmm0,%xmm6
+psllq $1,%xmm0
+paddq %xmm0,%xmm1
+paddq %xmm1,%xmm6
+pand m25(%rip),%xmm14
+vpsrlq $25,%xmm10,%xmm0
+paddq %xmm0,%xmm11
+pand m25(%rip),%xmm10
+vpsrlq $26,%xmm6,%xmm0
+paddq %xmm0,%xmm5
+pand m26(%rip),%xmm6
+vpunpckhqdq %xmm5,%xmm6,%xmm1
+vpunpcklqdq %xmm5,%xmm6,%xmm0
+vpunpckhqdq %xmm8,%xmm7,%xmm3
+vpunpcklqdq %xmm8,%xmm7,%xmm2
+vpunpckhqdq %xmm10,%xmm9,%xmm5
+vpunpcklqdq %xmm10,%xmm9,%xmm4
+vpunpckhqdq %xmm12,%xmm11,%xmm7
+vpunpcklqdq %xmm12,%xmm11,%xmm6
+vpunpckhqdq %xmm14,%xmm13,%xmm9
+vpunpcklqdq %xmm14,%xmm13,%xmm8
+cmp $0,%rdx
+jne ._ladder_base_loop
+movdqu %xmm1,80(%rdi)
+movdqu %xmm0,0(%rdi)
+movdqu %xmm3,96(%rdi)
+movdqu %xmm2,16(%rdi)
+movdqu %xmm5,112(%rdi)
+movdqu %xmm4,32(%rdi)
+movdqu %xmm7,128(%rdi)
+movdqu %xmm6,48(%rdi)
+movdqu %xmm9,144(%rdi)
+movdqu %xmm8,64(%rdi)
+movq 1536(%rsp),%r11
+movq 1544(%rsp),%r12
+movq 1552(%rsp),%r13
+add %r11,%rsp
+ret
+FN_END ladder_base_avx
+
+
+.p2align 5
GLOBAL_HIDDEN_FN_EXT fe51_pack_avx,2,0
fe51_pack_avx_local:
mov %rsp,%r11
@@ -1950,4 +3226,4 @@ movq 40(%rsp),%rbx
movq 48(%rsp),%rbp
add %r11,%rsp
ret
-FN_END fe51_nsquare_avx
\ No newline at end of file
+FN_END fe51_nsquare_avx
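
The ladder_base_avx routine added above is a fixed-base Montgomery ladder run two-way in the xmm registers: the shrd/xor prologue rewrites the clamped scalar as n ^ (n >> 1) so each iteration needs exactly one mask-driven conditional swap (the small loop materializes one all-ones/all-zeros mask per scalar bit via neg), while the m25/m26 masks and the *19 folds implement carry and reduction mod 2^255 - 19 in radix 2^25.5. A scalar-level sketch of the step each of the 255 iterations computes, with the fe_* field helpers assumed rather than taken from this commit:

/* One X25519 Montgomery ladder step; fe_add, fe_sub, fe_mul, fe_sq,
 * fe_mul_small and fe_cswap are assumed helpers over GF(2^255 - 19).
 * For the fixed-base ladder, x1 = 9. */
static void ladder_step (fe x2, fe z2, fe x3, fe z3,
                         const fe x1, unsigned long long swap)
{
    fe a, aa, b, bb, e, c, d, da, cb, t;

    fe_cswap (x2, x3, swap);                 /* mask-based, constant time */
    fe_cswap (z2, z3, swap);
    fe_add (a, x2, z2);   fe_sq (aa, a);     /* A = x2+z2, AA = A^2 */
    fe_sub (b, x2, z2);   fe_sq (bb, b);     /* B = x2-z2, BB = B^2 */
    fe_sub (e, aa, bb);                      /* E = AA - BB */
    fe_add (c, x3, z3);   fe_sub (d, x3, z3);
    fe_mul (da, d, a);    fe_mul (cb, c, b);
    fe_add (t, da, cb);   fe_sq (x3, t);     /* x3 = (DA + CB)^2 */
    fe_sub (t, da, cb);   fe_sq (t, t);
    fe_mul (z3, t, x1);                      /* z3 = x1 * (DA - CB)^2 */
    fe_mul (x2, aa, bb);                     /* x2 = AA * BB */
    fe_mul_small (t, e, 121666);             /* matches v121666_121666 */
    fe_add (t, t, bb);                       /* BB + 121666*E == AA + 121665*E */
    fe_mul (z2, e, t);                       /* z2 = E * (BB + 121666*E) */
}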
diff --git a/src/libcryptobox/curve25519/avx.c b/src/libcryptobox/curve25519/avx.c
index b06b7223a..6e0a6661a 100644
--- a/src/libcryptobox/curve25519/avx.c
+++ b/src/libcryptobox/curve25519/avx.c
@@ -211,3 +211,44 @@ scalarmult_avx (unsigned char *q,
fe51_mul_avx (&x_51, &x_51, &z_51);
fe51_pack_avx (q, &x_51);
}
+
+#undef x2
+#undef z2
+#define x2 var[0]
+#define z2 var[1]
+
+int
+scalarmult_base_avx (unsigned char *q, const unsigned char *n)
+{
+ unsigned char e[32];
+
+ fe var[3];
+
+ fe51 x_51;
+ fe51 z_51;
+
+ memcpy (e, n, 32);
+ e[0] &= 248;
+ e[31] &= 127;
+ e[31] |= 64;
+
+ ladder_base_avx (var, e);
+
+ z_51.v[0] = (z2[1] << 26) + z2[0];
+ z_51.v[1] = (z2[3] << 26) + z2[2];
+ z_51.v[2] = (z2[5] << 26) + z2[4];
+ z_51.v[3] = (z2[7] << 26) + z2[6];
+ z_51.v[4] = (z2[9] << 26) + z2[8];
+
+ x_51.v[0] = (x2[1] << 26) + x2[0];
+ x_51.v[1] = (x2[3] << 26) + x2[2];
+ x_51.v[2] = (x2[5] << 26) + x2[4];
+ x_51.v[3] = (x2[7] << 26) + x2[6];
+ x_51.v[4] = (x2[9] << 26) + x2[8];
+
+ fe51_invert (&z_51, &z_51);
+ fe51_mul_avx (&x_51, &x_51, &z_51);
+ fe51_pack_avx (q, &x_51);
+
+ return 0;
+}
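
The unrolled assignments above repack the ladder output from the vector representation (ten limbs in radix 2^25.5, even limbs holding 26 bits and odd limbs 25) into the five 51-bit limbs consumed by fe51_invert, fe51_mul_avx and fe51_pack_avx: each 51-bit limb is lo26 + 2^26 * hi25. The same repacking as a loop, assuming the fe/fe51 typedefs used in avx.c:

/* Repack radix-2^25.5 (10 limbs) into radix-2^51 (5 limbs);
 * fe and fe51 are the types from avx.c, an assumption here. */
static void fe_to_fe51 (fe51 *out, const fe in)
{
    int i;

    for (i = 0; i < 5; i++) {
        out->v[i] = ((uint64_t) in[2 * i + 1] << 26) + in[2 * i];
    }
}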
diff --git a/src/libcryptobox/curve25519/constants.S b/src/libcryptobox/curve25519/constants.S
index 054837d01..1c68955b4 100644
--- a/src/libcryptobox/curve25519/constants.S
+++ b/src/libcryptobox/curve25519/constants.S
@@ -11,6 +11,8 @@ SECTION_RODATA
.globl m26
.globl subc0
.globl subc2
+.globl v9_0
+.globl v9_9
.globl REDMASK51
@@ -20,6 +22,8 @@ v0_0: .quad 0, 0
v1_0: .quad 1, 0
v2_1: .quad 2, 1
v19_19: .quad 19, 19
+v9_0: .quad 9, 0
+v9_9: .quad 9, 9
v38_1: .quad 38, 1
v38_38: .quad 38, 38
v121666_121666: .quad 121666, 121666
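
The two new constants encode the fixed base point for the vector code: v9_0 seeds the initial ladder state with x = 9, and v9_9 turns the ladder's multiplication by x1 into a per-limb multiply by 9 in both lanes (the pmuludq v9_9(%rip) instructions in avx.S), in place of a full field multiplication. A scalar sketch of that shortcut, with the 10-limb representation as an assumption:

/* z3 = 9 * (DA - CB)^2 needs only a small-constant multiply;
 * carries are handled afterwards by the usual m25/m26 pass. */
static void fe_mul9 (uint64_t h[10], const uint64_t f[10])
{
    int i;

    for (i = 0; i < 10; i++) {
        h[i] = 9 * f[i];
    }
}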
diff --git a/src/libcryptobox/curve25519/curve25519-donna-c64.c b/src/libcryptobox/curve25519/curve25519-donna-c64.c
index 2d5b19b18..c83b27ecb 100644
--- a/src/libcryptobox/curve25519/curve25519-donna-c64.c
+++ b/src/libcryptobox/curve25519/curve25519-donna-c64.c
@@ -24,6 +24,7 @@
#include <string.h>
#include <stdint.h>
+#include "curve25519.h"
typedef uint8_t u8;
typedef uint64_t limb;
@@ -495,3 +496,10 @@ int scalarmult_donna (u8 *mypublic, const u8 *secret, const u8 *basepoint)
return 0;
}
+
+int
+scalarmult_base_donna (u8 *mypublic, const u8 *secret)
+{
+ return scalarmult_donna (mypublic, secret,
+ curve25519_basepoint);
+}
diff --git a/src/libcryptobox/curve25519/curve25519-donna.c b/src/libcryptobox/curve25519/curve25519-donna.c
index 0de4b73b0..e54bae7b6 100644
--- a/src/libcryptobox/curve25519/curve25519-donna.c
+++ b/src/libcryptobox/curve25519/curve25519-donna.c
@@ -47,6 +47,7 @@
#include <string.h>
#include <stdint.h>
+#include "curve25519.h"
#ifdef _MSC_VER
#define inline __inline
@@ -909,3 +910,10 @@ int scalarmult_donna (u8 *mypublic, const u8 *secret, const u8 *basepoint)
return 0;
}
+
+int
+scalarmult_base_donna (u8 *mypublic, const u8 *secret)
+{
+ return scalarmult_donna (mypublic, secret,
+ curve25519_basepoint);
+}
diff --git a/src/libcryptobox/curve25519/curve25519.c b/src/libcryptobox/curve25519/curve25519.c
index 34a361d60..220a1da13 100644
--- a/src/libcryptobox/curve25519/curve25519.c
+++ b/src/libcryptobox/curve25519/curve25519.c
@@ -36,13 +36,16 @@ typedef struct curve25519_impl_s {
void (*scalarmult) (guint8 *mypublic,
const guint8 *secret,
const guint8 *basepoint);
+ void (*scalarmult_base) (guint8 *mypublic,
+ const guint8 *secret);
} curve25519_impl_t;
#define CURVE25519_DECLARE(ext) \
- void scalarmult_##ext(guint8 *mypublic, const guint8 *secret, const guint8 *basepoint)
+ void scalarmult_##ext(guint8 *mypublic, const guint8 *secret, const guint8 *basepoint); \
+ void scalarmult_base_##ext(guint8 *mypublic, const guint8 *secret)
#define CURVE25519_IMPL(cpuflags, desc, ext) \
- {(cpuflags), desc, scalarmult_##ext}
+ {(cpuflags), desc, scalarmult_##ext, scalarmult_base_##ext}
#if defined(__LP64__)
#if defined(HAVE_AVX)
@@ -142,3 +145,10 @@ curve25519 (guchar *mypublic,
return 0;
}
+int
+curve25519_base (guchar *mypublic, const guchar *secret)
+{
+ curve25519_opt->scalarmult_base (mypublic, secret);
+
+ return 0;
+}
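
The macro pair keeps the declarations and the dispatch-table entries in sync. Expanded by hand for a hypothetical backend suffix ext, they produce roughly:

/* CURVE25519_DECLARE(ext) expands to: */
void scalarmult_ext (guint8 *mypublic, const guint8 *secret,
                     const guint8 *basepoint);
void scalarmult_base_ext (guint8 *mypublic, const guint8 *secret);

/* CURVE25519_IMPL(cpuflags, "desc", ext) expands to the table entry:
 * { (cpuflags), "desc", scalarmult_ext, scalarmult_base_ext }       */

curve25519_base then simply dispatches through the scalarmult_base pointer of the implementation selected at load time.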
diff --git a/src/libcryptobox/curve25519/curve25519.h b/src/libcryptobox/curve25519/curve25519.h
index 17fed4e53..8ea440b40 100644
--- a/src/libcryptobox/curve25519/curve25519.h
+++ b/src/libcryptobox/curve25519/curve25519.h
@@ -6,6 +6,8 @@
static const guchar curve25519_basepoint[32] = {9};
int curve25519 (guchar *mypublic, const guchar *secret, const guchar *basepoint);
+/* Fixed-base scalarmult, using an optimized implementation when available */
+int curve25519_base (guchar *mypublic, const guchar *secret);
void curve25519_load (void);
#endif
diff --git a/src/libcryptobox/curve25519/ref.c b/src/libcryptobox/curve25519/ref.c
index f6134b0c4..a0b77ed99 100644
--- a/src/libcryptobox/curve25519/ref.c
+++ b/src/libcryptobox/curve25519/ref.c
@@ -7,6 +7,7 @@ Derived from public domain code by D. J. Bernstein.
*/
#include "config.h"
+#include "curve25519.h"
static void add (unsigned int out[32],
const unsigned int a[32],
@@ -332,3 +333,9 @@ int scalarmult_ref (unsigned char *q,
return 0;
}
+int
+scalarmult_base_ref (unsigned char *q,
+ const unsigned char *n)
+{
+ return scalarmult_ref (q, n, curve25519_basepoint);
+}
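
With every backend now exposing scalarmult_base_##ext (a dedicated ladder for AVX, thin wrappers over the generic path for donna, donna-c64 and ref), the two public entry points must agree on the fixed base point. A minimal consistency check, assuming only the public API from curve25519.h:

#include <assert.h>
#include <string.h>
#include "curve25519.h"

static void check_fixed_base (const guchar sk[32])
{
    guchar pk_generic[32], pk_base[32];

    curve25519 (pk_generic, sk, curve25519_basepoint);
    curve25519_base (pk_base, sk);
    assert (memcmp (pk_generic, pk_base, 32) == 0);
}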