// lib25519-20240321/crypto_multiscalar/ed25519/amd64-maa4-p3/ge25519_add.S
#include "crypto_asm_hidden.h"
#define mask63 CRYPTO_SHARED_NAMESPACE(mask63)
#define EC2D0 CRYPTO_SHARED_NAMESPACE(EC2D0)
#define EC2D1 CRYPTO_SHARED_NAMESPACE(EC2D1)
#define EC2D2 CRYPTO_SHARED_NAMESPACE(EC2D2)
#define EC2D3 CRYPTO_SHARED_NAMESPACE(EC2D3)
// ge25519_add
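// ge25519_add computes r = p + q on the ed25519 twisted Edwards curve,
// with all three points in extended ("p3") coordinates (X:Y:Z:T), T = XY/Z.
// Arguments (System V AMD64 ABI): %rdi = r, %rsi = p, %rdx = q; each point
// stores four radix-2^64 field elements at offsets 0 (X), 32 (Y), 64 (Z),
// 96 (T).  mask63 is the constant 2^63-1, and EC2D0..EC2D3 hold the four
// limbs of the curve constant 2*d.  Field arithmetic is lazily reduced:
// values live in 4 limbs modulo 2^256, and overflow is folded back in via
// 2^256 == 38 and 2^255 == 19 (mod 2^255-19).  The names A..H in the
// comments below follow the add-2008-hwcd addition formulas, which the
// loads and stores here appear to implement.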
.p2align 5
ASM_HIDDEN _CRYPTO_NAMESPACE(ge25519_add)
.globl _CRYPTO_NAMESPACE(ge25519_add)
ASM_HIDDEN CRYPTO_NAMESPACE(ge25519_add)
.globl CRYPTO_NAMESPACE(ge25519_add)
_CRYPTO_NAMESPACE(ge25519_add):
CRYPTO_NAMESPACE(ge25519_add):
movq %rsp,%r11        // save the caller's stack pointer
andq $-32,%rsp        // align the stack to 32 bytes
subq $216,%rsp        // reserve scratch space
movq %r11,0(%rsp)     // spill caller %rsp; callee-saved registers follow
movq %r12,8(%rsp)
movq %r13,16(%rsp)
movq %r14,24(%rsp)
movq %r15,32(%rsp)
movq %rbx,40(%rsp)
movq %rbp,48(%rsp)
movq %rdx,%rcx        // move q out of %rdx, which mulq clobbers
/* ge25519_add_p1p1 */
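// The p1p1 stage computes, in scratch space at 56..208(%rsp):
//   A = (Y1-X1)*(Y2-X2)   E = B - A
//   B = (Y1+X1)*(Y2+X2)   F = D - C
//   C = 2d*T1*T2          G = D + C
//   D = 2*Z1*Z2           H = B + A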
// load Y1
movq 32(%rsi),%rdx
movq 40(%rsi),%r8
movq 48(%rsi),%r9
movq 56(%rsi),%rax
// copy Y1
movq %rdx,%r10
movq %r8,%r11
movq %r9,%r12
movq %rax,%r13
// sub: Y1 - X1
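// Reduction idiom: a borrow out of the top limb means the 256-bit result
// wrapped, i.e. is off by -2^256 == -38 (mod 2^255-19), so 38 is
// conditionally subtracted; if that subtraction borrows in turn, the
// final cmovc/subq pair subtracts 38 from the low limb alone, which
// cannot underflow at that point.  The add and double blocks below
// mirror this idiom, folding a carry out back in as +38.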
subq 0(%rsi),%rdx
sbbq 8(%rsi),%r8
sbbq 16(%rsi),%r9
sbbq 24(%rsi),%rax
movq $0,%r14
movq $38,%r15
cmovae %r14,%r15
subq %r15,%rdx
sbbq %r14,%r8
sbbq %r14,%r9
sbbq %r14,%rax
cmovc %r15,%r14
subq %r14,%rdx
// add: Y1 + X1
addq 0(%rsi),%r10
adcq 8(%rsi),%r11
adcq 16(%rsi),%r12
adcq 24(%rsi),%r13
movq $0,%r14
movq $38,%r15
cmovae %r14,%r15
addq %r15,%r10
adcq %r14,%r11
adcq %r14,%r12
adcq %r14,%r13
cmovc %r15,%r14
addq %r14,%r10
// store Y1-X1 at 56..80(%rsp)
movq %rdx,56(%rsp)
movq %r8,64(%rsp)
movq %r9,72(%rsp)
movq %rax,80(%rsp)
// store Y1+X1 at 88..112(%rsp)
movq %r10,88(%rsp)
movq %r11,96(%rsp)
movq %r12,104(%rsp)
movq %r13,112(%rsp)
// load Y2
movq 32(%rcx),%rdx
movq 40(%rcx),%r8
movq 48(%rcx),%r9
movq 56(%rcx),%rax
// copy Y2
movq %rdx,%r10
movq %r8,%r11
movq %r9,%r12
movq %rax,%r13
// sub: Y2 - X2
subq 0(%rcx),%rdx
sbbq 8(%rcx),%r8
sbbq 16(%rcx),%r9
sbbq 24(%rcx),%rax
movq $0,%r14
movq $38,%r15
cmovae %r14,%r15
subq %r15,%rdx
sbbq %r14,%r8
sbbq %r14,%r9
sbbq %r14,%rax
cmovc %r15,%r14
subq %r14,%rdx
// add: Y2 + X2
addq 0(%rcx),%r10
adcq 8(%rcx),%r11
adcq 16(%rcx),%r12
adcq 24(%rcx),%r13
movq $0,%r14
movq $38,%r15
cmovae %r14,%r15
addq %r15,%r10
adcq %r14,%r11
adcq %r14,%r12
adcq %r14,%r13
cmovc %r15,%r14
addq %r14,%r10
// store Y2-X2 at 120..144(%rsp)
movq %rdx,120(%rsp)
movq %r8,128(%rsp)
movq %r9,136(%rsp)
movq %rax,144(%rsp)
// store Y2+X2 at 152..176(%rsp)
movq %r10,152(%rsp)
movq %r11,160(%rsp)
movq %r12,168(%rsp)
movq %r13,176(%rsp)
// mul: A = (Y1-X1)*(Y2-X2)
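// maa4 multiplication: 4x4-limb schoolbook multiply modulo 2^255-19.
// Partial products accumulate in the pairs (r8,r9)..(r14,r15), the odd
// register of each pair collecting carries.  Limbs 4..7 of the 512-bit
// product are folded into limbs 0..3 by multiplying by 38 as they
// complete (2^256 == 38 mod p), and the closing shld/and/imul sequence
// folds bits 255 and up back in via 2^255 == 19, leaving a loosely
// reduced 4-limb result.  The same pattern recurs in every mul block.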
movq 64(%rsp),%rax
mulq 144(%rsp)
movq %rax,%r8
xorq %r9,%r9
movq %rdx,%r10
xorq %r11,%r11
movq 72(%rsp),%rax
mulq 136(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 80(%rsp),%rax
mulq 128(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 72(%rsp),%rax
mulq 144(%rsp)
addq %rax,%r10
adcq $0,%r11
movq %rdx,%r12
xorq %r13,%r13
movq 80(%rsp),%rax
mulq 136(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq $38,%rax
mulq %r10
imul $38,%r11,%r11
movq %rax,%r10
addq %rdx,%r11
movq 80(%rsp),%rax
mulq 144(%rsp)
addq %rax,%r12
adcq $0,%r13
movq $38,%rax
mulq %rdx
movq %rax,%r14
movq %rdx,%r15
movq $38,%rax
mulq %r12
imul $38,%r13,%r13
movq %rax,%r12
addq %rdx,%r13
movq 56(%rsp),%rax
mulq 144(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 64(%rsp),%rax
mulq 136(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 72(%rsp),%rax
mulq 128(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 80(%rsp),%rax
mulq 120(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq $38,%rax
mulq %r8
imul $38,%r9,%r9
movq %rax,%r8
addq %rdx,%r9
movq 56(%rsp),%rax
mulq 120(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 56(%rsp),%rax
mulq 128(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 64(%rsp),%rax
mulq 120(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 56(%rsp),%rax
mulq 136(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 64(%rsp),%rax
mulq 128(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 72(%rsp),%rax
mulq 120(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %r9,%r10
adcq $0,%r11
addq %r11,%r12
adcq $0,%r13
addq %r13,%r14
adcq $0,%r15
shld $1,%r14,%r15        // %r15 = bits 255 and up of the product
andq mask63(%rip),%r14   // clear bit 63 of the top limb
imul $19,%r15,%r15       // fold back in: 2^255 == 19 (mod 2^255-19)
addq %r15,%r8
adcq $0,%r10
adcq $0,%r12
adcq $0,%r14
// store A at 56..80(%rsp)
movq %r8,56(%rsp)
movq %r10,64(%rsp)
movq %r12,72(%rsp)
movq %r14,80(%rsp)
// mul: B = (Y1+X1)*(Y2+X2)
movq 96(%rsp),%rax
mulq 176(%rsp)
movq %rax,%r8
xorq %r9,%r9
movq %rdx,%r10
xorq %r11,%r11
movq 104(%rsp),%rax
mulq 168(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 112(%rsp),%rax
mulq 160(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 104(%rsp),%rax
mulq 176(%rsp)
addq %rax,%r10
adcq $0,%r11
movq %rdx,%r12
xorq %r13,%r13
movq 112(%rsp),%rax
mulq 168(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq $38,%rax
mulq %r10
imul $38,%r11,%r11
movq %rax,%r10
addq %rdx,%r11
movq 112(%rsp),%rax
mulq 176(%rsp)
addq %rax,%r12
adcq $0,%r13
movq $38,%rax
mulq %rdx
movq %rax,%r14
movq %rdx,%r15
movq $38,%rax
mulq %r12
imul $38,%r13,%r13
movq %rax,%r12
addq %rdx,%r13
movq 88(%rsp),%rax
mulq 176(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 96(%rsp),%rax
mulq 168(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 104(%rsp),%rax
mulq 160(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 112(%rsp),%rax
mulq 152(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq $38,%rax
mulq %r8
imul $38,%r9,%r9
movq %rax,%r8
addq %rdx,%r9
movq 88(%rsp),%rax
mulq 152(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 88(%rsp),%rax
mulq 160(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 96(%rsp),%rax
mulq 152(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 88(%rsp),%rax
mulq 168(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 96(%rsp),%rax
mulq 160(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 104(%rsp),%rax
mulq 152(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %r9,%r10
adcq $0,%r11
addq %r11,%r12
adcq $0,%r13
addq %r13,%r14
adcq $0,%r15
shld $1,%r14,%r15
andq mask63(%rip),%r14
imul $19,%r15,%r15
addq %r15,%r8
adcq $0,%r10
adcq $0,%r12
adcq $0,%r14
// copy B
movq %r8,%r9
movq %r10,%r11
movq %r12,%r13
movq %r14,%r15
// add: H = B + A
addq 56(%rsp),%r9
adcq 64(%rsp),%r11
adcq 72(%rsp),%r13
adcq 80(%rsp),%r15
movq $0,%rdx
movq $38,%rax
cmovae %rdx,%rax
addq %rax,%r9
adcq %rdx,%r11
adcq %rdx,%r13
adcq %rdx,%r15
cmovc %rax,%rdx
addq %rdx,%r9
// sub: E = B - A
subq 56(%rsp),%r8
sbbq 64(%rsp),%r10
sbbq 72(%rsp),%r12
sbbq 80(%rsp),%r14
movq $0,%rdx
movq $38,%rax
cmovae %rdx,%rax
subq %rax,%r8
sbbq %rdx,%r10
sbbq %rdx,%r12
sbbq %rdx,%r14
cmovc %rax,%rdx
subq %rdx,%r8
// store E at 64..88(%rsp)
movq %r8,64(%rsp)
movq %r10,72(%rsp)
movq %r12,80(%rsp)
movq %r14,88(%rsp)
// store H at 128..152(%rsp)
movq %r9,128(%rsp)
movq %r11,136(%rsp)
movq %r13,144(%rsp)
movq %r15,152(%rsp)
// mul: T1*T2
movq 104(%rsi),%rax
mulq 120(%rcx)
movq %rax,%r8
xorq %r9,%r9
movq %rdx,%r10
xorq %r11,%r11
movq 112(%rsi),%rax
mulq 112(%rcx)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 120(%rsi),%rax
mulq 104(%rcx)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 112(%rsi),%rax
mulq 120(%rcx)
addq %rax,%r10
adcq $0,%r11
movq %rdx,%r12
xorq %r13,%r13
movq 120(%rsi),%rax
mulq 112(%rcx)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq $38,%rax
mulq %r10
imul $38,%r11,%r11
movq %rax,%r10
addq %rdx,%r11
movq 120(%rsi),%rax
mulq 120(%rcx)
addq %rax,%r12
adcq $0,%r13
movq $38,%rax
mulq %rdx
movq %rax,%r14
movq %rdx,%r15
movq $38,%rax
mulq %r12
imul $38,%r13,%r13
movq %rax,%r12
addq %rdx,%r13
movq 96(%rsi),%rax
mulq 120(%rcx)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 104(%rsi),%rax
mulq 112(%rcx)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 112(%rsi),%rax
mulq 104(%rcx)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 120(%rsi),%rax
mulq 96(%rcx)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq $38,%rax
mulq %r8
imul $38,%r9,%r9
movq %rax,%r8
addq %rdx,%r9
movq 96(%rsi),%rax
mulq 96(%rcx)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 96(%rsi),%rax
mulq 104(%rcx)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 104(%rsi),%rax
mulq 96(%rcx)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 96(%rsi),%rax
mulq 112(%rcx)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 104(%rsi),%rax
mulq 104(%rcx)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 112(%rsi),%rax
mulq 96(%rcx)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %r9,%r10
adcq $0,%r11
addq %r11,%r12
adcq $0,%r13
addq %r13,%r14
adcq $0,%r15
shld $1,%r14,%r15
andq mask63(%rip),%r14
imul $19,%r15,%r15
addq %r15,%r8
adcq $0,%r10
adcq $0,%r12
adcq $0,%r14
// store T1*T2 at 184..208(%rsp)
movq %r8,184(%rsp)
movq %r10,192(%rsp)
movq %r12,200(%rsp)
movq %r14,208(%rsp)
// mul: C = 2d*T1*T2 (EC2D0..3 = limbs of 2*d)
movq EC2D1(%rip),%rax
mulq 208(%rsp)
movq %rax,%r8
xorq %r9,%r9
movq %rdx,%r10
xorq %r11,%r11
movq EC2D2(%rip),%rax
mulq 200(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq EC2D3(%rip),%rax
mulq 192(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq EC2D2(%rip),%rax
mulq 208(%rsp)
addq %rax,%r10
adcq $0,%r11
movq %rdx,%r12
xorq %r13,%r13
movq EC2D3(%rip),%rax
mulq 200(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq $38,%rax
mulq %r10
imul $38,%r11,%r11
movq %rax,%r10
addq %rdx,%r11
movq EC2D3(%rip),%rax
mulq 208(%rsp)
addq %rax,%r12
adcq $0,%r13
movq $38,%rax
mulq %rdx
movq %rax,%r14
movq %rdx,%r15
movq $38,%rax
mulq %r12
imul $38,%r13,%r13
movq %rax,%r12
addq %rdx,%r13
movq EC2D0(%rip),%rax
mulq 208(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq EC2D1(%rip),%rax
mulq 200(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq EC2D2(%rip),%rax
mulq 192(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq EC2D3(%rip),%rax
mulq 184(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq $38,%rax
mulq %r8
imul $38,%r9,%r9
movq %rax,%r8
addq %rdx,%r9
movq EC2D0(%rip),%rax
mulq 184(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq EC2D0(%rip),%rax
mulq 192(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq EC2D1(%rip),%rax
mulq 184(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq EC2D0(%rip),%rax
mulq 200(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq EC2D1(%rip),%rax
mulq 192(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq EC2D2(%rip),%rax
mulq 184(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %r9,%r10
adcq $0,%r11
addq %r11,%r12
adcq $0,%r13
addq %r13,%r14
adcq $0,%r15
shld $1,%r14,%r15
andq mask63(%rip),%r14
imul $19,%r15,%r15
addq %r15,%r8
adcq $0,%r10
adcq $0,%r12
adcq $0,%r14
// store C at 184..208(%rsp)
movq %r8,184(%rsp)
movq %r10,192(%rsp)
movq %r12,200(%rsp)
movq %r14,208(%rsp)
// mul: Z1*Z2
movq 72(%rsi),%rax
mulq 88(%rcx)
movq %rax,%r8
xorq %r9,%r9
movq %rdx,%r10
xorq %r11,%r11
movq 80(%rsi),%rax
mulq 80(%rcx)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 88(%rsi),%rax
mulq 72(%rcx)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 80(%rsi),%rax
mulq 88(%rcx)
addq %rax,%r10
adcq $0,%r11
movq %rdx,%r12
xorq %r13,%r13
movq 88(%rsi),%rax
mulq 80(%rcx)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq $38,%rax
mulq %r10
imul $38,%r11,%r11
movq %rax,%r10
addq %rdx,%r11
movq 88(%rsi),%rax
mulq 88(%rcx)
addq %rax,%r12
adcq $0,%r13
movq $38,%rax
mulq %rdx
movq %rax,%r14
movq %rdx,%r15
movq $38,%rax
mulq %r12
imul $38,%r13,%r13
movq %rax,%r12
addq %rdx,%r13
movq 64(%rsi),%rax
mulq 88(%rcx)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 72(%rsi),%rax
mulq 80(%rcx)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 80(%rsi),%rax
mulq 72(%rcx)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 88(%rsi),%rax
mulq 64(%rcx)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq $38,%rax
mulq %r8
imul $38,%r9,%r9
movq %rax,%r8
addq %rdx,%r9
movq 64(%rsi),%rax
mulq 64(%rcx)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 64(%rsi),%rax
mulq 72(%rcx)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 72(%rsi),%rax
mulq 64(%rcx)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 64(%rsi),%rax
mulq 80(%rcx)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 72(%rsi),%rax
mulq 72(%rcx)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 80(%rsi),%rax
mulq 64(%rcx)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %r9,%r10
adcq $0,%r11
addq %r11,%r12
adcq $0,%r13
addq %r13,%r14
adcq $0,%r15
shld $1,%r14,%r15
andq mask63(%rip),%r14
imul $19,%r15,%r15
addq %r15,%r8
adcq $0,%r10
adcq $0,%r12
adcq $0,%r14
// double: D = 2*Z1*Z2 (%rcx, no longer needed for q, becomes a temporary)
addq %r8,%r8
adcq %r10,%r10
adcq %r12,%r12
adcq %r14,%r14
movq $0,%rdx
movq $38,%rcx
cmovae %rdx,%rcx
addq %rcx,%r8
adcq %rdx,%r10
adcq %rdx,%r12
adcq %rdx,%r14
cmovc %rcx,%rdx
addq %rdx,%r8
// copy D
movq %r8,%r9
movq %r10,%r11
movq %r12,%r13
movq %r14,%r15
// add: G = D + C
addq 184(%rsp),%r9
adcq 192(%rsp),%r11
adcq 200(%rsp),%r13
adcq 208(%rsp),%r15
movq $0,%rdx
movq $38,%rax
cmovae %rdx,%rax
addq %rax,%r9
adcq %rdx,%r11
adcq %rdx,%r13
adcq %rdx,%r15
cmovc %rax,%rdx
addq %rdx,%r9
// sub: F = D - C
subq 184(%rsp),%r8
sbbq 192(%rsp),%r10
sbbq 200(%rsp),%r12
sbbq 208(%rsp),%r14
movq $0,%rdx
movq $38,%rax
cmovae %rdx,%rax
subq %rax,%r8
sbbq %rdx,%r10
sbbq %rdx,%r12
sbbq %rdx,%r14
cmovc %rax,%rdx
subq %rdx,%r8
// store G at 96..120(%rsp)
movq %r9,96(%rsp)
movq %r11,104(%rsp)
movq %r13,112(%rsp)
movq %r15,120(%rsp)
// store F at 160..184(%rsp)
movq %r8,160(%rsp)
movq %r10,168(%rsp)
movq %r12,176(%rsp)
movq %r14,184(%rsp)
/* p1p1 to p3 */
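// Pairwise multiplications turn the p1p1 result into p3 form:
//   X3 = E*F -> 0(%rdi)     Y3 = G*H -> 32(%rdi)
//   Z3 = F*G -> 64(%rdi)    T3 = E*H -> 96(%rdi)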
// mul: X3 = E*F
movq 72(%rsp),%rax
mulq 184(%rsp)
movq %rax,%r8
xorq %r9,%r9
movq %rdx,%r10
xorq %r11,%r11
movq 80(%rsp),%rax
mulq 176(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 88(%rsp),%rax
mulq 168(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 80(%rsp),%rax
mulq 184(%rsp)
addq %rax,%r10
adcq $0,%r11
movq %rdx,%r12
xorq %r13,%r13
movq 88(%rsp),%rax
mulq 176(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq $38,%rax
mulq %r10
imul $38,%r11,%r11
movq %rax,%r10
addq %rdx,%r11
movq 88(%rsp),%rax
mulq 184(%rsp)
addq %rax,%r12
adcq $0,%r13
movq $38,%rax
mulq %rdx
movq %rax,%r14
movq %rdx,%r15
movq $38,%rax
mulq %r12
imul $38,%r13,%r13
movq %rax,%r12
addq %rdx,%r13
movq 64(%rsp),%rax
mulq 184(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 72(%rsp),%rax
mulq 176(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 80(%rsp),%rax
mulq 168(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 88(%rsp),%rax
mulq 160(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq $38,%rax
mulq %r8
imul $38,%r9,%r9
movq %rax,%r8
addq %rdx,%r9
movq 64(%rsp),%rax
mulq 160(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 64(%rsp),%rax
mulq 168(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 72(%rsp),%rax
mulq 160(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 64(%rsp),%rax
mulq 176(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 72(%rsp),%rax
mulq 168(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 80(%rsp),%rax
mulq 160(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %r9,%r10
adcq $0,%r11
addq %r11,%r12
adcq $0,%r13
addq %r13,%r14
adcq $0,%r15
shld $1,%r14,%r15
andq mask63(%rip),%r14
imul $19,%r15,%r15
addq %r15,%r8
adcq $0,%r10
adcq $0,%r12
adcq $0,%r14
movq %r8,0(%rdi)
movq %r10,8(%rdi)
movq %r12,16(%rdi)
movq %r14,24(%rdi)
// mul: Y3 = G*H
movq 104(%rsp),%rax
mulq 152(%rsp)
movq %rax,%r8
xorq %r9,%r9
movq %rdx,%r10
xorq %r11,%r11
movq 112(%rsp),%rax
mulq 144(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 120(%rsp),%rax
mulq 136(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 112(%rsp),%rax
mulq 152(%rsp)
addq %rax,%r10
adcq $0,%r11
movq %rdx,%r12
xorq %r13,%r13
movq 120(%rsp),%rax
mulq 144(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq $38,%rax
mulq %r10
imul $38,%r11,%r11
movq %rax,%r10
addq %rdx,%r11
movq 120(%rsp),%rax
mulq 152(%rsp)
addq %rax,%r12
adcq $0,%r13
movq $38,%rax
mulq %rdx
movq %rax,%r14
movq %rdx,%r15
movq $38,%rax
mulq %r12
imul $38,%r13,%r13
movq %rax,%r12
addq %rdx,%r13
movq 96(%rsp),%rax
mulq 152(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 104(%rsp),%rax
mulq 144(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 112(%rsp),%rax
mulq 136(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 120(%rsp),%rax
mulq 128(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq $38,%rax
mulq %r8
imul $38,%r9,%r9
movq %rax,%r8
addq %rdx,%r9
movq 96(%rsp),%rax
mulq 128(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 96(%rsp),%rax
mulq 136(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 104(%rsp),%rax
mulq 128(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 96(%rsp),%rax
mulq 144(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 104(%rsp),%rax
mulq 136(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 112(%rsp),%rax
mulq 128(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %r9,%r10
adcq $0,%r11
addq %r11,%r12
adcq $0,%r13
addq %r13,%r14
adcq $0,%r15
shld $1,%r14,%r15
andq mask63(%rip),%r14
imul $19,%r15,%r15
addq %r15,%r8
adcq $0,%r10
adcq $0,%r12
adcq $0,%r14
movq %r8,32(%rdi)
movq %r10,40(%rdi)
movq %r12,48(%rdi)
movq %r14,56(%rdi)
// mul: Z3 = F*G
movq 104(%rsp),%rax
mulq 184(%rsp)
movq %rax,%r8
xorq %r9,%r9
movq %rdx,%r10
xorq %r11,%r11
movq 112(%rsp),%rax
mulq 176(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 120(%rsp),%rax
mulq 168(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 112(%rsp),%rax
mulq 184(%rsp)
addq %rax,%r10
adcq $0,%r11
movq %rdx,%r12
xorq %r13,%r13
movq 120(%rsp),%rax
mulq 176(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq $38,%rax
mulq %r10
imul $38,%r11,%r11
movq %rax,%r10
addq %rdx,%r11
movq 120(%rsp),%rax
mulq 184(%rsp)
addq %rax,%r12
adcq $0,%r13
movq $38,%rax
mulq %rdx
movq %rax,%r14
movq %rdx,%r15
movq $38,%rax
mulq %r12
imul $38,%r13,%r13
movq %rax,%r12
addq %rdx,%r13
movq 96(%rsp),%rax
mulq 184(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 104(%rsp),%rax
mulq 176(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 112(%rsp),%rax
mulq 168(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 120(%rsp),%rax
mulq 160(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq $38,%rax
mulq %r8
imul $38,%r9,%r9
movq %rax,%r8
addq %rdx,%r9
movq 96(%rsp),%rax
mulq 160(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 96(%rsp),%rax
mulq 168(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 104(%rsp),%rax
mulq 160(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 96(%rsp),%rax
mulq 176(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 104(%rsp),%rax
mulq 168(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 112(%rsp),%rax
mulq 160(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %r9,%r10
adcq $0,%r11
addq %r11,%r12
adcq $0,%r13
addq %r13,%r14
adcq $0,%r15
shld $1,%r14,%r15
andq mask63(%rip),%r14
imul $19,%r15,%r15
addq %r15,%r8
adcq $0,%r10
adcq $0,%r12
adcq $0,%r14
movq %r8,64(%rdi)
movq %r10,72(%rdi)
movq %r12,80(%rdi)
movq %r14,88(%rdi)
// mul: T3 = E*H
movq 72(%rsp),%rax
mulq 152(%rsp)
movq %rax,%r8
xorq %r9,%r9
movq %rdx,%r10
xorq %r11,%r11
movq 80(%rsp),%rax
mulq 144(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 88(%rsp),%rax
mulq 136(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 80(%rsp),%rax
mulq 152(%rsp)
addq %rax,%r10
adcq $0,%r11
movq %rdx,%r12
xorq %r13,%r13
movq 88(%rsp),%rax
mulq 144(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq $38,%rax
mulq %r10
imul $38,%r11,%r11
movq %rax,%r10
addq %rdx,%r11
movq 88(%rsp),%rax
mulq 152(%rsp)
addq %rax,%r12
adcq $0,%r13
movq $38,%rax
mulq %rdx
movq %rax,%r14
movq %rdx,%r15
movq $38,%rax
mulq %r12
imul $38,%r13,%r13
movq %rax,%r12
addq %rdx,%r13
movq 64(%rsp),%rax
mulq 152(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 72(%rsp),%rax
mulq 144(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 80(%rsp),%rax
mulq 136(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 88(%rsp),%rax
mulq 128(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq $38,%rax
mulq %r8
imul $38,%r9,%r9
movq %rax,%r8
addq %rdx,%r9
movq 64(%rsp),%rax
mulq 128(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 64(%rsp),%rax
mulq 136(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 72(%rsp),%rax
mulq 128(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 64(%rsp),%rax
mulq 144(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 72(%rsp),%rax
mulq 136(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 80(%rsp),%rax
mulq 128(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %r9,%r10
adcq $0,%r11
addq %r11,%r12
adcq $0,%r13
addq %r13,%r14
adcq $0,%r15
shld $1,%r14,%r15
andq mask63(%rip),%r14
imul $19,%r15,%r15
addq %r15,%r8
adcq $0,%r10
adcq $0,%r12
adcq $0,%r14
movq %r8,96(%rdi)
movq %r10,104(%rdi)
movq %r12,112(%rdi)
movq %r14,120(%rdi)
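// restore callee-saved registers and the caller's stack pointer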
movq 0(%rsp),%r11
movq 8(%rsp),%r12
movq 16(%rsp),%r13
movq 24(%rsp),%r14
movq 32(%rsp),%r15
movq 40(%rsp),%rbx
movq 48(%rsp),%rbp
movq %r11,%rsp
ret