Source release 14.0.0
1586  third_party/boringssl/kit/linux-x86_64/crypto/chacha/chacha-x86_64.S (vendored, new file; diff suppressed because it is too large)
3066  third_party/boringssl/kit/linux-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64.S (vendored, new file; diff suppressed because it is too large)
8974  third_party/boringssl/kit/linux-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64.S (vendored, new file; diff suppressed because it is too large)
2640  third_party/boringssl/kit/linux-x86_64/crypto/fipsmodule/aes-x86_64.S (vendored, new file; diff suppressed because it is too large)
834  third_party/boringssl/kit/linux-x86_64/crypto/fipsmodule/aesni-gcm-x86_64.S (vendored, new file)
@@ -0,0 +1,834 @@
|
||||
#if defined(__x86_64__) && !defined(OPENSSL_NO_ASM)
|
||||
.text
|
||||
|
||||
.type _aesni_ctr32_ghash_6x,@function
|
||||
.align 32
|
||||
_aesni_ctr32_ghash_6x:
|
||||
.cfi_startproc
|
||||
vmovdqu 32(%r11),%xmm2
|
||||
subq $6,%rdx
|
||||
vpxor %xmm4,%xmm4,%xmm4
|
||||
vmovdqu 0-128(%rcx),%xmm15
|
||||
vpaddb %xmm2,%xmm1,%xmm10
|
||||
vpaddb %xmm2,%xmm10,%xmm11
|
||||
vpaddb %xmm2,%xmm11,%xmm12
|
||||
vpaddb %xmm2,%xmm12,%xmm13
|
||||
vpaddb %xmm2,%xmm13,%xmm14
|
||||
vpxor %xmm15,%xmm1,%xmm9
|
||||
vmovdqu %xmm4,16+8(%rsp)
|
||||
jmp .Loop6x
|
||||
|
||||
.align 32
|
||||
.Loop6x:
|
||||
addl $100663296,%ebx
|
||||
jc .Lhandle_ctr32
|
||||
vmovdqu 0-32(%r9),%xmm3
|
||||
vpaddb %xmm2,%xmm14,%xmm1
|
||||
vpxor %xmm15,%xmm10,%xmm10
|
||||
vpxor %xmm15,%xmm11,%xmm11
|
||||
|
||||
.Lresume_ctr32:
|
||||
vmovdqu %xmm1,(%r8)
|
||||
vpclmulqdq $0x10,%xmm3,%xmm7,%xmm5
|
||||
vpxor %xmm15,%xmm12,%xmm12
|
||||
vmovups 16-128(%rcx),%xmm2
|
||||
vpclmulqdq $0x01,%xmm3,%xmm7,%xmm6
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
xorq %r12,%r12
|
||||
cmpq %r14,%r15
|
||||
|
||||
vaesenc %xmm2,%xmm9,%xmm9
|
||||
vmovdqu 48+8(%rsp),%xmm0
|
||||
vpxor %xmm15,%xmm13,%xmm13
|
||||
vpclmulqdq $0x00,%xmm3,%xmm7,%xmm1
|
||||
vaesenc %xmm2,%xmm10,%xmm10
|
||||
vpxor %xmm15,%xmm14,%xmm14
|
||||
setnc %r12b
|
||||
vpclmulqdq $0x11,%xmm3,%xmm7,%xmm7
|
||||
vaesenc %xmm2,%xmm11,%xmm11
|
||||
vmovdqu 16-32(%r9),%xmm3
|
||||
negq %r12
|
||||
vaesenc %xmm2,%xmm12,%xmm12
|
||||
vpxor %xmm5,%xmm6,%xmm6
|
||||
vpclmulqdq $0x00,%xmm3,%xmm0,%xmm5
|
||||
vpxor %xmm4,%xmm8,%xmm8
|
||||
vaesenc %xmm2,%xmm13,%xmm13
|
||||
vpxor %xmm5,%xmm1,%xmm4
|
||||
andq $0x60,%r12
|
||||
vmovups 32-128(%rcx),%xmm15
|
||||
vpclmulqdq $0x10,%xmm3,%xmm0,%xmm1
|
||||
vaesenc %xmm2,%xmm14,%xmm14
|
||||
|
||||
vpclmulqdq $0x01,%xmm3,%xmm0,%xmm2
|
||||
leaq (%r14,%r12,1),%r14
|
||||
vaesenc %xmm15,%xmm9,%xmm9
|
||||
vpxor 16+8(%rsp),%xmm8,%xmm8
|
||||
vpclmulqdq $0x11,%xmm3,%xmm0,%xmm3
|
||||
vmovdqu 64+8(%rsp),%xmm0
|
||||
vaesenc %xmm15,%xmm10,%xmm10
|
||||
movbeq 88(%r14),%r13
|
||||
vaesenc %xmm15,%xmm11,%xmm11
|
||||
movbeq 80(%r14),%r12
|
||||
vaesenc %xmm15,%xmm12,%xmm12
|
||||
movq %r13,32+8(%rsp)
|
||||
vaesenc %xmm15,%xmm13,%xmm13
|
||||
movq %r12,40+8(%rsp)
|
||||
vmovdqu 48-32(%r9),%xmm5
|
||||
vaesenc %xmm15,%xmm14,%xmm14
|
||||
|
||||
vmovups 48-128(%rcx),%xmm15
|
||||
vpxor %xmm1,%xmm6,%xmm6
|
||||
vpclmulqdq $0x00,%xmm5,%xmm0,%xmm1
|
||||
vaesenc %xmm15,%xmm9,%xmm9
|
||||
vpxor %xmm2,%xmm6,%xmm6
|
||||
vpclmulqdq $0x10,%xmm5,%xmm0,%xmm2
|
||||
vaesenc %xmm15,%xmm10,%xmm10
|
||||
vpxor %xmm3,%xmm7,%xmm7
|
||||
vpclmulqdq $0x01,%xmm5,%xmm0,%xmm3
|
||||
vaesenc %xmm15,%xmm11,%xmm11
|
||||
vpclmulqdq $0x11,%xmm5,%xmm0,%xmm5
|
||||
vmovdqu 80+8(%rsp),%xmm0
|
||||
vaesenc %xmm15,%xmm12,%xmm12
|
||||
vaesenc %xmm15,%xmm13,%xmm13
|
||||
vpxor %xmm1,%xmm4,%xmm4
|
||||
vmovdqu 64-32(%r9),%xmm1
|
||||
vaesenc %xmm15,%xmm14,%xmm14
|
||||
|
||||
vmovups 64-128(%rcx),%xmm15
|
||||
vpxor %xmm2,%xmm6,%xmm6
|
||||
vpclmulqdq $0x00,%xmm1,%xmm0,%xmm2
|
||||
vaesenc %xmm15,%xmm9,%xmm9
|
||||
vpxor %xmm3,%xmm6,%xmm6
|
||||
vpclmulqdq $0x10,%xmm1,%xmm0,%xmm3
|
||||
vaesenc %xmm15,%xmm10,%xmm10
|
||||
movbeq 72(%r14),%r13
|
||||
vpxor %xmm5,%xmm7,%xmm7
|
||||
vpclmulqdq $0x01,%xmm1,%xmm0,%xmm5
|
||||
vaesenc %xmm15,%xmm11,%xmm11
|
||||
movbeq 64(%r14),%r12
|
||||
vpclmulqdq $0x11,%xmm1,%xmm0,%xmm1
|
||||
vmovdqu 96+8(%rsp),%xmm0
|
||||
vaesenc %xmm15,%xmm12,%xmm12
|
||||
movq %r13,48+8(%rsp)
|
||||
vaesenc %xmm15,%xmm13,%xmm13
|
||||
movq %r12,56+8(%rsp)
|
||||
vpxor %xmm2,%xmm4,%xmm4
|
||||
vmovdqu 96-32(%r9),%xmm2
|
||||
vaesenc %xmm15,%xmm14,%xmm14
|
||||
|
||||
vmovups 80-128(%rcx),%xmm15
|
||||
vpxor %xmm3,%xmm6,%xmm6
|
||||
vpclmulqdq $0x00,%xmm2,%xmm0,%xmm3
|
||||
vaesenc %xmm15,%xmm9,%xmm9
|
||||
vpxor %xmm5,%xmm6,%xmm6
|
||||
vpclmulqdq $0x10,%xmm2,%xmm0,%xmm5
|
||||
vaesenc %xmm15,%xmm10,%xmm10
|
||||
movbeq 56(%r14),%r13
|
||||
vpxor %xmm1,%xmm7,%xmm7
|
||||
vpclmulqdq $0x01,%xmm2,%xmm0,%xmm1
|
||||
vpxor 112+8(%rsp),%xmm8,%xmm8
|
||||
vaesenc %xmm15,%xmm11,%xmm11
|
||||
movbeq 48(%r14),%r12
|
||||
vpclmulqdq $0x11,%xmm2,%xmm0,%xmm2
|
||||
vaesenc %xmm15,%xmm12,%xmm12
|
||||
movq %r13,64+8(%rsp)
|
||||
vaesenc %xmm15,%xmm13,%xmm13
|
||||
movq %r12,72+8(%rsp)
|
||||
vpxor %xmm3,%xmm4,%xmm4
|
||||
vmovdqu 112-32(%r9),%xmm3
|
||||
vaesenc %xmm15,%xmm14,%xmm14
|
||||
|
||||
vmovups 96-128(%rcx),%xmm15
|
||||
vpxor %xmm5,%xmm6,%xmm6
|
||||
vpclmulqdq $0x10,%xmm3,%xmm8,%xmm5
|
||||
vaesenc %xmm15,%xmm9,%xmm9
|
||||
vpxor %xmm1,%xmm6,%xmm6
|
||||
vpclmulqdq $0x01,%xmm3,%xmm8,%xmm1
|
||||
vaesenc %xmm15,%xmm10,%xmm10
|
||||
movbeq 40(%r14),%r13
|
||||
vpxor %xmm2,%xmm7,%xmm7
|
||||
vpclmulqdq $0x00,%xmm3,%xmm8,%xmm2
|
||||
vaesenc %xmm15,%xmm11,%xmm11
|
||||
movbeq 32(%r14),%r12
|
||||
vpclmulqdq $0x11,%xmm3,%xmm8,%xmm8
|
||||
vaesenc %xmm15,%xmm12,%xmm12
|
||||
movq %r13,80+8(%rsp)
|
||||
vaesenc %xmm15,%xmm13,%xmm13
|
||||
movq %r12,88+8(%rsp)
|
||||
vpxor %xmm5,%xmm6,%xmm6
|
||||
vaesenc %xmm15,%xmm14,%xmm14
|
||||
vpxor %xmm1,%xmm6,%xmm6
|
||||
|
||||
vmovups 112-128(%rcx),%xmm15
|
||||
vpslldq $8,%xmm6,%xmm5
|
||||
vpxor %xmm2,%xmm4,%xmm4
|
||||
vmovdqu 16(%r11),%xmm3
|
||||
|
||||
vaesenc %xmm15,%xmm9,%xmm9
|
||||
vpxor %xmm8,%xmm7,%xmm7
|
||||
vaesenc %xmm15,%xmm10,%xmm10
|
||||
vpxor %xmm5,%xmm4,%xmm4
|
||||
movbeq 24(%r14),%r13
|
||||
vaesenc %xmm15,%xmm11,%xmm11
|
||||
movbeq 16(%r14),%r12
|
||||
vpalignr $8,%xmm4,%xmm4,%xmm0
|
||||
vpclmulqdq $0x10,%xmm3,%xmm4,%xmm4
|
||||
movq %r13,96+8(%rsp)
|
||||
vaesenc %xmm15,%xmm12,%xmm12
|
||||
movq %r12,104+8(%rsp)
|
||||
vaesenc %xmm15,%xmm13,%xmm13
|
||||
vmovups 128-128(%rcx),%xmm1
|
||||
vaesenc %xmm15,%xmm14,%xmm14
|
||||
|
||||
vaesenc %xmm1,%xmm9,%xmm9
|
||||
vmovups 144-128(%rcx),%xmm15
|
||||
vaesenc %xmm1,%xmm10,%xmm10
|
||||
vpsrldq $8,%xmm6,%xmm6
|
||||
vaesenc %xmm1,%xmm11,%xmm11
|
||||
vpxor %xmm6,%xmm7,%xmm7
|
||||
vaesenc %xmm1,%xmm12,%xmm12
|
||||
vpxor %xmm0,%xmm4,%xmm4
|
||||
movbeq 8(%r14),%r13
|
||||
vaesenc %xmm1,%xmm13,%xmm13
|
||||
movbeq 0(%r14),%r12
|
||||
vaesenc %xmm1,%xmm14,%xmm14
|
||||
vmovups 160-128(%rcx),%xmm1
|
||||
cmpl $11,%ebp
|
||||
jb .Lenc_tail
|
||||
|
||||
vaesenc %xmm15,%xmm9,%xmm9
|
||||
vaesenc %xmm15,%xmm10,%xmm10
|
||||
vaesenc %xmm15,%xmm11,%xmm11
|
||||
vaesenc %xmm15,%xmm12,%xmm12
|
||||
vaesenc %xmm15,%xmm13,%xmm13
|
||||
vaesenc %xmm15,%xmm14,%xmm14
|
||||
|
||||
vaesenc %xmm1,%xmm9,%xmm9
|
||||
vaesenc %xmm1,%xmm10,%xmm10
|
||||
vaesenc %xmm1,%xmm11,%xmm11
|
||||
vaesenc %xmm1,%xmm12,%xmm12
|
||||
vaesenc %xmm1,%xmm13,%xmm13
|
||||
vmovups 176-128(%rcx),%xmm15
|
||||
vaesenc %xmm1,%xmm14,%xmm14
|
||||
vmovups 192-128(%rcx),%xmm1
|
||||
je .Lenc_tail
|
||||
|
||||
vaesenc %xmm15,%xmm9,%xmm9
|
||||
vaesenc %xmm15,%xmm10,%xmm10
|
||||
vaesenc %xmm15,%xmm11,%xmm11
|
||||
vaesenc %xmm15,%xmm12,%xmm12
|
||||
vaesenc %xmm15,%xmm13,%xmm13
|
||||
vaesenc %xmm15,%xmm14,%xmm14
|
||||
|
||||
vaesenc %xmm1,%xmm9,%xmm9
|
||||
vaesenc %xmm1,%xmm10,%xmm10
|
||||
vaesenc %xmm1,%xmm11,%xmm11
|
||||
vaesenc %xmm1,%xmm12,%xmm12
|
||||
vaesenc %xmm1,%xmm13,%xmm13
|
||||
vmovups 208-128(%rcx),%xmm15
|
||||
vaesenc %xmm1,%xmm14,%xmm14
|
||||
vmovups 224-128(%rcx),%xmm1
|
||||
jmp .Lenc_tail
|
||||
|
||||
.align 32
|
||||
.Lhandle_ctr32:
|
||||
vmovdqu (%r11),%xmm0
|
||||
vpshufb %xmm0,%xmm1,%xmm6
|
||||
vmovdqu 48(%r11),%xmm5
|
||||
vpaddd 64(%r11),%xmm6,%xmm10
|
||||
vpaddd %xmm5,%xmm6,%xmm11
|
||||
vmovdqu 0-32(%r9),%xmm3
|
||||
vpaddd %xmm5,%xmm10,%xmm12
|
||||
vpshufb %xmm0,%xmm10,%xmm10
|
||||
vpaddd %xmm5,%xmm11,%xmm13
|
||||
vpshufb %xmm0,%xmm11,%xmm11
|
||||
vpxor %xmm15,%xmm10,%xmm10
|
||||
vpaddd %xmm5,%xmm12,%xmm14
|
||||
vpshufb %xmm0,%xmm12,%xmm12
|
||||
vpxor %xmm15,%xmm11,%xmm11
|
||||
vpaddd %xmm5,%xmm13,%xmm1
|
||||
vpshufb %xmm0,%xmm13,%xmm13
|
||||
vpshufb %xmm0,%xmm14,%xmm14
|
||||
vpshufb %xmm0,%xmm1,%xmm1
|
||||
jmp .Lresume_ctr32
|
||||
|
||||
.align 32
|
||||
.Lenc_tail:
|
||||
vaesenc %xmm15,%xmm9,%xmm9
|
||||
vmovdqu %xmm7,16+8(%rsp)
|
||||
vpalignr $8,%xmm4,%xmm4,%xmm8
|
||||
vaesenc %xmm15,%xmm10,%xmm10
|
||||
vpclmulqdq $0x10,%xmm3,%xmm4,%xmm4
|
||||
vpxor 0(%rdi),%xmm1,%xmm2
|
||||
vaesenc %xmm15,%xmm11,%xmm11
|
||||
vpxor 16(%rdi),%xmm1,%xmm0
|
||||
vaesenc %xmm15,%xmm12,%xmm12
|
||||
vpxor 32(%rdi),%xmm1,%xmm5
|
||||
vaesenc %xmm15,%xmm13,%xmm13
|
||||
vpxor 48(%rdi),%xmm1,%xmm6
|
||||
vaesenc %xmm15,%xmm14,%xmm14
|
||||
vpxor 64(%rdi),%xmm1,%xmm7
|
||||
vpxor 80(%rdi),%xmm1,%xmm3
|
||||
vmovdqu (%r8),%xmm1
|
||||
|
||||
vaesenclast %xmm2,%xmm9,%xmm9
|
||||
vmovdqu 32(%r11),%xmm2
|
||||
vaesenclast %xmm0,%xmm10,%xmm10
|
||||
vpaddb %xmm2,%xmm1,%xmm0
|
||||
movq %r13,112+8(%rsp)
|
||||
leaq 96(%rdi),%rdi
|
||||
vaesenclast %xmm5,%xmm11,%xmm11
|
||||
vpaddb %xmm2,%xmm0,%xmm5
|
||||
movq %r12,120+8(%rsp)
|
||||
leaq 96(%rsi),%rsi
|
||||
vmovdqu 0-128(%rcx),%xmm15
|
||||
vaesenclast %xmm6,%xmm12,%xmm12
|
||||
vpaddb %xmm2,%xmm5,%xmm6
|
||||
vaesenclast %xmm7,%xmm13,%xmm13
|
||||
vpaddb %xmm2,%xmm6,%xmm7
|
||||
vaesenclast %xmm3,%xmm14,%xmm14
|
||||
vpaddb %xmm2,%xmm7,%xmm3
|
||||
|
||||
addq $0x60,%r10
|
||||
subq $0x6,%rdx
|
||||
jc .L6x_done
|
||||
|
||||
vmovups %xmm9,-96(%rsi)
|
||||
vpxor %xmm15,%xmm1,%xmm9
|
||||
vmovups %xmm10,-80(%rsi)
|
||||
vmovdqa %xmm0,%xmm10
|
||||
vmovups %xmm11,-64(%rsi)
|
||||
vmovdqa %xmm5,%xmm11
|
||||
vmovups %xmm12,-48(%rsi)
|
||||
vmovdqa %xmm6,%xmm12
|
||||
vmovups %xmm13,-32(%rsi)
|
||||
vmovdqa %xmm7,%xmm13
|
||||
vmovups %xmm14,-16(%rsi)
|
||||
vmovdqa %xmm3,%xmm14
|
||||
vmovdqu 32+8(%rsp),%xmm7
|
||||
jmp .Loop6x
|
||||
|
||||
.L6x_done:
|
||||
vpxor 16+8(%rsp),%xmm8,%xmm8
|
||||
vpxor %xmm4,%xmm8,%xmm8
|
||||
|
||||
.byte 0xf3,0xc3
|
||||
.cfi_endproc
|
||||
.size _aesni_ctr32_ghash_6x,.-_aesni_ctr32_ghash_6x
|
||||
.globl aesni_gcm_decrypt
|
||||
.hidden aesni_gcm_decrypt
|
||||
.type aesni_gcm_decrypt,@function
|
||||
.align 32
|
||||
aesni_gcm_decrypt:
|
||||
.cfi_startproc
|
||||
xorq %r10,%r10
|
||||
|
||||
|
||||
|
||||
cmpq $0x60,%rdx
|
||||
jb .Lgcm_dec_abort
|
||||
|
||||
leaq (%rsp),%rax
|
||||
.cfi_def_cfa_register %rax
|
||||
pushq %rbx
|
||||
.cfi_offset %rbx,-16
|
||||
pushq %rbp
|
||||
.cfi_offset %rbp,-24
|
||||
pushq %r12
|
||||
.cfi_offset %r12,-32
|
||||
pushq %r13
|
||||
.cfi_offset %r13,-40
|
||||
pushq %r14
|
||||
.cfi_offset %r14,-48
|
||||
pushq %r15
|
||||
.cfi_offset %r15,-56
|
||||
vzeroupper
|
||||
|
||||
vmovdqu (%r8),%xmm1
|
||||
addq $-128,%rsp
|
||||
movl 12(%r8),%ebx
|
||||
leaq .Lbswap_mask(%rip),%r11
|
||||
leaq -128(%rcx),%r14
|
||||
movq $0xf80,%r15
|
||||
vmovdqu (%r9),%xmm8
|
||||
andq $-128,%rsp
|
||||
vmovdqu (%r11),%xmm0
|
||||
leaq 128(%rcx),%rcx
|
||||
leaq 32+32(%r9),%r9
|
||||
movl 240-128(%rcx),%ebp
|
||||
vpshufb %xmm0,%xmm8,%xmm8
|
||||
|
||||
andq %r15,%r14
|
||||
andq %rsp,%r15
|
||||
subq %r14,%r15
|
||||
jc .Ldec_no_key_aliasing
|
||||
cmpq $768,%r15
|
||||
jnc .Ldec_no_key_aliasing
|
||||
subq %r15,%rsp
|
||||
.Ldec_no_key_aliasing:
|
||||
|
||||
vmovdqu 80(%rdi),%xmm7
|
||||
leaq (%rdi),%r14
|
||||
vmovdqu 64(%rdi),%xmm4
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
leaq -192(%rdi,%rdx,1),%r15
|
||||
|
||||
vmovdqu 48(%rdi),%xmm5
|
||||
shrq $4,%rdx
|
||||
xorq %r10,%r10
|
||||
vmovdqu 32(%rdi),%xmm6
|
||||
vpshufb %xmm0,%xmm7,%xmm7
|
||||
vmovdqu 16(%rdi),%xmm2
|
||||
vpshufb %xmm0,%xmm4,%xmm4
|
||||
vmovdqu (%rdi),%xmm3
|
||||
vpshufb %xmm0,%xmm5,%xmm5
|
||||
vmovdqu %xmm4,48(%rsp)
|
||||
vpshufb %xmm0,%xmm6,%xmm6
|
||||
vmovdqu %xmm5,64(%rsp)
|
||||
vpshufb %xmm0,%xmm2,%xmm2
|
||||
vmovdqu %xmm6,80(%rsp)
|
||||
vpshufb %xmm0,%xmm3,%xmm3
|
||||
vmovdqu %xmm2,96(%rsp)
|
||||
vmovdqu %xmm3,112(%rsp)
|
||||
|
||||
call _aesni_ctr32_ghash_6x
|
||||
|
||||
vmovups %xmm9,-96(%rsi)
|
||||
vmovups %xmm10,-80(%rsi)
|
||||
vmovups %xmm11,-64(%rsi)
|
||||
vmovups %xmm12,-48(%rsi)
|
||||
vmovups %xmm13,-32(%rsi)
|
||||
vmovups %xmm14,-16(%rsi)
|
||||
|
||||
vpshufb (%r11),%xmm8,%xmm8
|
||||
vmovdqu %xmm8,-64(%r9)
|
||||
|
||||
vzeroupper
|
||||
movq -48(%rax),%r15
|
||||
.cfi_restore %r15
|
||||
movq -40(%rax),%r14
|
||||
.cfi_restore %r14
|
||||
movq -32(%rax),%r13
|
||||
.cfi_restore %r13
|
||||
movq -24(%rax),%r12
|
||||
.cfi_restore %r12
|
||||
movq -16(%rax),%rbp
|
||||
.cfi_restore %rbp
|
||||
movq -8(%rax),%rbx
|
||||
.cfi_restore %rbx
|
||||
leaq (%rax),%rsp
|
||||
.cfi_def_cfa_register %rsp
|
||||
.Lgcm_dec_abort:
|
||||
movq %r10,%rax
|
||||
.byte 0xf3,0xc3
|
||||
.cfi_endproc
|
||||
.size aesni_gcm_decrypt,.-aesni_gcm_decrypt
|
||||
.type _aesni_ctr32_6x,@function
|
||||
.align 32
|
||||
_aesni_ctr32_6x:
|
||||
.cfi_startproc
|
||||
vmovdqu 0-128(%rcx),%xmm4
|
||||
vmovdqu 32(%r11),%xmm2
|
||||
leaq -1(%rbp),%r13
|
||||
vmovups 16-128(%rcx),%xmm15
|
||||
leaq 32-128(%rcx),%r12
|
||||
vpxor %xmm4,%xmm1,%xmm9
|
||||
addl $100663296,%ebx
|
||||
jc .Lhandle_ctr32_2
|
||||
vpaddb %xmm2,%xmm1,%xmm10
|
||||
vpaddb %xmm2,%xmm10,%xmm11
|
||||
vpxor %xmm4,%xmm10,%xmm10
|
||||
vpaddb %xmm2,%xmm11,%xmm12
|
||||
vpxor %xmm4,%xmm11,%xmm11
|
||||
vpaddb %xmm2,%xmm12,%xmm13
|
||||
vpxor %xmm4,%xmm12,%xmm12
|
||||
vpaddb %xmm2,%xmm13,%xmm14
|
||||
vpxor %xmm4,%xmm13,%xmm13
|
||||
vpaddb %xmm2,%xmm14,%xmm1
|
||||
vpxor %xmm4,%xmm14,%xmm14
|
||||
jmp .Loop_ctr32
|
||||
|
||||
.align 16
|
||||
.Loop_ctr32:
|
||||
vaesenc %xmm15,%xmm9,%xmm9
|
||||
vaesenc %xmm15,%xmm10,%xmm10
|
||||
vaesenc %xmm15,%xmm11,%xmm11
|
||||
vaesenc %xmm15,%xmm12,%xmm12
|
||||
vaesenc %xmm15,%xmm13,%xmm13
|
||||
vaesenc %xmm15,%xmm14,%xmm14
|
||||
vmovups (%r12),%xmm15
|
||||
leaq 16(%r12),%r12
|
||||
decl %r13d
|
||||
jnz .Loop_ctr32
|
||||
|
||||
vmovdqu (%r12),%xmm3
|
||||
vaesenc %xmm15,%xmm9,%xmm9
|
||||
vpxor 0(%rdi),%xmm3,%xmm4
|
||||
vaesenc %xmm15,%xmm10,%xmm10
|
||||
vpxor 16(%rdi),%xmm3,%xmm5
|
||||
vaesenc %xmm15,%xmm11,%xmm11
|
||||
vpxor 32(%rdi),%xmm3,%xmm6
|
||||
vaesenc %xmm15,%xmm12,%xmm12
|
||||
vpxor 48(%rdi),%xmm3,%xmm8
|
||||
vaesenc %xmm15,%xmm13,%xmm13
|
||||
vpxor 64(%rdi),%xmm3,%xmm2
|
||||
vaesenc %xmm15,%xmm14,%xmm14
|
||||
vpxor 80(%rdi),%xmm3,%xmm3
|
||||
leaq 96(%rdi),%rdi
|
||||
|
||||
vaesenclast %xmm4,%xmm9,%xmm9
|
||||
vaesenclast %xmm5,%xmm10,%xmm10
|
||||
vaesenclast %xmm6,%xmm11,%xmm11
|
||||
vaesenclast %xmm8,%xmm12,%xmm12
|
||||
vaesenclast %xmm2,%xmm13,%xmm13
|
||||
vaesenclast %xmm3,%xmm14,%xmm14
|
||||
vmovups %xmm9,0(%rsi)
|
||||
vmovups %xmm10,16(%rsi)
|
||||
vmovups %xmm11,32(%rsi)
|
||||
vmovups %xmm12,48(%rsi)
|
||||
vmovups %xmm13,64(%rsi)
|
||||
vmovups %xmm14,80(%rsi)
|
||||
leaq 96(%rsi),%rsi
|
||||
|
||||
.byte 0xf3,0xc3
|
||||
.align 32
|
||||
.Lhandle_ctr32_2:
|
||||
vpshufb %xmm0,%xmm1,%xmm6
|
||||
vmovdqu 48(%r11),%xmm5
|
||||
vpaddd 64(%r11),%xmm6,%xmm10
|
||||
vpaddd %xmm5,%xmm6,%xmm11
|
||||
vpaddd %xmm5,%xmm10,%xmm12
|
||||
vpshufb %xmm0,%xmm10,%xmm10
|
||||
vpaddd %xmm5,%xmm11,%xmm13
|
||||
vpshufb %xmm0,%xmm11,%xmm11
|
||||
vpxor %xmm4,%xmm10,%xmm10
|
||||
vpaddd %xmm5,%xmm12,%xmm14
|
||||
vpshufb %xmm0,%xmm12,%xmm12
|
||||
vpxor %xmm4,%xmm11,%xmm11
|
||||
vpaddd %xmm5,%xmm13,%xmm1
|
||||
vpshufb %xmm0,%xmm13,%xmm13
|
||||
vpxor %xmm4,%xmm12,%xmm12
|
||||
vpshufb %xmm0,%xmm14,%xmm14
|
||||
vpxor %xmm4,%xmm13,%xmm13
|
||||
vpshufb %xmm0,%xmm1,%xmm1
|
||||
vpxor %xmm4,%xmm14,%xmm14
|
||||
jmp .Loop_ctr32
|
||||
.cfi_endproc
|
||||
.size _aesni_ctr32_6x,.-_aesni_ctr32_6x
|
||||
|
||||
.globl aesni_gcm_encrypt
|
||||
.hidden aesni_gcm_encrypt
|
||||
.type aesni_gcm_encrypt,@function
|
||||
.align 32
|
||||
aesni_gcm_encrypt:
|
||||
.cfi_startproc
|
||||
xorq %r10,%r10
|
||||
|
||||
|
||||
|
||||
|
||||
cmpq $288,%rdx
|
||||
jb .Lgcm_enc_abort
|
||||
|
||||
leaq (%rsp),%rax
|
||||
.cfi_def_cfa_register %rax
|
||||
pushq %rbx
|
||||
.cfi_offset %rbx,-16
|
||||
pushq %rbp
|
||||
.cfi_offset %rbp,-24
|
||||
pushq %r12
|
||||
.cfi_offset %r12,-32
|
||||
pushq %r13
|
||||
.cfi_offset %r13,-40
|
||||
pushq %r14
|
||||
.cfi_offset %r14,-48
|
||||
pushq %r15
|
||||
.cfi_offset %r15,-56
|
||||
vzeroupper
|
||||
|
||||
vmovdqu (%r8),%xmm1
|
||||
addq $-128,%rsp
|
||||
movl 12(%r8),%ebx
|
||||
leaq .Lbswap_mask(%rip),%r11
|
||||
leaq -128(%rcx),%r14
|
||||
movq $0xf80,%r15
|
||||
leaq 128(%rcx),%rcx
|
||||
vmovdqu (%r11),%xmm0
|
||||
andq $-128,%rsp
|
||||
movl 240-128(%rcx),%ebp
|
||||
|
||||
andq %r15,%r14
|
||||
andq %rsp,%r15
|
||||
subq %r14,%r15
|
||||
jc .Lenc_no_key_aliasing
|
||||
cmpq $768,%r15
|
||||
jnc .Lenc_no_key_aliasing
|
||||
subq %r15,%rsp
|
||||
.Lenc_no_key_aliasing:
|
||||
|
||||
leaq (%rsi),%r14
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
leaq -192(%rsi,%rdx,1),%r15
|
||||
|
||||
shrq $4,%rdx
|
||||
|
||||
call _aesni_ctr32_6x
|
||||
vpshufb %xmm0,%xmm9,%xmm8
|
||||
vpshufb %xmm0,%xmm10,%xmm2
|
||||
vmovdqu %xmm8,112(%rsp)
|
||||
vpshufb %xmm0,%xmm11,%xmm4
|
||||
vmovdqu %xmm2,96(%rsp)
|
||||
vpshufb %xmm0,%xmm12,%xmm5
|
||||
vmovdqu %xmm4,80(%rsp)
|
||||
vpshufb %xmm0,%xmm13,%xmm6
|
||||
vmovdqu %xmm5,64(%rsp)
|
||||
vpshufb %xmm0,%xmm14,%xmm7
|
||||
vmovdqu %xmm6,48(%rsp)
|
||||
|
||||
call _aesni_ctr32_6x
|
||||
|
||||
vmovdqu (%r9),%xmm8
|
||||
leaq 32+32(%r9),%r9
|
||||
subq $12,%rdx
|
||||
movq $192,%r10
|
||||
vpshufb %xmm0,%xmm8,%xmm8
|
||||
|
||||
call _aesni_ctr32_ghash_6x
|
||||
vmovdqu 32(%rsp),%xmm7
|
||||
vmovdqu (%r11),%xmm0
|
||||
vmovdqu 0-32(%r9),%xmm3
|
||||
vpunpckhqdq %xmm7,%xmm7,%xmm1
|
||||
vmovdqu 32-32(%r9),%xmm15
|
||||
vmovups %xmm9,-96(%rsi)
|
||||
vpshufb %xmm0,%xmm9,%xmm9
|
||||
vpxor %xmm7,%xmm1,%xmm1
|
||||
vmovups %xmm10,-80(%rsi)
|
||||
vpshufb %xmm0,%xmm10,%xmm10
|
||||
vmovups %xmm11,-64(%rsi)
|
||||
vpshufb %xmm0,%xmm11,%xmm11
|
||||
vmovups %xmm12,-48(%rsi)
|
||||
vpshufb %xmm0,%xmm12,%xmm12
|
||||
vmovups %xmm13,-32(%rsi)
|
||||
vpshufb %xmm0,%xmm13,%xmm13
|
||||
vmovups %xmm14,-16(%rsi)
|
||||
vpshufb %xmm0,%xmm14,%xmm14
|
||||
vmovdqu %xmm9,16(%rsp)
|
||||
vmovdqu 48(%rsp),%xmm6
|
||||
vmovdqu 16-32(%r9),%xmm0
|
||||
vpunpckhqdq %xmm6,%xmm6,%xmm2
|
||||
vpclmulqdq $0x00,%xmm3,%xmm7,%xmm5
|
||||
vpxor %xmm6,%xmm2,%xmm2
|
||||
vpclmulqdq $0x11,%xmm3,%xmm7,%xmm7
|
||||
vpclmulqdq $0x00,%xmm15,%xmm1,%xmm1
|
||||
|
||||
vmovdqu 64(%rsp),%xmm9
|
||||
vpclmulqdq $0x00,%xmm0,%xmm6,%xmm4
|
||||
vmovdqu 48-32(%r9),%xmm3
|
||||
vpxor %xmm5,%xmm4,%xmm4
|
||||
vpunpckhqdq %xmm9,%xmm9,%xmm5
|
||||
vpclmulqdq $0x11,%xmm0,%xmm6,%xmm6
|
||||
vpxor %xmm9,%xmm5,%xmm5
|
||||
vpxor %xmm7,%xmm6,%xmm6
|
||||
vpclmulqdq $0x10,%xmm15,%xmm2,%xmm2
|
||||
vmovdqu 80-32(%r9),%xmm15
|
||||
vpxor %xmm1,%xmm2,%xmm2
|
||||
|
||||
vmovdqu 80(%rsp),%xmm1
|
||||
vpclmulqdq $0x00,%xmm3,%xmm9,%xmm7
|
||||
vmovdqu 64-32(%r9),%xmm0
|
||||
vpxor %xmm4,%xmm7,%xmm7
|
||||
vpunpckhqdq %xmm1,%xmm1,%xmm4
|
||||
vpclmulqdq $0x11,%xmm3,%xmm9,%xmm9
|
||||
vpxor %xmm1,%xmm4,%xmm4
|
||||
vpxor %xmm6,%xmm9,%xmm9
|
||||
vpclmulqdq $0x00,%xmm15,%xmm5,%xmm5
|
||||
vpxor %xmm2,%xmm5,%xmm5
|
||||
|
||||
vmovdqu 96(%rsp),%xmm2
|
||||
vpclmulqdq $0x00,%xmm0,%xmm1,%xmm6
|
||||
vmovdqu 96-32(%r9),%xmm3
|
||||
vpxor %xmm7,%xmm6,%xmm6
|
||||
vpunpckhqdq %xmm2,%xmm2,%xmm7
|
||||
vpclmulqdq $0x11,%xmm0,%xmm1,%xmm1
|
||||
vpxor %xmm2,%xmm7,%xmm7
|
||||
vpxor %xmm9,%xmm1,%xmm1
|
||||
vpclmulqdq $0x10,%xmm15,%xmm4,%xmm4
|
||||
vmovdqu 128-32(%r9),%xmm15
|
||||
vpxor %xmm5,%xmm4,%xmm4
|
||||
|
||||
vpxor 112(%rsp),%xmm8,%xmm8
|
||||
vpclmulqdq $0x00,%xmm3,%xmm2,%xmm5
|
||||
vmovdqu 112-32(%r9),%xmm0
|
||||
vpunpckhqdq %xmm8,%xmm8,%xmm9
|
||||
vpxor %xmm6,%xmm5,%xmm5
|
||||
vpclmulqdq $0x11,%xmm3,%xmm2,%xmm2
|
||||
vpxor %xmm8,%xmm9,%xmm9
|
||||
vpxor %xmm1,%xmm2,%xmm2
|
||||
vpclmulqdq $0x00,%xmm15,%xmm7,%xmm7
|
||||
vpxor %xmm4,%xmm7,%xmm4
|
||||
|
||||
vpclmulqdq $0x00,%xmm0,%xmm8,%xmm6
|
||||
vmovdqu 0-32(%r9),%xmm3
|
||||
vpunpckhqdq %xmm14,%xmm14,%xmm1
|
||||
vpclmulqdq $0x11,%xmm0,%xmm8,%xmm8
|
||||
vpxor %xmm14,%xmm1,%xmm1
|
||||
vpxor %xmm5,%xmm6,%xmm5
|
||||
vpclmulqdq $0x10,%xmm15,%xmm9,%xmm9
|
||||
vmovdqu 32-32(%r9),%xmm15
|
||||
vpxor %xmm2,%xmm8,%xmm7
|
||||
vpxor %xmm4,%xmm9,%xmm6
|
||||
|
||||
vmovdqu 16-32(%r9),%xmm0
|
||||
vpxor %xmm5,%xmm7,%xmm9
|
||||
vpclmulqdq $0x00,%xmm3,%xmm14,%xmm4
|
||||
vpxor %xmm9,%xmm6,%xmm6
|
||||
vpunpckhqdq %xmm13,%xmm13,%xmm2
|
||||
vpclmulqdq $0x11,%xmm3,%xmm14,%xmm14
|
||||
vpxor %xmm13,%xmm2,%xmm2
|
||||
vpslldq $8,%xmm6,%xmm9
|
||||
vpclmulqdq $0x00,%xmm15,%xmm1,%xmm1
|
||||
vpxor %xmm9,%xmm5,%xmm8
|
||||
vpsrldq $8,%xmm6,%xmm6
|
||||
vpxor %xmm6,%xmm7,%xmm7
|
||||
|
||||
vpclmulqdq $0x00,%xmm0,%xmm13,%xmm5
|
||||
vmovdqu 48-32(%r9),%xmm3
|
||||
vpxor %xmm4,%xmm5,%xmm5
|
||||
vpunpckhqdq %xmm12,%xmm12,%xmm9
|
||||
vpclmulqdq $0x11,%xmm0,%xmm13,%xmm13
|
||||
vpxor %xmm12,%xmm9,%xmm9
|
||||
vpxor %xmm14,%xmm13,%xmm13
|
||||
vpalignr $8,%xmm8,%xmm8,%xmm14
|
||||
vpclmulqdq $0x10,%xmm15,%xmm2,%xmm2
|
||||
vmovdqu 80-32(%r9),%xmm15
|
||||
vpxor %xmm1,%xmm2,%xmm2
|
||||
|
||||
vpclmulqdq $0x00,%xmm3,%xmm12,%xmm4
|
||||
vmovdqu 64-32(%r9),%xmm0
|
||||
vpxor %xmm5,%xmm4,%xmm4
|
||||
vpunpckhqdq %xmm11,%xmm11,%xmm1
|
||||
vpclmulqdq $0x11,%xmm3,%xmm12,%xmm12
|
||||
vpxor %xmm11,%xmm1,%xmm1
|
||||
vpxor %xmm13,%xmm12,%xmm12
|
||||
vxorps 16(%rsp),%xmm7,%xmm7
|
||||
vpclmulqdq $0x00,%xmm15,%xmm9,%xmm9
|
||||
vpxor %xmm2,%xmm9,%xmm9
|
||||
|
||||
vpclmulqdq $0x10,16(%r11),%xmm8,%xmm8
|
||||
vxorps %xmm14,%xmm8,%xmm8
|
||||
|
||||
vpclmulqdq $0x00,%xmm0,%xmm11,%xmm5
|
||||
vmovdqu 96-32(%r9),%xmm3
|
||||
vpxor %xmm4,%xmm5,%xmm5
|
||||
vpunpckhqdq %xmm10,%xmm10,%xmm2
|
||||
vpclmulqdq $0x11,%xmm0,%xmm11,%xmm11
|
||||
vpxor %xmm10,%xmm2,%xmm2
|
||||
vpalignr $8,%xmm8,%xmm8,%xmm14
|
||||
vpxor %xmm12,%xmm11,%xmm11
|
||||
vpclmulqdq $0x10,%xmm15,%xmm1,%xmm1
|
||||
vmovdqu 128-32(%r9),%xmm15
|
||||
vpxor %xmm9,%xmm1,%xmm1
|
||||
|
||||
vxorps %xmm7,%xmm14,%xmm14
|
||||
vpclmulqdq $0x10,16(%r11),%xmm8,%xmm8
|
||||
vxorps %xmm14,%xmm8,%xmm8
|
||||
|
||||
vpclmulqdq $0x00,%xmm3,%xmm10,%xmm4
|
||||
vmovdqu 112-32(%r9),%xmm0
|
||||
vpxor %xmm5,%xmm4,%xmm4
|
||||
vpunpckhqdq %xmm8,%xmm8,%xmm9
|
||||
vpclmulqdq $0x11,%xmm3,%xmm10,%xmm10
|
||||
vpxor %xmm8,%xmm9,%xmm9
|
||||
vpxor %xmm11,%xmm10,%xmm10
|
||||
vpclmulqdq $0x00,%xmm15,%xmm2,%xmm2
|
||||
vpxor %xmm1,%xmm2,%xmm2
|
||||
|
||||
vpclmulqdq $0x00,%xmm0,%xmm8,%xmm5
|
||||
vpclmulqdq $0x11,%xmm0,%xmm8,%xmm7
|
||||
vpxor %xmm4,%xmm5,%xmm5
|
||||
vpclmulqdq $0x10,%xmm15,%xmm9,%xmm6
|
||||
vpxor %xmm10,%xmm7,%xmm7
|
||||
vpxor %xmm2,%xmm6,%xmm6
|
||||
|
||||
vpxor %xmm5,%xmm7,%xmm4
|
||||
vpxor %xmm4,%xmm6,%xmm6
|
||||
vpslldq $8,%xmm6,%xmm1
|
||||
vmovdqu 16(%r11),%xmm3
|
||||
vpsrldq $8,%xmm6,%xmm6
|
||||
vpxor %xmm1,%xmm5,%xmm8
|
||||
vpxor %xmm6,%xmm7,%xmm7
|
||||
|
||||
vpalignr $8,%xmm8,%xmm8,%xmm2
|
||||
vpclmulqdq $0x10,%xmm3,%xmm8,%xmm8
|
||||
vpxor %xmm2,%xmm8,%xmm8
|
||||
|
||||
vpalignr $8,%xmm8,%xmm8,%xmm2
|
||||
vpclmulqdq $0x10,%xmm3,%xmm8,%xmm8
|
||||
vpxor %xmm7,%xmm2,%xmm2
|
||||
vpxor %xmm2,%xmm8,%xmm8
|
||||
vpshufb (%r11),%xmm8,%xmm8
|
||||
vmovdqu %xmm8,-64(%r9)
|
||||
|
||||
vzeroupper
|
||||
movq -48(%rax),%r15
|
||||
.cfi_restore %r15
|
||||
movq -40(%rax),%r14
|
||||
.cfi_restore %r14
|
||||
movq -32(%rax),%r13
|
||||
.cfi_restore %r13
|
||||
movq -24(%rax),%r12
|
||||
.cfi_restore %r12
|
||||
movq -16(%rax),%rbp
|
||||
.cfi_restore %rbp
|
||||
movq -8(%rax),%rbx
|
||||
.cfi_restore %rbx
|
||||
leaq (%rax),%rsp
|
||||
.cfi_def_cfa_register %rsp
|
||||
.Lgcm_enc_abort:
|
||||
movq %r10,%rax
|
||||
.byte 0xf3,0xc3
|
||||
.cfi_endproc
|
||||
.size aesni_gcm_encrypt,.-aesni_gcm_encrypt
|
||||
.align 64
|
||||
.Lbswap_mask:
|
||||
.byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
|
||||
.Lpoly:
|
||||
.byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0xc2
|
||||
.Lone_msb:
|
||||
.byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
|
||||
.Ltwo_lsb:
|
||||
.byte 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
|
||||
.Lone_lsb:
|
||||
.byte 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
|
||||
.byte 65,69,83,45,78,73,32,71,67,77,32,109,111,100,117,108,101,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
|
||||
.align 64
|
||||
#endif
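The routines in this file (_aesni_ctr32_ghash_6x, aesni_gcm_encrypt, aesni_gcm_decrypt) are internal AES-NI plus PCLMULQDQ "stitched" AES-GCM kernels and are not called directly by applications. Below is a minimal sketch, not part of this commit, of how this code path is normally reached through BoringSSL's public EVP_AEAD interface; the key, nonce, and message values are placeholders, and whether the assembly above is actually selected depends on runtime CPU feature detection.

/* Sketch only: seal a message with AES-128-GCM via BoringSSL's public API. */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#include <openssl/aead.h>

int main(void) {
  const uint8_t key[16] = {0};    /* placeholder key; use real random key material */
  const uint8_t nonce[12] = {0};  /* 96-bit GCM nonce; must be unique per key */
  const uint8_t msg[] = "hello, gcm";

  EVP_AEAD_CTX ctx;
  if (!EVP_AEAD_CTX_init(&ctx, EVP_aead_aes_128_gcm(), key, sizeof(key),
                         EVP_AEAD_DEFAULT_TAG_LENGTH, NULL)) {
    return 1;
  }

  uint8_t out[sizeof(msg) + EVP_AEAD_MAX_OVERHEAD];
  size_t out_len;
  if (!EVP_AEAD_CTX_seal(&ctx, out, &out_len, sizeof(out),
                         nonce, sizeof(nonce), msg, sizeof(msg), NULL, 0)) {
    EVP_AEAD_CTX_cleanup(&ctx);
    return 1;
  }

  printf("ciphertext+tag: %zu bytes\n", out_len);
  EVP_AEAD_CTX_cleanup(&ctx);
  return 0;
}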
4451  third_party/boringssl/kit/linux-x86_64/crypto/fipsmodule/aesni-x86_64.S (vendored, new file; diff suppressed because it is too large)
2598  third_party/boringssl/kit/linux-x86_64/crypto/fipsmodule/bsaes-x86_64.S (vendored, new file; diff suppressed because it is too large)
1806  third_party/boringssl/kit/linux-x86_64/crypto/fipsmodule/ghash-x86_64.S (vendored, new file; diff suppressed because it is too large)
671  third_party/boringssl/kit/linux-x86_64/crypto/fipsmodule/md5-x86_64.S (vendored, new file)
@@ -0,0 +1,671 @@
|
||||
#if defined(__x86_64__) && !defined(OPENSSL_NO_ASM)
|
||||
.text
|
||||
.align 16
|
||||
|
||||
.globl md5_block_asm_data_order
|
||||
.hidden md5_block_asm_data_order
|
||||
.type md5_block_asm_data_order,@function
|
||||
md5_block_asm_data_order:
|
||||
pushq %rbp
|
||||
pushq %rbx
|
||||
pushq %r12
|
||||
pushq %r14
|
||||
pushq %r15
|
||||
.Lprologue:
|
||||
|
||||
|
||||
|
||||
|
||||
movq %rdi,%rbp
|
||||
shlq $6,%rdx
|
||||
leaq (%rsi,%rdx,1),%rdi
|
||||
movl 0(%rbp),%eax
|
||||
movl 4(%rbp),%ebx
|
||||
movl 8(%rbp),%ecx
|
||||
movl 12(%rbp),%edx
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
cmpq %rdi,%rsi
|
||||
je .Lend
|
||||
|
||||
|
||||
.Lloop:
|
||||
movl %eax,%r8d
|
||||
movl %ebx,%r9d
|
||||
movl %ecx,%r14d
|
||||
movl %edx,%r15d
|
||||
movl 0(%rsi),%r10d
|
||||
movl %edx,%r11d
|
||||
xorl %ecx,%r11d
|
||||
leal -680876936(%rax,%r10,1),%eax
|
||||
andl %ebx,%r11d
|
||||
xorl %edx,%r11d
|
||||
movl 4(%rsi),%r10d
|
||||
addl %r11d,%eax
|
||||
roll $7,%eax
|
||||
movl %ecx,%r11d
|
||||
addl %ebx,%eax
|
||||
xorl %ebx,%r11d
|
||||
leal -389564586(%rdx,%r10,1),%edx
|
||||
andl %eax,%r11d
|
||||
xorl %ecx,%r11d
|
||||
movl 8(%rsi),%r10d
|
||||
addl %r11d,%edx
|
||||
roll $12,%edx
|
||||
movl %ebx,%r11d
|
||||
addl %eax,%edx
|
||||
xorl %eax,%r11d
|
||||
leal 606105819(%rcx,%r10,1),%ecx
|
||||
andl %edx,%r11d
|
||||
xorl %ebx,%r11d
|
||||
movl 12(%rsi),%r10d
|
||||
addl %r11d,%ecx
|
||||
roll $17,%ecx
|
||||
movl %eax,%r11d
|
||||
addl %edx,%ecx
|
||||
xorl %edx,%r11d
|
||||
leal -1044525330(%rbx,%r10,1),%ebx
|
||||
andl %ecx,%r11d
|
||||
xorl %eax,%r11d
|
||||
movl 16(%rsi),%r10d
|
||||
addl %r11d,%ebx
|
||||
roll $22,%ebx
|
||||
movl %edx,%r11d
|
||||
addl %ecx,%ebx
|
||||
xorl %ecx,%r11d
|
||||
leal -176418897(%rax,%r10,1),%eax
|
||||
andl %ebx,%r11d
|
||||
xorl %edx,%r11d
|
||||
movl 20(%rsi),%r10d
|
||||
addl %r11d,%eax
|
||||
roll $7,%eax
|
||||
movl %ecx,%r11d
|
||||
addl %ebx,%eax
|
||||
xorl %ebx,%r11d
|
||||
leal 1200080426(%rdx,%r10,1),%edx
|
||||
andl %eax,%r11d
|
||||
xorl %ecx,%r11d
|
||||
movl 24(%rsi),%r10d
|
||||
addl %r11d,%edx
|
||||
roll $12,%edx
|
||||
movl %ebx,%r11d
|
||||
addl %eax,%edx
|
||||
xorl %eax,%r11d
|
||||
leal -1473231341(%rcx,%r10,1),%ecx
|
||||
andl %edx,%r11d
|
||||
xorl %ebx,%r11d
|
||||
movl 28(%rsi),%r10d
|
||||
addl %r11d,%ecx
|
||||
roll $17,%ecx
|
||||
movl %eax,%r11d
|
||||
addl %edx,%ecx
|
||||
xorl %edx,%r11d
|
||||
leal -45705983(%rbx,%r10,1),%ebx
|
||||
andl %ecx,%r11d
|
||||
xorl %eax,%r11d
|
||||
movl 32(%rsi),%r10d
|
||||
addl %r11d,%ebx
|
||||
roll $22,%ebx
|
||||
movl %edx,%r11d
|
||||
addl %ecx,%ebx
|
||||
xorl %ecx,%r11d
|
||||
leal 1770035416(%rax,%r10,1),%eax
|
||||
andl %ebx,%r11d
|
||||
xorl %edx,%r11d
|
||||
movl 36(%rsi),%r10d
|
||||
addl %r11d,%eax
|
||||
roll $7,%eax
|
||||
movl %ecx,%r11d
|
||||
addl %ebx,%eax
|
||||
xorl %ebx,%r11d
|
||||
leal -1958414417(%rdx,%r10,1),%edx
|
||||
andl %eax,%r11d
|
||||
xorl %ecx,%r11d
|
||||
movl 40(%rsi),%r10d
|
||||
addl %r11d,%edx
|
||||
roll $12,%edx
|
||||
movl %ebx,%r11d
|
||||
addl %eax,%edx
|
||||
xorl %eax,%r11d
|
||||
leal -42063(%rcx,%r10,1),%ecx
|
||||
andl %edx,%r11d
|
||||
xorl %ebx,%r11d
|
||||
movl 44(%rsi),%r10d
|
||||
addl %r11d,%ecx
|
||||
roll $17,%ecx
|
||||
movl %eax,%r11d
|
||||
addl %edx,%ecx
|
||||
xorl %edx,%r11d
|
||||
leal -1990404162(%rbx,%r10,1),%ebx
|
||||
andl %ecx,%r11d
|
||||
xorl %eax,%r11d
|
||||
movl 48(%rsi),%r10d
|
||||
addl %r11d,%ebx
|
||||
roll $22,%ebx
|
||||
movl %edx,%r11d
|
||||
addl %ecx,%ebx
|
||||
xorl %ecx,%r11d
|
||||
leal 1804603682(%rax,%r10,1),%eax
|
||||
andl %ebx,%r11d
|
||||
xorl %edx,%r11d
|
||||
movl 52(%rsi),%r10d
|
||||
addl %r11d,%eax
|
||||
roll $7,%eax
|
||||
movl %ecx,%r11d
|
||||
addl %ebx,%eax
|
||||
xorl %ebx,%r11d
|
||||
leal -40341101(%rdx,%r10,1),%edx
|
||||
andl %eax,%r11d
|
||||
xorl %ecx,%r11d
|
||||
movl 56(%rsi),%r10d
|
||||
addl %r11d,%edx
|
||||
roll $12,%edx
|
||||
movl %ebx,%r11d
|
||||
addl %eax,%edx
|
||||
xorl %eax,%r11d
|
||||
leal -1502002290(%rcx,%r10,1),%ecx
|
||||
andl %edx,%r11d
|
||||
xorl %ebx,%r11d
|
||||
movl 60(%rsi),%r10d
|
||||
addl %r11d,%ecx
|
||||
roll $17,%ecx
|
||||
movl %eax,%r11d
|
||||
addl %edx,%ecx
|
||||
xorl %edx,%r11d
|
||||
leal 1236535329(%rbx,%r10,1),%ebx
|
||||
andl %ecx,%r11d
|
||||
xorl %eax,%r11d
|
||||
movl 0(%rsi),%r10d
|
||||
addl %r11d,%ebx
|
||||
roll $22,%ebx
|
||||
movl %edx,%r11d
|
||||
addl %ecx,%ebx
|
||||
movl 4(%rsi),%r10d
|
||||
movl %edx,%r11d
|
||||
movl %edx,%r12d
|
||||
notl %r11d
|
||||
leal -165796510(%rax,%r10,1),%eax
|
||||
andl %ebx,%r12d
|
||||
andl %ecx,%r11d
|
||||
movl 24(%rsi),%r10d
|
||||
orl %r11d,%r12d
|
||||
movl %ecx,%r11d
|
||||
addl %r12d,%eax
|
||||
movl %ecx,%r12d
|
||||
roll $5,%eax
|
||||
addl %ebx,%eax
|
||||
notl %r11d
|
||||
leal -1069501632(%rdx,%r10,1),%edx
|
||||
andl %eax,%r12d
|
||||
andl %ebx,%r11d
|
||||
movl 44(%rsi),%r10d
|
||||
orl %r11d,%r12d
|
||||
movl %ebx,%r11d
|
||||
addl %r12d,%edx
|
||||
movl %ebx,%r12d
|
||||
roll $9,%edx
|
||||
addl %eax,%edx
|
||||
notl %r11d
|
||||
leal 643717713(%rcx,%r10,1),%ecx
|
||||
andl %edx,%r12d
|
||||
andl %eax,%r11d
|
||||
movl 0(%rsi),%r10d
|
||||
orl %r11d,%r12d
|
||||
movl %eax,%r11d
|
||||
addl %r12d,%ecx
|
||||
movl %eax,%r12d
|
||||
roll $14,%ecx
|
||||
addl %edx,%ecx
|
||||
notl %r11d
|
||||
leal -373897302(%rbx,%r10,1),%ebx
|
||||
andl %ecx,%r12d
|
||||
andl %edx,%r11d
|
||||
movl 20(%rsi),%r10d
|
||||
orl %r11d,%r12d
|
||||
movl %edx,%r11d
|
||||
addl %r12d,%ebx
|
||||
movl %edx,%r12d
|
||||
roll $20,%ebx
|
||||
addl %ecx,%ebx
|
||||
notl %r11d
|
||||
leal -701558691(%rax,%r10,1),%eax
|
||||
andl %ebx,%r12d
|
||||
andl %ecx,%r11d
|
||||
movl 40(%rsi),%r10d
|
||||
orl %r11d,%r12d
|
||||
movl %ecx,%r11d
|
||||
addl %r12d,%eax
|
||||
movl %ecx,%r12d
|
||||
roll $5,%eax
|
||||
addl %ebx,%eax
|
||||
notl %r11d
|
||||
leal 38016083(%rdx,%r10,1),%edx
|
||||
andl %eax,%r12d
|
||||
andl %ebx,%r11d
|
||||
movl 60(%rsi),%r10d
|
||||
orl %r11d,%r12d
|
||||
movl %ebx,%r11d
|
||||
addl %r12d,%edx
|
||||
movl %ebx,%r12d
|
||||
roll $9,%edx
|
||||
addl %eax,%edx
|
||||
notl %r11d
|
||||
leal -660478335(%rcx,%r10,1),%ecx
|
||||
andl %edx,%r12d
|
||||
andl %eax,%r11d
|
||||
movl 16(%rsi),%r10d
|
||||
orl %r11d,%r12d
|
||||
movl %eax,%r11d
|
||||
addl %r12d,%ecx
|
||||
movl %eax,%r12d
|
||||
roll $14,%ecx
|
||||
addl %edx,%ecx
|
||||
notl %r11d
|
||||
leal -405537848(%rbx,%r10,1),%ebx
|
||||
andl %ecx,%r12d
|
||||
andl %edx,%r11d
|
||||
movl 36(%rsi),%r10d
|
||||
orl %r11d,%r12d
|
||||
movl %edx,%r11d
|
||||
addl %r12d,%ebx
|
||||
movl %edx,%r12d
|
||||
roll $20,%ebx
|
||||
addl %ecx,%ebx
|
||||
notl %r11d
|
||||
leal 568446438(%rax,%r10,1),%eax
|
||||
andl %ebx,%r12d
|
||||
andl %ecx,%r11d
|
||||
movl 56(%rsi),%r10d
|
||||
orl %r11d,%r12d
|
||||
movl %ecx,%r11d
|
||||
addl %r12d,%eax
|
||||
movl %ecx,%r12d
|
||||
roll $5,%eax
|
||||
addl %ebx,%eax
|
||||
notl %r11d
|
||||
leal -1019803690(%rdx,%r10,1),%edx
|
||||
andl %eax,%r12d
|
||||
andl %ebx,%r11d
|
||||
movl 12(%rsi),%r10d
|
||||
orl %r11d,%r12d
|
||||
movl %ebx,%r11d
|
||||
addl %r12d,%edx
|
||||
movl %ebx,%r12d
|
||||
roll $9,%edx
|
||||
addl %eax,%edx
|
||||
notl %r11d
|
||||
leal -187363961(%rcx,%r10,1),%ecx
|
||||
andl %edx,%r12d
|
||||
andl %eax,%r11d
|
||||
movl 32(%rsi),%r10d
|
||||
orl %r11d,%r12d
|
||||
movl %eax,%r11d
|
||||
addl %r12d,%ecx
|
||||
movl %eax,%r12d
|
||||
roll $14,%ecx
|
||||
addl %edx,%ecx
|
||||
notl %r11d
|
||||
leal 1163531501(%rbx,%r10,1),%ebx
|
||||
andl %ecx,%r12d
|
||||
andl %edx,%r11d
|
||||
movl 52(%rsi),%r10d
|
||||
orl %r11d,%r12d
|
||||
movl %edx,%r11d
|
||||
addl %r12d,%ebx
|
||||
movl %edx,%r12d
|
||||
roll $20,%ebx
|
||||
addl %ecx,%ebx
|
||||
notl %r11d
|
||||
leal -1444681467(%rax,%r10,1),%eax
|
||||
andl %ebx,%r12d
|
||||
andl %ecx,%r11d
|
||||
movl 8(%rsi),%r10d
|
||||
orl %r11d,%r12d
|
||||
movl %ecx,%r11d
|
||||
addl %r12d,%eax
|
||||
movl %ecx,%r12d
|
||||
roll $5,%eax
|
||||
addl %ebx,%eax
|
||||
notl %r11d
|
||||
leal -51403784(%rdx,%r10,1),%edx
|
||||
andl %eax,%r12d
|
||||
andl %ebx,%r11d
|
||||
movl 28(%rsi),%r10d
|
||||
orl %r11d,%r12d
|
||||
movl %ebx,%r11d
|
||||
addl %r12d,%edx
|
||||
movl %ebx,%r12d
|
||||
roll $9,%edx
|
||||
addl %eax,%edx
|
||||
notl %r11d
|
||||
leal 1735328473(%rcx,%r10,1),%ecx
|
||||
andl %edx,%r12d
|
||||
andl %eax,%r11d
|
||||
movl 48(%rsi),%r10d
|
||||
orl %r11d,%r12d
|
||||
movl %eax,%r11d
|
||||
addl %r12d,%ecx
|
||||
movl %eax,%r12d
|
||||
roll $14,%ecx
|
||||
addl %edx,%ecx
|
||||
notl %r11d
|
||||
leal -1926607734(%rbx,%r10,1),%ebx
|
||||
andl %ecx,%r12d
|
||||
andl %edx,%r11d
|
||||
movl 0(%rsi),%r10d
|
||||
orl %r11d,%r12d
|
||||
movl %edx,%r11d
|
||||
addl %r12d,%ebx
|
||||
movl %edx,%r12d
|
||||
roll $20,%ebx
|
||||
addl %ecx,%ebx
|
||||
movl 20(%rsi),%r10d
|
||||
movl %ecx,%r11d
|
||||
leal -378558(%rax,%r10,1),%eax
|
||||
movl 32(%rsi),%r10d
|
||||
xorl %edx,%r11d
|
||||
xorl %ebx,%r11d
|
||||
addl %r11d,%eax
|
||||
roll $4,%eax
|
||||
movl %ebx,%r11d
|
||||
addl %ebx,%eax
|
||||
leal -2022574463(%rdx,%r10,1),%edx
|
||||
movl 44(%rsi),%r10d
|
||||
xorl %ecx,%r11d
|
||||
xorl %eax,%r11d
|
||||
addl %r11d,%edx
|
||||
roll $11,%edx
|
||||
movl %eax,%r11d
|
||||
addl %eax,%edx
|
||||
leal 1839030562(%rcx,%r10,1),%ecx
|
||||
movl 56(%rsi),%r10d
|
||||
xorl %ebx,%r11d
|
||||
xorl %edx,%r11d
|
||||
addl %r11d,%ecx
|
||||
roll $16,%ecx
|
||||
movl %edx,%r11d
|
||||
addl %edx,%ecx
|
||||
leal -35309556(%rbx,%r10,1),%ebx
|
||||
movl 4(%rsi),%r10d
|
||||
xorl %eax,%r11d
|
||||
xorl %ecx,%r11d
|
||||
addl %r11d,%ebx
|
||||
roll $23,%ebx
|
||||
movl %ecx,%r11d
|
||||
addl %ecx,%ebx
|
||||
leal -1530992060(%rax,%r10,1),%eax
|
||||
movl 16(%rsi),%r10d
|
||||
xorl %edx,%r11d
|
||||
xorl %ebx,%r11d
|
||||
addl %r11d,%eax
|
||||
roll $4,%eax
|
||||
movl %ebx,%r11d
|
||||
addl %ebx,%eax
|
||||
leal 1272893353(%rdx,%r10,1),%edx
|
||||
movl 28(%rsi),%r10d
|
||||
xorl %ecx,%r11d
|
||||
xorl %eax,%r11d
|
||||
addl %r11d,%edx
|
||||
roll $11,%edx
|
||||
movl %eax,%r11d
|
||||
addl %eax,%edx
|
||||
leal -155497632(%rcx,%r10,1),%ecx
|
||||
movl 40(%rsi),%r10d
|
||||
xorl %ebx,%r11d
|
||||
xorl %edx,%r11d
|
||||
addl %r11d,%ecx
|
||||
roll $16,%ecx
|
||||
movl %edx,%r11d
|
||||
addl %edx,%ecx
|
||||
leal -1094730640(%rbx,%r10,1),%ebx
|
||||
movl 52(%rsi),%r10d
|
||||
xorl %eax,%r11d
|
||||
xorl %ecx,%r11d
|
||||
addl %r11d,%ebx
|
||||
roll $23,%ebx
|
||||
movl %ecx,%r11d
|
||||
addl %ecx,%ebx
|
||||
leal 681279174(%rax,%r10,1),%eax
|
||||
movl 0(%rsi),%r10d
|
||||
xorl %edx,%r11d
|
||||
xorl %ebx,%r11d
|
||||
addl %r11d,%eax
|
||||
roll $4,%eax
|
||||
movl %ebx,%r11d
|
||||
addl %ebx,%eax
|
||||
leal -358537222(%rdx,%r10,1),%edx
|
||||
movl 12(%rsi),%r10d
|
||||
xorl %ecx,%r11d
|
||||
xorl %eax,%r11d
|
||||
addl %r11d,%edx
|
||||
roll $11,%edx
|
||||
movl %eax,%r11d
|
||||
addl %eax,%edx
|
||||
leal -722521979(%rcx,%r10,1),%ecx
|
||||
movl 24(%rsi),%r10d
|
||||
xorl %ebx,%r11d
|
||||
xorl %edx,%r11d
|
||||
addl %r11d,%ecx
|
||||
roll $16,%ecx
|
||||
movl %edx,%r11d
|
||||
addl %edx,%ecx
|
||||
leal 76029189(%rbx,%r10,1),%ebx
|
||||
movl 36(%rsi),%r10d
|
||||
xorl %eax,%r11d
|
||||
xorl %ecx,%r11d
|
||||
addl %r11d,%ebx
|
||||
roll $23,%ebx
|
||||
movl %ecx,%r11d
|
||||
addl %ecx,%ebx
|
||||
leal -640364487(%rax,%r10,1),%eax
|
||||
movl 48(%rsi),%r10d
|
||||
xorl %edx,%r11d
|
||||
xorl %ebx,%r11d
|
||||
addl %r11d,%eax
|
||||
roll $4,%eax
|
||||
movl %ebx,%r11d
|
||||
addl %ebx,%eax
|
||||
leal -421815835(%rdx,%r10,1),%edx
|
||||
movl 60(%rsi),%r10d
|
||||
xorl %ecx,%r11d
|
||||
xorl %eax,%r11d
|
||||
addl %r11d,%edx
|
||||
roll $11,%edx
|
||||
movl %eax,%r11d
|
||||
addl %eax,%edx
|
||||
leal 530742520(%rcx,%r10,1),%ecx
|
||||
movl 8(%rsi),%r10d
|
||||
xorl %ebx,%r11d
|
||||
xorl %edx,%r11d
|
||||
addl %r11d,%ecx
|
||||
roll $16,%ecx
|
||||
movl %edx,%r11d
|
||||
addl %edx,%ecx
|
||||
leal -995338651(%rbx,%r10,1),%ebx
|
||||
movl 0(%rsi),%r10d
|
||||
xorl %eax,%r11d
|
||||
xorl %ecx,%r11d
|
||||
addl %r11d,%ebx
|
||||
roll $23,%ebx
|
||||
movl %ecx,%r11d
|
||||
addl %ecx,%ebx
|
||||
movl 0(%rsi),%r10d
|
||||
movl $0xffffffff,%r11d
|
||||
xorl %edx,%r11d
|
||||
leal -198630844(%rax,%r10,1),%eax
|
||||
orl %ebx,%r11d
|
||||
xorl %ecx,%r11d
|
||||
addl %r11d,%eax
|
||||
movl 28(%rsi),%r10d
|
||||
movl $0xffffffff,%r11d
|
||||
roll $6,%eax
|
||||
xorl %ecx,%r11d
|
||||
addl %ebx,%eax
|
||||
leal 1126891415(%rdx,%r10,1),%edx
|
||||
orl %eax,%r11d
|
||||
xorl %ebx,%r11d
|
||||
addl %r11d,%edx
|
||||
movl 56(%rsi),%r10d
|
||||
movl $0xffffffff,%r11d
|
||||
roll $10,%edx
|
||||
xorl %ebx,%r11d
|
||||
addl %eax,%edx
|
||||
leal -1416354905(%rcx,%r10,1),%ecx
|
||||
orl %edx,%r11d
|
||||
xorl %eax,%r11d
|
||||
addl %r11d,%ecx
|
||||
movl 20(%rsi),%r10d
|
||||
movl $0xffffffff,%r11d
|
||||
roll $15,%ecx
|
||||
xorl %eax,%r11d
|
||||
addl %edx,%ecx
|
||||
leal -57434055(%rbx,%r10,1),%ebx
|
||||
orl %ecx,%r11d
|
||||
xorl %edx,%r11d
|
||||
addl %r11d,%ebx
|
||||
movl 48(%rsi),%r10d
|
||||
movl $0xffffffff,%r11d
|
||||
roll $21,%ebx
|
||||
xorl %edx,%r11d
|
||||
addl %ecx,%ebx
|
||||
leal 1700485571(%rax,%r10,1),%eax
|
||||
orl %ebx,%r11d
|
||||
xorl %ecx,%r11d
|
||||
addl %r11d,%eax
|
||||
movl 12(%rsi),%r10d
|
||||
movl $0xffffffff,%r11d
|
||||
roll $6,%eax
|
||||
xorl %ecx,%r11d
|
||||
addl %ebx,%eax
|
||||
leal -1894986606(%rdx,%r10,1),%edx
|
||||
orl %eax,%r11d
|
||||
xorl %ebx,%r11d
|
||||
addl %r11d,%edx
|
||||
movl 40(%rsi),%r10d
|
||||
movl $0xffffffff,%r11d
|
||||
roll $10,%edx
|
||||
xorl %ebx,%r11d
|
||||
addl %eax,%edx
|
||||
leal -1051523(%rcx,%r10,1),%ecx
|
||||
orl %edx,%r11d
|
||||
xorl %eax,%r11d
|
||||
addl %r11d,%ecx
|
||||
movl 4(%rsi),%r10d
|
||||
movl $0xffffffff,%r11d
|
||||
roll $15,%ecx
|
||||
xorl %eax,%r11d
|
||||
addl %edx,%ecx
|
||||
leal -2054922799(%rbx,%r10,1),%ebx
|
||||
orl %ecx,%r11d
|
||||
xorl %edx,%r11d
|
||||
addl %r11d,%ebx
|
||||
movl 32(%rsi),%r10d
|
||||
movl $0xffffffff,%r11d
|
||||
roll $21,%ebx
|
||||
xorl %edx,%r11d
|
||||
addl %ecx,%ebx
|
||||
leal 1873313359(%rax,%r10,1),%eax
|
||||
orl %ebx,%r11d
|
||||
xorl %ecx,%r11d
|
||||
addl %r11d,%eax
|
||||
movl 60(%rsi),%r10d
|
||||
movl $0xffffffff,%r11d
|
||||
roll $6,%eax
|
||||
xorl %ecx,%r11d
|
||||
addl %ebx,%eax
|
||||
leal -30611744(%rdx,%r10,1),%edx
|
||||
orl %eax,%r11d
|
||||
xorl %ebx,%r11d
|
||||
addl %r11d,%edx
|
||||
movl 24(%rsi),%r10d
|
||||
movl $0xffffffff,%r11d
|
||||
roll $10,%edx
|
||||
xorl %ebx,%r11d
|
||||
addl %eax,%edx
|
||||
leal -1560198380(%rcx,%r10,1),%ecx
|
||||
orl %edx,%r11d
|
||||
xorl %eax,%r11d
|
||||
addl %r11d,%ecx
|
||||
movl 52(%rsi),%r10d
|
||||
movl $0xffffffff,%r11d
|
||||
roll $15,%ecx
|
||||
xorl %eax,%r11d
|
||||
addl %edx,%ecx
|
||||
leal 1309151649(%rbx,%r10,1),%ebx
|
||||
orl %ecx,%r11d
|
||||
xorl %edx,%r11d
|
||||
addl %r11d,%ebx
|
||||
movl 16(%rsi),%r10d
|
||||
movl $0xffffffff,%r11d
|
||||
roll $21,%ebx
|
||||
xorl %edx,%r11d
|
||||
addl %ecx,%ebx
|
||||
leal -145523070(%rax,%r10,1),%eax
|
||||
orl %ebx,%r11d
|
||||
xorl %ecx,%r11d
|
||||
addl %r11d,%eax
|
||||
movl 44(%rsi),%r10d
|
||||
movl $0xffffffff,%r11d
|
||||
roll $6,%eax
|
||||
xorl %ecx,%r11d
|
||||
addl %ebx,%eax
|
||||
leal -1120210379(%rdx,%r10,1),%edx
|
||||
orl %eax,%r11d
|
||||
xorl %ebx,%r11d
|
||||
addl %r11d,%edx
|
||||
movl 8(%rsi),%r10d
|
||||
movl $0xffffffff,%r11d
|
||||
roll $10,%edx
|
||||
xorl %ebx,%r11d
|
||||
addl %eax,%edx
|
||||
leal 718787259(%rcx,%r10,1),%ecx
|
||||
orl %edx,%r11d
|
||||
xorl %eax,%r11d
|
||||
addl %r11d,%ecx
|
||||
movl 36(%rsi),%r10d
|
||||
movl $0xffffffff,%r11d
|
||||
roll $15,%ecx
|
||||
xorl %eax,%r11d
|
||||
addl %edx,%ecx
|
||||
leal -343485551(%rbx,%r10,1),%ebx
|
||||
orl %ecx,%r11d
|
||||
xorl %edx,%r11d
|
||||
addl %r11d,%ebx
|
||||
movl 0(%rsi),%r10d
|
||||
movl $0xffffffff,%r11d
|
||||
roll $21,%ebx
|
||||
xorl %edx,%r11d
|
||||
addl %ecx,%ebx
|
||||
|
||||
addl %r8d,%eax
|
||||
addl %r9d,%ebx
|
||||
addl %r14d,%ecx
|
||||
addl %r15d,%edx
|
||||
|
||||
|
||||
addq $64,%rsi
|
||||
cmpq %rdi,%rsi
|
||||
jb .Lloop
|
||||
|
||||
|
||||
.Lend:
|
||||
movl %eax,0(%rbp)
|
||||
movl %ebx,4(%rbp)
|
||||
movl %ecx,8(%rbp)
|
||||
movl %edx,12(%rbp)
|
||||
|
||||
movq (%rsp),%r15
|
||||
movq 8(%rsp),%r14
|
||||
movq 16(%rsp),%r12
|
||||
movq 24(%rsp),%rbx
|
||||
movq 32(%rsp),%rbp
|
||||
addq $40,%rsp
|
||||
.Lepilogue:
|
||||
.byte 0xf3,0xc3
|
||||
.size md5_block_asm_data_order,.-md5_block_asm_data_order
|
||||
#endif
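md5_block_asm_data_order above is the MD5 compression function; it processes 64-byte blocks and is driven by BoringSSL's C MD5 layer rather than called directly. A minimal sketch, not part of this commit, of the usual application-level entry point via the public one-shot MD5 helper follows; the input string is a placeholder.

/* Sketch only: hash a buffer with the public MD5 API from <openssl/md5.h>. */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#include <openssl/md5.h>

int main(void) {
  const char *msg = "abc";
  uint8_t digest[MD5_DIGEST_LENGTH];

  /* One-shot helper; internally the C layer feeds 64-byte blocks to the
   * assembly block function above (or to a C fallback). */
  MD5((const uint8_t *)msg, strlen(msg), digest);

  for (size_t i = 0; i < sizeof(digest); i++) {
    printf("%02x", digest[i]);
  }
  printf("\n");
  return 0;
}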
1933  third_party/boringssl/kit/linux-x86_64/crypto/fipsmodule/p256-x86_64-asm.S (vendored, new file; diff suppressed because it is too large)
48  third_party/boringssl/kit/linux-x86_64/crypto/fipsmodule/rdrand-x86_64.S (vendored, new file)
@@ -0,0 +1,48 @@
#if defined(__x86_64__) && !defined(OPENSSL_NO_ASM)
.text

.globl CRYPTO_rdrand
.hidden CRYPTO_rdrand
.type CRYPTO_rdrand,@function
.align 16
CRYPTO_rdrand:
xorq %rax,%rax

.byte 0x48, 0x0f, 0xc7, 0xf1	# rdrand %rcx

adcq %rax,%rax	# carry flag set by rdrand on success; rax becomes 0 or 1
movq %rcx,0(%rdi)
.byte 0xf3,0xc3	# repz ret

.globl CRYPTO_rdrand_multiple8_buf
.hidden CRYPTO_rdrand_multiple8_buf
.type CRYPTO_rdrand_multiple8_buf,@function
.align 16
CRYPTO_rdrand_multiple8_buf:
testq %rsi,%rsi
jz .Lout
movq $8,%rdx
.Lloop:
.byte 0x48, 0x0f, 0xc7, 0xf1	# rdrand %rcx
jnc .Lerr
movq %rcx,0(%rdi)
addq %rdx,%rdi
subq %rdx,%rsi
jnz .Lloop
.Lout:
movq $1,%rax
.byte 0xf3,0xc3	# repz ret
.Lerr:
xorq %rax,%rax
.byte 0xf3,0xc3	# repz ret
#endif
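CRYPTO_rdrand and CRYPTO_rdrand_multiple8_buf wrap the RDRAND instruction (the .byte 0x48,0x0f,0xc7,0xf1 sequence) and signal failure through the carry flag; they feed BoringSSL's RNG internally rather than being an application API. A minimal sketch, not part of this commit, of the public entry point applications use instead follows.

/* Sketch only: RAND_bytes is the public RNG interface; hardware entropy
 * sources such as RDRAND (when available) are mixed in behind it. */
#include <stdint.h>
#include <stdio.h>

#include <openssl/rand.h>

int main(void) {
  uint8_t buf[16];
  if (!RAND_bytes(buf, sizeof(buf))) {  /* returns 1 on success */
    return 1;
  }
  for (size_t i = 0; i < sizeof(buf); i++) {
    printf("%02x", buf[i]);
  }
  printf("\n");
  return 0;
}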
1744  third_party/boringssl/kit/linux-x86_64/crypto/fipsmodule/rsaz-avx2.S (vendored, new file; diff suppressed because it is too large)
3544  third_party/boringssl/kit/linux-x86_64/crypto/fipsmodule/sha1-x86_64.S (vendored, new file; diff suppressed because it is too large)
3906  third_party/boringssl/kit/linux-x86_64/crypto/fipsmodule/sha256-x86_64.S (vendored, new file; diff suppressed because it is too large)
4028  third_party/boringssl/kit/linux-x86_64/crypto/fipsmodule/sha512-x86_64.S (vendored, new file; diff suppressed because it is too large)
834  third_party/boringssl/kit/linux-x86_64/crypto/fipsmodule/vpaes-x86_64.S (vendored, new file)
@@ -0,0 +1,834 @@
|
||||
#if defined(__x86_64__) && !defined(OPENSSL_NO_ASM)
|
||||
.text
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
.type _vpaes_encrypt_core,@function
|
||||
.align 16
|
||||
_vpaes_encrypt_core:
|
||||
movq %rdx,%r9
|
||||
movq $16,%r11
|
||||
movl 240(%rdx),%eax
|
||||
movdqa %xmm9,%xmm1
|
||||
movdqa .Lk_ipt(%rip),%xmm2
|
||||
pandn %xmm0,%xmm1
|
||||
movdqu (%r9),%xmm5
|
||||
psrld $4,%xmm1
|
||||
pand %xmm9,%xmm0
|
||||
.byte 102,15,56,0,208
|
||||
movdqa .Lk_ipt+16(%rip),%xmm0
|
||||
.byte 102,15,56,0,193
|
||||
pxor %xmm5,%xmm2
|
||||
addq $16,%r9
|
||||
pxor %xmm2,%xmm0
|
||||
leaq .Lk_mc_backward(%rip),%r10
|
||||
jmp .Lenc_entry
|
||||
|
||||
.align 16
|
||||
.Lenc_loop:
|
||||
|
||||
movdqa %xmm13,%xmm4
|
||||
movdqa %xmm12,%xmm0
|
||||
.byte 102,15,56,0,226
|
||||
.byte 102,15,56,0,195
|
||||
pxor %xmm5,%xmm4
|
||||
movdqa %xmm15,%xmm5
|
||||
pxor %xmm4,%xmm0
|
||||
movdqa -64(%r11,%r10,1),%xmm1
|
||||
.byte 102,15,56,0,234
|
||||
movdqa (%r11,%r10,1),%xmm4
|
||||
movdqa %xmm14,%xmm2
|
||||
.byte 102,15,56,0,211
|
||||
movdqa %xmm0,%xmm3
|
||||
pxor %xmm5,%xmm2
|
||||
.byte 102,15,56,0,193
|
||||
addq $16,%r9
|
||||
pxor %xmm2,%xmm0
|
||||
.byte 102,15,56,0,220
|
||||
addq $16,%r11
|
||||
pxor %xmm0,%xmm3
|
||||
.byte 102,15,56,0,193
|
||||
andq $0x30,%r11
|
||||
subq $1,%rax
|
||||
pxor %xmm3,%xmm0
|
||||
|
||||
.Lenc_entry:
|
||||
|
||||
movdqa %xmm9,%xmm1
|
||||
movdqa %xmm11,%xmm5
|
||||
pandn %xmm0,%xmm1
|
||||
psrld $4,%xmm1
|
||||
pand %xmm9,%xmm0
|
||||
.byte 102,15,56,0,232
|
||||
movdqa %xmm10,%xmm3
|
||||
pxor %xmm1,%xmm0
|
||||
.byte 102,15,56,0,217
|
||||
movdqa %xmm10,%xmm4
|
||||
pxor %xmm5,%xmm3
|
||||
.byte 102,15,56,0,224
|
||||
movdqa %xmm10,%xmm2
|
||||
pxor %xmm5,%xmm4
|
||||
.byte 102,15,56,0,211
|
||||
movdqa %xmm10,%xmm3
|
||||
pxor %xmm0,%xmm2
|
||||
.byte 102,15,56,0,220
|
||||
movdqu (%r9),%xmm5
|
||||
pxor %xmm1,%xmm3
|
||||
jnz .Lenc_loop
|
||||
|
||||
|
||||
movdqa -96(%r10),%xmm4
|
||||
movdqa -80(%r10),%xmm0
|
||||
.byte 102,15,56,0,226
|
||||
pxor %xmm5,%xmm4
|
||||
.byte 102,15,56,0,195
|
||||
movdqa 64(%r11,%r10,1),%xmm1
|
||||
pxor %xmm4,%xmm0
|
||||
.byte 102,15,56,0,193
|
||||
.byte 0xf3,0xc3
|
||||
.size _vpaes_encrypt_core,.-_vpaes_encrypt_core
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
.type _vpaes_decrypt_core,@function
|
||||
.align 16
|
||||
_vpaes_decrypt_core:
|
||||
movq %rdx,%r9
|
||||
movl 240(%rdx),%eax
|
||||
movdqa %xmm9,%xmm1
|
||||
movdqa .Lk_dipt(%rip),%xmm2
|
||||
pandn %xmm0,%xmm1
|
||||
movq %rax,%r11
|
||||
psrld $4,%xmm1
|
||||
movdqu (%r9),%xmm5
|
||||
shlq $4,%r11
|
||||
pand %xmm9,%xmm0
|
||||
.byte 102,15,56,0,208
|
||||
movdqa .Lk_dipt+16(%rip),%xmm0
|
||||
xorq $0x30,%r11
|
||||
leaq .Lk_dsbd(%rip),%r10
|
||||
.byte 102,15,56,0,193
|
||||
andq $0x30,%r11
|
||||
pxor %xmm5,%xmm2
|
||||
movdqa .Lk_mc_forward+48(%rip),%xmm5
|
||||
pxor %xmm2,%xmm0
|
||||
addq $16,%r9
|
||||
addq %r10,%r11
|
||||
jmp .Ldec_entry
|
||||
|
||||
.align 16
|
||||
.Ldec_loop:
|
||||
|
||||
|
||||
|
||||
movdqa -32(%r10),%xmm4
|
||||
movdqa -16(%r10),%xmm1
|
||||
.byte 102,15,56,0,226
|
||||
.byte 102,15,56,0,203
|
||||
pxor %xmm4,%xmm0
|
||||
movdqa 0(%r10),%xmm4
|
||||
pxor %xmm1,%xmm0
|
||||
movdqa 16(%r10),%xmm1
|
||||
|
||||
.byte 102,15,56,0,226
|
||||
.byte 102,15,56,0,197
|
||||
.byte 102,15,56,0,203
|
||||
pxor %xmm4,%xmm0
|
||||
movdqa 32(%r10),%xmm4
|
||||
pxor %xmm1,%xmm0
|
||||
movdqa 48(%r10),%xmm1
|
||||
|
||||
.byte 102,15,56,0,226
|
||||
.byte 102,15,56,0,197
|
||||
.byte 102,15,56,0,203
|
||||
pxor %xmm4,%xmm0
|
||||
movdqa 64(%r10),%xmm4
|
||||
pxor %xmm1,%xmm0
|
||||
movdqa 80(%r10),%xmm1
|
||||
|
||||
.byte 102,15,56,0,226
|
||||
.byte 102,15,56,0,197
|
||||
.byte 102,15,56,0,203
|
||||
pxor %xmm4,%xmm0
|
||||
addq $16,%r9
|
||||
.byte 102,15,58,15,237,12
|
||||
pxor %xmm1,%xmm0
|
||||
subq $1,%rax
|
||||
|
||||
.Ldec_entry:
|
||||
|
||||
movdqa %xmm9,%xmm1
|
||||
pandn %xmm0,%xmm1
|
||||
movdqa %xmm11,%xmm2
|
||||
psrld $4,%xmm1
|
||||
pand %xmm9,%xmm0
|
||||
.byte 102,15,56,0,208
|
||||
movdqa %xmm10,%xmm3
|
||||
pxor %xmm1,%xmm0
|
||||
.byte 102,15,56,0,217
|
||||
movdqa %xmm10,%xmm4
|
||||
pxor %xmm2,%xmm3
|
||||
.byte 102,15,56,0,224
|
||||
pxor %xmm2,%xmm4
|
||||
movdqa %xmm10,%xmm2
|
||||
.byte 102,15,56,0,211
|
||||
movdqa %xmm10,%xmm3
|
||||
pxor %xmm0,%xmm2
|
||||
.byte 102,15,56,0,220
|
||||
movdqu (%r9),%xmm0
|
||||
pxor %xmm1,%xmm3
|
||||
jnz .Ldec_loop
|
||||
|
||||
|
||||
movdqa 96(%r10),%xmm4
|
||||
.byte 102,15,56,0,226
|
||||
pxor %xmm0,%xmm4
|
||||
movdqa 112(%r10),%xmm0
|
||||
movdqa -352(%r11),%xmm2
|
||||
.byte 102,15,56,0,195
|
||||
pxor %xmm4,%xmm0
|
||||
.byte 102,15,56,0,194
|
||||
.byte 0xf3,0xc3
|
||||
.size _vpaes_decrypt_core,.-_vpaes_decrypt_core
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
.type _vpaes_schedule_core,@function
|
||||
.align 16
|
||||
_vpaes_schedule_core:
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
call _vpaes_preheat
|
||||
movdqa .Lk_rcon(%rip),%xmm8
|
||||
movdqu (%rdi),%xmm0
|
||||
|
||||
|
||||
movdqa %xmm0,%xmm3
|
||||
leaq .Lk_ipt(%rip),%r11
|
||||
call _vpaes_schedule_transform
|
||||
movdqa %xmm0,%xmm7
|
||||
|
||||
leaq .Lk_sr(%rip),%r10
|
||||
testq %rcx,%rcx
|
||||
jnz .Lschedule_am_decrypting
|
||||
|
||||
|
||||
movdqu %xmm0,(%rdx)
|
||||
jmp .Lschedule_go
|
||||
|
||||
.Lschedule_am_decrypting:
|
||||
|
||||
movdqa (%r8,%r10,1),%xmm1
|
||||
.byte 102,15,56,0,217
|
||||
movdqu %xmm3,(%rdx)
|
||||
xorq $0x30,%r8
|
||||
|
||||
.Lschedule_go:
|
||||
cmpl $192,%esi
|
||||
ja .Lschedule_256
|
||||
je .Lschedule_192
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
.Lschedule_128:
|
||||
movl $10,%esi
|
||||
|
||||
.Loop_schedule_128:
|
||||
call _vpaes_schedule_round
|
||||
decq %rsi
|
||||
jz .Lschedule_mangle_last
|
||||
call _vpaes_schedule_mangle
|
||||
jmp .Loop_schedule_128
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
.align 16
|
||||
.Lschedule_192:
|
||||
movdqu 8(%rdi),%xmm0
|
||||
call _vpaes_schedule_transform
|
||||
movdqa %xmm0,%xmm6
|
||||
pxor %xmm4,%xmm4
|
||||
movhlps %xmm4,%xmm6
|
||||
movl $4,%esi
|
||||
|
||||
.Loop_schedule_192:
|
||||
call _vpaes_schedule_round
|
||||
.byte 102,15,58,15,198,8
|
||||
call _vpaes_schedule_mangle
|
||||
call _vpaes_schedule_192_smear
|
||||
call _vpaes_schedule_mangle
|
||||
call _vpaes_schedule_round
|
||||
decq %rsi
|
||||
jz .Lschedule_mangle_last
|
||||
call _vpaes_schedule_mangle
|
||||
call _vpaes_schedule_192_smear
|
||||
jmp .Loop_schedule_192
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
.align 16
|
||||
.Lschedule_256:
|
||||
movdqu 16(%rdi),%xmm0
|
||||
call _vpaes_schedule_transform
|
||||
movl $7,%esi
|
||||
|
||||
.Loop_schedule_256:
|
||||
call _vpaes_schedule_mangle
|
||||
movdqa %xmm0,%xmm6
|
||||
|
||||
|
||||
call _vpaes_schedule_round
|
||||
decq %rsi
|
||||
jz .Lschedule_mangle_last
|
||||
call _vpaes_schedule_mangle
|
||||
|
||||
|
||||
pshufd $0xFF,%xmm0,%xmm0
|
||||
movdqa %xmm7,%xmm5
|
||||
movdqa %xmm6,%xmm7
|
||||
call _vpaes_schedule_low_round
|
||||
movdqa %xmm5,%xmm7
|
||||
|
||||
jmp .Loop_schedule_256
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
.align 16
|
||||
.Lschedule_mangle_last:
|
||||
|
||||
leaq .Lk_deskew(%rip),%r11
|
||||
testq %rcx,%rcx
|
||||
jnz .Lschedule_mangle_last_dec
|
||||
|
||||
|
||||
movdqa (%r8,%r10,1),%xmm1
|
||||
.byte 102,15,56,0,193
|
||||
leaq .Lk_opt(%rip),%r11
|
||||
addq $32,%rdx
|
||||
|
||||
.Lschedule_mangle_last_dec:
|
||||
addq $-16,%rdx
|
||||
pxor .Lk_s63(%rip),%xmm0
|
||||
call _vpaes_schedule_transform
|
||||
movdqu %xmm0,(%rdx)
|
||||
|
||||
|
||||
pxor %xmm0,%xmm0
|
||||
pxor %xmm1,%xmm1
|
||||
pxor %xmm2,%xmm2
|
||||
pxor %xmm3,%xmm3
|
||||
pxor %xmm4,%xmm4
|
||||
pxor %xmm5,%xmm5
|
||||
pxor %xmm6,%xmm6
|
||||
pxor %xmm7,%xmm7
|
||||
.byte 0xf3,0xc3
|
||||
.size _vpaes_schedule_core,.-_vpaes_schedule_core
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
.type _vpaes_schedule_192_smear,@function
|
||||
.align 16
|
||||
_vpaes_schedule_192_smear:
|
||||
pshufd $0x80,%xmm6,%xmm1
|
||||
pshufd $0xFE,%xmm7,%xmm0
|
||||
pxor %xmm1,%xmm6
|
||||
pxor %xmm1,%xmm1
|
||||
pxor %xmm0,%xmm6
|
||||
movdqa %xmm6,%xmm0
|
||||
movhlps %xmm1,%xmm6
|
||||
.byte 0xf3,0xc3
|
||||
.size _vpaes_schedule_192_smear,.-_vpaes_schedule_192_smear
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
.type _vpaes_schedule_round,@function
|
||||
.align 16
|
||||
_vpaes_schedule_round:
|
||||
|
||||
pxor %xmm1,%xmm1
|
||||
.byte 102,65,15,58,15,200,15
|
||||
.byte 102,69,15,58,15,192,15
|
||||
pxor %xmm1,%xmm7
|
||||
|
||||
|
||||
pshufd $0xFF,%xmm0,%xmm0
|
||||
.byte 102,15,58,15,192,1
|
||||
|
||||
|
||||
|
||||
|
||||
_vpaes_schedule_low_round:
|
||||
|
||||
movdqa %xmm7,%xmm1
|
||||
pslldq $4,%xmm7
|
||||
pxor %xmm1,%xmm7
|
||||
movdqa %xmm7,%xmm1
|
||||
pslldq $8,%xmm7
|
||||
pxor %xmm1,%xmm7
|
||||
pxor .Lk_s63(%rip),%xmm7
|
||||
|
||||
|
||||
movdqa %xmm9,%xmm1
|
||||
pandn %xmm0,%xmm1
|
||||
psrld $4,%xmm1
|
||||
pand %xmm9,%xmm0
|
||||
movdqa %xmm11,%xmm2
|
||||
.byte 102,15,56,0,208
|
||||
pxor %xmm1,%xmm0
|
||||
movdqa %xmm10,%xmm3
|
||||
.byte 102,15,56,0,217
|
||||
pxor %xmm2,%xmm3
|
||||
movdqa %xmm10,%xmm4
|
||||
.byte 102,15,56,0,224
|
||||
pxor %xmm2,%xmm4
|
||||
movdqa %xmm10,%xmm2
|
||||
.byte 102,15,56,0,211
|
||||
pxor %xmm0,%xmm2
|
||||
movdqa %xmm10,%xmm3
|
||||
.byte 102,15,56,0,220
|
||||
pxor %xmm1,%xmm3
|
||||
movdqa %xmm13,%xmm4
|
||||
.byte 102,15,56,0,226
|
||||
movdqa %xmm12,%xmm0
|
||||
.byte 102,15,56,0,195
|
||||
pxor %xmm4,%xmm0
|
||||
|
||||
|
||||
pxor %xmm7,%xmm0
|
||||
movdqa %xmm0,%xmm7
|
||||
.byte 0xf3,0xc3
|
||||
.size _vpaes_schedule_round,.-_vpaes_schedule_round
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
.type _vpaes_schedule_transform,@function
|
||||
.align 16
|
||||
_vpaes_schedule_transform:
|
||||
movdqa %xmm9,%xmm1
|
||||
pandn %xmm0,%xmm1
|
||||
psrld $4,%xmm1
|
||||
pand %xmm9,%xmm0
|
||||
movdqa (%r11),%xmm2
|
||||
.byte 102,15,56,0,208
|
||||
movdqa 16(%r11),%xmm0
|
||||
.byte 102,15,56,0,193
|
||||
pxor %xmm2,%xmm0
|
||||
.byte 0xf3,0xc3
|
||||
.size _vpaes_schedule_transform,.-_vpaes_schedule_transform
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
.type _vpaes_schedule_mangle,@function
|
||||
.align 16
|
||||
_vpaes_schedule_mangle:
|
||||
movdqa %xmm0,%xmm4
|
||||
movdqa .Lk_mc_forward(%rip),%xmm5
|
||||
testq %rcx,%rcx
|
||||
jnz .Lschedule_mangle_dec
|
||||
|
||||
|
||||
addq $16,%rdx
|
||||
pxor .Lk_s63(%rip),%xmm4
|
||||
.byte 102,15,56,0,229
|
||||
movdqa %xmm4,%xmm3
|
||||
.byte 102,15,56,0,229
|
||||
pxor %xmm4,%xmm3
|
||||
.byte 102,15,56,0,229
|
||||
pxor %xmm4,%xmm3
|
||||
|
||||
jmp .Lschedule_mangle_both
|
||||
.align 16
|
||||
.Lschedule_mangle_dec:
|
||||
|
||||
leaq .Lk_dksd(%rip),%r11
|
||||
movdqa %xmm9,%xmm1
|
||||
pandn %xmm4,%xmm1
|
||||
psrld $4,%xmm1
|
||||
pand %xmm9,%xmm4
|
||||
|
||||
movdqa 0(%r11),%xmm2
|
||||
.byte 102,15,56,0,212
|
||||
movdqa 16(%r11),%xmm3
|
||||
.byte 102,15,56,0,217
|
||||
pxor %xmm2,%xmm3
|
||||
.byte 102,15,56,0,221
|
||||
|
||||
movdqa 32(%r11),%xmm2
|
||||
.byte 102,15,56,0,212
|
||||
pxor %xmm3,%xmm2
|
||||
movdqa 48(%r11),%xmm3
|
||||
.byte 102,15,56,0,217
|
||||
pxor %xmm2,%xmm3
|
||||
.byte 102,15,56,0,221
|
||||
|
||||
movdqa 64(%r11),%xmm2
|
||||
.byte 102,15,56,0,212
|
||||
pxor %xmm3,%xmm2
|
||||
movdqa 80(%r11),%xmm3
|
||||
.byte 102,15,56,0,217
|
||||
pxor %xmm2,%xmm3
|
||||
.byte 102,15,56,0,221
|
||||
|
||||
movdqa 96(%r11),%xmm2
|
||||
.byte 102,15,56,0,212
|
||||
pxor %xmm3,%xmm2
|
||||
movdqa 112(%r11),%xmm3
|
||||
.byte 102,15,56,0,217
|
||||
pxor %xmm2,%xmm3
|
||||
|
||||
addq $-16,%rdx
|
||||
|
||||
.Lschedule_mangle_both:
|
||||
movdqa (%r8,%r10,1),%xmm1
|
||||
.byte 102,15,56,0,217
|
||||
addq $-16,%r8
|
||||
andq $0x30,%r8
|
||||
movdqu %xmm3,(%rdx)
|
||||
.byte 0xf3,0xc3
|
||||
.size _vpaes_schedule_mangle,.-_vpaes_schedule_mangle
|
||||
|
||||
|
||||
|
||||
|
||||
.globl vpaes_set_encrypt_key
|
||||
.hidden vpaes_set_encrypt_key
|
||||
.type vpaes_set_encrypt_key,@function
|
||||
.align 16
|
||||
vpaes_set_encrypt_key:
|
||||
movl %esi,%eax
|
||||
shrl $5,%eax
|
||||
addl $5,%eax
|
||||
movl %eax,240(%rdx)
|
||||
|
||||
movl $0,%ecx
|
||||
movl $0x30,%r8d
|
||||
call _vpaes_schedule_core
|
||||
xorl %eax,%eax
|
||||
.byte 0xf3,0xc3
|
||||
.size vpaes_set_encrypt_key,.-vpaes_set_encrypt_key
|
||||
|
||||
.globl vpaes_set_decrypt_key
|
||||
.hidden vpaes_set_decrypt_key
|
||||
.type vpaes_set_decrypt_key,@function
|
||||
.align 16
|
||||
vpaes_set_decrypt_key:
|
||||
movl %esi,%eax
|
||||
shrl $5,%eax
|
||||
addl $5,%eax
|
||||
movl %eax,240(%rdx)
|
||||
shll $4,%eax
|
||||
leaq 16(%rdx,%rax,1),%rdx
|
||||
|
||||
movl $1,%ecx
|
||||
movl %esi,%r8d
|
||||
shrl $1,%r8d
|
||||
andl $32,%r8d
|
||||
xorl $32,%r8d
|
||||
call _vpaes_schedule_core
|
||||
xorl %eax,%eax
|
||||
.byte 0xf3,0xc3
|
||||
.size vpaes_set_decrypt_key,.-vpaes_set_decrypt_key
|
||||
|
||||
.globl vpaes_encrypt
|
||||
.hidden vpaes_encrypt
|
||||
.type vpaes_encrypt,@function
|
||||
.align 16
|
||||
vpaes_encrypt:
|
||||
movdqu (%rdi),%xmm0
|
||||
call _vpaes_preheat
|
||||
call _vpaes_encrypt_core
|
||||
movdqu %xmm0,(%rsi)
|
||||
.byte 0xf3,0xc3
|
||||
.size vpaes_encrypt,.-vpaes_encrypt
|
||||
|
||||
.globl vpaes_decrypt
|
||||
.hidden vpaes_decrypt
|
||||
.type vpaes_decrypt,@function
|
||||
.align 16
|
||||
vpaes_decrypt:
|
||||
movdqu (%rdi),%xmm0
|
||||
call _vpaes_preheat
|
||||
call _vpaes_decrypt_core
|
||||
movdqu %xmm0,(%rsi)
|
||||
.byte 0xf3,0xc3
|
||||
.size vpaes_decrypt,.-vpaes_decrypt
|
||||
.globl vpaes_cbc_encrypt
|
||||
.hidden vpaes_cbc_encrypt
|
||||
.type vpaes_cbc_encrypt,@function
|
||||
.align 16
|
||||
vpaes_cbc_encrypt:
|
||||
xchgq %rcx,%rdx
|
||||
subq $16,%rcx
|
||||
jc .Lcbc_abort
|
||||
movdqu (%r8),%xmm6
|
||||
subq %rdi,%rsi
|
||||
call _vpaes_preheat
|
||||
cmpl $0,%r9d
|
||||
je .Lcbc_dec_loop
|
||||
jmp .Lcbc_enc_loop
|
||||
.align 16
|
||||
.Lcbc_enc_loop:
|
||||
movdqu (%rdi),%xmm0
|
||||
pxor %xmm6,%xmm0
|
||||
call _vpaes_encrypt_core
|
||||
movdqa %xmm0,%xmm6
|
||||
movdqu %xmm0,(%rsi,%rdi,1)
|
||||
leaq 16(%rdi),%rdi
|
||||
subq $16,%rcx
|
||||
jnc .Lcbc_enc_loop
|
||||
jmp .Lcbc_done
|
||||
.align 16
|
||||
.Lcbc_dec_loop:
|
||||
movdqu (%rdi),%xmm0
|
||||
movdqa %xmm0,%xmm7
|
||||
call _vpaes_decrypt_core
|
||||
pxor %xmm6,%xmm0
|
||||
movdqa %xmm7,%xmm6
|
||||
movdqu %xmm0,(%rsi,%rdi,1)
|
||||
leaq 16(%rdi),%rdi
|
||||
subq $16,%rcx
|
||||
jnc .Lcbc_dec_loop
|
||||
.Lcbc_done:
|
||||
movdqu %xmm6,(%r8)
|
||||
.Lcbc_abort:
|
||||
.byte 0xf3,0xc3
|
||||
.size vpaes_cbc_encrypt,.-vpaes_cbc_encrypt
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
.type _vpaes_preheat,@function
|
||||
.align 16
|
||||
_vpaes_preheat:
|
||||
leaq .Lk_s0F(%rip),%r10
|
||||
movdqa -32(%r10),%xmm10
|
||||
movdqa -16(%r10),%xmm11
|
||||
movdqa 0(%r10),%xmm9
|
||||
movdqa 48(%r10),%xmm13
|
||||
movdqa 64(%r10),%xmm12
|
||||
movdqa 80(%r10),%xmm15
|
||||
movdqa 96(%r10),%xmm14
|
||||
.byte 0xf3,0xc3
|
||||
.size _vpaes_preheat,.-_vpaes_preheat
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
.type _vpaes_consts,@object
|
||||
.align 64
|
||||
_vpaes_consts:
|
||||
.Lk_inv:
|
||||
.quad 0x0E05060F0D080180, 0x040703090A0B0C02
|
||||
.quad 0x01040A060F0B0780, 0x030D0E0C02050809
|
||||
|
||||
.Lk_s0F:
|
||||
.quad 0x0F0F0F0F0F0F0F0F, 0x0F0F0F0F0F0F0F0F
|
||||
|
||||
.Lk_ipt:
|
||||
.quad 0xC2B2E8985A2A7000, 0xCABAE09052227808
|
||||
.quad 0x4C01307D317C4D00, 0xCD80B1FCB0FDCC81
|
||||
|
||||
.Lk_sb1:
|
||||
.quad 0xB19BE18FCB503E00, 0xA5DF7A6E142AF544
|
||||
.quad 0x3618D415FAE22300, 0x3BF7CCC10D2ED9EF
|
||||
.Lk_sb2:
|
||||
.quad 0xE27A93C60B712400, 0x5EB7E955BC982FCD
|
||||
.quad 0x69EB88400AE12900, 0xC2A163C8AB82234A
|
||||
.Lk_sbo:
|
||||
.quad 0xD0D26D176FBDC700, 0x15AABF7AC502A878
|
||||
.quad 0xCFE474A55FBB6A00, 0x8E1E90D1412B35FA
|
||||
|
||||
.Lk_mc_forward:
|
||||
.quad 0x0407060500030201, 0x0C0F0E0D080B0A09
|
||||
.quad 0x080B0A0904070605, 0x000302010C0F0E0D
|
||||
.quad 0x0C0F0E0D080B0A09, 0x0407060500030201
|
||||
.quad 0x000302010C0F0E0D, 0x080B0A0904070605
|
||||
|
||||
.Lk_mc_backward:
|
||||
.quad 0x0605040702010003, 0x0E0D0C0F0A09080B
|
||||
.quad 0x020100030E0D0C0F, 0x0A09080B06050407
|
||||
.quad 0x0E0D0C0F0A09080B, 0x0605040702010003
|
||||
.quad 0x0A09080B06050407, 0x020100030E0D0C0F
|
||||
|
||||
.Lk_sr:
|
||||
.quad 0x0706050403020100, 0x0F0E0D0C0B0A0908
|
||||
.quad 0x030E09040F0A0500, 0x0B06010C07020D08
|
||||
.quad 0x0F060D040B020900, 0x070E050C030A0108
|
||||
.quad 0x0B0E0104070A0D00, 0x0306090C0F020508
|
||||
|
||||
.Lk_rcon:
|
||||
.quad 0x1F8391B9AF9DEEB6, 0x702A98084D7C7D81
|
||||
|
||||
.Lk_s63:
|
||||
.quad 0x5B5B5B5B5B5B5B5B, 0x5B5B5B5B5B5B5B5B
|
||||
|
||||
.Lk_opt:
|
||||
.quad 0xFF9F4929D6B66000, 0xF7974121DEBE6808
|
||||
.quad 0x01EDBD5150BCEC00, 0xE10D5DB1B05C0CE0
|
||||
|
||||
.Lk_deskew:
|
||||
.quad 0x07E4A34047A4E300, 0x1DFEB95A5DBEF91A
|
||||
.quad 0x5F36B5DC83EA6900, 0x2841C2ABF49D1E77
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
.Lk_dksd:
|
||||
.quad 0xFEB91A5DA3E44700, 0x0740E3A45A1DBEF9
|
||||
.quad 0x41C277F4B5368300, 0x5FDC69EAAB289D1E
|
||||
.Lk_dksb:
|
||||
.quad 0x9A4FCA1F8550D500, 0x03D653861CC94C99
|
||||
.quad 0x115BEDA7B6FC4A00, 0xD993256F7E3482C8
|
||||
.Lk_dkse:
|
||||
.quad 0xD5031CCA1FC9D600, 0x53859A4C994F5086
|
||||
.quad 0xA23196054FDC7BE8, 0xCD5EF96A20B31487
|
||||
.Lk_dks9:
|
||||
.quad 0xB6116FC87ED9A700, 0x4AED933482255BFC
|
||||
.quad 0x4576516227143300, 0x8BB89FACE9DAFDCE
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
.Lk_dipt:
|
||||
.quad 0x0F505B040B545F00, 0x154A411E114E451A
|
||||
.quad 0x86E383E660056500, 0x12771772F491F194
|
||||
|
||||
.Lk_dsb9:
|
||||
.quad 0x851C03539A86D600, 0xCAD51F504F994CC9
|
||||
.quad 0xC03B1789ECD74900, 0x725E2C9EB2FBA565
|
||||
.Lk_dsbd:
|
||||
.quad 0x7D57CCDFE6B1A200, 0xF56E9B13882A4439
|
||||
.quad 0x3CE2FAF724C6CB00, 0x2931180D15DEEFD3
|
||||
.Lk_dsbb:
|
||||
.quad 0xD022649296B44200, 0x602646F6B0F2D404
|
||||
.quad 0xC19498A6CD596700, 0xF3FF0C3E3255AA6B
|
||||
.Lk_dsbe:
|
||||
.quad 0x46F2929626D4D000, 0x2242600464B4F6B0
|
||||
.quad 0x0C55A6CDFFAAC100, 0x9467F36B98593E32
|
||||
.Lk_dsbo:
|
||||
.quad 0x1387EA537EF94000, 0xC7AA6DB9D4943E2D
|
||||
.quad 0x12D7560F93441D00, 0xCA4B8159D8C58E9C
|
||||
.byte 86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105,111,110,32,65,69,83,32,102,111,114,32,120,56,54,95,54,52,47,83,83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117,114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105,118,101,114,115,105,116,121,41,0
|
||||
.align 64
|
||||
.size _vpaes_consts,.-_vpaes_consts
|
||||
#endif
|
||||
865
third_party/boringssl/kit/linux-x86_64/crypto/fipsmodule/x86_64-mont.S
vendored
Normal file
@@ -0,0 +1,865 @@
|
||||
#if defined(__x86_64__) && !defined(OPENSSL_NO_ASM)
|
||||
.text
|
||||
|
||||
.extern OPENSSL_ia32cap_P
|
||||
.hidden OPENSSL_ia32cap_P
|
||||
|
||||
.globl bn_mul_mont
|
||||
.hidden bn_mul_mont
|
||||
.type bn_mul_mont,@function
|
||||
.align 16
|
||||
bn_mul_mont:
|
||||
.cfi_startproc
|
||||
movl %r9d,%r9d
|
||||
movq %rsp,%rax
|
||||
.cfi_def_cfa_register %rax
|
||||
testl $3,%r9d
|
||||
jnz .Lmul_enter
|
||||
cmpl $8,%r9d
|
||||
jb .Lmul_enter
|
||||
cmpq %rsi,%rdx
|
||||
jne .Lmul4x_enter
|
||||
testl $7,%r9d
|
||||
jz .Lsqr8x_enter
|
||||
jmp .Lmul4x_enter
|
||||
|
||||
.align 16
|
||||
.Lmul_enter:
|
||||
pushq %rbx
|
||||
.cfi_offset %rbx,-16
|
||||
pushq %rbp
|
||||
.cfi_offset %rbp,-24
|
||||
pushq %r12
|
||||
.cfi_offset %r12,-32
|
||||
pushq %r13
|
||||
.cfi_offset %r13,-40
|
||||
pushq %r14
|
||||
.cfi_offset %r14,-48
|
||||
pushq %r15
|
||||
.cfi_offset %r15,-56
|
||||
|
||||
negq %r9
|
||||
movq %rsp,%r11
|
||||
leaq -16(%rsp,%r9,8),%r10
|
||||
negq %r9
|
||||
andq $-1024,%r10
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
subq %r10,%r11
|
||||
andq $-4096,%r11
|
||||
leaq (%r10,%r11,1),%rsp
|
||||
movq (%rsp),%r11
|
||||
cmpq %r10,%rsp
|
||||
ja .Lmul_page_walk
|
||||
jmp .Lmul_page_walk_done
|
||||
|
||||
.align 16
|
||||
.Lmul_page_walk:
|
||||
leaq -4096(%rsp),%rsp
|
||||
movq (%rsp),%r11
|
||||
cmpq %r10,%rsp
|
||||
ja .Lmul_page_walk
|
||||
.Lmul_page_walk_done:
|
||||
|
||||
movq %rax,8(%rsp,%r9,8)
|
||||
.cfi_escape 0x0f,0x0a,0x77,0x08,0x79,0x00,0x38,0x1e,0x22,0x06,0x23,0x08
|
||||
.Lmul_body:
|
||||
movq %rdx,%r12
|
||||
movq (%r8),%r8
|
||||
movq (%r12),%rbx
|
||||
movq (%rsi),%rax
|
||||
|
||||
xorq %r14,%r14
|
||||
xorq %r15,%r15
|
||||
|
||||
movq %r8,%rbp
|
||||
mulq %rbx
|
||||
movq %rax,%r10
|
||||
movq (%rcx),%rax
|
||||
|
||||
imulq %r10,%rbp
|
||||
movq %rdx,%r11
|
||||
|
||||
mulq %rbp
|
||||
addq %rax,%r10
|
||||
movq 8(%rsi),%rax
|
||||
adcq $0,%rdx
|
||||
movq %rdx,%r13
|
||||
|
||||
leaq 1(%r15),%r15
|
||||
jmp .L1st_enter
|
||||
|
||||
.align 16
|
||||
.L1st:
|
||||
addq %rax,%r13
|
||||
movq (%rsi,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
addq %r11,%r13
|
||||
movq %r10,%r11
|
||||
adcq $0,%rdx
|
||||
movq %r13,-16(%rsp,%r15,8)
|
||||
movq %rdx,%r13
|
||||
|
||||
.L1st_enter:
|
||||
mulq %rbx
|
||||
addq %rax,%r11
|
||||
movq (%rcx,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
leaq 1(%r15),%r15
|
||||
movq %rdx,%r10
|
||||
|
||||
mulq %rbp
|
||||
cmpq %r9,%r15
|
||||
jne .L1st
|
||||
|
||||
addq %rax,%r13
|
||||
movq (%rsi),%rax
|
||||
adcq $0,%rdx
|
||||
addq %r11,%r13
|
||||
adcq $0,%rdx
|
||||
movq %r13,-16(%rsp,%r15,8)
|
||||
movq %rdx,%r13
|
||||
movq %r10,%r11
|
||||
|
||||
xorq %rdx,%rdx
|
||||
addq %r11,%r13
|
||||
adcq $0,%rdx
|
||||
movq %r13,-8(%rsp,%r9,8)
|
||||
movq %rdx,(%rsp,%r9,8)
|
||||
|
||||
leaq 1(%r14),%r14
|
||||
jmp .Louter
|
||||
.align 16
|
||||
.Louter:
|
||||
movq (%r12,%r14,8),%rbx
|
||||
xorq %r15,%r15
|
||||
movq %r8,%rbp
|
||||
movq (%rsp),%r10
|
||||
mulq %rbx
|
||||
addq %rax,%r10
|
||||
movq (%rcx),%rax
|
||||
adcq $0,%rdx
|
||||
|
||||
imulq %r10,%rbp
|
||||
movq %rdx,%r11
|
||||
|
||||
mulq %rbp
|
||||
addq %rax,%r10
|
||||
movq 8(%rsi),%rax
|
||||
adcq $0,%rdx
|
||||
movq 8(%rsp),%r10
|
||||
movq %rdx,%r13
|
||||
|
||||
leaq 1(%r15),%r15
|
||||
jmp .Linner_enter
|
||||
|
||||
.align 16
|
||||
.Linner:
|
||||
addq %rax,%r13
|
||||
movq (%rsi,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
addq %r10,%r13
|
||||
movq (%rsp,%r15,8),%r10
|
||||
adcq $0,%rdx
|
||||
movq %r13,-16(%rsp,%r15,8)
|
||||
movq %rdx,%r13
|
||||
|
||||
.Linner_enter:
|
||||
mulq %rbx
|
||||
addq %rax,%r11
|
||||
movq (%rcx,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
addq %r11,%r10
|
||||
movq %rdx,%r11
|
||||
adcq $0,%r11
|
||||
leaq 1(%r15),%r15
|
||||
|
||||
mulq %rbp
|
||||
cmpq %r9,%r15
|
||||
jne .Linner
|
||||
|
||||
addq %rax,%r13
|
||||
movq (%rsi),%rax
|
||||
adcq $0,%rdx
|
||||
addq %r10,%r13
|
||||
movq (%rsp,%r15,8),%r10
|
||||
adcq $0,%rdx
|
||||
movq %r13,-16(%rsp,%r15,8)
|
||||
movq %rdx,%r13
|
||||
|
||||
xorq %rdx,%rdx
|
||||
addq %r11,%r13
|
||||
adcq $0,%rdx
|
||||
addq %r10,%r13
|
||||
adcq $0,%rdx
|
||||
movq %r13,-8(%rsp,%r9,8)
|
||||
movq %rdx,(%rsp,%r9,8)
|
||||
|
||||
leaq 1(%r14),%r14
|
||||
cmpq %r9,%r14
|
||||
jb .Louter
|
||||
|
||||
xorq %r14,%r14
|
||||
movq (%rsp),%rax
|
||||
leaq (%rsp),%rsi
|
||||
movq %r9,%r15
|
||||
jmp .Lsub
|
||||
.align 16
|
||||
.Lsub: sbbq (%rcx,%r14,8),%rax
|
||||
movq %rax,(%rdi,%r14,8)
|
||||
movq 8(%rsi,%r14,8),%rax
|
||||
leaq 1(%r14),%r14
|
||||
decq %r15
|
||||
jnz .Lsub
|
||||
|
||||
sbbq $0,%rax
|
||||
xorq %r14,%r14
|
||||
andq %rax,%rsi
|
||||
notq %rax
|
||||
movq %rdi,%rcx
|
||||
andq %rax,%rcx
|
||||
movq %r9,%r15
|
||||
orq %rcx,%rsi
|
||||
.align 16
|
||||
.Lcopy:
|
||||
movq (%rsi,%r14,8),%rax
|
||||
movq %r14,(%rsp,%r14,8)
|
||||
movq %rax,(%rdi,%r14,8)
|
||||
leaq 1(%r14),%r14
|
||||
subq $1,%r15
|
||||
jnz .Lcopy
|
||||
|
||||
movq 8(%rsp,%r9,8),%rsi
|
||||
.cfi_def_cfa %rsi,8
|
||||
movq $1,%rax
|
||||
movq -48(%rsi),%r15
|
||||
.cfi_restore %r15
|
||||
movq -40(%rsi),%r14
|
||||
.cfi_restore %r14
|
||||
movq -32(%rsi),%r13
|
||||
.cfi_restore %r13
|
||||
movq -24(%rsi),%r12
|
||||
.cfi_restore %r12
|
||||
movq -16(%rsi),%rbp
|
||||
.cfi_restore %rbp
|
||||
movq -8(%rsi),%rbx
|
||||
.cfi_restore %rbx
|
||||
leaq (%rsi),%rsp
|
||||
.cfi_def_cfa_register %rsp
|
||||
.Lmul_epilogue:
|
||||
.byte 0xf3,0xc3
|
||||
.cfi_endproc
|
||||
.size bn_mul_mont,.-bn_mul_mont
|
||||
.type bn_mul4x_mont,@function
|
||||
.align 16
|
||||
bn_mul4x_mont:
|
||||
.cfi_startproc
|
||||
movl %r9d,%r9d
|
||||
movq %rsp,%rax
|
||||
.cfi_def_cfa_register %rax
|
||||
.Lmul4x_enter:
|
||||
pushq %rbx
|
||||
.cfi_offset %rbx,-16
|
||||
pushq %rbp
|
||||
.cfi_offset %rbp,-24
|
||||
pushq %r12
|
||||
.cfi_offset %r12,-32
|
||||
pushq %r13
|
||||
.cfi_offset %r13,-40
|
||||
pushq %r14
|
||||
.cfi_offset %r14,-48
|
||||
pushq %r15
|
||||
.cfi_offset %r15,-56
|
||||
|
||||
negq %r9
|
||||
movq %rsp,%r11
|
||||
leaq -32(%rsp,%r9,8),%r10
|
||||
negq %r9
|
||||
andq $-1024,%r10
|
||||
|
||||
subq %r10,%r11
|
||||
andq $-4096,%r11
|
||||
leaq (%r10,%r11,1),%rsp
|
||||
movq (%rsp),%r11
|
||||
cmpq %r10,%rsp
|
||||
ja .Lmul4x_page_walk
|
||||
jmp .Lmul4x_page_walk_done
|
||||
|
||||
.Lmul4x_page_walk:
|
||||
leaq -4096(%rsp),%rsp
|
||||
movq (%rsp),%r11
|
||||
cmpq %r10,%rsp
|
||||
ja .Lmul4x_page_walk
|
||||
.Lmul4x_page_walk_done:
|
||||
|
||||
movq %rax,8(%rsp,%r9,8)
|
||||
.cfi_escape 0x0f,0x0a,0x77,0x08,0x79,0x00,0x38,0x1e,0x22,0x06,0x23,0x08
|
||||
.Lmul4x_body:
|
||||
movq %rdi,16(%rsp,%r9,8)
|
||||
movq %rdx,%r12
|
||||
movq (%r8),%r8
|
||||
movq (%r12),%rbx
|
||||
movq (%rsi),%rax
|
||||
|
||||
xorq %r14,%r14
|
||||
xorq %r15,%r15
|
||||
|
||||
movq %r8,%rbp
|
||||
mulq %rbx
|
||||
movq %rax,%r10
|
||||
movq (%rcx),%rax
|
||||
|
||||
imulq %r10,%rbp
|
||||
movq %rdx,%r11
|
||||
|
||||
mulq %rbp
|
||||
addq %rax,%r10
|
||||
movq 8(%rsi),%rax
|
||||
adcq $0,%rdx
|
||||
movq %rdx,%rdi
|
||||
|
||||
mulq %rbx
|
||||
addq %rax,%r11
|
||||
movq 8(%rcx),%rax
|
||||
adcq $0,%rdx
|
||||
movq %rdx,%r10
|
||||
|
||||
mulq %rbp
|
||||
addq %rax,%rdi
|
||||
movq 16(%rsi),%rax
|
||||
adcq $0,%rdx
|
||||
addq %r11,%rdi
|
||||
leaq 4(%r15),%r15
|
||||
adcq $0,%rdx
|
||||
movq %rdi,(%rsp)
|
||||
movq %rdx,%r13
|
||||
jmp .L1st4x
|
||||
.align 16
|
||||
.L1st4x:
|
||||
mulq %rbx
|
||||
addq %rax,%r10
|
||||
movq -16(%rcx,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
movq %rdx,%r11
|
||||
|
||||
mulq %rbp
|
||||
addq %rax,%r13
|
||||
movq -8(%rsi,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
addq %r10,%r13
|
||||
adcq $0,%rdx
|
||||
movq %r13,-24(%rsp,%r15,8)
|
||||
movq %rdx,%rdi
|
||||
|
||||
mulq %rbx
|
||||
addq %rax,%r11
|
||||
movq -8(%rcx,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
movq %rdx,%r10
|
||||
|
||||
mulq %rbp
|
||||
addq %rax,%rdi
|
||||
movq (%rsi,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
addq %r11,%rdi
|
||||
adcq $0,%rdx
|
||||
movq %rdi,-16(%rsp,%r15,8)
|
||||
movq %rdx,%r13
|
||||
|
||||
mulq %rbx
|
||||
addq %rax,%r10
|
||||
movq (%rcx,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
movq %rdx,%r11
|
||||
|
||||
mulq %rbp
|
||||
addq %rax,%r13
|
||||
movq 8(%rsi,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
addq %r10,%r13
|
||||
adcq $0,%rdx
|
||||
movq %r13,-8(%rsp,%r15,8)
|
||||
movq %rdx,%rdi
|
||||
|
||||
mulq %rbx
|
||||
addq %rax,%r11
|
||||
movq 8(%rcx,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
leaq 4(%r15),%r15
|
||||
movq %rdx,%r10
|
||||
|
||||
mulq %rbp
|
||||
addq %rax,%rdi
|
||||
movq -16(%rsi,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
addq %r11,%rdi
|
||||
adcq $0,%rdx
|
||||
movq %rdi,-32(%rsp,%r15,8)
|
||||
movq %rdx,%r13
|
||||
cmpq %r9,%r15
|
||||
jb .L1st4x
|
||||
|
||||
mulq %rbx
|
||||
addq %rax,%r10
|
||||
movq -16(%rcx,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
movq %rdx,%r11
|
||||
|
||||
mulq %rbp
|
||||
addq %rax,%r13
|
||||
movq -8(%rsi,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
addq %r10,%r13
|
||||
adcq $0,%rdx
|
||||
movq %r13,-24(%rsp,%r15,8)
|
||||
movq %rdx,%rdi
|
||||
|
||||
mulq %rbx
|
||||
addq %rax,%r11
|
||||
movq -8(%rcx,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
movq %rdx,%r10
|
||||
|
||||
mulq %rbp
|
||||
addq %rax,%rdi
|
||||
movq (%rsi),%rax
|
||||
adcq $0,%rdx
|
||||
addq %r11,%rdi
|
||||
adcq $0,%rdx
|
||||
movq %rdi,-16(%rsp,%r15,8)
|
||||
movq %rdx,%r13
|
||||
|
||||
xorq %rdi,%rdi
|
||||
addq %r10,%r13
|
||||
adcq $0,%rdi
|
||||
movq %r13,-8(%rsp,%r15,8)
|
||||
movq %rdi,(%rsp,%r15,8)
|
||||
|
||||
leaq 1(%r14),%r14
|
||||
.align 4
|
||||
.Louter4x:
|
||||
movq (%r12,%r14,8),%rbx
|
||||
xorq %r15,%r15
|
||||
movq (%rsp),%r10
|
||||
movq %r8,%rbp
|
||||
mulq %rbx
|
||||
addq %rax,%r10
|
||||
movq (%rcx),%rax
|
||||
adcq $0,%rdx
|
||||
|
||||
imulq %r10,%rbp
|
||||
movq %rdx,%r11
|
||||
|
||||
mulq %rbp
|
||||
addq %rax,%r10
|
||||
movq 8(%rsi),%rax
|
||||
adcq $0,%rdx
|
||||
movq %rdx,%rdi
|
||||
|
||||
mulq %rbx
|
||||
addq %rax,%r11
|
||||
movq 8(%rcx),%rax
|
||||
adcq $0,%rdx
|
||||
addq 8(%rsp),%r11
|
||||
adcq $0,%rdx
|
||||
movq %rdx,%r10
|
||||
|
||||
mulq %rbp
|
||||
addq %rax,%rdi
|
||||
movq 16(%rsi),%rax
|
||||
adcq $0,%rdx
|
||||
addq %r11,%rdi
|
||||
leaq 4(%r15),%r15
|
||||
adcq $0,%rdx
|
||||
movq %rdi,(%rsp)
|
||||
movq %rdx,%r13
|
||||
jmp .Linner4x
|
||||
.align 16
|
||||
.Linner4x:
|
||||
mulq %rbx
|
||||
addq %rax,%r10
|
||||
movq -16(%rcx,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
addq -16(%rsp,%r15,8),%r10
|
||||
adcq $0,%rdx
|
||||
movq %rdx,%r11
|
||||
|
||||
mulq %rbp
|
||||
addq %rax,%r13
|
||||
movq -8(%rsi,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
addq %r10,%r13
|
||||
adcq $0,%rdx
|
||||
movq %r13,-24(%rsp,%r15,8)
|
||||
movq %rdx,%rdi
|
||||
|
||||
mulq %rbx
|
||||
addq %rax,%r11
|
||||
movq -8(%rcx,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
addq -8(%rsp,%r15,8),%r11
|
||||
adcq $0,%rdx
|
||||
movq %rdx,%r10
|
||||
|
||||
mulq %rbp
|
||||
addq %rax,%rdi
|
||||
movq (%rsi,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
addq %r11,%rdi
|
||||
adcq $0,%rdx
|
||||
movq %rdi,-16(%rsp,%r15,8)
|
||||
movq %rdx,%r13
|
||||
|
||||
mulq %rbx
|
||||
addq %rax,%r10
|
||||
movq (%rcx,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
addq (%rsp,%r15,8),%r10
|
||||
adcq $0,%rdx
|
||||
movq %rdx,%r11
|
||||
|
||||
mulq %rbp
|
||||
addq %rax,%r13
|
||||
movq 8(%rsi,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
addq %r10,%r13
|
||||
adcq $0,%rdx
|
||||
movq %r13,-8(%rsp,%r15,8)
|
||||
movq %rdx,%rdi
|
||||
|
||||
mulq %rbx
|
||||
addq %rax,%r11
|
||||
movq 8(%rcx,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
addq 8(%rsp,%r15,8),%r11
|
||||
adcq $0,%rdx
|
||||
leaq 4(%r15),%r15
|
||||
movq %rdx,%r10
|
||||
|
||||
mulq %rbp
|
||||
addq %rax,%rdi
|
||||
movq -16(%rsi,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
addq %r11,%rdi
|
||||
adcq $0,%rdx
|
||||
movq %rdi,-32(%rsp,%r15,8)
|
||||
movq %rdx,%r13
|
||||
cmpq %r9,%r15
|
||||
jb .Linner4x
|
||||
|
||||
mulq %rbx
|
||||
addq %rax,%r10
|
||||
movq -16(%rcx,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
addq -16(%rsp,%r15,8),%r10
|
||||
adcq $0,%rdx
|
||||
movq %rdx,%r11
|
||||
|
||||
mulq %rbp
|
||||
addq %rax,%r13
|
||||
movq -8(%rsi,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
addq %r10,%r13
|
||||
adcq $0,%rdx
|
||||
movq %r13,-24(%rsp,%r15,8)
|
||||
movq %rdx,%rdi
|
||||
|
||||
mulq %rbx
|
||||
addq %rax,%r11
|
||||
movq -8(%rcx,%r15,8),%rax
|
||||
adcq $0,%rdx
|
||||
addq -8(%rsp,%r15,8),%r11
|
||||
adcq $0,%rdx
|
||||
leaq 1(%r14),%r14
|
||||
movq %rdx,%r10
|
||||
|
||||
mulq %rbp
|
||||
addq %rax,%rdi
|
||||
movq (%rsi),%rax
|
||||
adcq $0,%rdx
|
||||
addq %r11,%rdi
|
||||
adcq $0,%rdx
|
||||
movq %rdi,-16(%rsp,%r15,8)
|
||||
movq %rdx,%r13
|
||||
|
||||
xorq %rdi,%rdi
|
||||
addq %r10,%r13
|
||||
adcq $0,%rdi
|
||||
addq (%rsp,%r9,8),%r13
|
||||
adcq $0,%rdi
|
||||
movq %r13,-8(%rsp,%r15,8)
|
||||
movq %rdi,(%rsp,%r15,8)
|
||||
|
||||
cmpq %r9,%r14
|
||||
jb .Louter4x
|
||||
movq 16(%rsp,%r9,8),%rdi
|
||||
leaq -4(%r9),%r15
|
||||
movq 0(%rsp),%rax
|
||||
pxor %xmm0,%xmm0
|
||||
movq 8(%rsp),%rdx
|
||||
shrq $2,%r15
|
||||
leaq (%rsp),%rsi
|
||||
xorq %r14,%r14
|
||||
|
||||
subq 0(%rcx),%rax
|
||||
movq 16(%rsi),%rbx
|
||||
movq 24(%rsi),%rbp
|
||||
sbbq 8(%rcx),%rdx
|
||||
jmp .Lsub4x
|
||||
.align 16
|
||||
.Lsub4x:
|
||||
movq %rax,0(%rdi,%r14,8)
|
||||
movq %rdx,8(%rdi,%r14,8)
|
||||
sbbq 16(%rcx,%r14,8),%rbx
|
||||
movq 32(%rsi,%r14,8),%rax
|
||||
movq 40(%rsi,%r14,8),%rdx
|
||||
sbbq 24(%rcx,%r14,8),%rbp
|
||||
movq %rbx,16(%rdi,%r14,8)
|
||||
movq %rbp,24(%rdi,%r14,8)
|
||||
sbbq 32(%rcx,%r14,8),%rax
|
||||
movq 48(%rsi,%r14,8),%rbx
|
||||
movq 56(%rsi,%r14,8),%rbp
|
||||
sbbq 40(%rcx,%r14,8),%rdx
|
||||
leaq 4(%r14),%r14
|
||||
decq %r15
|
||||
jnz .Lsub4x
|
||||
|
||||
movq %rax,0(%rdi,%r14,8)
|
||||
movq 32(%rsi,%r14,8),%rax
|
||||
sbbq 16(%rcx,%r14,8),%rbx
|
||||
movq %rdx,8(%rdi,%r14,8)
|
||||
sbbq 24(%rcx,%r14,8),%rbp
|
||||
movq %rbx,16(%rdi,%r14,8)
|
||||
|
||||
sbbq $0,%rax
|
||||
movq %rbp,24(%rdi,%r14,8)
|
||||
xorq %r14,%r14
|
||||
andq %rax,%rsi
|
||||
notq %rax
|
||||
movq %rdi,%rcx
|
||||
andq %rax,%rcx
|
||||
leaq -4(%r9),%r15
|
||||
orq %rcx,%rsi
|
||||
shrq $2,%r15
|
||||
|
||||
movdqu (%rsi),%xmm1
|
||||
movdqa %xmm0,(%rsp)
|
||||
movdqu %xmm1,(%rdi)
|
||||
jmp .Lcopy4x
|
||||
.align 16
|
||||
.Lcopy4x:
|
||||
movdqu 16(%rsi,%r14,1),%xmm2
|
||||
movdqu 32(%rsi,%r14,1),%xmm1
|
||||
movdqa %xmm0,16(%rsp,%r14,1)
|
||||
movdqu %xmm2,16(%rdi,%r14,1)
|
||||
movdqa %xmm0,32(%rsp,%r14,1)
|
||||
movdqu %xmm1,32(%rdi,%r14,1)
|
||||
leaq 32(%r14),%r14
|
||||
decq %r15
|
||||
jnz .Lcopy4x
|
||||
|
||||
movdqu 16(%rsi,%r14,1),%xmm2
|
||||
movdqa %xmm0,16(%rsp,%r14,1)
|
||||
movdqu %xmm2,16(%rdi,%r14,1)
|
||||
movq 8(%rsp,%r9,8),%rsi
|
||||
.cfi_def_cfa %rsi, 8
|
||||
movq $1,%rax
|
||||
movq -48(%rsi),%r15
|
||||
.cfi_restore %r15
|
||||
movq -40(%rsi),%r14
|
||||
.cfi_restore %r14
|
||||
movq -32(%rsi),%r13
|
||||
.cfi_restore %r13
|
||||
movq -24(%rsi),%r12
|
||||
.cfi_restore %r12
|
||||
movq -16(%rsi),%rbp
|
||||
.cfi_restore %rbp
|
||||
movq -8(%rsi),%rbx
|
||||
.cfi_restore %rbx
|
||||
leaq (%rsi),%rsp
|
||||
.cfi_def_cfa_register %rsp
|
||||
.Lmul4x_epilogue:
|
||||
.byte 0xf3,0xc3
|
||||
.cfi_endproc
|
||||
.size bn_mul4x_mont,.-bn_mul4x_mont
|
||||
.extern bn_sqr8x_internal
|
||||
.hidden bn_sqr8x_internal
|
||||
|
||||
.type bn_sqr8x_mont,@function
|
||||
.align 32
|
||||
bn_sqr8x_mont:
|
||||
.cfi_startproc
|
||||
movq %rsp,%rax
|
||||
.cfi_def_cfa_register %rax
|
||||
.Lsqr8x_enter:
|
||||
pushq %rbx
|
||||
.cfi_offset %rbx,-16
|
||||
pushq %rbp
|
||||
.cfi_offset %rbp,-24
|
||||
pushq %r12
|
||||
.cfi_offset %r12,-32
|
||||
pushq %r13
|
||||
.cfi_offset %r13,-40
|
||||
pushq %r14
|
||||
.cfi_offset %r14,-48
|
||||
pushq %r15
|
||||
.cfi_offset %r15,-56
|
||||
.Lsqr8x_prologue:
|
||||
|
||||
movl %r9d,%r10d
|
||||
shll $3,%r9d
|
||||
shlq $3+2,%r10
|
||||
negq %r9
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
leaq -64(%rsp,%r9,2),%r11
|
||||
movq %rsp,%rbp
|
||||
movq (%r8),%r8
|
||||
subq %rsi,%r11
|
||||
andq $4095,%r11
|
||||
cmpq %r11,%r10
|
||||
jb .Lsqr8x_sp_alt
|
||||
subq %r11,%rbp
|
||||
leaq -64(%rbp,%r9,2),%rbp
|
||||
jmp .Lsqr8x_sp_done
|
||||
|
||||
.align 32
|
||||
.Lsqr8x_sp_alt:
|
||||
leaq 4096-64(,%r9,2),%r10
|
||||
leaq -64(%rbp,%r9,2),%rbp
|
||||
subq %r10,%r11
|
||||
movq $0,%r10
|
||||
cmovcq %r10,%r11
|
||||
subq %r11,%rbp
|
||||
.Lsqr8x_sp_done:
|
||||
andq $-64,%rbp
|
||||
movq %rsp,%r11
|
||||
subq %rbp,%r11
|
||||
andq $-4096,%r11
|
||||
leaq (%r11,%rbp,1),%rsp
|
||||
movq (%rsp),%r10
|
||||
cmpq %rbp,%rsp
|
||||
ja .Lsqr8x_page_walk
|
||||
jmp .Lsqr8x_page_walk_done
|
||||
|
||||
.align 16
|
||||
.Lsqr8x_page_walk:
|
||||
leaq -4096(%rsp),%rsp
|
||||
movq (%rsp),%r10
|
||||
cmpq %rbp,%rsp
|
||||
ja .Lsqr8x_page_walk
|
||||
.Lsqr8x_page_walk_done:
|
||||
|
||||
movq %r9,%r10
|
||||
negq %r9
|
||||
|
||||
movq %r8,32(%rsp)
|
||||
movq %rax,40(%rsp)
|
||||
.cfi_escape 0x0f,0x05,0x77,0x28,0x06,0x23,0x08
|
||||
.Lsqr8x_body:
|
||||
|
||||
.byte 102,72,15,110,209
|
||||
pxor %xmm0,%xmm0
|
||||
.byte 102,72,15,110,207
|
||||
.byte 102,73,15,110,218
|
||||
call bn_sqr8x_internal
|
||||
|
||||
|
||||
|
||||
|
||||
leaq (%rdi,%r9,1),%rbx
|
||||
movq %r9,%rcx
|
||||
movq %r9,%rdx
|
||||
.byte 102,72,15,126,207
|
||||
sarq $3+2,%rcx
|
||||
jmp .Lsqr8x_sub
|
||||
|
||||
.align 32
|
||||
.Lsqr8x_sub:
|
||||
movq 0(%rbx),%r12
|
||||
movq 8(%rbx),%r13
|
||||
movq 16(%rbx),%r14
|
||||
movq 24(%rbx),%r15
|
||||
leaq 32(%rbx),%rbx
|
||||
sbbq 0(%rbp),%r12
|
||||
sbbq 8(%rbp),%r13
|
||||
sbbq 16(%rbp),%r14
|
||||
sbbq 24(%rbp),%r15
|
||||
leaq 32(%rbp),%rbp
|
||||
movq %r12,0(%rdi)
|
||||
movq %r13,8(%rdi)
|
||||
movq %r14,16(%rdi)
|
||||
movq %r15,24(%rdi)
|
||||
leaq 32(%rdi),%rdi
|
||||
incq %rcx
|
||||
jnz .Lsqr8x_sub
|
||||
|
||||
sbbq $0,%rax
|
||||
leaq (%rbx,%r9,1),%rbx
|
||||
leaq (%rdi,%r9,1),%rdi
|
||||
|
||||
.byte 102,72,15,110,200
|
||||
pxor %xmm0,%xmm0
|
||||
pshufd $0,%xmm1,%xmm1
|
||||
movq 40(%rsp),%rsi
|
||||
.cfi_def_cfa %rsi,8
|
||||
jmp .Lsqr8x_cond_copy
|
||||
|
||||
.align 32
|
||||
.Lsqr8x_cond_copy:
|
||||
movdqa 0(%rbx),%xmm2
|
||||
movdqa 16(%rbx),%xmm3
|
||||
leaq 32(%rbx),%rbx
|
||||
movdqu 0(%rdi),%xmm4
|
||||
movdqu 16(%rdi),%xmm5
|
||||
leaq 32(%rdi),%rdi
|
||||
movdqa %xmm0,-32(%rbx)
|
||||
movdqa %xmm0,-16(%rbx)
|
||||
movdqa %xmm0,-32(%rbx,%rdx,1)
|
||||
movdqa %xmm0,-16(%rbx,%rdx,1)
|
||||
pcmpeqd %xmm1,%xmm0
|
||||
pand %xmm1,%xmm2
|
||||
pand %xmm1,%xmm3
|
||||
pand %xmm0,%xmm4
|
||||
pand %xmm0,%xmm5
|
||||
pxor %xmm0,%xmm0
|
||||
por %xmm2,%xmm4
|
||||
por %xmm3,%xmm5
|
||||
movdqu %xmm4,-32(%rdi)
|
||||
movdqu %xmm5,-16(%rdi)
|
||||
addq $32,%r9
|
||||
jnz .Lsqr8x_cond_copy
|
||||
|
||||
movq $1,%rax
|
||||
movq -48(%rsi),%r15
|
||||
.cfi_restore %r15
|
||||
movq -40(%rsi),%r14
|
||||
.cfi_restore %r14
|
||||
movq -32(%rsi),%r13
|
||||
.cfi_restore %r13
|
||||
movq -24(%rsi),%r12
|
||||
.cfi_restore %r12
|
||||
movq -16(%rsi),%rbp
|
||||
.cfi_restore %rbp
|
||||
movq -8(%rsi),%rbx
|
||||
.cfi_restore %rbx
|
||||
leaq (%rsi),%rsp
|
||||
.cfi_def_cfa_register %rsp
|
||||
.Lsqr8x_epilogue:
|
||||
.byte 0xf3,0xc3
|
||||
.cfi_endproc
|
||||
.size bn_sqr8x_mont,.-bn_sqr8x_mont
|
||||
.byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
|
||||
.align 16
|
||||
#endif
|
||||
2392
third_party/boringssl/kit/linux-x86_64/crypto/fipsmodule/x86_64-mont5.S
vendored
Normal file
File diff suppressed because it is too large
Load Diff