path: root/CryptoPkg/Library/OpensslLib/X64Gcc
author     Christopher Zurcher <christopher.zurcher@microsoft.com>          2021-07-21 06:06:46 +0800
committer  mergify[bot] <37929162+mergify[bot]@users.noreply.github.com>    2021-07-26 07:30:11 +0000
commit     147f34b56ce0e2e18285ef7d0695753ac0aa5085 (patch)
tree       5650720df23ef3cf8597052f5498115f51069cd5 /CryptoPkg/Library/OpensslLib/X64Gcc
parent     878a92a887ef4ca879d336f323e91b13cc767a59 (diff)
download   edk2-147f34b56ce0e2e18285ef7d0695753ac0aa5085.tar.gz
           edk2-147f34b56ce0e2e18285ef7d0695753ac0aa5085.tar.bz2
           edk2-147f34b56ce0e2e18285ef7d0695753ac0aa5085.zip
CryptoPkg/OpensslLib: Commit the auto-generated assembly files for X64
BZ: https://bugzilla.tianocore.org/show_bug.cgi?id=2507

Adding the auto-generated assembly files for X64 architectures.

Cc: Jiewen Yao <jiewen.yao@intel.com>
Cc: Jian J Wang <jian.j.wang@intel.com>
Cc: Xiaoyu Lu <xiaoyux.lu@intel.com>
Cc: Mike Kinney <michael.d.kinney@intel.com>
Cc: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Christopher Zurcher <christopher.zurcher@microsoft.com>
Reviewed-by: Jiewen Yao <Jiewen.yao@intel.com>
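Context for readers of this diff: each file below is regenerated from the perlasm script named in its header (for example openssl/crypto/aes/asm/aesni-mb-x86_64.pl) rather than edited by hand, and the symbols it defines are consumed from C inside OpensslLib. As a rough, hedged illustration of how the simplest of those symbols are driven from C, the sketch below declares the single-block entry points of aesni-x86_64.S; the prototypes, the AESNI_KEY name, and its layout are assumptions inferred from the System V argument registers (rdi = in, rsi = out, rdx = key schedule) and the "movl 240(%rdx),%eax" rounds load visible in the assembly, not declarations contained in this commit.

    /*
     * Hedged sketch only: prototypes and AESNI_KEY layout are assumptions
     * inferred from the register usage in aesni-x86_64.S, not taken from
     * this commit. Link against the generated .S files to run it.
     */
    #include <stdint.h>
    #include <stdio.h>

    typedef struct {
      uint32_t rd_key[60];   /* expanded round keys (up to AES-256)          */
      int      rounds;       /* round count, read by the asm at offset 240   */
    } AESNI_KEY;             /* hypothetical name; OpenSSL calls it AES_KEY  */

    /* Single-block primitives exported by aesni-x86_64.S. */
    extern void aesni_encrypt (const uint8_t *in, uint8_t *out, const AESNI_KEY *key);
    extern void aesni_decrypt (const uint8_t *in, uint8_t *out, const AESNI_KEY *key);
    /* Key-schedule helper assumed to live in the same generated file. */
    extern int  aesni_set_encrypt_key (const uint8_t *userKey, int bits, AESNI_KEY *key);

    int main (void)
    {
      AESNI_KEY  Key;
      uint8_t    KeyBytes[16] = { 0 };
      uint8_t    Plain[16]    = { 0 };
      uint8_t    Cipher[16];

      if (aesni_set_encrypt_key (KeyBytes, 128, &Key) != 0) {
        return 1;                           /* key expansion failed        */
      }
      aesni_encrypt (Plain, Cipher, &Key);  /* encrypt one 16-byte block   */
      printf ("first ciphertext byte: 0x%02x\n", Cipher[0]);
      return 0;
    }

The multi-buffer and stitched variants added here take per-lane descriptor arrays rather than a single in/out pair, and the dispatch between code paths happens at run time via the capability word published by x86_64cpuid.S, as can be seen in aesni-sha1-x86_64.S where aesni_cbc_sha1_enc branches on OPENSSL_ia32cap_P to pick the SHA-extension or SSSE3 path.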
Diffstat (limited to 'CryptoPkg/Library/OpensslLib/X64Gcc')
-rw-r--r--  CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/aesni-mb-x86_64.S       552
-rw-r--r--  CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/aesni-sha1-x86_64.S    1719
-rw-r--r--  CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/aesni-sha256-x86_64.S    69
-rw-r--r--  CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/aesni-x86_64.S         4484
-rw-r--r--  CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/vpaes-x86_64.S          863
-rw-r--r--  CryptoPkg/Library/OpensslLib/X64Gcc/crypto/modes/aesni-gcm-x86_64.S     29
-rw-r--r--  CryptoPkg/Library/OpensslLib/X64Gcc/crypto/modes/ghash-x86_64.S       1386
-rw-r--r--  CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha1-mb-x86_64.S       2962
-rw-r--r--  CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha1-x86_64.S          2631
-rw-r--r--  CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha256-mb-x86_64.S     3286
-rw-r--r--  CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha256-x86_64.S        3097
-rw-r--r--  CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha512-x86_64.S        1811
-rw-r--r--  CryptoPkg/Library/OpensslLib/X64Gcc/crypto/x86_64cpuid.S               491
13 files changed, 23380 insertions, 0 deletions
diff --git a/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/aesni-mb-x86_64.S b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/aesni-mb-x86_64.S
new file mode 100644
index 0000000000..7749fd685a
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/aesni-mb-x86_64.S
@@ -0,0 +1,552 @@
+# WARNING: do not edit!
+# Generated from openssl/crypto/aes/asm/aesni-mb-x86_64.pl
+#
+# Copyright 2013-2020 The OpenSSL Project Authors. All Rights Reserved.
+#
+# Licensed under the OpenSSL license (the "License"). You may not use
+# this file except in compliance with the License. You can obtain a copy
+# in the file LICENSE in the source distribution or at
+# https://www.openssl.org/source/license.html
+
+.text
+
+
+
+.globl aesni_multi_cbc_encrypt
+.type aesni_multi_cbc_encrypt,@function
+.align 32
+aesni_multi_cbc_encrypt:
+.cfi_startproc
+ movq %rsp,%rax
+.cfi_def_cfa_register %rax
+ pushq %rbx
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_offset %rbp,-24
+ pushq %r12
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_offset %r14,-48
+ pushq %r15
+.cfi_offset %r15,-56
+
+
+
+
+
+
+ subq $48,%rsp
+ andq $-64,%rsp
+ movq %rax,16(%rsp)
+.cfi_escape 0x0f,0x05,0x77,0x10,0x06,0x23,0x08
+
+.Lenc4x_body:
+ movdqu (%rsi),%xmm12
+ leaq 120(%rsi),%rsi
+ leaq 80(%rdi),%rdi
+
+.Lenc4x_loop_grande:
+ movl %edx,24(%rsp)
+ xorl %edx,%edx
+ movl -64(%rdi),%ecx
+ movq -80(%rdi),%r8
+ cmpl %edx,%ecx
+ movq -72(%rdi),%r12
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movdqu -56(%rdi),%xmm2
+ movl %ecx,32(%rsp)
+ cmovleq %rsp,%r8
+ movl -24(%rdi),%ecx
+ movq -40(%rdi),%r9
+ cmpl %edx,%ecx
+ movq -32(%rdi),%r13
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movdqu -16(%rdi),%xmm3
+ movl %ecx,36(%rsp)
+ cmovleq %rsp,%r9
+ movl 16(%rdi),%ecx
+ movq 0(%rdi),%r10
+ cmpl %edx,%ecx
+ movq 8(%rdi),%r14
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movdqu 24(%rdi),%xmm4
+ movl %ecx,40(%rsp)
+ cmovleq %rsp,%r10
+ movl 56(%rdi),%ecx
+ movq 40(%rdi),%r11
+ cmpl %edx,%ecx
+ movq 48(%rdi),%r15
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movdqu 64(%rdi),%xmm5
+ movl %ecx,44(%rsp)
+ cmovleq %rsp,%r11
+ testl %edx,%edx
+ jz .Lenc4x_done
+
+ movups 16-120(%rsi),%xmm1
+ pxor %xmm12,%xmm2
+ movups 32-120(%rsi),%xmm0
+ pxor %xmm12,%xmm3
+ movl 240-120(%rsi),%eax
+ pxor %xmm12,%xmm4
+ movdqu (%r8),%xmm6
+ pxor %xmm12,%xmm5
+ movdqu (%r9),%xmm7
+ pxor %xmm6,%xmm2
+ movdqu (%r10),%xmm8
+ pxor %xmm7,%xmm3
+ movdqu (%r11),%xmm9
+ pxor %xmm8,%xmm4
+ pxor %xmm9,%xmm5
+ movdqa 32(%rsp),%xmm10
+ xorq %rbx,%rbx
+ jmp .Loop_enc4x
+
+.align 32
+.Loop_enc4x:
+ addq $16,%rbx
+ leaq 16(%rsp),%rbp
+ movl $1,%ecx
+ subq %rbx,%rbp
+
+.byte 102,15,56,220,209
+ prefetcht0 31(%r8,%rbx,1)
+ prefetcht0 31(%r9,%rbx,1)
+.byte 102,15,56,220,217
+ prefetcht0 31(%r10,%rbx,1)
+ prefetcht0 31(%r10,%rbx,1)
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+ movups 48-120(%rsi),%xmm1
+ cmpl 32(%rsp),%ecx
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+.byte 102,15,56,220,224
+ cmovgeq %rbp,%r8
+ cmovgq %rbp,%r12
+.byte 102,15,56,220,232
+ movups -56(%rsi),%xmm0
+ cmpl 36(%rsp),%ecx
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+ cmovgeq %rbp,%r9
+ cmovgq %rbp,%r13
+.byte 102,15,56,220,233
+ movups -40(%rsi),%xmm1
+ cmpl 40(%rsp),%ecx
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+.byte 102,15,56,220,224
+ cmovgeq %rbp,%r10
+ cmovgq %rbp,%r14
+.byte 102,15,56,220,232
+ movups -24(%rsi),%xmm0
+ cmpl 44(%rsp),%ecx
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+ cmovgeq %rbp,%r11
+ cmovgq %rbp,%r15
+.byte 102,15,56,220,233
+ movups -8(%rsi),%xmm1
+ movdqa %xmm10,%xmm11
+.byte 102,15,56,220,208
+ prefetcht0 15(%r12,%rbx,1)
+ prefetcht0 15(%r13,%rbx,1)
+.byte 102,15,56,220,216
+ prefetcht0 15(%r14,%rbx,1)
+ prefetcht0 15(%r15,%rbx,1)
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+ movups 128-120(%rsi),%xmm0
+ pxor %xmm12,%xmm12
+
+.byte 102,15,56,220,209
+ pcmpgtd %xmm12,%xmm11
+ movdqu -120(%rsi),%xmm12
+.byte 102,15,56,220,217
+ paddd %xmm11,%xmm10
+ movdqa %xmm10,32(%rsp)
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+ movups 144-120(%rsi),%xmm1
+
+ cmpl $11,%eax
+
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+ movups 160-120(%rsi),%xmm0
+
+ jb .Lenc4x_tail
+
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+ movups 176-120(%rsi),%xmm1
+
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+ movups 192-120(%rsi),%xmm0
+
+ je .Lenc4x_tail
+
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+ movups 208-120(%rsi),%xmm1
+
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+ movups 224-120(%rsi),%xmm0
+ jmp .Lenc4x_tail
+
+.align 32
+.Lenc4x_tail:
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+ movdqu (%r8,%rbx,1),%xmm6
+ movdqu 16-120(%rsi),%xmm1
+
+.byte 102,15,56,221,208
+ movdqu (%r9,%rbx,1),%xmm7
+ pxor %xmm12,%xmm6
+.byte 102,15,56,221,216
+ movdqu (%r10,%rbx,1),%xmm8
+ pxor %xmm12,%xmm7
+.byte 102,15,56,221,224
+ movdqu (%r11,%rbx,1),%xmm9
+ pxor %xmm12,%xmm8
+.byte 102,15,56,221,232
+ movdqu 32-120(%rsi),%xmm0
+ pxor %xmm12,%xmm9
+
+ movups %xmm2,-16(%r12,%rbx,1)
+ pxor %xmm6,%xmm2
+ movups %xmm3,-16(%r13,%rbx,1)
+ pxor %xmm7,%xmm3
+ movups %xmm4,-16(%r14,%rbx,1)
+ pxor %xmm8,%xmm4
+ movups %xmm5,-16(%r15,%rbx,1)
+ pxor %xmm9,%xmm5
+
+ decl %edx
+ jnz .Loop_enc4x
+
+ movq 16(%rsp),%rax
+.cfi_def_cfa %rax,8
+ movl 24(%rsp),%edx
+
+
+
+
+
+
+
+
+
+
+ leaq 160(%rdi),%rdi
+ decl %edx
+ jnz .Lenc4x_loop_grande
+
+.Lenc4x_done:
+ movq -48(%rax),%r15
+.cfi_restore %r15
+ movq -40(%rax),%r14
+.cfi_restore %r14
+ movq -32(%rax),%r13
+.cfi_restore %r13
+ movq -24(%rax),%r12
+.cfi_restore %r12
+ movq -16(%rax),%rbp
+.cfi_restore %rbp
+ movq -8(%rax),%rbx
+.cfi_restore %rbx
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
+.Lenc4x_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_multi_cbc_encrypt,.-aesni_multi_cbc_encrypt
+
+.globl aesni_multi_cbc_decrypt
+.type aesni_multi_cbc_decrypt,@function
+.align 32
+aesni_multi_cbc_decrypt:
+.cfi_startproc
+ movq %rsp,%rax
+.cfi_def_cfa_register %rax
+ pushq %rbx
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_offset %rbp,-24
+ pushq %r12
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_offset %r14,-48
+ pushq %r15
+.cfi_offset %r15,-56
+
+
+
+
+
+
+ subq $48,%rsp
+ andq $-64,%rsp
+ movq %rax,16(%rsp)
+.cfi_escape 0x0f,0x05,0x77,0x10,0x06,0x23,0x08
+
+.Ldec4x_body:
+ movdqu (%rsi),%xmm12
+ leaq 120(%rsi),%rsi
+ leaq 80(%rdi),%rdi
+
+.Ldec4x_loop_grande:
+ movl %edx,24(%rsp)
+ xorl %edx,%edx
+ movl -64(%rdi),%ecx
+ movq -80(%rdi),%r8
+ cmpl %edx,%ecx
+ movq -72(%rdi),%r12
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movdqu -56(%rdi),%xmm6
+ movl %ecx,32(%rsp)
+ cmovleq %rsp,%r8
+ movl -24(%rdi),%ecx
+ movq -40(%rdi),%r9
+ cmpl %edx,%ecx
+ movq -32(%rdi),%r13
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movdqu -16(%rdi),%xmm7
+ movl %ecx,36(%rsp)
+ cmovleq %rsp,%r9
+ movl 16(%rdi),%ecx
+ movq 0(%rdi),%r10
+ cmpl %edx,%ecx
+ movq 8(%rdi),%r14
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movdqu 24(%rdi),%xmm8
+ movl %ecx,40(%rsp)
+ cmovleq %rsp,%r10
+ movl 56(%rdi),%ecx
+ movq 40(%rdi),%r11
+ cmpl %edx,%ecx
+ movq 48(%rdi),%r15
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movdqu 64(%rdi),%xmm9
+ movl %ecx,44(%rsp)
+ cmovleq %rsp,%r11
+ testl %edx,%edx
+ jz .Ldec4x_done
+
+ movups 16-120(%rsi),%xmm1
+ movups 32-120(%rsi),%xmm0
+ movl 240-120(%rsi),%eax
+ movdqu (%r8),%xmm2
+ movdqu (%r9),%xmm3
+ pxor %xmm12,%xmm2
+ movdqu (%r10),%xmm4
+ pxor %xmm12,%xmm3
+ movdqu (%r11),%xmm5
+ pxor %xmm12,%xmm4
+ pxor %xmm12,%xmm5
+ movdqa 32(%rsp),%xmm10
+ xorq %rbx,%rbx
+ jmp .Loop_dec4x
+
+.align 32
+.Loop_dec4x:
+ addq $16,%rbx
+ leaq 16(%rsp),%rbp
+ movl $1,%ecx
+ subq %rbx,%rbp
+
+.byte 102,15,56,222,209
+ prefetcht0 31(%r8,%rbx,1)
+ prefetcht0 31(%r9,%rbx,1)
+.byte 102,15,56,222,217
+ prefetcht0 31(%r10,%rbx,1)
+ prefetcht0 31(%r11,%rbx,1)
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+ movups 48-120(%rsi),%xmm1
+ cmpl 32(%rsp),%ecx
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+ cmovgeq %rbp,%r8
+ cmovgq %rbp,%r12
+.byte 102,15,56,222,232
+ movups -56(%rsi),%xmm0
+ cmpl 36(%rsp),%ecx
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+ cmovgeq %rbp,%r9
+ cmovgq %rbp,%r13
+.byte 102,15,56,222,233
+ movups -40(%rsi),%xmm1
+ cmpl 40(%rsp),%ecx
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+ cmovgeq %rbp,%r10
+ cmovgq %rbp,%r14
+.byte 102,15,56,222,232
+ movups -24(%rsi),%xmm0
+ cmpl 44(%rsp),%ecx
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+ cmovgeq %rbp,%r11
+ cmovgq %rbp,%r15
+.byte 102,15,56,222,233
+ movups -8(%rsi),%xmm1
+ movdqa %xmm10,%xmm11
+.byte 102,15,56,222,208
+ prefetcht0 15(%r12,%rbx,1)
+ prefetcht0 15(%r13,%rbx,1)
+.byte 102,15,56,222,216
+ prefetcht0 15(%r14,%rbx,1)
+ prefetcht0 15(%r15,%rbx,1)
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+ movups 128-120(%rsi),%xmm0
+ pxor %xmm12,%xmm12
+
+.byte 102,15,56,222,209
+ pcmpgtd %xmm12,%xmm11
+ movdqu -120(%rsi),%xmm12
+.byte 102,15,56,222,217
+ paddd %xmm11,%xmm10
+ movdqa %xmm10,32(%rsp)
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+ movups 144-120(%rsi),%xmm1
+
+ cmpl $11,%eax
+
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+ movups 160-120(%rsi),%xmm0
+
+ jb .Ldec4x_tail
+
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+ movups 176-120(%rsi),%xmm1
+
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+ movups 192-120(%rsi),%xmm0
+
+ je .Ldec4x_tail
+
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+ movups 208-120(%rsi),%xmm1
+
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+ movups 224-120(%rsi),%xmm0
+ jmp .Ldec4x_tail
+
+.align 32
+.Ldec4x_tail:
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+ pxor %xmm0,%xmm6
+ pxor %xmm0,%xmm7
+.byte 102,15,56,222,233
+ movdqu 16-120(%rsi),%xmm1
+ pxor %xmm0,%xmm8
+ pxor %xmm0,%xmm9
+ movdqu 32-120(%rsi),%xmm0
+
+.byte 102,15,56,223,214
+.byte 102,15,56,223,223
+ movdqu -16(%r8,%rbx,1),%xmm6
+ movdqu -16(%r9,%rbx,1),%xmm7
+.byte 102,65,15,56,223,224
+.byte 102,65,15,56,223,233
+ movdqu -16(%r10,%rbx,1),%xmm8
+ movdqu -16(%r11,%rbx,1),%xmm9
+
+ movups %xmm2,-16(%r12,%rbx,1)
+ movdqu (%r8,%rbx,1),%xmm2
+ movups %xmm3,-16(%r13,%rbx,1)
+ movdqu (%r9,%rbx,1),%xmm3
+ pxor %xmm12,%xmm2
+ movups %xmm4,-16(%r14,%rbx,1)
+ movdqu (%r10,%rbx,1),%xmm4
+ pxor %xmm12,%xmm3
+ movups %xmm5,-16(%r15,%rbx,1)
+ movdqu (%r11,%rbx,1),%xmm5
+ pxor %xmm12,%xmm4
+ pxor %xmm12,%xmm5
+
+ decl %edx
+ jnz .Loop_dec4x
+
+ movq 16(%rsp),%rax
+.cfi_def_cfa %rax,8
+ movl 24(%rsp),%edx
+
+ leaq 160(%rdi),%rdi
+ decl %edx
+ jnz .Ldec4x_loop_grande
+
+.Ldec4x_done:
+ movq -48(%rax),%r15
+.cfi_restore %r15
+ movq -40(%rax),%r14
+.cfi_restore %r14
+ movq -32(%rax),%r13
+.cfi_restore %r13
+ movq -24(%rax),%r12
+.cfi_restore %r12
+ movq -16(%rax),%rbp
+.cfi_restore %rbp
+ movq -8(%rax),%rbx
+.cfi_restore %rbx
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
+.Ldec4x_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_multi_cbc_decrypt,.-aesni_multi_cbc_decrypt
diff --git a/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/aesni-sha1-x86_64.S b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/aesni-sha1-x86_64.S
new file mode 100644
index 0000000000..ab763a2eec
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/aesni-sha1-x86_64.S
@@ -0,0 +1,1719 @@
+# WARNING: do not edit!
+# Generated from openssl/crypto/aes/asm/aesni-sha1-x86_64.pl
+#
+# Copyright 2011-2020 The OpenSSL Project Authors. All Rights Reserved.
+#
+# Licensed under the OpenSSL license (the "License"). You may not use
+# this file except in compliance with the License. You can obtain a copy
+# in the file LICENSE in the source distribution or at
+# https://www.openssl.org/source/license.html
+
+.text
+
+
+.globl aesni_cbc_sha1_enc
+.type aesni_cbc_sha1_enc,@function
+.align 32
+aesni_cbc_sha1_enc:
+.cfi_startproc
+
+ movl OPENSSL_ia32cap_P+0(%rip),%r10d
+ movq OPENSSL_ia32cap_P+4(%rip),%r11
+ btq $61,%r11
+ jc aesni_cbc_sha1_enc_shaext
+ jmp aesni_cbc_sha1_enc_ssse3
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_cbc_sha1_enc,.-aesni_cbc_sha1_enc
+.type aesni_cbc_sha1_enc_ssse3,@function
+.align 32
+aesni_cbc_sha1_enc_ssse3:
+.cfi_startproc
+ movq 8(%rsp),%r10
+
+
+ pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-24
+ pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
+ pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
+ leaq -104(%rsp),%rsp
+.cfi_adjust_cfa_offset 104
+
+
+ movq %rdi,%r12
+ movq %rsi,%r13
+ movq %rdx,%r14
+ leaq 112(%rcx),%r15
+ movdqu (%r8),%xmm2
+ movq %r8,88(%rsp)
+ shlq $6,%r14
+ subq %r12,%r13
+ movl 240-112(%r15),%r8d
+ addq %r10,%r14
+
+ leaq K_XX_XX(%rip),%r11
+ movl 0(%r9),%eax
+ movl 4(%r9),%ebx
+ movl 8(%r9),%ecx
+ movl 12(%r9),%edx
+ movl %ebx,%esi
+ movl 16(%r9),%ebp
+ movl %ecx,%edi
+ xorl %edx,%edi
+ andl %edi,%esi
+
+ movdqa 64(%r11),%xmm3
+ movdqa 0(%r11),%xmm13
+ movdqu 0(%r10),%xmm4
+ movdqu 16(%r10),%xmm5
+ movdqu 32(%r10),%xmm6
+ movdqu 48(%r10),%xmm7
+.byte 102,15,56,0,227
+.byte 102,15,56,0,235
+.byte 102,15,56,0,243
+ addq $64,%r10
+ paddd %xmm13,%xmm4
+.byte 102,15,56,0,251
+ paddd %xmm13,%xmm5
+ paddd %xmm13,%xmm6
+ movdqa %xmm4,0(%rsp)
+ psubd %xmm13,%xmm4
+ movdqa %xmm5,16(%rsp)
+ psubd %xmm13,%xmm5
+ movdqa %xmm6,32(%rsp)
+ psubd %xmm13,%xmm6
+ movups -112(%r15),%xmm15
+ movups 16-112(%r15),%xmm0
+ jmp .Loop_ssse3
+.align 32
+.Loop_ssse3:
+ rorl $2,%ebx
+ movups 0(%r12),%xmm14
+ xorps %xmm15,%xmm14
+ xorps %xmm14,%xmm2
+ movups -80(%r15),%xmm1
+.byte 102,15,56,220,208
+ pshufd $238,%xmm4,%xmm8
+ xorl %edx,%esi
+ movdqa %xmm7,%xmm12
+ paddd %xmm7,%xmm13
+ movl %eax,%edi
+ addl 0(%rsp),%ebp
+ punpcklqdq %xmm5,%xmm8
+ xorl %ecx,%ebx
+ roll $5,%eax
+ addl %esi,%ebp
+ psrldq $4,%xmm12
+ andl %ebx,%edi
+ xorl %ecx,%ebx
+ pxor %xmm4,%xmm8
+ addl %eax,%ebp
+ rorl $7,%eax
+ pxor %xmm6,%xmm12
+ xorl %ecx,%edi
+ movl %ebp,%esi
+ addl 4(%rsp),%edx
+ pxor %xmm12,%xmm8
+ xorl %ebx,%eax
+ roll $5,%ebp
+ movdqa %xmm13,48(%rsp)
+ addl %edi,%edx
+ movups -64(%r15),%xmm0
+.byte 102,15,56,220,209
+ andl %eax,%esi
+ movdqa %xmm8,%xmm3
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ rorl $7,%ebp
+ movdqa %xmm8,%xmm12
+ xorl %ebx,%esi
+ pslldq $12,%xmm3
+ paddd %xmm8,%xmm8
+ movl %edx,%edi
+ addl 8(%rsp),%ecx
+ psrld $31,%xmm12
+ xorl %eax,%ebp
+ roll $5,%edx
+ addl %esi,%ecx
+ movdqa %xmm3,%xmm13
+ andl %ebp,%edi
+ xorl %eax,%ebp
+ psrld $30,%xmm3
+ addl %edx,%ecx
+ rorl $7,%edx
+ por %xmm12,%xmm8
+ xorl %eax,%edi
+ movl %ecx,%esi
+ addl 12(%rsp),%ebx
+ movups -48(%r15),%xmm1
+.byte 102,15,56,220,208
+ pslld $2,%xmm13
+ pxor %xmm3,%xmm8
+ xorl %ebp,%edx
+ movdqa 0(%r11),%xmm3
+ roll $5,%ecx
+ addl %edi,%ebx
+ andl %edx,%esi
+ pxor %xmm13,%xmm8
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ rorl $7,%ecx
+ pshufd $238,%xmm5,%xmm9
+ xorl %ebp,%esi
+ movdqa %xmm8,%xmm13
+ paddd %xmm8,%xmm3
+ movl %ebx,%edi
+ addl 16(%rsp),%eax
+ punpcklqdq %xmm6,%xmm9
+ xorl %edx,%ecx
+ roll $5,%ebx
+ addl %esi,%eax
+ psrldq $4,%xmm13
+ andl %ecx,%edi
+ xorl %edx,%ecx
+ pxor %xmm5,%xmm9
+ addl %ebx,%eax
+ rorl $7,%ebx
+ movups -32(%r15),%xmm0
+.byte 102,15,56,220,209
+ pxor %xmm7,%xmm13
+ xorl %edx,%edi
+ movl %eax,%esi
+ addl 20(%rsp),%ebp
+ pxor %xmm13,%xmm9
+ xorl %ecx,%ebx
+ roll $5,%eax
+ movdqa %xmm3,0(%rsp)
+ addl %edi,%ebp
+ andl %ebx,%esi
+ movdqa %xmm9,%xmm12
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ rorl $7,%eax
+ movdqa %xmm9,%xmm13
+ xorl %ecx,%esi
+ pslldq $12,%xmm12
+ paddd %xmm9,%xmm9
+ movl %ebp,%edi
+ addl 24(%rsp),%edx
+ psrld $31,%xmm13
+ xorl %ebx,%eax
+ roll $5,%ebp
+ addl %esi,%edx
+ movups -16(%r15),%xmm1
+.byte 102,15,56,220,208
+ movdqa %xmm12,%xmm3
+ andl %eax,%edi
+ xorl %ebx,%eax
+ psrld $30,%xmm12
+ addl %ebp,%edx
+ rorl $7,%ebp
+ por %xmm13,%xmm9
+ xorl %ebx,%edi
+ movl %edx,%esi
+ addl 28(%rsp),%ecx
+ pslld $2,%xmm3
+ pxor %xmm12,%xmm9
+ xorl %eax,%ebp
+ movdqa 16(%r11),%xmm12
+ roll $5,%edx
+ addl %edi,%ecx
+ andl %ebp,%esi
+ pxor %xmm3,%xmm9
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ rorl $7,%edx
+ pshufd $238,%xmm6,%xmm10
+ xorl %eax,%esi
+ movdqa %xmm9,%xmm3
+ paddd %xmm9,%xmm12
+ movl %ecx,%edi
+ addl 32(%rsp),%ebx
+ movups 0(%r15),%xmm0
+.byte 102,15,56,220,209
+ punpcklqdq %xmm7,%xmm10
+ xorl %ebp,%edx
+ roll $5,%ecx
+ addl %esi,%ebx
+ psrldq $4,%xmm3
+ andl %edx,%edi
+ xorl %ebp,%edx
+ pxor %xmm6,%xmm10
+ addl %ecx,%ebx
+ rorl $7,%ecx
+ pxor %xmm8,%xmm3
+ xorl %ebp,%edi
+ movl %ebx,%esi
+ addl 36(%rsp),%eax
+ pxor %xmm3,%xmm10
+ xorl %edx,%ecx
+ roll $5,%ebx
+ movdqa %xmm12,16(%rsp)
+ addl %edi,%eax
+ andl %ecx,%esi
+ movdqa %xmm10,%xmm13
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ rorl $7,%ebx
+ movups 16(%r15),%xmm1
+.byte 102,15,56,220,208
+ movdqa %xmm10,%xmm3
+ xorl %edx,%esi
+ pslldq $12,%xmm13
+ paddd %xmm10,%xmm10
+ movl %eax,%edi
+ addl 40(%rsp),%ebp
+ psrld $31,%xmm3
+ xorl %ecx,%ebx
+ roll $5,%eax
+ addl %esi,%ebp
+ movdqa %xmm13,%xmm12
+ andl %ebx,%edi
+ xorl %ecx,%ebx
+ psrld $30,%xmm13
+ addl %eax,%ebp
+ rorl $7,%eax
+ por %xmm3,%xmm10
+ xorl %ecx,%edi
+ movl %ebp,%esi
+ addl 44(%rsp),%edx
+ pslld $2,%xmm12
+ pxor %xmm13,%xmm10
+ xorl %ebx,%eax
+ movdqa 16(%r11),%xmm13
+ roll $5,%ebp
+ addl %edi,%edx
+ movups 32(%r15),%xmm0
+.byte 102,15,56,220,209
+ andl %eax,%esi
+ pxor %xmm12,%xmm10
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ rorl $7,%ebp
+ pshufd $238,%xmm7,%xmm11
+ xorl %ebx,%esi
+ movdqa %xmm10,%xmm12
+ paddd %xmm10,%xmm13
+ movl %edx,%edi
+ addl 48(%rsp),%ecx
+ punpcklqdq %xmm8,%xmm11
+ xorl %eax,%ebp
+ roll $5,%edx
+ addl %esi,%ecx
+ psrldq $4,%xmm12
+ andl %ebp,%edi
+ xorl %eax,%ebp
+ pxor %xmm7,%xmm11
+ addl %edx,%ecx
+ rorl $7,%edx
+ pxor %xmm9,%xmm12
+ xorl %eax,%edi
+ movl %ecx,%esi
+ addl 52(%rsp),%ebx
+ movups 48(%r15),%xmm1
+.byte 102,15,56,220,208
+ pxor %xmm12,%xmm11
+ xorl %ebp,%edx
+ roll $5,%ecx
+ movdqa %xmm13,32(%rsp)
+ addl %edi,%ebx
+ andl %edx,%esi
+ movdqa %xmm11,%xmm3
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ rorl $7,%ecx
+ movdqa %xmm11,%xmm12
+ xorl %ebp,%esi
+ pslldq $12,%xmm3
+ paddd %xmm11,%xmm11
+ movl %ebx,%edi
+ addl 56(%rsp),%eax
+ psrld $31,%xmm12
+ xorl %edx,%ecx
+ roll $5,%ebx
+ addl %esi,%eax
+ movdqa %xmm3,%xmm13
+ andl %ecx,%edi
+ xorl %edx,%ecx
+ psrld $30,%xmm3
+ addl %ebx,%eax
+ rorl $7,%ebx
+ cmpl $11,%r8d
+ jb .Laesenclast1
+ movups 64(%r15),%xmm0
+.byte 102,15,56,220,209
+ movups 80(%r15),%xmm1
+.byte 102,15,56,220,208
+ je .Laesenclast1
+ movups 96(%r15),%xmm0
+.byte 102,15,56,220,209
+ movups 112(%r15),%xmm1
+.byte 102,15,56,220,208
+.Laesenclast1:
+.byte 102,15,56,221,209
+ movups 16-112(%r15),%xmm0
+ por %xmm12,%xmm11
+ xorl %edx,%edi
+ movl %eax,%esi
+ addl 60(%rsp),%ebp
+ pslld $2,%xmm13
+ pxor %xmm3,%xmm11
+ xorl %ecx,%ebx
+ movdqa 16(%r11),%xmm3
+ roll $5,%eax
+ addl %edi,%ebp
+ andl %ebx,%esi
+ pxor %xmm13,%xmm11
+ pshufd $238,%xmm10,%xmm13
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ rorl $7,%eax
+ pxor %xmm8,%xmm4
+ xorl %ecx,%esi
+ movl %ebp,%edi
+ addl 0(%rsp),%edx
+ punpcklqdq %xmm11,%xmm13
+ xorl %ebx,%eax
+ roll $5,%ebp
+ pxor %xmm5,%xmm4
+ addl %esi,%edx
+ movups 16(%r12),%xmm14
+ xorps %xmm15,%xmm14
+ movups %xmm2,0(%r12,%r13,1)
+ xorps %xmm14,%xmm2
+ movups -80(%r15),%xmm1
+.byte 102,15,56,220,208
+ andl %eax,%edi
+ movdqa %xmm3,%xmm12
+ xorl %ebx,%eax
+ paddd %xmm11,%xmm3
+ addl %ebp,%edx
+ pxor %xmm13,%xmm4
+ rorl $7,%ebp
+ xorl %ebx,%edi
+ movl %edx,%esi
+ addl 4(%rsp),%ecx
+ movdqa %xmm4,%xmm13
+ xorl %eax,%ebp
+ roll $5,%edx
+ movdqa %xmm3,48(%rsp)
+ addl %edi,%ecx
+ andl %ebp,%esi
+ xorl %eax,%ebp
+ pslld $2,%xmm4
+ addl %edx,%ecx
+ rorl $7,%edx
+ psrld $30,%xmm13
+ xorl %eax,%esi
+ movl %ecx,%edi
+ addl 8(%rsp),%ebx
+ movups -64(%r15),%xmm0
+.byte 102,15,56,220,209
+ por %xmm13,%xmm4
+ xorl %ebp,%edx
+ roll $5,%ecx
+ pshufd $238,%xmm11,%xmm3
+ addl %esi,%ebx
+ andl %edx,%edi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ addl 12(%rsp),%eax
+ xorl %ebp,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ pxor %xmm9,%xmm5
+ addl 16(%rsp),%ebp
+ movups -48(%r15),%xmm1
+.byte 102,15,56,220,208
+ xorl %ecx,%esi
+ punpcklqdq %xmm4,%xmm3
+ movl %eax,%edi
+ roll $5,%eax
+ pxor %xmm6,%xmm5
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ movdqa %xmm12,%xmm13
+ rorl $7,%ebx
+ paddd %xmm4,%xmm12
+ addl %eax,%ebp
+ pxor %xmm3,%xmm5
+ addl 20(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ movdqa %xmm5,%xmm3
+ addl %edi,%edx
+ xorl %ebx,%esi
+ movdqa %xmm12,0(%rsp)
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 24(%rsp),%ecx
+ pslld $2,%xmm5
+ xorl %eax,%esi
+ movl %edx,%edi
+ psrld $30,%xmm3
+ roll $5,%edx
+ addl %esi,%ecx
+ movups -32(%r15),%xmm0
+.byte 102,15,56,220,209
+ xorl %eax,%edi
+ rorl $7,%ebp
+ por %xmm3,%xmm5
+ addl %edx,%ecx
+ addl 28(%rsp),%ebx
+ pshufd $238,%xmm4,%xmm12
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ pxor %xmm10,%xmm6
+ addl 32(%rsp),%eax
+ xorl %edx,%esi
+ punpcklqdq %xmm5,%xmm12
+ movl %ebx,%edi
+ roll $5,%ebx
+ pxor %xmm7,%xmm6
+ addl %esi,%eax
+ xorl %edx,%edi
+ movdqa 32(%r11),%xmm3
+ rorl $7,%ecx
+ paddd %xmm5,%xmm13
+ addl %ebx,%eax
+ pxor %xmm12,%xmm6
+ addl 36(%rsp),%ebp
+ movups -16(%r15),%xmm1
+.byte 102,15,56,220,208
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ movdqa %xmm6,%xmm12
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ movdqa %xmm13,16(%rsp)
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 40(%rsp),%edx
+ pslld $2,%xmm6
+ xorl %ebx,%esi
+ movl %ebp,%edi
+ psrld $30,%xmm12
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %ebx,%edi
+ rorl $7,%eax
+ por %xmm12,%xmm6
+ addl %ebp,%edx
+ addl 44(%rsp),%ecx
+ pshufd $238,%xmm5,%xmm13
+ xorl %eax,%edi
+ movl %edx,%esi
+ roll $5,%edx
+ addl %edi,%ecx
+ movups 0(%r15),%xmm0
+.byte 102,15,56,220,209
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ pxor %xmm11,%xmm7
+ addl 48(%rsp),%ebx
+ xorl %ebp,%esi
+ punpcklqdq %xmm6,%xmm13
+ movl %ecx,%edi
+ roll $5,%ecx
+ pxor %xmm8,%xmm7
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ movdqa %xmm3,%xmm12
+ rorl $7,%edx
+ paddd %xmm6,%xmm3
+ addl %ecx,%ebx
+ pxor %xmm13,%xmm7
+ addl 52(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ movdqa %xmm7,%xmm13
+ addl %edi,%eax
+ xorl %edx,%esi
+ movdqa %xmm3,32(%rsp)
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 56(%rsp),%ebp
+ movups 16(%r15),%xmm1
+.byte 102,15,56,220,208
+ pslld $2,%xmm7
+ xorl %ecx,%esi
+ movl %eax,%edi
+ psrld $30,%xmm13
+ roll $5,%eax
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ rorl $7,%ebx
+ por %xmm13,%xmm7
+ addl %eax,%ebp
+ addl 60(%rsp),%edx
+ pshufd $238,%xmm6,%xmm3
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ pxor %xmm4,%xmm8
+ addl 0(%rsp),%ecx
+ xorl %eax,%esi
+ punpcklqdq %xmm7,%xmm3
+ movl %edx,%edi
+ roll $5,%edx
+ pxor %xmm9,%xmm8
+ addl %esi,%ecx
+ movups 32(%r15),%xmm0
+.byte 102,15,56,220,209
+ xorl %eax,%edi
+ movdqa %xmm12,%xmm13
+ rorl $7,%ebp
+ paddd %xmm7,%xmm12
+ addl %edx,%ecx
+ pxor %xmm3,%xmm8
+ addl 4(%rsp),%ebx
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ roll $5,%ecx
+ movdqa %xmm8,%xmm3
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ movdqa %xmm12,48(%rsp)
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 8(%rsp),%eax
+ pslld $2,%xmm8
+ xorl %edx,%esi
+ movl %ebx,%edi
+ psrld $30,%xmm3
+ roll $5,%ebx
+ addl %esi,%eax
+ xorl %edx,%edi
+ rorl $7,%ecx
+ por %xmm3,%xmm8
+ addl %ebx,%eax
+ addl 12(%rsp),%ebp
+ movups 48(%r15),%xmm1
+.byte 102,15,56,220,208
+ pshufd $238,%xmm7,%xmm12
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ pxor %xmm5,%xmm9
+ addl 16(%rsp),%edx
+ xorl %ebx,%esi
+ punpcklqdq %xmm8,%xmm12
+ movl %ebp,%edi
+ roll $5,%ebp
+ pxor %xmm10,%xmm9
+ addl %esi,%edx
+ xorl %ebx,%edi
+ movdqa %xmm13,%xmm3
+ rorl $7,%eax
+ paddd %xmm8,%xmm13
+ addl %ebp,%edx
+ pxor %xmm12,%xmm9
+ addl 20(%rsp),%ecx
+ xorl %eax,%edi
+ movl %edx,%esi
+ roll $5,%edx
+ movdqa %xmm9,%xmm12
+ addl %edi,%ecx
+ cmpl $11,%r8d
+ jb .Laesenclast2
+ movups 64(%r15),%xmm0
+.byte 102,15,56,220,209
+ movups 80(%r15),%xmm1
+.byte 102,15,56,220,208
+ je .Laesenclast2
+ movups 96(%r15),%xmm0
+.byte 102,15,56,220,209
+ movups 112(%r15),%xmm1
+.byte 102,15,56,220,208
+.Laesenclast2:
+.byte 102,15,56,221,209
+ movups 16-112(%r15),%xmm0
+ xorl %eax,%esi
+ movdqa %xmm13,0(%rsp)
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 24(%rsp),%ebx
+ pslld $2,%xmm9
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ psrld $30,%xmm12
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ por %xmm12,%xmm9
+ addl %ecx,%ebx
+ addl 28(%rsp),%eax
+ pshufd $238,%xmm8,%xmm13
+ rorl $7,%ecx
+ movl %ebx,%esi
+ xorl %edx,%edi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %ecx,%esi
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ pxor %xmm6,%xmm10
+ addl 32(%rsp),%ebp
+ movups 32(%r12),%xmm14
+ xorps %xmm15,%xmm14
+ movups %xmm2,16(%r13,%r12,1)
+ xorps %xmm14,%xmm2
+ movups -80(%r15),%xmm1
+.byte 102,15,56,220,208
+ andl %ecx,%esi
+ xorl %edx,%ecx
+ rorl $7,%ebx
+ punpcklqdq %xmm9,%xmm13
+ movl %eax,%edi
+ xorl %ecx,%esi
+ pxor %xmm11,%xmm10
+ roll $5,%eax
+ addl %esi,%ebp
+ movdqa %xmm3,%xmm12
+ xorl %ebx,%edi
+ paddd %xmm9,%xmm3
+ xorl %ecx,%ebx
+ pxor %xmm13,%xmm10
+ addl %eax,%ebp
+ addl 36(%rsp),%edx
+ andl %ebx,%edi
+ xorl %ecx,%ebx
+ rorl $7,%eax
+ movdqa %xmm10,%xmm13
+ movl %ebp,%esi
+ xorl %ebx,%edi
+ movdqa %xmm3,16(%rsp)
+ roll $5,%ebp
+ addl %edi,%edx
+ movups -64(%r15),%xmm0
+.byte 102,15,56,220,209
+ xorl %eax,%esi
+ pslld $2,%xmm10
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ psrld $30,%xmm13
+ addl 40(%rsp),%ecx
+ andl %eax,%esi
+ xorl %ebx,%eax
+ por %xmm13,%xmm10
+ rorl $7,%ebp
+ movl %edx,%edi
+ xorl %eax,%esi
+ roll $5,%edx
+ pshufd $238,%xmm9,%xmm3
+ addl %esi,%ecx
+ xorl %ebp,%edi
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ addl 44(%rsp),%ebx
+ andl %ebp,%edi
+ xorl %eax,%ebp
+ rorl $7,%edx
+ movups -48(%r15),%xmm1
+.byte 102,15,56,220,208
+ movl %ecx,%esi
+ xorl %ebp,%edi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %edx,%esi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ pxor %xmm7,%xmm11
+ addl 48(%rsp),%eax
+ andl %edx,%esi
+ xorl %ebp,%edx
+ rorl $7,%ecx
+ punpcklqdq %xmm10,%xmm3
+ movl %ebx,%edi
+ xorl %edx,%esi
+ pxor %xmm4,%xmm11
+ roll $5,%ebx
+ addl %esi,%eax
+ movdqa 48(%r11),%xmm13
+ xorl %ecx,%edi
+ paddd %xmm10,%xmm12
+ xorl %edx,%ecx
+ pxor %xmm3,%xmm11
+ addl %ebx,%eax
+ addl 52(%rsp),%ebp
+ movups -32(%r15),%xmm0
+.byte 102,15,56,220,209
+ andl %ecx,%edi
+ xorl %edx,%ecx
+ rorl $7,%ebx
+ movdqa %xmm11,%xmm3
+ movl %eax,%esi
+ xorl %ecx,%edi
+ movdqa %xmm12,32(%rsp)
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ebx,%esi
+ pslld $2,%xmm11
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ psrld $30,%xmm3
+ addl 56(%rsp),%edx
+ andl %ebx,%esi
+ xorl %ecx,%ebx
+ por %xmm3,%xmm11
+ rorl $7,%eax
+ movl %ebp,%edi
+ xorl %ebx,%esi
+ roll $5,%ebp
+ pshufd $238,%xmm10,%xmm12
+ addl %esi,%edx
+ movups -16(%r15),%xmm1
+.byte 102,15,56,220,208
+ xorl %eax,%edi
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ addl 60(%rsp),%ecx
+ andl %eax,%edi
+ xorl %ebx,%eax
+ rorl $7,%ebp
+ movl %edx,%esi
+ xorl %eax,%edi
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %ebp,%esi
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ pxor %xmm8,%xmm4
+ addl 0(%rsp),%ebx
+ andl %ebp,%esi
+ xorl %eax,%ebp
+ rorl $7,%edx
+ movups 0(%r15),%xmm0
+.byte 102,15,56,220,209
+ punpcklqdq %xmm11,%xmm12
+ movl %ecx,%edi
+ xorl %ebp,%esi
+ pxor %xmm5,%xmm4
+ roll $5,%ecx
+ addl %esi,%ebx
+ movdqa %xmm13,%xmm3
+ xorl %edx,%edi
+ paddd %xmm11,%xmm13
+ xorl %ebp,%edx
+ pxor %xmm12,%xmm4
+ addl %ecx,%ebx
+ addl 4(%rsp),%eax
+ andl %edx,%edi
+ xorl %ebp,%edx
+ rorl $7,%ecx
+ movdqa %xmm4,%xmm12
+ movl %ebx,%esi
+ xorl %edx,%edi
+ movdqa %xmm13,48(%rsp)
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %ecx,%esi
+ pslld $2,%xmm4
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ psrld $30,%xmm12
+ addl 8(%rsp),%ebp
+ movups 16(%r15),%xmm1
+.byte 102,15,56,220,208
+ andl %ecx,%esi
+ xorl %edx,%ecx
+ por %xmm12,%xmm4
+ rorl $7,%ebx
+ movl %eax,%edi
+ xorl %ecx,%esi
+ roll $5,%eax
+ pshufd $238,%xmm11,%xmm13
+ addl %esi,%ebp
+ xorl %ebx,%edi
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ addl 12(%rsp),%edx
+ andl %ebx,%edi
+ xorl %ecx,%ebx
+ rorl $7,%eax
+ movl %ebp,%esi
+ xorl %ebx,%edi
+ roll $5,%ebp
+ addl %edi,%edx
+ movups 32(%r15),%xmm0
+.byte 102,15,56,220,209
+ xorl %eax,%esi
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ pxor %xmm9,%xmm5
+ addl 16(%rsp),%ecx
+ andl %eax,%esi
+ xorl %ebx,%eax
+ rorl $7,%ebp
+ punpcklqdq %xmm4,%xmm13
+ movl %edx,%edi
+ xorl %eax,%esi
+ pxor %xmm6,%xmm5
+ roll $5,%edx
+ addl %esi,%ecx
+ movdqa %xmm3,%xmm12
+ xorl %ebp,%edi
+ paddd %xmm4,%xmm3
+ xorl %eax,%ebp
+ pxor %xmm13,%xmm5
+ addl %edx,%ecx
+ addl 20(%rsp),%ebx
+ andl %ebp,%edi
+ xorl %eax,%ebp
+ rorl $7,%edx
+ movups 48(%r15),%xmm1
+.byte 102,15,56,220,208
+ movdqa %xmm5,%xmm13
+ movl %ecx,%esi
+ xorl %ebp,%edi
+ movdqa %xmm3,0(%rsp)
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %edx,%esi
+ pslld $2,%xmm5
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ psrld $30,%xmm13
+ addl 24(%rsp),%eax
+ andl %edx,%esi
+ xorl %ebp,%edx
+ por %xmm13,%xmm5
+ rorl $7,%ecx
+ movl %ebx,%edi
+ xorl %edx,%esi
+ roll $5,%ebx
+ pshufd $238,%xmm4,%xmm3
+ addl %esi,%eax
+ xorl %ecx,%edi
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ addl 28(%rsp),%ebp
+ cmpl $11,%r8d
+ jb .Laesenclast3
+ movups 64(%r15),%xmm0
+.byte 102,15,56,220,209
+ movups 80(%r15),%xmm1
+.byte 102,15,56,220,208
+ je .Laesenclast3
+ movups 96(%r15),%xmm0
+.byte 102,15,56,220,209
+ movups 112(%r15),%xmm1
+.byte 102,15,56,220,208
+.Laesenclast3:
+.byte 102,15,56,221,209
+ movups 16-112(%r15),%xmm0
+ andl %ecx,%edi
+ xorl %edx,%ecx
+ rorl $7,%ebx
+ movl %eax,%esi
+ xorl %ecx,%edi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ebx,%esi
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ pxor %xmm10,%xmm6
+ addl 32(%rsp),%edx
+ andl %ebx,%esi
+ xorl %ecx,%ebx
+ rorl $7,%eax
+ punpcklqdq %xmm5,%xmm3
+ movl %ebp,%edi
+ xorl %ebx,%esi
+ pxor %xmm7,%xmm6
+ roll $5,%ebp
+ addl %esi,%edx
+ movups 48(%r12),%xmm14
+ xorps %xmm15,%xmm14
+ movups %xmm2,32(%r13,%r12,1)
+ xorps %xmm14,%xmm2
+ movups -80(%r15),%xmm1
+.byte 102,15,56,220,208
+ movdqa %xmm12,%xmm13
+ xorl %eax,%edi
+ paddd %xmm5,%xmm12
+ xorl %ebx,%eax
+ pxor %xmm3,%xmm6
+ addl %ebp,%edx
+ addl 36(%rsp),%ecx
+ andl %eax,%edi
+ xorl %ebx,%eax
+ rorl $7,%ebp
+ movdqa %xmm6,%xmm3
+ movl %edx,%esi
+ xorl %eax,%edi
+ movdqa %xmm12,16(%rsp)
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %ebp,%esi
+ pslld $2,%xmm6
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ psrld $30,%xmm3
+ addl 40(%rsp),%ebx
+ andl %ebp,%esi
+ xorl %eax,%ebp
+ por %xmm3,%xmm6
+ rorl $7,%edx
+ movups -64(%r15),%xmm0
+.byte 102,15,56,220,209
+ movl %ecx,%edi
+ xorl %ebp,%esi
+ roll $5,%ecx
+ pshufd $238,%xmm5,%xmm12
+ addl %esi,%ebx
+ xorl %edx,%edi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ addl 44(%rsp),%eax
+ andl %edx,%edi
+ xorl %ebp,%edx
+ rorl $7,%ecx
+ movl %ebx,%esi
+ xorl %edx,%edi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ addl %ebx,%eax
+ pxor %xmm11,%xmm7
+ addl 48(%rsp),%ebp
+ movups -48(%r15),%xmm1
+.byte 102,15,56,220,208
+ xorl %ecx,%esi
+ punpcklqdq %xmm6,%xmm12
+ movl %eax,%edi
+ roll $5,%eax
+ pxor %xmm8,%xmm7
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ movdqa %xmm13,%xmm3
+ rorl $7,%ebx
+ paddd %xmm6,%xmm13
+ addl %eax,%ebp
+ pxor %xmm12,%xmm7
+ addl 52(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ movdqa %xmm7,%xmm12
+ addl %edi,%edx
+ xorl %ebx,%esi
+ movdqa %xmm13,32(%rsp)
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 56(%rsp),%ecx
+ pslld $2,%xmm7
+ xorl %eax,%esi
+ movl %edx,%edi
+ psrld $30,%xmm12
+ roll $5,%edx
+ addl %esi,%ecx
+ movups -32(%r15),%xmm0
+.byte 102,15,56,220,209
+ xorl %eax,%edi
+ rorl $7,%ebp
+ por %xmm12,%xmm7
+ addl %edx,%ecx
+ addl 60(%rsp),%ebx
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 0(%rsp),%eax
+ xorl %edx,%esi
+ movl %ebx,%edi
+ roll $5,%ebx
+ paddd %xmm7,%xmm3
+ addl %esi,%eax
+ xorl %edx,%edi
+ movdqa %xmm3,48(%rsp)
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 4(%rsp),%ebp
+ movups -16(%r15),%xmm1
+.byte 102,15,56,220,208
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 8(%rsp),%edx
+ xorl %ebx,%esi
+ movl %ebp,%edi
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %ebx,%edi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 12(%rsp),%ecx
+ xorl %eax,%edi
+ movl %edx,%esi
+ roll $5,%edx
+ addl %edi,%ecx
+ movups 0(%r15),%xmm0
+.byte 102,15,56,220,209
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ cmpq %r14,%r10
+ je .Ldone_ssse3
+ movdqa 64(%r11),%xmm3
+ movdqa 0(%r11),%xmm13
+ movdqu 0(%r10),%xmm4
+ movdqu 16(%r10),%xmm5
+ movdqu 32(%r10),%xmm6
+ movdqu 48(%r10),%xmm7
+.byte 102,15,56,0,227
+ addq $64,%r10
+ addl 16(%rsp),%ebx
+ xorl %ebp,%esi
+ movl %ecx,%edi
+.byte 102,15,56,0,235
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ paddd %xmm13,%xmm4
+ addl %ecx,%ebx
+ addl 20(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ movdqa %xmm4,0(%rsp)
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ rorl $7,%ecx
+ psubd %xmm13,%xmm4
+ addl %ebx,%eax
+ addl 24(%rsp),%ebp
+ movups 16(%r15),%xmm1
+.byte 102,15,56,220,208
+ xorl %ecx,%esi
+ movl %eax,%edi
+ roll $5,%eax
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 28(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 32(%rsp),%ecx
+ xorl %eax,%esi
+ movl %edx,%edi
+.byte 102,15,56,0,243
+ roll $5,%edx
+ addl %esi,%ecx
+ movups 32(%r15),%xmm0
+.byte 102,15,56,220,209
+ xorl %eax,%edi
+ rorl $7,%ebp
+ paddd %xmm13,%xmm5
+ addl %edx,%ecx
+ addl 36(%rsp),%ebx
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ movdqa %xmm5,16(%rsp)
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ psubd %xmm13,%xmm5
+ addl %ecx,%ebx
+ addl 40(%rsp),%eax
+ xorl %edx,%esi
+ movl %ebx,%edi
+ roll $5,%ebx
+ addl %esi,%eax
+ xorl %edx,%edi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 44(%rsp),%ebp
+ movups 48(%r15),%xmm1
+.byte 102,15,56,220,208
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 48(%rsp),%edx
+ xorl %ebx,%esi
+ movl %ebp,%edi
+.byte 102,15,56,0,251
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %ebx,%edi
+ rorl $7,%eax
+ paddd %xmm13,%xmm6
+ addl %ebp,%edx
+ addl 52(%rsp),%ecx
+ xorl %eax,%edi
+ movl %edx,%esi
+ movdqa %xmm6,32(%rsp)
+ roll $5,%edx
+ addl %edi,%ecx
+ cmpl $11,%r8d
+ jb .Laesenclast4
+ movups 64(%r15),%xmm0
+.byte 102,15,56,220,209
+ movups 80(%r15),%xmm1
+.byte 102,15,56,220,208
+ je .Laesenclast4
+ movups 96(%r15),%xmm0
+.byte 102,15,56,220,209
+ movups 112(%r15),%xmm1
+.byte 102,15,56,220,208
+.Laesenclast4:
+.byte 102,15,56,221,209
+ movups 16-112(%r15),%xmm0
+ xorl %eax,%esi
+ rorl $7,%ebp
+ psubd %xmm13,%xmm6
+ addl %edx,%ecx
+ addl 56(%rsp),%ebx
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 60(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ rorl $7,%ecx
+ addl %ebx,%eax
+ movups %xmm2,48(%r13,%r12,1)
+ leaq 64(%r12),%r12
+
+ addl 0(%r9),%eax
+ addl 4(%r9),%esi
+ addl 8(%r9),%ecx
+ addl 12(%r9),%edx
+ movl %eax,0(%r9)
+ addl 16(%r9),%ebp
+ movl %esi,4(%r9)
+ movl %esi,%ebx
+ movl %ecx,8(%r9)
+ movl %ecx,%edi
+ movl %edx,12(%r9)
+ xorl %edx,%edi
+ movl %ebp,16(%r9)
+ andl %edi,%esi
+ jmp .Loop_ssse3
+
+.Ldone_ssse3:
+ addl 16(%rsp),%ebx
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 20(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 24(%rsp),%ebp
+ movups 16(%r15),%xmm1
+.byte 102,15,56,220,208
+ xorl %ecx,%esi
+ movl %eax,%edi
+ roll $5,%eax
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 28(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 32(%rsp),%ecx
+ xorl %eax,%esi
+ movl %edx,%edi
+ roll $5,%edx
+ addl %esi,%ecx
+ movups 32(%r15),%xmm0
+.byte 102,15,56,220,209
+ xorl %eax,%edi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 36(%rsp),%ebx
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 40(%rsp),%eax
+ xorl %edx,%esi
+ movl %ebx,%edi
+ roll $5,%ebx
+ addl %esi,%eax
+ xorl %edx,%edi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 44(%rsp),%ebp
+ movups 48(%r15),%xmm1
+.byte 102,15,56,220,208
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 48(%rsp),%edx
+ xorl %ebx,%esi
+ movl %ebp,%edi
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %ebx,%edi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 52(%rsp),%ecx
+ xorl %eax,%edi
+ movl %edx,%esi
+ roll $5,%edx
+ addl %edi,%ecx
+ cmpl $11,%r8d
+ jb .Laesenclast5
+ movups 64(%r15),%xmm0
+.byte 102,15,56,220,209
+ movups 80(%r15),%xmm1
+.byte 102,15,56,220,208
+ je .Laesenclast5
+ movups 96(%r15),%xmm0
+.byte 102,15,56,220,209
+ movups 112(%r15),%xmm1
+.byte 102,15,56,220,208
+.Laesenclast5:
+.byte 102,15,56,221,209
+ movups 16-112(%r15),%xmm0
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 56(%rsp),%ebx
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 60(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ rorl $7,%ecx
+ addl %ebx,%eax
+ movups %xmm2,48(%r13,%r12,1)
+ movq 88(%rsp),%r8
+
+ addl 0(%r9),%eax
+ addl 4(%r9),%esi
+ addl 8(%r9),%ecx
+ movl %eax,0(%r9)
+ addl 12(%r9),%edx
+ movl %esi,4(%r9)
+ addl 16(%r9),%ebp
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+ movl %ebp,16(%r9)
+ movups %xmm2,(%r8)
+ leaq 104(%rsp),%rsi
+.cfi_def_cfa %rsi,56
+ movq 0(%rsi),%r15
+.cfi_restore %r15
+ movq 8(%rsi),%r14
+.cfi_restore %r14
+ movq 16(%rsi),%r13
+.cfi_restore %r13
+ movq 24(%rsi),%r12
+.cfi_restore %r12
+ movq 32(%rsi),%rbp
+.cfi_restore %rbp
+ movq 40(%rsi),%rbx
+.cfi_restore %rbx
+ leaq 48(%rsi),%rsp
+.cfi_def_cfa %rsp,8
+.Lepilogue_ssse3:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_cbc_sha1_enc_ssse3,.-aesni_cbc_sha1_enc_ssse3
+.align 64
+K_XX_XX:
+.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999
+.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1
+.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc
+.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.byte 0xf,0xe,0xd,0xc,0xb,0xa,0x9,0x8,0x7,0x6,0x5,0x4,0x3,0x2,0x1,0x0
+
+.byte 65,69,83,78,73,45,67,66,67,43,83,72,65,49,32,115,116,105,116,99,104,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.align 64
+.type aesni_cbc_sha1_enc_shaext,@function
+.align 32
+aesni_cbc_sha1_enc_shaext:
+.cfi_startproc
+ movq 8(%rsp),%r10
+ movdqu (%r9),%xmm8
+ movd 16(%r9),%xmm9
+ movdqa K_XX_XX+80(%rip),%xmm7
+
+ movl 240(%rcx),%r11d
+ subq %rdi,%rsi
+ movups (%rcx),%xmm15
+ movups (%r8),%xmm2
+ movups 16(%rcx),%xmm0
+ leaq 112(%rcx),%rcx
+
+ pshufd $27,%xmm8,%xmm8
+ pshufd $27,%xmm9,%xmm9
+ jmp .Loop_shaext
+
+.align 16
+.Loop_shaext:
+ movups 0(%rdi),%xmm14
+ xorps %xmm15,%xmm14
+ xorps %xmm14,%xmm2
+ movups -80(%rcx),%xmm1
+.byte 102,15,56,220,208
+ movdqu (%r10),%xmm3
+ movdqa %xmm9,%xmm12
+.byte 102,15,56,0,223
+ movdqu 16(%r10),%xmm4
+ movdqa %xmm8,%xmm11
+ movups -64(%rcx),%xmm0
+.byte 102,15,56,220,209
+.byte 102,15,56,0,231
+
+ paddd %xmm3,%xmm9
+ movdqu 32(%r10),%xmm5
+ leaq 64(%r10),%r10
+ pxor %xmm12,%xmm3
+ movups -48(%rcx),%xmm1
+.byte 102,15,56,220,208
+ pxor %xmm12,%xmm3
+ movdqa %xmm8,%xmm10
+.byte 102,15,56,0,239
+.byte 69,15,58,204,193,0
+.byte 68,15,56,200,212
+ movups -32(%rcx),%xmm0
+.byte 102,15,56,220,209
+.byte 15,56,201,220
+ movdqu -16(%r10),%xmm6
+ movdqa %xmm8,%xmm9
+.byte 102,15,56,0,247
+ movups -16(%rcx),%xmm1
+.byte 102,15,56,220,208
+.byte 69,15,58,204,194,0
+.byte 68,15,56,200,205
+ pxor %xmm5,%xmm3
+.byte 15,56,201,229
+ movups 0(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movdqa %xmm8,%xmm10
+.byte 69,15,58,204,193,0
+.byte 68,15,56,200,214
+ movups 16(%rcx),%xmm1
+.byte 102,15,56,220,208
+.byte 15,56,202,222
+ pxor %xmm6,%xmm4
+.byte 15,56,201,238
+ movups 32(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movdqa %xmm8,%xmm9
+.byte 69,15,58,204,194,0
+.byte 68,15,56,200,203
+ movups 48(%rcx),%xmm1
+.byte 102,15,56,220,208
+.byte 15,56,202,227
+ pxor %xmm3,%xmm5
+.byte 15,56,201,243
+ cmpl $11,%r11d
+ jb .Laesenclast6
+ movups 64(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movups 80(%rcx),%xmm1
+.byte 102,15,56,220,208
+ je .Laesenclast6
+ movups 96(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movups 112(%rcx),%xmm1
+.byte 102,15,56,220,208
+.Laesenclast6:
+.byte 102,15,56,221,209
+ movups 16-112(%rcx),%xmm0
+ movdqa %xmm8,%xmm10
+.byte 69,15,58,204,193,0
+.byte 68,15,56,200,212
+ movups 16(%rdi),%xmm14
+ xorps %xmm15,%xmm14
+ movups %xmm2,0(%rsi,%rdi,1)
+ xorps %xmm14,%xmm2
+ movups -80(%rcx),%xmm1
+.byte 102,15,56,220,208
+.byte 15,56,202,236
+ pxor %xmm4,%xmm6
+.byte 15,56,201,220
+ movups -64(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movdqa %xmm8,%xmm9
+.byte 69,15,58,204,194,1
+.byte 68,15,56,200,205
+ movups -48(%rcx),%xmm1
+.byte 102,15,56,220,208
+.byte 15,56,202,245
+ pxor %xmm5,%xmm3
+.byte 15,56,201,229
+ movups -32(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movdqa %xmm8,%xmm10
+.byte 69,15,58,204,193,1
+.byte 68,15,56,200,214
+ movups -16(%rcx),%xmm1
+.byte 102,15,56,220,208
+.byte 15,56,202,222
+ pxor %xmm6,%xmm4
+.byte 15,56,201,238
+ movups 0(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movdqa %xmm8,%xmm9
+.byte 69,15,58,204,194,1
+.byte 68,15,56,200,203
+ movups 16(%rcx),%xmm1
+.byte 102,15,56,220,208
+.byte 15,56,202,227
+ pxor %xmm3,%xmm5
+.byte 15,56,201,243
+ movups 32(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movdqa %xmm8,%xmm10
+.byte 69,15,58,204,193,1
+.byte 68,15,56,200,212
+ movups 48(%rcx),%xmm1
+.byte 102,15,56,220,208
+.byte 15,56,202,236
+ pxor %xmm4,%xmm6
+.byte 15,56,201,220
+ cmpl $11,%r11d
+ jb .Laesenclast7
+ movups 64(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movups 80(%rcx),%xmm1
+.byte 102,15,56,220,208
+ je .Laesenclast7
+ movups 96(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movups 112(%rcx),%xmm1
+.byte 102,15,56,220,208
+.Laesenclast7:
+.byte 102,15,56,221,209
+ movups 16-112(%rcx),%xmm0
+ movdqa %xmm8,%xmm9
+.byte 69,15,58,204,194,1
+.byte 68,15,56,200,205
+ movups 32(%rdi),%xmm14
+ xorps %xmm15,%xmm14
+ movups %xmm2,16(%rsi,%rdi,1)
+ xorps %xmm14,%xmm2
+ movups -80(%rcx),%xmm1
+.byte 102,15,56,220,208
+.byte 15,56,202,245
+ pxor %xmm5,%xmm3
+.byte 15,56,201,229
+ movups -64(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movdqa %xmm8,%xmm10
+.byte 69,15,58,204,193,2
+.byte 68,15,56,200,214
+ movups -48(%rcx),%xmm1
+.byte 102,15,56,220,208
+.byte 15,56,202,222
+ pxor %xmm6,%xmm4
+.byte 15,56,201,238
+ movups -32(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movdqa %xmm8,%xmm9
+.byte 69,15,58,204,194,2
+.byte 68,15,56,200,203
+ movups -16(%rcx),%xmm1
+.byte 102,15,56,220,208
+.byte 15,56,202,227
+ pxor %xmm3,%xmm5
+.byte 15,56,201,243
+ movups 0(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movdqa %xmm8,%xmm10
+.byte 69,15,58,204,193,2
+.byte 68,15,56,200,212
+ movups 16(%rcx),%xmm1
+.byte 102,15,56,220,208
+.byte 15,56,202,236
+ pxor %xmm4,%xmm6
+.byte 15,56,201,220
+ movups 32(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movdqa %xmm8,%xmm9
+.byte 69,15,58,204,194,2
+.byte 68,15,56,200,205
+ movups 48(%rcx),%xmm1
+.byte 102,15,56,220,208
+.byte 15,56,202,245
+ pxor %xmm5,%xmm3
+.byte 15,56,201,229
+ cmpl $11,%r11d
+ jb .Laesenclast8
+ movups 64(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movups 80(%rcx),%xmm1
+.byte 102,15,56,220,208
+ je .Laesenclast8
+ movups 96(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movups 112(%rcx),%xmm1
+.byte 102,15,56,220,208
+.Laesenclast8:
+.byte 102,15,56,221,209
+ movups 16-112(%rcx),%xmm0
+ movdqa %xmm8,%xmm10
+.byte 69,15,58,204,193,2
+.byte 68,15,56,200,214
+ movups 48(%rdi),%xmm14
+ xorps %xmm15,%xmm14
+ movups %xmm2,32(%rsi,%rdi,1)
+ xorps %xmm14,%xmm2
+ movups -80(%rcx),%xmm1
+.byte 102,15,56,220,208
+.byte 15,56,202,222
+ pxor %xmm6,%xmm4
+.byte 15,56,201,238
+ movups -64(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movdqa %xmm8,%xmm9
+.byte 69,15,58,204,194,3
+.byte 68,15,56,200,203
+ movups -48(%rcx),%xmm1
+.byte 102,15,56,220,208
+.byte 15,56,202,227
+ pxor %xmm3,%xmm5
+.byte 15,56,201,243
+ movups -32(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movdqa %xmm8,%xmm10
+.byte 69,15,58,204,193,3
+.byte 68,15,56,200,212
+.byte 15,56,202,236
+ pxor %xmm4,%xmm6
+ movups -16(%rcx),%xmm1
+.byte 102,15,56,220,208
+ movdqa %xmm8,%xmm9
+.byte 69,15,58,204,194,3
+.byte 68,15,56,200,205
+.byte 15,56,202,245
+ movups 0(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movdqa %xmm12,%xmm5
+ movdqa %xmm8,%xmm10
+.byte 69,15,58,204,193,3
+.byte 68,15,56,200,214
+ movups 16(%rcx),%xmm1
+.byte 102,15,56,220,208
+ movdqa %xmm8,%xmm9
+.byte 69,15,58,204,194,3
+.byte 68,15,56,200,205
+ movups 32(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movups 48(%rcx),%xmm1
+.byte 102,15,56,220,208
+ cmpl $11,%r11d
+ jb .Laesenclast9
+ movups 64(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movups 80(%rcx),%xmm1
+.byte 102,15,56,220,208
+ je .Laesenclast9
+ movups 96(%rcx),%xmm0
+.byte 102,15,56,220,209
+ movups 112(%rcx),%xmm1
+.byte 102,15,56,220,208
+.Laesenclast9:
+.byte 102,15,56,221,209
+ movups 16-112(%rcx),%xmm0
+ decq %rdx
+
+ paddd %xmm11,%xmm8
+ movups %xmm2,48(%rsi,%rdi,1)
+ leaq 64(%rdi),%rdi
+ jnz .Loop_shaext
+
+ pshufd $27,%xmm8,%xmm8
+ pshufd $27,%xmm9,%xmm9
+ movups %xmm2,(%r8)
+ movdqu %xmm8,(%r9)
+ movd %xmm9,16(%r9)
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_cbc_sha1_enc_shaext,.-aesni_cbc_sha1_enc_shaext
diff --git a/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/aesni-sha256-x86_64.S b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/aesni-sha256-x86_64.S
new file mode 100644
index 0000000000..e257169287
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/aesni-sha256-x86_64.S
@@ -0,0 +1,69 @@
+# WARNING: do not edit!
+# Generated from openssl/crypto/aes/asm/aesni-sha256-x86_64.pl
+#
+# Copyright 2013-2020 The OpenSSL Project Authors. All Rights Reserved.
+#
+# Licensed under the OpenSSL license (the "License"). You may not use
+# this file except in compliance with the License. You can obtain a copy
+# in the file LICENSE in the source distribution or at
+# https://www.openssl.org/source/license.html
+
+.text
+
+
+.globl aesni_cbc_sha256_enc
+.type aesni_cbc_sha256_enc,@function
+.align 16
+aesni_cbc_sha256_enc:
+.cfi_startproc
+ xorl %eax,%eax
+ cmpq $0,%rdi
+ je .Lprobe
+ ud2
+.Lprobe:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_cbc_sha256_enc,.-aesni_cbc_sha256_enc
+
+.align 64
+.type K256,@object
+K256:
+.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
+.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
+.long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
+.long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
+.long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
+.long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
+.long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
+.long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
+.long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
+.long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
+.long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
+.long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
+.long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
+.long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
+.long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
+.long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
+.long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
+.long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
+.long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
+.long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
+.long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
+.long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
+.long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
+.long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
+.long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
+.long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
+.long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
+.long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
+.long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
+.long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
+.long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
+.long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
+
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.long 0,0,0,0, 0,0,0,0, -1,-1,-1,-1
+.long 0,0,0,0, 0,0,0,0
+.byte 65,69,83,78,73,45,67,66,67,43,83,72,65,50,53,54,32,115,116,105,116,99,104,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.align 64
diff --git a/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/aesni-x86_64.S b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/aesni-x86_64.S
new file mode 100644
index 0000000000..2bdb5cf251
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/aesni-x86_64.S
@@ -0,0 +1,4484 @@
+# WARNING: do not edit!
+# Generated from openssl/crypto/aes/asm/aesni-x86_64.pl
+#
+# Copyright 2009-2020 The OpenSSL Project Authors. All Rights Reserved.
+#
+# Licensed under the OpenSSL license (the "License"). You may not use
+# this file except in compliance with the License. You can obtain a copy
+# in the file LICENSE in the source distribution or at
+# https://www.openssl.org/source/license.html
+
+.text
+
+.globl aesni_encrypt
+.type aesni_encrypt,@function
+.align 16
+aesni_encrypt:
+.cfi_startproc
+ movups (%rdi),%xmm2
+ movl 240(%rdx),%eax
+ movups (%rdx),%xmm0
+ movups 16(%rdx),%xmm1
+ leaq 32(%rdx),%rdx
+ xorps %xmm0,%xmm2
+.Loop_enc1_1:
+.byte 102,15,56,220,209
+ decl %eax
+ movups (%rdx),%xmm1
+ leaq 16(%rdx),%rdx
+ jnz .Loop_enc1_1
+.byte 102,15,56,221,209
+ pxor %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ movups %xmm2,(%rsi)
+ pxor %xmm2,%xmm2
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_encrypt,.-aesni_encrypt
+
+.globl aesni_decrypt
+.type aesni_decrypt,@function
+.align 16
+aesni_decrypt:
+.cfi_startproc
+ movups (%rdi),%xmm2
+ movl 240(%rdx),%eax
+ movups (%rdx),%xmm0
+ movups 16(%rdx),%xmm1
+ leaq 32(%rdx),%rdx
+ xorps %xmm0,%xmm2
+.Loop_dec1_2:
+.byte 102,15,56,222,209
+ decl %eax
+ movups (%rdx),%xmm1
+ leaq 16(%rdx),%rdx
+ jnz .Loop_dec1_2
+.byte 102,15,56,223,209
+ pxor %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ movups %xmm2,(%rsi)
+ pxor %xmm2,%xmm2
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_decrypt, .-aesni_decrypt
+.type _aesni_encrypt2,@function
+.align 16
+_aesni_encrypt2:
+.cfi_startproc
+ movups (%rcx),%xmm0
+ shll $4,%eax
+ movups 16(%rcx),%xmm1
+ xorps %xmm0,%xmm2
+ xorps %xmm0,%xmm3
+ movups 32(%rcx),%xmm0
+ leaq 32(%rcx,%rax,1),%rcx
+ negq %rax
+ addq $16,%rax
+
+.Lenc_loop2:
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+ movups (%rcx,%rax,1),%xmm1
+ addq $32,%rax
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+ movups -16(%rcx,%rax,1),%xmm0
+ jnz .Lenc_loop2
+
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,221,208
+.byte 102,15,56,221,216
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _aesni_encrypt2,.-_aesni_encrypt2
+.type _aesni_decrypt2,@function
+.align 16
+_aesni_decrypt2:
+.cfi_startproc
+ movups (%rcx),%xmm0
+ shll $4,%eax
+ movups 16(%rcx),%xmm1
+ xorps %xmm0,%xmm2
+ xorps %xmm0,%xmm3
+ movups 32(%rcx),%xmm0
+ leaq 32(%rcx,%rax,1),%rcx
+ negq %rax
+ addq $16,%rax
+
+.Ldec_loop2:
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+ movups (%rcx,%rax,1),%xmm1
+ addq $32,%rax
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+ movups -16(%rcx,%rax,1),%xmm0
+ jnz .Ldec_loop2
+
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,223,208
+.byte 102,15,56,223,216
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _aesni_decrypt2,.-_aesni_decrypt2
+.type _aesni_encrypt3,@function
+.align 16
+_aesni_encrypt3:
+.cfi_startproc
+ movups (%rcx),%xmm0
+ shll $4,%eax
+ movups 16(%rcx),%xmm1
+ xorps %xmm0,%xmm2
+ xorps %xmm0,%xmm3
+ xorps %xmm0,%xmm4
+ movups 32(%rcx),%xmm0
+ leaq 32(%rcx,%rax,1),%rcx
+ negq %rax
+ addq $16,%rax
+
+.Lenc_loop3:
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+ movups (%rcx,%rax,1),%xmm1
+ addq $32,%rax
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+.byte 102,15,56,220,224
+ movups -16(%rcx,%rax,1),%xmm0
+ jnz .Lenc_loop3
+
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.byte 102,15,56,221,208
+.byte 102,15,56,221,216
+.byte 102,15,56,221,224
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _aesni_encrypt3,.-_aesni_encrypt3
+.type _aesni_decrypt3,@function
+.align 16
+_aesni_decrypt3:
+.cfi_startproc
+ movups (%rcx),%xmm0
+ shll $4,%eax
+ movups 16(%rcx),%xmm1
+ xorps %xmm0,%xmm2
+ xorps %xmm0,%xmm3
+ xorps %xmm0,%xmm4
+ movups 32(%rcx),%xmm0
+ leaq 32(%rcx,%rax,1),%rcx
+ negq %rax
+ addq $16,%rax
+
+.Ldec_loop3:
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+ movups (%rcx,%rax,1),%xmm1
+ addq $32,%rax
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+ movups -16(%rcx,%rax,1),%xmm0
+ jnz .Ldec_loop3
+
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,223,208
+.byte 102,15,56,223,216
+.byte 102,15,56,223,224
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _aesni_decrypt3,.-_aesni_decrypt3
+.type _aesni_encrypt4,@function
+.align 16
+_aesni_encrypt4:
+.cfi_startproc
+ movups (%rcx),%xmm0
+ shll $4,%eax
+ movups 16(%rcx),%xmm1
+ xorps %xmm0,%xmm2
+ xorps %xmm0,%xmm3
+ xorps %xmm0,%xmm4
+ xorps %xmm0,%xmm5
+ movups 32(%rcx),%xmm0
+ leaq 32(%rcx,%rax,1),%rcx
+ negq %rax
+.byte 0x0f,0x1f,0x00
+ addq $16,%rax
+
+.Lenc_loop4:
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+ movups (%rcx,%rax,1),%xmm1
+ addq $32,%rax
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+ movups -16(%rcx,%rax,1),%xmm0
+ jnz .Lenc_loop4
+
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+.byte 102,15,56,221,208
+.byte 102,15,56,221,216
+.byte 102,15,56,221,224
+.byte 102,15,56,221,232
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _aesni_encrypt4,.-_aesni_encrypt4
+.type _aesni_decrypt4,@function
+.align 16
+_aesni_decrypt4:
+.cfi_startproc
+ movups (%rcx),%xmm0
+ shll $4,%eax
+ movups 16(%rcx),%xmm1
+ xorps %xmm0,%xmm2
+ xorps %xmm0,%xmm3
+ xorps %xmm0,%xmm4
+ xorps %xmm0,%xmm5
+ movups 32(%rcx),%xmm0
+ leaq 32(%rcx,%rax,1),%rcx
+ negq %rax
+.byte 0x0f,0x1f,0x00
+ addq $16,%rax
+
+.Ldec_loop4:
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+ movups (%rcx,%rax,1),%xmm1
+ addq $32,%rax
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+ movups -16(%rcx,%rax,1),%xmm0
+ jnz .Ldec_loop4
+
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+.byte 102,15,56,223,208
+.byte 102,15,56,223,216
+.byte 102,15,56,223,224
+.byte 102,15,56,223,232
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _aesni_decrypt4,.-_aesni_decrypt4
+.type _aesni_encrypt6,@function
+.align 16
+_aesni_encrypt6:
+.cfi_startproc
+ movups (%rcx),%xmm0
+ shll $4,%eax
+ movups 16(%rcx),%xmm1
+ xorps %xmm0,%xmm2
+ pxor %xmm0,%xmm3
+ pxor %xmm0,%xmm4
+.byte 102,15,56,220,209
+ leaq 32(%rcx,%rax,1),%rcx
+ negq %rax
+.byte 102,15,56,220,217
+ pxor %xmm0,%xmm5
+ pxor %xmm0,%xmm6
+.byte 102,15,56,220,225
+ pxor %xmm0,%xmm7
+ movups (%rcx,%rax,1),%xmm0
+ addq $16,%rax
+ jmp .Lenc_loop6_enter
+.align 16
+.Lenc_loop6:
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.Lenc_loop6_enter:
+.byte 102,15,56,220,233
+.byte 102,15,56,220,241
+.byte 102,15,56,220,249
+ movups (%rcx,%rax,1),%xmm1
+ addq $32,%rax
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+.byte 102,15,56,220,240
+.byte 102,15,56,220,248
+ movups -16(%rcx,%rax,1),%xmm0
+ jnz .Lenc_loop6
+
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+.byte 102,15,56,220,241
+.byte 102,15,56,220,249
+.byte 102,15,56,221,208
+.byte 102,15,56,221,216
+.byte 102,15,56,221,224
+.byte 102,15,56,221,232
+.byte 102,15,56,221,240
+.byte 102,15,56,221,248
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _aesni_encrypt6,.-_aesni_encrypt6
+.type _aesni_decrypt6,@function
+.align 16
+_aesni_decrypt6:
+.cfi_startproc
+ movups (%rcx),%xmm0
+ shll $4,%eax
+ movups 16(%rcx),%xmm1
+ xorps %xmm0,%xmm2
+ pxor %xmm0,%xmm3
+ pxor %xmm0,%xmm4
+.byte 102,15,56,222,209
+ leaq 32(%rcx,%rax,1),%rcx
+ negq %rax
+.byte 102,15,56,222,217
+ pxor %xmm0,%xmm5
+ pxor %xmm0,%xmm6
+.byte 102,15,56,222,225
+ pxor %xmm0,%xmm7
+ movups (%rcx,%rax,1),%xmm0
+ addq $16,%rax
+ jmp .Ldec_loop6_enter
+.align 16
+.Ldec_loop6:
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.Ldec_loop6_enter:
+.byte 102,15,56,222,233
+.byte 102,15,56,222,241
+.byte 102,15,56,222,249
+ movups (%rcx,%rax,1),%xmm1
+ addq $32,%rax
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+.byte 102,15,56,222,240
+.byte 102,15,56,222,248
+ movups -16(%rcx,%rax,1),%xmm0
+ jnz .Ldec_loop6
+
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+.byte 102,15,56,222,241
+.byte 102,15,56,222,249
+.byte 102,15,56,223,208
+.byte 102,15,56,223,216
+.byte 102,15,56,223,224
+.byte 102,15,56,223,232
+.byte 102,15,56,223,240
+.byte 102,15,56,223,248
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _aesni_decrypt6,.-_aesni_decrypt6
+.type _aesni_encrypt8,@function
+.align 16
+_aesni_encrypt8:
+.cfi_startproc
+ movups (%rcx),%xmm0
+ shll $4,%eax
+ movups 16(%rcx),%xmm1
+ xorps %xmm0,%xmm2
+ xorps %xmm0,%xmm3
+ pxor %xmm0,%xmm4
+ pxor %xmm0,%xmm5
+ pxor %xmm0,%xmm6
+ leaq 32(%rcx,%rax,1),%rcx
+ negq %rax
+.byte 102,15,56,220,209
+ pxor %xmm0,%xmm7
+ pxor %xmm0,%xmm8
+.byte 102,15,56,220,217
+ pxor %xmm0,%xmm9
+ movups (%rcx,%rax,1),%xmm0
+ addq $16,%rax
+ jmp .Lenc_loop8_inner
+.align 16
+.Lenc_loop8:
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.Lenc_loop8_inner:
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+.byte 102,15,56,220,241
+.byte 102,15,56,220,249
+.byte 102,68,15,56,220,193
+.byte 102,68,15,56,220,201
+.Lenc_loop8_enter:
+ movups (%rcx,%rax,1),%xmm1
+ addq $32,%rax
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+.byte 102,15,56,220,240
+.byte 102,15,56,220,248
+.byte 102,68,15,56,220,192
+.byte 102,68,15,56,220,200
+ movups -16(%rcx,%rax,1),%xmm0
+ jnz .Lenc_loop8
+
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+.byte 102,15,56,220,241
+.byte 102,15,56,220,249
+.byte 102,68,15,56,220,193
+.byte 102,68,15,56,220,201
+.byte 102,15,56,221,208
+.byte 102,15,56,221,216
+.byte 102,15,56,221,224
+.byte 102,15,56,221,232
+.byte 102,15,56,221,240
+.byte 102,15,56,221,248
+.byte 102,68,15,56,221,192
+.byte 102,68,15,56,221,200
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _aesni_encrypt8,.-_aesni_encrypt8
+.type _aesni_decrypt8,@function
+.align 16
+_aesni_decrypt8:
+.cfi_startproc
+ movups (%rcx),%xmm0
+ shll $4,%eax
+ movups 16(%rcx),%xmm1
+ xorps %xmm0,%xmm2
+ xorps %xmm0,%xmm3
+ pxor %xmm0,%xmm4
+ pxor %xmm0,%xmm5
+ pxor %xmm0,%xmm6
+ leaq 32(%rcx,%rax,1),%rcx
+ negq %rax
+.byte 102,15,56,222,209
+ pxor %xmm0,%xmm7
+ pxor %xmm0,%xmm8
+.byte 102,15,56,222,217
+ pxor %xmm0,%xmm9
+ movups (%rcx,%rax,1),%xmm0
+ addq $16,%rax
+ jmp .Ldec_loop8_inner
+.align 16
+.Ldec_loop8:
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.Ldec_loop8_inner:
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+.byte 102,15,56,222,241
+.byte 102,15,56,222,249
+.byte 102,68,15,56,222,193
+.byte 102,68,15,56,222,201
+.Ldec_loop8_enter:
+ movups (%rcx,%rax,1),%xmm1
+ addq $32,%rax
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+.byte 102,15,56,222,240
+.byte 102,15,56,222,248
+.byte 102,68,15,56,222,192
+.byte 102,68,15,56,222,200
+ movups -16(%rcx,%rax,1),%xmm0
+ jnz .Ldec_loop8
+
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+.byte 102,15,56,222,241
+.byte 102,15,56,222,249
+.byte 102,68,15,56,222,193
+.byte 102,68,15,56,222,201
+.byte 102,15,56,223,208
+.byte 102,15,56,223,216
+.byte 102,15,56,223,224
+.byte 102,15,56,223,232
+.byte 102,15,56,223,240
+.byte 102,15,56,223,248
+.byte 102,68,15,56,223,192
+.byte 102,68,15,56,223,200
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _aesni_decrypt8,.-_aesni_decrypt8
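+#
+# aesni_ecb_encrypt: ECB bulk interface.  %rdi = input, %rsi = output,
+# %rdx = length in bytes (truncated to a multiple of 16), %rcx = key
+# schedule, %r8d = non-zero for encryption / zero for decryption.  Up to
+# eight blocks are processed per iteration of the main loop.
+#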
+.globl aesni_ecb_encrypt
+.type aesni_ecb_encrypt,@function
+.align 16
+aesni_ecb_encrypt:
+.cfi_startproc
+ andq $-16,%rdx
+ jz .Lecb_ret
+
+ movl 240(%rcx),%eax
+ movups (%rcx),%xmm0
+ movq %rcx,%r11
+ movl %eax,%r10d
+ testl %r8d,%r8d
+ jz .Lecb_decrypt
+
+ cmpq $0x80,%rdx
+ jb .Lecb_enc_tail
+
+ movdqu (%rdi),%xmm2
+ movdqu 16(%rdi),%xmm3
+ movdqu 32(%rdi),%xmm4
+ movdqu 48(%rdi),%xmm5
+ movdqu 64(%rdi),%xmm6
+ movdqu 80(%rdi),%xmm7
+ movdqu 96(%rdi),%xmm8
+ movdqu 112(%rdi),%xmm9
+ leaq 128(%rdi),%rdi
+ subq $0x80,%rdx
+ jmp .Lecb_enc_loop8_enter
+.align 16
+.Lecb_enc_loop8:
+ movups %xmm2,(%rsi)
+ movq %r11,%rcx
+ movdqu (%rdi),%xmm2
+ movl %r10d,%eax
+ movups %xmm3,16(%rsi)
+ movdqu 16(%rdi),%xmm3
+ movups %xmm4,32(%rsi)
+ movdqu 32(%rdi),%xmm4
+ movups %xmm5,48(%rsi)
+ movdqu 48(%rdi),%xmm5
+ movups %xmm6,64(%rsi)
+ movdqu 64(%rdi),%xmm6
+ movups %xmm7,80(%rsi)
+ movdqu 80(%rdi),%xmm7
+ movups %xmm8,96(%rsi)
+ movdqu 96(%rdi),%xmm8
+ movups %xmm9,112(%rsi)
+ leaq 128(%rsi),%rsi
+ movdqu 112(%rdi),%xmm9
+ leaq 128(%rdi),%rdi
+.Lecb_enc_loop8_enter:
+
+ call _aesni_encrypt8
+
+ subq $0x80,%rdx
+ jnc .Lecb_enc_loop8
+
+ movups %xmm2,(%rsi)
+ movq %r11,%rcx
+ movups %xmm3,16(%rsi)
+ movl %r10d,%eax
+ movups %xmm4,32(%rsi)
+ movups %xmm5,48(%rsi)
+ movups %xmm6,64(%rsi)
+ movups %xmm7,80(%rsi)
+ movups %xmm8,96(%rsi)
+ movups %xmm9,112(%rsi)
+ leaq 128(%rsi),%rsi
+ addq $0x80,%rdx
+ jz .Lecb_ret
+
+.Lecb_enc_tail:
+ movups (%rdi),%xmm2
+ cmpq $0x20,%rdx
+ jb .Lecb_enc_one
+ movups 16(%rdi),%xmm3
+ je .Lecb_enc_two
+ movups 32(%rdi),%xmm4
+ cmpq $0x40,%rdx
+ jb .Lecb_enc_three
+ movups 48(%rdi),%xmm5
+ je .Lecb_enc_four
+ movups 64(%rdi),%xmm6
+ cmpq $0x60,%rdx
+ jb .Lecb_enc_five
+ movups 80(%rdi),%xmm7
+ je .Lecb_enc_six
+ movdqu 96(%rdi),%xmm8
+ xorps %xmm9,%xmm9
+ call _aesni_encrypt8
+ movups %xmm2,(%rsi)
+ movups %xmm3,16(%rsi)
+ movups %xmm4,32(%rsi)
+ movups %xmm5,48(%rsi)
+ movups %xmm6,64(%rsi)
+ movups %xmm7,80(%rsi)
+ movups %xmm8,96(%rsi)
+ jmp .Lecb_ret
+.align 16
+.Lecb_enc_one:
+ movups (%rcx),%xmm0
+ movups 16(%rcx),%xmm1
+ leaq 32(%rcx),%rcx
+ xorps %xmm0,%xmm2
+.Loop_enc1_3:
+.byte 102,15,56,220,209
+ decl %eax
+ movups (%rcx),%xmm1
+ leaq 16(%rcx),%rcx
+ jnz .Loop_enc1_3
+.byte 102,15,56,221,209
+ movups %xmm2,(%rsi)
+ jmp .Lecb_ret
+.align 16
+.Lecb_enc_two:
+ call _aesni_encrypt2
+ movups %xmm2,(%rsi)
+ movups %xmm3,16(%rsi)
+ jmp .Lecb_ret
+.align 16
+.Lecb_enc_three:
+ call _aesni_encrypt3
+ movups %xmm2,(%rsi)
+ movups %xmm3,16(%rsi)
+ movups %xmm4,32(%rsi)
+ jmp .Lecb_ret
+.align 16
+.Lecb_enc_four:
+ call _aesni_encrypt4
+ movups %xmm2,(%rsi)
+ movups %xmm3,16(%rsi)
+ movups %xmm4,32(%rsi)
+ movups %xmm5,48(%rsi)
+ jmp .Lecb_ret
+.align 16
+.Lecb_enc_five:
+ xorps %xmm7,%xmm7
+ call _aesni_encrypt6
+ movups %xmm2,(%rsi)
+ movups %xmm3,16(%rsi)
+ movups %xmm4,32(%rsi)
+ movups %xmm5,48(%rsi)
+ movups %xmm6,64(%rsi)
+ jmp .Lecb_ret
+.align 16
+.Lecb_enc_six:
+ call _aesni_encrypt6
+ movups %xmm2,(%rsi)
+ movups %xmm3,16(%rsi)
+ movups %xmm4,32(%rsi)
+ movups %xmm5,48(%rsi)
+ movups %xmm6,64(%rsi)
+ movups %xmm7,80(%rsi)
+ jmp .Lecb_ret
+
+.align 16
+.Lecb_decrypt:
+ cmpq $0x80,%rdx
+ jb .Lecb_dec_tail
+
+ movdqu (%rdi),%xmm2
+ movdqu 16(%rdi),%xmm3
+ movdqu 32(%rdi),%xmm4
+ movdqu 48(%rdi),%xmm5
+ movdqu 64(%rdi),%xmm6
+ movdqu 80(%rdi),%xmm7
+ movdqu 96(%rdi),%xmm8
+ movdqu 112(%rdi),%xmm9
+ leaq 128(%rdi),%rdi
+ subq $0x80,%rdx
+ jmp .Lecb_dec_loop8_enter
+.align 16
+.Lecb_dec_loop8:
+ movups %xmm2,(%rsi)
+ movq %r11,%rcx
+ movdqu (%rdi),%xmm2
+ movl %r10d,%eax
+ movups %xmm3,16(%rsi)
+ movdqu 16(%rdi),%xmm3
+ movups %xmm4,32(%rsi)
+ movdqu 32(%rdi),%xmm4
+ movups %xmm5,48(%rsi)
+ movdqu 48(%rdi),%xmm5
+ movups %xmm6,64(%rsi)
+ movdqu 64(%rdi),%xmm6
+ movups %xmm7,80(%rsi)
+ movdqu 80(%rdi),%xmm7
+ movups %xmm8,96(%rsi)
+ movdqu 96(%rdi),%xmm8
+ movups %xmm9,112(%rsi)
+ leaq 128(%rsi),%rsi
+ movdqu 112(%rdi),%xmm9
+ leaq 128(%rdi),%rdi
+.Lecb_dec_loop8_enter:
+
+ call _aesni_decrypt8
+
+ movups (%r11),%xmm0
+ subq $0x80,%rdx
+ jnc .Lecb_dec_loop8
+
+ movups %xmm2,(%rsi)
+ pxor %xmm2,%xmm2
+ movq %r11,%rcx
+ movups %xmm3,16(%rsi)
+ pxor %xmm3,%xmm3
+ movl %r10d,%eax
+ movups %xmm4,32(%rsi)
+ pxor %xmm4,%xmm4
+ movups %xmm5,48(%rsi)
+ pxor %xmm5,%xmm5
+ movups %xmm6,64(%rsi)
+ pxor %xmm6,%xmm6
+ movups %xmm7,80(%rsi)
+ pxor %xmm7,%xmm7
+ movups %xmm8,96(%rsi)
+ pxor %xmm8,%xmm8
+ movups %xmm9,112(%rsi)
+ pxor %xmm9,%xmm9
+ leaq 128(%rsi),%rsi
+ addq $0x80,%rdx
+ jz .Lecb_ret
+
+.Lecb_dec_tail:
+ movups (%rdi),%xmm2
+ cmpq $0x20,%rdx
+ jb .Lecb_dec_one
+ movups 16(%rdi),%xmm3
+ je .Lecb_dec_two
+ movups 32(%rdi),%xmm4
+ cmpq $0x40,%rdx
+ jb .Lecb_dec_three
+ movups 48(%rdi),%xmm5
+ je .Lecb_dec_four
+ movups 64(%rdi),%xmm6
+ cmpq $0x60,%rdx
+ jb .Lecb_dec_five
+ movups 80(%rdi),%xmm7
+ je .Lecb_dec_six
+ movups 96(%rdi),%xmm8
+ movups (%rcx),%xmm0
+ xorps %xmm9,%xmm9
+ call _aesni_decrypt8
+ movups %xmm2,(%rsi)
+ pxor %xmm2,%xmm2
+ movups %xmm3,16(%rsi)
+ pxor %xmm3,%xmm3
+ movups %xmm4,32(%rsi)
+ pxor %xmm4,%xmm4
+ movups %xmm5,48(%rsi)
+ pxor %xmm5,%xmm5
+ movups %xmm6,64(%rsi)
+ pxor %xmm6,%xmm6
+ movups %xmm7,80(%rsi)
+ pxor %xmm7,%xmm7
+ movups %xmm8,96(%rsi)
+ pxor %xmm8,%xmm8
+ pxor %xmm9,%xmm9
+ jmp .Lecb_ret
+.align 16
+.Lecb_dec_one:
+ movups (%rcx),%xmm0
+ movups 16(%rcx),%xmm1
+ leaq 32(%rcx),%rcx
+ xorps %xmm0,%xmm2
+.Loop_dec1_4:
+.byte 102,15,56,222,209
+ decl %eax
+ movups (%rcx),%xmm1
+ leaq 16(%rcx),%rcx
+ jnz .Loop_dec1_4
+.byte 102,15,56,223,209
+ movups %xmm2,(%rsi)
+ pxor %xmm2,%xmm2
+ jmp .Lecb_ret
+.align 16
+.Lecb_dec_two:
+ call _aesni_decrypt2
+ movups %xmm2,(%rsi)
+ pxor %xmm2,%xmm2
+ movups %xmm3,16(%rsi)
+ pxor %xmm3,%xmm3
+ jmp .Lecb_ret
+.align 16
+.Lecb_dec_three:
+ call _aesni_decrypt3
+ movups %xmm2,(%rsi)
+ pxor %xmm2,%xmm2
+ movups %xmm3,16(%rsi)
+ pxor %xmm3,%xmm3
+ movups %xmm4,32(%rsi)
+ pxor %xmm4,%xmm4
+ jmp .Lecb_ret
+.align 16
+.Lecb_dec_four:
+ call _aesni_decrypt4
+ movups %xmm2,(%rsi)
+ pxor %xmm2,%xmm2
+ movups %xmm3,16(%rsi)
+ pxor %xmm3,%xmm3
+ movups %xmm4,32(%rsi)
+ pxor %xmm4,%xmm4
+ movups %xmm5,48(%rsi)
+ pxor %xmm5,%xmm5
+ jmp .Lecb_ret
+.align 16
+.Lecb_dec_five:
+ xorps %xmm7,%xmm7
+ call _aesni_decrypt6
+ movups %xmm2,(%rsi)
+ pxor %xmm2,%xmm2
+ movups %xmm3,16(%rsi)
+ pxor %xmm3,%xmm3
+ movups %xmm4,32(%rsi)
+ pxor %xmm4,%xmm4
+ movups %xmm5,48(%rsi)
+ pxor %xmm5,%xmm5
+ movups %xmm6,64(%rsi)
+ pxor %xmm6,%xmm6
+ pxor %xmm7,%xmm7
+ jmp .Lecb_ret
+.align 16
+.Lecb_dec_six:
+ call _aesni_decrypt6
+ movups %xmm2,(%rsi)
+ pxor %xmm2,%xmm2
+ movups %xmm3,16(%rsi)
+ pxor %xmm3,%xmm3
+ movups %xmm4,32(%rsi)
+ pxor %xmm4,%xmm4
+ movups %xmm5,48(%rsi)
+ pxor %xmm5,%xmm5
+ movups %xmm6,64(%rsi)
+ pxor %xmm6,%xmm6
+ movups %xmm7,80(%rsi)
+ pxor %xmm7,%xmm7
+
+.Lecb_ret:
+ xorps %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_ecb_encrypt,.-aesni_ecb_encrypt
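+#
+# aesni_ccm64_encrypt_blocks: CCM helper with a 64-bit counter.  %rdx
+# whole blocks are read from %rdi and written to %rsi; %r8 points at the
+# counter block (its low 64 bits incremented via .Lincrement64 and
+# byte-swapped with .Lbswap_mask) and %r9 at the running CBC-MAC, which
+# is kept in %xmm3 and stored back on exit.
+#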
+.globl aesni_ccm64_encrypt_blocks
+.type aesni_ccm64_encrypt_blocks,@function
+.align 16
+aesni_ccm64_encrypt_blocks:
+.cfi_startproc
+ movl 240(%rcx),%eax
+ movdqu (%r8),%xmm6
+ movdqa .Lincrement64(%rip),%xmm9
+ movdqa .Lbswap_mask(%rip),%xmm7
+
+ shll $4,%eax
+ movl $16,%r10d
+ leaq 0(%rcx),%r11
+ movdqu (%r9),%xmm3
+ movdqa %xmm6,%xmm2
+ leaq 32(%rcx,%rax,1),%rcx
+.byte 102,15,56,0,247
+ subq %rax,%r10
+ jmp .Lccm64_enc_outer
+.align 16
+.Lccm64_enc_outer:
+ movups (%r11),%xmm0
+ movq %r10,%rax
+ movups (%rdi),%xmm8
+
+ xorps %xmm0,%xmm2
+ movups 16(%r11),%xmm1
+ xorps %xmm8,%xmm0
+ xorps %xmm0,%xmm3
+ movups 32(%r11),%xmm0
+
+.Lccm64_enc2_loop:
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+ movups (%rcx,%rax,1),%xmm1
+ addq $32,%rax
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+ movups -16(%rcx,%rax,1),%xmm0
+ jnz .Lccm64_enc2_loop
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+ paddq %xmm9,%xmm6
+ decq %rdx
+.byte 102,15,56,221,208
+.byte 102,15,56,221,216
+
+ leaq 16(%rdi),%rdi
+ xorps %xmm2,%xmm8
+ movdqa %xmm6,%xmm2
+ movups %xmm8,(%rsi)
+.byte 102,15,56,0,215
+ leaq 16(%rsi),%rsi
+ jnz .Lccm64_enc_outer
+
+ pxor %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ pxor %xmm2,%xmm2
+ movups %xmm3,(%r9)
+ pxor %xmm3,%xmm3
+ pxor %xmm8,%xmm8
+ pxor %xmm6,%xmm6
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_ccm64_encrypt_blocks,.-aesni_ccm64_encrypt_blocks
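+#
+# aesni_ccm64_decrypt_blocks: decryption counterpart.  Each block is
+# recovered by XORing the ciphertext with the encrypted counter, the
+# recovered plaintext is folded into the CBC-MAC, and the
+# .Lccm64_dec_break tail runs the final MAC value through the cipher
+# before storing it at (%r9).
+#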
+.globl aesni_ccm64_decrypt_blocks
+.type aesni_ccm64_decrypt_blocks,@function
+.align 16
+aesni_ccm64_decrypt_blocks:
+.cfi_startproc
+ movl 240(%rcx),%eax
+ movups (%r8),%xmm6
+ movdqu (%r9),%xmm3
+ movdqa .Lincrement64(%rip),%xmm9
+ movdqa .Lbswap_mask(%rip),%xmm7
+
+ movaps %xmm6,%xmm2
+ movl %eax,%r10d
+ movq %rcx,%r11
+.byte 102,15,56,0,247
+ movups (%rcx),%xmm0
+ movups 16(%rcx),%xmm1
+ leaq 32(%rcx),%rcx
+ xorps %xmm0,%xmm2
+.Loop_enc1_5:
+.byte 102,15,56,220,209
+ decl %eax
+ movups (%rcx),%xmm1
+ leaq 16(%rcx),%rcx
+ jnz .Loop_enc1_5
+.byte 102,15,56,221,209
+ shll $4,%r10d
+ movl $16,%eax
+ movups (%rdi),%xmm8
+ paddq %xmm9,%xmm6
+ leaq 16(%rdi),%rdi
+ subq %r10,%rax
+ leaq 32(%r11,%r10,1),%rcx
+ movq %rax,%r10
+ jmp .Lccm64_dec_outer
+.align 16
+.Lccm64_dec_outer:
+ xorps %xmm2,%xmm8
+ movdqa %xmm6,%xmm2
+ movups %xmm8,(%rsi)
+ leaq 16(%rsi),%rsi
+.byte 102,15,56,0,215
+
+ subq $1,%rdx
+ jz .Lccm64_dec_break
+
+ movups (%r11),%xmm0
+ movq %r10,%rax
+ movups 16(%r11),%xmm1
+ xorps %xmm0,%xmm8
+ xorps %xmm0,%xmm2
+ xorps %xmm8,%xmm3
+ movups 32(%r11),%xmm0
+ jmp .Lccm64_dec2_loop
+.align 16
+.Lccm64_dec2_loop:
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+ movups (%rcx,%rax,1),%xmm1
+ addq $32,%rax
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+ movups -16(%rcx,%rax,1),%xmm0
+ jnz .Lccm64_dec2_loop
+ movups (%rdi),%xmm8
+ paddq %xmm9,%xmm6
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,221,208
+.byte 102,15,56,221,216
+ leaq 16(%rdi),%rdi
+ jmp .Lccm64_dec_outer
+
+.align 16
+.Lccm64_dec_break:
+
+ movl 240(%r11),%eax
+ movups (%r11),%xmm0
+ movups 16(%r11),%xmm1
+ xorps %xmm0,%xmm8
+ leaq 32(%r11),%r11
+ xorps %xmm8,%xmm3
+.Loop_enc1_6:
+.byte 102,15,56,220,217
+ decl %eax
+ movups (%r11),%xmm1
+ leaq 16(%r11),%r11
+ jnz .Loop_enc1_6
+.byte 102,15,56,221,217
+ pxor %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ pxor %xmm2,%xmm2
+ movups %xmm3,(%r9)
+ pxor %xmm3,%xmm3
+ pxor %xmm8,%xmm8
+ pxor %xmm6,%xmm6
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_ccm64_decrypt_blocks,.-aesni_ccm64_decrypt_blocks
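+#
+# aesni_ctr32_encrypt_blocks: CTR mode with a 32-bit big-endian counter
+# in the last word of the IV at (%r8).  A single block takes the fast
+# path at the top; the bulk path keeps eight pre-XORed counter blocks on
+# the stack and, when OPENSSL_ia32cap_P advertises MOVBE but not XSAVE,
+# switches to the six-block .Lctr32_6x variant that stores counters with
+# movbe (the ".byte 0x0f,0x38,0xf1,..." encodings).
+#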
+.globl aesni_ctr32_encrypt_blocks
+.type aesni_ctr32_encrypt_blocks,@function
+.align 16
+aesni_ctr32_encrypt_blocks:
+.cfi_startproc
+ cmpq $1,%rdx
+ jne .Lctr32_bulk
+
+
+
+ movups (%r8),%xmm2
+ movups (%rdi),%xmm3
+ movl 240(%rcx),%edx
+ movups (%rcx),%xmm0
+ movups 16(%rcx),%xmm1
+ leaq 32(%rcx),%rcx
+ xorps %xmm0,%xmm2
+.Loop_enc1_7:
+.byte 102,15,56,220,209
+ decl %edx
+ movups (%rcx),%xmm1
+ leaq 16(%rcx),%rcx
+ jnz .Loop_enc1_7
+.byte 102,15,56,221,209
+ pxor %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ xorps %xmm3,%xmm2
+ pxor %xmm3,%xmm3
+ movups %xmm2,(%rsi)
+ xorps %xmm2,%xmm2
+ jmp .Lctr32_epilogue
+
+.align 16
+.Lctr32_bulk:
+ leaq (%rsp),%r11
+.cfi_def_cfa_register %r11
+ pushq %rbp
+.cfi_offset %rbp,-16
+ subq $128,%rsp
+ andq $-16,%rsp
+
+
+
+
+ movdqu (%r8),%xmm2
+ movdqu (%rcx),%xmm0
+ movl 12(%r8),%r8d
+ pxor %xmm0,%xmm2
+ movl 12(%rcx),%ebp
+ movdqa %xmm2,0(%rsp)
+ bswapl %r8d
+ movdqa %xmm2,%xmm3
+ movdqa %xmm2,%xmm4
+ movdqa %xmm2,%xmm5
+ movdqa %xmm2,64(%rsp)
+ movdqa %xmm2,80(%rsp)
+ movdqa %xmm2,96(%rsp)
+ movq %rdx,%r10
+ movdqa %xmm2,112(%rsp)
+
+ leaq 1(%r8),%rax
+ leaq 2(%r8),%rdx
+ bswapl %eax
+ bswapl %edx
+ xorl %ebp,%eax
+ xorl %ebp,%edx
+.byte 102,15,58,34,216,3
+ leaq 3(%r8),%rax
+ movdqa %xmm3,16(%rsp)
+.byte 102,15,58,34,226,3
+ bswapl %eax
+ movq %r10,%rdx
+ leaq 4(%r8),%r10
+ movdqa %xmm4,32(%rsp)
+ xorl %ebp,%eax
+ bswapl %r10d
+.byte 102,15,58,34,232,3
+ xorl %ebp,%r10d
+ movdqa %xmm5,48(%rsp)
+ leaq 5(%r8),%r9
+ movl %r10d,64+12(%rsp)
+ bswapl %r9d
+ leaq 6(%r8),%r10
+ movl 240(%rcx),%eax
+ xorl %ebp,%r9d
+ bswapl %r10d
+ movl %r9d,80+12(%rsp)
+ xorl %ebp,%r10d
+ leaq 7(%r8),%r9
+ movl %r10d,96+12(%rsp)
+ bswapl %r9d
+ movl OPENSSL_ia32cap_P+4(%rip),%r10d
+ xorl %ebp,%r9d
+ andl $71303168,%r10d
+ movl %r9d,112+12(%rsp)
+
+ movups 16(%rcx),%xmm1
+
+ movdqa 64(%rsp),%xmm6
+ movdqa 80(%rsp),%xmm7
+
+ cmpq $8,%rdx
+ jb .Lctr32_tail
+
+ subq $6,%rdx
+ cmpl $4194304,%r10d
+ je .Lctr32_6x
+
+ leaq 128(%rcx),%rcx
+ subq $2,%rdx
+ jmp .Lctr32_loop8
+
+.align 16
+.Lctr32_6x:
+ shll $4,%eax
+ movl $48,%r10d
+ bswapl %ebp
+ leaq 32(%rcx,%rax,1),%rcx
+ subq %rax,%r10
+ jmp .Lctr32_loop6
+
+.align 16
+.Lctr32_loop6:
+ addl $6,%r8d
+ movups -48(%rcx,%r10,1),%xmm0
+.byte 102,15,56,220,209
+ movl %r8d,%eax
+ xorl %ebp,%eax
+.byte 102,15,56,220,217
+.byte 0x0f,0x38,0xf1,0x44,0x24,12
+ leal 1(%r8),%eax
+.byte 102,15,56,220,225
+ xorl %ebp,%eax
+.byte 0x0f,0x38,0xf1,0x44,0x24,28
+.byte 102,15,56,220,233
+ leal 2(%r8),%eax
+ xorl %ebp,%eax
+.byte 102,15,56,220,241
+.byte 0x0f,0x38,0xf1,0x44,0x24,44
+ leal 3(%r8),%eax
+.byte 102,15,56,220,249
+ movups -32(%rcx,%r10,1),%xmm1
+ xorl %ebp,%eax
+
+.byte 102,15,56,220,208
+.byte 0x0f,0x38,0xf1,0x44,0x24,60
+ leal 4(%r8),%eax
+.byte 102,15,56,220,216
+ xorl %ebp,%eax
+.byte 0x0f,0x38,0xf1,0x44,0x24,76
+.byte 102,15,56,220,224
+ leal 5(%r8),%eax
+ xorl %ebp,%eax
+.byte 102,15,56,220,232
+.byte 0x0f,0x38,0xf1,0x44,0x24,92
+ movq %r10,%rax
+.byte 102,15,56,220,240
+.byte 102,15,56,220,248
+ movups -16(%rcx,%r10,1),%xmm0
+
+ call .Lenc_loop6
+
+ movdqu (%rdi),%xmm8
+ movdqu 16(%rdi),%xmm9
+ movdqu 32(%rdi),%xmm10
+ movdqu 48(%rdi),%xmm11
+ movdqu 64(%rdi),%xmm12
+ movdqu 80(%rdi),%xmm13
+ leaq 96(%rdi),%rdi
+ movups -64(%rcx,%r10,1),%xmm1
+ pxor %xmm2,%xmm8
+ movaps 0(%rsp),%xmm2
+ pxor %xmm3,%xmm9
+ movaps 16(%rsp),%xmm3
+ pxor %xmm4,%xmm10
+ movaps 32(%rsp),%xmm4
+ pxor %xmm5,%xmm11
+ movaps 48(%rsp),%xmm5
+ pxor %xmm6,%xmm12
+ movaps 64(%rsp),%xmm6
+ pxor %xmm7,%xmm13
+ movaps 80(%rsp),%xmm7
+ movdqu %xmm8,(%rsi)
+ movdqu %xmm9,16(%rsi)
+ movdqu %xmm10,32(%rsi)
+ movdqu %xmm11,48(%rsi)
+ movdqu %xmm12,64(%rsi)
+ movdqu %xmm13,80(%rsi)
+ leaq 96(%rsi),%rsi
+
+ subq $6,%rdx
+ jnc .Lctr32_loop6
+
+ addq $6,%rdx
+ jz .Lctr32_done
+
+ leal -48(%r10),%eax
+ leaq -80(%rcx,%r10,1),%rcx
+ negl %eax
+ shrl $4,%eax
+ jmp .Lctr32_tail
+
+.align 32
+.Lctr32_loop8:
+ addl $8,%r8d
+ movdqa 96(%rsp),%xmm8
+.byte 102,15,56,220,209
+ movl %r8d,%r9d
+ movdqa 112(%rsp),%xmm9
+.byte 102,15,56,220,217
+ bswapl %r9d
+ movups 32-128(%rcx),%xmm0
+.byte 102,15,56,220,225
+ xorl %ebp,%r9d
+ nop
+.byte 102,15,56,220,233
+ movl %r9d,0+12(%rsp)
+ leaq 1(%r8),%r9
+.byte 102,15,56,220,241
+.byte 102,15,56,220,249
+.byte 102,68,15,56,220,193
+.byte 102,68,15,56,220,201
+ movups 48-128(%rcx),%xmm1
+ bswapl %r9d
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+ xorl %ebp,%r9d
+.byte 0x66,0x90
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+ movl %r9d,16+12(%rsp)
+ leaq 2(%r8),%r9
+.byte 102,15,56,220,240
+.byte 102,15,56,220,248
+.byte 102,68,15,56,220,192
+.byte 102,68,15,56,220,200
+ movups 64-128(%rcx),%xmm0
+ bswapl %r9d
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+ xorl %ebp,%r9d
+.byte 0x66,0x90
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+ movl %r9d,32+12(%rsp)
+ leaq 3(%r8),%r9
+.byte 102,15,56,220,241
+.byte 102,15,56,220,249
+.byte 102,68,15,56,220,193
+.byte 102,68,15,56,220,201
+ movups 80-128(%rcx),%xmm1
+ bswapl %r9d
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+ xorl %ebp,%r9d
+.byte 0x66,0x90
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+ movl %r9d,48+12(%rsp)
+ leaq 4(%r8),%r9
+.byte 102,15,56,220,240
+.byte 102,15,56,220,248
+.byte 102,68,15,56,220,192
+.byte 102,68,15,56,220,200
+ movups 96-128(%rcx),%xmm0
+ bswapl %r9d
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+ xorl %ebp,%r9d
+.byte 0x66,0x90
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+ movl %r9d,64+12(%rsp)
+ leaq 5(%r8),%r9
+.byte 102,15,56,220,241
+.byte 102,15,56,220,249
+.byte 102,68,15,56,220,193
+.byte 102,68,15,56,220,201
+ movups 112-128(%rcx),%xmm1
+ bswapl %r9d
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+ xorl %ebp,%r9d
+.byte 0x66,0x90
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+ movl %r9d,80+12(%rsp)
+ leaq 6(%r8),%r9
+.byte 102,15,56,220,240
+.byte 102,15,56,220,248
+.byte 102,68,15,56,220,192
+.byte 102,68,15,56,220,200
+ movups 128-128(%rcx),%xmm0
+ bswapl %r9d
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+ xorl %ebp,%r9d
+.byte 0x66,0x90
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+ movl %r9d,96+12(%rsp)
+ leaq 7(%r8),%r9
+.byte 102,15,56,220,241
+.byte 102,15,56,220,249
+.byte 102,68,15,56,220,193
+.byte 102,68,15,56,220,201
+ movups 144-128(%rcx),%xmm1
+ bswapl %r9d
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+.byte 102,15,56,220,224
+ xorl %ebp,%r9d
+ movdqu 0(%rdi),%xmm10
+.byte 102,15,56,220,232
+ movl %r9d,112+12(%rsp)
+ cmpl $11,%eax
+.byte 102,15,56,220,240
+.byte 102,15,56,220,248
+.byte 102,68,15,56,220,192
+.byte 102,68,15,56,220,200
+ movups 160-128(%rcx),%xmm0
+
+ jb .Lctr32_enc_done
+
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+.byte 102,15,56,220,241
+.byte 102,15,56,220,249
+.byte 102,68,15,56,220,193
+.byte 102,68,15,56,220,201
+ movups 176-128(%rcx),%xmm1
+
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+.byte 102,15,56,220,240
+.byte 102,15,56,220,248
+.byte 102,68,15,56,220,192
+.byte 102,68,15,56,220,200
+ movups 192-128(%rcx),%xmm0
+ je .Lctr32_enc_done
+
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+.byte 102,15,56,220,241
+.byte 102,15,56,220,249
+.byte 102,68,15,56,220,193
+.byte 102,68,15,56,220,201
+ movups 208-128(%rcx),%xmm1
+
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+.byte 102,15,56,220,240
+.byte 102,15,56,220,248
+.byte 102,68,15,56,220,192
+.byte 102,68,15,56,220,200
+ movups 224-128(%rcx),%xmm0
+ jmp .Lctr32_enc_done
+
+.align 16
+.Lctr32_enc_done:
+ movdqu 16(%rdi),%xmm11
+ pxor %xmm0,%xmm10
+ movdqu 32(%rdi),%xmm12
+ pxor %xmm0,%xmm11
+ movdqu 48(%rdi),%xmm13
+ pxor %xmm0,%xmm12
+ movdqu 64(%rdi),%xmm14
+ pxor %xmm0,%xmm13
+ movdqu 80(%rdi),%xmm15
+ pxor %xmm0,%xmm14
+ pxor %xmm0,%xmm15
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+.byte 102,15,56,220,241
+.byte 102,15,56,220,249
+.byte 102,68,15,56,220,193
+.byte 102,68,15,56,220,201
+ movdqu 96(%rdi),%xmm1
+ leaq 128(%rdi),%rdi
+
+.byte 102,65,15,56,221,210
+ pxor %xmm0,%xmm1
+ movdqu 112-128(%rdi),%xmm10
+.byte 102,65,15,56,221,219
+ pxor %xmm0,%xmm10
+ movdqa 0(%rsp),%xmm11
+.byte 102,65,15,56,221,228
+.byte 102,65,15,56,221,237
+ movdqa 16(%rsp),%xmm12
+ movdqa 32(%rsp),%xmm13
+.byte 102,65,15,56,221,246
+.byte 102,65,15,56,221,255
+ movdqa 48(%rsp),%xmm14
+ movdqa 64(%rsp),%xmm15
+.byte 102,68,15,56,221,193
+ movdqa 80(%rsp),%xmm0
+ movups 16-128(%rcx),%xmm1
+.byte 102,69,15,56,221,202
+
+ movups %xmm2,(%rsi)
+ movdqa %xmm11,%xmm2
+ movups %xmm3,16(%rsi)
+ movdqa %xmm12,%xmm3
+ movups %xmm4,32(%rsi)
+ movdqa %xmm13,%xmm4
+ movups %xmm5,48(%rsi)
+ movdqa %xmm14,%xmm5
+ movups %xmm6,64(%rsi)
+ movdqa %xmm15,%xmm6
+ movups %xmm7,80(%rsi)
+ movdqa %xmm0,%xmm7
+ movups %xmm8,96(%rsi)
+ movups %xmm9,112(%rsi)
+ leaq 128(%rsi),%rsi
+
+ subq $8,%rdx
+ jnc .Lctr32_loop8
+
+ addq $8,%rdx
+ jz .Lctr32_done
+ leaq -128(%rcx),%rcx
+
+.Lctr32_tail:
+
+
+ leaq 16(%rcx),%rcx
+ cmpq $4,%rdx
+ jb .Lctr32_loop3
+ je .Lctr32_loop4
+
+
+ shll $4,%eax
+ movdqa 96(%rsp),%xmm8
+ pxor %xmm9,%xmm9
+
+ movups 16(%rcx),%xmm0
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+ leaq 32-16(%rcx,%rax,1),%rcx
+ negq %rax
+.byte 102,15,56,220,225
+ addq $16,%rax
+ movups (%rdi),%xmm10
+.byte 102,15,56,220,233
+.byte 102,15,56,220,241
+ movups 16(%rdi),%xmm11
+ movups 32(%rdi),%xmm12
+.byte 102,15,56,220,249
+.byte 102,68,15,56,220,193
+
+ call .Lenc_loop8_enter
+
+ movdqu 48(%rdi),%xmm13
+ pxor %xmm10,%xmm2
+ movdqu 64(%rdi),%xmm10
+ pxor %xmm11,%xmm3
+ movdqu %xmm2,(%rsi)
+ pxor %xmm12,%xmm4
+ movdqu %xmm3,16(%rsi)
+ pxor %xmm13,%xmm5
+ movdqu %xmm4,32(%rsi)
+ pxor %xmm10,%xmm6
+ movdqu %xmm5,48(%rsi)
+ movdqu %xmm6,64(%rsi)
+ cmpq $6,%rdx
+ jb .Lctr32_done
+
+ movups 80(%rdi),%xmm11
+ xorps %xmm11,%xmm7
+ movups %xmm7,80(%rsi)
+ je .Lctr32_done
+
+ movups 96(%rdi),%xmm12
+ xorps %xmm12,%xmm8
+ movups %xmm8,96(%rsi)
+ jmp .Lctr32_done
+
+.align 32
+.Lctr32_loop4:
+.byte 102,15,56,220,209
+ leaq 16(%rcx),%rcx
+ decl %eax
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+ movups (%rcx),%xmm1
+ jnz .Lctr32_loop4
+.byte 102,15,56,221,209
+.byte 102,15,56,221,217
+ movups (%rdi),%xmm10
+ movups 16(%rdi),%xmm11
+.byte 102,15,56,221,225
+.byte 102,15,56,221,233
+ movups 32(%rdi),%xmm12
+ movups 48(%rdi),%xmm13
+
+ xorps %xmm10,%xmm2
+ movups %xmm2,(%rsi)
+ xorps %xmm11,%xmm3
+ movups %xmm3,16(%rsi)
+ pxor %xmm12,%xmm4
+ movdqu %xmm4,32(%rsi)
+ pxor %xmm13,%xmm5
+ movdqu %xmm5,48(%rsi)
+ jmp .Lctr32_done
+
+.align 32
+.Lctr32_loop3:
+.byte 102,15,56,220,209
+ leaq 16(%rcx),%rcx
+ decl %eax
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+ movups (%rcx),%xmm1
+ jnz .Lctr32_loop3
+.byte 102,15,56,221,209
+.byte 102,15,56,221,217
+.byte 102,15,56,221,225
+
+ movups (%rdi),%xmm10
+ xorps %xmm10,%xmm2
+ movups %xmm2,(%rsi)
+ cmpq $2,%rdx
+ jb .Lctr32_done
+
+ movups 16(%rdi),%xmm11
+ xorps %xmm11,%xmm3
+ movups %xmm3,16(%rsi)
+ je .Lctr32_done
+
+ movups 32(%rdi),%xmm12
+ xorps %xmm12,%xmm4
+ movups %xmm4,32(%rsi)
+
+.Lctr32_done:
+ xorps %xmm0,%xmm0
+ xorl %ebp,%ebp
+ pxor %xmm1,%xmm1
+ pxor %xmm2,%xmm2
+ pxor %xmm3,%xmm3
+ pxor %xmm4,%xmm4
+ pxor %xmm5,%xmm5
+ pxor %xmm6,%xmm6
+ pxor %xmm7,%xmm7
+ movaps %xmm0,0(%rsp)
+ pxor %xmm8,%xmm8
+ movaps %xmm0,16(%rsp)
+ pxor %xmm9,%xmm9
+ movaps %xmm0,32(%rsp)
+ pxor %xmm10,%xmm10
+ movaps %xmm0,48(%rsp)
+ pxor %xmm11,%xmm11
+ movaps %xmm0,64(%rsp)
+ pxor %xmm12,%xmm12
+ movaps %xmm0,80(%rsp)
+ pxor %xmm13,%xmm13
+ movaps %xmm0,96(%rsp)
+ pxor %xmm14,%xmm14
+ movaps %xmm0,112(%rsp)
+ pxor %xmm15,%xmm15
+ movq -8(%r11),%rbp
+.cfi_restore %rbp
+ leaq (%r11),%rsp
+.cfi_def_cfa_register %rsp
+.Lctr32_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_ctr32_encrypt_blocks,.-aesni_ctr32_encrypt_blocks
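+#
+# aesni_xts_encrypt: XTS mode.  %rcx is the data key, %r8 the tweak key,
+# %r9 the initial tweak; the tweak is encrypted once and then advanced
+# with the GF(2^128) doubling constant at .Lxts_magic.  The grand loop
+# handles six blocks per iteration, and .Lxts_enc_steal performs
+# ciphertext stealing for a trailing partial block.
+#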
+.globl aesni_xts_encrypt
+.type aesni_xts_encrypt,@function
+.align 16
+aesni_xts_encrypt:
+.cfi_startproc
+ leaq (%rsp),%r11
+.cfi_def_cfa_register %r11
+ pushq %rbp
+.cfi_offset %rbp,-16
+ subq $112,%rsp
+ andq $-16,%rsp
+ movups (%r9),%xmm2
+ movl 240(%r8),%eax
+ movl 240(%rcx),%r10d
+ movups (%r8),%xmm0
+ movups 16(%r8),%xmm1
+ leaq 32(%r8),%r8
+ xorps %xmm0,%xmm2
+.Loop_enc1_8:
+.byte 102,15,56,220,209
+ decl %eax
+ movups (%r8),%xmm1
+ leaq 16(%r8),%r8
+ jnz .Loop_enc1_8
+.byte 102,15,56,221,209
+ movups (%rcx),%xmm0
+ movq %rcx,%rbp
+ movl %r10d,%eax
+ shll $4,%r10d
+ movq %rdx,%r9
+ andq $-16,%rdx
+
+ movups 16(%rcx,%r10,1),%xmm1
+
+ movdqa .Lxts_magic(%rip),%xmm8
+ movdqa %xmm2,%xmm15
+ pshufd $0x5f,%xmm2,%xmm9
+ pxor %xmm0,%xmm1
+ movdqa %xmm9,%xmm14
+ paddd %xmm9,%xmm9
+ movdqa %xmm15,%xmm10
+ psrad $31,%xmm14
+ paddq %xmm15,%xmm15
+ pand %xmm8,%xmm14
+ pxor %xmm0,%xmm10
+ pxor %xmm14,%xmm15
+ movdqa %xmm9,%xmm14
+ paddd %xmm9,%xmm9
+ movdqa %xmm15,%xmm11
+ psrad $31,%xmm14
+ paddq %xmm15,%xmm15
+ pand %xmm8,%xmm14
+ pxor %xmm0,%xmm11
+ pxor %xmm14,%xmm15
+ movdqa %xmm9,%xmm14
+ paddd %xmm9,%xmm9
+ movdqa %xmm15,%xmm12
+ psrad $31,%xmm14
+ paddq %xmm15,%xmm15
+ pand %xmm8,%xmm14
+ pxor %xmm0,%xmm12
+ pxor %xmm14,%xmm15
+ movdqa %xmm9,%xmm14
+ paddd %xmm9,%xmm9
+ movdqa %xmm15,%xmm13
+ psrad $31,%xmm14
+ paddq %xmm15,%xmm15
+ pand %xmm8,%xmm14
+ pxor %xmm0,%xmm13
+ pxor %xmm14,%xmm15
+ movdqa %xmm15,%xmm14
+ psrad $31,%xmm9
+ paddq %xmm15,%xmm15
+ pand %xmm8,%xmm9
+ pxor %xmm0,%xmm14
+ pxor %xmm9,%xmm15
+ movaps %xmm1,96(%rsp)
+
+ subq $96,%rdx
+ jc .Lxts_enc_short
+
+ movl $16+96,%eax
+ leaq 32(%rbp,%r10,1),%rcx
+ subq %r10,%rax
+ movups 16(%rbp),%xmm1
+ movq %rax,%r10
+ leaq .Lxts_magic(%rip),%r8
+ jmp .Lxts_enc_grandloop
+
+.align 32
+.Lxts_enc_grandloop:
+ movdqu 0(%rdi),%xmm2
+ movdqa %xmm0,%xmm8
+ movdqu 16(%rdi),%xmm3
+ pxor %xmm10,%xmm2
+ movdqu 32(%rdi),%xmm4
+ pxor %xmm11,%xmm3
+.byte 102,15,56,220,209
+ movdqu 48(%rdi),%xmm5
+ pxor %xmm12,%xmm4
+.byte 102,15,56,220,217
+ movdqu 64(%rdi),%xmm6
+ pxor %xmm13,%xmm5
+.byte 102,15,56,220,225
+ movdqu 80(%rdi),%xmm7
+ pxor %xmm15,%xmm8
+ movdqa 96(%rsp),%xmm9
+ pxor %xmm14,%xmm6
+.byte 102,15,56,220,233
+ movups 32(%rbp),%xmm0
+ leaq 96(%rdi),%rdi
+ pxor %xmm8,%xmm7
+
+ pxor %xmm9,%xmm10
+.byte 102,15,56,220,241
+ pxor %xmm9,%xmm11
+ movdqa %xmm10,0(%rsp)
+.byte 102,15,56,220,249
+ movups 48(%rbp),%xmm1
+ pxor %xmm9,%xmm12
+
+.byte 102,15,56,220,208
+ pxor %xmm9,%xmm13
+ movdqa %xmm11,16(%rsp)
+.byte 102,15,56,220,216
+ pxor %xmm9,%xmm14
+ movdqa %xmm12,32(%rsp)
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+ pxor %xmm9,%xmm8
+ movdqa %xmm14,64(%rsp)
+.byte 102,15,56,220,240
+.byte 102,15,56,220,248
+ movups 64(%rbp),%xmm0
+ movdqa %xmm8,80(%rsp)
+ pshufd $0x5f,%xmm15,%xmm9
+ jmp .Lxts_enc_loop6
+.align 32
+.Lxts_enc_loop6:
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+.byte 102,15,56,220,241
+.byte 102,15,56,220,249
+ movups -64(%rcx,%rax,1),%xmm1
+ addq $32,%rax
+
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+.byte 102,15,56,220,240
+.byte 102,15,56,220,248
+ movups -80(%rcx,%rax,1),%xmm0
+ jnz .Lxts_enc_loop6
+
+ movdqa (%r8),%xmm8
+ movdqa %xmm9,%xmm14
+ paddd %xmm9,%xmm9
+.byte 102,15,56,220,209
+ paddq %xmm15,%xmm15
+ psrad $31,%xmm14
+.byte 102,15,56,220,217
+ pand %xmm8,%xmm14
+ movups (%rbp),%xmm10
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+.byte 102,15,56,220,241
+ pxor %xmm14,%xmm15
+ movaps %xmm10,%xmm11
+.byte 102,15,56,220,249
+ movups -64(%rcx),%xmm1
+
+ movdqa %xmm9,%xmm14
+.byte 102,15,56,220,208
+ paddd %xmm9,%xmm9
+ pxor %xmm15,%xmm10
+.byte 102,15,56,220,216
+ psrad $31,%xmm14
+ paddq %xmm15,%xmm15
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+ pand %xmm8,%xmm14
+ movaps %xmm11,%xmm12
+.byte 102,15,56,220,240
+ pxor %xmm14,%xmm15
+ movdqa %xmm9,%xmm14
+.byte 102,15,56,220,248
+ movups -48(%rcx),%xmm0
+
+ paddd %xmm9,%xmm9
+.byte 102,15,56,220,209
+ pxor %xmm15,%xmm11
+ psrad $31,%xmm14
+.byte 102,15,56,220,217
+ paddq %xmm15,%xmm15
+ pand %xmm8,%xmm14
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+ movdqa %xmm13,48(%rsp)
+ pxor %xmm14,%xmm15
+.byte 102,15,56,220,241
+ movaps %xmm12,%xmm13
+ movdqa %xmm9,%xmm14
+.byte 102,15,56,220,249
+ movups -32(%rcx),%xmm1
+
+ paddd %xmm9,%xmm9
+.byte 102,15,56,220,208
+ pxor %xmm15,%xmm12
+ psrad $31,%xmm14
+.byte 102,15,56,220,216
+ paddq %xmm15,%xmm15
+ pand %xmm8,%xmm14
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+.byte 102,15,56,220,240
+ pxor %xmm14,%xmm15
+ movaps %xmm13,%xmm14
+.byte 102,15,56,220,248
+
+ movdqa %xmm9,%xmm0
+ paddd %xmm9,%xmm9
+.byte 102,15,56,220,209
+ pxor %xmm15,%xmm13
+ psrad $31,%xmm0
+.byte 102,15,56,220,217
+ paddq %xmm15,%xmm15
+ pand %xmm8,%xmm0
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+ pxor %xmm0,%xmm15
+ movups (%rbp),%xmm0
+.byte 102,15,56,220,241
+.byte 102,15,56,220,249
+ movups 16(%rbp),%xmm1
+
+ pxor %xmm15,%xmm14
+.byte 102,15,56,221,84,36,0
+ psrad $31,%xmm9
+ paddq %xmm15,%xmm15
+.byte 102,15,56,221,92,36,16
+.byte 102,15,56,221,100,36,32
+ pand %xmm8,%xmm9
+ movq %r10,%rax
+.byte 102,15,56,221,108,36,48
+.byte 102,15,56,221,116,36,64
+.byte 102,15,56,221,124,36,80
+ pxor %xmm9,%xmm15
+
+ leaq 96(%rsi),%rsi
+ movups %xmm2,-96(%rsi)
+ movups %xmm3,-80(%rsi)
+ movups %xmm4,-64(%rsi)
+ movups %xmm5,-48(%rsi)
+ movups %xmm6,-32(%rsi)
+ movups %xmm7,-16(%rsi)
+ subq $96,%rdx
+ jnc .Lxts_enc_grandloop
+
+ movl $16+96,%eax
+ subl %r10d,%eax
+ movq %rbp,%rcx
+ shrl $4,%eax
+
+.Lxts_enc_short:
+
+ movl %eax,%r10d
+ pxor %xmm0,%xmm10
+ addq $96,%rdx
+ jz .Lxts_enc_done
+
+ pxor %xmm0,%xmm11
+ cmpq $0x20,%rdx
+ jb .Lxts_enc_one
+ pxor %xmm0,%xmm12
+ je .Lxts_enc_two
+
+ pxor %xmm0,%xmm13
+ cmpq $0x40,%rdx
+ jb .Lxts_enc_three
+ pxor %xmm0,%xmm14
+ je .Lxts_enc_four
+
+ movdqu (%rdi),%xmm2
+ movdqu 16(%rdi),%xmm3
+ movdqu 32(%rdi),%xmm4
+ pxor %xmm10,%xmm2
+ movdqu 48(%rdi),%xmm5
+ pxor %xmm11,%xmm3
+ movdqu 64(%rdi),%xmm6
+ leaq 80(%rdi),%rdi
+ pxor %xmm12,%xmm4
+ pxor %xmm13,%xmm5
+ pxor %xmm14,%xmm6
+ pxor %xmm7,%xmm7
+
+ call _aesni_encrypt6
+
+ xorps %xmm10,%xmm2
+ movdqa %xmm15,%xmm10
+ xorps %xmm11,%xmm3
+ xorps %xmm12,%xmm4
+ movdqu %xmm2,(%rsi)
+ xorps %xmm13,%xmm5
+ movdqu %xmm3,16(%rsi)
+ xorps %xmm14,%xmm6
+ movdqu %xmm4,32(%rsi)
+ movdqu %xmm5,48(%rsi)
+ movdqu %xmm6,64(%rsi)
+ leaq 80(%rsi),%rsi
+ jmp .Lxts_enc_done
+
+.align 16
+.Lxts_enc_one:
+ movups (%rdi),%xmm2
+ leaq 16(%rdi),%rdi
+ xorps %xmm10,%xmm2
+ movups (%rcx),%xmm0
+ movups 16(%rcx),%xmm1
+ leaq 32(%rcx),%rcx
+ xorps %xmm0,%xmm2
+.Loop_enc1_9:
+.byte 102,15,56,220,209
+ decl %eax
+ movups (%rcx),%xmm1
+ leaq 16(%rcx),%rcx
+ jnz .Loop_enc1_9
+.byte 102,15,56,221,209
+ xorps %xmm10,%xmm2
+ movdqa %xmm11,%xmm10
+ movups %xmm2,(%rsi)
+ leaq 16(%rsi),%rsi
+ jmp .Lxts_enc_done
+
+.align 16
+.Lxts_enc_two:
+ movups (%rdi),%xmm2
+ movups 16(%rdi),%xmm3
+ leaq 32(%rdi),%rdi
+ xorps %xmm10,%xmm2
+ xorps %xmm11,%xmm3
+
+ call _aesni_encrypt2
+
+ xorps %xmm10,%xmm2
+ movdqa %xmm12,%xmm10
+ xorps %xmm11,%xmm3
+ movups %xmm2,(%rsi)
+ movups %xmm3,16(%rsi)
+ leaq 32(%rsi),%rsi
+ jmp .Lxts_enc_done
+
+.align 16
+.Lxts_enc_three:
+ movups (%rdi),%xmm2
+ movups 16(%rdi),%xmm3
+ movups 32(%rdi),%xmm4
+ leaq 48(%rdi),%rdi
+ xorps %xmm10,%xmm2
+ xorps %xmm11,%xmm3
+ xorps %xmm12,%xmm4
+
+ call _aesni_encrypt3
+
+ xorps %xmm10,%xmm2
+ movdqa %xmm13,%xmm10
+ xorps %xmm11,%xmm3
+ xorps %xmm12,%xmm4
+ movups %xmm2,(%rsi)
+ movups %xmm3,16(%rsi)
+ movups %xmm4,32(%rsi)
+ leaq 48(%rsi),%rsi
+ jmp .Lxts_enc_done
+
+.align 16
+.Lxts_enc_four:
+ movups (%rdi),%xmm2
+ movups 16(%rdi),%xmm3
+ movups 32(%rdi),%xmm4
+ xorps %xmm10,%xmm2
+ movups 48(%rdi),%xmm5
+ leaq 64(%rdi),%rdi
+ xorps %xmm11,%xmm3
+ xorps %xmm12,%xmm4
+ xorps %xmm13,%xmm5
+
+ call _aesni_encrypt4
+
+ pxor %xmm10,%xmm2
+ movdqa %xmm14,%xmm10
+ pxor %xmm11,%xmm3
+ pxor %xmm12,%xmm4
+ movdqu %xmm2,(%rsi)
+ pxor %xmm13,%xmm5
+ movdqu %xmm3,16(%rsi)
+ movdqu %xmm4,32(%rsi)
+ movdqu %xmm5,48(%rsi)
+ leaq 64(%rsi),%rsi
+ jmp .Lxts_enc_done
+
+.align 16
+.Lxts_enc_done:
+ andq $15,%r9
+ jz .Lxts_enc_ret
+ movq %r9,%rdx
+
+.Lxts_enc_steal:
+ movzbl (%rdi),%eax
+ movzbl -16(%rsi),%ecx
+ leaq 1(%rdi),%rdi
+ movb %al,-16(%rsi)
+ movb %cl,0(%rsi)
+ leaq 1(%rsi),%rsi
+ subq $1,%rdx
+ jnz .Lxts_enc_steal
+
+ subq %r9,%rsi
+ movq %rbp,%rcx
+ movl %r10d,%eax
+
+ movups -16(%rsi),%xmm2
+ xorps %xmm10,%xmm2
+ movups (%rcx),%xmm0
+ movups 16(%rcx),%xmm1
+ leaq 32(%rcx),%rcx
+ xorps %xmm0,%xmm2
+.Loop_enc1_10:
+.byte 102,15,56,220,209
+ decl %eax
+ movups (%rcx),%xmm1
+ leaq 16(%rcx),%rcx
+ jnz .Loop_enc1_10
+.byte 102,15,56,221,209
+ xorps %xmm10,%xmm2
+ movups %xmm2,-16(%rsi)
+
+.Lxts_enc_ret:
+ xorps %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ pxor %xmm2,%xmm2
+ pxor %xmm3,%xmm3
+ pxor %xmm4,%xmm4
+ pxor %xmm5,%xmm5
+ pxor %xmm6,%xmm6
+ pxor %xmm7,%xmm7
+ movaps %xmm0,0(%rsp)
+ pxor %xmm8,%xmm8
+ movaps %xmm0,16(%rsp)
+ pxor %xmm9,%xmm9
+ movaps %xmm0,32(%rsp)
+ pxor %xmm10,%xmm10
+ movaps %xmm0,48(%rsp)
+ pxor %xmm11,%xmm11
+ movaps %xmm0,64(%rsp)
+ pxor %xmm12,%xmm12
+ movaps %xmm0,80(%rsp)
+ pxor %xmm13,%xmm13
+ movaps %xmm0,96(%rsp)
+ pxor %xmm14,%xmm14
+ pxor %xmm15,%xmm15
+ movq -8(%r11),%rbp
+.cfi_restore %rbp
+ leaq (%r11),%rsp
+.cfi_def_cfa_register %rsp
+.Lxts_enc_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_xts_encrypt,.-aesni_xts_encrypt
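+#
+# aesni_xts_decrypt: same calling convention as aesni_xts_encrypt.  When
+# the input is not a whole number of blocks, the length is rounded down
+# by one extra block up front so that the last full block can feed the
+# ciphertext-stealing tail at .Lxts_dec_steal.
+#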
+.globl aesni_xts_decrypt
+.type aesni_xts_decrypt,@function
+.align 16
+aesni_xts_decrypt:
+.cfi_startproc
+ leaq (%rsp),%r11
+.cfi_def_cfa_register %r11
+ pushq %rbp
+.cfi_offset %rbp,-16
+ subq $112,%rsp
+ andq $-16,%rsp
+ movups (%r9),%xmm2
+ movl 240(%r8),%eax
+ movl 240(%rcx),%r10d
+ movups (%r8),%xmm0
+ movups 16(%r8),%xmm1
+ leaq 32(%r8),%r8
+ xorps %xmm0,%xmm2
+.Loop_enc1_11:
+.byte 102,15,56,220,209
+ decl %eax
+ movups (%r8),%xmm1
+ leaq 16(%r8),%r8
+ jnz .Loop_enc1_11
+.byte 102,15,56,221,209
+ xorl %eax,%eax
+ testq $15,%rdx
+ setnz %al
+ shlq $4,%rax
+ subq %rax,%rdx
+
+ movups (%rcx),%xmm0
+ movq %rcx,%rbp
+ movl %r10d,%eax
+ shll $4,%r10d
+ movq %rdx,%r9
+ andq $-16,%rdx
+
+ movups 16(%rcx,%r10,1),%xmm1
+
+ movdqa .Lxts_magic(%rip),%xmm8
+ movdqa %xmm2,%xmm15
+ pshufd $0x5f,%xmm2,%xmm9
+ pxor %xmm0,%xmm1
+ movdqa %xmm9,%xmm14
+ paddd %xmm9,%xmm9
+ movdqa %xmm15,%xmm10
+ psrad $31,%xmm14
+ paddq %xmm15,%xmm15
+ pand %xmm8,%xmm14
+ pxor %xmm0,%xmm10
+ pxor %xmm14,%xmm15
+ movdqa %xmm9,%xmm14
+ paddd %xmm9,%xmm9
+ movdqa %xmm15,%xmm11
+ psrad $31,%xmm14
+ paddq %xmm15,%xmm15
+ pand %xmm8,%xmm14
+ pxor %xmm0,%xmm11
+ pxor %xmm14,%xmm15
+ movdqa %xmm9,%xmm14
+ paddd %xmm9,%xmm9
+ movdqa %xmm15,%xmm12
+ psrad $31,%xmm14
+ paddq %xmm15,%xmm15
+ pand %xmm8,%xmm14
+ pxor %xmm0,%xmm12
+ pxor %xmm14,%xmm15
+ movdqa %xmm9,%xmm14
+ paddd %xmm9,%xmm9
+ movdqa %xmm15,%xmm13
+ psrad $31,%xmm14
+ paddq %xmm15,%xmm15
+ pand %xmm8,%xmm14
+ pxor %xmm0,%xmm13
+ pxor %xmm14,%xmm15
+ movdqa %xmm15,%xmm14
+ psrad $31,%xmm9
+ paddq %xmm15,%xmm15
+ pand %xmm8,%xmm9
+ pxor %xmm0,%xmm14
+ pxor %xmm9,%xmm15
+ movaps %xmm1,96(%rsp)
+
+ subq $96,%rdx
+ jc .Lxts_dec_short
+
+ movl $16+96,%eax
+ leaq 32(%rbp,%r10,1),%rcx
+ subq %r10,%rax
+ movups 16(%rbp),%xmm1
+ movq %rax,%r10
+ leaq .Lxts_magic(%rip),%r8
+ jmp .Lxts_dec_grandloop
+
+.align 32
+.Lxts_dec_grandloop:
+ movdqu 0(%rdi),%xmm2
+ movdqa %xmm0,%xmm8
+ movdqu 16(%rdi),%xmm3
+ pxor %xmm10,%xmm2
+ movdqu 32(%rdi),%xmm4
+ pxor %xmm11,%xmm3
+.byte 102,15,56,222,209
+ movdqu 48(%rdi),%xmm5
+ pxor %xmm12,%xmm4
+.byte 102,15,56,222,217
+ movdqu 64(%rdi),%xmm6
+ pxor %xmm13,%xmm5
+.byte 102,15,56,222,225
+ movdqu 80(%rdi),%xmm7
+ pxor %xmm15,%xmm8
+ movdqa 96(%rsp),%xmm9
+ pxor %xmm14,%xmm6
+.byte 102,15,56,222,233
+ movups 32(%rbp),%xmm0
+ leaq 96(%rdi),%rdi
+ pxor %xmm8,%xmm7
+
+ pxor %xmm9,%xmm10
+.byte 102,15,56,222,241
+ pxor %xmm9,%xmm11
+ movdqa %xmm10,0(%rsp)
+.byte 102,15,56,222,249
+ movups 48(%rbp),%xmm1
+ pxor %xmm9,%xmm12
+
+.byte 102,15,56,222,208
+ pxor %xmm9,%xmm13
+ movdqa %xmm11,16(%rsp)
+.byte 102,15,56,222,216
+ pxor %xmm9,%xmm14
+ movdqa %xmm12,32(%rsp)
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+ pxor %xmm9,%xmm8
+ movdqa %xmm14,64(%rsp)
+.byte 102,15,56,222,240
+.byte 102,15,56,222,248
+ movups 64(%rbp),%xmm0
+ movdqa %xmm8,80(%rsp)
+ pshufd $0x5f,%xmm15,%xmm9
+ jmp .Lxts_dec_loop6
+.align 32
+.Lxts_dec_loop6:
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+.byte 102,15,56,222,241
+.byte 102,15,56,222,249
+ movups -64(%rcx,%rax,1),%xmm1
+ addq $32,%rax
+
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+.byte 102,15,56,222,240
+.byte 102,15,56,222,248
+ movups -80(%rcx,%rax,1),%xmm0
+ jnz .Lxts_dec_loop6
+
+ movdqa (%r8),%xmm8
+ movdqa %xmm9,%xmm14
+ paddd %xmm9,%xmm9
+.byte 102,15,56,222,209
+ paddq %xmm15,%xmm15
+ psrad $31,%xmm14
+.byte 102,15,56,222,217
+ pand %xmm8,%xmm14
+ movups (%rbp),%xmm10
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+.byte 102,15,56,222,241
+ pxor %xmm14,%xmm15
+ movaps %xmm10,%xmm11
+.byte 102,15,56,222,249
+ movups -64(%rcx),%xmm1
+
+ movdqa %xmm9,%xmm14
+.byte 102,15,56,222,208
+ paddd %xmm9,%xmm9
+ pxor %xmm15,%xmm10
+.byte 102,15,56,222,216
+ psrad $31,%xmm14
+ paddq %xmm15,%xmm15
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+ pand %xmm8,%xmm14
+ movaps %xmm11,%xmm12
+.byte 102,15,56,222,240
+ pxor %xmm14,%xmm15
+ movdqa %xmm9,%xmm14
+.byte 102,15,56,222,248
+ movups -48(%rcx),%xmm0
+
+ paddd %xmm9,%xmm9
+.byte 102,15,56,222,209
+ pxor %xmm15,%xmm11
+ psrad $31,%xmm14
+.byte 102,15,56,222,217
+ paddq %xmm15,%xmm15
+ pand %xmm8,%xmm14
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+ movdqa %xmm13,48(%rsp)
+ pxor %xmm14,%xmm15
+.byte 102,15,56,222,241
+ movaps %xmm12,%xmm13
+ movdqa %xmm9,%xmm14
+.byte 102,15,56,222,249
+ movups -32(%rcx),%xmm1
+
+ paddd %xmm9,%xmm9
+.byte 102,15,56,222,208
+ pxor %xmm15,%xmm12
+ psrad $31,%xmm14
+.byte 102,15,56,222,216
+ paddq %xmm15,%xmm15
+ pand %xmm8,%xmm14
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+.byte 102,15,56,222,240
+ pxor %xmm14,%xmm15
+ movaps %xmm13,%xmm14
+.byte 102,15,56,222,248
+
+ movdqa %xmm9,%xmm0
+ paddd %xmm9,%xmm9
+.byte 102,15,56,222,209
+ pxor %xmm15,%xmm13
+ psrad $31,%xmm0
+.byte 102,15,56,222,217
+ paddq %xmm15,%xmm15
+ pand %xmm8,%xmm0
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+ pxor %xmm0,%xmm15
+ movups (%rbp),%xmm0
+.byte 102,15,56,222,241
+.byte 102,15,56,222,249
+ movups 16(%rbp),%xmm1
+
+ pxor %xmm15,%xmm14
+.byte 102,15,56,223,84,36,0
+ psrad $31,%xmm9
+ paddq %xmm15,%xmm15
+.byte 102,15,56,223,92,36,16
+.byte 102,15,56,223,100,36,32
+ pand %xmm8,%xmm9
+ movq %r10,%rax
+.byte 102,15,56,223,108,36,48
+.byte 102,15,56,223,116,36,64
+.byte 102,15,56,223,124,36,80
+ pxor %xmm9,%xmm15
+
+ leaq 96(%rsi),%rsi
+ movups %xmm2,-96(%rsi)
+ movups %xmm3,-80(%rsi)
+ movups %xmm4,-64(%rsi)
+ movups %xmm5,-48(%rsi)
+ movups %xmm6,-32(%rsi)
+ movups %xmm7,-16(%rsi)
+ subq $96,%rdx
+ jnc .Lxts_dec_grandloop
+
+ movl $16+96,%eax
+ subl %r10d,%eax
+ movq %rbp,%rcx
+ shrl $4,%eax
+
+.Lxts_dec_short:
+
+ movl %eax,%r10d
+ pxor %xmm0,%xmm10
+ pxor %xmm0,%xmm11
+ addq $96,%rdx
+ jz .Lxts_dec_done
+
+ pxor %xmm0,%xmm12
+ cmpq $0x20,%rdx
+ jb .Lxts_dec_one
+ pxor %xmm0,%xmm13
+ je .Lxts_dec_two
+
+ pxor %xmm0,%xmm14
+ cmpq $0x40,%rdx
+ jb .Lxts_dec_three
+ je .Lxts_dec_four
+
+ movdqu (%rdi),%xmm2
+ movdqu 16(%rdi),%xmm3
+ movdqu 32(%rdi),%xmm4
+ pxor %xmm10,%xmm2
+ movdqu 48(%rdi),%xmm5
+ pxor %xmm11,%xmm3
+ movdqu 64(%rdi),%xmm6
+ leaq 80(%rdi),%rdi
+ pxor %xmm12,%xmm4
+ pxor %xmm13,%xmm5
+ pxor %xmm14,%xmm6
+
+ call _aesni_decrypt6
+
+ xorps %xmm10,%xmm2
+ xorps %xmm11,%xmm3
+ xorps %xmm12,%xmm4
+ movdqu %xmm2,(%rsi)
+ xorps %xmm13,%xmm5
+ movdqu %xmm3,16(%rsi)
+ xorps %xmm14,%xmm6
+ movdqu %xmm4,32(%rsi)
+ pxor %xmm14,%xmm14
+ movdqu %xmm5,48(%rsi)
+ pcmpgtd %xmm15,%xmm14
+ movdqu %xmm6,64(%rsi)
+ leaq 80(%rsi),%rsi
+ pshufd $0x13,%xmm14,%xmm11
+ andq $15,%r9
+ jz .Lxts_dec_ret
+
+ movdqa %xmm15,%xmm10
+ paddq %xmm15,%xmm15
+ pand %xmm8,%xmm11
+ pxor %xmm15,%xmm11
+ jmp .Lxts_dec_done2
+
+.align 16
+.Lxts_dec_one:
+ movups (%rdi),%xmm2
+ leaq 16(%rdi),%rdi
+ xorps %xmm10,%xmm2
+ movups (%rcx),%xmm0
+ movups 16(%rcx),%xmm1
+ leaq 32(%rcx),%rcx
+ xorps %xmm0,%xmm2
+.Loop_dec1_12:
+.byte 102,15,56,222,209
+ decl %eax
+ movups (%rcx),%xmm1
+ leaq 16(%rcx),%rcx
+ jnz .Loop_dec1_12
+.byte 102,15,56,223,209
+ xorps %xmm10,%xmm2
+ movdqa %xmm11,%xmm10
+ movups %xmm2,(%rsi)
+ movdqa %xmm12,%xmm11
+ leaq 16(%rsi),%rsi
+ jmp .Lxts_dec_done
+
+.align 16
+.Lxts_dec_two:
+ movups (%rdi),%xmm2
+ movups 16(%rdi),%xmm3
+ leaq 32(%rdi),%rdi
+ xorps %xmm10,%xmm2
+ xorps %xmm11,%xmm3
+
+ call _aesni_decrypt2
+
+ xorps %xmm10,%xmm2
+ movdqa %xmm12,%xmm10
+ xorps %xmm11,%xmm3
+ movdqa %xmm13,%xmm11
+ movups %xmm2,(%rsi)
+ movups %xmm3,16(%rsi)
+ leaq 32(%rsi),%rsi
+ jmp .Lxts_dec_done
+
+.align 16
+.Lxts_dec_three:
+ movups (%rdi),%xmm2
+ movups 16(%rdi),%xmm3
+ movups 32(%rdi),%xmm4
+ leaq 48(%rdi),%rdi
+ xorps %xmm10,%xmm2
+ xorps %xmm11,%xmm3
+ xorps %xmm12,%xmm4
+
+ call _aesni_decrypt3
+
+ xorps %xmm10,%xmm2
+ movdqa %xmm13,%xmm10
+ xorps %xmm11,%xmm3
+ movdqa %xmm14,%xmm11
+ xorps %xmm12,%xmm4
+ movups %xmm2,(%rsi)
+ movups %xmm3,16(%rsi)
+ movups %xmm4,32(%rsi)
+ leaq 48(%rsi),%rsi
+ jmp .Lxts_dec_done
+
+.align 16
+.Lxts_dec_four:
+ movups (%rdi),%xmm2
+ movups 16(%rdi),%xmm3
+ movups 32(%rdi),%xmm4
+ xorps %xmm10,%xmm2
+ movups 48(%rdi),%xmm5
+ leaq 64(%rdi),%rdi
+ xorps %xmm11,%xmm3
+ xorps %xmm12,%xmm4
+ xorps %xmm13,%xmm5
+
+ call _aesni_decrypt4
+
+ pxor %xmm10,%xmm2
+ movdqa %xmm14,%xmm10
+ pxor %xmm11,%xmm3
+ movdqa %xmm15,%xmm11
+ pxor %xmm12,%xmm4
+ movdqu %xmm2,(%rsi)
+ pxor %xmm13,%xmm5
+ movdqu %xmm3,16(%rsi)
+ movdqu %xmm4,32(%rsi)
+ movdqu %xmm5,48(%rsi)
+ leaq 64(%rsi),%rsi
+ jmp .Lxts_dec_done
+
+.align 16
+.Lxts_dec_done:
+ andq $15,%r9
+ jz .Lxts_dec_ret
+.Lxts_dec_done2:
+ movq %r9,%rdx
+ movq %rbp,%rcx
+ movl %r10d,%eax
+
+ movups (%rdi),%xmm2
+ xorps %xmm11,%xmm2
+ movups (%rcx),%xmm0
+ movups 16(%rcx),%xmm1
+ leaq 32(%rcx),%rcx
+ xorps %xmm0,%xmm2
+.Loop_dec1_13:
+.byte 102,15,56,222,209
+ decl %eax
+ movups (%rcx),%xmm1
+ leaq 16(%rcx),%rcx
+ jnz .Loop_dec1_13
+.byte 102,15,56,223,209
+ xorps %xmm11,%xmm2
+ movups %xmm2,(%rsi)
+
+.Lxts_dec_steal:
+ movzbl 16(%rdi),%eax
+ movzbl (%rsi),%ecx
+ leaq 1(%rdi),%rdi
+ movb %al,(%rsi)
+ movb %cl,16(%rsi)
+ leaq 1(%rsi),%rsi
+ subq $1,%rdx
+ jnz .Lxts_dec_steal
+
+ subq %r9,%rsi
+ movq %rbp,%rcx
+ movl %r10d,%eax
+
+ movups (%rsi),%xmm2
+ xorps %xmm10,%xmm2
+ movups (%rcx),%xmm0
+ movups 16(%rcx),%xmm1
+ leaq 32(%rcx),%rcx
+ xorps %xmm0,%xmm2
+.Loop_dec1_14:
+.byte 102,15,56,222,209
+ decl %eax
+ movups (%rcx),%xmm1
+ leaq 16(%rcx),%rcx
+ jnz .Loop_dec1_14
+.byte 102,15,56,223,209
+ xorps %xmm10,%xmm2
+ movups %xmm2,(%rsi)
+
+.Lxts_dec_ret:
+ xorps %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ pxor %xmm2,%xmm2
+ pxor %xmm3,%xmm3
+ pxor %xmm4,%xmm4
+ pxor %xmm5,%xmm5
+ pxor %xmm6,%xmm6
+ pxor %xmm7,%xmm7
+ movaps %xmm0,0(%rsp)
+ pxor %xmm8,%xmm8
+ movaps %xmm0,16(%rsp)
+ pxor %xmm9,%xmm9
+ movaps %xmm0,32(%rsp)
+ pxor %xmm10,%xmm10
+ movaps %xmm0,48(%rsp)
+ pxor %xmm11,%xmm11
+ movaps %xmm0,64(%rsp)
+ pxor %xmm12,%xmm12
+ movaps %xmm0,80(%rsp)
+ pxor %xmm13,%xmm13
+ movaps %xmm0,96(%rsp)
+ pxor %xmm14,%xmm14
+ pxor %xmm15,%xmm15
+ movq -8(%r11),%rbp
+.cfi_restore %rbp
+ leaq (%r11),%rsp
+.cfi_def_cfa_register %rsp
+.Lxts_dec_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_xts_decrypt,.-aesni_xts_decrypt
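+#
+# aesni_ocb_encrypt: OCB mode.  %r8 carries the starting block number
+# (its trailing-zero counts index the L table at (%rbx)), %r9 points at
+# the running offset and (%rbp) at the running checksum; the table and
+# checksum pointers arrive on the stack.  Plaintext is folded into the
+# checksum in %xmm8 before each block is encrypted.
+#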
+.globl aesni_ocb_encrypt
+.type aesni_ocb_encrypt,@function
+.align 32
+aesni_ocb_encrypt:
+.cfi_startproc
+ leaq (%rsp),%rax
+ pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-24
+ pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
+ movq 8(%rax),%rbx
+ movq 8+8(%rax),%rbp
+
+ movl 240(%rcx),%r10d
+ movq %rcx,%r11
+ shll $4,%r10d
+ movups (%rcx),%xmm9
+ movups 16(%rcx,%r10,1),%xmm1
+
+ movdqu (%r9),%xmm15
+ pxor %xmm1,%xmm9
+ pxor %xmm1,%xmm15
+
+ movl $16+32,%eax
+ leaq 32(%r11,%r10,1),%rcx
+ movups 16(%r11),%xmm1
+ subq %r10,%rax
+ movq %rax,%r10
+
+ movdqu (%rbx),%xmm10
+ movdqu (%rbp),%xmm8
+
+ testq $1,%r8
+ jnz .Locb_enc_odd
+
+ bsfq %r8,%r12
+ addq $1,%r8
+ shlq $4,%r12
+ movdqu (%rbx,%r12,1),%xmm7
+ movdqu (%rdi),%xmm2
+ leaq 16(%rdi),%rdi
+
+ call __ocb_encrypt1
+
+ movdqa %xmm7,%xmm15
+ movups %xmm2,(%rsi)
+ leaq 16(%rsi),%rsi
+ subq $1,%rdx
+ jz .Locb_enc_done
+
+.Locb_enc_odd:
+ leaq 1(%r8),%r12
+ leaq 3(%r8),%r13
+ leaq 5(%r8),%r14
+ leaq 6(%r8),%r8
+ bsfq %r12,%r12
+ bsfq %r13,%r13
+ bsfq %r14,%r14
+ shlq $4,%r12
+ shlq $4,%r13
+ shlq $4,%r14
+
+ subq $6,%rdx
+ jc .Locb_enc_short
+ jmp .Locb_enc_grandloop
+
+.align 32
+.Locb_enc_grandloop:
+ movdqu 0(%rdi),%xmm2
+ movdqu 16(%rdi),%xmm3
+ movdqu 32(%rdi),%xmm4
+ movdqu 48(%rdi),%xmm5
+ movdqu 64(%rdi),%xmm6
+ movdqu 80(%rdi),%xmm7
+ leaq 96(%rdi),%rdi
+
+ call __ocb_encrypt6
+
+ movups %xmm2,0(%rsi)
+ movups %xmm3,16(%rsi)
+ movups %xmm4,32(%rsi)
+ movups %xmm5,48(%rsi)
+ movups %xmm6,64(%rsi)
+ movups %xmm7,80(%rsi)
+ leaq 96(%rsi),%rsi
+ subq $6,%rdx
+ jnc .Locb_enc_grandloop
+
+.Locb_enc_short:
+ addq $6,%rdx
+ jz .Locb_enc_done
+
+ movdqu 0(%rdi),%xmm2
+ cmpq $2,%rdx
+ jb .Locb_enc_one
+ movdqu 16(%rdi),%xmm3
+ je .Locb_enc_two
+
+ movdqu 32(%rdi),%xmm4
+ cmpq $4,%rdx
+ jb .Locb_enc_three
+ movdqu 48(%rdi),%xmm5
+ je .Locb_enc_four
+
+ movdqu 64(%rdi),%xmm6
+ pxor %xmm7,%xmm7
+
+ call __ocb_encrypt6
+
+ movdqa %xmm14,%xmm15
+ movups %xmm2,0(%rsi)
+ movups %xmm3,16(%rsi)
+ movups %xmm4,32(%rsi)
+ movups %xmm5,48(%rsi)
+ movups %xmm6,64(%rsi)
+
+ jmp .Locb_enc_done
+
+.align 16
+.Locb_enc_one:
+ movdqa %xmm10,%xmm7
+
+ call __ocb_encrypt1
+
+ movdqa %xmm7,%xmm15
+ movups %xmm2,0(%rsi)
+ jmp .Locb_enc_done
+
+.align 16
+.Locb_enc_two:
+ pxor %xmm4,%xmm4
+ pxor %xmm5,%xmm5
+
+ call __ocb_encrypt4
+
+ movdqa %xmm11,%xmm15
+ movups %xmm2,0(%rsi)
+ movups %xmm3,16(%rsi)
+
+ jmp .Locb_enc_done
+
+.align 16
+.Locb_enc_three:
+ pxor %xmm5,%xmm5
+
+ call __ocb_encrypt4
+
+ movdqa %xmm12,%xmm15
+ movups %xmm2,0(%rsi)
+ movups %xmm3,16(%rsi)
+ movups %xmm4,32(%rsi)
+
+ jmp .Locb_enc_done
+
+.align 16
+.Locb_enc_four:
+ call __ocb_encrypt4
+
+ movdqa %xmm13,%xmm15
+ movups %xmm2,0(%rsi)
+ movups %xmm3,16(%rsi)
+ movups %xmm4,32(%rsi)
+ movups %xmm5,48(%rsi)
+
+.Locb_enc_done:
+ pxor %xmm0,%xmm15
+ movdqu %xmm8,(%rbp)
+ movdqu %xmm15,(%r9)
+
+ xorps %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ pxor %xmm2,%xmm2
+ pxor %xmm3,%xmm3
+ pxor %xmm4,%xmm4
+ pxor %xmm5,%xmm5
+ pxor %xmm6,%xmm6
+ pxor %xmm7,%xmm7
+ pxor %xmm8,%xmm8
+ pxor %xmm9,%xmm9
+ pxor %xmm10,%xmm10
+ pxor %xmm11,%xmm11
+ pxor %xmm12,%xmm12
+ pxor %xmm13,%xmm13
+ pxor %xmm14,%xmm14
+ pxor %xmm15,%xmm15
+ leaq 40(%rsp),%rax
+.cfi_def_cfa %rax,8
+ movq -40(%rax),%r14
+.cfi_restore %r14
+ movq -32(%rax),%r13
+.cfi_restore %r13
+ movq -24(%rax),%r12
+.cfi_restore %r12
+ movq -16(%rax),%rbp
+.cfi_restore %rbp
+ movq -8(%rax),%rbx
+.cfi_restore %rbx
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
+.Locb_enc_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_ocb_encrypt,.-aesni_ocb_encrypt
+
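+#
+# __ocb_encrypt6/4/1: internal helpers that derive the next per-block
+# offsets from the L table, XOR the offsets into the blocks, accumulate
+# the plaintext checksum in %xmm8, and run the blocks through the AES
+# rounds.
+#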
+.type __ocb_encrypt6,@function
+.align 32
+__ocb_encrypt6:
+.cfi_startproc
+ pxor %xmm9,%xmm15
+ movdqu (%rbx,%r12,1),%xmm11
+ movdqa %xmm10,%xmm12
+ movdqu (%rbx,%r13,1),%xmm13
+ movdqa %xmm10,%xmm14
+ pxor %xmm15,%xmm10
+ movdqu (%rbx,%r14,1),%xmm15
+ pxor %xmm10,%xmm11
+ pxor %xmm2,%xmm8
+ pxor %xmm10,%xmm2
+ pxor %xmm11,%xmm12
+ pxor %xmm3,%xmm8
+ pxor %xmm11,%xmm3
+ pxor %xmm12,%xmm13
+ pxor %xmm4,%xmm8
+ pxor %xmm12,%xmm4
+ pxor %xmm13,%xmm14
+ pxor %xmm5,%xmm8
+ pxor %xmm13,%xmm5
+ pxor %xmm14,%xmm15
+ pxor %xmm6,%xmm8
+ pxor %xmm14,%xmm6
+ pxor %xmm7,%xmm8
+ pxor %xmm15,%xmm7
+ movups 32(%r11),%xmm0
+
+ leaq 1(%r8),%r12
+ leaq 3(%r8),%r13
+ leaq 5(%r8),%r14
+ addq $6,%r8
+ pxor %xmm9,%xmm10
+ bsfq %r12,%r12
+ bsfq %r13,%r13
+ bsfq %r14,%r14
+
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+ pxor %xmm9,%xmm11
+ pxor %xmm9,%xmm12
+.byte 102,15,56,220,241
+ pxor %xmm9,%xmm13
+ pxor %xmm9,%xmm14
+.byte 102,15,56,220,249
+ movups 48(%r11),%xmm1
+ pxor %xmm9,%xmm15
+
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+.byte 102,15,56,220,240
+.byte 102,15,56,220,248
+ movups 64(%r11),%xmm0
+ shlq $4,%r12
+ shlq $4,%r13
+ jmp .Locb_enc_loop6
+
+.align 32
+.Locb_enc_loop6:
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+.byte 102,15,56,220,241
+.byte 102,15,56,220,249
+ movups (%rcx,%rax,1),%xmm1
+ addq $32,%rax
+
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+.byte 102,15,56,220,240
+.byte 102,15,56,220,248
+ movups -16(%rcx,%rax,1),%xmm0
+ jnz .Locb_enc_loop6
+
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+.byte 102,15,56,220,241
+.byte 102,15,56,220,249
+ movups 16(%r11),%xmm1
+ shlq $4,%r14
+
+.byte 102,65,15,56,221,210
+ movdqu (%rbx),%xmm10
+ movq %r10,%rax
+.byte 102,65,15,56,221,219
+.byte 102,65,15,56,221,228
+.byte 102,65,15,56,221,237
+.byte 102,65,15,56,221,246
+.byte 102,65,15,56,221,255
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size __ocb_encrypt6,.-__ocb_encrypt6
+
+.type __ocb_encrypt4,@function
+.align 32
+__ocb_encrypt4:
+.cfi_startproc
+ pxor %xmm9,%xmm15
+ movdqu (%rbx,%r12,1),%xmm11
+ movdqa %xmm10,%xmm12
+ movdqu (%rbx,%r13,1),%xmm13
+ pxor %xmm15,%xmm10
+ pxor %xmm10,%xmm11
+ pxor %xmm2,%xmm8
+ pxor %xmm10,%xmm2
+ pxor %xmm11,%xmm12
+ pxor %xmm3,%xmm8
+ pxor %xmm11,%xmm3
+ pxor %xmm12,%xmm13
+ pxor %xmm4,%xmm8
+ pxor %xmm12,%xmm4
+ pxor %xmm5,%xmm8
+ pxor %xmm13,%xmm5
+ movups 32(%r11),%xmm0
+
+ pxor %xmm9,%xmm10
+ pxor %xmm9,%xmm11
+ pxor %xmm9,%xmm12
+ pxor %xmm9,%xmm13
+
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+ movups 48(%r11),%xmm1
+
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+ movups 64(%r11),%xmm0
+ jmp .Locb_enc_loop4
+
+.align 32
+.Locb_enc_loop4:
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+ movups (%rcx,%rax,1),%xmm1
+ addq $32,%rax
+
+.byte 102,15,56,220,208
+.byte 102,15,56,220,216
+.byte 102,15,56,220,224
+.byte 102,15,56,220,232
+ movups -16(%rcx,%rax,1),%xmm0
+ jnz .Locb_enc_loop4
+
+.byte 102,15,56,220,209
+.byte 102,15,56,220,217
+.byte 102,15,56,220,225
+.byte 102,15,56,220,233
+ movups 16(%r11),%xmm1
+ movq %r10,%rax
+
+.byte 102,65,15,56,221,210
+.byte 102,65,15,56,221,219
+.byte 102,65,15,56,221,228
+.byte 102,65,15,56,221,237
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size __ocb_encrypt4,.-__ocb_encrypt4
+
+.type __ocb_encrypt1,@function
+.align 32
+__ocb_encrypt1:
+.cfi_startproc
+ pxor %xmm15,%xmm7
+ pxor %xmm9,%xmm7
+ pxor %xmm2,%xmm8
+ pxor %xmm7,%xmm2
+ movups 32(%r11),%xmm0
+
+.byte 102,15,56,220,209
+ movups 48(%r11),%xmm1
+ pxor %xmm9,%xmm7
+
+.byte 102,15,56,220,208
+ movups 64(%r11),%xmm0
+ jmp .Locb_enc_loop1
+
+.align 32
+.Locb_enc_loop1:
+.byte 102,15,56,220,209
+ movups (%rcx,%rax,1),%xmm1
+ addq $32,%rax
+
+.byte 102,15,56,220,208
+ movups -16(%rcx,%rax,1),%xmm0
+ jnz .Locb_enc_loop1
+
+.byte 102,15,56,220,209
+ movups 16(%r11),%xmm1
+ movq %r10,%rax
+
+.byte 102,15,56,221,215
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size __ocb_encrypt1,.-__ocb_encrypt1
+
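+#
+# aesni_ocb_decrypt: mirrors aesni_ocb_encrypt, except that the checksum
+# in %xmm8 is accumulated over the decrypted plaintext after each block
+# has been written out.
+#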
+.globl aesni_ocb_decrypt
+.type aesni_ocb_decrypt,@function
+.align 32
+aesni_ocb_decrypt:
+.cfi_startproc
+ leaq (%rsp),%rax
+ pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-24
+ pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
+ movq 8(%rax),%rbx
+ movq 8+8(%rax),%rbp
+
+ movl 240(%rcx),%r10d
+ movq %rcx,%r11
+ shll $4,%r10d
+ movups (%rcx),%xmm9
+ movups 16(%rcx,%r10,1),%xmm1
+
+ movdqu (%r9),%xmm15
+ pxor %xmm1,%xmm9
+ pxor %xmm1,%xmm15
+
+ movl $16+32,%eax
+ leaq 32(%r11,%r10,1),%rcx
+ movups 16(%r11),%xmm1
+ subq %r10,%rax
+ movq %rax,%r10
+
+ movdqu (%rbx),%xmm10
+ movdqu (%rbp),%xmm8
+
+ testq $1,%r8
+ jnz .Locb_dec_odd
+
+ bsfq %r8,%r12
+ addq $1,%r8
+ shlq $4,%r12
+ movdqu (%rbx,%r12,1),%xmm7
+ movdqu (%rdi),%xmm2
+ leaq 16(%rdi),%rdi
+
+ call __ocb_decrypt1
+
+ movdqa %xmm7,%xmm15
+ movups %xmm2,(%rsi)
+ xorps %xmm2,%xmm8
+ leaq 16(%rsi),%rsi
+ subq $1,%rdx
+ jz .Locb_dec_done
+
+.Locb_dec_odd:
+ leaq 1(%r8),%r12
+ leaq 3(%r8),%r13
+ leaq 5(%r8),%r14
+ leaq 6(%r8),%r8
+ bsfq %r12,%r12
+ bsfq %r13,%r13
+ bsfq %r14,%r14
+ shlq $4,%r12
+ shlq $4,%r13
+ shlq $4,%r14
+
+ subq $6,%rdx
+ jc .Locb_dec_short
+ jmp .Locb_dec_grandloop
+
+.align 32
+.Locb_dec_grandloop:
+ movdqu 0(%rdi),%xmm2
+ movdqu 16(%rdi),%xmm3
+ movdqu 32(%rdi),%xmm4
+ movdqu 48(%rdi),%xmm5
+ movdqu 64(%rdi),%xmm6
+ movdqu 80(%rdi),%xmm7
+ leaq 96(%rdi),%rdi
+
+ call __ocb_decrypt6
+
+ movups %xmm2,0(%rsi)
+ pxor %xmm2,%xmm8
+ movups %xmm3,16(%rsi)
+ pxor %xmm3,%xmm8
+ movups %xmm4,32(%rsi)
+ pxor %xmm4,%xmm8
+ movups %xmm5,48(%rsi)
+ pxor %xmm5,%xmm8
+ movups %xmm6,64(%rsi)
+ pxor %xmm6,%xmm8
+ movups %xmm7,80(%rsi)
+ pxor %xmm7,%xmm8
+ leaq 96(%rsi),%rsi
+ subq $6,%rdx
+ jnc .Locb_dec_grandloop
+
+.Locb_dec_short:
+ addq $6,%rdx
+ jz .Locb_dec_done
+
+ movdqu 0(%rdi),%xmm2
+ cmpq $2,%rdx
+ jb .Locb_dec_one
+ movdqu 16(%rdi),%xmm3
+ je .Locb_dec_two
+
+ movdqu 32(%rdi),%xmm4
+ cmpq $4,%rdx
+ jb .Locb_dec_three
+ movdqu 48(%rdi),%xmm5
+ je .Locb_dec_four
+
+ movdqu 64(%rdi),%xmm6
+ pxor %xmm7,%xmm7
+
+ call __ocb_decrypt6
+
+ movdqa %xmm14,%xmm15
+ movups %xmm2,0(%rsi)
+ pxor %xmm2,%xmm8
+ movups %xmm3,16(%rsi)
+ pxor %xmm3,%xmm8
+ movups %xmm4,32(%rsi)
+ pxor %xmm4,%xmm8
+ movups %xmm5,48(%rsi)
+ pxor %xmm5,%xmm8
+ movups %xmm6,64(%rsi)
+ pxor %xmm6,%xmm8
+
+ jmp .Locb_dec_done
+
+.align 16
+.Locb_dec_one:
+ movdqa %xmm10,%xmm7
+
+ call __ocb_decrypt1
+
+ movdqa %xmm7,%xmm15
+ movups %xmm2,0(%rsi)
+ xorps %xmm2,%xmm8
+ jmp .Locb_dec_done
+
+.align 16
+.Locb_dec_two:
+ pxor %xmm4,%xmm4
+ pxor %xmm5,%xmm5
+
+ call __ocb_decrypt4
+
+ movdqa %xmm11,%xmm15
+ movups %xmm2,0(%rsi)
+ xorps %xmm2,%xmm8
+ movups %xmm3,16(%rsi)
+ xorps %xmm3,%xmm8
+
+ jmp .Locb_dec_done
+
+.align 16
+.Locb_dec_three:
+ pxor %xmm5,%xmm5
+
+ call __ocb_decrypt4
+
+ movdqa %xmm12,%xmm15
+ movups %xmm2,0(%rsi)
+ xorps %xmm2,%xmm8
+ movups %xmm3,16(%rsi)
+ xorps %xmm3,%xmm8
+ movups %xmm4,32(%rsi)
+ xorps %xmm4,%xmm8
+
+ jmp .Locb_dec_done
+
+.align 16
+.Locb_dec_four:
+ call __ocb_decrypt4
+
+ movdqa %xmm13,%xmm15
+ movups %xmm2,0(%rsi)
+ pxor %xmm2,%xmm8
+ movups %xmm3,16(%rsi)
+ pxor %xmm3,%xmm8
+ movups %xmm4,32(%rsi)
+ pxor %xmm4,%xmm8
+ movups %xmm5,48(%rsi)
+ pxor %xmm5,%xmm8
+
+.Locb_dec_done:
+ pxor %xmm0,%xmm15
+ movdqu %xmm8,(%rbp)
+ movdqu %xmm15,(%r9)
+
+ xorps %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ pxor %xmm2,%xmm2
+ pxor %xmm3,%xmm3
+ pxor %xmm4,%xmm4
+ pxor %xmm5,%xmm5
+ pxor %xmm6,%xmm6
+ pxor %xmm7,%xmm7
+ pxor %xmm8,%xmm8
+ pxor %xmm9,%xmm9
+ pxor %xmm10,%xmm10
+ pxor %xmm11,%xmm11
+ pxor %xmm12,%xmm12
+ pxor %xmm13,%xmm13
+ pxor %xmm14,%xmm14
+ pxor %xmm15,%xmm15
+ leaq 40(%rsp),%rax
+.cfi_def_cfa %rax,8
+ movq -40(%rax),%r14
+.cfi_restore %r14
+ movq -32(%rax),%r13
+.cfi_restore %r13
+ movq -24(%rax),%r12
+.cfi_restore %r12
+ movq -16(%rax),%rbp
+.cfi_restore %rbp
+ movq -8(%rax),%rbx
+.cfi_restore %rbx
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
+.Locb_dec_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_ocb_decrypt,.-aesni_ocb_decrypt
+
+.type __ocb_decrypt6,@function
+.align 32
+__ocb_decrypt6:
+.cfi_startproc
+ pxor %xmm9,%xmm15
+ movdqu (%rbx,%r12,1),%xmm11
+ movdqa %xmm10,%xmm12
+ movdqu (%rbx,%r13,1),%xmm13
+ movdqa %xmm10,%xmm14
+ pxor %xmm15,%xmm10
+ movdqu (%rbx,%r14,1),%xmm15
+ pxor %xmm10,%xmm11
+ pxor %xmm10,%xmm2
+ pxor %xmm11,%xmm12
+ pxor %xmm11,%xmm3
+ pxor %xmm12,%xmm13
+ pxor %xmm12,%xmm4
+ pxor %xmm13,%xmm14
+ pxor %xmm13,%xmm5
+ pxor %xmm14,%xmm15
+ pxor %xmm14,%xmm6
+ pxor %xmm15,%xmm7
+ movups 32(%r11),%xmm0
+
+ leaq 1(%r8),%r12
+ leaq 3(%r8),%r13
+ leaq 5(%r8),%r14
+ addq $6,%r8
+ pxor %xmm9,%xmm10
+ bsfq %r12,%r12
+ bsfq %r13,%r13
+ bsfq %r14,%r14
+
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+ pxor %xmm9,%xmm11
+ pxor %xmm9,%xmm12
+.byte 102,15,56,222,241
+ pxor %xmm9,%xmm13
+ pxor %xmm9,%xmm14
+.byte 102,15,56,222,249
+ movups 48(%r11),%xmm1
+ pxor %xmm9,%xmm15
+
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+.byte 102,15,56,222,240
+.byte 102,15,56,222,248
+ movups 64(%r11),%xmm0
+ shlq $4,%r12
+ shlq $4,%r13
+ jmp .Locb_dec_loop6
+
+.align 32
+.Locb_dec_loop6:
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+.byte 102,15,56,222,241
+.byte 102,15,56,222,249
+ movups (%rcx,%rax,1),%xmm1
+ addq $32,%rax
+
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+.byte 102,15,56,222,240
+.byte 102,15,56,222,248
+ movups -16(%rcx,%rax,1),%xmm0
+ jnz .Locb_dec_loop6
+
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+.byte 102,15,56,222,241
+.byte 102,15,56,222,249
+ movups 16(%r11),%xmm1
+ shlq $4,%r14
+
+.byte 102,65,15,56,223,210
+ movdqu (%rbx),%xmm10
+ movq %r10,%rax
+.byte 102,65,15,56,223,219
+.byte 102,65,15,56,223,228
+.byte 102,65,15,56,223,237
+.byte 102,65,15,56,223,246
+.byte 102,65,15,56,223,255
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size __ocb_decrypt6,.-__ocb_decrypt6
+
+.type __ocb_decrypt4,@function
+.align 32
+__ocb_decrypt4:
+.cfi_startproc
+ pxor %xmm9,%xmm15
+ movdqu (%rbx,%r12,1),%xmm11
+ movdqa %xmm10,%xmm12
+ movdqu (%rbx,%r13,1),%xmm13
+ pxor %xmm15,%xmm10
+ pxor %xmm10,%xmm11
+ pxor %xmm10,%xmm2
+ pxor %xmm11,%xmm12
+ pxor %xmm11,%xmm3
+ pxor %xmm12,%xmm13
+ pxor %xmm12,%xmm4
+ pxor %xmm13,%xmm5
+ movups 32(%r11),%xmm0
+
+ pxor %xmm9,%xmm10
+ pxor %xmm9,%xmm11
+ pxor %xmm9,%xmm12
+ pxor %xmm9,%xmm13
+
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+ movups 48(%r11),%xmm1
+
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+ movups 64(%r11),%xmm0
+ jmp .Locb_dec_loop4
+
+.align 32
+.Locb_dec_loop4:
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+ movups (%rcx,%rax,1),%xmm1
+ addq $32,%rax
+
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+ movups -16(%rcx,%rax,1),%xmm0
+ jnz .Locb_dec_loop4
+
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+ movups 16(%r11),%xmm1
+ movq %r10,%rax
+
+.byte 102,65,15,56,223,210
+.byte 102,65,15,56,223,219
+.byte 102,65,15,56,223,228
+.byte 102,65,15,56,223,237
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size __ocb_decrypt4,.-__ocb_decrypt4
+
+.type __ocb_decrypt1,@function
+.align 32
+__ocb_decrypt1:
+.cfi_startproc
+ pxor %xmm15,%xmm7
+ pxor %xmm9,%xmm7
+ pxor %xmm7,%xmm2
+ movups 32(%r11),%xmm0
+
+.byte 102,15,56,222,209
+ movups 48(%r11),%xmm1
+ pxor %xmm9,%xmm7
+
+.byte 102,15,56,222,208
+ movups 64(%r11),%xmm0
+ jmp .Locb_dec_loop1
+
+.align 32
+.Locb_dec_loop1:
+.byte 102,15,56,222,209
+ movups (%rcx,%rax,1),%xmm1
+ addq $32,%rax
+
+.byte 102,15,56,222,208
+ movups -16(%rcx,%rax,1),%xmm0
+ jnz .Locb_dec_loop1
+
+.byte 102,15,56,222,209
+ movups 16(%r11),%xmm1
+ movq %r10,%rax
+
+.byte 102,15,56,223,215
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size __ocb_decrypt1,.-__ocb_decrypt1
+.globl aesni_cbc_encrypt
+.type aesni_cbc_encrypt,@function
+.align 16
+aesni_cbc_encrypt:
+.cfi_startproc
+ testq %rdx,%rdx
+ jz .Lcbc_ret
+
+ movl 240(%rcx),%r10d
+ movq %rcx,%r11
+ testl %r9d,%r9d
+ jz .Lcbc_decrypt
+
+ movups (%r8),%xmm2
+ movl %r10d,%eax
+ cmpq $16,%rdx
+ jb .Lcbc_enc_tail
+ subq $16,%rdx
+ jmp .Lcbc_enc_loop
+.align 16
+.Lcbc_enc_loop:
+ movups (%rdi),%xmm3
+ leaq 16(%rdi),%rdi
+
+ movups (%rcx),%xmm0
+ movups 16(%rcx),%xmm1
+ xorps %xmm0,%xmm3
+ leaq 32(%rcx),%rcx
+ xorps %xmm3,%xmm2
+.Loop_enc1_15:
+.byte 102,15,56,220,209
+ decl %eax
+ movups (%rcx),%xmm1
+ leaq 16(%rcx),%rcx
+ jnz .Loop_enc1_15
+.byte 102,15,56,221,209
+ movl %r10d,%eax
+ movq %r11,%rcx
+ movups %xmm2,0(%rsi)
+ leaq 16(%rsi),%rsi
+ subq $16,%rdx
+ jnc .Lcbc_enc_loop
+ addq $16,%rdx
+ jnz .Lcbc_enc_tail
+ pxor %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ movups %xmm2,(%r8)
+ pxor %xmm2,%xmm2
+ pxor %xmm3,%xmm3
+ jmp .Lcbc_ret
+
+.Lcbc_enc_tail:
+ movq %rdx,%rcx
+ xchgq %rdi,%rsi
+.long 0x9066A4F3
+ movl $16,%ecx
+ subq %rdx,%rcx
+ xorl %eax,%eax
+.long 0x9066AAF3
+ leaq -16(%rdi),%rdi
+ movl %r10d,%eax
+ movq %rdi,%rsi
+ movq %r11,%rcx
+ xorq %rdx,%rdx
+ jmp .Lcbc_enc_loop
+
+.align 16
+.Lcbc_decrypt:
+ cmpq $16,%rdx
+ jne .Lcbc_decrypt_bulk
+
+
+
+ movdqu (%rdi),%xmm2
+ movdqu (%r8),%xmm3
+ movdqa %xmm2,%xmm4
+ movups (%rcx),%xmm0
+ movups 16(%rcx),%xmm1
+ leaq 32(%rcx),%rcx
+ xorps %xmm0,%xmm2
+.Loop_dec1_16:
+.byte 102,15,56,222,209
+ decl %r10d
+ movups (%rcx),%xmm1
+ leaq 16(%rcx),%rcx
+ jnz .Loop_dec1_16
+.byte 102,15,56,223,209
+ pxor %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ movdqu %xmm4,(%r8)
+ xorps %xmm3,%xmm2
+ pxor %xmm3,%xmm3
+ movups %xmm2,(%rsi)
+ pxor %xmm2,%xmm2
+ jmp .Lcbc_ret
+.align 16
+.Lcbc_decrypt_bulk:
+ leaq (%rsp),%r11
+.cfi_def_cfa_register %r11
+ pushq %rbp
+.cfi_offset %rbp,-16
+ subq $16,%rsp
+ andq $-16,%rsp
+ movq %rcx,%rbp
+ movups (%r8),%xmm10
+ movl %r10d,%eax
+ cmpq $0x50,%rdx
+ jbe .Lcbc_dec_tail
+
+ movups (%rcx),%xmm0
+ movdqu 0(%rdi),%xmm2
+ movdqu 16(%rdi),%xmm3
+ movdqa %xmm2,%xmm11
+ movdqu 32(%rdi),%xmm4
+ movdqa %xmm3,%xmm12
+ movdqu 48(%rdi),%xmm5
+ movdqa %xmm4,%xmm13
+ movdqu 64(%rdi),%xmm6
+ movdqa %xmm5,%xmm14
+ movdqu 80(%rdi),%xmm7
+ movdqa %xmm6,%xmm15
+ movl OPENSSL_ia32cap_P+4(%rip),%r9d
+ cmpq $0x70,%rdx
+ jbe .Lcbc_dec_six_or_seven
+
+ andl $71303168,%r9d
+ subq $0x50,%rdx
+ cmpl $4194304,%r9d
+ je .Lcbc_dec_loop6_enter
+ subq $0x20,%rdx
+ leaq 112(%rcx),%rcx
+ jmp .Lcbc_dec_loop8_enter
+.align 16
+.Lcbc_dec_loop8:
+ movups %xmm9,(%rsi)
+ leaq 16(%rsi),%rsi
+.Lcbc_dec_loop8_enter:
+ movdqu 96(%rdi),%xmm8
+ pxor %xmm0,%xmm2
+ movdqu 112(%rdi),%xmm9
+ pxor %xmm0,%xmm3
+ movups 16-112(%rcx),%xmm1
+ pxor %xmm0,%xmm4
+ movq $-1,%rbp
+ cmpq $0x70,%rdx
+ pxor %xmm0,%xmm5
+ pxor %xmm0,%xmm6
+ pxor %xmm0,%xmm7
+ pxor %xmm0,%xmm8
+
+.byte 102,15,56,222,209
+ pxor %xmm0,%xmm9
+ movups 32-112(%rcx),%xmm0
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+.byte 102,15,56,222,241
+.byte 102,15,56,222,249
+.byte 102,68,15,56,222,193
+ adcq $0,%rbp
+ andq $128,%rbp
+.byte 102,68,15,56,222,201
+ addq %rdi,%rbp
+ movups 48-112(%rcx),%xmm1
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+.byte 102,15,56,222,240
+.byte 102,15,56,222,248
+.byte 102,68,15,56,222,192
+.byte 102,68,15,56,222,200
+ movups 64-112(%rcx),%xmm0
+ nop
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+.byte 102,15,56,222,241
+.byte 102,15,56,222,249
+.byte 102,68,15,56,222,193
+.byte 102,68,15,56,222,201
+ movups 80-112(%rcx),%xmm1
+ nop
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+.byte 102,15,56,222,240
+.byte 102,15,56,222,248
+.byte 102,68,15,56,222,192
+.byte 102,68,15,56,222,200
+ movups 96-112(%rcx),%xmm0
+ nop
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+.byte 102,15,56,222,241
+.byte 102,15,56,222,249
+.byte 102,68,15,56,222,193
+.byte 102,68,15,56,222,201
+ movups 112-112(%rcx),%xmm1
+ nop
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+.byte 102,15,56,222,240
+.byte 102,15,56,222,248
+.byte 102,68,15,56,222,192
+.byte 102,68,15,56,222,200
+ movups 128-112(%rcx),%xmm0
+ nop
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+.byte 102,15,56,222,241
+.byte 102,15,56,222,249
+.byte 102,68,15,56,222,193
+.byte 102,68,15,56,222,201
+ movups 144-112(%rcx),%xmm1
+ cmpl $11,%eax
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+.byte 102,15,56,222,240
+.byte 102,15,56,222,248
+.byte 102,68,15,56,222,192
+.byte 102,68,15,56,222,200
+ movups 160-112(%rcx),%xmm0
+ jb .Lcbc_dec_done
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+.byte 102,15,56,222,241
+.byte 102,15,56,222,249
+.byte 102,68,15,56,222,193
+.byte 102,68,15,56,222,201
+ movups 176-112(%rcx),%xmm1
+ nop
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+.byte 102,15,56,222,240
+.byte 102,15,56,222,248
+.byte 102,68,15,56,222,192
+.byte 102,68,15,56,222,200
+ movups 192-112(%rcx),%xmm0
+ je .Lcbc_dec_done
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+.byte 102,15,56,222,241
+.byte 102,15,56,222,249
+.byte 102,68,15,56,222,193
+.byte 102,68,15,56,222,201
+ movups 208-112(%rcx),%xmm1
+ nop
+.byte 102,15,56,222,208
+.byte 102,15,56,222,216
+.byte 102,15,56,222,224
+.byte 102,15,56,222,232
+.byte 102,15,56,222,240
+.byte 102,15,56,222,248
+.byte 102,68,15,56,222,192
+.byte 102,68,15,56,222,200
+ movups 224-112(%rcx),%xmm0
+ jmp .Lcbc_dec_done
+.align 16
+.Lcbc_dec_done:
+.byte 102,15,56,222,209
+.byte 102,15,56,222,217
+ pxor %xmm0,%xmm10
+ pxor %xmm0,%xmm11
+.byte 102,15,56,222,225
+.byte 102,15,56,222,233
+ pxor %xmm0,%xmm12
+ pxor %xmm0,%xmm13
+.byte 102,15,56,222,241
+.byte 102,15,56,222,249
+ pxor %xmm0,%xmm14
+ pxor %xmm0,%xmm15
+.byte 102,68,15,56,222,193
+.byte 102,68,15,56,222,201
+ movdqu 80(%rdi),%xmm1
+
+.byte 102,65,15,56,223,210
+ movdqu 96(%rdi),%xmm10
+ pxor %xmm0,%xmm1
+.byte 102,65,15,56,223,219
+ pxor %xmm0,%xmm10
+ movdqu 112(%rdi),%xmm0
+.byte 102,65,15,56,223,228
+ leaq 128(%rdi),%rdi
+ movdqu 0(%rbp),%xmm11
+.byte 102,65,15,56,223,237
+.byte 102,65,15,56,223,246
+ movdqu 16(%rbp),%xmm12
+ movdqu 32(%rbp),%xmm13
+.byte 102,65,15,56,223,255
+.byte 102,68,15,56,223,193
+ movdqu 48(%rbp),%xmm14
+ movdqu 64(%rbp),%xmm15
+.byte 102,69,15,56,223,202
+ movdqa %xmm0,%xmm10
+ movdqu 80(%rbp),%xmm1
+ movups -112(%rcx),%xmm0
+
+ movups %xmm2,(%rsi)
+ movdqa %xmm11,%xmm2
+ movups %xmm3,16(%rsi)
+ movdqa %xmm12,%xmm3
+ movups %xmm4,32(%rsi)
+ movdqa %xmm13,%xmm4
+ movups %xmm5,48(%rsi)
+ movdqa %xmm14,%xmm5
+ movups %xmm6,64(%rsi)
+ movdqa %xmm15,%xmm6
+ movups %xmm7,80(%rsi)
+ movdqa %xmm1,%xmm7
+ movups %xmm8,96(%rsi)
+ leaq 112(%rsi),%rsi
+
+ subq $0x80,%rdx
+ ja .Lcbc_dec_loop8
+
+ movaps %xmm9,%xmm2
+ leaq -112(%rcx),%rcx
+ addq $0x70,%rdx
+ jle .Lcbc_dec_clear_tail_collected
+ movups %xmm9,(%rsi)
+ leaq 16(%rsi),%rsi
+ cmpq $0x50,%rdx
+ jbe .Lcbc_dec_tail
+
+ movaps %xmm11,%xmm2
+.Lcbc_dec_six_or_seven:
+ cmpq $0x60,%rdx
+ ja .Lcbc_dec_seven
+
+ movaps %xmm7,%xmm8
+ call _aesni_decrypt6
+ pxor %xmm10,%xmm2
+ movaps %xmm8,%xmm10
+ pxor %xmm11,%xmm3
+ movdqu %xmm2,(%rsi)
+ pxor %xmm12,%xmm4
+ movdqu %xmm3,16(%rsi)
+ pxor %xmm3,%xmm3
+ pxor %xmm13,%xmm5
+ movdqu %xmm4,32(%rsi)
+ pxor %xmm4,%xmm4
+ pxor %xmm14,%xmm6
+ movdqu %xmm5,48(%rsi)
+ pxor %xmm5,%xmm5
+ pxor %xmm15,%xmm7
+ movdqu %xmm6,64(%rsi)
+ pxor %xmm6,%xmm6
+ leaq 80(%rsi),%rsi
+ movdqa %xmm7,%xmm2
+ pxor %xmm7,%xmm7
+ jmp .Lcbc_dec_tail_collected
+
+.align 16
+.Lcbc_dec_seven:
+ movups 96(%rdi),%xmm8
+ xorps %xmm9,%xmm9
+ call _aesni_decrypt8
+ movups 80(%rdi),%xmm9
+ pxor %xmm10,%xmm2
+ movups 96(%rdi),%xmm10
+ pxor %xmm11,%xmm3
+ movdqu %xmm2,(%rsi)
+ pxor %xmm12,%xmm4
+ movdqu %xmm3,16(%rsi)
+ pxor %xmm3,%xmm3
+ pxor %xmm13,%xmm5
+ movdqu %xmm4,32(%rsi)
+ pxor %xmm4,%xmm4
+ pxor %xmm14,%xmm6
+ movdqu %xmm5,48(%rsi)
+ pxor %xmm5,%xmm5
+ pxor %xmm15,%xmm7
+ movdqu %xmm6,64(%rsi)
+ pxor %xmm6,%xmm6
+ pxor %xmm9,%xmm8
+ movdqu %xmm7,80(%rsi)
+ pxor %xmm7,%xmm7
+ leaq 96(%rsi),%rsi
+ movdqa %xmm8,%xmm2
+ pxor %xmm8,%xmm8
+ pxor %xmm9,%xmm9
+ jmp .Lcbc_dec_tail_collected
+
+.align 16
+.Lcbc_dec_loop6:
+ movups %xmm7,(%rsi)
+ leaq 16(%rsi),%rsi
+ movdqu 0(%rdi),%xmm2
+ movdqu 16(%rdi),%xmm3
+ movdqa %xmm2,%xmm11
+ movdqu 32(%rdi),%xmm4
+ movdqa %xmm3,%xmm12
+ movdqu 48(%rdi),%xmm5
+ movdqa %xmm4,%xmm13
+ movdqu 64(%rdi),%xmm6
+ movdqa %xmm5,%xmm14
+ movdqu 80(%rdi),%xmm7
+ movdqa %xmm6,%xmm15
+.Lcbc_dec_loop6_enter:
+ leaq 96(%rdi),%rdi
+ movdqa %xmm7,%xmm8
+
+ call _aesni_decrypt6
+
+ pxor %xmm10,%xmm2
+ movdqa %xmm8,%xmm10
+ pxor %xmm11,%xmm3
+ movdqu %xmm2,(%rsi)
+ pxor %xmm12,%xmm4
+ movdqu %xmm3,16(%rsi)
+ pxor %xmm13,%xmm5
+ movdqu %xmm4,32(%rsi)
+ pxor %xmm14,%xmm6
+ movq %rbp,%rcx
+ movdqu %xmm5,48(%rsi)
+ pxor %xmm15,%xmm7
+ movl %r10d,%eax
+ movdqu %xmm6,64(%rsi)
+ leaq 80(%rsi),%rsi
+ subq $0x60,%rdx
+ ja .Lcbc_dec_loop6
+
+ movdqa %xmm7,%xmm2
+ addq $0x50,%rdx
+ jle .Lcbc_dec_clear_tail_collected
+ movups %xmm7,(%rsi)
+ leaq 16(%rsi),%rsi
+
+.Lcbc_dec_tail:
+ movups (%rdi),%xmm2
+ subq $0x10,%rdx
+ jbe .Lcbc_dec_one
+
+ movups 16(%rdi),%xmm3
+ movaps %xmm2,%xmm11
+ subq $0x10,%rdx
+ jbe .Lcbc_dec_two
+
+ movups 32(%rdi),%xmm4
+ movaps %xmm3,%xmm12
+ subq $0x10,%rdx
+ jbe .Lcbc_dec_three
+
+ movups 48(%rdi),%xmm5
+ movaps %xmm4,%xmm13
+ subq $0x10,%rdx
+ jbe .Lcbc_dec_four
+
+ movups 64(%rdi),%xmm6
+ movaps %xmm5,%xmm14
+ movaps %xmm6,%xmm15
+ xorps %xmm7,%xmm7
+ call _aesni_decrypt6
+ pxor %xmm10,%xmm2
+ movaps %xmm15,%xmm10
+ pxor %xmm11,%xmm3
+ movdqu %xmm2,(%rsi)
+ pxor %xmm12,%xmm4
+ movdqu %xmm3,16(%rsi)
+ pxor %xmm3,%xmm3
+ pxor %xmm13,%xmm5
+ movdqu %xmm4,32(%rsi)
+ pxor %xmm4,%xmm4
+ pxor %xmm14,%xmm6
+ movdqu %xmm5,48(%rsi)
+ pxor %xmm5,%xmm5
+ leaq 64(%rsi),%rsi
+ movdqa %xmm6,%xmm2
+ pxor %xmm6,%xmm6
+ pxor %xmm7,%xmm7
+ subq $0x10,%rdx
+ jmp .Lcbc_dec_tail_collected
+
+.align 16
+.Lcbc_dec_one:
+ movaps %xmm2,%xmm11
+ movups (%rcx),%xmm0
+ movups 16(%rcx),%xmm1
+ leaq 32(%rcx),%rcx
+ xorps %xmm0,%xmm2
+.Loop_dec1_17:
+.byte 102,15,56,222,209
+ decl %eax
+ movups (%rcx),%xmm1
+ leaq 16(%rcx),%rcx
+ jnz .Loop_dec1_17
+.byte 102,15,56,223,209
+ xorps %xmm10,%xmm2
+ movaps %xmm11,%xmm10
+ jmp .Lcbc_dec_tail_collected
+.align 16
+.Lcbc_dec_two:
+ movaps %xmm3,%xmm12
+ call _aesni_decrypt2
+ pxor %xmm10,%xmm2
+ movaps %xmm12,%xmm10
+ pxor %xmm11,%xmm3
+ movdqu %xmm2,(%rsi)
+ movdqa %xmm3,%xmm2
+ pxor %xmm3,%xmm3
+ leaq 16(%rsi),%rsi
+ jmp .Lcbc_dec_tail_collected
+.align 16
+.Lcbc_dec_three:
+ movaps %xmm4,%xmm13
+ call _aesni_decrypt3
+ pxor %xmm10,%xmm2
+ movaps %xmm13,%xmm10
+ pxor %xmm11,%xmm3
+ movdqu %xmm2,(%rsi)
+ pxor %xmm12,%xmm4
+ movdqu %xmm3,16(%rsi)
+ pxor %xmm3,%xmm3
+ movdqa %xmm4,%xmm2
+ pxor %xmm4,%xmm4
+ leaq 32(%rsi),%rsi
+ jmp .Lcbc_dec_tail_collected
+.align 16
+.Lcbc_dec_four:
+ movaps %xmm5,%xmm14
+ call _aesni_decrypt4
+ pxor %xmm10,%xmm2
+ movaps %xmm14,%xmm10
+ pxor %xmm11,%xmm3
+ movdqu %xmm2,(%rsi)
+ pxor %xmm12,%xmm4
+ movdqu %xmm3,16(%rsi)
+ pxor %xmm3,%xmm3
+ pxor %xmm13,%xmm5
+ movdqu %xmm4,32(%rsi)
+ pxor %xmm4,%xmm4
+ movdqa %xmm5,%xmm2
+ pxor %xmm5,%xmm5
+ leaq 48(%rsi),%rsi
+ jmp .Lcbc_dec_tail_collected
+
+.align 16
+.Lcbc_dec_clear_tail_collected:
+ pxor %xmm3,%xmm3
+ pxor %xmm4,%xmm4
+ pxor %xmm5,%xmm5
+ pxor %xmm6,%xmm6
+ pxor %xmm7,%xmm7
+ pxor %xmm8,%xmm8
+ pxor %xmm9,%xmm9
+.Lcbc_dec_tail_collected:
+ movups %xmm10,(%r8)
+ andq $15,%rdx
+ jnz .Lcbc_dec_tail_partial
+ movups %xmm2,(%rsi)
+ pxor %xmm2,%xmm2
+ jmp .Lcbc_dec_ret
+.align 16
+.Lcbc_dec_tail_partial:
+ movaps %xmm2,(%rsp)
+ pxor %xmm2,%xmm2
+ movq $16,%rcx
+ movq %rsi,%rdi
+ subq %rdx,%rcx
+ leaq (%rsp),%rsi
+.long 0x9066A4F3
+ movdqa %xmm2,(%rsp)
+
+.Lcbc_dec_ret:
+ xorps %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ movq -8(%r11),%rbp
+.cfi_restore %rbp
+ leaq (%r11),%rsp
+.cfi_def_cfa_register %rsp
+.Lcbc_ret:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_cbc_encrypt,.-aesni_cbc_encrypt
+.globl aesni_set_decrypt_key
+.type aesni_set_decrypt_key,@function
+.align 16
+aesni_set_decrypt_key:
+.cfi_startproc
+.byte 0x48,0x83,0xEC,0x08
+.cfi_adjust_cfa_offset 8
+ call __aesni_set_encrypt_key
+ shll $4,%esi
+ testl %eax,%eax
+ jnz .Ldec_key_ret
+ leaq 16(%rdx,%rsi,1),%rdi
+
+ movups (%rdx),%xmm0
+ movups (%rdi),%xmm1
+ movups %xmm0,(%rdi)
+ movups %xmm1,(%rdx)
+ leaq 16(%rdx),%rdx
+ leaq -16(%rdi),%rdi
+
+.Ldec_key_inverse:
+ movups (%rdx),%xmm0
+ movups (%rdi),%xmm1
+.byte 102,15,56,219,192
+.byte 102,15,56,219,201
+ leaq 16(%rdx),%rdx
+ leaq -16(%rdi),%rdi
+ movups %xmm0,16(%rdi)
+ movups %xmm1,-16(%rdx)
+ cmpq %rdx,%rdi
+ ja .Ldec_key_inverse
+
+ movups (%rdx),%xmm0
+.byte 102,15,56,219,192
+ pxor %xmm1,%xmm1
+ movups %xmm0,(%rdi)
+ pxor %xmm0,%xmm0
+.Ldec_key_ret:
+ addq $8,%rsp
+.cfi_adjust_cfa_offset -8
+ .byte 0xf3,0xc3
+.cfi_endproc
+.LSEH_end_set_decrypt_key:
+.size aesni_set_decrypt_key,.-aesni_set_decrypt_key
+.globl aesni_set_encrypt_key
+.type aesni_set_encrypt_key,@function
+.align 16
+aesni_set_encrypt_key:
+__aesni_set_encrypt_key:
+.cfi_startproc
+.byte 0x48,0x83,0xEC,0x08
+.cfi_adjust_cfa_offset 8
+ movq $-1,%rax
+ testq %rdi,%rdi
+ jz .Lenc_key_ret
+ testq %rdx,%rdx
+ jz .Lenc_key_ret
+
+ movl $268437504,%r10d
+ movups (%rdi),%xmm0
+ xorps %xmm4,%xmm4
+ andl OPENSSL_ia32cap_P+4(%rip),%r10d
+ leaq 16(%rdx),%rax
+ cmpl $256,%esi
+ je .L14rounds
+ cmpl $192,%esi
+ je .L12rounds
+ cmpl $128,%esi
+ jne .Lbad_keybits
+
+.L10rounds:
+ movl $9,%esi
+ cmpl $268435456,%r10d
+ je .L10rounds_alt
+
+ movups %xmm0,(%rdx)
+.byte 102,15,58,223,200,1
+ call .Lkey_expansion_128_cold
+.byte 102,15,58,223,200,2
+ call .Lkey_expansion_128
+.byte 102,15,58,223,200,4
+ call .Lkey_expansion_128
+.byte 102,15,58,223,200,8
+ call .Lkey_expansion_128
+.byte 102,15,58,223,200,16
+ call .Lkey_expansion_128
+.byte 102,15,58,223,200,32
+ call .Lkey_expansion_128
+.byte 102,15,58,223,200,64
+ call .Lkey_expansion_128
+.byte 102,15,58,223,200,128
+ call .Lkey_expansion_128
+.byte 102,15,58,223,200,27
+ call .Lkey_expansion_128
+.byte 102,15,58,223,200,54
+ call .Lkey_expansion_128
+ movups %xmm0,(%rax)
+ movl %esi,80(%rax)
+ xorl %eax,%eax
+ jmp .Lenc_key_ret
+
+.align 16
+.L10rounds_alt:
+ movdqa .Lkey_rotate(%rip),%xmm5
+ movl $8,%r10d
+ movdqa .Lkey_rcon1(%rip),%xmm4
+ movdqa %xmm0,%xmm2
+ movdqu %xmm0,(%rdx)
+ jmp .Loop_key128
+
+.align 16
+.Loop_key128:
+.byte 102,15,56,0,197
+.byte 102,15,56,221,196
+ pslld $1,%xmm4
+ leaq 16(%rax),%rax
+
+ movdqa %xmm2,%xmm3
+ pslldq $4,%xmm2
+ pxor %xmm2,%xmm3
+ pslldq $4,%xmm2
+ pxor %xmm2,%xmm3
+ pslldq $4,%xmm2
+ pxor %xmm3,%xmm2
+
+ pxor %xmm2,%xmm0
+ movdqu %xmm0,-16(%rax)
+ movdqa %xmm0,%xmm2
+
+ decl %r10d
+ jnz .Loop_key128
+
+ movdqa .Lkey_rcon1b(%rip),%xmm4
+
+.byte 102,15,56,0,197
+.byte 102,15,56,221,196
+ pslld $1,%xmm4
+
+ movdqa %xmm2,%xmm3
+ pslldq $4,%xmm2
+ pxor %xmm2,%xmm3
+ pslldq $4,%xmm2
+ pxor %xmm2,%xmm3
+ pslldq $4,%xmm2
+ pxor %xmm3,%xmm2
+
+ pxor %xmm2,%xmm0
+ movdqu %xmm0,(%rax)
+
+ movdqa %xmm0,%xmm2
+.byte 102,15,56,0,197
+.byte 102,15,56,221,196
+
+ movdqa %xmm2,%xmm3
+ pslldq $4,%xmm2
+ pxor %xmm2,%xmm3
+ pslldq $4,%xmm2
+ pxor %xmm2,%xmm3
+ pslldq $4,%xmm2
+ pxor %xmm3,%xmm2
+
+ pxor %xmm2,%xmm0
+ movdqu %xmm0,16(%rax)
+
+ movl %esi,96(%rax)
+ xorl %eax,%eax
+ jmp .Lenc_key_ret
+
+.align 16
+.L12rounds:
+ movq 16(%rdi),%xmm2
+ movl $11,%esi
+ cmpl $268435456,%r10d
+ je .L12rounds_alt
+
+ movups %xmm0,(%rdx)
+.byte 102,15,58,223,202,1
+ call .Lkey_expansion_192a_cold
+.byte 102,15,58,223,202,2
+ call .Lkey_expansion_192b
+.byte 102,15,58,223,202,4
+ call .Lkey_expansion_192a
+.byte 102,15,58,223,202,8
+ call .Lkey_expansion_192b
+.byte 102,15,58,223,202,16
+ call .Lkey_expansion_192a
+.byte 102,15,58,223,202,32
+ call .Lkey_expansion_192b
+.byte 102,15,58,223,202,64
+ call .Lkey_expansion_192a
+.byte 102,15,58,223,202,128
+ call .Lkey_expansion_192b
+ movups %xmm0,(%rax)
+ movl %esi,48(%rax)
+ xorq %rax,%rax
+ jmp .Lenc_key_ret
+
+.align 16
+.L12rounds_alt:
+ movdqa .Lkey_rotate192(%rip),%xmm5
+ movdqa .Lkey_rcon1(%rip),%xmm4
+ movl $8,%r10d
+ movdqu %xmm0,(%rdx)
+ jmp .Loop_key192
+
+.align 16
+.Loop_key192:
+ movq %xmm2,0(%rax)
+ movdqa %xmm2,%xmm1
+.byte 102,15,56,0,213
+.byte 102,15,56,221,212
+ pslld $1,%xmm4
+ leaq 24(%rax),%rax
+
+ movdqa %xmm0,%xmm3
+ pslldq $4,%xmm0
+ pxor %xmm0,%xmm3
+ pslldq $4,%xmm0
+ pxor %xmm0,%xmm3
+ pslldq $4,%xmm0
+ pxor %xmm3,%xmm0
+
+ pshufd $0xff,%xmm0,%xmm3
+ pxor %xmm1,%xmm3
+ pslldq $4,%xmm1
+ pxor %xmm1,%xmm3
+
+ pxor %xmm2,%xmm0
+ pxor %xmm3,%xmm2
+ movdqu %xmm0,-16(%rax)
+
+ decl %r10d
+ jnz .Loop_key192
+
+ movl %esi,32(%rax)
+ xorl %eax,%eax
+ jmp .Lenc_key_ret
+
+.align 16
+.L14rounds:
+ movups 16(%rdi),%xmm2
+ movl $13,%esi
+ leaq 16(%rax),%rax
+ cmpl $268435456,%r10d
+ je .L14rounds_alt
+
+ movups %xmm0,(%rdx)
+ movups %xmm2,16(%rdx)
+.byte 102,15,58,223,202,1
+ call .Lkey_expansion_256a_cold
+.byte 102,15,58,223,200,1
+ call .Lkey_expansion_256b
+.byte 102,15,58,223,202,2
+ call .Lkey_expansion_256a
+.byte 102,15,58,223,200,2
+ call .Lkey_expansion_256b
+.byte 102,15,58,223,202,4
+ call .Lkey_expansion_256a
+.byte 102,15,58,223,200,4
+ call .Lkey_expansion_256b
+.byte 102,15,58,223,202,8
+ call .Lkey_expansion_256a
+.byte 102,15,58,223,200,8
+ call .Lkey_expansion_256b
+.byte 102,15,58,223,202,16
+ call .Lkey_expansion_256a
+.byte 102,15,58,223,200,16
+ call .Lkey_expansion_256b
+.byte 102,15,58,223,202,32
+ call .Lkey_expansion_256a
+.byte 102,15,58,223,200,32
+ call .Lkey_expansion_256b
+.byte 102,15,58,223,202,64
+ call .Lkey_expansion_256a
+ movups %xmm0,(%rax)
+ movl %esi,16(%rax)
+ xorq %rax,%rax
+ jmp .Lenc_key_ret
+
+.align 16
+.L14rounds_alt:
+ movdqa .Lkey_rotate(%rip),%xmm5
+ movdqa .Lkey_rcon1(%rip),%xmm4
+ movl $7,%r10d
+ movdqu %xmm0,0(%rdx)
+ movdqa %xmm2,%xmm1
+ movdqu %xmm2,16(%rdx)
+ jmp .Loop_key256
+
+.align 16
+.Loop_key256:
+.byte 102,15,56,0,213
+.byte 102,15,56,221,212
+
+ movdqa %xmm0,%xmm3
+ pslldq $4,%xmm0
+ pxor %xmm0,%xmm3
+ pslldq $4,%xmm0
+ pxor %xmm0,%xmm3
+ pslldq $4,%xmm0
+ pxor %xmm3,%xmm0
+ pslld $1,%xmm4
+
+ pxor %xmm2,%xmm0
+ movdqu %xmm0,(%rax)
+
+ decl %r10d
+ jz .Ldone_key256
+
+ pshufd $0xff,%xmm0,%xmm2
+ pxor %xmm3,%xmm3
+.byte 102,15,56,221,211
+
+ movdqa %xmm1,%xmm3
+ pslldq $4,%xmm1
+ pxor %xmm1,%xmm3
+ pslldq $4,%xmm1
+ pxor %xmm1,%xmm3
+ pslldq $4,%xmm1
+ pxor %xmm3,%xmm1
+
+ pxor %xmm1,%xmm2
+ movdqu %xmm2,16(%rax)
+ leaq 32(%rax),%rax
+ movdqa %xmm2,%xmm1
+
+ jmp .Loop_key256
+
+.Ldone_key256:
+ movl %esi,16(%rax)
+ xorl %eax,%eax
+ jmp .Lenc_key_ret
+
+.align 16
+.Lbad_keybits:
+ movq $-2,%rax
+.Lenc_key_ret:
+ pxor %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ pxor %xmm2,%xmm2
+ pxor %xmm3,%xmm3
+ pxor %xmm4,%xmm4
+ pxor %xmm5,%xmm5
+ addq $8,%rsp
+.cfi_adjust_cfa_offset -8
+ .byte 0xf3,0xc3
+.LSEH_end_set_encrypt_key:
+
+.align 16
+.Lkey_expansion_128:
+ movups %xmm0,(%rax)
+ leaq 16(%rax),%rax
+.Lkey_expansion_128_cold:
+ shufps $16,%xmm0,%xmm4
+ xorps %xmm4,%xmm0
+ shufps $140,%xmm0,%xmm4
+ xorps %xmm4,%xmm0
+ shufps $255,%xmm1,%xmm1
+ xorps %xmm1,%xmm0
+ .byte 0xf3,0xc3
+
+.align 16
+.Lkey_expansion_192a:
+ movups %xmm0,(%rax)
+ leaq 16(%rax),%rax
+.Lkey_expansion_192a_cold:
+ movaps %xmm2,%xmm5
+.Lkey_expansion_192b_warm:
+ shufps $16,%xmm0,%xmm4
+ movdqa %xmm2,%xmm3
+ xorps %xmm4,%xmm0
+ shufps $140,%xmm0,%xmm4
+ pslldq $4,%xmm3
+ xorps %xmm4,%xmm0
+ pshufd $85,%xmm1,%xmm1
+ pxor %xmm3,%xmm2
+ pxor %xmm1,%xmm0
+ pshufd $255,%xmm0,%xmm3
+ pxor %xmm3,%xmm2
+ .byte 0xf3,0xc3
+
+.align 16
+.Lkey_expansion_192b:
+ movaps %xmm0,%xmm3
+ shufps $68,%xmm0,%xmm5
+ movups %xmm5,(%rax)
+ shufps $78,%xmm2,%xmm3
+ movups %xmm3,16(%rax)
+ leaq 32(%rax),%rax
+ jmp .Lkey_expansion_192b_warm
+
+.align 16
+.Lkey_expansion_256a:
+ movups %xmm2,(%rax)
+ leaq 16(%rax),%rax
+.Lkey_expansion_256a_cold:
+ shufps $16,%xmm0,%xmm4
+ xorps %xmm4,%xmm0
+ shufps $140,%xmm0,%xmm4
+ xorps %xmm4,%xmm0
+ shufps $255,%xmm1,%xmm1
+ xorps %xmm1,%xmm0
+ .byte 0xf3,0xc3
+
+.align 16
+.Lkey_expansion_256b:
+ movups %xmm0,(%rax)
+ leaq 16(%rax),%rax
+
+ shufps $16,%xmm2,%xmm4
+ xorps %xmm4,%xmm2
+ shufps $140,%xmm2,%xmm4
+ xorps %xmm4,%xmm2
+ shufps $170,%xmm1,%xmm1
+ xorps %xmm1,%xmm2
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_set_encrypt_key,.-aesni_set_encrypt_key
+.size __aesni_set_encrypt_key,.-__aesni_set_encrypt_key
+.align 64
+.Lbswap_mask:
+.byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
+.Lincrement32:
+.long 6,6,6,0
+.Lincrement64:
+.long 1,0,0,0
+.Lxts_magic:
+.long 0x87,0,1,0
+.Lincrement1:
+.byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
+.Lkey_rotate:
+.long 0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d
+.Lkey_rotate192:
+.long 0x04070605,0x04070605,0x04070605,0x04070605
+.Lkey_rcon1:
+.long 1,1,1,1
+.Lkey_rcon1b:
+.long 0x1b,0x1b,0x1b,0x1b
+
+.byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69,83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.align 64
diff --git a/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/vpaes-x86_64.S b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/vpaes-x86_64.S
new file mode 100644
index 0000000000..982818f83b
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/aes/vpaes-x86_64.S
@@ -0,0 +1,863 @@
+# WARNING: do not edit!
+# Generated from openssl/crypto/aes/asm/vpaes-x86_64.pl
+#
+# Copyright 2011-2020 The OpenSSL Project Authors. All Rights Reserved.
+#
+# Licensed under the OpenSSL license (the "License"). You may not use
+# this file except in compliance with the License. You can obtain a copy
+# in the file LICENSE in the source distribution or at
+# https://www.openssl.org/source/license.html
+
+.text
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.type _vpaes_encrypt_core,@function
+.align 16
+_vpaes_encrypt_core:
+.cfi_startproc
+ movq %rdx,%r9
+ movq $16,%r11
+ movl 240(%rdx),%eax
+ movdqa %xmm9,%xmm1
+ movdqa .Lk_ipt(%rip),%xmm2
+ pandn %xmm0,%xmm1
+ movdqu (%r9),%xmm5
+ psrld $4,%xmm1
+ pand %xmm9,%xmm0
+.byte 102,15,56,0,208
+ movdqa .Lk_ipt+16(%rip),%xmm0
+.byte 102,15,56,0,193
+ pxor %xmm5,%xmm2
+ addq $16,%r9
+ pxor %xmm2,%xmm0
+ leaq .Lk_mc_backward(%rip),%r10
+ jmp .Lenc_entry
+
+.align 16
+.Lenc_loop:
+
+ movdqa %xmm13,%xmm4
+ movdqa %xmm12,%xmm0
+.byte 102,15,56,0,226
+.byte 102,15,56,0,195
+ pxor %xmm5,%xmm4
+ movdqa %xmm15,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa -64(%r11,%r10,1),%xmm1
+.byte 102,15,56,0,234
+ movdqa (%r11,%r10,1),%xmm4
+ movdqa %xmm14,%xmm2
+.byte 102,15,56,0,211
+ movdqa %xmm0,%xmm3
+ pxor %xmm5,%xmm2
+.byte 102,15,56,0,193
+ addq $16,%r9
+ pxor %xmm2,%xmm0
+.byte 102,15,56,0,220
+ addq $16,%r11
+ pxor %xmm0,%xmm3
+.byte 102,15,56,0,193
+ andq $0x30,%r11
+ subq $1,%rax
+ pxor %xmm3,%xmm0
+
+.Lenc_entry:
+
+ movdqa %xmm9,%xmm1
+ movdqa %xmm11,%xmm5
+ pandn %xmm0,%xmm1
+ psrld $4,%xmm1
+ pand %xmm9,%xmm0
+.byte 102,15,56,0,232
+ movdqa %xmm10,%xmm3
+ pxor %xmm1,%xmm0
+.byte 102,15,56,0,217
+ movdqa %xmm10,%xmm4
+ pxor %xmm5,%xmm3
+.byte 102,15,56,0,224
+ movdqa %xmm10,%xmm2
+ pxor %xmm5,%xmm4
+.byte 102,15,56,0,211
+ movdqa %xmm10,%xmm3
+ pxor %xmm0,%xmm2
+.byte 102,15,56,0,220
+ movdqu (%r9),%xmm5
+ pxor %xmm1,%xmm3
+ jnz .Lenc_loop
+
+
+ movdqa -96(%r10),%xmm4
+ movdqa -80(%r10),%xmm0
+.byte 102,15,56,0,226
+ pxor %xmm5,%xmm4
+.byte 102,15,56,0,195
+ movdqa 64(%r11,%r10,1),%xmm1
+ pxor %xmm4,%xmm0
+.byte 102,15,56,0,193
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _vpaes_encrypt_core,.-_vpaes_encrypt_core
+
+
+
+
+
+
+.type _vpaes_decrypt_core,@function
+.align 16
+_vpaes_decrypt_core:
+.cfi_startproc
+ movq %rdx,%r9
+ movl 240(%rdx),%eax
+ movdqa %xmm9,%xmm1
+ movdqa .Lk_dipt(%rip),%xmm2
+ pandn %xmm0,%xmm1
+ movq %rax,%r11
+ psrld $4,%xmm1
+ movdqu (%r9),%xmm5
+ shlq $4,%r11
+ pand %xmm9,%xmm0
+.byte 102,15,56,0,208
+ movdqa .Lk_dipt+16(%rip),%xmm0
+ xorq $0x30,%r11
+ leaq .Lk_dsbd(%rip),%r10
+.byte 102,15,56,0,193
+ andq $0x30,%r11
+ pxor %xmm5,%xmm2
+ movdqa .Lk_mc_forward+48(%rip),%xmm5
+ pxor %xmm2,%xmm0
+ addq $16,%r9
+ addq %r10,%r11
+ jmp .Ldec_entry
+
+.align 16
+.Ldec_loop:
+
+
+
+ movdqa -32(%r10),%xmm4
+ movdqa -16(%r10),%xmm1
+.byte 102,15,56,0,226
+.byte 102,15,56,0,203
+ pxor %xmm4,%xmm0
+ movdqa 0(%r10),%xmm4
+ pxor %xmm1,%xmm0
+ movdqa 16(%r10),%xmm1
+
+.byte 102,15,56,0,226
+.byte 102,15,56,0,197
+.byte 102,15,56,0,203
+ pxor %xmm4,%xmm0
+ movdqa 32(%r10),%xmm4
+ pxor %xmm1,%xmm0
+ movdqa 48(%r10),%xmm1
+
+.byte 102,15,56,0,226
+.byte 102,15,56,0,197
+.byte 102,15,56,0,203
+ pxor %xmm4,%xmm0
+ movdqa 64(%r10),%xmm4
+ pxor %xmm1,%xmm0
+ movdqa 80(%r10),%xmm1
+
+.byte 102,15,56,0,226
+.byte 102,15,56,0,197
+.byte 102,15,56,0,203
+ pxor %xmm4,%xmm0
+ addq $16,%r9
+.byte 102,15,58,15,237,12
+ pxor %xmm1,%xmm0
+ subq $1,%rax
+
+.Ldec_entry:
+
+ movdqa %xmm9,%xmm1
+ pandn %xmm0,%xmm1
+ movdqa %xmm11,%xmm2
+ psrld $4,%xmm1
+ pand %xmm9,%xmm0
+.byte 102,15,56,0,208
+ movdqa %xmm10,%xmm3
+ pxor %xmm1,%xmm0
+.byte 102,15,56,0,217
+ movdqa %xmm10,%xmm4
+ pxor %xmm2,%xmm3
+.byte 102,15,56,0,224
+ pxor %xmm2,%xmm4
+ movdqa %xmm10,%xmm2
+.byte 102,15,56,0,211
+ movdqa %xmm10,%xmm3
+ pxor %xmm0,%xmm2
+.byte 102,15,56,0,220
+ movdqu (%r9),%xmm0
+ pxor %xmm1,%xmm3
+ jnz .Ldec_loop
+
+
+ movdqa 96(%r10),%xmm4
+.byte 102,15,56,0,226
+ pxor %xmm0,%xmm4
+ movdqa 112(%r10),%xmm0
+ movdqa -352(%r11),%xmm2
+.byte 102,15,56,0,195
+ pxor %xmm4,%xmm0
+.byte 102,15,56,0,194
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _vpaes_decrypt_core,.-_vpaes_decrypt_core
+
+
+
+
+
+
+.type _vpaes_schedule_core,@function
+.align 16
+_vpaes_schedule_core:
+.cfi_startproc
+
+
+
+
+
+ call _vpaes_preheat
+ movdqa .Lk_rcon(%rip),%xmm8
+ movdqu (%rdi),%xmm0
+
+
+ movdqa %xmm0,%xmm3
+ leaq .Lk_ipt(%rip),%r11
+ call _vpaes_schedule_transform
+ movdqa %xmm0,%xmm7
+
+ leaq .Lk_sr(%rip),%r10
+ testq %rcx,%rcx
+ jnz .Lschedule_am_decrypting
+
+
+ movdqu %xmm0,(%rdx)
+ jmp .Lschedule_go
+
+.Lschedule_am_decrypting:
+
+ movdqa (%r8,%r10,1),%xmm1
+.byte 102,15,56,0,217
+ movdqu %xmm3,(%rdx)
+ xorq $0x30,%r8
+
+.Lschedule_go:
+ cmpl $192,%esi
+ ja .Lschedule_256
+ je .Lschedule_192
+
+
+
+
+
+
+
+
+
+
+.Lschedule_128:
+ movl $10,%esi
+
+.Loop_schedule_128:
+ call _vpaes_schedule_round
+ decq %rsi
+ jz .Lschedule_mangle_last
+ call _vpaes_schedule_mangle
+ jmp .Loop_schedule_128
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.align 16
+.Lschedule_192:
+ movdqu 8(%rdi),%xmm0
+ call _vpaes_schedule_transform
+ movdqa %xmm0,%xmm6
+ pxor %xmm4,%xmm4
+ movhlps %xmm4,%xmm6
+ movl $4,%esi
+
+.Loop_schedule_192:
+ call _vpaes_schedule_round
+.byte 102,15,58,15,198,8
+ call _vpaes_schedule_mangle
+ call _vpaes_schedule_192_smear
+ call _vpaes_schedule_mangle
+ call _vpaes_schedule_round
+ decq %rsi
+ jz .Lschedule_mangle_last
+ call _vpaes_schedule_mangle
+ call _vpaes_schedule_192_smear
+ jmp .Loop_schedule_192
+
+
+
+
+
+
+
+
+
+
+
+.align 16
+.Lschedule_256:
+ movdqu 16(%rdi),%xmm0
+ call _vpaes_schedule_transform
+ movl $7,%esi
+
+.Loop_schedule_256:
+ call _vpaes_schedule_mangle
+ movdqa %xmm0,%xmm6
+
+
+ call _vpaes_schedule_round
+ decq %rsi
+ jz .Lschedule_mangle_last
+ call _vpaes_schedule_mangle
+
+
+ pshufd $0xFF,%xmm0,%xmm0
+ movdqa %xmm7,%xmm5
+ movdqa %xmm6,%xmm7
+ call _vpaes_schedule_low_round
+ movdqa %xmm5,%xmm7
+
+ jmp .Loop_schedule_256
+
+
+
+
+
+
+
+
+
+
+
+
+.align 16
+.Lschedule_mangle_last:
+
+ leaq .Lk_deskew(%rip),%r11
+ testq %rcx,%rcx
+ jnz .Lschedule_mangle_last_dec
+
+
+ movdqa (%r8,%r10,1),%xmm1
+.byte 102,15,56,0,193
+ leaq .Lk_opt(%rip),%r11
+ addq $32,%rdx
+
+.Lschedule_mangle_last_dec:
+ addq $-16,%rdx
+ pxor .Lk_s63(%rip),%xmm0
+ call _vpaes_schedule_transform
+ movdqu %xmm0,(%rdx)
+
+
+ pxor %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ pxor %xmm2,%xmm2
+ pxor %xmm3,%xmm3
+ pxor %xmm4,%xmm4
+ pxor %xmm5,%xmm5
+ pxor %xmm6,%xmm6
+ pxor %xmm7,%xmm7
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _vpaes_schedule_core,.-_vpaes_schedule_core
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.type _vpaes_schedule_192_smear,@function
+.align 16
+_vpaes_schedule_192_smear:
+.cfi_startproc
+ pshufd $0x80,%xmm6,%xmm1
+ pshufd $0xFE,%xmm7,%xmm0
+ pxor %xmm1,%xmm6
+ pxor %xmm1,%xmm1
+ pxor %xmm0,%xmm6
+ movdqa %xmm6,%xmm0
+ movhlps %xmm1,%xmm6
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _vpaes_schedule_192_smear,.-_vpaes_schedule_192_smear
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.type _vpaes_schedule_round,@function
+.align 16
+_vpaes_schedule_round:
+.cfi_startproc
+
+ pxor %xmm1,%xmm1
+.byte 102,65,15,58,15,200,15
+.byte 102,69,15,58,15,192,15
+ pxor %xmm1,%xmm7
+
+
+ pshufd $0xFF,%xmm0,%xmm0
+.byte 102,15,58,15,192,1
+
+
+
+
+_vpaes_schedule_low_round:
+
+ movdqa %xmm7,%xmm1
+ pslldq $4,%xmm7
+ pxor %xmm1,%xmm7
+ movdqa %xmm7,%xmm1
+ pslldq $8,%xmm7
+ pxor %xmm1,%xmm7
+ pxor .Lk_s63(%rip),%xmm7
+
+
+ movdqa %xmm9,%xmm1
+ pandn %xmm0,%xmm1
+ psrld $4,%xmm1
+ pand %xmm9,%xmm0
+ movdqa %xmm11,%xmm2
+.byte 102,15,56,0,208
+ pxor %xmm1,%xmm0
+ movdqa %xmm10,%xmm3
+.byte 102,15,56,0,217
+ pxor %xmm2,%xmm3
+ movdqa %xmm10,%xmm4
+.byte 102,15,56,0,224
+ pxor %xmm2,%xmm4
+ movdqa %xmm10,%xmm2
+.byte 102,15,56,0,211
+ pxor %xmm0,%xmm2
+ movdqa %xmm10,%xmm3
+.byte 102,15,56,0,220
+ pxor %xmm1,%xmm3
+ movdqa %xmm13,%xmm4
+.byte 102,15,56,0,226
+ movdqa %xmm12,%xmm0
+.byte 102,15,56,0,195
+ pxor %xmm4,%xmm0
+
+
+ pxor %xmm7,%xmm0
+ movdqa %xmm0,%xmm7
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _vpaes_schedule_round,.-_vpaes_schedule_round
+
+
+
+
+
+
+
+
+
+
+.type _vpaes_schedule_transform,@function
+.align 16
+_vpaes_schedule_transform:
+.cfi_startproc
+ movdqa %xmm9,%xmm1
+ pandn %xmm0,%xmm1
+ psrld $4,%xmm1
+ pand %xmm9,%xmm0
+ movdqa (%r11),%xmm2
+.byte 102,15,56,0,208
+ movdqa 16(%r11),%xmm0
+.byte 102,15,56,0,193
+ pxor %xmm2,%xmm0
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _vpaes_schedule_transform,.-_vpaes_schedule_transform
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.type _vpaes_schedule_mangle,@function
+.align 16
+_vpaes_schedule_mangle:
+.cfi_startproc
+ movdqa %xmm0,%xmm4
+ movdqa .Lk_mc_forward(%rip),%xmm5
+ testq %rcx,%rcx
+ jnz .Lschedule_mangle_dec
+
+
+ addq $16,%rdx
+ pxor .Lk_s63(%rip),%xmm4
+.byte 102,15,56,0,229
+ movdqa %xmm4,%xmm3
+.byte 102,15,56,0,229
+ pxor %xmm4,%xmm3
+.byte 102,15,56,0,229
+ pxor %xmm4,%xmm3
+
+ jmp .Lschedule_mangle_both
+.align 16
+.Lschedule_mangle_dec:
+
+ leaq .Lk_dksd(%rip),%r11
+ movdqa %xmm9,%xmm1
+ pandn %xmm4,%xmm1
+ psrld $4,%xmm1
+ pand %xmm9,%xmm4
+
+ movdqa 0(%r11),%xmm2
+.byte 102,15,56,0,212
+ movdqa 16(%r11),%xmm3
+.byte 102,15,56,0,217
+ pxor %xmm2,%xmm3
+.byte 102,15,56,0,221
+
+ movdqa 32(%r11),%xmm2
+.byte 102,15,56,0,212
+ pxor %xmm3,%xmm2
+ movdqa 48(%r11),%xmm3
+.byte 102,15,56,0,217
+ pxor %xmm2,%xmm3
+.byte 102,15,56,0,221
+
+ movdqa 64(%r11),%xmm2
+.byte 102,15,56,0,212
+ pxor %xmm3,%xmm2
+ movdqa 80(%r11),%xmm3
+.byte 102,15,56,0,217
+ pxor %xmm2,%xmm3
+.byte 102,15,56,0,221
+
+ movdqa 96(%r11),%xmm2
+.byte 102,15,56,0,212
+ pxor %xmm3,%xmm2
+ movdqa 112(%r11),%xmm3
+.byte 102,15,56,0,217
+ pxor %xmm2,%xmm3
+
+ addq $-16,%rdx
+
+.Lschedule_mangle_both:
+ movdqa (%r8,%r10,1),%xmm1
+.byte 102,15,56,0,217
+ addq $-16,%r8
+ andq $0x30,%r8
+ movdqu %xmm3,(%rdx)
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _vpaes_schedule_mangle,.-_vpaes_schedule_mangle
+
+
+
+
+.globl vpaes_set_encrypt_key
+.type vpaes_set_encrypt_key,@function
+.align 16
+vpaes_set_encrypt_key:
+.cfi_startproc
+ movl %esi,%eax
+ shrl $5,%eax
+ addl $5,%eax
+ movl %eax,240(%rdx)
+
+ movl $0,%ecx
+ movl $0x30,%r8d
+ call _vpaes_schedule_core
+ xorl %eax,%eax
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size vpaes_set_encrypt_key,.-vpaes_set_encrypt_key
+
+.globl vpaes_set_decrypt_key
+.type vpaes_set_decrypt_key,@function
+.align 16
+vpaes_set_decrypt_key:
+.cfi_startproc
+ movl %esi,%eax
+ shrl $5,%eax
+ addl $5,%eax
+ movl %eax,240(%rdx)
+ shll $4,%eax
+ leaq 16(%rdx,%rax,1),%rdx
+
+ movl $1,%ecx
+ movl %esi,%r8d
+ shrl $1,%r8d
+ andl $32,%r8d
+ xorl $32,%r8d
+ call _vpaes_schedule_core
+ xorl %eax,%eax
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size vpaes_set_decrypt_key,.-vpaes_set_decrypt_key
+
+.globl vpaes_encrypt
+.type vpaes_encrypt,@function
+.align 16
+vpaes_encrypt:
+.cfi_startproc
+ movdqu (%rdi),%xmm0
+ call _vpaes_preheat
+ call _vpaes_encrypt_core
+ movdqu %xmm0,(%rsi)
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size vpaes_encrypt,.-vpaes_encrypt
+
+.globl vpaes_decrypt
+.type vpaes_decrypt,@function
+.align 16
+vpaes_decrypt:
+.cfi_startproc
+ movdqu (%rdi),%xmm0
+ call _vpaes_preheat
+ call _vpaes_decrypt_core
+ movdqu %xmm0,(%rsi)
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size vpaes_decrypt,.-vpaes_decrypt
+.globl vpaes_cbc_encrypt
+.type vpaes_cbc_encrypt,@function
+.align 16
+vpaes_cbc_encrypt:
+.cfi_startproc
+ xchgq %rcx,%rdx
+ subq $16,%rcx
+ jc .Lcbc_abort
+ movdqu (%r8),%xmm6
+ subq %rdi,%rsi
+ call _vpaes_preheat
+ cmpl $0,%r9d
+ je .Lcbc_dec_loop
+ jmp .Lcbc_enc_loop
+.align 16
+.Lcbc_enc_loop:
+ movdqu (%rdi),%xmm0
+ pxor %xmm6,%xmm0
+ call _vpaes_encrypt_core
+ movdqa %xmm0,%xmm6
+ movdqu %xmm0,(%rsi,%rdi,1)
+ leaq 16(%rdi),%rdi
+ subq $16,%rcx
+ jnc .Lcbc_enc_loop
+ jmp .Lcbc_done
+.align 16
+.Lcbc_dec_loop:
+ movdqu (%rdi),%xmm0
+ movdqa %xmm0,%xmm7
+ call _vpaes_decrypt_core
+ pxor %xmm6,%xmm0
+ movdqa %xmm7,%xmm6
+ movdqu %xmm0,(%rsi,%rdi,1)
+ leaq 16(%rdi),%rdi
+ subq $16,%rcx
+ jnc .Lcbc_dec_loop
+.Lcbc_done:
+ movdqu %xmm6,(%r8)
+.Lcbc_abort:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size vpaes_cbc_encrypt,.-vpaes_cbc_encrypt
+
+
+
+
+
+
+.type _vpaes_preheat,@function
+.align 16
+_vpaes_preheat:
+.cfi_startproc
+ leaq .Lk_s0F(%rip),%r10
+ movdqa -32(%r10),%xmm10
+ movdqa -16(%r10),%xmm11
+ movdqa 0(%r10),%xmm9
+ movdqa 48(%r10),%xmm13
+ movdqa 64(%r10),%xmm12
+ movdqa 80(%r10),%xmm15
+ movdqa 96(%r10),%xmm14
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _vpaes_preheat,.-_vpaes_preheat
+
+
+
+
+
+.type _vpaes_consts,@object
+.align 64
+_vpaes_consts:
+.Lk_inv:
+.quad 0x0E05060F0D080180, 0x040703090A0B0C02
+.quad 0x01040A060F0B0780, 0x030D0E0C02050809
+
+.Lk_s0F:
+.quad 0x0F0F0F0F0F0F0F0F, 0x0F0F0F0F0F0F0F0F
+
+.Lk_ipt:
+.quad 0xC2B2E8985A2A7000, 0xCABAE09052227808
+.quad 0x4C01307D317C4D00, 0xCD80B1FCB0FDCC81
+
+.Lk_sb1:
+.quad 0xB19BE18FCB503E00, 0xA5DF7A6E142AF544
+.quad 0x3618D415FAE22300, 0x3BF7CCC10D2ED9EF
+.Lk_sb2:
+.quad 0xE27A93C60B712400, 0x5EB7E955BC982FCD
+.quad 0x69EB88400AE12900, 0xC2A163C8AB82234A
+.Lk_sbo:
+.quad 0xD0D26D176FBDC700, 0x15AABF7AC502A878
+.quad 0xCFE474A55FBB6A00, 0x8E1E90D1412B35FA
+
+.Lk_mc_forward:
+.quad 0x0407060500030201, 0x0C0F0E0D080B0A09
+.quad 0x080B0A0904070605, 0x000302010C0F0E0D
+.quad 0x0C0F0E0D080B0A09, 0x0407060500030201
+.quad 0x000302010C0F0E0D, 0x080B0A0904070605
+
+.Lk_mc_backward:
+.quad 0x0605040702010003, 0x0E0D0C0F0A09080B
+.quad 0x020100030E0D0C0F, 0x0A09080B06050407
+.quad 0x0E0D0C0F0A09080B, 0x0605040702010003
+.quad 0x0A09080B06050407, 0x020100030E0D0C0F
+
+.Lk_sr:
+.quad 0x0706050403020100, 0x0F0E0D0C0B0A0908
+.quad 0x030E09040F0A0500, 0x0B06010C07020D08
+.quad 0x0F060D040B020900, 0x070E050C030A0108
+.quad 0x0B0E0104070A0D00, 0x0306090C0F020508
+
+.Lk_rcon:
+.quad 0x1F8391B9AF9DEEB6, 0x702A98084D7C7D81
+
+.Lk_s63:
+.quad 0x5B5B5B5B5B5B5B5B, 0x5B5B5B5B5B5B5B5B
+
+.Lk_opt:
+.quad 0xFF9F4929D6B66000, 0xF7974121DEBE6808
+.quad 0x01EDBD5150BCEC00, 0xE10D5DB1B05C0CE0
+
+.Lk_deskew:
+.quad 0x07E4A34047A4E300, 0x1DFEB95A5DBEF91A
+.quad 0x5F36B5DC83EA6900, 0x2841C2ABF49D1E77
+
+
+
+
+
+.Lk_dksd:
+.quad 0xFEB91A5DA3E44700, 0x0740E3A45A1DBEF9
+.quad 0x41C277F4B5368300, 0x5FDC69EAAB289D1E
+.Lk_dksb:
+.quad 0x9A4FCA1F8550D500, 0x03D653861CC94C99
+.quad 0x115BEDA7B6FC4A00, 0xD993256F7E3482C8
+.Lk_dkse:
+.quad 0xD5031CCA1FC9D600, 0x53859A4C994F5086
+.quad 0xA23196054FDC7BE8, 0xCD5EF96A20B31487
+.Lk_dks9:
+.quad 0xB6116FC87ED9A700, 0x4AED933482255BFC
+.quad 0x4576516227143300, 0x8BB89FACE9DAFDCE
+
+
+
+
+
+.Lk_dipt:
+.quad 0x0F505B040B545F00, 0x154A411E114E451A
+.quad 0x86E383E660056500, 0x12771772F491F194
+
+.Lk_dsb9:
+.quad 0x851C03539A86D600, 0xCAD51F504F994CC9
+.quad 0xC03B1789ECD74900, 0x725E2C9EB2FBA565
+.Lk_dsbd:
+.quad 0x7D57CCDFE6B1A200, 0xF56E9B13882A4439
+.quad 0x3CE2FAF724C6CB00, 0x2931180D15DEEFD3
+.Lk_dsbb:
+.quad 0xD022649296B44200, 0x602646F6B0F2D404
+.quad 0xC19498A6CD596700, 0xF3FF0C3E3255AA6B
+.Lk_dsbe:
+.quad 0x46F2929626D4D000, 0x2242600464B4F6B0
+.quad 0x0C55A6CDFFAAC100, 0x9467F36B98593E32
+.Lk_dsbo:
+.quad 0x1387EA537EF94000, 0xC7AA6DB9D4943E2D
+.quad 0x12D7560F93441D00, 0xCA4B8159D8C58E9C
+.byte 86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105,111,110,32,65,69,83,32,102,111,114,32,120,56,54,95,54,52,47,83,83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117,114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105,118,101,114,115,105,116,121,41,0
+.align 64
+.size _vpaes_consts,.-_vpaes_consts
diff --git a/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/modes/aesni-gcm-x86_64.S b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/modes/aesni-gcm-x86_64.S
new file mode 100644
index 0000000000..1201f3427a
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/modes/aesni-gcm-x86_64.S
@@ -0,0 +1,29 @@
+# WARNING: do not edit!
+# Generated from openssl/crypto/modes/asm/aesni-gcm-x86_64.pl
+#
+# Copyright 2013-2020 The OpenSSL Project Authors. All Rights Reserved.
+#
+# Licensed under the OpenSSL license (the "License"). You may not use
+# this file except in compliance with the License. You can obtain a copy
+# in the file LICENSE in the source distribution or at
+# https://www.openssl.org/source/license.html
+
+.text
+
+.globl aesni_gcm_encrypt
+.type aesni_gcm_encrypt,@function
+aesni_gcm_encrypt:
+.cfi_startproc
+ xorl %eax,%eax
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_gcm_encrypt,.-aesni_gcm_encrypt
+
+.globl aesni_gcm_decrypt
+.type aesni_gcm_decrypt,@function
+aesni_gcm_decrypt:
+.cfi_startproc
+ xorl %eax,%eax
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size aesni_gcm_decrypt,.-aesni_gcm_decrypt
diff --git a/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/modes/ghash-x86_64.S b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/modes/ghash-x86_64.S
new file mode 100644
index 0000000000..3fcaa4b2ef
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/modes/ghash-x86_64.S
@@ -0,0 +1,1386 @@
+# WARNING: do not edit!
+# Generated from openssl/crypto/modes/asm/ghash-x86_64.pl
+#
+# Copyright 2010-2020 The OpenSSL Project Authors. All Rights Reserved.
+#
+# Licensed under the OpenSSL license (the "License"). You may not use
+# this file except in compliance with the License. You can obtain a copy
+# in the file LICENSE in the source distribution or at
+# https://www.openssl.org/source/license.html
+
+.text
+
+
+.globl gcm_gmult_4bit
+.type gcm_gmult_4bit,@function
+.align 16
+gcm_gmult_4bit:
+.cfi_startproc
+ pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-24
+ pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
+ pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
+ subq $280,%rsp
+.cfi_adjust_cfa_offset 280
+.Lgmult_prologue:
+
+ movzbq 15(%rdi),%r8
+ leaq .Lrem_4bit(%rip),%r11
+ xorq %rax,%rax
+ xorq %rbx,%rbx
+ movb %r8b,%al
+ movb %r8b,%bl
+ shlb $4,%al
+ movq $14,%rcx
+ movq 8(%rsi,%rax,1),%r8
+ movq (%rsi,%rax,1),%r9
+ andb $0xf0,%bl
+ movq %r8,%rdx
+ jmp .Loop1
+
+.align 16
+.Loop1:
+ shrq $4,%r8
+ andq $0xf,%rdx
+ movq %r9,%r10
+ movb (%rdi,%rcx,1),%al
+ shrq $4,%r9
+ xorq 8(%rsi,%rbx,1),%r8
+ shlq $60,%r10
+ xorq (%rsi,%rbx,1),%r9
+ movb %al,%bl
+ xorq (%r11,%rdx,8),%r9
+ movq %r8,%rdx
+ shlb $4,%al
+ xorq %r10,%r8
+ decq %rcx
+ js .Lbreak1
+
+ shrq $4,%r8
+ andq $0xf,%rdx
+ movq %r9,%r10
+ shrq $4,%r9
+ xorq 8(%rsi,%rax,1),%r8
+ shlq $60,%r10
+ xorq (%rsi,%rax,1),%r9
+ andb $0xf0,%bl
+ xorq (%r11,%rdx,8),%r9
+ movq %r8,%rdx
+ xorq %r10,%r8
+ jmp .Loop1
+
+.align 16
+.Lbreak1:
+ shrq $4,%r8
+ andq $0xf,%rdx
+ movq %r9,%r10
+ shrq $4,%r9
+ xorq 8(%rsi,%rax,1),%r8
+ shlq $60,%r10
+ xorq (%rsi,%rax,1),%r9
+ andb $0xf0,%bl
+ xorq (%r11,%rdx,8),%r9
+ movq %r8,%rdx
+ xorq %r10,%r8
+
+ shrq $4,%r8
+ andq $0xf,%rdx
+ movq %r9,%r10
+ shrq $4,%r9
+ xorq 8(%rsi,%rbx,1),%r8
+ shlq $60,%r10
+ xorq (%rsi,%rbx,1),%r9
+ xorq %r10,%r8
+ xorq (%r11,%rdx,8),%r9
+
+ bswapq %r8
+ bswapq %r9
+ movq %r8,8(%rdi)
+ movq %r9,(%rdi)
+
+ leaq 280+48(%rsp),%rsi
+.cfi_def_cfa %rsi,8
+ movq -8(%rsi),%rbx
+.cfi_restore %rbx
+ leaq (%rsi),%rsp
+.cfi_def_cfa_register %rsp
+.Lgmult_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size gcm_gmult_4bit,.-gcm_gmult_4bit
+.globl gcm_ghash_4bit
+.type gcm_ghash_4bit,@function
+.align 16
+gcm_ghash_4bit:
+.cfi_startproc
+ pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-24
+ pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
+ pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
+ subq $280,%rsp
+.cfi_adjust_cfa_offset 280
+.Lghash_prologue:
+ movq %rdx,%r14
+ movq %rcx,%r15
+ subq $-128,%rsi
+ leaq 16+128(%rsp),%rbp
+ xorl %edx,%edx
+ movq 0+0-128(%rsi),%r8
+ movq 0+8-128(%rsi),%rax
+ movb %al,%dl
+ shrq $4,%rax
+ movq %r8,%r10
+ shrq $4,%r8
+ movq 16+0-128(%rsi),%r9
+ shlb $4,%dl
+ movq 16+8-128(%rsi),%rbx
+ shlq $60,%r10
+ movb %dl,0(%rsp)
+ orq %r10,%rax
+ movb %bl,%dl
+ shrq $4,%rbx
+ movq %r9,%r10
+ shrq $4,%r9
+ movq %r8,0(%rbp)
+ movq 32+0-128(%rsi),%r8
+ shlb $4,%dl
+ movq %rax,0-128(%rbp)
+ movq 32+8-128(%rsi),%rax
+ shlq $60,%r10
+ movb %dl,1(%rsp)
+ orq %r10,%rbx
+ movb %al,%dl
+ shrq $4,%rax
+ movq %r8,%r10
+ shrq $4,%r8
+ movq %r9,8(%rbp)
+ movq 48+0-128(%rsi),%r9
+ shlb $4,%dl
+ movq %rbx,8-128(%rbp)
+ movq 48+8-128(%rsi),%rbx
+ shlq $60,%r10
+ movb %dl,2(%rsp)
+ orq %r10,%rax
+ movb %bl,%dl
+ shrq $4,%rbx
+ movq %r9,%r10
+ shrq $4,%r9
+ movq %r8,16(%rbp)
+ movq 64+0-128(%rsi),%r8
+ shlb $4,%dl
+ movq %rax,16-128(%rbp)
+ movq 64+8-128(%rsi),%rax
+ shlq $60,%r10
+ movb %dl,3(%rsp)
+ orq %r10,%rbx
+ movb %al,%dl
+ shrq $4,%rax
+ movq %r8,%r10
+ shrq $4,%r8
+ movq %r9,24(%rbp)
+ movq 80+0-128(%rsi),%r9
+ shlb $4,%dl
+ movq %rbx,24-128(%rbp)
+ movq 80+8-128(%rsi),%rbx
+ shlq $60,%r10
+ movb %dl,4(%rsp)
+ orq %r10,%rax
+ movb %bl,%dl
+ shrq $4,%rbx
+ movq %r9,%r10
+ shrq $4,%r9
+ movq %r8,32(%rbp)
+ movq 96+0-128(%rsi),%r8
+ shlb $4,%dl
+ movq %rax,32-128(%rbp)
+ movq 96+8-128(%rsi),%rax
+ shlq $60,%r10
+ movb %dl,5(%rsp)
+ orq %r10,%rbx
+ movb %al,%dl
+ shrq $4,%rax
+ movq %r8,%r10
+ shrq $4,%r8
+ movq %r9,40(%rbp)
+ movq 112+0-128(%rsi),%r9
+ shlb $4,%dl
+ movq %rbx,40-128(%rbp)
+ movq 112+8-128(%rsi),%rbx
+ shlq $60,%r10
+ movb %dl,6(%rsp)
+ orq %r10,%rax
+ movb %bl,%dl
+ shrq $4,%rbx
+ movq %r9,%r10
+ shrq $4,%r9
+ movq %r8,48(%rbp)
+ movq 128+0-128(%rsi),%r8
+ shlb $4,%dl
+ movq %rax,48-128(%rbp)
+ movq 128+8-128(%rsi),%rax
+ shlq $60,%r10
+ movb %dl,7(%rsp)
+ orq %r10,%rbx
+ movb %al,%dl
+ shrq $4,%rax
+ movq %r8,%r10
+ shrq $4,%r8
+ movq %r9,56(%rbp)
+ movq 144+0-128(%rsi),%r9
+ shlb $4,%dl
+ movq %rbx,56-128(%rbp)
+ movq 144+8-128(%rsi),%rbx
+ shlq $60,%r10
+ movb %dl,8(%rsp)
+ orq %r10,%rax
+ movb %bl,%dl
+ shrq $4,%rbx
+ movq %r9,%r10
+ shrq $4,%r9
+ movq %r8,64(%rbp)
+ movq 160+0-128(%rsi),%r8
+ shlb $4,%dl
+ movq %rax,64-128(%rbp)
+ movq 160+8-128(%rsi),%rax
+ shlq $60,%r10
+ movb %dl,9(%rsp)
+ orq %r10,%rbx
+ movb %al,%dl
+ shrq $4,%rax
+ movq %r8,%r10
+ shrq $4,%r8
+ movq %r9,72(%rbp)
+ movq 176+0-128(%rsi),%r9
+ shlb $4,%dl
+ movq %rbx,72-128(%rbp)
+ movq 176+8-128(%rsi),%rbx
+ shlq $60,%r10
+ movb %dl,10(%rsp)
+ orq %r10,%rax
+ movb %bl,%dl
+ shrq $4,%rbx
+ movq %r9,%r10
+ shrq $4,%r9
+ movq %r8,80(%rbp)
+ movq 192+0-128(%rsi),%r8
+ shlb $4,%dl
+ movq %rax,80-128(%rbp)
+ movq 192+8-128(%rsi),%rax
+ shlq $60,%r10
+ movb %dl,11(%rsp)
+ orq %r10,%rbx
+ movb %al,%dl
+ shrq $4,%rax
+ movq %r8,%r10
+ shrq $4,%r8
+ movq %r9,88(%rbp)
+ movq 208+0-128(%rsi),%r9
+ shlb $4,%dl
+ movq %rbx,88-128(%rbp)
+ movq 208+8-128(%rsi),%rbx
+ shlq $60,%r10
+ movb %dl,12(%rsp)
+ orq %r10,%rax
+ movb %bl,%dl
+ shrq $4,%rbx
+ movq %r9,%r10
+ shrq $4,%r9
+ movq %r8,96(%rbp)
+ movq 224+0-128(%rsi),%r8
+ shlb $4,%dl
+ movq %rax,96-128(%rbp)
+ movq 224+8-128(%rsi),%rax
+ shlq $60,%r10
+ movb %dl,13(%rsp)
+ orq %r10,%rbx
+ movb %al,%dl
+ shrq $4,%rax
+ movq %r8,%r10
+ shrq $4,%r8
+ movq %r9,104(%rbp)
+ movq 240+0-128(%rsi),%r9
+ shlb $4,%dl
+ movq %rbx,104-128(%rbp)
+ movq 240+8-128(%rsi),%rbx
+ shlq $60,%r10
+ movb %dl,14(%rsp)
+ orq %r10,%rax
+ movb %bl,%dl
+ shrq $4,%rbx
+ movq %r9,%r10
+ shrq $4,%r9
+ movq %r8,112(%rbp)
+ shlb $4,%dl
+ movq %rax,112-128(%rbp)
+ shlq $60,%r10
+ movb %dl,15(%rsp)
+ orq %r10,%rbx
+ movq %r9,120(%rbp)
+ movq %rbx,120-128(%rbp)
+ addq $-128,%rsi
+ movq 8(%rdi),%r8
+ movq 0(%rdi),%r9
+ addq %r14,%r15
+ leaq .Lrem_8bit(%rip),%r11
+ jmp .Louter_loop
+.align 16
+.Louter_loop:
+ xorq (%r14),%r9
+ movq 8(%r14),%rdx
+ leaq 16(%r14),%r14
+ xorq %r8,%rdx
+ movq %r9,(%rdi)
+ movq %rdx,8(%rdi)
+ shrq $32,%rdx
+ xorq %rax,%rax
+ roll $8,%edx
+ movb %dl,%al
+ movzbl %dl,%ebx
+ shlb $4,%al
+ shrl $4,%ebx
+ roll $8,%edx
+ movq 8(%rsi,%rax,1),%r8
+ movq (%rsi,%rax,1),%r9
+ movb %dl,%al
+ movzbl %dl,%ecx
+ shlb $4,%al
+ movzbq (%rsp,%rbx,1),%r12
+ shrl $4,%ecx
+ xorq %r8,%r12
+ movq %r9,%r10
+ shrq $8,%r8
+ movzbq %r12b,%r12
+ shrq $8,%r9
+ xorq -128(%rbp,%rbx,8),%r8
+ shlq $56,%r10
+ xorq (%rbp,%rbx,8),%r9
+ roll $8,%edx
+ xorq 8(%rsi,%rax,1),%r8
+ xorq (%rsi,%rax,1),%r9
+ movb %dl,%al
+ xorq %r10,%r8
+ movzwq (%r11,%r12,2),%r12
+ movzbl %dl,%ebx
+ shlb $4,%al
+ movzbq (%rsp,%rcx,1),%r13
+ shrl $4,%ebx
+ shlq $48,%r12
+ xorq %r8,%r13
+ movq %r9,%r10
+ xorq %r12,%r9
+ shrq $8,%r8
+ movzbq %r13b,%r13
+ shrq $8,%r9
+ xorq -128(%rbp,%rcx,8),%r8
+ shlq $56,%r10
+ xorq (%rbp,%rcx,8),%r9
+ roll $8,%edx
+ xorq 8(%rsi,%rax,1),%r8
+ xorq (%rsi,%rax,1),%r9
+ movb %dl,%al
+ xorq %r10,%r8
+ movzwq (%r11,%r13,2),%r13
+ movzbl %dl,%ecx
+ shlb $4,%al
+ movzbq (%rsp,%rbx,1),%r12
+ shrl $4,%ecx
+ shlq $48,%r13
+ xorq %r8,%r12
+ movq %r9,%r10
+ xorq %r13,%r9
+ shrq $8,%r8
+ movzbq %r12b,%r12
+ movl 8(%rdi),%edx
+ shrq $8,%r9
+ xorq -128(%rbp,%rbx,8),%r8
+ shlq $56,%r10
+ xorq (%rbp,%rbx,8),%r9
+ roll $8,%edx
+ xorq 8(%rsi,%rax,1),%r8
+ xorq (%rsi,%rax,1),%r9
+ movb %dl,%al
+ xorq %r10,%r8
+ movzwq (%r11,%r12,2),%r12
+ movzbl %dl,%ebx
+ shlb $4,%al
+ movzbq (%rsp,%rcx,1),%r13
+ shrl $4,%ebx
+ shlq $48,%r12
+ xorq %r8,%r13
+ movq %r9,%r10
+ xorq %r12,%r9
+ shrq $8,%r8
+ movzbq %r13b,%r13
+ shrq $8,%r9
+ xorq -128(%rbp,%rcx,8),%r8
+ shlq $56,%r10
+ xorq (%rbp,%rcx,8),%r9
+ roll $8,%edx
+ xorq 8(%rsi,%rax,1),%r8
+ xorq (%rsi,%rax,1),%r9
+ movb %dl,%al
+ xorq %r10,%r8
+ movzwq (%r11,%r13,2),%r13
+ movzbl %dl,%ecx
+ shlb $4,%al
+ movzbq (%rsp,%rbx,1),%r12
+ shrl $4,%ecx
+ shlq $48,%r13
+ xorq %r8,%r12
+ movq %r9,%r10
+ xorq %r13,%r9
+ shrq $8,%r8
+ movzbq %r12b,%r12
+ shrq $8,%r9
+ xorq -128(%rbp,%rbx,8),%r8
+ shlq $56,%r10
+ xorq (%rbp,%rbx,8),%r9
+ roll $8,%edx
+ xorq 8(%rsi,%rax,1),%r8
+ xorq (%rsi,%rax,1),%r9
+ movb %dl,%al
+ xorq %r10,%r8
+ movzwq (%r11,%r12,2),%r12
+ movzbl %dl,%ebx
+ shlb $4,%al
+ movzbq (%rsp,%rcx,1),%r13
+ shrl $4,%ebx
+ shlq $48,%r12
+ xorq %r8,%r13
+ movq %r9,%r10
+ xorq %r12,%r9
+ shrq $8,%r8
+ movzbq %r13b,%r13
+ shrq $8,%r9
+ xorq -128(%rbp,%rcx,8),%r8
+ shlq $56,%r10
+ xorq (%rbp,%rcx,8),%r9
+ roll $8,%edx
+ xorq 8(%rsi,%rax,1),%r8
+ xorq (%rsi,%rax,1),%r9
+ movb %dl,%al
+ xorq %r10,%r8
+ movzwq (%r11,%r13,2),%r13
+ movzbl %dl,%ecx
+ shlb $4,%al
+ movzbq (%rsp,%rbx,1),%r12
+ shrl $4,%ecx
+ shlq $48,%r13
+ xorq %r8,%r12
+ movq %r9,%r10
+ xorq %r13,%r9
+ shrq $8,%r8
+ movzbq %r12b,%r12
+ movl 4(%rdi),%edx
+ shrq $8,%r9
+ xorq -128(%rbp,%rbx,8),%r8
+ shlq $56,%r10
+ xorq (%rbp,%rbx,8),%r9
+ roll $8,%edx
+ xorq 8(%rsi,%rax,1),%r8
+ xorq (%rsi,%rax,1),%r9
+ movb %dl,%al
+ xorq %r10,%r8
+ movzwq (%r11,%r12,2),%r12
+ movzbl %dl,%ebx
+ shlb $4,%al
+ movzbq (%rsp,%rcx,1),%r13
+ shrl $4,%ebx
+ shlq $48,%r12
+ xorq %r8,%r13
+ movq %r9,%r10
+ xorq %r12,%r9
+ shrq $8,%r8
+ movzbq %r13b,%r13
+ shrq $8,%r9
+ xorq -128(%rbp,%rcx,8),%r8
+ shlq $56,%r10
+ xorq (%rbp,%rcx,8),%r9
+ roll $8,%edx
+ xorq 8(%rsi,%rax,1),%r8
+ xorq (%rsi,%rax,1),%r9
+ movb %dl,%al
+ xorq %r10,%r8
+ movzwq (%r11,%r13,2),%r13
+ movzbl %dl,%ecx
+ shlb $4,%al
+ movzbq (%rsp,%rbx,1),%r12
+ shrl $4,%ecx
+ shlq $48,%r13
+ xorq %r8,%r12
+ movq %r9,%r10
+ xorq %r13,%r9
+ shrq $8,%r8
+ movzbq %r12b,%r12
+ shrq $8,%r9
+ xorq -128(%rbp,%rbx,8),%r8
+ shlq $56,%r10
+ xorq (%rbp,%rbx,8),%r9
+ roll $8,%edx
+ xorq 8(%rsi,%rax,1),%r8
+ xorq (%rsi,%rax,1),%r9
+ movb %dl,%al
+ xorq %r10,%r8
+ movzwq (%r11,%r12,2),%r12
+ movzbl %dl,%ebx
+ shlb $4,%al
+ movzbq (%rsp,%rcx,1),%r13
+ shrl $4,%ebx
+ shlq $48,%r12
+ xorq %r8,%r13
+ movq %r9,%r10
+ xorq %r12,%r9
+ shrq $8,%r8
+ movzbq %r13b,%r13
+ shrq $8,%r9
+ xorq -128(%rbp,%rcx,8),%r8
+ shlq $56,%r10
+ xorq (%rbp,%rcx,8),%r9
+ roll $8,%edx
+ xorq 8(%rsi,%rax,1),%r8
+ xorq (%rsi,%rax,1),%r9
+ movb %dl,%al
+ xorq %r10,%r8
+ movzwq (%r11,%r13,2),%r13
+ movzbl %dl,%ecx
+ shlb $4,%al
+ movzbq (%rsp,%rbx,1),%r12
+ shrl $4,%ecx
+ shlq $48,%r13
+ xorq %r8,%r12
+ movq %r9,%r10
+ xorq %r13,%r9
+ shrq $8,%r8
+ movzbq %r12b,%r12
+ movl 0(%rdi),%edx
+ shrq $8,%r9
+ xorq -128(%rbp,%rbx,8),%r8
+ shlq $56,%r10
+ xorq (%rbp,%rbx,8),%r9
+ roll $8,%edx
+ xorq 8(%rsi,%rax,1),%r8
+ xorq (%rsi,%rax,1),%r9
+ movb %dl,%al
+ xorq %r10,%r8
+ movzwq (%r11,%r12,2),%r12
+ movzbl %dl,%ebx
+ shlb $4,%al
+ movzbq (%rsp,%rcx,1),%r13
+ shrl $4,%ebx
+ shlq $48,%r12
+ xorq %r8,%r13
+ movq %r9,%r10
+ xorq %r12,%r9
+ shrq $8,%r8
+ movzbq %r13b,%r13
+ shrq $8,%r9
+ xorq -128(%rbp,%rcx,8),%r8
+ shlq $56,%r10
+ xorq (%rbp,%rcx,8),%r9
+ roll $8,%edx
+ xorq 8(%rsi,%rax,1),%r8
+ xorq (%rsi,%rax,1),%r9
+ movb %dl,%al
+ xorq %r10,%r8
+ movzwq (%r11,%r13,2),%r13
+ movzbl %dl,%ecx
+ shlb $4,%al
+ movzbq (%rsp,%rbx,1),%r12
+ shrl $4,%ecx
+ shlq $48,%r13
+ xorq %r8,%r12
+ movq %r9,%r10
+ xorq %r13,%r9
+ shrq $8,%r8
+ movzbq %r12b,%r12
+ shrq $8,%r9
+ xorq -128(%rbp,%rbx,8),%r8
+ shlq $56,%r10
+ xorq (%rbp,%rbx,8),%r9
+ roll $8,%edx
+ xorq 8(%rsi,%rax,1),%r8
+ xorq (%rsi,%rax,1),%r9
+ movb %dl,%al
+ xorq %r10,%r8
+ movzwq (%r11,%r12,2),%r12
+ movzbl %dl,%ebx
+ shlb $4,%al
+ movzbq (%rsp,%rcx,1),%r13
+ shrl $4,%ebx
+ shlq $48,%r12
+ xorq %r8,%r13
+ movq %r9,%r10
+ xorq %r12,%r9
+ shrq $8,%r8
+ movzbq %r13b,%r13
+ shrq $8,%r9
+ xorq -128(%rbp,%rcx,8),%r8
+ shlq $56,%r10
+ xorq (%rbp,%rcx,8),%r9
+ roll $8,%edx
+ xorq 8(%rsi,%rax,1),%r8
+ xorq (%rsi,%rax,1),%r9
+ movb %dl,%al
+ xorq %r10,%r8
+ movzwq (%r11,%r13,2),%r13
+ movzbl %dl,%ecx
+ shlb $4,%al
+ movzbq (%rsp,%rbx,1),%r12
+ andl $240,%ecx
+ shlq $48,%r13
+ xorq %r8,%r12
+ movq %r9,%r10
+ xorq %r13,%r9
+ shrq $8,%r8
+ movzbq %r12b,%r12
+ movl -4(%rdi),%edx
+ shrq $8,%r9
+ xorq -128(%rbp,%rbx,8),%r8
+ shlq $56,%r10
+ xorq (%rbp,%rbx,8),%r9
+ movzwq (%r11,%r12,2),%r12
+ xorq 8(%rsi,%rax,1),%r8
+ xorq (%rsi,%rax,1),%r9
+ shlq $48,%r12
+ xorq %r10,%r8
+ xorq %r12,%r9
+ movzbq %r8b,%r13
+ shrq $4,%r8
+ movq %r9,%r10
+ shlb $4,%r13b
+ shrq $4,%r9
+ xorq 8(%rsi,%rcx,1),%r8
+ movzwq (%r11,%r13,2),%r13
+ shlq $60,%r10
+ xorq (%rsi,%rcx,1),%r9
+ xorq %r10,%r8
+ shlq $48,%r13
+ bswapq %r8
+ xorq %r13,%r9
+ bswapq %r9
+ cmpq %r15,%r14
+ jb .Louter_loop
+ movq %r8,8(%rdi)
+ movq %r9,(%rdi)
+
+ leaq 280+48(%rsp),%rsi
+.cfi_def_cfa %rsi,8
+ movq -48(%rsi),%r15
+.cfi_restore %r15
+ movq -40(%rsi),%r14
+.cfi_restore %r14
+ movq -32(%rsi),%r13
+.cfi_restore %r13
+ movq -24(%rsi),%r12
+.cfi_restore %r12
+ movq -16(%rsi),%rbp
+.cfi_restore %rbp
+ movq -8(%rsi),%rbx
+.cfi_restore %rbx
+ leaq 0(%rsi),%rsp
+.cfi_def_cfa_register %rsp
+.Lghash_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size gcm_ghash_4bit,.-gcm_ghash_4bit
+.globl gcm_init_clmul
+.type gcm_init_clmul,@function
+.align 16
+gcm_init_clmul:
+.cfi_startproc
+.L_init_clmul:
+ movdqu (%rsi),%xmm2
+ pshufd $78,%xmm2,%xmm2
+
+
+ pshufd $255,%xmm2,%xmm4
+ movdqa %xmm2,%xmm3
+ psllq $1,%xmm2
+ pxor %xmm5,%xmm5
+ psrlq $63,%xmm3
+ pcmpgtd %xmm4,%xmm5
+ pslldq $8,%xmm3
+ por %xmm3,%xmm2
+
+
+ pand .L0x1c2_polynomial(%rip),%xmm5
+ pxor %xmm5,%xmm2
+
+
+ pshufd $78,%xmm2,%xmm6
+ movdqa %xmm2,%xmm0
+ pxor %xmm2,%xmm6
+ movdqa %xmm0,%xmm1
+ pshufd $78,%xmm0,%xmm3
+ pxor %xmm0,%xmm3
+.byte 102,15,58,68,194,0
+.byte 102,15,58,68,202,17
+.byte 102,15,58,68,222,0
+ pxor %xmm0,%xmm3
+ pxor %xmm1,%xmm3
+
+ movdqa %xmm3,%xmm4
+ psrldq $8,%xmm3
+ pslldq $8,%xmm4
+ pxor %xmm3,%xmm1
+ pxor %xmm4,%xmm0
+
+ movdqa %xmm0,%xmm4
+ movdqa %xmm0,%xmm3
+ psllq $5,%xmm0
+ pxor %xmm0,%xmm3
+ psllq $1,%xmm0
+ pxor %xmm3,%xmm0
+ psllq $57,%xmm0
+ movdqa %xmm0,%xmm3
+ pslldq $8,%xmm0
+ psrldq $8,%xmm3
+ pxor %xmm4,%xmm0
+ pxor %xmm3,%xmm1
+
+
+ movdqa %xmm0,%xmm4
+ psrlq $1,%xmm0
+ pxor %xmm4,%xmm1
+ pxor %xmm0,%xmm4
+ psrlq $5,%xmm0
+ pxor %xmm4,%xmm0
+ psrlq $1,%xmm0
+ pxor %xmm1,%xmm0
+ pshufd $78,%xmm2,%xmm3
+ pshufd $78,%xmm0,%xmm4
+ pxor %xmm2,%xmm3
+ movdqu %xmm2,0(%rdi)
+ pxor %xmm0,%xmm4
+ movdqu %xmm0,16(%rdi)
+.byte 102,15,58,15,227,8
+ movdqu %xmm4,32(%rdi)
+ movdqa %xmm0,%xmm1
+ pshufd $78,%xmm0,%xmm3
+ pxor %xmm0,%xmm3
+.byte 102,15,58,68,194,0
+.byte 102,15,58,68,202,17
+.byte 102,15,58,68,222,0
+ pxor %xmm0,%xmm3
+ pxor %xmm1,%xmm3
+
+ movdqa %xmm3,%xmm4
+ psrldq $8,%xmm3
+ pslldq $8,%xmm4
+ pxor %xmm3,%xmm1
+ pxor %xmm4,%xmm0
+
+ movdqa %xmm0,%xmm4
+ movdqa %xmm0,%xmm3
+ psllq $5,%xmm0
+ pxor %xmm0,%xmm3
+ psllq $1,%xmm0
+ pxor %xmm3,%xmm0
+ psllq $57,%xmm0
+ movdqa %xmm0,%xmm3
+ pslldq $8,%xmm0
+ psrldq $8,%xmm3
+ pxor %xmm4,%xmm0
+ pxor %xmm3,%xmm1
+
+
+ movdqa %xmm0,%xmm4
+ psrlq $1,%xmm0
+ pxor %xmm4,%xmm1
+ pxor %xmm0,%xmm4
+ psrlq $5,%xmm0
+ pxor %xmm4,%xmm0
+ psrlq $1,%xmm0
+ pxor %xmm1,%xmm0
+ movdqa %xmm0,%xmm5
+ movdqa %xmm0,%xmm1
+ pshufd $78,%xmm0,%xmm3
+ pxor %xmm0,%xmm3
+.byte 102,15,58,68,194,0
+.byte 102,15,58,68,202,17
+.byte 102,15,58,68,222,0
+ pxor %xmm0,%xmm3
+ pxor %xmm1,%xmm3
+
+ movdqa %xmm3,%xmm4
+ psrldq $8,%xmm3
+ pslldq $8,%xmm4
+ pxor %xmm3,%xmm1
+ pxor %xmm4,%xmm0
+
+ movdqa %xmm0,%xmm4
+ movdqa %xmm0,%xmm3
+ psllq $5,%xmm0
+ pxor %xmm0,%xmm3
+ psllq $1,%xmm0
+ pxor %xmm3,%xmm0
+ psllq $57,%xmm0
+ movdqa %xmm0,%xmm3
+ pslldq $8,%xmm0
+ psrldq $8,%xmm3
+ pxor %xmm4,%xmm0
+ pxor %xmm3,%xmm1
+
+
+ movdqa %xmm0,%xmm4
+ psrlq $1,%xmm0
+ pxor %xmm4,%xmm1
+ pxor %xmm0,%xmm4
+ psrlq $5,%xmm0
+ pxor %xmm4,%xmm0
+ psrlq $1,%xmm0
+ pxor %xmm1,%xmm0
+ pshufd $78,%xmm5,%xmm3
+ pshufd $78,%xmm0,%xmm4
+ pxor %xmm5,%xmm3
+ movdqu %xmm5,48(%rdi)
+ pxor %xmm0,%xmm4
+ movdqu %xmm0,64(%rdi)
+.byte 102,15,58,15,227,8
+ movdqu %xmm4,80(%rdi)
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size gcm_init_clmul,.-gcm_init_clmul
+.globl gcm_gmult_clmul
+.type gcm_gmult_clmul,@function
+.align 16
+gcm_gmult_clmul:
+.cfi_startproc
+.L_gmult_clmul:
+ movdqu (%rdi),%xmm0
+ movdqa .Lbswap_mask(%rip),%xmm5
+ movdqu (%rsi),%xmm2
+ movdqu 32(%rsi),%xmm4
+.byte 102,15,56,0,197
+ movdqa %xmm0,%xmm1
+ pshufd $78,%xmm0,%xmm3
+ pxor %xmm0,%xmm3
+.byte 102,15,58,68,194,0
+.byte 102,15,58,68,202,17
+.byte 102,15,58,68,220,0
+ pxor %xmm0,%xmm3
+ pxor %xmm1,%xmm3
+
+ movdqa %xmm3,%xmm4
+ psrldq $8,%xmm3
+ pslldq $8,%xmm4
+ pxor %xmm3,%xmm1
+ pxor %xmm4,%xmm0
+
+ movdqa %xmm0,%xmm4
+ movdqa %xmm0,%xmm3
+ psllq $5,%xmm0
+ pxor %xmm0,%xmm3
+ psllq $1,%xmm0
+ pxor %xmm3,%xmm0
+ psllq $57,%xmm0
+ movdqa %xmm0,%xmm3
+ pslldq $8,%xmm0
+ psrldq $8,%xmm3
+ pxor %xmm4,%xmm0
+ pxor %xmm3,%xmm1
+
+
+ movdqa %xmm0,%xmm4
+ psrlq $1,%xmm0
+ pxor %xmm4,%xmm1
+ pxor %xmm0,%xmm4
+ psrlq $5,%xmm0
+ pxor %xmm4,%xmm0
+ psrlq $1,%xmm0
+ pxor %xmm1,%xmm0
+.byte 102,15,56,0,197
+ movdqu %xmm0,(%rdi)
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size gcm_gmult_clmul,.-gcm_gmult_clmul
+.globl gcm_ghash_clmul
+.type gcm_ghash_clmul,@function
+.align 32
+gcm_ghash_clmul:
+.cfi_startproc
+.L_ghash_clmul:
+ movdqa .Lbswap_mask(%rip),%xmm10
+
+ movdqu (%rdi),%xmm0
+ movdqu (%rsi),%xmm2
+ movdqu 32(%rsi),%xmm7
+.byte 102,65,15,56,0,194
+
+ subq $0x10,%rcx
+ jz .Lodd_tail
+
+ movdqu 16(%rsi),%xmm6
+ movl OPENSSL_ia32cap_P+4(%rip),%eax
+ cmpq $0x30,%rcx
+ jb .Lskip4x
+
+ andl $71303168,%eax
+ cmpl $4194304,%eax
+ je .Lskip4x
+
+ subq $0x30,%rcx
+ movq $0xA040608020C0E000,%rax
+ movdqu 48(%rsi),%xmm14
+ movdqu 64(%rsi),%xmm15
+
+
+
+
+ movdqu 48(%rdx),%xmm3
+ movdqu 32(%rdx),%xmm11
+.byte 102,65,15,56,0,218
+.byte 102,69,15,56,0,218
+ movdqa %xmm3,%xmm5
+ pshufd $78,%xmm3,%xmm4
+ pxor %xmm3,%xmm4
+.byte 102,15,58,68,218,0
+.byte 102,15,58,68,234,17
+.byte 102,15,58,68,231,0
+
+ movdqa %xmm11,%xmm13
+ pshufd $78,%xmm11,%xmm12
+ pxor %xmm11,%xmm12
+.byte 102,68,15,58,68,222,0
+.byte 102,68,15,58,68,238,17
+.byte 102,68,15,58,68,231,16
+ xorps %xmm11,%xmm3
+ xorps %xmm13,%xmm5
+ movups 80(%rsi),%xmm7
+ xorps %xmm12,%xmm4
+
+ movdqu 16(%rdx),%xmm11
+ movdqu 0(%rdx),%xmm8
+.byte 102,69,15,56,0,218
+.byte 102,69,15,56,0,194
+ movdqa %xmm11,%xmm13
+ pshufd $78,%xmm11,%xmm12
+ pxor %xmm8,%xmm0
+ pxor %xmm11,%xmm12
+.byte 102,69,15,58,68,222,0
+ movdqa %xmm0,%xmm1
+ pshufd $78,%xmm0,%xmm8
+ pxor %xmm0,%xmm8
+.byte 102,69,15,58,68,238,17
+.byte 102,68,15,58,68,231,0
+ xorps %xmm11,%xmm3
+ xorps %xmm13,%xmm5
+
+ leaq 64(%rdx),%rdx
+ subq $0x40,%rcx
+ jc .Ltail4x
+
+ jmp .Lmod4_loop
+.align 32
+.Lmod4_loop:
+.byte 102,65,15,58,68,199,0
+ xorps %xmm12,%xmm4
+ movdqu 48(%rdx),%xmm11
+.byte 102,69,15,56,0,218
+.byte 102,65,15,58,68,207,17
+ xorps %xmm3,%xmm0
+ movdqu 32(%rdx),%xmm3
+ movdqa %xmm11,%xmm13
+.byte 102,68,15,58,68,199,16
+ pshufd $78,%xmm11,%xmm12
+ xorps %xmm5,%xmm1
+ pxor %xmm11,%xmm12
+.byte 102,65,15,56,0,218
+ movups 32(%rsi),%xmm7
+ xorps %xmm4,%xmm8
+.byte 102,68,15,58,68,218,0
+ pshufd $78,%xmm3,%xmm4
+
+ pxor %xmm0,%xmm8
+ movdqa %xmm3,%xmm5
+ pxor %xmm1,%xmm8
+ pxor %xmm3,%xmm4
+ movdqa %xmm8,%xmm9
+.byte 102,68,15,58,68,234,17
+ pslldq $8,%xmm8
+ psrldq $8,%xmm9
+ pxor %xmm8,%xmm0
+ movdqa .L7_mask(%rip),%xmm8
+ pxor %xmm9,%xmm1
+.byte 102,76,15,110,200
+
+ pand %xmm0,%xmm8
+.byte 102,69,15,56,0,200
+ pxor %xmm0,%xmm9
+.byte 102,68,15,58,68,231,0
+ psllq $57,%xmm9
+ movdqa %xmm9,%xmm8
+ pslldq $8,%xmm9
+.byte 102,15,58,68,222,0
+ psrldq $8,%xmm8
+ pxor %xmm9,%xmm0
+ pxor %xmm8,%xmm1
+ movdqu 0(%rdx),%xmm8
+
+ movdqa %xmm0,%xmm9
+ psrlq $1,%xmm0
+.byte 102,15,58,68,238,17
+ xorps %xmm11,%xmm3
+ movdqu 16(%rdx),%xmm11
+.byte 102,69,15,56,0,218
+.byte 102,15,58,68,231,16
+ xorps %xmm13,%xmm5
+ movups 80(%rsi),%xmm7
+.byte 102,69,15,56,0,194
+ pxor %xmm9,%xmm1
+ pxor %xmm0,%xmm9
+ psrlq $5,%xmm0
+
+ movdqa %xmm11,%xmm13
+ pxor %xmm12,%xmm4
+ pshufd $78,%xmm11,%xmm12
+ pxor %xmm9,%xmm0
+ pxor %xmm8,%xmm1
+ pxor %xmm11,%xmm12
+.byte 102,69,15,58,68,222,0
+ psrlq $1,%xmm0
+ pxor %xmm1,%xmm0
+ movdqa %xmm0,%xmm1
+.byte 102,69,15,58,68,238,17
+ xorps %xmm11,%xmm3
+ pshufd $78,%xmm0,%xmm8
+ pxor %xmm0,%xmm8
+
+.byte 102,68,15,58,68,231,0
+ xorps %xmm13,%xmm5
+
+ leaq 64(%rdx),%rdx
+ subq $0x40,%rcx
+ jnc .Lmod4_loop
+
+.Ltail4x:
+.byte 102,65,15,58,68,199,0
+.byte 102,65,15,58,68,207,17
+.byte 102,68,15,58,68,199,16
+ xorps %xmm12,%xmm4
+ xorps %xmm3,%xmm0
+ xorps %xmm5,%xmm1
+ pxor %xmm0,%xmm1
+ pxor %xmm4,%xmm8
+
+ pxor %xmm1,%xmm8
+ pxor %xmm0,%xmm1
+
+ movdqa %xmm8,%xmm9
+ psrldq $8,%xmm8
+ pslldq $8,%xmm9
+ pxor %xmm8,%xmm1
+ pxor %xmm9,%xmm0
+
+ movdqa %xmm0,%xmm4
+ movdqa %xmm0,%xmm3
+ psllq $5,%xmm0
+ pxor %xmm0,%xmm3
+ psllq $1,%xmm0
+ pxor %xmm3,%xmm0
+ psllq $57,%xmm0
+ movdqa %xmm0,%xmm3
+ pslldq $8,%xmm0
+ psrldq $8,%xmm3
+ pxor %xmm4,%xmm0
+ pxor %xmm3,%xmm1
+
+
+ movdqa %xmm0,%xmm4
+ psrlq $1,%xmm0
+ pxor %xmm4,%xmm1
+ pxor %xmm0,%xmm4
+ psrlq $5,%xmm0
+ pxor %xmm4,%xmm0
+ psrlq $1,%xmm0
+ pxor %xmm1,%xmm0
+ addq $0x40,%rcx
+ jz .Ldone
+ movdqu 32(%rsi),%xmm7
+ subq $0x10,%rcx
+ jz .Lodd_tail
+.Lskip4x:
+
+
+
+
+
+ movdqu (%rdx),%xmm8
+ movdqu 16(%rdx),%xmm3
+.byte 102,69,15,56,0,194
+.byte 102,65,15,56,0,218
+ pxor %xmm8,%xmm0
+
+ movdqa %xmm3,%xmm5
+ pshufd $78,%xmm3,%xmm4
+ pxor %xmm3,%xmm4
+.byte 102,15,58,68,218,0
+.byte 102,15,58,68,234,17
+.byte 102,15,58,68,231,0
+
+ leaq 32(%rdx),%rdx
+ nop
+ subq $0x20,%rcx
+ jbe .Leven_tail
+ nop
+ jmp .Lmod_loop
+
+.align 32
+.Lmod_loop:
+ movdqa %xmm0,%xmm1
+ movdqa %xmm4,%xmm8
+ pshufd $78,%xmm0,%xmm4
+ pxor %xmm0,%xmm4
+
+.byte 102,15,58,68,198,0
+.byte 102,15,58,68,206,17
+.byte 102,15,58,68,231,16
+
+ pxor %xmm3,%xmm0
+ pxor %xmm5,%xmm1
+ movdqu (%rdx),%xmm9
+ pxor %xmm0,%xmm8
+.byte 102,69,15,56,0,202
+ movdqu 16(%rdx),%xmm3
+
+ pxor %xmm1,%xmm8
+ pxor %xmm9,%xmm1
+ pxor %xmm8,%xmm4
+.byte 102,65,15,56,0,218
+ movdqa %xmm4,%xmm8
+ psrldq $8,%xmm8
+ pslldq $8,%xmm4
+ pxor %xmm8,%xmm1
+ pxor %xmm4,%xmm0
+
+ movdqa %xmm3,%xmm5
+
+ movdqa %xmm0,%xmm9
+ movdqa %xmm0,%xmm8
+ psllq $5,%xmm0
+ pxor %xmm0,%xmm8
+.byte 102,15,58,68,218,0
+ psllq $1,%xmm0
+ pxor %xmm8,%xmm0
+ psllq $57,%xmm0
+ movdqa %xmm0,%xmm8
+ pslldq $8,%xmm0
+ psrldq $8,%xmm8
+ pxor %xmm9,%xmm0
+ pshufd $78,%xmm5,%xmm4
+ pxor %xmm8,%xmm1
+ pxor %xmm5,%xmm4
+
+ movdqa %xmm0,%xmm9
+ psrlq $1,%xmm0
+.byte 102,15,58,68,234,17
+ pxor %xmm9,%xmm1
+ pxor %xmm0,%xmm9
+ psrlq $5,%xmm0
+ pxor %xmm9,%xmm0
+ leaq 32(%rdx),%rdx
+ psrlq $1,%xmm0
+.byte 102,15,58,68,231,0
+ pxor %xmm1,%xmm0
+
+ subq $0x20,%rcx
+ ja .Lmod_loop
+
+.Leven_tail:
+ movdqa %xmm0,%xmm1
+ movdqa %xmm4,%xmm8
+ pshufd $78,%xmm0,%xmm4
+ pxor %xmm0,%xmm4
+
+.byte 102,15,58,68,198,0
+.byte 102,15,58,68,206,17
+.byte 102,15,58,68,231,16
+
+ pxor %xmm3,%xmm0
+ pxor %xmm5,%xmm1
+ pxor %xmm0,%xmm8
+ pxor %xmm1,%xmm8
+ pxor %xmm8,%xmm4
+ movdqa %xmm4,%xmm8
+ psrldq $8,%xmm8
+ pslldq $8,%xmm4
+ pxor %xmm8,%xmm1
+ pxor %xmm4,%xmm0
+
+ movdqa %xmm0,%xmm4
+ movdqa %xmm0,%xmm3
+ psllq $5,%xmm0
+ pxor %xmm0,%xmm3
+ psllq $1,%xmm0
+ pxor %xmm3,%xmm0
+ psllq $57,%xmm0
+ movdqa %xmm0,%xmm3
+ pslldq $8,%xmm0
+ psrldq $8,%xmm3
+ pxor %xmm4,%xmm0
+ pxor %xmm3,%xmm1
+
+
+ movdqa %xmm0,%xmm4
+ psrlq $1,%xmm0
+ pxor %xmm4,%xmm1
+ pxor %xmm0,%xmm4
+ psrlq $5,%xmm0
+ pxor %xmm4,%xmm0
+ psrlq $1,%xmm0
+ pxor %xmm1,%xmm0
+ testq %rcx,%rcx
+ jnz .Ldone
+
+.Lodd_tail:
+ movdqu (%rdx),%xmm8
+.byte 102,69,15,56,0,194
+ pxor %xmm8,%xmm0
+ movdqa %xmm0,%xmm1
+ pshufd $78,%xmm0,%xmm3
+ pxor %xmm0,%xmm3
+.byte 102,15,58,68,194,0
+.byte 102,15,58,68,202,17
+.byte 102,15,58,68,223,0
+ pxor %xmm0,%xmm3
+ pxor %xmm1,%xmm3
+
+ movdqa %xmm3,%xmm4
+ psrldq $8,%xmm3
+ pslldq $8,%xmm4
+ pxor %xmm3,%xmm1
+ pxor %xmm4,%xmm0
+
+ movdqa %xmm0,%xmm4
+ movdqa %xmm0,%xmm3
+ psllq $5,%xmm0
+ pxor %xmm0,%xmm3
+ psllq $1,%xmm0
+ pxor %xmm3,%xmm0
+ psllq $57,%xmm0
+ movdqa %xmm0,%xmm3
+ pslldq $8,%xmm0
+ psrldq $8,%xmm3
+ pxor %xmm4,%xmm0
+ pxor %xmm3,%xmm1
+
+
+ movdqa %xmm0,%xmm4
+ psrlq $1,%xmm0
+ pxor %xmm4,%xmm1
+ pxor %xmm0,%xmm4
+ psrlq $5,%xmm0
+ pxor %xmm4,%xmm0
+ psrlq $1,%xmm0
+ pxor %xmm1,%xmm0
+.Ldone:
+.byte 102,65,15,56,0,194
+ movdqu %xmm0,(%rdi)
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size gcm_ghash_clmul,.-gcm_ghash_clmul
+.globl gcm_init_avx
+.type gcm_init_avx,@function
+.align 32
+gcm_init_avx:
+.cfi_startproc
+ jmp .L_init_clmul
+.cfi_endproc
+.size gcm_init_avx,.-gcm_init_avx
+.globl gcm_gmult_avx
+.type gcm_gmult_avx,@function
+.align 32
+gcm_gmult_avx:
+.cfi_startproc
+ jmp .L_gmult_clmul
+.cfi_endproc
+.size gcm_gmult_avx,.-gcm_gmult_avx
+.globl gcm_ghash_avx
+.type gcm_ghash_avx,@function
+.align 32
+gcm_ghash_avx:
+.cfi_startproc
+ jmp .L_ghash_clmul
+.cfi_endproc
+.size gcm_ghash_avx,.-gcm_ghash_avx
+.align 64
+.Lbswap_mask:
+.byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
+.L0x1c2_polynomial:
+.byte 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0xc2
+.L7_mask:
+.long 7,0,7,0
+.L7_mask_poly:
+.long 7,0,450,0
+.align 64
+.type .Lrem_4bit,@object
+.Lrem_4bit:
+.long 0,0,0,471859200,0,943718400,0,610271232
+.long 0,1887436800,0,1822425088,0,1220542464,0,1423966208
+.long 0,3774873600,0,4246732800,0,3644850176,0,3311403008
+.long 0,2441084928,0,2376073216,0,2847932416,0,3051356160
+.type .Lrem_8bit,@object
+.Lrem_8bit:
+.value 0x0000,0x01C2,0x0384,0x0246,0x0708,0x06CA,0x048C,0x054E
+.value 0x0E10,0x0FD2,0x0D94,0x0C56,0x0918,0x08DA,0x0A9C,0x0B5E
+.value 0x1C20,0x1DE2,0x1FA4,0x1E66,0x1B28,0x1AEA,0x18AC,0x196E
+.value 0x1230,0x13F2,0x11B4,0x1076,0x1538,0x14FA,0x16BC,0x177E
+.value 0x3840,0x3982,0x3BC4,0x3A06,0x3F48,0x3E8A,0x3CCC,0x3D0E
+.value 0x3650,0x3792,0x35D4,0x3416,0x3158,0x309A,0x32DC,0x331E
+.value 0x2460,0x25A2,0x27E4,0x2626,0x2368,0x22AA,0x20EC,0x212E
+.value 0x2A70,0x2BB2,0x29F4,0x2836,0x2D78,0x2CBA,0x2EFC,0x2F3E
+.value 0x7080,0x7142,0x7304,0x72C6,0x7788,0x764A,0x740C,0x75CE
+.value 0x7E90,0x7F52,0x7D14,0x7CD6,0x7998,0x785A,0x7A1C,0x7BDE
+.value 0x6CA0,0x6D62,0x6F24,0x6EE6,0x6BA8,0x6A6A,0x682C,0x69EE
+.value 0x62B0,0x6372,0x6134,0x60F6,0x65B8,0x647A,0x663C,0x67FE
+.value 0x48C0,0x4902,0x4B44,0x4A86,0x4FC8,0x4E0A,0x4C4C,0x4D8E
+.value 0x46D0,0x4712,0x4554,0x4496,0x41D8,0x401A,0x425C,0x439E
+.value 0x54E0,0x5522,0x5764,0x56A6,0x53E8,0x522A,0x506C,0x51AE
+.value 0x5AF0,0x5B32,0x5974,0x58B6,0x5DF8,0x5C3A,0x5E7C,0x5FBE
+.value 0xE100,0xE0C2,0xE284,0xE346,0xE608,0xE7CA,0xE58C,0xE44E
+.value 0xEF10,0xEED2,0xEC94,0xED56,0xE818,0xE9DA,0xEB9C,0xEA5E
+.value 0xFD20,0xFCE2,0xFEA4,0xFF66,0xFA28,0xFBEA,0xF9AC,0xF86E
+.value 0xF330,0xF2F2,0xF0B4,0xF176,0xF438,0xF5FA,0xF7BC,0xF67E
+.value 0xD940,0xD882,0xDAC4,0xDB06,0xDE48,0xDF8A,0xDDCC,0xDC0E
+.value 0xD750,0xD692,0xD4D4,0xD516,0xD058,0xD19A,0xD3DC,0xD21E
+.value 0xC560,0xC4A2,0xC6E4,0xC726,0xC268,0xC3AA,0xC1EC,0xC02E
+.value 0xCB70,0xCAB2,0xC8F4,0xC936,0xCC78,0xCDBA,0xCFFC,0xCE3E
+.value 0x9180,0x9042,0x9204,0x93C6,0x9688,0x974A,0x950C,0x94CE
+.value 0x9F90,0x9E52,0x9C14,0x9DD6,0x9898,0x995A,0x9B1C,0x9ADE
+.value 0x8DA0,0x8C62,0x8E24,0x8FE6,0x8AA8,0x8B6A,0x892C,0x88EE
+.value 0x83B0,0x8272,0x8034,0x81F6,0x84B8,0x857A,0x873C,0x86FE
+.value 0xA9C0,0xA802,0xAA44,0xAB86,0xAEC8,0xAF0A,0xAD4C,0xAC8E
+.value 0xA7D0,0xA612,0xA454,0xA596,0xA0D8,0xA11A,0xA35C,0xA29E
+.value 0xB5E0,0xB422,0xB664,0xB7A6,0xB2E8,0xB32A,0xB16C,0xB0AE
+.value 0xBBF0,0xBA32,0xB874,0xB9B6,0xBCF8,0xBD3A,0xBF7C,0xBEBE
+
+.byte 71,72,65,83,72,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.align 64
diff --git a/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha1-mb-x86_64.S b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha1-mb-x86_64.S
new file mode 100644
index 0000000000..4572bc7227
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha1-mb-x86_64.S
@@ -0,0 +1,2962 @@
+# WARNING: do not edit!
+# Generated from openssl/crypto/sha/asm/sha1-mb-x86_64.pl
+#
+# Copyright 2013-2020 The OpenSSL Project Authors. All Rights Reserved.
+#
+# Licensed under the OpenSSL license (the "License"). You may not use
+# this file except in compliance with the License. You can obtain a copy
+# in the file LICENSE in the source distribution or at
+# https://www.openssl.org/source/license.html
+
+.text
+
+
+
+.globl sha1_multi_block
+.type sha1_multi_block,@function
+.align 32
+sha1_multi_block:
+.cfi_startproc
+ movq OPENSSL_ia32cap_P+4(%rip),%rcx
+ btq $61,%rcx
+ jc _shaext_shortcut
+ movq %rsp,%rax
+.cfi_def_cfa_register %rax
+ pushq %rbx
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_offset %rbx,-24
+ subq $288,%rsp
+ andq $-256,%rsp
+ movq %rax,272(%rsp)
+.cfi_escape 0x0f,0x06,0x77,0x90,0x02,0x06,0x23,0x08
+.Lbody:
+ leaq K_XX_XX(%rip),%rbp
+ leaq 256(%rsp),%rbx
+
+.Loop_grande:
+ movl %edx,280(%rsp)
+ xorl %edx,%edx
+ movq 0(%rsi),%r8
+ movl 8(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,0(%rbx)
+ cmovleq %rbp,%r8
+ movq 16(%rsi),%r9
+ movl 24(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,4(%rbx)
+ cmovleq %rbp,%r9
+ movq 32(%rsi),%r10
+ movl 40(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,8(%rbx)
+ cmovleq %rbp,%r10
+ movq 48(%rsi),%r11
+ movl 56(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,12(%rbx)
+ cmovleq %rbp,%r11
+ testl %edx,%edx
+ jz .Ldone
+
+ movdqu 0(%rdi),%xmm10
+ leaq 128(%rsp),%rax
+ movdqu 32(%rdi),%xmm11
+ movdqu 64(%rdi),%xmm12
+ movdqu 96(%rdi),%xmm13
+ movdqu 128(%rdi),%xmm14
+ movdqa 96(%rbp),%xmm5
+ movdqa -32(%rbp),%xmm15
+ jmp .Loop
+
+.align 32
+.Loop:
+ movd (%r8),%xmm0
+ leaq 64(%r8),%r8
+ movd (%r9),%xmm2
+ leaq 64(%r9),%r9
+ movd (%r10),%xmm3
+ leaq 64(%r10),%r10
+ movd (%r11),%xmm4
+ leaq 64(%r11),%r11
+ punpckldq %xmm3,%xmm0
+ movd -60(%r8),%xmm1
+ punpckldq %xmm4,%xmm2
+ movd -60(%r9),%xmm9
+ punpckldq %xmm2,%xmm0
+ movd -60(%r10),%xmm8
+.byte 102,15,56,0,197
+ movd -60(%r11),%xmm7
+ punpckldq %xmm8,%xmm1
+ movdqa %xmm10,%xmm8
+ paddd %xmm15,%xmm14
+ punpckldq %xmm7,%xmm9
+ movdqa %xmm11,%xmm7
+ movdqa %xmm11,%xmm6
+ pslld $5,%xmm8
+ pandn %xmm13,%xmm7
+ pand %xmm12,%xmm6
+ punpckldq %xmm9,%xmm1
+ movdqa %xmm10,%xmm9
+
+ movdqa %xmm0,0-128(%rax)
+ paddd %xmm0,%xmm14
+ movd -56(%r8),%xmm2
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+ movdqa %xmm11,%xmm7
+
+ por %xmm9,%xmm8
+ movd -56(%r9),%xmm9
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm14
+
+ psrld $2,%xmm11
+ paddd %xmm8,%xmm14
+.byte 102,15,56,0,205
+ movd -56(%r10),%xmm8
+ por %xmm7,%xmm11
+ movd -56(%r11),%xmm7
+ punpckldq %xmm8,%xmm2
+ movdqa %xmm14,%xmm8
+ paddd %xmm15,%xmm13
+ punpckldq %xmm7,%xmm9
+ movdqa %xmm10,%xmm7
+ movdqa %xmm10,%xmm6
+ pslld $5,%xmm8
+ pandn %xmm12,%xmm7
+ pand %xmm11,%xmm6
+ punpckldq %xmm9,%xmm2
+ movdqa %xmm14,%xmm9
+
+ movdqa %xmm1,16-128(%rax)
+ paddd %xmm1,%xmm13
+ movd -52(%r8),%xmm3
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+ movdqa %xmm10,%xmm7
+
+ por %xmm9,%xmm8
+ movd -52(%r9),%xmm9
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm13
+
+ psrld $2,%xmm10
+ paddd %xmm8,%xmm13
+.byte 102,15,56,0,213
+ movd -52(%r10),%xmm8
+ por %xmm7,%xmm10
+ movd -52(%r11),%xmm7
+ punpckldq %xmm8,%xmm3
+ movdqa %xmm13,%xmm8
+ paddd %xmm15,%xmm12
+ punpckldq %xmm7,%xmm9
+ movdqa %xmm14,%xmm7
+ movdqa %xmm14,%xmm6
+ pslld $5,%xmm8
+ pandn %xmm11,%xmm7
+ pand %xmm10,%xmm6
+ punpckldq %xmm9,%xmm3
+ movdqa %xmm13,%xmm9
+
+ movdqa %xmm2,32-128(%rax)
+ paddd %xmm2,%xmm12
+ movd -48(%r8),%xmm4
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+ movdqa %xmm14,%xmm7
+
+ por %xmm9,%xmm8
+ movd -48(%r9),%xmm9
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm12
+
+ psrld $2,%xmm14
+ paddd %xmm8,%xmm12
+.byte 102,15,56,0,221
+ movd -48(%r10),%xmm8
+ por %xmm7,%xmm14
+ movd -48(%r11),%xmm7
+ punpckldq %xmm8,%xmm4
+ movdqa %xmm12,%xmm8
+ paddd %xmm15,%xmm11
+ punpckldq %xmm7,%xmm9
+ movdqa %xmm13,%xmm7
+ movdqa %xmm13,%xmm6
+ pslld $5,%xmm8
+ pandn %xmm10,%xmm7
+ pand %xmm14,%xmm6
+ punpckldq %xmm9,%xmm4
+ movdqa %xmm12,%xmm9
+
+ movdqa %xmm3,48-128(%rax)
+ paddd %xmm3,%xmm11
+ movd -44(%r8),%xmm0
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+ movdqa %xmm13,%xmm7
+
+ por %xmm9,%xmm8
+ movd -44(%r9),%xmm9
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm11
+
+ psrld $2,%xmm13
+ paddd %xmm8,%xmm11
+.byte 102,15,56,0,229
+ movd -44(%r10),%xmm8
+ por %xmm7,%xmm13
+ movd -44(%r11),%xmm7
+ punpckldq %xmm8,%xmm0
+ movdqa %xmm11,%xmm8
+ paddd %xmm15,%xmm10
+ punpckldq %xmm7,%xmm9
+ movdqa %xmm12,%xmm7
+ movdqa %xmm12,%xmm6
+ pslld $5,%xmm8
+ pandn %xmm14,%xmm7
+ pand %xmm13,%xmm6
+ punpckldq %xmm9,%xmm0
+ movdqa %xmm11,%xmm9
+
+ movdqa %xmm4,64-128(%rax)
+ paddd %xmm4,%xmm10
+ movd -40(%r8),%xmm1
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+ movdqa %xmm12,%xmm7
+
+ por %xmm9,%xmm8
+ movd -40(%r9),%xmm9
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm10
+
+ psrld $2,%xmm12
+ paddd %xmm8,%xmm10
+.byte 102,15,56,0,197
+ movd -40(%r10),%xmm8
+ por %xmm7,%xmm12
+ movd -40(%r11),%xmm7
+ punpckldq %xmm8,%xmm1
+ movdqa %xmm10,%xmm8
+ paddd %xmm15,%xmm14
+ punpckldq %xmm7,%xmm9
+ movdqa %xmm11,%xmm7
+ movdqa %xmm11,%xmm6
+ pslld $5,%xmm8
+ pandn %xmm13,%xmm7
+ pand %xmm12,%xmm6
+ punpckldq %xmm9,%xmm1
+ movdqa %xmm10,%xmm9
+
+ movdqa %xmm0,80-128(%rax)
+ paddd %xmm0,%xmm14
+ movd -36(%r8),%xmm2
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+ movdqa %xmm11,%xmm7
+
+ por %xmm9,%xmm8
+ movd -36(%r9),%xmm9
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm14
+
+ psrld $2,%xmm11
+ paddd %xmm8,%xmm14
+.byte 102,15,56,0,205
+ movd -36(%r10),%xmm8
+ por %xmm7,%xmm11
+ movd -36(%r11),%xmm7
+ punpckldq %xmm8,%xmm2
+ movdqa %xmm14,%xmm8
+ paddd %xmm15,%xmm13
+ punpckldq %xmm7,%xmm9
+ movdqa %xmm10,%xmm7
+ movdqa %xmm10,%xmm6
+ pslld $5,%xmm8
+ pandn %xmm12,%xmm7
+ pand %xmm11,%xmm6
+ punpckldq %xmm9,%xmm2
+ movdqa %xmm14,%xmm9
+
+ movdqa %xmm1,96-128(%rax)
+ paddd %xmm1,%xmm13
+ movd -32(%r8),%xmm3
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+ movdqa %xmm10,%xmm7
+
+ por %xmm9,%xmm8
+ movd -32(%r9),%xmm9
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm13
+
+ psrld $2,%xmm10
+ paddd %xmm8,%xmm13
+.byte 102,15,56,0,213
+ movd -32(%r10),%xmm8
+ por %xmm7,%xmm10
+ movd -32(%r11),%xmm7
+ punpckldq %xmm8,%xmm3
+ movdqa %xmm13,%xmm8
+ paddd %xmm15,%xmm12
+ punpckldq %xmm7,%xmm9
+ movdqa %xmm14,%xmm7
+ movdqa %xmm14,%xmm6
+ pslld $5,%xmm8
+ pandn %xmm11,%xmm7
+ pand %xmm10,%xmm6
+ punpckldq %xmm9,%xmm3
+ movdqa %xmm13,%xmm9
+
+ movdqa %xmm2,112-128(%rax)
+ paddd %xmm2,%xmm12
+ movd -28(%r8),%xmm4
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+ movdqa %xmm14,%xmm7
+
+ por %xmm9,%xmm8
+ movd -28(%r9),%xmm9
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm12
+
+ psrld $2,%xmm14
+ paddd %xmm8,%xmm12
+.byte 102,15,56,0,221
+ movd -28(%r10),%xmm8
+ por %xmm7,%xmm14
+ movd -28(%r11),%xmm7
+ punpckldq %xmm8,%xmm4
+ movdqa %xmm12,%xmm8
+ paddd %xmm15,%xmm11
+ punpckldq %xmm7,%xmm9
+ movdqa %xmm13,%xmm7
+ movdqa %xmm13,%xmm6
+ pslld $5,%xmm8
+ pandn %xmm10,%xmm7
+ pand %xmm14,%xmm6
+ punpckldq %xmm9,%xmm4
+ movdqa %xmm12,%xmm9
+
+ movdqa %xmm3,128-128(%rax)
+ paddd %xmm3,%xmm11
+ movd -24(%r8),%xmm0
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+ movdqa %xmm13,%xmm7
+
+ por %xmm9,%xmm8
+ movd -24(%r9),%xmm9
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm11
+
+ psrld $2,%xmm13
+ paddd %xmm8,%xmm11
+.byte 102,15,56,0,229
+ movd -24(%r10),%xmm8
+ por %xmm7,%xmm13
+ movd -24(%r11),%xmm7
+ punpckldq %xmm8,%xmm0
+ movdqa %xmm11,%xmm8
+ paddd %xmm15,%xmm10
+ punpckldq %xmm7,%xmm9
+ movdqa %xmm12,%xmm7
+ movdqa %xmm12,%xmm6
+ pslld $5,%xmm8
+ pandn %xmm14,%xmm7
+ pand %xmm13,%xmm6
+ punpckldq %xmm9,%xmm0
+ movdqa %xmm11,%xmm9
+
+ movdqa %xmm4,144-128(%rax)
+ paddd %xmm4,%xmm10
+ movd -20(%r8),%xmm1
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+ movdqa %xmm12,%xmm7
+
+ por %xmm9,%xmm8
+ movd -20(%r9),%xmm9
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm10
+
+ psrld $2,%xmm12
+ paddd %xmm8,%xmm10
+.byte 102,15,56,0,197
+ movd -20(%r10),%xmm8
+ por %xmm7,%xmm12
+ movd -20(%r11),%xmm7
+ punpckldq %xmm8,%xmm1
+ movdqa %xmm10,%xmm8
+ paddd %xmm15,%xmm14
+ punpckldq %xmm7,%xmm9
+ movdqa %xmm11,%xmm7
+ movdqa %xmm11,%xmm6
+ pslld $5,%xmm8
+ pandn %xmm13,%xmm7
+ pand %xmm12,%xmm6
+ punpckldq %xmm9,%xmm1
+ movdqa %xmm10,%xmm9
+
+ movdqa %xmm0,160-128(%rax)
+ paddd %xmm0,%xmm14
+ movd -16(%r8),%xmm2
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+ movdqa %xmm11,%xmm7
+
+ por %xmm9,%xmm8
+ movd -16(%r9),%xmm9
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm14
+
+ psrld $2,%xmm11
+ paddd %xmm8,%xmm14
+.byte 102,15,56,0,205
+ movd -16(%r10),%xmm8
+ por %xmm7,%xmm11
+ movd -16(%r11),%xmm7
+ punpckldq %xmm8,%xmm2
+ movdqa %xmm14,%xmm8
+ paddd %xmm15,%xmm13
+ punpckldq %xmm7,%xmm9
+ movdqa %xmm10,%xmm7
+ movdqa %xmm10,%xmm6
+ pslld $5,%xmm8
+ pandn %xmm12,%xmm7
+ pand %xmm11,%xmm6
+ punpckldq %xmm9,%xmm2
+ movdqa %xmm14,%xmm9
+
+ movdqa %xmm1,176-128(%rax)
+ paddd %xmm1,%xmm13
+ movd -12(%r8),%xmm3
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+ movdqa %xmm10,%xmm7
+
+ por %xmm9,%xmm8
+ movd -12(%r9),%xmm9
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm13
+
+ psrld $2,%xmm10
+ paddd %xmm8,%xmm13
+.byte 102,15,56,0,213
+ movd -12(%r10),%xmm8
+ por %xmm7,%xmm10
+ movd -12(%r11),%xmm7
+ punpckldq %xmm8,%xmm3
+ movdqa %xmm13,%xmm8
+ paddd %xmm15,%xmm12
+ punpckldq %xmm7,%xmm9
+ movdqa %xmm14,%xmm7
+ movdqa %xmm14,%xmm6
+ pslld $5,%xmm8
+ pandn %xmm11,%xmm7
+ pand %xmm10,%xmm6
+ punpckldq %xmm9,%xmm3
+ movdqa %xmm13,%xmm9
+
+ movdqa %xmm2,192-128(%rax)
+ paddd %xmm2,%xmm12
+ movd -8(%r8),%xmm4
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+ movdqa %xmm14,%xmm7
+
+ por %xmm9,%xmm8
+ movd -8(%r9),%xmm9
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm12
+
+ psrld $2,%xmm14
+ paddd %xmm8,%xmm12
+.byte 102,15,56,0,221
+ movd -8(%r10),%xmm8
+ por %xmm7,%xmm14
+ movd -8(%r11),%xmm7
+ punpckldq %xmm8,%xmm4
+ movdqa %xmm12,%xmm8
+ paddd %xmm15,%xmm11
+ punpckldq %xmm7,%xmm9
+ movdqa %xmm13,%xmm7
+ movdqa %xmm13,%xmm6
+ pslld $5,%xmm8
+ pandn %xmm10,%xmm7
+ pand %xmm14,%xmm6
+ punpckldq %xmm9,%xmm4
+ movdqa %xmm12,%xmm9
+
+ movdqa %xmm3,208-128(%rax)
+ paddd %xmm3,%xmm11
+ movd -4(%r8),%xmm0
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+ movdqa %xmm13,%xmm7
+
+ por %xmm9,%xmm8
+ movd -4(%r9),%xmm9
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm11
+
+ psrld $2,%xmm13
+ paddd %xmm8,%xmm11
+.byte 102,15,56,0,229
+ movd -4(%r10),%xmm8
+ por %xmm7,%xmm13
+ movdqa 0-128(%rax),%xmm1
+ movd -4(%r11),%xmm7
+ punpckldq %xmm8,%xmm0
+ movdqa %xmm11,%xmm8
+ paddd %xmm15,%xmm10
+ punpckldq %xmm7,%xmm9
+ movdqa %xmm12,%xmm7
+ movdqa %xmm12,%xmm6
+ pslld $5,%xmm8
+ prefetcht0 63(%r8)
+ pandn %xmm14,%xmm7
+ pand %xmm13,%xmm6
+ punpckldq %xmm9,%xmm0
+ movdqa %xmm11,%xmm9
+
+ movdqa %xmm4,224-128(%rax)
+ paddd %xmm4,%xmm10
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+ movdqa %xmm12,%xmm7
+ prefetcht0 63(%r9)
+
+ por %xmm9,%xmm8
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm10
+ prefetcht0 63(%r10)
+
+ psrld $2,%xmm12
+ paddd %xmm8,%xmm10
+.byte 102,15,56,0,197
+ prefetcht0 63(%r11)
+ por %xmm7,%xmm12
+ movdqa 16-128(%rax),%xmm2
+ pxor %xmm3,%xmm1
+ movdqa 32-128(%rax),%xmm3
+
+ movdqa %xmm10,%xmm8
+ pxor 128-128(%rax),%xmm1
+ paddd %xmm15,%xmm14
+ movdqa %xmm11,%xmm7
+ pslld $5,%xmm8
+ pxor %xmm3,%xmm1
+ movdqa %xmm11,%xmm6
+ pandn %xmm13,%xmm7
+ movdqa %xmm1,%xmm5
+ pand %xmm12,%xmm6
+ movdqa %xmm10,%xmm9
+ psrld $31,%xmm5
+ paddd %xmm1,%xmm1
+
+ movdqa %xmm0,240-128(%rax)
+ paddd %xmm0,%xmm14
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+
+ movdqa %xmm11,%xmm7
+ por %xmm9,%xmm8
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm14
+
+ psrld $2,%xmm11
+ paddd %xmm8,%xmm14
+ por %xmm5,%xmm1
+ por %xmm7,%xmm11
+ pxor %xmm4,%xmm2
+ movdqa 48-128(%rax),%xmm4
+
+ movdqa %xmm14,%xmm8
+ pxor 144-128(%rax),%xmm2
+ paddd %xmm15,%xmm13
+ movdqa %xmm10,%xmm7
+ pslld $5,%xmm8
+ pxor %xmm4,%xmm2
+ movdqa %xmm10,%xmm6
+ pandn %xmm12,%xmm7
+ movdqa %xmm2,%xmm5
+ pand %xmm11,%xmm6
+ movdqa %xmm14,%xmm9
+ psrld $31,%xmm5
+ paddd %xmm2,%xmm2
+
+ movdqa %xmm1,0-128(%rax)
+ paddd %xmm1,%xmm13
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+
+ movdqa %xmm10,%xmm7
+ por %xmm9,%xmm8
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm13
+
+ psrld $2,%xmm10
+ paddd %xmm8,%xmm13
+ por %xmm5,%xmm2
+ por %xmm7,%xmm10
+ pxor %xmm0,%xmm3
+ movdqa 64-128(%rax),%xmm0
+
+ movdqa %xmm13,%xmm8
+ pxor 160-128(%rax),%xmm3
+ paddd %xmm15,%xmm12
+ movdqa %xmm14,%xmm7
+ pslld $5,%xmm8
+ pxor %xmm0,%xmm3
+ movdqa %xmm14,%xmm6
+ pandn %xmm11,%xmm7
+ movdqa %xmm3,%xmm5
+ pand %xmm10,%xmm6
+ movdqa %xmm13,%xmm9
+ psrld $31,%xmm5
+ paddd %xmm3,%xmm3
+
+ movdqa %xmm2,16-128(%rax)
+ paddd %xmm2,%xmm12
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+
+ movdqa %xmm14,%xmm7
+ por %xmm9,%xmm8
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm12
+
+ psrld $2,%xmm14
+ paddd %xmm8,%xmm12
+ por %xmm5,%xmm3
+ por %xmm7,%xmm14
+ pxor %xmm1,%xmm4
+ movdqa 80-128(%rax),%xmm1
+
+ movdqa %xmm12,%xmm8
+ pxor 176-128(%rax),%xmm4
+ paddd %xmm15,%xmm11
+ movdqa %xmm13,%xmm7
+ pslld $5,%xmm8
+ pxor %xmm1,%xmm4
+ movdqa %xmm13,%xmm6
+ pandn %xmm10,%xmm7
+ movdqa %xmm4,%xmm5
+ pand %xmm14,%xmm6
+ movdqa %xmm12,%xmm9
+ psrld $31,%xmm5
+ paddd %xmm4,%xmm4
+
+ movdqa %xmm3,32-128(%rax)
+ paddd %xmm3,%xmm11
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+
+ movdqa %xmm13,%xmm7
+ por %xmm9,%xmm8
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm11
+
+ psrld $2,%xmm13
+ paddd %xmm8,%xmm11
+ por %xmm5,%xmm4
+ por %xmm7,%xmm13
+ pxor %xmm2,%xmm0
+ movdqa 96-128(%rax),%xmm2
+
+ movdqa %xmm11,%xmm8
+ pxor 192-128(%rax),%xmm0
+ paddd %xmm15,%xmm10
+ movdqa %xmm12,%xmm7
+ pslld $5,%xmm8
+ pxor %xmm2,%xmm0
+ movdqa %xmm12,%xmm6
+ pandn %xmm14,%xmm7
+ movdqa %xmm0,%xmm5
+ pand %xmm13,%xmm6
+ movdqa %xmm11,%xmm9
+ psrld $31,%xmm5
+ paddd %xmm0,%xmm0
+
+ movdqa %xmm4,48-128(%rax)
+ paddd %xmm4,%xmm10
+ psrld $27,%xmm9
+ pxor %xmm7,%xmm6
+
+ movdqa %xmm12,%xmm7
+ por %xmm9,%xmm8
+ pslld $30,%xmm7
+ paddd %xmm6,%xmm10
+
+ psrld $2,%xmm12
+ paddd %xmm8,%xmm10
+ por %xmm5,%xmm0
+ por %xmm7,%xmm12
+ movdqa 0(%rbp),%xmm15
+ pxor %xmm3,%xmm1
+ movdqa 112-128(%rax),%xmm3
+
+ movdqa %xmm10,%xmm8
+ movdqa %xmm13,%xmm6
+ pxor 208-128(%rax),%xmm1
+ paddd %xmm15,%xmm14
+ pslld $5,%xmm8
+ pxor %xmm11,%xmm6
+
+ movdqa %xmm10,%xmm9
+ movdqa %xmm0,64-128(%rax)
+ paddd %xmm0,%xmm14
+ pxor %xmm3,%xmm1
+ psrld $27,%xmm9
+ pxor %xmm12,%xmm6
+ movdqa %xmm11,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm1,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm14
+ paddd %xmm1,%xmm1
+
+ psrld $2,%xmm11
+ paddd %xmm8,%xmm14
+ por %xmm5,%xmm1
+ por %xmm7,%xmm11
+ pxor %xmm4,%xmm2
+ movdqa 128-128(%rax),%xmm4
+
+ movdqa %xmm14,%xmm8
+ movdqa %xmm12,%xmm6
+ pxor 224-128(%rax),%xmm2
+ paddd %xmm15,%xmm13
+ pslld $5,%xmm8
+ pxor %xmm10,%xmm6
+
+ movdqa %xmm14,%xmm9
+ movdqa %xmm1,80-128(%rax)
+ paddd %xmm1,%xmm13
+ pxor %xmm4,%xmm2
+ psrld $27,%xmm9
+ pxor %xmm11,%xmm6
+ movdqa %xmm10,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm2,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm13
+ paddd %xmm2,%xmm2
+
+ psrld $2,%xmm10
+ paddd %xmm8,%xmm13
+ por %xmm5,%xmm2
+ por %xmm7,%xmm10
+ pxor %xmm0,%xmm3
+ movdqa 144-128(%rax),%xmm0
+
+ movdqa %xmm13,%xmm8
+ movdqa %xmm11,%xmm6
+ pxor 240-128(%rax),%xmm3
+ paddd %xmm15,%xmm12
+ pslld $5,%xmm8
+ pxor %xmm14,%xmm6
+
+ movdqa %xmm13,%xmm9
+ movdqa %xmm2,96-128(%rax)
+ paddd %xmm2,%xmm12
+ pxor %xmm0,%xmm3
+ psrld $27,%xmm9
+ pxor %xmm10,%xmm6
+ movdqa %xmm14,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm3,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm12
+ paddd %xmm3,%xmm3
+
+ psrld $2,%xmm14
+ paddd %xmm8,%xmm12
+ por %xmm5,%xmm3
+ por %xmm7,%xmm14
+ pxor %xmm1,%xmm4
+ movdqa 160-128(%rax),%xmm1
+
+ movdqa %xmm12,%xmm8
+ movdqa %xmm10,%xmm6
+ pxor 0-128(%rax),%xmm4
+ paddd %xmm15,%xmm11
+ pslld $5,%xmm8
+ pxor %xmm13,%xmm6
+
+ movdqa %xmm12,%xmm9
+ movdqa %xmm3,112-128(%rax)
+ paddd %xmm3,%xmm11
+ pxor %xmm1,%xmm4
+ psrld $27,%xmm9
+ pxor %xmm14,%xmm6
+ movdqa %xmm13,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm4,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm11
+ paddd %xmm4,%xmm4
+
+ psrld $2,%xmm13
+ paddd %xmm8,%xmm11
+ por %xmm5,%xmm4
+ por %xmm7,%xmm13
+ pxor %xmm2,%xmm0
+ movdqa 176-128(%rax),%xmm2
+
+ movdqa %xmm11,%xmm8
+ movdqa %xmm14,%xmm6
+ pxor 16-128(%rax),%xmm0
+ paddd %xmm15,%xmm10
+ pslld $5,%xmm8
+ pxor %xmm12,%xmm6
+
+ movdqa %xmm11,%xmm9
+ movdqa %xmm4,128-128(%rax)
+ paddd %xmm4,%xmm10
+ pxor %xmm2,%xmm0
+ psrld $27,%xmm9
+ pxor %xmm13,%xmm6
+ movdqa %xmm12,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm0,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm10
+ paddd %xmm0,%xmm0
+
+ psrld $2,%xmm12
+ paddd %xmm8,%xmm10
+ por %xmm5,%xmm0
+ por %xmm7,%xmm12
+ pxor %xmm3,%xmm1
+ movdqa 192-128(%rax),%xmm3
+
+ movdqa %xmm10,%xmm8
+ movdqa %xmm13,%xmm6
+ pxor 32-128(%rax),%xmm1
+ paddd %xmm15,%xmm14
+ pslld $5,%xmm8
+ pxor %xmm11,%xmm6
+
+ movdqa %xmm10,%xmm9
+ movdqa %xmm0,144-128(%rax)
+ paddd %xmm0,%xmm14
+ pxor %xmm3,%xmm1
+ psrld $27,%xmm9
+ pxor %xmm12,%xmm6
+ movdqa %xmm11,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm1,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm14
+ paddd %xmm1,%xmm1
+
+ psrld $2,%xmm11
+ paddd %xmm8,%xmm14
+ por %xmm5,%xmm1
+ por %xmm7,%xmm11
+ pxor %xmm4,%xmm2
+ movdqa 208-128(%rax),%xmm4
+
+ movdqa %xmm14,%xmm8
+ movdqa %xmm12,%xmm6
+ pxor 48-128(%rax),%xmm2
+ paddd %xmm15,%xmm13
+ pslld $5,%xmm8
+ pxor %xmm10,%xmm6
+
+ movdqa %xmm14,%xmm9
+ movdqa %xmm1,160-128(%rax)
+ paddd %xmm1,%xmm13
+ pxor %xmm4,%xmm2
+ psrld $27,%xmm9
+ pxor %xmm11,%xmm6
+ movdqa %xmm10,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm2,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm13
+ paddd %xmm2,%xmm2
+
+ psrld $2,%xmm10
+ paddd %xmm8,%xmm13
+ por %xmm5,%xmm2
+ por %xmm7,%xmm10
+ pxor %xmm0,%xmm3
+ movdqa 224-128(%rax),%xmm0
+
+ movdqa %xmm13,%xmm8
+ movdqa %xmm11,%xmm6
+ pxor 64-128(%rax),%xmm3
+ paddd %xmm15,%xmm12
+ pslld $5,%xmm8
+ pxor %xmm14,%xmm6
+
+ movdqa %xmm13,%xmm9
+ movdqa %xmm2,176-128(%rax)
+ paddd %xmm2,%xmm12
+ pxor %xmm0,%xmm3
+ psrld $27,%xmm9
+ pxor %xmm10,%xmm6
+ movdqa %xmm14,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm3,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm12
+ paddd %xmm3,%xmm3
+
+ psrld $2,%xmm14
+ paddd %xmm8,%xmm12
+ por %xmm5,%xmm3
+ por %xmm7,%xmm14
+ pxor %xmm1,%xmm4
+ movdqa 240-128(%rax),%xmm1
+
+ movdqa %xmm12,%xmm8
+ movdqa %xmm10,%xmm6
+ pxor 80-128(%rax),%xmm4
+ paddd %xmm15,%xmm11
+ pslld $5,%xmm8
+ pxor %xmm13,%xmm6
+
+ movdqa %xmm12,%xmm9
+ movdqa %xmm3,192-128(%rax)
+ paddd %xmm3,%xmm11
+ pxor %xmm1,%xmm4
+ psrld $27,%xmm9
+ pxor %xmm14,%xmm6
+ movdqa %xmm13,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm4,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm11
+ paddd %xmm4,%xmm4
+
+ psrld $2,%xmm13
+ paddd %xmm8,%xmm11
+ por %xmm5,%xmm4
+ por %xmm7,%xmm13
+ pxor %xmm2,%xmm0
+ movdqa 0-128(%rax),%xmm2
+
+ movdqa %xmm11,%xmm8
+ movdqa %xmm14,%xmm6
+ pxor 96-128(%rax),%xmm0
+ paddd %xmm15,%xmm10
+ pslld $5,%xmm8
+ pxor %xmm12,%xmm6
+
+ movdqa %xmm11,%xmm9
+ movdqa %xmm4,208-128(%rax)
+ paddd %xmm4,%xmm10
+ pxor %xmm2,%xmm0
+ psrld $27,%xmm9
+ pxor %xmm13,%xmm6
+ movdqa %xmm12,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm0,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm10
+ paddd %xmm0,%xmm0
+
+ psrld $2,%xmm12
+ paddd %xmm8,%xmm10
+ por %xmm5,%xmm0
+ por %xmm7,%xmm12
+ pxor %xmm3,%xmm1
+ movdqa 16-128(%rax),%xmm3
+
+ movdqa %xmm10,%xmm8
+ movdqa %xmm13,%xmm6
+ pxor 112-128(%rax),%xmm1
+ paddd %xmm15,%xmm14
+ pslld $5,%xmm8
+ pxor %xmm11,%xmm6
+
+ movdqa %xmm10,%xmm9
+ movdqa %xmm0,224-128(%rax)
+ paddd %xmm0,%xmm14
+ pxor %xmm3,%xmm1
+ psrld $27,%xmm9
+ pxor %xmm12,%xmm6
+ movdqa %xmm11,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm1,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm14
+ paddd %xmm1,%xmm1
+
+ psrld $2,%xmm11
+ paddd %xmm8,%xmm14
+ por %xmm5,%xmm1
+ por %xmm7,%xmm11
+ pxor %xmm4,%xmm2
+ movdqa 32-128(%rax),%xmm4
+
+ movdqa %xmm14,%xmm8
+ movdqa %xmm12,%xmm6
+ pxor 128-128(%rax),%xmm2
+ paddd %xmm15,%xmm13
+ pslld $5,%xmm8
+ pxor %xmm10,%xmm6
+
+ movdqa %xmm14,%xmm9
+ movdqa %xmm1,240-128(%rax)
+ paddd %xmm1,%xmm13
+ pxor %xmm4,%xmm2
+ psrld $27,%xmm9
+ pxor %xmm11,%xmm6
+ movdqa %xmm10,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm2,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm13
+ paddd %xmm2,%xmm2
+
+ psrld $2,%xmm10
+ paddd %xmm8,%xmm13
+ por %xmm5,%xmm2
+ por %xmm7,%xmm10
+ pxor %xmm0,%xmm3
+ movdqa 48-128(%rax),%xmm0
+
+ movdqa %xmm13,%xmm8
+ movdqa %xmm11,%xmm6
+ pxor 144-128(%rax),%xmm3
+ paddd %xmm15,%xmm12
+ pslld $5,%xmm8
+ pxor %xmm14,%xmm6
+
+ movdqa %xmm13,%xmm9
+ movdqa %xmm2,0-128(%rax)
+ paddd %xmm2,%xmm12
+ pxor %xmm0,%xmm3
+ psrld $27,%xmm9
+ pxor %xmm10,%xmm6
+ movdqa %xmm14,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm3,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm12
+ paddd %xmm3,%xmm3
+
+ psrld $2,%xmm14
+ paddd %xmm8,%xmm12
+ por %xmm5,%xmm3
+ por %xmm7,%xmm14
+ pxor %xmm1,%xmm4
+ movdqa 64-128(%rax),%xmm1
+
+ movdqa %xmm12,%xmm8
+ movdqa %xmm10,%xmm6
+ pxor 160-128(%rax),%xmm4
+ paddd %xmm15,%xmm11
+ pslld $5,%xmm8
+ pxor %xmm13,%xmm6
+
+ movdqa %xmm12,%xmm9
+ movdqa %xmm3,16-128(%rax)
+ paddd %xmm3,%xmm11
+ pxor %xmm1,%xmm4
+ psrld $27,%xmm9
+ pxor %xmm14,%xmm6
+ movdqa %xmm13,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm4,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm11
+ paddd %xmm4,%xmm4
+
+ psrld $2,%xmm13
+ paddd %xmm8,%xmm11
+ por %xmm5,%xmm4
+ por %xmm7,%xmm13
+ pxor %xmm2,%xmm0
+ movdqa 80-128(%rax),%xmm2
+
+ movdqa %xmm11,%xmm8
+ movdqa %xmm14,%xmm6
+ pxor 176-128(%rax),%xmm0
+ paddd %xmm15,%xmm10
+ pslld $5,%xmm8
+ pxor %xmm12,%xmm6
+
+ movdqa %xmm11,%xmm9
+ movdqa %xmm4,32-128(%rax)
+ paddd %xmm4,%xmm10
+ pxor %xmm2,%xmm0
+ psrld $27,%xmm9
+ pxor %xmm13,%xmm6
+ movdqa %xmm12,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm0,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm10
+ paddd %xmm0,%xmm0
+
+ psrld $2,%xmm12
+ paddd %xmm8,%xmm10
+ por %xmm5,%xmm0
+ por %xmm7,%xmm12
+ pxor %xmm3,%xmm1
+ movdqa 96-128(%rax),%xmm3
+
+ movdqa %xmm10,%xmm8
+ movdqa %xmm13,%xmm6
+ pxor 192-128(%rax),%xmm1
+ paddd %xmm15,%xmm14
+ pslld $5,%xmm8
+ pxor %xmm11,%xmm6
+
+ movdqa %xmm10,%xmm9
+ movdqa %xmm0,48-128(%rax)
+ paddd %xmm0,%xmm14
+ pxor %xmm3,%xmm1
+ psrld $27,%xmm9
+ pxor %xmm12,%xmm6
+ movdqa %xmm11,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm1,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm14
+ paddd %xmm1,%xmm1
+
+ psrld $2,%xmm11
+ paddd %xmm8,%xmm14
+ por %xmm5,%xmm1
+ por %xmm7,%xmm11
+ pxor %xmm4,%xmm2
+ movdqa 112-128(%rax),%xmm4
+
+ movdqa %xmm14,%xmm8
+ movdqa %xmm12,%xmm6
+ pxor 208-128(%rax),%xmm2
+ paddd %xmm15,%xmm13
+ pslld $5,%xmm8
+ pxor %xmm10,%xmm6
+
+ movdqa %xmm14,%xmm9
+ movdqa %xmm1,64-128(%rax)
+ paddd %xmm1,%xmm13
+ pxor %xmm4,%xmm2
+ psrld $27,%xmm9
+ pxor %xmm11,%xmm6
+ movdqa %xmm10,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm2,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm13
+ paddd %xmm2,%xmm2
+
+ psrld $2,%xmm10
+ paddd %xmm8,%xmm13
+ por %xmm5,%xmm2
+ por %xmm7,%xmm10
+ pxor %xmm0,%xmm3
+ movdqa 128-128(%rax),%xmm0
+
+ movdqa %xmm13,%xmm8
+ movdqa %xmm11,%xmm6
+ pxor 224-128(%rax),%xmm3
+ paddd %xmm15,%xmm12
+ pslld $5,%xmm8
+ pxor %xmm14,%xmm6
+
+ movdqa %xmm13,%xmm9
+ movdqa %xmm2,80-128(%rax)
+ paddd %xmm2,%xmm12
+ pxor %xmm0,%xmm3
+ psrld $27,%xmm9
+ pxor %xmm10,%xmm6
+ movdqa %xmm14,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm3,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm12
+ paddd %xmm3,%xmm3
+
+ psrld $2,%xmm14
+ paddd %xmm8,%xmm12
+ por %xmm5,%xmm3
+ por %xmm7,%xmm14
+ pxor %xmm1,%xmm4
+ movdqa 144-128(%rax),%xmm1
+
+ movdqa %xmm12,%xmm8
+ movdqa %xmm10,%xmm6
+ pxor 240-128(%rax),%xmm4
+ paddd %xmm15,%xmm11
+ pslld $5,%xmm8
+ pxor %xmm13,%xmm6
+
+ movdqa %xmm12,%xmm9
+ movdqa %xmm3,96-128(%rax)
+ paddd %xmm3,%xmm11
+ pxor %xmm1,%xmm4
+ psrld $27,%xmm9
+ pxor %xmm14,%xmm6
+ movdqa %xmm13,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm4,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm11
+ paddd %xmm4,%xmm4
+
+ psrld $2,%xmm13
+ paddd %xmm8,%xmm11
+ por %xmm5,%xmm4
+ por %xmm7,%xmm13
+ pxor %xmm2,%xmm0
+ movdqa 160-128(%rax),%xmm2
+
+ movdqa %xmm11,%xmm8
+ movdqa %xmm14,%xmm6
+ pxor 0-128(%rax),%xmm0
+ paddd %xmm15,%xmm10
+ pslld $5,%xmm8
+ pxor %xmm12,%xmm6
+
+ movdqa %xmm11,%xmm9
+ movdqa %xmm4,112-128(%rax)
+ paddd %xmm4,%xmm10
+ pxor %xmm2,%xmm0
+ psrld $27,%xmm9
+ pxor %xmm13,%xmm6
+ movdqa %xmm12,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm0,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm10
+ paddd %xmm0,%xmm0
+
+ psrld $2,%xmm12
+ paddd %xmm8,%xmm10
+ por %xmm5,%xmm0
+ por %xmm7,%xmm12
+ movdqa 32(%rbp),%xmm15
+ pxor %xmm3,%xmm1
+ movdqa 176-128(%rax),%xmm3
+
+ movdqa %xmm10,%xmm8
+ movdqa %xmm13,%xmm7
+ pxor 16-128(%rax),%xmm1
+ pxor %xmm3,%xmm1
+ paddd %xmm15,%xmm14
+ pslld $5,%xmm8
+ movdqa %xmm10,%xmm9
+ pand %xmm12,%xmm7
+
+ movdqa %xmm13,%xmm6
+ movdqa %xmm1,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm14
+ pxor %xmm12,%xmm6
+
+ movdqa %xmm0,128-128(%rax)
+ paddd %xmm0,%xmm14
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm11,%xmm6
+ movdqa %xmm11,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm1,%xmm1
+ paddd %xmm6,%xmm14
+
+ psrld $2,%xmm11
+ paddd %xmm8,%xmm14
+ por %xmm5,%xmm1
+ por %xmm7,%xmm11
+ pxor %xmm4,%xmm2
+ movdqa 192-128(%rax),%xmm4
+
+ movdqa %xmm14,%xmm8
+ movdqa %xmm12,%xmm7
+ pxor 32-128(%rax),%xmm2
+ pxor %xmm4,%xmm2
+ paddd %xmm15,%xmm13
+ pslld $5,%xmm8
+ movdqa %xmm14,%xmm9
+ pand %xmm11,%xmm7
+
+ movdqa %xmm12,%xmm6
+ movdqa %xmm2,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm13
+ pxor %xmm11,%xmm6
+
+ movdqa %xmm1,144-128(%rax)
+ paddd %xmm1,%xmm13
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm10,%xmm6
+ movdqa %xmm10,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm2,%xmm2
+ paddd %xmm6,%xmm13
+
+ psrld $2,%xmm10
+ paddd %xmm8,%xmm13
+ por %xmm5,%xmm2
+ por %xmm7,%xmm10
+ pxor %xmm0,%xmm3
+ movdqa 208-128(%rax),%xmm0
+
+ movdqa %xmm13,%xmm8
+ movdqa %xmm11,%xmm7
+ pxor 48-128(%rax),%xmm3
+ pxor %xmm0,%xmm3
+ paddd %xmm15,%xmm12
+ pslld $5,%xmm8
+ movdqa %xmm13,%xmm9
+ pand %xmm10,%xmm7
+
+ movdqa %xmm11,%xmm6
+ movdqa %xmm3,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm12
+ pxor %xmm10,%xmm6
+
+ movdqa %xmm2,160-128(%rax)
+ paddd %xmm2,%xmm12
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm14,%xmm6
+ movdqa %xmm14,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm3,%xmm3
+ paddd %xmm6,%xmm12
+
+ psrld $2,%xmm14
+ paddd %xmm8,%xmm12
+ por %xmm5,%xmm3
+ por %xmm7,%xmm14
+ pxor %xmm1,%xmm4
+ movdqa 224-128(%rax),%xmm1
+
+ movdqa %xmm12,%xmm8
+ movdqa %xmm10,%xmm7
+ pxor 64-128(%rax),%xmm4
+ pxor %xmm1,%xmm4
+ paddd %xmm15,%xmm11
+ pslld $5,%xmm8
+ movdqa %xmm12,%xmm9
+ pand %xmm14,%xmm7
+
+ movdqa %xmm10,%xmm6
+ movdqa %xmm4,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm11
+ pxor %xmm14,%xmm6
+
+ movdqa %xmm3,176-128(%rax)
+ paddd %xmm3,%xmm11
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm13,%xmm6
+ movdqa %xmm13,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm4,%xmm4
+ paddd %xmm6,%xmm11
+
+ psrld $2,%xmm13
+ paddd %xmm8,%xmm11
+ por %xmm5,%xmm4
+ por %xmm7,%xmm13
+ pxor %xmm2,%xmm0
+ movdqa 240-128(%rax),%xmm2
+
+ movdqa %xmm11,%xmm8
+ movdqa %xmm14,%xmm7
+ pxor 80-128(%rax),%xmm0
+ pxor %xmm2,%xmm0
+ paddd %xmm15,%xmm10
+ pslld $5,%xmm8
+ movdqa %xmm11,%xmm9
+ pand %xmm13,%xmm7
+
+ movdqa %xmm14,%xmm6
+ movdqa %xmm0,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm10
+ pxor %xmm13,%xmm6
+
+ movdqa %xmm4,192-128(%rax)
+ paddd %xmm4,%xmm10
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm12,%xmm6
+ movdqa %xmm12,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm0,%xmm0
+ paddd %xmm6,%xmm10
+
+ psrld $2,%xmm12
+ paddd %xmm8,%xmm10
+ por %xmm5,%xmm0
+ por %xmm7,%xmm12
+ pxor %xmm3,%xmm1
+ movdqa 0-128(%rax),%xmm3
+
+ movdqa %xmm10,%xmm8
+ movdqa %xmm13,%xmm7
+ pxor 96-128(%rax),%xmm1
+ pxor %xmm3,%xmm1
+ paddd %xmm15,%xmm14
+ pslld $5,%xmm8
+ movdqa %xmm10,%xmm9
+ pand %xmm12,%xmm7
+
+ movdqa %xmm13,%xmm6
+ movdqa %xmm1,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm14
+ pxor %xmm12,%xmm6
+
+ movdqa %xmm0,208-128(%rax)
+ paddd %xmm0,%xmm14
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm11,%xmm6
+ movdqa %xmm11,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm1,%xmm1
+ paddd %xmm6,%xmm14
+
+ psrld $2,%xmm11
+ paddd %xmm8,%xmm14
+ por %xmm5,%xmm1
+ por %xmm7,%xmm11
+ pxor %xmm4,%xmm2
+ movdqa 16-128(%rax),%xmm4
+
+ movdqa %xmm14,%xmm8
+ movdqa %xmm12,%xmm7
+ pxor 112-128(%rax),%xmm2
+ pxor %xmm4,%xmm2
+ paddd %xmm15,%xmm13
+ pslld $5,%xmm8
+ movdqa %xmm14,%xmm9
+ pand %xmm11,%xmm7
+
+ movdqa %xmm12,%xmm6
+ movdqa %xmm2,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm13
+ pxor %xmm11,%xmm6
+
+ movdqa %xmm1,224-128(%rax)
+ paddd %xmm1,%xmm13
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm10,%xmm6
+ movdqa %xmm10,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm2,%xmm2
+ paddd %xmm6,%xmm13
+
+ psrld $2,%xmm10
+ paddd %xmm8,%xmm13
+ por %xmm5,%xmm2
+ por %xmm7,%xmm10
+ pxor %xmm0,%xmm3
+ movdqa 32-128(%rax),%xmm0
+
+ movdqa %xmm13,%xmm8
+ movdqa %xmm11,%xmm7
+ pxor 128-128(%rax),%xmm3
+ pxor %xmm0,%xmm3
+ paddd %xmm15,%xmm12
+ pslld $5,%xmm8
+ movdqa %xmm13,%xmm9
+ pand %xmm10,%xmm7
+
+ movdqa %xmm11,%xmm6
+ movdqa %xmm3,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm12
+ pxor %xmm10,%xmm6
+
+ movdqa %xmm2,240-128(%rax)
+ paddd %xmm2,%xmm12
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm14,%xmm6
+ movdqa %xmm14,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm3,%xmm3
+ paddd %xmm6,%xmm12
+
+ psrld $2,%xmm14
+ paddd %xmm8,%xmm12
+ por %xmm5,%xmm3
+ por %xmm7,%xmm14
+ pxor %xmm1,%xmm4
+ movdqa 48-128(%rax),%xmm1
+
+ movdqa %xmm12,%xmm8
+ movdqa %xmm10,%xmm7
+ pxor 144-128(%rax),%xmm4
+ pxor %xmm1,%xmm4
+ paddd %xmm15,%xmm11
+ pslld $5,%xmm8
+ movdqa %xmm12,%xmm9
+ pand %xmm14,%xmm7
+
+ movdqa %xmm10,%xmm6
+ movdqa %xmm4,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm11
+ pxor %xmm14,%xmm6
+
+ movdqa %xmm3,0-128(%rax)
+ paddd %xmm3,%xmm11
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm13,%xmm6
+ movdqa %xmm13,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm4,%xmm4
+ paddd %xmm6,%xmm11
+
+ psrld $2,%xmm13
+ paddd %xmm8,%xmm11
+ por %xmm5,%xmm4
+ por %xmm7,%xmm13
+ pxor %xmm2,%xmm0
+ movdqa 64-128(%rax),%xmm2
+
+ movdqa %xmm11,%xmm8
+ movdqa %xmm14,%xmm7
+ pxor 160-128(%rax),%xmm0
+ pxor %xmm2,%xmm0
+ paddd %xmm15,%xmm10
+ pslld $5,%xmm8
+ movdqa %xmm11,%xmm9
+ pand %xmm13,%xmm7
+
+ movdqa %xmm14,%xmm6
+ movdqa %xmm0,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm10
+ pxor %xmm13,%xmm6
+
+ movdqa %xmm4,16-128(%rax)
+ paddd %xmm4,%xmm10
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm12,%xmm6
+ movdqa %xmm12,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm0,%xmm0
+ paddd %xmm6,%xmm10
+
+ psrld $2,%xmm12
+ paddd %xmm8,%xmm10
+ por %xmm5,%xmm0
+ por %xmm7,%xmm12
+ pxor %xmm3,%xmm1
+ movdqa 80-128(%rax),%xmm3
+
+ movdqa %xmm10,%xmm8
+ movdqa %xmm13,%xmm7
+ pxor 176-128(%rax),%xmm1
+ pxor %xmm3,%xmm1
+ paddd %xmm15,%xmm14
+ pslld $5,%xmm8
+ movdqa %xmm10,%xmm9
+ pand %xmm12,%xmm7
+
+ movdqa %xmm13,%xmm6
+ movdqa %xmm1,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm14
+ pxor %xmm12,%xmm6
+
+ movdqa %xmm0,32-128(%rax)
+ paddd %xmm0,%xmm14
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm11,%xmm6
+ movdqa %xmm11,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm1,%xmm1
+ paddd %xmm6,%xmm14
+
+ psrld $2,%xmm11
+ paddd %xmm8,%xmm14
+ por %xmm5,%xmm1
+ por %xmm7,%xmm11
+ pxor %xmm4,%xmm2
+ movdqa 96-128(%rax),%xmm4
+
+ movdqa %xmm14,%xmm8
+ movdqa %xmm12,%xmm7
+ pxor 192-128(%rax),%xmm2
+ pxor %xmm4,%xmm2
+ paddd %xmm15,%xmm13
+ pslld $5,%xmm8
+ movdqa %xmm14,%xmm9
+ pand %xmm11,%xmm7
+
+ movdqa %xmm12,%xmm6
+ movdqa %xmm2,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm13
+ pxor %xmm11,%xmm6
+
+ movdqa %xmm1,48-128(%rax)
+ paddd %xmm1,%xmm13
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm10,%xmm6
+ movdqa %xmm10,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm2,%xmm2
+ paddd %xmm6,%xmm13
+
+ psrld $2,%xmm10
+ paddd %xmm8,%xmm13
+ por %xmm5,%xmm2
+ por %xmm7,%xmm10
+ pxor %xmm0,%xmm3
+ movdqa 112-128(%rax),%xmm0
+
+ movdqa %xmm13,%xmm8
+ movdqa %xmm11,%xmm7
+ pxor 208-128(%rax),%xmm3
+ pxor %xmm0,%xmm3
+ paddd %xmm15,%xmm12
+ pslld $5,%xmm8
+ movdqa %xmm13,%xmm9
+ pand %xmm10,%xmm7
+
+ movdqa %xmm11,%xmm6
+ movdqa %xmm3,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm12
+ pxor %xmm10,%xmm6
+
+ movdqa %xmm2,64-128(%rax)
+ paddd %xmm2,%xmm12
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm14,%xmm6
+ movdqa %xmm14,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm3,%xmm3
+ paddd %xmm6,%xmm12
+
+ psrld $2,%xmm14
+ paddd %xmm8,%xmm12
+ por %xmm5,%xmm3
+ por %xmm7,%xmm14
+ pxor %xmm1,%xmm4
+ movdqa 128-128(%rax),%xmm1
+
+ movdqa %xmm12,%xmm8
+ movdqa %xmm10,%xmm7
+ pxor 224-128(%rax),%xmm4
+ pxor %xmm1,%xmm4
+ paddd %xmm15,%xmm11
+ pslld $5,%xmm8
+ movdqa %xmm12,%xmm9
+ pand %xmm14,%xmm7
+
+ movdqa %xmm10,%xmm6
+ movdqa %xmm4,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm11
+ pxor %xmm14,%xmm6
+
+ movdqa %xmm3,80-128(%rax)
+ paddd %xmm3,%xmm11
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm13,%xmm6
+ movdqa %xmm13,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm4,%xmm4
+ paddd %xmm6,%xmm11
+
+ psrld $2,%xmm13
+ paddd %xmm8,%xmm11
+ por %xmm5,%xmm4
+ por %xmm7,%xmm13
+ pxor %xmm2,%xmm0
+ movdqa 144-128(%rax),%xmm2
+
+ movdqa %xmm11,%xmm8
+ movdqa %xmm14,%xmm7
+ pxor 240-128(%rax),%xmm0
+ pxor %xmm2,%xmm0
+ paddd %xmm15,%xmm10
+ pslld $5,%xmm8
+ movdqa %xmm11,%xmm9
+ pand %xmm13,%xmm7
+
+ movdqa %xmm14,%xmm6
+ movdqa %xmm0,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm10
+ pxor %xmm13,%xmm6
+
+ movdqa %xmm4,96-128(%rax)
+ paddd %xmm4,%xmm10
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm12,%xmm6
+ movdqa %xmm12,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm0,%xmm0
+ paddd %xmm6,%xmm10
+
+ psrld $2,%xmm12
+ paddd %xmm8,%xmm10
+ por %xmm5,%xmm0
+ por %xmm7,%xmm12
+ pxor %xmm3,%xmm1
+ movdqa 160-128(%rax),%xmm3
+
+ movdqa %xmm10,%xmm8
+ movdqa %xmm13,%xmm7
+ pxor 0-128(%rax),%xmm1
+ pxor %xmm3,%xmm1
+ paddd %xmm15,%xmm14
+ pslld $5,%xmm8
+ movdqa %xmm10,%xmm9
+ pand %xmm12,%xmm7
+
+ movdqa %xmm13,%xmm6
+ movdqa %xmm1,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm14
+ pxor %xmm12,%xmm6
+
+ movdqa %xmm0,112-128(%rax)
+ paddd %xmm0,%xmm14
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm11,%xmm6
+ movdqa %xmm11,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm1,%xmm1
+ paddd %xmm6,%xmm14
+
+ psrld $2,%xmm11
+ paddd %xmm8,%xmm14
+ por %xmm5,%xmm1
+ por %xmm7,%xmm11
+ pxor %xmm4,%xmm2
+ movdqa 176-128(%rax),%xmm4
+
+ movdqa %xmm14,%xmm8
+ movdqa %xmm12,%xmm7
+ pxor 16-128(%rax),%xmm2
+ pxor %xmm4,%xmm2
+ paddd %xmm15,%xmm13
+ pslld $5,%xmm8
+ movdqa %xmm14,%xmm9
+ pand %xmm11,%xmm7
+
+ movdqa %xmm12,%xmm6
+ movdqa %xmm2,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm13
+ pxor %xmm11,%xmm6
+
+ movdqa %xmm1,128-128(%rax)
+ paddd %xmm1,%xmm13
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm10,%xmm6
+ movdqa %xmm10,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm2,%xmm2
+ paddd %xmm6,%xmm13
+
+ psrld $2,%xmm10
+ paddd %xmm8,%xmm13
+ por %xmm5,%xmm2
+ por %xmm7,%xmm10
+ pxor %xmm0,%xmm3
+ movdqa 192-128(%rax),%xmm0
+
+ movdqa %xmm13,%xmm8
+ movdqa %xmm11,%xmm7
+ pxor 32-128(%rax),%xmm3
+ pxor %xmm0,%xmm3
+ paddd %xmm15,%xmm12
+ pslld $5,%xmm8
+ movdqa %xmm13,%xmm9
+ pand %xmm10,%xmm7
+
+ movdqa %xmm11,%xmm6
+ movdqa %xmm3,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm12
+ pxor %xmm10,%xmm6
+
+ movdqa %xmm2,144-128(%rax)
+ paddd %xmm2,%xmm12
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm14,%xmm6
+ movdqa %xmm14,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm3,%xmm3
+ paddd %xmm6,%xmm12
+
+ psrld $2,%xmm14
+ paddd %xmm8,%xmm12
+ por %xmm5,%xmm3
+ por %xmm7,%xmm14
+ pxor %xmm1,%xmm4
+ movdqa 208-128(%rax),%xmm1
+
+ movdqa %xmm12,%xmm8
+ movdqa %xmm10,%xmm7
+ pxor 48-128(%rax),%xmm4
+ pxor %xmm1,%xmm4
+ paddd %xmm15,%xmm11
+ pslld $5,%xmm8
+ movdqa %xmm12,%xmm9
+ pand %xmm14,%xmm7
+
+ movdqa %xmm10,%xmm6
+ movdqa %xmm4,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm11
+ pxor %xmm14,%xmm6
+
+ movdqa %xmm3,160-128(%rax)
+ paddd %xmm3,%xmm11
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm13,%xmm6
+ movdqa %xmm13,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm4,%xmm4
+ paddd %xmm6,%xmm11
+
+ psrld $2,%xmm13
+ paddd %xmm8,%xmm11
+ por %xmm5,%xmm4
+ por %xmm7,%xmm13
+ pxor %xmm2,%xmm0
+ movdqa 224-128(%rax),%xmm2
+
+ movdqa %xmm11,%xmm8
+ movdqa %xmm14,%xmm7
+ pxor 64-128(%rax),%xmm0
+ pxor %xmm2,%xmm0
+ paddd %xmm15,%xmm10
+ pslld $5,%xmm8
+ movdqa %xmm11,%xmm9
+ pand %xmm13,%xmm7
+
+ movdqa %xmm14,%xmm6
+ movdqa %xmm0,%xmm5
+ psrld $27,%xmm9
+ paddd %xmm7,%xmm10
+ pxor %xmm13,%xmm6
+
+ movdqa %xmm4,176-128(%rax)
+ paddd %xmm4,%xmm10
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ pand %xmm12,%xmm6
+ movdqa %xmm12,%xmm7
+
+ pslld $30,%xmm7
+ paddd %xmm0,%xmm0
+ paddd %xmm6,%xmm10
+
+ psrld $2,%xmm12
+ paddd %xmm8,%xmm10
+ por %xmm5,%xmm0
+ por %xmm7,%xmm12
+ movdqa 64(%rbp),%xmm15
+ pxor %xmm3,%xmm1
+ movdqa 240-128(%rax),%xmm3
+
+ movdqa %xmm10,%xmm8
+ movdqa %xmm13,%xmm6
+ pxor 80-128(%rax),%xmm1
+ paddd %xmm15,%xmm14
+ pslld $5,%xmm8
+ pxor %xmm11,%xmm6
+
+ movdqa %xmm10,%xmm9
+ movdqa %xmm0,192-128(%rax)
+ paddd %xmm0,%xmm14
+ pxor %xmm3,%xmm1
+ psrld $27,%xmm9
+ pxor %xmm12,%xmm6
+ movdqa %xmm11,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm1,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm14
+ paddd %xmm1,%xmm1
+
+ psrld $2,%xmm11
+ paddd %xmm8,%xmm14
+ por %xmm5,%xmm1
+ por %xmm7,%xmm11
+ pxor %xmm4,%xmm2
+ movdqa 0-128(%rax),%xmm4
+
+ movdqa %xmm14,%xmm8
+ movdqa %xmm12,%xmm6
+ pxor 96-128(%rax),%xmm2
+ paddd %xmm15,%xmm13
+ pslld $5,%xmm8
+ pxor %xmm10,%xmm6
+
+ movdqa %xmm14,%xmm9
+ movdqa %xmm1,208-128(%rax)
+ paddd %xmm1,%xmm13
+ pxor %xmm4,%xmm2
+ psrld $27,%xmm9
+ pxor %xmm11,%xmm6
+ movdqa %xmm10,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm2,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm13
+ paddd %xmm2,%xmm2
+
+ psrld $2,%xmm10
+ paddd %xmm8,%xmm13
+ por %xmm5,%xmm2
+ por %xmm7,%xmm10
+ pxor %xmm0,%xmm3
+ movdqa 16-128(%rax),%xmm0
+
+ movdqa %xmm13,%xmm8
+ movdqa %xmm11,%xmm6
+ pxor 112-128(%rax),%xmm3
+ paddd %xmm15,%xmm12
+ pslld $5,%xmm8
+ pxor %xmm14,%xmm6
+
+ movdqa %xmm13,%xmm9
+ movdqa %xmm2,224-128(%rax)
+ paddd %xmm2,%xmm12
+ pxor %xmm0,%xmm3
+ psrld $27,%xmm9
+ pxor %xmm10,%xmm6
+ movdqa %xmm14,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm3,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm12
+ paddd %xmm3,%xmm3
+
+ psrld $2,%xmm14
+ paddd %xmm8,%xmm12
+ por %xmm5,%xmm3
+ por %xmm7,%xmm14
+ pxor %xmm1,%xmm4
+ movdqa 32-128(%rax),%xmm1
+
+ movdqa %xmm12,%xmm8
+ movdqa %xmm10,%xmm6
+ pxor 128-128(%rax),%xmm4
+ paddd %xmm15,%xmm11
+ pslld $5,%xmm8
+ pxor %xmm13,%xmm6
+
+ movdqa %xmm12,%xmm9
+ movdqa %xmm3,240-128(%rax)
+ paddd %xmm3,%xmm11
+ pxor %xmm1,%xmm4
+ psrld $27,%xmm9
+ pxor %xmm14,%xmm6
+ movdqa %xmm13,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm4,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm11
+ paddd %xmm4,%xmm4
+
+ psrld $2,%xmm13
+ paddd %xmm8,%xmm11
+ por %xmm5,%xmm4
+ por %xmm7,%xmm13
+ pxor %xmm2,%xmm0
+ movdqa 48-128(%rax),%xmm2
+
+ movdqa %xmm11,%xmm8
+ movdqa %xmm14,%xmm6
+ pxor 144-128(%rax),%xmm0
+ paddd %xmm15,%xmm10
+ pslld $5,%xmm8
+ pxor %xmm12,%xmm6
+
+ movdqa %xmm11,%xmm9
+ movdqa %xmm4,0-128(%rax)
+ paddd %xmm4,%xmm10
+ pxor %xmm2,%xmm0
+ psrld $27,%xmm9
+ pxor %xmm13,%xmm6
+ movdqa %xmm12,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm0,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm10
+ paddd %xmm0,%xmm0
+
+ psrld $2,%xmm12
+ paddd %xmm8,%xmm10
+ por %xmm5,%xmm0
+ por %xmm7,%xmm12
+ pxor %xmm3,%xmm1
+ movdqa 64-128(%rax),%xmm3
+
+ movdqa %xmm10,%xmm8
+ movdqa %xmm13,%xmm6
+ pxor 160-128(%rax),%xmm1
+ paddd %xmm15,%xmm14
+ pslld $5,%xmm8
+ pxor %xmm11,%xmm6
+
+ movdqa %xmm10,%xmm9
+ movdqa %xmm0,16-128(%rax)
+ paddd %xmm0,%xmm14
+ pxor %xmm3,%xmm1
+ psrld $27,%xmm9
+ pxor %xmm12,%xmm6
+ movdqa %xmm11,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm1,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm14
+ paddd %xmm1,%xmm1
+
+ psrld $2,%xmm11
+ paddd %xmm8,%xmm14
+ por %xmm5,%xmm1
+ por %xmm7,%xmm11
+ pxor %xmm4,%xmm2
+ movdqa 80-128(%rax),%xmm4
+
+ movdqa %xmm14,%xmm8
+ movdqa %xmm12,%xmm6
+ pxor 176-128(%rax),%xmm2
+ paddd %xmm15,%xmm13
+ pslld $5,%xmm8
+ pxor %xmm10,%xmm6
+
+ movdqa %xmm14,%xmm9
+ movdqa %xmm1,32-128(%rax)
+ paddd %xmm1,%xmm13
+ pxor %xmm4,%xmm2
+ psrld $27,%xmm9
+ pxor %xmm11,%xmm6
+ movdqa %xmm10,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm2,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm13
+ paddd %xmm2,%xmm2
+
+ psrld $2,%xmm10
+ paddd %xmm8,%xmm13
+ por %xmm5,%xmm2
+ por %xmm7,%xmm10
+ pxor %xmm0,%xmm3
+ movdqa 96-128(%rax),%xmm0
+
+ movdqa %xmm13,%xmm8
+ movdqa %xmm11,%xmm6
+ pxor 192-128(%rax),%xmm3
+ paddd %xmm15,%xmm12
+ pslld $5,%xmm8
+ pxor %xmm14,%xmm6
+
+ movdqa %xmm13,%xmm9
+ movdqa %xmm2,48-128(%rax)
+ paddd %xmm2,%xmm12
+ pxor %xmm0,%xmm3
+ psrld $27,%xmm9
+ pxor %xmm10,%xmm6
+ movdqa %xmm14,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm3,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm12
+ paddd %xmm3,%xmm3
+
+ psrld $2,%xmm14
+ paddd %xmm8,%xmm12
+ por %xmm5,%xmm3
+ por %xmm7,%xmm14
+ pxor %xmm1,%xmm4
+ movdqa 112-128(%rax),%xmm1
+
+ movdqa %xmm12,%xmm8
+ movdqa %xmm10,%xmm6
+ pxor 208-128(%rax),%xmm4
+ paddd %xmm15,%xmm11
+ pslld $5,%xmm8
+ pxor %xmm13,%xmm6
+
+ movdqa %xmm12,%xmm9
+ movdqa %xmm3,64-128(%rax)
+ paddd %xmm3,%xmm11
+ pxor %xmm1,%xmm4
+ psrld $27,%xmm9
+ pxor %xmm14,%xmm6
+ movdqa %xmm13,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm4,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm11
+ paddd %xmm4,%xmm4
+
+ psrld $2,%xmm13
+ paddd %xmm8,%xmm11
+ por %xmm5,%xmm4
+ por %xmm7,%xmm13
+ pxor %xmm2,%xmm0
+ movdqa 128-128(%rax),%xmm2
+
+ movdqa %xmm11,%xmm8
+ movdqa %xmm14,%xmm6
+ pxor 224-128(%rax),%xmm0
+ paddd %xmm15,%xmm10
+ pslld $5,%xmm8
+ pxor %xmm12,%xmm6
+
+ movdqa %xmm11,%xmm9
+ movdqa %xmm4,80-128(%rax)
+ paddd %xmm4,%xmm10
+ pxor %xmm2,%xmm0
+ psrld $27,%xmm9
+ pxor %xmm13,%xmm6
+ movdqa %xmm12,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm0,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm10
+ paddd %xmm0,%xmm0
+
+ psrld $2,%xmm12
+ paddd %xmm8,%xmm10
+ por %xmm5,%xmm0
+ por %xmm7,%xmm12
+ pxor %xmm3,%xmm1
+ movdqa 144-128(%rax),%xmm3
+
+ movdqa %xmm10,%xmm8
+ movdqa %xmm13,%xmm6
+ pxor 240-128(%rax),%xmm1
+ paddd %xmm15,%xmm14
+ pslld $5,%xmm8
+ pxor %xmm11,%xmm6
+
+ movdqa %xmm10,%xmm9
+ movdqa %xmm0,96-128(%rax)
+ paddd %xmm0,%xmm14
+ pxor %xmm3,%xmm1
+ psrld $27,%xmm9
+ pxor %xmm12,%xmm6
+ movdqa %xmm11,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm1,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm14
+ paddd %xmm1,%xmm1
+
+ psrld $2,%xmm11
+ paddd %xmm8,%xmm14
+ por %xmm5,%xmm1
+ por %xmm7,%xmm11
+ pxor %xmm4,%xmm2
+ movdqa 160-128(%rax),%xmm4
+
+ movdqa %xmm14,%xmm8
+ movdqa %xmm12,%xmm6
+ pxor 0-128(%rax),%xmm2
+ paddd %xmm15,%xmm13
+ pslld $5,%xmm8
+ pxor %xmm10,%xmm6
+
+ movdqa %xmm14,%xmm9
+ movdqa %xmm1,112-128(%rax)
+ paddd %xmm1,%xmm13
+ pxor %xmm4,%xmm2
+ psrld $27,%xmm9
+ pxor %xmm11,%xmm6
+ movdqa %xmm10,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm2,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm13
+ paddd %xmm2,%xmm2
+
+ psrld $2,%xmm10
+ paddd %xmm8,%xmm13
+ por %xmm5,%xmm2
+ por %xmm7,%xmm10
+ pxor %xmm0,%xmm3
+ movdqa 176-128(%rax),%xmm0
+
+ movdqa %xmm13,%xmm8
+ movdqa %xmm11,%xmm6
+ pxor 16-128(%rax),%xmm3
+ paddd %xmm15,%xmm12
+ pslld $5,%xmm8
+ pxor %xmm14,%xmm6
+
+ movdqa %xmm13,%xmm9
+ paddd %xmm2,%xmm12
+ pxor %xmm0,%xmm3
+ psrld $27,%xmm9
+ pxor %xmm10,%xmm6
+ movdqa %xmm14,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm3,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm12
+ paddd %xmm3,%xmm3
+
+ psrld $2,%xmm14
+ paddd %xmm8,%xmm12
+ por %xmm5,%xmm3
+ por %xmm7,%xmm14
+ pxor %xmm1,%xmm4
+ movdqa 192-128(%rax),%xmm1
+
+ movdqa %xmm12,%xmm8
+ movdqa %xmm10,%xmm6
+ pxor 32-128(%rax),%xmm4
+ paddd %xmm15,%xmm11
+ pslld $5,%xmm8
+ pxor %xmm13,%xmm6
+
+ movdqa %xmm12,%xmm9
+ paddd %xmm3,%xmm11
+ pxor %xmm1,%xmm4
+ psrld $27,%xmm9
+ pxor %xmm14,%xmm6
+ movdqa %xmm13,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm4,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm11
+ paddd %xmm4,%xmm4
+
+ psrld $2,%xmm13
+ paddd %xmm8,%xmm11
+ por %xmm5,%xmm4
+ por %xmm7,%xmm13
+ pxor %xmm2,%xmm0
+ movdqa 208-128(%rax),%xmm2
+
+ movdqa %xmm11,%xmm8
+ movdqa %xmm14,%xmm6
+ pxor 48-128(%rax),%xmm0
+ paddd %xmm15,%xmm10
+ pslld $5,%xmm8
+ pxor %xmm12,%xmm6
+
+ movdqa %xmm11,%xmm9
+ paddd %xmm4,%xmm10
+ pxor %xmm2,%xmm0
+ psrld $27,%xmm9
+ pxor %xmm13,%xmm6
+ movdqa %xmm12,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm0,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm10
+ paddd %xmm0,%xmm0
+
+ psrld $2,%xmm12
+ paddd %xmm8,%xmm10
+ por %xmm5,%xmm0
+ por %xmm7,%xmm12
+ pxor %xmm3,%xmm1
+ movdqa 224-128(%rax),%xmm3
+
+ movdqa %xmm10,%xmm8
+ movdqa %xmm13,%xmm6
+ pxor 64-128(%rax),%xmm1
+ paddd %xmm15,%xmm14
+ pslld $5,%xmm8
+ pxor %xmm11,%xmm6
+
+ movdqa %xmm10,%xmm9
+ paddd %xmm0,%xmm14
+ pxor %xmm3,%xmm1
+ psrld $27,%xmm9
+ pxor %xmm12,%xmm6
+ movdqa %xmm11,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm1,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm14
+ paddd %xmm1,%xmm1
+
+ psrld $2,%xmm11
+ paddd %xmm8,%xmm14
+ por %xmm5,%xmm1
+ por %xmm7,%xmm11
+ pxor %xmm4,%xmm2
+ movdqa 240-128(%rax),%xmm4
+
+ movdqa %xmm14,%xmm8
+ movdqa %xmm12,%xmm6
+ pxor 80-128(%rax),%xmm2
+ paddd %xmm15,%xmm13
+ pslld $5,%xmm8
+ pxor %xmm10,%xmm6
+
+ movdqa %xmm14,%xmm9
+ paddd %xmm1,%xmm13
+ pxor %xmm4,%xmm2
+ psrld $27,%xmm9
+ pxor %xmm11,%xmm6
+ movdqa %xmm10,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm2,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm13
+ paddd %xmm2,%xmm2
+
+ psrld $2,%xmm10
+ paddd %xmm8,%xmm13
+ por %xmm5,%xmm2
+ por %xmm7,%xmm10
+ pxor %xmm0,%xmm3
+ movdqa 0-128(%rax),%xmm0
+
+ movdqa %xmm13,%xmm8
+ movdqa %xmm11,%xmm6
+ pxor 96-128(%rax),%xmm3
+ paddd %xmm15,%xmm12
+ pslld $5,%xmm8
+ pxor %xmm14,%xmm6
+
+ movdqa %xmm13,%xmm9
+ paddd %xmm2,%xmm12
+ pxor %xmm0,%xmm3
+ psrld $27,%xmm9
+ pxor %xmm10,%xmm6
+ movdqa %xmm14,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm3,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm12
+ paddd %xmm3,%xmm3
+
+ psrld $2,%xmm14
+ paddd %xmm8,%xmm12
+ por %xmm5,%xmm3
+ por %xmm7,%xmm14
+ pxor %xmm1,%xmm4
+ movdqa 16-128(%rax),%xmm1
+
+ movdqa %xmm12,%xmm8
+ movdqa %xmm10,%xmm6
+ pxor 112-128(%rax),%xmm4
+ paddd %xmm15,%xmm11
+ pslld $5,%xmm8
+ pxor %xmm13,%xmm6
+
+ movdqa %xmm12,%xmm9
+ paddd %xmm3,%xmm11
+ pxor %xmm1,%xmm4
+ psrld $27,%xmm9
+ pxor %xmm14,%xmm6
+ movdqa %xmm13,%xmm7
+
+ pslld $30,%xmm7
+ movdqa %xmm4,%xmm5
+ por %xmm9,%xmm8
+ psrld $31,%xmm5
+ paddd %xmm6,%xmm11
+ paddd %xmm4,%xmm4
+
+ psrld $2,%xmm13
+ paddd %xmm8,%xmm11
+ por %xmm5,%xmm4
+ por %xmm7,%xmm13
+ movdqa %xmm11,%xmm8
+ paddd %xmm15,%xmm10
+ movdqa %xmm14,%xmm6
+ pslld $5,%xmm8
+ pxor %xmm12,%xmm6
+
+ movdqa %xmm11,%xmm9
+ paddd %xmm4,%xmm10
+ psrld $27,%xmm9
+ movdqa %xmm12,%xmm7
+ pxor %xmm13,%xmm6
+
+ pslld $30,%xmm7
+ por %xmm9,%xmm8
+ paddd %xmm6,%xmm10
+
+ psrld $2,%xmm12
+ paddd %xmm8,%xmm10
+ por %xmm7,%xmm12
+ movdqa (%rbx),%xmm0
+ movl $1,%ecx
+ cmpl 0(%rbx),%ecx
+ pxor %xmm8,%xmm8
+ cmovgeq %rbp,%r8
+ cmpl 4(%rbx),%ecx
+ movdqa %xmm0,%xmm1
+ cmovgeq %rbp,%r9
+ cmpl 8(%rbx),%ecx
+ pcmpgtd %xmm8,%xmm1
+ cmovgeq %rbp,%r10
+ cmpl 12(%rbx),%ecx
+ paddd %xmm1,%xmm0
+ cmovgeq %rbp,%r11
+
+ movdqu 0(%rdi),%xmm6
+ pand %xmm1,%xmm10
+ movdqu 32(%rdi),%xmm7
+ pand %xmm1,%xmm11
+ paddd %xmm6,%xmm10
+ movdqu 64(%rdi),%xmm8
+ pand %xmm1,%xmm12
+ paddd %xmm7,%xmm11
+ movdqu 96(%rdi),%xmm9
+ pand %xmm1,%xmm13
+ paddd %xmm8,%xmm12
+ movdqu 128(%rdi),%xmm5
+ pand %xmm1,%xmm14
+ movdqu %xmm10,0(%rdi)
+ paddd %xmm9,%xmm13
+ movdqu %xmm11,32(%rdi)
+ paddd %xmm5,%xmm14
+ movdqu %xmm12,64(%rdi)
+ movdqu %xmm13,96(%rdi)
+ movdqu %xmm14,128(%rdi)
+
+ movdqa %xmm0,(%rbx)
+ movdqa 96(%rbp),%xmm5
+ movdqa -32(%rbp),%xmm15
+ decl %edx
+ jnz .Loop
+
+ movl 280(%rsp),%edx
+ leaq 16(%rdi),%rdi
+ leaq 64(%rsi),%rsi
+ decl %edx
+ jnz .Loop_grande
+
+.Ldone:
+ movq 272(%rsp),%rax
+.cfi_def_cfa %rax,8
+ movq -16(%rax),%rbp
+.cfi_restore %rbp
+ movq -8(%rax),%rbx
+.cfi_restore %rbx
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
+.Lepilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size sha1_multi_block,.-sha1_multi_block
+.type sha1_multi_block_shaext,@function
+.align 32
+sha1_multi_block_shaext:
+.cfi_startproc
+_shaext_shortcut:
+ movq %rsp,%rax
+.cfi_def_cfa_register %rax
+ pushq %rbx
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_offset %rbp,-24
+ subq $288,%rsp
+ shll $1,%edx
+ andq $-256,%rsp
+ leaq 64(%rdi),%rdi
+ movq %rax,272(%rsp)
+.Lbody_shaext:
+ leaq 256(%rsp),%rbx
+ movdqa K_XX_XX+128(%rip),%xmm3
+
+.Loop_grande_shaext:
+ movl %edx,280(%rsp)
+ xorl %edx,%edx
+ movq 0(%rsi),%r8
+ movl 8(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,0(%rbx)
+ cmovleq %rsp,%r8
+ movq 16(%rsi),%r9
+ movl 24(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,4(%rbx)
+ cmovleq %rsp,%r9
+ testl %edx,%edx
+ jz .Ldone_shaext
+
+ movq 0-64(%rdi),%xmm0
+ movq 32-64(%rdi),%xmm4
+ movq 64-64(%rdi),%xmm5
+ movq 96-64(%rdi),%xmm6
+ movq 128-64(%rdi),%xmm7
+
+ punpckldq %xmm4,%xmm0
+ punpckldq %xmm6,%xmm5
+
+ movdqa %xmm0,%xmm8
+ punpcklqdq %xmm5,%xmm0
+ punpckhqdq %xmm5,%xmm8
+
+ pshufd $63,%xmm7,%xmm1
+ pshufd $127,%xmm7,%xmm9
+ pshufd $27,%xmm0,%xmm0
+ pshufd $27,%xmm8,%xmm8
+ jmp .Loop_shaext
+
+.align 32
+.Loop_shaext:
+ movdqu 0(%r8),%xmm4
+ movdqu 0(%r9),%xmm11
+ movdqu 16(%r8),%xmm5
+ movdqu 16(%r9),%xmm12
+ movdqu 32(%r8),%xmm6
+.byte 102,15,56,0,227
+ movdqu 32(%r9),%xmm13
+.byte 102,68,15,56,0,219
+ movdqu 48(%r8),%xmm7
+ leaq 64(%r8),%r8
+.byte 102,15,56,0,235
+ movdqu 48(%r9),%xmm14
+ leaq 64(%r9),%r9
+.byte 102,68,15,56,0,227
+
+ movdqa %xmm1,80(%rsp)
+ paddd %xmm4,%xmm1
+ movdqa %xmm9,112(%rsp)
+ paddd %xmm11,%xmm9
+ movdqa %xmm0,64(%rsp)
+ movdqa %xmm0,%xmm2
+ movdqa %xmm8,96(%rsp)
+ movdqa %xmm8,%xmm10
+.byte 15,58,204,193,0
+.byte 15,56,200,213
+.byte 69,15,58,204,193,0
+.byte 69,15,56,200,212
+.byte 102,15,56,0,243
+ prefetcht0 127(%r8)
+.byte 15,56,201,229
+.byte 102,68,15,56,0,235
+ prefetcht0 127(%r9)
+.byte 69,15,56,201,220
+
+.byte 102,15,56,0,251
+ movdqa %xmm0,%xmm1
+.byte 102,68,15,56,0,243
+ movdqa %xmm8,%xmm9
+.byte 15,58,204,194,0
+.byte 15,56,200,206
+.byte 69,15,58,204,194,0
+.byte 69,15,56,200,205
+ pxor %xmm6,%xmm4
+.byte 15,56,201,238
+ pxor %xmm13,%xmm11
+.byte 69,15,56,201,229
+ movdqa %xmm0,%xmm2
+ movdqa %xmm8,%xmm10
+.byte 15,58,204,193,0
+.byte 15,56,200,215
+.byte 69,15,58,204,193,0
+.byte 69,15,56,200,214
+.byte 15,56,202,231
+.byte 69,15,56,202,222
+ pxor %xmm7,%xmm5
+.byte 15,56,201,247
+ pxor %xmm14,%xmm12
+.byte 69,15,56,201,238
+ movdqa %xmm0,%xmm1
+ movdqa %xmm8,%xmm9
+.byte 15,58,204,194,0
+.byte 15,56,200,204
+.byte 69,15,58,204,194,0
+.byte 69,15,56,200,203
+.byte 15,56,202,236
+.byte 69,15,56,202,227
+ pxor %xmm4,%xmm6
+.byte 15,56,201,252
+ pxor %xmm11,%xmm13
+.byte 69,15,56,201,243
+ movdqa %xmm0,%xmm2
+ movdqa %xmm8,%xmm10
+.byte 15,58,204,193,0
+.byte 15,56,200,213
+.byte 69,15,58,204,193,0
+.byte 69,15,56,200,212
+.byte 15,56,202,245
+.byte 69,15,56,202,236
+ pxor %xmm5,%xmm7
+.byte 15,56,201,229
+ pxor %xmm12,%xmm14
+.byte 69,15,56,201,220
+ movdqa %xmm0,%xmm1
+ movdqa %xmm8,%xmm9
+.byte 15,58,204,194,1
+.byte 15,56,200,206
+.byte 69,15,58,204,194,1
+.byte 69,15,56,200,205
+.byte 15,56,202,254
+.byte 69,15,56,202,245
+ pxor %xmm6,%xmm4
+.byte 15,56,201,238
+ pxor %xmm13,%xmm11
+.byte 69,15,56,201,229
+ movdqa %xmm0,%xmm2
+ movdqa %xmm8,%xmm10
+.byte 15,58,204,193,1
+.byte 15,56,200,215
+.byte 69,15,58,204,193,1
+.byte 69,15,56,200,214
+.byte 15,56,202,231
+.byte 69,15,56,202,222
+ pxor %xmm7,%xmm5
+.byte 15,56,201,247
+ pxor %xmm14,%xmm12
+.byte 69,15,56,201,238
+ movdqa %xmm0,%xmm1
+ movdqa %xmm8,%xmm9
+.byte 15,58,204,194,1
+.byte 15,56,200,204
+.byte 69,15,58,204,194,1
+.byte 69,15,56,200,203
+.byte 15,56,202,236
+.byte 69,15,56,202,227
+ pxor %xmm4,%xmm6
+.byte 15,56,201,252
+ pxor %xmm11,%xmm13
+.byte 69,15,56,201,243
+ movdqa %xmm0,%xmm2
+ movdqa %xmm8,%xmm10
+.byte 15,58,204,193,1
+.byte 15,56,200,213
+.byte 69,15,58,204,193,1
+.byte 69,15,56,200,212
+.byte 15,56,202,245
+.byte 69,15,56,202,236
+ pxor %xmm5,%xmm7
+.byte 15,56,201,229
+ pxor %xmm12,%xmm14
+.byte 69,15,56,201,220
+ movdqa %xmm0,%xmm1
+ movdqa %xmm8,%xmm9
+.byte 15,58,204,194,1
+.byte 15,56,200,206
+.byte 69,15,58,204,194,1
+.byte 69,15,56,200,205
+.byte 15,56,202,254
+.byte 69,15,56,202,245
+ pxor %xmm6,%xmm4
+.byte 15,56,201,238
+ pxor %xmm13,%xmm11
+.byte 69,15,56,201,229
+ movdqa %xmm0,%xmm2
+ movdqa %xmm8,%xmm10
+.byte 15,58,204,193,2
+.byte 15,56,200,215
+.byte 69,15,58,204,193,2
+.byte 69,15,56,200,214
+.byte 15,56,202,231
+.byte 69,15,56,202,222
+ pxor %xmm7,%xmm5
+.byte 15,56,201,247
+ pxor %xmm14,%xmm12
+.byte 69,15,56,201,238
+ movdqa %xmm0,%xmm1
+ movdqa %xmm8,%xmm9
+.byte 15,58,204,194,2
+.byte 15,56,200,204
+.byte 69,15,58,204,194,2
+.byte 69,15,56,200,203
+.byte 15,56,202,236
+.byte 69,15,56,202,227
+ pxor %xmm4,%xmm6
+.byte 15,56,201,252
+ pxor %xmm11,%xmm13
+.byte 69,15,56,201,243
+ movdqa %xmm0,%xmm2
+ movdqa %xmm8,%xmm10
+.byte 15,58,204,193,2
+.byte 15,56,200,213
+.byte 69,15,58,204,193,2
+.byte 69,15,56,200,212
+.byte 15,56,202,245
+.byte 69,15,56,202,236
+ pxor %xmm5,%xmm7
+.byte 15,56,201,229
+ pxor %xmm12,%xmm14
+.byte 69,15,56,201,220
+ movdqa %xmm0,%xmm1
+ movdqa %xmm8,%xmm9
+.byte 15,58,204,194,2
+.byte 15,56,200,206
+.byte 69,15,58,204,194,2
+.byte 69,15,56,200,205
+.byte 15,56,202,254
+.byte 69,15,56,202,245
+ pxor %xmm6,%xmm4
+.byte 15,56,201,238
+ pxor %xmm13,%xmm11
+.byte 69,15,56,201,229
+ movdqa %xmm0,%xmm2
+ movdqa %xmm8,%xmm10
+.byte 15,58,204,193,2
+.byte 15,56,200,215
+.byte 69,15,58,204,193,2
+.byte 69,15,56,200,214
+.byte 15,56,202,231
+.byte 69,15,56,202,222
+ pxor %xmm7,%xmm5
+.byte 15,56,201,247
+ pxor %xmm14,%xmm12
+.byte 69,15,56,201,238
+ movdqa %xmm0,%xmm1
+ movdqa %xmm8,%xmm9
+.byte 15,58,204,194,3
+.byte 15,56,200,204
+.byte 69,15,58,204,194,3
+.byte 69,15,56,200,203
+.byte 15,56,202,236
+.byte 69,15,56,202,227
+ pxor %xmm4,%xmm6
+.byte 15,56,201,252
+ pxor %xmm11,%xmm13
+.byte 69,15,56,201,243
+ movdqa %xmm0,%xmm2
+ movdqa %xmm8,%xmm10
+.byte 15,58,204,193,3
+.byte 15,56,200,213
+.byte 69,15,58,204,193,3
+.byte 69,15,56,200,212
+.byte 15,56,202,245
+.byte 69,15,56,202,236
+ pxor %xmm5,%xmm7
+ pxor %xmm12,%xmm14
+
+ movl $1,%ecx
+ pxor %xmm4,%xmm4
+ cmpl 0(%rbx),%ecx
+ cmovgeq %rsp,%r8
+
+ movdqa %xmm0,%xmm1
+ movdqa %xmm8,%xmm9
+.byte 15,58,204,194,3
+.byte 15,56,200,206
+.byte 69,15,58,204,194,3
+.byte 69,15,56,200,205
+.byte 15,56,202,254
+.byte 69,15,56,202,245
+
+ cmpl 4(%rbx),%ecx
+ cmovgeq %rsp,%r9
+ movq (%rbx),%xmm6
+
+ movdqa %xmm0,%xmm2
+ movdqa %xmm8,%xmm10
+.byte 15,58,204,193,3
+.byte 15,56,200,215
+.byte 69,15,58,204,193,3
+.byte 69,15,56,200,214
+
+ pshufd $0x00,%xmm6,%xmm11
+ pshufd $0x55,%xmm6,%xmm12
+ movdqa %xmm6,%xmm7
+ pcmpgtd %xmm4,%xmm11
+ pcmpgtd %xmm4,%xmm12
+
+ movdqa %xmm0,%xmm1
+ movdqa %xmm8,%xmm9
+.byte 15,58,204,194,3
+.byte 15,56,200,204
+.byte 69,15,58,204,194,3
+.byte 68,15,56,200,204
+
+ pcmpgtd %xmm4,%xmm7
+ pand %xmm11,%xmm0
+ pand %xmm11,%xmm1
+ pand %xmm12,%xmm8
+ pand %xmm12,%xmm9
+ paddd %xmm7,%xmm6
+
+ paddd 64(%rsp),%xmm0
+ paddd 80(%rsp),%xmm1
+ paddd 96(%rsp),%xmm8
+ paddd 112(%rsp),%xmm9
+
+ movq %xmm6,(%rbx)
+ decl %edx
+ jnz .Loop_shaext
+
+ movl 280(%rsp),%edx
+
+ pshufd $27,%xmm0,%xmm0
+ pshufd $27,%xmm8,%xmm8
+
+ movdqa %xmm0,%xmm6
+ punpckldq %xmm8,%xmm0
+ punpckhdq %xmm8,%xmm6
+ punpckhdq %xmm9,%xmm1
+ movq %xmm0,0-64(%rdi)
+ psrldq $8,%xmm0
+ movq %xmm6,64-64(%rdi)
+ psrldq $8,%xmm6
+ movq %xmm0,32-64(%rdi)
+ psrldq $8,%xmm1
+ movq %xmm6,96-64(%rdi)
+ movq %xmm1,128-64(%rdi)
+
+ leaq 8(%rdi),%rdi
+ leaq 32(%rsi),%rsi
+ decl %edx
+ jnz .Loop_grande_shaext
+
+.Ldone_shaext:
+
+ movq -16(%rax),%rbp
+.cfi_restore %rbp
+ movq -8(%rax),%rbx
+.cfi_restore %rbx
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
+.Lepilogue_shaext:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size sha1_multi_block_shaext,.-sha1_multi_block_shaext
+
+.align 256
+.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999
+.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999
+K_XX_XX:
+.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1
+.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1
+.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc
+.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc
+.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6
+.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.byte 0xf,0xe,0xd,0xc,0xb,0xa,0x9,0x8,0x7,0x6,0x5,0x4,0x3,0x2,0x1,0x0
+.byte 83,72,65,49,32,109,117,108,116,105,45,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
diff --git a/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha1-x86_64.S b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha1-x86_64.S
new file mode 100644
index 0000000000..0b59726ae4
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha1-x86_64.S
@@ -0,0 +1,2631 @@
+# WARNING: do not edit!
+# Generated from openssl/crypto/sha/asm/sha1-x86_64.pl
+#
+# Copyright 2006-2020 The OpenSSL Project Authors. All Rights Reserved.
+#
+# Licensed under the OpenSSL license (the "License"). You may not use
+# this file except in compliance with the License. You can obtain a copy
+# in the file LICENSE in the source distribution or at
+# https://www.openssl.org/source/license.html
+
+.text
+
+
+.globl sha1_block_data_order
+.type sha1_block_data_order,@function
+.align 16
+sha1_block_data_order:
+.cfi_startproc
+ movl OPENSSL_ia32cap_P+0(%rip),%r9d
+ movl OPENSSL_ia32cap_P+4(%rip),%r8d
+ movl OPENSSL_ia32cap_P+8(%rip),%r10d
+ testl $512,%r8d
+ jz .Lialu
+ testl $536870912,%r10d
+ jnz _shaext_shortcut
+ jmp _ssse3_shortcut
+
+.align 16
+.Lialu:
+ movq %rsp,%rax
+.cfi_def_cfa_register %rax
+ pushq %rbx
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_offset %rbp,-24
+ pushq %r12
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_offset %r14,-48
+ movq %rdi,%r8
+ subq $72,%rsp
+ movq %rsi,%r9
+ andq $-64,%rsp
+ movq %rdx,%r10
+ movq %rax,64(%rsp)
+.cfi_escape 0x0f,0x06,0x77,0xc0,0x00,0x06,0x23,0x08
+.Lprologue:
+
+ movl 0(%r8),%esi
+ movl 4(%r8),%edi
+ movl 8(%r8),%r11d
+ movl 12(%r8),%r12d
+ movl 16(%r8),%r13d
+ jmp .Lloop
+
+.align 16
+.Lloop:
+ movl 0(%r9),%edx
+ bswapl %edx
+ movl 4(%r9),%ebp
+ movl %r12d,%eax
+ movl %edx,0(%rsp)
+ movl %esi,%ecx
+ bswapl %ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ andl %edi,%eax
+ leal 1518500249(%rdx,%r13,1),%r13d
+ addl %ecx,%r13d
+ xorl %r12d,%eax
+ roll $30,%edi
+ addl %eax,%r13d
+ movl 8(%r9),%r14d
+ movl %r11d,%eax
+ movl %ebp,4(%rsp)
+ movl %r13d,%ecx
+ bswapl %r14d
+ xorl %edi,%eax
+ roll $5,%ecx
+ andl %esi,%eax
+ leal 1518500249(%rbp,%r12,1),%r12d
+ addl %ecx,%r12d
+ xorl %r11d,%eax
+ roll $30,%esi
+ addl %eax,%r12d
+ movl 12(%r9),%edx
+ movl %edi,%eax
+ movl %r14d,8(%rsp)
+ movl %r12d,%ecx
+ bswapl %edx
+ xorl %esi,%eax
+ roll $5,%ecx
+ andl %r13d,%eax
+ leal 1518500249(%r14,%r11,1),%r11d
+ addl %ecx,%r11d
+ xorl %edi,%eax
+ roll $30,%r13d
+ addl %eax,%r11d
+ movl 16(%r9),%ebp
+ movl %esi,%eax
+ movl %edx,12(%rsp)
+ movl %r11d,%ecx
+ bswapl %ebp
+ xorl %r13d,%eax
+ roll $5,%ecx
+ andl %r12d,%eax
+ leal 1518500249(%rdx,%rdi,1),%edi
+ addl %ecx,%edi
+ xorl %esi,%eax
+ roll $30,%r12d
+ addl %eax,%edi
+ movl 20(%r9),%r14d
+ movl %r13d,%eax
+ movl %ebp,16(%rsp)
+ movl %edi,%ecx
+ bswapl %r14d
+ xorl %r12d,%eax
+ roll $5,%ecx
+ andl %r11d,%eax
+ leal 1518500249(%rbp,%rsi,1),%esi
+ addl %ecx,%esi
+ xorl %r13d,%eax
+ roll $30,%r11d
+ addl %eax,%esi
+ movl 24(%r9),%edx
+ movl %r12d,%eax
+ movl %r14d,20(%rsp)
+ movl %esi,%ecx
+ bswapl %edx
+ xorl %r11d,%eax
+ roll $5,%ecx
+ andl %edi,%eax
+ leal 1518500249(%r14,%r13,1),%r13d
+ addl %ecx,%r13d
+ xorl %r12d,%eax
+ roll $30,%edi
+ addl %eax,%r13d
+ movl 28(%r9),%ebp
+ movl %r11d,%eax
+ movl %edx,24(%rsp)
+ movl %r13d,%ecx
+ bswapl %ebp
+ xorl %edi,%eax
+ roll $5,%ecx
+ andl %esi,%eax
+ leal 1518500249(%rdx,%r12,1),%r12d
+ addl %ecx,%r12d
+ xorl %r11d,%eax
+ roll $30,%esi
+ addl %eax,%r12d
+ movl 32(%r9),%r14d
+ movl %edi,%eax
+ movl %ebp,28(%rsp)
+ movl %r12d,%ecx
+ bswapl %r14d
+ xorl %esi,%eax
+ roll $5,%ecx
+ andl %r13d,%eax
+ leal 1518500249(%rbp,%r11,1),%r11d
+ addl %ecx,%r11d
+ xorl %edi,%eax
+ roll $30,%r13d
+ addl %eax,%r11d
+ movl 36(%r9),%edx
+ movl %esi,%eax
+ movl %r14d,32(%rsp)
+ movl %r11d,%ecx
+ bswapl %edx
+ xorl %r13d,%eax
+ roll $5,%ecx
+ andl %r12d,%eax
+ leal 1518500249(%r14,%rdi,1),%edi
+ addl %ecx,%edi
+ xorl %esi,%eax
+ roll $30,%r12d
+ addl %eax,%edi
+ movl 40(%r9),%ebp
+ movl %r13d,%eax
+ movl %edx,36(%rsp)
+ movl %edi,%ecx
+ bswapl %ebp
+ xorl %r12d,%eax
+ roll $5,%ecx
+ andl %r11d,%eax
+ leal 1518500249(%rdx,%rsi,1),%esi
+ addl %ecx,%esi
+ xorl %r13d,%eax
+ roll $30,%r11d
+ addl %eax,%esi
+ movl 44(%r9),%r14d
+ movl %r12d,%eax
+ movl %ebp,40(%rsp)
+ movl %esi,%ecx
+ bswapl %r14d
+ xorl %r11d,%eax
+ roll $5,%ecx
+ andl %edi,%eax
+ leal 1518500249(%rbp,%r13,1),%r13d
+ addl %ecx,%r13d
+ xorl %r12d,%eax
+ roll $30,%edi
+ addl %eax,%r13d
+ movl 48(%r9),%edx
+ movl %r11d,%eax
+ movl %r14d,44(%rsp)
+ movl %r13d,%ecx
+ bswapl %edx
+ xorl %edi,%eax
+ roll $5,%ecx
+ andl %esi,%eax
+ leal 1518500249(%r14,%r12,1),%r12d
+ addl %ecx,%r12d
+ xorl %r11d,%eax
+ roll $30,%esi
+ addl %eax,%r12d
+ movl 52(%r9),%ebp
+ movl %edi,%eax
+ movl %edx,48(%rsp)
+ movl %r12d,%ecx
+ bswapl %ebp
+ xorl %esi,%eax
+ roll $5,%ecx
+ andl %r13d,%eax
+ leal 1518500249(%rdx,%r11,1),%r11d
+ addl %ecx,%r11d
+ xorl %edi,%eax
+ roll $30,%r13d
+ addl %eax,%r11d
+ movl 56(%r9),%r14d
+ movl %esi,%eax
+ movl %ebp,52(%rsp)
+ movl %r11d,%ecx
+ bswapl %r14d
+ xorl %r13d,%eax
+ roll $5,%ecx
+ andl %r12d,%eax
+ leal 1518500249(%rbp,%rdi,1),%edi
+ addl %ecx,%edi
+ xorl %esi,%eax
+ roll $30,%r12d
+ addl %eax,%edi
+ movl 60(%r9),%edx
+ movl %r13d,%eax
+ movl %r14d,56(%rsp)
+ movl %edi,%ecx
+ bswapl %edx
+ xorl %r12d,%eax
+ roll $5,%ecx
+ andl %r11d,%eax
+ leal 1518500249(%r14,%rsi,1),%esi
+ addl %ecx,%esi
+ xorl %r13d,%eax
+ roll $30,%r11d
+ addl %eax,%esi
+ xorl 0(%rsp),%ebp
+ movl %r12d,%eax
+ movl %edx,60(%rsp)
+ movl %esi,%ecx
+ xorl 8(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ xorl 32(%rsp),%ebp
+ andl %edi,%eax
+ leal 1518500249(%rdx,%r13,1),%r13d
+ roll $30,%edi
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ roll $1,%ebp
+ addl %eax,%r13d
+ xorl 4(%rsp),%r14d
+ movl %r11d,%eax
+ movl %ebp,0(%rsp)
+ movl %r13d,%ecx
+ xorl 12(%rsp),%r14d
+ xorl %edi,%eax
+ roll $5,%ecx
+ xorl 36(%rsp),%r14d
+ andl %esi,%eax
+ leal 1518500249(%rbp,%r12,1),%r12d
+ roll $30,%esi
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ roll $1,%r14d
+ addl %eax,%r12d
+ xorl 8(%rsp),%edx
+ movl %edi,%eax
+ movl %r14d,4(%rsp)
+ movl %r12d,%ecx
+ xorl 16(%rsp),%edx
+ xorl %esi,%eax
+ roll $5,%ecx
+ xorl 40(%rsp),%edx
+ andl %r13d,%eax
+ leal 1518500249(%r14,%r11,1),%r11d
+ roll $30,%r13d
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ roll $1,%edx
+ addl %eax,%r11d
+ xorl 12(%rsp),%ebp
+ movl %esi,%eax
+ movl %edx,8(%rsp)
+ movl %r11d,%ecx
+ xorl 20(%rsp),%ebp
+ xorl %r13d,%eax
+ roll $5,%ecx
+ xorl 44(%rsp),%ebp
+ andl %r12d,%eax
+ leal 1518500249(%rdx,%rdi,1),%edi
+ roll $30,%r12d
+ xorl %esi,%eax
+ addl %ecx,%edi
+ roll $1,%ebp
+ addl %eax,%edi
+ xorl 16(%rsp),%r14d
+ movl %r13d,%eax
+ movl %ebp,12(%rsp)
+ movl %edi,%ecx
+ xorl 24(%rsp),%r14d
+ xorl %r12d,%eax
+ roll $5,%ecx
+ xorl 48(%rsp),%r14d
+ andl %r11d,%eax
+ leal 1518500249(%rbp,%rsi,1),%esi
+ roll $30,%r11d
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ roll $1,%r14d
+ addl %eax,%esi
+ xorl 20(%rsp),%edx
+ movl %edi,%eax
+ movl %r14d,16(%rsp)
+ movl %esi,%ecx
+ xorl 28(%rsp),%edx
+ xorl %r12d,%eax
+ roll $5,%ecx
+ xorl 52(%rsp),%edx
+ leal 1859775393(%r14,%r13,1),%r13d
+ xorl %r11d,%eax
+ addl %ecx,%r13d
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%edx
+ xorl 24(%rsp),%ebp
+ movl %esi,%eax
+ movl %edx,20(%rsp)
+ movl %r13d,%ecx
+ xorl 32(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ xorl 56(%rsp),%ebp
+ leal 1859775393(%rdx,%r12,1),%r12d
+ xorl %edi,%eax
+ addl %ecx,%r12d
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%ebp
+ xorl 28(%rsp),%r14d
+ movl %r13d,%eax
+ movl %ebp,24(%rsp)
+ movl %r12d,%ecx
+ xorl 36(%rsp),%r14d
+ xorl %edi,%eax
+ roll $5,%ecx
+ xorl 60(%rsp),%r14d
+ leal 1859775393(%rbp,%r11,1),%r11d
+ xorl %esi,%eax
+ addl %ecx,%r11d
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%r14d
+ xorl 32(%rsp),%edx
+ movl %r12d,%eax
+ movl %r14d,28(%rsp)
+ movl %r11d,%ecx
+ xorl 40(%rsp),%edx
+ xorl %esi,%eax
+ roll $5,%ecx
+ xorl 0(%rsp),%edx
+ leal 1859775393(%r14,%rdi,1),%edi
+ xorl %r13d,%eax
+ addl %ecx,%edi
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%edx
+ xorl 36(%rsp),%ebp
+ movl %r11d,%eax
+ movl %edx,32(%rsp)
+ movl %edi,%ecx
+ xorl 44(%rsp),%ebp
+ xorl %r13d,%eax
+ roll $5,%ecx
+ xorl 4(%rsp),%ebp
+ leal 1859775393(%rdx,%rsi,1),%esi
+ xorl %r12d,%eax
+ addl %ecx,%esi
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%ebp
+ xorl 40(%rsp),%r14d
+ movl %edi,%eax
+ movl %ebp,36(%rsp)
+ movl %esi,%ecx
+ xorl 48(%rsp),%r14d
+ xorl %r12d,%eax
+ roll $5,%ecx
+ xorl 8(%rsp),%r14d
+ leal 1859775393(%rbp,%r13,1),%r13d
+ xorl %r11d,%eax
+ addl %ecx,%r13d
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%r14d
+ xorl 44(%rsp),%edx
+ movl %esi,%eax
+ movl %r14d,40(%rsp)
+ movl %r13d,%ecx
+ xorl 52(%rsp),%edx
+ xorl %r11d,%eax
+ roll $5,%ecx
+ xorl 12(%rsp),%edx
+ leal 1859775393(%r14,%r12,1),%r12d
+ xorl %edi,%eax
+ addl %ecx,%r12d
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%edx
+ xorl 48(%rsp),%ebp
+ movl %r13d,%eax
+ movl %edx,44(%rsp)
+ movl %r12d,%ecx
+ xorl 56(%rsp),%ebp
+ xorl %edi,%eax
+ roll $5,%ecx
+ xorl 16(%rsp),%ebp
+ leal 1859775393(%rdx,%r11,1),%r11d
+ xorl %esi,%eax
+ addl %ecx,%r11d
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%ebp
+ xorl 52(%rsp),%r14d
+ movl %r12d,%eax
+ movl %ebp,48(%rsp)
+ movl %r11d,%ecx
+ xorl 60(%rsp),%r14d
+ xorl %esi,%eax
+ roll $5,%ecx
+ xorl 20(%rsp),%r14d
+ leal 1859775393(%rbp,%rdi,1),%edi
+ xorl %r13d,%eax
+ addl %ecx,%edi
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%r14d
+ xorl 56(%rsp),%edx
+ movl %r11d,%eax
+ movl %r14d,52(%rsp)
+ movl %edi,%ecx
+ xorl 0(%rsp),%edx
+ xorl %r13d,%eax
+ roll $5,%ecx
+ xorl 24(%rsp),%edx
+ leal 1859775393(%r14,%rsi,1),%esi
+ xorl %r12d,%eax
+ addl %ecx,%esi
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%edx
+ xorl 60(%rsp),%ebp
+ movl %edi,%eax
+ movl %edx,56(%rsp)
+ movl %esi,%ecx
+ xorl 4(%rsp),%ebp
+ xorl %r12d,%eax
+ roll $5,%ecx
+ xorl 28(%rsp),%ebp
+ leal 1859775393(%rdx,%r13,1),%r13d
+ xorl %r11d,%eax
+ addl %ecx,%r13d
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%ebp
+ xorl 0(%rsp),%r14d
+ movl %esi,%eax
+ movl %ebp,60(%rsp)
+ movl %r13d,%ecx
+ xorl 8(%rsp),%r14d
+ xorl %r11d,%eax
+ roll $5,%ecx
+ xorl 32(%rsp),%r14d
+ leal 1859775393(%rbp,%r12,1),%r12d
+ xorl %edi,%eax
+ addl %ecx,%r12d
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%r14d
+ xorl 4(%rsp),%edx
+ movl %r13d,%eax
+ movl %r14d,0(%rsp)
+ movl %r12d,%ecx
+ xorl 12(%rsp),%edx
+ xorl %edi,%eax
+ roll $5,%ecx
+ xorl 36(%rsp),%edx
+ leal 1859775393(%r14,%r11,1),%r11d
+ xorl %esi,%eax
+ addl %ecx,%r11d
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%edx
+ xorl 8(%rsp),%ebp
+ movl %r12d,%eax
+ movl %edx,4(%rsp)
+ movl %r11d,%ecx
+ xorl 16(%rsp),%ebp
+ xorl %esi,%eax
+ roll $5,%ecx
+ xorl 40(%rsp),%ebp
+ leal 1859775393(%rdx,%rdi,1),%edi
+ xorl %r13d,%eax
+ addl %ecx,%edi
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%ebp
+ xorl 12(%rsp),%r14d
+ movl %r11d,%eax
+ movl %ebp,8(%rsp)
+ movl %edi,%ecx
+ xorl 20(%rsp),%r14d
+ xorl %r13d,%eax
+ roll $5,%ecx
+ xorl 44(%rsp),%r14d
+ leal 1859775393(%rbp,%rsi,1),%esi
+ xorl %r12d,%eax
+ addl %ecx,%esi
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%r14d
+ xorl 16(%rsp),%edx
+ movl %edi,%eax
+ movl %r14d,12(%rsp)
+ movl %esi,%ecx
+ xorl 24(%rsp),%edx
+ xorl %r12d,%eax
+ roll $5,%ecx
+ xorl 48(%rsp),%edx
+ leal 1859775393(%r14,%r13,1),%r13d
+ xorl %r11d,%eax
+ addl %ecx,%r13d
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%edx
+ xorl 20(%rsp),%ebp
+ movl %esi,%eax
+ movl %edx,16(%rsp)
+ movl %r13d,%ecx
+ xorl 28(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ xorl 52(%rsp),%ebp
+ leal 1859775393(%rdx,%r12,1),%r12d
+ xorl %edi,%eax
+ addl %ecx,%r12d
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%ebp
+ xorl 24(%rsp),%r14d
+ movl %r13d,%eax
+ movl %ebp,20(%rsp)
+ movl %r12d,%ecx
+ xorl 32(%rsp),%r14d
+ xorl %edi,%eax
+ roll $5,%ecx
+ xorl 56(%rsp),%r14d
+ leal 1859775393(%rbp,%r11,1),%r11d
+ xorl %esi,%eax
+ addl %ecx,%r11d
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%r14d
+ xorl 28(%rsp),%edx
+ movl %r12d,%eax
+ movl %r14d,24(%rsp)
+ movl %r11d,%ecx
+ xorl 36(%rsp),%edx
+ xorl %esi,%eax
+ roll $5,%ecx
+ xorl 60(%rsp),%edx
+ leal 1859775393(%r14,%rdi,1),%edi
+ xorl %r13d,%eax
+ addl %ecx,%edi
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%edx
+ xorl 32(%rsp),%ebp
+ movl %r11d,%eax
+ movl %edx,28(%rsp)
+ movl %edi,%ecx
+ xorl 40(%rsp),%ebp
+ xorl %r13d,%eax
+ roll $5,%ecx
+ xorl 0(%rsp),%ebp
+ leal 1859775393(%rdx,%rsi,1),%esi
+ xorl %r12d,%eax
+ addl %ecx,%esi
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%ebp
+ xorl 36(%rsp),%r14d
+ movl %r12d,%eax
+ movl %ebp,32(%rsp)
+ movl %r12d,%ebx
+ xorl 44(%rsp),%r14d
+ andl %r11d,%eax
+ movl %esi,%ecx
+ xorl 4(%rsp),%r14d
+ leal -1894007588(%rbp,%r13,1),%r13d
+ xorl %r11d,%ebx
+ roll $5,%ecx
+ addl %eax,%r13d
+ roll $1,%r14d
+ andl %edi,%ebx
+ addl %ecx,%r13d
+ roll $30,%edi
+ addl %ebx,%r13d
+ xorl 40(%rsp),%edx
+ movl %r11d,%eax
+ movl %r14d,36(%rsp)
+ movl %r11d,%ebx
+ xorl 48(%rsp),%edx
+ andl %edi,%eax
+ movl %r13d,%ecx
+ xorl 8(%rsp),%edx
+ leal -1894007588(%r14,%r12,1),%r12d
+ xorl %edi,%ebx
+ roll $5,%ecx
+ addl %eax,%r12d
+ roll $1,%edx
+ andl %esi,%ebx
+ addl %ecx,%r12d
+ roll $30,%esi
+ addl %ebx,%r12d
+ xorl 44(%rsp),%ebp
+ movl %edi,%eax
+ movl %edx,40(%rsp)
+ movl %edi,%ebx
+ xorl 52(%rsp),%ebp
+ andl %esi,%eax
+ movl %r12d,%ecx
+ xorl 12(%rsp),%ebp
+ leal -1894007588(%rdx,%r11,1),%r11d
+ xorl %esi,%ebx
+ roll $5,%ecx
+ addl %eax,%r11d
+ roll $1,%ebp
+ andl %r13d,%ebx
+ addl %ecx,%r11d
+ roll $30,%r13d
+ addl %ebx,%r11d
+ xorl 48(%rsp),%r14d
+ movl %esi,%eax
+ movl %ebp,44(%rsp)
+ movl %esi,%ebx
+ xorl 56(%rsp),%r14d
+ andl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 16(%rsp),%r14d
+ leal -1894007588(%rbp,%rdi,1),%edi
+ xorl %r13d,%ebx
+ roll $5,%ecx
+ addl %eax,%edi
+ roll $1,%r14d
+ andl %r12d,%ebx
+ addl %ecx,%edi
+ roll $30,%r12d
+ addl %ebx,%edi
+ xorl 52(%rsp),%edx
+ movl %r13d,%eax
+ movl %r14d,48(%rsp)
+ movl %r13d,%ebx
+ xorl 60(%rsp),%edx
+ andl %r12d,%eax
+ movl %edi,%ecx
+ xorl 20(%rsp),%edx
+ leal -1894007588(%r14,%rsi,1),%esi
+ xorl %r12d,%ebx
+ roll $5,%ecx
+ addl %eax,%esi
+ roll $1,%edx
+ andl %r11d,%ebx
+ addl %ecx,%esi
+ roll $30,%r11d
+ addl %ebx,%esi
+ xorl 56(%rsp),%ebp
+ movl %r12d,%eax
+ movl %edx,52(%rsp)
+ movl %r12d,%ebx
+ xorl 0(%rsp),%ebp
+ andl %r11d,%eax
+ movl %esi,%ecx
+ xorl 24(%rsp),%ebp
+ leal -1894007588(%rdx,%r13,1),%r13d
+ xorl %r11d,%ebx
+ roll $5,%ecx
+ addl %eax,%r13d
+ roll $1,%ebp
+ andl %edi,%ebx
+ addl %ecx,%r13d
+ roll $30,%edi
+ addl %ebx,%r13d
+ xorl 60(%rsp),%r14d
+ movl %r11d,%eax
+ movl %ebp,56(%rsp)
+ movl %r11d,%ebx
+ xorl 4(%rsp),%r14d
+ andl %edi,%eax
+ movl %r13d,%ecx
+ xorl 28(%rsp),%r14d
+ leal -1894007588(%rbp,%r12,1),%r12d
+ xorl %edi,%ebx
+ roll $5,%ecx
+ addl %eax,%r12d
+ roll $1,%r14d
+ andl %esi,%ebx
+ addl %ecx,%r12d
+ roll $30,%esi
+ addl %ebx,%r12d
+ xorl 0(%rsp),%edx
+ movl %edi,%eax
+ movl %r14d,60(%rsp)
+ movl %edi,%ebx
+ xorl 8(%rsp),%edx
+ andl %esi,%eax
+ movl %r12d,%ecx
+ xorl 32(%rsp),%edx
+ leal -1894007588(%r14,%r11,1),%r11d
+ xorl %esi,%ebx
+ roll $5,%ecx
+ addl %eax,%r11d
+ roll $1,%edx
+ andl %r13d,%ebx
+ addl %ecx,%r11d
+ roll $30,%r13d
+ addl %ebx,%r11d
+ xorl 4(%rsp),%ebp
+ movl %esi,%eax
+ movl %edx,0(%rsp)
+ movl %esi,%ebx
+ xorl 12(%rsp),%ebp
+ andl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 36(%rsp),%ebp
+ leal -1894007588(%rdx,%rdi,1),%edi
+ xorl %r13d,%ebx
+ roll $5,%ecx
+ addl %eax,%edi
+ roll $1,%ebp
+ andl %r12d,%ebx
+ addl %ecx,%edi
+ roll $30,%r12d
+ addl %ebx,%edi
+ xorl 8(%rsp),%r14d
+ movl %r13d,%eax
+ movl %ebp,4(%rsp)
+ movl %r13d,%ebx
+ xorl 16(%rsp),%r14d
+ andl %r12d,%eax
+ movl %edi,%ecx
+ xorl 40(%rsp),%r14d
+ leal -1894007588(%rbp,%rsi,1),%esi
+ xorl %r12d,%ebx
+ roll $5,%ecx
+ addl %eax,%esi
+ roll $1,%r14d
+ andl %r11d,%ebx
+ addl %ecx,%esi
+ roll $30,%r11d
+ addl %ebx,%esi
+ xorl 12(%rsp),%edx
+ movl %r12d,%eax
+ movl %r14d,8(%rsp)
+ movl %r12d,%ebx
+ xorl 20(%rsp),%edx
+ andl %r11d,%eax
+ movl %esi,%ecx
+ xorl 44(%rsp),%edx
+ leal -1894007588(%r14,%r13,1),%r13d
+ xorl %r11d,%ebx
+ roll $5,%ecx
+ addl %eax,%r13d
+ roll $1,%edx
+ andl %edi,%ebx
+ addl %ecx,%r13d
+ roll $30,%edi
+ addl %ebx,%r13d
+ xorl 16(%rsp),%ebp
+ movl %r11d,%eax
+ movl %edx,12(%rsp)
+ movl %r11d,%ebx
+ xorl 24(%rsp),%ebp
+ andl %edi,%eax
+ movl %r13d,%ecx
+ xorl 48(%rsp),%ebp
+ leal -1894007588(%rdx,%r12,1),%r12d
+ xorl %edi,%ebx
+ roll $5,%ecx
+ addl %eax,%r12d
+ roll $1,%ebp
+ andl %esi,%ebx
+ addl %ecx,%r12d
+ roll $30,%esi
+ addl %ebx,%r12d
+ xorl 20(%rsp),%r14d
+ movl %edi,%eax
+ movl %ebp,16(%rsp)
+ movl %edi,%ebx
+ xorl 28(%rsp),%r14d
+ andl %esi,%eax
+ movl %r12d,%ecx
+ xorl 52(%rsp),%r14d
+ leal -1894007588(%rbp,%r11,1),%r11d
+ xorl %esi,%ebx
+ roll $5,%ecx
+ addl %eax,%r11d
+ roll $1,%r14d
+ andl %r13d,%ebx
+ addl %ecx,%r11d
+ roll $30,%r13d
+ addl %ebx,%r11d
+ xorl 24(%rsp),%edx
+ movl %esi,%eax
+ movl %r14d,20(%rsp)
+ movl %esi,%ebx
+ xorl 32(%rsp),%edx
+ andl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 56(%rsp),%edx
+ leal -1894007588(%r14,%rdi,1),%edi
+ xorl %r13d,%ebx
+ roll $5,%ecx
+ addl %eax,%edi
+ roll $1,%edx
+ andl %r12d,%ebx
+ addl %ecx,%edi
+ roll $30,%r12d
+ addl %ebx,%edi
+ xorl 28(%rsp),%ebp
+ movl %r13d,%eax
+ movl %edx,24(%rsp)
+ movl %r13d,%ebx
+ xorl 36(%rsp),%ebp
+ andl %r12d,%eax
+ movl %edi,%ecx
+ xorl 60(%rsp),%ebp
+ leal -1894007588(%rdx,%rsi,1),%esi
+ xorl %r12d,%ebx
+ roll $5,%ecx
+ addl %eax,%esi
+ roll $1,%ebp
+ andl %r11d,%ebx
+ addl %ecx,%esi
+ roll $30,%r11d
+ addl %ebx,%esi
+ xorl 32(%rsp),%r14d
+ movl %r12d,%eax
+ movl %ebp,28(%rsp)
+ movl %r12d,%ebx
+ xorl 40(%rsp),%r14d
+ andl %r11d,%eax
+ movl %esi,%ecx
+ xorl 0(%rsp),%r14d
+ leal -1894007588(%rbp,%r13,1),%r13d
+ xorl %r11d,%ebx
+ roll $5,%ecx
+ addl %eax,%r13d
+ roll $1,%r14d
+ andl %edi,%ebx
+ addl %ecx,%r13d
+ roll $30,%edi
+ addl %ebx,%r13d
+ xorl 36(%rsp),%edx
+ movl %r11d,%eax
+ movl %r14d,32(%rsp)
+ movl %r11d,%ebx
+ xorl 44(%rsp),%edx
+ andl %edi,%eax
+ movl %r13d,%ecx
+ xorl 4(%rsp),%edx
+ leal -1894007588(%r14,%r12,1),%r12d
+ xorl %edi,%ebx
+ roll $5,%ecx
+ addl %eax,%r12d
+ roll $1,%edx
+ andl %esi,%ebx
+ addl %ecx,%r12d
+ roll $30,%esi
+ addl %ebx,%r12d
+ xorl 40(%rsp),%ebp
+ movl %edi,%eax
+ movl %edx,36(%rsp)
+ movl %edi,%ebx
+ xorl 48(%rsp),%ebp
+ andl %esi,%eax
+ movl %r12d,%ecx
+ xorl 8(%rsp),%ebp
+ leal -1894007588(%rdx,%r11,1),%r11d
+ xorl %esi,%ebx
+ roll $5,%ecx
+ addl %eax,%r11d
+ roll $1,%ebp
+ andl %r13d,%ebx
+ addl %ecx,%r11d
+ roll $30,%r13d
+ addl %ebx,%r11d
+ xorl 44(%rsp),%r14d
+ movl %esi,%eax
+ movl %ebp,40(%rsp)
+ movl %esi,%ebx
+ xorl 52(%rsp),%r14d
+ andl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 12(%rsp),%r14d
+ leal -1894007588(%rbp,%rdi,1),%edi
+ xorl %r13d,%ebx
+ roll $5,%ecx
+ addl %eax,%edi
+ roll $1,%r14d
+ andl %r12d,%ebx
+ addl %ecx,%edi
+ roll $30,%r12d
+ addl %ebx,%edi
+ xorl 48(%rsp),%edx
+ movl %r13d,%eax
+ movl %r14d,44(%rsp)
+ movl %r13d,%ebx
+ xorl 56(%rsp),%edx
+ andl %r12d,%eax
+ movl %edi,%ecx
+ xorl 16(%rsp),%edx
+ leal -1894007588(%r14,%rsi,1),%esi
+ xorl %r12d,%ebx
+ roll $5,%ecx
+ addl %eax,%esi
+ roll $1,%edx
+ andl %r11d,%ebx
+ addl %ecx,%esi
+ roll $30,%r11d
+ addl %ebx,%esi
+ xorl 52(%rsp),%ebp
+ movl %edi,%eax
+ movl %edx,48(%rsp)
+ movl %esi,%ecx
+ xorl 60(%rsp),%ebp
+ xorl %r12d,%eax
+ roll $5,%ecx
+ xorl 20(%rsp),%ebp
+ leal -899497514(%rdx,%r13,1),%r13d
+ xorl %r11d,%eax
+ addl %ecx,%r13d
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%ebp
+ xorl 56(%rsp),%r14d
+ movl %esi,%eax
+ movl %ebp,52(%rsp)
+ movl %r13d,%ecx
+ xorl 0(%rsp),%r14d
+ xorl %r11d,%eax
+ roll $5,%ecx
+ xorl 24(%rsp),%r14d
+ leal -899497514(%rbp,%r12,1),%r12d
+ xorl %edi,%eax
+ addl %ecx,%r12d
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%r14d
+ xorl 60(%rsp),%edx
+ movl %r13d,%eax
+ movl %r14d,56(%rsp)
+ movl %r12d,%ecx
+ xorl 4(%rsp),%edx
+ xorl %edi,%eax
+ roll $5,%ecx
+ xorl 28(%rsp),%edx
+ leal -899497514(%r14,%r11,1),%r11d
+ xorl %esi,%eax
+ addl %ecx,%r11d
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%edx
+ xorl 0(%rsp),%ebp
+ movl %r12d,%eax
+ movl %edx,60(%rsp)
+ movl %r11d,%ecx
+ xorl 8(%rsp),%ebp
+ xorl %esi,%eax
+ roll $5,%ecx
+ xorl 32(%rsp),%ebp
+ leal -899497514(%rdx,%rdi,1),%edi
+ xorl %r13d,%eax
+ addl %ecx,%edi
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%ebp
+ xorl 4(%rsp),%r14d
+ movl %r11d,%eax
+ movl %ebp,0(%rsp)
+ movl %edi,%ecx
+ xorl 12(%rsp),%r14d
+ xorl %r13d,%eax
+ roll $5,%ecx
+ xorl 36(%rsp),%r14d
+ leal -899497514(%rbp,%rsi,1),%esi
+ xorl %r12d,%eax
+ addl %ecx,%esi
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%r14d
+ xorl 8(%rsp),%edx
+ movl %edi,%eax
+ movl %r14d,4(%rsp)
+ movl %esi,%ecx
+ xorl 16(%rsp),%edx
+ xorl %r12d,%eax
+ roll $5,%ecx
+ xorl 40(%rsp),%edx
+ leal -899497514(%r14,%r13,1),%r13d
+ xorl %r11d,%eax
+ addl %ecx,%r13d
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%edx
+ xorl 12(%rsp),%ebp
+ movl %esi,%eax
+ movl %edx,8(%rsp)
+ movl %r13d,%ecx
+ xorl 20(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ xorl 44(%rsp),%ebp
+ leal -899497514(%rdx,%r12,1),%r12d
+ xorl %edi,%eax
+ addl %ecx,%r12d
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%ebp
+ xorl 16(%rsp),%r14d
+ movl %r13d,%eax
+ movl %ebp,12(%rsp)
+ movl %r12d,%ecx
+ xorl 24(%rsp),%r14d
+ xorl %edi,%eax
+ roll $5,%ecx
+ xorl 48(%rsp),%r14d
+ leal -899497514(%rbp,%r11,1),%r11d
+ xorl %esi,%eax
+ addl %ecx,%r11d
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%r14d
+ xorl 20(%rsp),%edx
+ movl %r12d,%eax
+ movl %r14d,16(%rsp)
+ movl %r11d,%ecx
+ xorl 28(%rsp),%edx
+ xorl %esi,%eax
+ roll $5,%ecx
+ xorl 52(%rsp),%edx
+ leal -899497514(%r14,%rdi,1),%edi
+ xorl %r13d,%eax
+ addl %ecx,%edi
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%edx
+ xorl 24(%rsp),%ebp
+ movl %r11d,%eax
+ movl %edx,20(%rsp)
+ movl %edi,%ecx
+ xorl 32(%rsp),%ebp
+ xorl %r13d,%eax
+ roll $5,%ecx
+ xorl 56(%rsp),%ebp
+ leal -899497514(%rdx,%rsi,1),%esi
+ xorl %r12d,%eax
+ addl %ecx,%esi
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%ebp
+ xorl 28(%rsp),%r14d
+ movl %edi,%eax
+ movl %ebp,24(%rsp)
+ movl %esi,%ecx
+ xorl 36(%rsp),%r14d
+ xorl %r12d,%eax
+ roll $5,%ecx
+ xorl 60(%rsp),%r14d
+ leal -899497514(%rbp,%r13,1),%r13d
+ xorl %r11d,%eax
+ addl %ecx,%r13d
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%r14d
+ xorl 32(%rsp),%edx
+ movl %esi,%eax
+ movl %r14d,28(%rsp)
+ movl %r13d,%ecx
+ xorl 40(%rsp),%edx
+ xorl %r11d,%eax
+ roll $5,%ecx
+ xorl 0(%rsp),%edx
+ leal -899497514(%r14,%r12,1),%r12d
+ xorl %edi,%eax
+ addl %ecx,%r12d
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%edx
+ xorl 36(%rsp),%ebp
+ movl %r13d,%eax
+
+ movl %r12d,%ecx
+ xorl 44(%rsp),%ebp
+ xorl %edi,%eax
+ roll $5,%ecx
+ xorl 4(%rsp),%ebp
+ leal -899497514(%rdx,%r11,1),%r11d
+ xorl %esi,%eax
+ addl %ecx,%r11d
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%ebp
+ xorl 40(%rsp),%r14d
+ movl %r12d,%eax
+
+ movl %r11d,%ecx
+ xorl 48(%rsp),%r14d
+ xorl %esi,%eax
+ roll $5,%ecx
+ xorl 8(%rsp),%r14d
+ leal -899497514(%rbp,%rdi,1),%edi
+ xorl %r13d,%eax
+ addl %ecx,%edi
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%r14d
+ xorl 44(%rsp),%edx
+ movl %r11d,%eax
+
+ movl %edi,%ecx
+ xorl 52(%rsp),%edx
+ xorl %r13d,%eax
+ roll $5,%ecx
+ xorl 12(%rsp),%edx
+ leal -899497514(%r14,%rsi,1),%esi
+ xorl %r12d,%eax
+ addl %ecx,%esi
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%edx
+ xorl 48(%rsp),%ebp
+ movl %edi,%eax
+
+ movl %esi,%ecx
+ xorl 56(%rsp),%ebp
+ xorl %r12d,%eax
+ roll $5,%ecx
+ xorl 16(%rsp),%ebp
+ leal -899497514(%rdx,%r13,1),%r13d
+ xorl %r11d,%eax
+ addl %ecx,%r13d
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%ebp
+ xorl 52(%rsp),%r14d
+ movl %esi,%eax
+
+ movl %r13d,%ecx
+ xorl 60(%rsp),%r14d
+ xorl %r11d,%eax
+ roll $5,%ecx
+ xorl 20(%rsp),%r14d
+ leal -899497514(%rbp,%r12,1),%r12d
+ xorl %edi,%eax
+ addl %ecx,%r12d
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%r14d
+ xorl 56(%rsp),%edx
+ movl %r13d,%eax
+
+ movl %r12d,%ecx
+ xorl 0(%rsp),%edx
+ xorl %edi,%eax
+ roll $5,%ecx
+ xorl 24(%rsp),%edx
+ leal -899497514(%r14,%r11,1),%r11d
+ xorl %esi,%eax
+ addl %ecx,%r11d
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%edx
+ xorl 60(%rsp),%ebp
+ movl %r12d,%eax
+
+ movl %r11d,%ecx
+ xorl 4(%rsp),%ebp
+ xorl %esi,%eax
+ roll $5,%ecx
+ xorl 28(%rsp),%ebp
+ leal -899497514(%rdx,%rdi,1),%edi
+ xorl %r13d,%eax
+ addl %ecx,%edi
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%ebp
+ movl %r11d,%eax
+ movl %edi,%ecx
+ xorl %r13d,%eax
+ leal -899497514(%rbp,%rsi,1),%esi
+ roll $5,%ecx
+ xorl %r12d,%eax
+ addl %ecx,%esi
+ roll $30,%r11d
+ addl %eax,%esi
+ addl 0(%r8),%esi
+ addl 4(%r8),%edi
+ addl 8(%r8),%r11d
+ addl 12(%r8),%r12d
+ addl 16(%r8),%r13d
+ movl %esi,0(%r8)
+ movl %edi,4(%r8)
+ movl %r11d,8(%r8)
+ movl %r12d,12(%r8)
+ movl %r13d,16(%r8)
+
+ subq $1,%r10
+ leaq 64(%r9),%r9
+ jnz .Lloop
+
+ movq 64(%rsp),%rsi
+.cfi_def_cfa %rsi,8
+ movq -40(%rsi),%r14
+.cfi_restore %r14
+ movq -32(%rsi),%r13
+.cfi_restore %r13
+ movq -24(%rsi),%r12
+.cfi_restore %r12
+ movq -16(%rsi),%rbp
+.cfi_restore %rbp
+ movq -8(%rsi),%rbx
+.cfi_restore %rbx
+ leaq (%rsi),%rsp
+.cfi_def_cfa_register %rsp
+.Lepilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size sha1_block_data_order,.-sha1_block_data_order
+.type sha1_block_data_order_shaext,@function
+.align 32
+sha1_block_data_order_shaext:
+_shaext_shortcut:
+.cfi_startproc
+ movdqu (%rdi),%xmm0
+ movd 16(%rdi),%xmm1
+ movdqa K_XX_XX+160(%rip),%xmm3
+
+ movdqu (%rsi),%xmm4
+ pshufd $27,%xmm0,%xmm0
+ movdqu 16(%rsi),%xmm5
+ pshufd $27,%xmm1,%xmm1
+ movdqu 32(%rsi),%xmm6
+.byte 102,15,56,0,227
+ movdqu 48(%rsi),%xmm7
+.byte 102,15,56,0,235
+.byte 102,15,56,0,243
+ movdqa %xmm1,%xmm9
+.byte 102,15,56,0,251
+ jmp .Loop_shaext
+
+.align 16
+.Loop_shaext:
+ decq %rdx
+ leaq 64(%rsi),%r8
+ paddd %xmm4,%xmm1
+ cmovneq %r8,%rsi
+ movdqa %xmm0,%xmm8
+.byte 15,56,201,229
+ movdqa %xmm0,%xmm2
+.byte 15,58,204,193,0
+.byte 15,56,200,213
+ pxor %xmm6,%xmm4
+.byte 15,56,201,238
+.byte 15,56,202,231
+
+ movdqa %xmm0,%xmm1
+.byte 15,58,204,194,0
+.byte 15,56,200,206
+ pxor %xmm7,%xmm5
+.byte 15,56,202,236
+.byte 15,56,201,247
+ movdqa %xmm0,%xmm2
+.byte 15,58,204,193,0
+.byte 15,56,200,215
+ pxor %xmm4,%xmm6
+.byte 15,56,201,252
+.byte 15,56,202,245
+
+ movdqa %xmm0,%xmm1
+.byte 15,58,204,194,0
+.byte 15,56,200,204
+ pxor %xmm5,%xmm7
+.byte 15,56,202,254
+.byte 15,56,201,229
+ movdqa %xmm0,%xmm2
+.byte 15,58,204,193,0
+.byte 15,56,200,213
+ pxor %xmm6,%xmm4
+.byte 15,56,201,238
+.byte 15,56,202,231
+
+ movdqa %xmm0,%xmm1
+.byte 15,58,204,194,1
+.byte 15,56,200,206
+ pxor %xmm7,%xmm5
+.byte 15,56,202,236
+.byte 15,56,201,247
+ movdqa %xmm0,%xmm2
+.byte 15,58,204,193,1
+.byte 15,56,200,215
+ pxor %xmm4,%xmm6
+.byte 15,56,201,252
+.byte 15,56,202,245
+
+ movdqa %xmm0,%xmm1
+.byte 15,58,204,194,1
+.byte 15,56,200,204
+ pxor %xmm5,%xmm7
+.byte 15,56,202,254
+.byte 15,56,201,229
+ movdqa %xmm0,%xmm2
+.byte 15,58,204,193,1
+.byte 15,56,200,213
+ pxor %xmm6,%xmm4
+.byte 15,56,201,238
+.byte 15,56,202,231
+
+ movdqa %xmm0,%xmm1
+.byte 15,58,204,194,1
+.byte 15,56,200,206
+ pxor %xmm7,%xmm5
+.byte 15,56,202,236
+.byte 15,56,201,247
+ movdqa %xmm0,%xmm2
+.byte 15,58,204,193,2
+.byte 15,56,200,215
+ pxor %xmm4,%xmm6
+.byte 15,56,201,252
+.byte 15,56,202,245
+
+ movdqa %xmm0,%xmm1
+.byte 15,58,204,194,2
+.byte 15,56,200,204
+ pxor %xmm5,%xmm7
+.byte 15,56,202,254
+.byte 15,56,201,229
+ movdqa %xmm0,%xmm2
+.byte 15,58,204,193,2
+.byte 15,56,200,213
+ pxor %xmm6,%xmm4
+.byte 15,56,201,238
+.byte 15,56,202,231
+
+ movdqa %xmm0,%xmm1
+.byte 15,58,204,194,2
+.byte 15,56,200,206
+ pxor %xmm7,%xmm5
+.byte 15,56,202,236
+.byte 15,56,201,247
+ movdqa %xmm0,%xmm2
+.byte 15,58,204,193,2
+.byte 15,56,200,215
+ pxor %xmm4,%xmm6
+.byte 15,56,201,252
+.byte 15,56,202,245
+
+ movdqa %xmm0,%xmm1
+.byte 15,58,204,194,3
+.byte 15,56,200,204
+ pxor %xmm5,%xmm7
+.byte 15,56,202,254
+ movdqu (%rsi),%xmm4
+ movdqa %xmm0,%xmm2
+.byte 15,58,204,193,3
+.byte 15,56,200,213
+ movdqu 16(%rsi),%xmm5
+.byte 102,15,56,0,227
+
+ movdqa %xmm0,%xmm1
+.byte 15,58,204,194,3
+.byte 15,56,200,206
+ movdqu 32(%rsi),%xmm6
+.byte 102,15,56,0,235
+
+ movdqa %xmm0,%xmm2
+.byte 15,58,204,193,3
+.byte 15,56,200,215
+ movdqu 48(%rsi),%xmm7
+.byte 102,15,56,0,243
+
+ movdqa %xmm0,%xmm1
+.byte 15,58,204,194,3
+.byte 65,15,56,200,201
+.byte 102,15,56,0,251
+
+ paddd %xmm8,%xmm0
+ movdqa %xmm1,%xmm9
+
+ jnz .Loop_shaext
+
+ pshufd $27,%xmm0,%xmm0
+ pshufd $27,%xmm1,%xmm1
+ movdqu %xmm0,(%rdi)
+ movd %xmm1,16(%rdi)
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size sha1_block_data_order_shaext,.-sha1_block_data_order_shaext
+.type sha1_block_data_order_ssse3,@function
+.align 16
+sha1_block_data_order_ssse3:
+_ssse3_shortcut:
+.cfi_startproc
+ movq %rsp,%r11
+.cfi_def_cfa_register %r11
+ pushq %rbx
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_offset %rbp,-24
+ pushq %r12
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_offset %r14,-48
+ leaq -64(%rsp),%rsp
+ andq $-64,%rsp
+ movq %rdi,%r8
+ movq %rsi,%r9
+ movq %rdx,%r10
+
+ shlq $6,%r10
+ addq %r9,%r10
+ leaq K_XX_XX+64(%rip),%r14
+
+ movl 0(%r8),%eax
+ movl 4(%r8),%ebx
+ movl 8(%r8),%ecx
+ movl 12(%r8),%edx
+ movl %ebx,%esi
+ movl 16(%r8),%ebp
+ movl %ecx,%edi
+ xorl %edx,%edi
+ andl %edi,%esi
+
+ movdqa 64(%r14),%xmm6
+ movdqa -64(%r14),%xmm9
+ movdqu 0(%r9),%xmm0
+ movdqu 16(%r9),%xmm1
+ movdqu 32(%r9),%xmm2
+ movdqu 48(%r9),%xmm3
+.byte 102,15,56,0,198
+.byte 102,15,56,0,206
+.byte 102,15,56,0,214
+ addq $64,%r9
+ paddd %xmm9,%xmm0
+.byte 102,15,56,0,222
+ paddd %xmm9,%xmm1
+ paddd %xmm9,%xmm2
+ movdqa %xmm0,0(%rsp)
+ psubd %xmm9,%xmm0
+ movdqa %xmm1,16(%rsp)
+ psubd %xmm9,%xmm1
+ movdqa %xmm2,32(%rsp)
+ psubd %xmm9,%xmm2
+ jmp .Loop_ssse3
+.align 16
+.Loop_ssse3:
+ rorl $2,%ebx
+ pshufd $238,%xmm0,%xmm4
+ xorl %edx,%esi
+ movdqa %xmm3,%xmm8
+ paddd %xmm3,%xmm9
+ movl %eax,%edi
+ addl 0(%rsp),%ebp
+ punpcklqdq %xmm1,%xmm4
+ xorl %ecx,%ebx
+ roll $5,%eax
+ addl %esi,%ebp
+ psrldq $4,%xmm8
+ andl %ebx,%edi
+ xorl %ecx,%ebx
+ pxor %xmm0,%xmm4
+ addl %eax,%ebp
+ rorl $7,%eax
+ pxor %xmm2,%xmm8
+ xorl %ecx,%edi
+ movl %ebp,%esi
+ addl 4(%rsp),%edx
+ pxor %xmm8,%xmm4
+ xorl %ebx,%eax
+ roll $5,%ebp
+ movdqa %xmm9,48(%rsp)
+ addl %edi,%edx
+ andl %eax,%esi
+ movdqa %xmm4,%xmm10
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ rorl $7,%ebp
+ movdqa %xmm4,%xmm8
+ xorl %ebx,%esi
+ pslldq $12,%xmm10
+ paddd %xmm4,%xmm4
+ movl %edx,%edi
+ addl 8(%rsp),%ecx
+ psrld $31,%xmm8
+ xorl %eax,%ebp
+ roll $5,%edx
+ addl %esi,%ecx
+ movdqa %xmm10,%xmm9
+ andl %ebp,%edi
+ xorl %eax,%ebp
+ psrld $30,%xmm10
+ addl %edx,%ecx
+ rorl $7,%edx
+ por %xmm8,%xmm4
+ xorl %eax,%edi
+ movl %ecx,%esi
+ addl 12(%rsp),%ebx
+ pslld $2,%xmm9
+ pxor %xmm10,%xmm4
+ xorl %ebp,%edx
+ movdqa -64(%r14),%xmm10
+ roll $5,%ecx
+ addl %edi,%ebx
+ andl %edx,%esi
+ pxor %xmm9,%xmm4
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ rorl $7,%ecx
+ pshufd $238,%xmm1,%xmm5
+ xorl %ebp,%esi
+ movdqa %xmm4,%xmm9
+ paddd %xmm4,%xmm10
+ movl %ebx,%edi
+ addl 16(%rsp),%eax
+ punpcklqdq %xmm2,%xmm5
+ xorl %edx,%ecx
+ roll $5,%ebx
+ addl %esi,%eax
+ psrldq $4,%xmm9
+ andl %ecx,%edi
+ xorl %edx,%ecx
+ pxor %xmm1,%xmm5
+ addl %ebx,%eax
+ rorl $7,%ebx
+ pxor %xmm3,%xmm9
+ xorl %edx,%edi
+ movl %eax,%esi
+ addl 20(%rsp),%ebp
+ pxor %xmm9,%xmm5
+ xorl %ecx,%ebx
+ roll $5,%eax
+ movdqa %xmm10,0(%rsp)
+ addl %edi,%ebp
+ andl %ebx,%esi
+ movdqa %xmm5,%xmm8
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ rorl $7,%eax
+ movdqa %xmm5,%xmm9
+ xorl %ecx,%esi
+ pslldq $12,%xmm8
+ paddd %xmm5,%xmm5
+ movl %ebp,%edi
+ addl 24(%rsp),%edx
+ psrld $31,%xmm9
+ xorl %ebx,%eax
+ roll $5,%ebp
+ addl %esi,%edx
+ movdqa %xmm8,%xmm10
+ andl %eax,%edi
+ xorl %ebx,%eax
+ psrld $30,%xmm8
+ addl %ebp,%edx
+ rorl $7,%ebp
+ por %xmm9,%xmm5
+ xorl %ebx,%edi
+ movl %edx,%esi
+ addl 28(%rsp),%ecx
+ pslld $2,%xmm10
+ pxor %xmm8,%xmm5
+ xorl %eax,%ebp
+ movdqa -32(%r14),%xmm8
+ roll $5,%edx
+ addl %edi,%ecx
+ andl %ebp,%esi
+ pxor %xmm10,%xmm5
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ rorl $7,%edx
+ pshufd $238,%xmm2,%xmm6
+ xorl %eax,%esi
+ movdqa %xmm5,%xmm10
+ paddd %xmm5,%xmm8
+ movl %ecx,%edi
+ addl 32(%rsp),%ebx
+ punpcklqdq %xmm3,%xmm6
+ xorl %ebp,%edx
+ roll $5,%ecx
+ addl %esi,%ebx
+ psrldq $4,%xmm10
+ andl %edx,%edi
+ xorl %ebp,%edx
+ pxor %xmm2,%xmm6
+ addl %ecx,%ebx
+ rorl $7,%ecx
+ pxor %xmm4,%xmm10
+ xorl %ebp,%edi
+ movl %ebx,%esi
+ addl 36(%rsp),%eax
+ pxor %xmm10,%xmm6
+ xorl %edx,%ecx
+ roll $5,%ebx
+ movdqa %xmm8,16(%rsp)
+ addl %edi,%eax
+ andl %ecx,%esi
+ movdqa %xmm6,%xmm9
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ rorl $7,%ebx
+ movdqa %xmm6,%xmm10
+ xorl %edx,%esi
+ pslldq $12,%xmm9
+ paddd %xmm6,%xmm6
+ movl %eax,%edi
+ addl 40(%rsp),%ebp
+ psrld $31,%xmm10
+ xorl %ecx,%ebx
+ roll $5,%eax
+ addl %esi,%ebp
+ movdqa %xmm9,%xmm8
+ andl %ebx,%edi
+ xorl %ecx,%ebx
+ psrld $30,%xmm9
+ addl %eax,%ebp
+ rorl $7,%eax
+ por %xmm10,%xmm6
+ xorl %ecx,%edi
+ movl %ebp,%esi
+ addl 44(%rsp),%edx
+ pslld $2,%xmm8
+ pxor %xmm9,%xmm6
+ xorl %ebx,%eax
+ movdqa -32(%r14),%xmm9
+ roll $5,%ebp
+ addl %edi,%edx
+ andl %eax,%esi
+ pxor %xmm8,%xmm6
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ rorl $7,%ebp
+ pshufd $238,%xmm3,%xmm7
+ xorl %ebx,%esi
+ movdqa %xmm6,%xmm8
+ paddd %xmm6,%xmm9
+ movl %edx,%edi
+ addl 48(%rsp),%ecx
+ punpcklqdq %xmm4,%xmm7
+ xorl %eax,%ebp
+ roll $5,%edx
+ addl %esi,%ecx
+ psrldq $4,%xmm8
+ andl %ebp,%edi
+ xorl %eax,%ebp
+ pxor %xmm3,%xmm7
+ addl %edx,%ecx
+ rorl $7,%edx
+ pxor %xmm5,%xmm8
+ xorl %eax,%edi
+ movl %ecx,%esi
+ addl 52(%rsp),%ebx
+ pxor %xmm8,%xmm7
+ xorl %ebp,%edx
+ roll $5,%ecx
+ movdqa %xmm9,32(%rsp)
+ addl %edi,%ebx
+ andl %edx,%esi
+ movdqa %xmm7,%xmm10
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ rorl $7,%ecx
+ movdqa %xmm7,%xmm8
+ xorl %ebp,%esi
+ pslldq $12,%xmm10
+ paddd %xmm7,%xmm7
+ movl %ebx,%edi
+ addl 56(%rsp),%eax
+ psrld $31,%xmm8
+ xorl %edx,%ecx
+ roll $5,%ebx
+ addl %esi,%eax
+ movdqa %xmm10,%xmm9
+ andl %ecx,%edi
+ xorl %edx,%ecx
+ psrld $30,%xmm10
+ addl %ebx,%eax
+ rorl $7,%ebx
+ por %xmm8,%xmm7
+ xorl %edx,%edi
+ movl %eax,%esi
+ addl 60(%rsp),%ebp
+ pslld $2,%xmm9
+ pxor %xmm10,%xmm7
+ xorl %ecx,%ebx
+ movdqa -32(%r14),%xmm10
+ roll $5,%eax
+ addl %edi,%ebp
+ andl %ebx,%esi
+ pxor %xmm9,%xmm7
+ pshufd $238,%xmm6,%xmm9
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ rorl $7,%eax
+ pxor %xmm4,%xmm0
+ xorl %ecx,%esi
+ movl %ebp,%edi
+ addl 0(%rsp),%edx
+ punpcklqdq %xmm7,%xmm9
+ xorl %ebx,%eax
+ roll $5,%ebp
+ pxor %xmm1,%xmm0
+ addl %esi,%edx
+ andl %eax,%edi
+ movdqa %xmm10,%xmm8
+ xorl %ebx,%eax
+ paddd %xmm7,%xmm10
+ addl %ebp,%edx
+ pxor %xmm9,%xmm0
+ rorl $7,%ebp
+ xorl %ebx,%edi
+ movl %edx,%esi
+ addl 4(%rsp),%ecx
+ movdqa %xmm0,%xmm9
+ xorl %eax,%ebp
+ roll $5,%edx
+ movdqa %xmm10,48(%rsp)
+ addl %edi,%ecx
+ andl %ebp,%esi
+ xorl %eax,%ebp
+ pslld $2,%xmm0
+ addl %edx,%ecx
+ rorl $7,%edx
+ psrld $30,%xmm9
+ xorl %eax,%esi
+ movl %ecx,%edi
+ addl 8(%rsp),%ebx
+ por %xmm9,%xmm0
+ xorl %ebp,%edx
+ roll $5,%ecx
+ pshufd $238,%xmm7,%xmm10
+ addl %esi,%ebx
+ andl %edx,%edi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ addl 12(%rsp),%eax
+ xorl %ebp,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ pxor %xmm5,%xmm1
+ addl 16(%rsp),%ebp
+ xorl %ecx,%esi
+ punpcklqdq %xmm0,%xmm10
+ movl %eax,%edi
+ roll $5,%eax
+ pxor %xmm2,%xmm1
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ movdqa %xmm8,%xmm9
+ rorl $7,%ebx
+ paddd %xmm0,%xmm8
+ addl %eax,%ebp
+ pxor %xmm10,%xmm1
+ addl 20(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ movdqa %xmm1,%xmm10
+ addl %edi,%edx
+ xorl %ebx,%esi
+ movdqa %xmm8,0(%rsp)
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 24(%rsp),%ecx
+ pslld $2,%xmm1
+ xorl %eax,%esi
+ movl %edx,%edi
+ psrld $30,%xmm10
+ roll $5,%edx
+ addl %esi,%ecx
+ xorl %eax,%edi
+ rorl $7,%ebp
+ por %xmm10,%xmm1
+ addl %edx,%ecx
+ addl 28(%rsp),%ebx
+ pshufd $238,%xmm0,%xmm8
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ pxor %xmm6,%xmm2
+ addl 32(%rsp),%eax
+ xorl %edx,%esi
+ punpcklqdq %xmm1,%xmm8
+ movl %ebx,%edi
+ roll $5,%ebx
+ pxor %xmm3,%xmm2
+ addl %esi,%eax
+ xorl %edx,%edi
+ movdqa 0(%r14),%xmm10
+ rorl $7,%ecx
+ paddd %xmm1,%xmm9
+ addl %ebx,%eax
+ pxor %xmm8,%xmm2
+ addl 36(%rsp),%ebp
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ movdqa %xmm2,%xmm8
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ movdqa %xmm9,16(%rsp)
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 40(%rsp),%edx
+ pslld $2,%xmm2
+ xorl %ebx,%esi
+ movl %ebp,%edi
+ psrld $30,%xmm8
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %ebx,%edi
+ rorl $7,%eax
+ por %xmm8,%xmm2
+ addl %ebp,%edx
+ addl 44(%rsp),%ecx
+ pshufd $238,%xmm1,%xmm9
+ xorl %eax,%edi
+ movl %edx,%esi
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ pxor %xmm7,%xmm3
+ addl 48(%rsp),%ebx
+ xorl %ebp,%esi
+ punpcklqdq %xmm2,%xmm9
+ movl %ecx,%edi
+ roll $5,%ecx
+ pxor %xmm4,%xmm3
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ movdqa %xmm10,%xmm8
+ rorl $7,%edx
+ paddd %xmm2,%xmm10
+ addl %ecx,%ebx
+ pxor %xmm9,%xmm3
+ addl 52(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ movdqa %xmm3,%xmm9
+ addl %edi,%eax
+ xorl %edx,%esi
+ movdqa %xmm10,32(%rsp)
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 56(%rsp),%ebp
+ pslld $2,%xmm3
+ xorl %ecx,%esi
+ movl %eax,%edi
+ psrld $30,%xmm9
+ roll $5,%eax
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ rorl $7,%ebx
+ por %xmm9,%xmm3
+ addl %eax,%ebp
+ addl 60(%rsp),%edx
+ pshufd $238,%xmm2,%xmm10
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ pxor %xmm0,%xmm4
+ addl 0(%rsp),%ecx
+ xorl %eax,%esi
+ punpcklqdq %xmm3,%xmm10
+ movl %edx,%edi
+ roll $5,%edx
+ pxor %xmm5,%xmm4
+ addl %esi,%ecx
+ xorl %eax,%edi
+ movdqa %xmm8,%xmm9
+ rorl $7,%ebp
+ paddd %xmm3,%xmm8
+ addl %edx,%ecx
+ pxor %xmm10,%xmm4
+ addl 4(%rsp),%ebx
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ roll $5,%ecx
+ movdqa %xmm4,%xmm10
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ movdqa %xmm8,48(%rsp)
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 8(%rsp),%eax
+ pslld $2,%xmm4
+ xorl %edx,%esi
+ movl %ebx,%edi
+ psrld $30,%xmm10
+ roll $5,%ebx
+ addl %esi,%eax
+ xorl %edx,%edi
+ rorl $7,%ecx
+ por %xmm10,%xmm4
+ addl %ebx,%eax
+ addl 12(%rsp),%ebp
+ pshufd $238,%xmm3,%xmm8
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ pxor %xmm1,%xmm5
+ addl 16(%rsp),%edx
+ xorl %ebx,%esi
+ punpcklqdq %xmm4,%xmm8
+ movl %ebp,%edi
+ roll $5,%ebp
+ pxor %xmm6,%xmm5
+ addl %esi,%edx
+ xorl %ebx,%edi
+ movdqa %xmm9,%xmm10
+ rorl $7,%eax
+ paddd %xmm4,%xmm9
+ addl %ebp,%edx
+ pxor %xmm8,%xmm5
+ addl 20(%rsp),%ecx
+ xorl %eax,%edi
+ movl %edx,%esi
+ roll $5,%edx
+ movdqa %xmm5,%xmm8
+ addl %edi,%ecx
+ xorl %eax,%esi
+ movdqa %xmm9,0(%rsp)
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 24(%rsp),%ebx
+ pslld $2,%xmm5
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ psrld $30,%xmm8
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ por %xmm8,%xmm5
+ addl %ecx,%ebx
+ addl 28(%rsp),%eax
+ pshufd $238,%xmm4,%xmm9
+ rorl $7,%ecx
+ movl %ebx,%esi
+ xorl %edx,%edi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %ecx,%esi
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ pxor %xmm2,%xmm6
+ addl 32(%rsp),%ebp
+ andl %ecx,%esi
+ xorl %edx,%ecx
+ rorl $7,%ebx
+ punpcklqdq %xmm5,%xmm9
+ movl %eax,%edi
+ xorl %ecx,%esi
+ pxor %xmm7,%xmm6
+ roll $5,%eax
+ addl %esi,%ebp
+ movdqa %xmm10,%xmm8
+ xorl %ebx,%edi
+ paddd %xmm5,%xmm10
+ xorl %ecx,%ebx
+ pxor %xmm9,%xmm6
+ addl %eax,%ebp
+ addl 36(%rsp),%edx
+ andl %ebx,%edi
+ xorl %ecx,%ebx
+ rorl $7,%eax
+ movdqa %xmm6,%xmm9
+ movl %ebp,%esi
+ xorl %ebx,%edi
+ movdqa %xmm10,16(%rsp)
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %eax,%esi
+ pslld $2,%xmm6
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ psrld $30,%xmm9
+ addl 40(%rsp),%ecx
+ andl %eax,%esi
+ xorl %ebx,%eax
+ por %xmm9,%xmm6
+ rorl $7,%ebp
+ movl %edx,%edi
+ xorl %eax,%esi
+ roll $5,%edx
+ pshufd $238,%xmm5,%xmm10
+ addl %esi,%ecx
+ xorl %ebp,%edi
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ addl 44(%rsp),%ebx
+ andl %ebp,%edi
+ xorl %eax,%ebp
+ rorl $7,%edx
+ movl %ecx,%esi
+ xorl %ebp,%edi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %edx,%esi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ pxor %xmm3,%xmm7
+ addl 48(%rsp),%eax
+ andl %edx,%esi
+ xorl %ebp,%edx
+ rorl $7,%ecx
+ punpcklqdq %xmm6,%xmm10
+ movl %ebx,%edi
+ xorl %edx,%esi
+ pxor %xmm0,%xmm7
+ roll $5,%ebx
+ addl %esi,%eax
+ movdqa 32(%r14),%xmm9
+ xorl %ecx,%edi
+ paddd %xmm6,%xmm8
+ xorl %edx,%ecx
+ pxor %xmm10,%xmm7
+ addl %ebx,%eax
+ addl 52(%rsp),%ebp
+ andl %ecx,%edi
+ xorl %edx,%ecx
+ rorl $7,%ebx
+ movdqa %xmm7,%xmm10
+ movl %eax,%esi
+ xorl %ecx,%edi
+ movdqa %xmm8,32(%rsp)
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ebx,%esi
+ pslld $2,%xmm7
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ psrld $30,%xmm10
+ addl 56(%rsp),%edx
+ andl %ebx,%esi
+ xorl %ecx,%ebx
+ por %xmm10,%xmm7
+ rorl $7,%eax
+ movl %ebp,%edi
+ xorl %ebx,%esi
+ roll $5,%ebp
+ pshufd $238,%xmm6,%xmm8
+ addl %esi,%edx
+ xorl %eax,%edi
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ addl 60(%rsp),%ecx
+ andl %eax,%edi
+ xorl %ebx,%eax
+ rorl $7,%ebp
+ movl %edx,%esi
+ xorl %eax,%edi
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %ebp,%esi
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ pxor %xmm4,%xmm0
+ addl 0(%rsp),%ebx
+ andl %ebp,%esi
+ xorl %eax,%ebp
+ rorl $7,%edx
+ punpcklqdq %xmm7,%xmm8
+ movl %ecx,%edi
+ xorl %ebp,%esi
+ pxor %xmm1,%xmm0
+ roll $5,%ecx
+ addl %esi,%ebx
+ movdqa %xmm9,%xmm10
+ xorl %edx,%edi
+ paddd %xmm7,%xmm9
+ xorl %ebp,%edx
+ pxor %xmm8,%xmm0
+ addl %ecx,%ebx
+ addl 4(%rsp),%eax
+ andl %edx,%edi
+ xorl %ebp,%edx
+ rorl $7,%ecx
+ movdqa %xmm0,%xmm8
+ movl %ebx,%esi
+ xorl %edx,%edi
+ movdqa %xmm9,48(%rsp)
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %ecx,%esi
+ pslld $2,%xmm0
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ psrld $30,%xmm8
+ addl 8(%rsp),%ebp
+ andl %ecx,%esi
+ xorl %edx,%ecx
+ por %xmm8,%xmm0
+ rorl $7,%ebx
+ movl %eax,%edi
+ xorl %ecx,%esi
+ roll $5,%eax
+ pshufd $238,%xmm7,%xmm9
+ addl %esi,%ebp
+ xorl %ebx,%edi
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ addl 12(%rsp),%edx
+ andl %ebx,%edi
+ xorl %ecx,%ebx
+ rorl $7,%eax
+ movl %ebp,%esi
+ xorl %ebx,%edi
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %eax,%esi
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ pxor %xmm5,%xmm1
+ addl 16(%rsp),%ecx
+ andl %eax,%esi
+ xorl %ebx,%eax
+ rorl $7,%ebp
+ punpcklqdq %xmm0,%xmm9
+ movl %edx,%edi
+ xorl %eax,%esi
+ pxor %xmm2,%xmm1
+ roll $5,%edx
+ addl %esi,%ecx
+ movdqa %xmm10,%xmm8
+ xorl %ebp,%edi
+ paddd %xmm0,%xmm10
+ xorl %eax,%ebp
+ pxor %xmm9,%xmm1
+ addl %edx,%ecx
+ addl 20(%rsp),%ebx
+ andl %ebp,%edi
+ xorl %eax,%ebp
+ rorl $7,%edx
+ movdqa %xmm1,%xmm9
+ movl %ecx,%esi
+ xorl %ebp,%edi
+ movdqa %xmm10,0(%rsp)
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %edx,%esi
+ pslld $2,%xmm1
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ psrld $30,%xmm9
+ addl 24(%rsp),%eax
+ andl %edx,%esi
+ xorl %ebp,%edx
+ por %xmm9,%xmm1
+ rorl $7,%ecx
+ movl %ebx,%edi
+ xorl %edx,%esi
+ roll $5,%ebx
+ pshufd $238,%xmm0,%xmm10
+ addl %esi,%eax
+ xorl %ecx,%edi
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ addl 28(%rsp),%ebp
+ andl %ecx,%edi
+ xorl %edx,%ecx
+ rorl $7,%ebx
+ movl %eax,%esi
+ xorl %ecx,%edi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ebx,%esi
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ pxor %xmm6,%xmm2
+ addl 32(%rsp),%edx
+ andl %ebx,%esi
+ xorl %ecx,%ebx
+ rorl $7,%eax
+ punpcklqdq %xmm1,%xmm10
+ movl %ebp,%edi
+ xorl %ebx,%esi
+ pxor %xmm3,%xmm2
+ roll $5,%ebp
+ addl %esi,%edx
+ movdqa %xmm8,%xmm9
+ xorl %eax,%edi
+ paddd %xmm1,%xmm8
+ xorl %ebx,%eax
+ pxor %xmm10,%xmm2
+ addl %ebp,%edx
+ addl 36(%rsp),%ecx
+ andl %eax,%edi
+ xorl %ebx,%eax
+ rorl $7,%ebp
+ movdqa %xmm2,%xmm10
+ movl %edx,%esi
+ xorl %eax,%edi
+ movdqa %xmm8,16(%rsp)
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %ebp,%esi
+ pslld $2,%xmm2
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ psrld $30,%xmm10
+ addl 40(%rsp),%ebx
+ andl %ebp,%esi
+ xorl %eax,%ebp
+ por %xmm10,%xmm2
+ rorl $7,%edx
+ movl %ecx,%edi
+ xorl %ebp,%esi
+ roll $5,%ecx
+ pshufd $238,%xmm1,%xmm8
+ addl %esi,%ebx
+ xorl %edx,%edi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ addl 44(%rsp),%eax
+ andl %edx,%edi
+ xorl %ebp,%edx
+ rorl $7,%ecx
+ movl %ebx,%esi
+ xorl %edx,%edi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ addl %ebx,%eax
+ pxor %xmm7,%xmm3
+ addl 48(%rsp),%ebp
+ xorl %ecx,%esi
+ punpcklqdq %xmm2,%xmm8
+ movl %eax,%edi
+ roll $5,%eax
+ pxor %xmm4,%xmm3
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ movdqa %xmm9,%xmm10
+ rorl $7,%ebx
+ paddd %xmm2,%xmm9
+ addl %eax,%ebp
+ pxor %xmm8,%xmm3
+ addl 52(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ movdqa %xmm3,%xmm8
+ addl %edi,%edx
+ xorl %ebx,%esi
+ movdqa %xmm9,32(%rsp)
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 56(%rsp),%ecx
+ pslld $2,%xmm3
+ xorl %eax,%esi
+ movl %edx,%edi
+ psrld $30,%xmm8
+ roll $5,%edx
+ addl %esi,%ecx
+ xorl %eax,%edi
+ rorl $7,%ebp
+ por %xmm8,%xmm3
+ addl %edx,%ecx
+ addl 60(%rsp),%ebx
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 0(%rsp),%eax
+ xorl %edx,%esi
+ movl %ebx,%edi
+ roll $5,%ebx
+ paddd %xmm3,%xmm10
+ addl %esi,%eax
+ xorl %edx,%edi
+ movdqa %xmm10,48(%rsp)
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 4(%rsp),%ebp
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 8(%rsp),%edx
+ xorl %ebx,%esi
+ movl %ebp,%edi
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %ebx,%edi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 12(%rsp),%ecx
+ xorl %eax,%edi
+ movl %edx,%esi
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ cmpq %r10,%r9
+ je .Ldone_ssse3
+ movdqa 64(%r14),%xmm6
+ movdqa -64(%r14),%xmm9
+ movdqu 0(%r9),%xmm0
+ movdqu 16(%r9),%xmm1
+ movdqu 32(%r9),%xmm2
+ movdqu 48(%r9),%xmm3
+.byte 102,15,56,0,198
+ addq $64,%r9
+ addl 16(%rsp),%ebx
+ xorl %ebp,%esi
+ movl %ecx,%edi
+.byte 102,15,56,0,206
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ paddd %xmm9,%xmm0
+ addl %ecx,%ebx
+ addl 20(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ movdqa %xmm0,0(%rsp)
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ rorl $7,%ecx
+ psubd %xmm9,%xmm0
+ addl %ebx,%eax
+ addl 24(%rsp),%ebp
+ xorl %ecx,%esi
+ movl %eax,%edi
+ roll $5,%eax
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 28(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 32(%rsp),%ecx
+ xorl %eax,%esi
+ movl %edx,%edi
+.byte 102,15,56,0,214
+ roll $5,%edx
+ addl %esi,%ecx
+ xorl %eax,%edi
+ rorl $7,%ebp
+ paddd %xmm9,%xmm1
+ addl %edx,%ecx
+ addl 36(%rsp),%ebx
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ movdqa %xmm1,16(%rsp)
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ psubd %xmm9,%xmm1
+ addl %ecx,%ebx
+ addl 40(%rsp),%eax
+ xorl %edx,%esi
+ movl %ebx,%edi
+ roll $5,%ebx
+ addl %esi,%eax
+ xorl %edx,%edi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 44(%rsp),%ebp
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 48(%rsp),%edx
+ xorl %ebx,%esi
+ movl %ebp,%edi
+.byte 102,15,56,0,222
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %ebx,%edi
+ rorl $7,%eax
+ paddd %xmm9,%xmm2
+ addl %ebp,%edx
+ addl 52(%rsp),%ecx
+ xorl %eax,%edi
+ movl %edx,%esi
+ movdqa %xmm2,32(%rsp)
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %eax,%esi
+ rorl $7,%ebp
+ psubd %xmm9,%xmm2
+ addl %edx,%ecx
+ addl 56(%rsp),%ebx
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 60(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 0(%r8),%eax
+ addl 4(%r8),%esi
+ addl 8(%r8),%ecx
+ addl 12(%r8),%edx
+ movl %eax,0(%r8)
+ addl 16(%r8),%ebp
+ movl %esi,4(%r8)
+ movl %esi,%ebx
+ movl %ecx,8(%r8)
+ movl %ecx,%edi
+ movl %edx,12(%r8)
+ xorl %edx,%edi
+ movl %ebp,16(%r8)
+ andl %edi,%esi
+ jmp .Loop_ssse3
+
+.align 16
+.Ldone_ssse3:
+ addl 16(%rsp),%ebx
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 20(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 24(%rsp),%ebp
+ xorl %ecx,%esi
+ movl %eax,%edi
+ roll $5,%eax
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 28(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 32(%rsp),%ecx
+ xorl %eax,%esi
+ movl %edx,%edi
+ roll $5,%edx
+ addl %esi,%ecx
+ xorl %eax,%edi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 36(%rsp),%ebx
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 40(%rsp),%eax
+ xorl %edx,%esi
+ movl %ebx,%edi
+ roll $5,%ebx
+ addl %esi,%eax
+ xorl %edx,%edi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 44(%rsp),%ebp
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 48(%rsp),%edx
+ xorl %ebx,%esi
+ movl %ebp,%edi
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %ebx,%edi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 52(%rsp),%ecx
+ xorl %eax,%edi
+ movl %edx,%esi
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 56(%rsp),%ebx
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 60(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 0(%r8),%eax
+ addl 4(%r8),%esi
+ addl 8(%r8),%ecx
+ movl %eax,0(%r8)
+ addl 12(%r8),%edx
+ movl %esi,4(%r8)
+ addl 16(%r8),%ebp
+ movl %ecx,8(%r8)
+ movl %edx,12(%r8)
+ movl %ebp,16(%r8)
+ movq -40(%r11),%r14
+.cfi_restore %r14
+ movq -32(%r11),%r13
+.cfi_restore %r13
+ movq -24(%r11),%r12
+.cfi_restore %r12
+ movq -16(%r11),%rbp
+.cfi_restore %rbp
+ movq -8(%r11),%rbx
+.cfi_restore %rbx
+ leaq (%r11),%rsp
+.cfi_def_cfa_register %rsp
+.Lepilogue_ssse3:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size sha1_block_data_order_ssse3,.-sha1_block_data_order_ssse3
+.align 64
+K_XX_XX:
+.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999
+.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999
+.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1
+.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1
+.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc
+.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc
+.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6
+.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.byte 0xf,0xe,0xd,0xc,0xb,0xa,0x9,0x8,0x7,0x6,0x5,0x4,0x3,0x2,0x1,0x0
+.byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.align 64
diff --git a/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha256-mb-x86_64.S b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha256-mb-x86_64.S
new file mode 100644
index 0000000000..25dee488b8
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha256-mb-x86_64.S
@@ -0,0 +1,3286 @@
+# WARNING: do not edit!
+# Generated from openssl/crypto/sha/asm/sha256-mb-x86_64.pl
+#
+# Copyright 2013-2020 The OpenSSL Project Authors. All Rights Reserved.
+#
+# Licensed under the OpenSSL license (the "License"). You may not use
+# this file except in compliance with the License. You can obtain a copy
+# in the file LICENSE in the source distribution or at
+# https://www.openssl.org/source/license.html
+
+.text
+
+
+
+.globl sha256_multi_block
+.type sha256_multi_block,@function
+.align 32
+sha256_multi_block:
+.cfi_startproc
+ movq OPENSSL_ia32cap_P+4(%rip),%rcx
+ btq $61,%rcx
+ jc _shaext_shortcut
+ movq %rsp,%rax
+.cfi_def_cfa_register %rax
+ pushq %rbx
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_offset %rbp,-24
+ subq $288,%rsp
+ andq $-256,%rsp
+ movq %rax,272(%rsp)
+.cfi_escape 0x0f,0x06,0x77,0x90,0x02,0x06,0x23,0x08
+.Lbody:
+ leaq K256+128(%rip),%rbp
+ leaq 256(%rsp),%rbx
+ leaq 128(%rdi),%rdi
+
+.Loop_grande:
+ movl %edx,280(%rsp)
+ xorl %edx,%edx
+ movq 0(%rsi),%r8
+ movl 8(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,0(%rbx)
+ cmovleq %rbp,%r8
+ movq 16(%rsi),%r9
+ movl 24(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,4(%rbx)
+ cmovleq %rbp,%r9
+ movq 32(%rsi),%r10
+ movl 40(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,8(%rbx)
+ cmovleq %rbp,%r10
+ movq 48(%rsi),%r11
+ movl 56(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,12(%rbx)
+ cmovleq %rbp,%r11
+ testl %edx,%edx
+ jz .Ldone
+
+ movdqu 0-128(%rdi),%xmm8
+ leaq 128(%rsp),%rax
+ movdqu 32-128(%rdi),%xmm9
+ movdqu 64-128(%rdi),%xmm10
+ movdqu 96-128(%rdi),%xmm11
+ movdqu 128-128(%rdi),%xmm12
+ movdqu 160-128(%rdi),%xmm13
+ movdqu 192-128(%rdi),%xmm14
+ movdqu 224-128(%rdi),%xmm15
+ movdqu .Lpbswap(%rip),%xmm6
+ jmp .Loop
+
+.align 32
+.Loop:
+ movdqa %xmm10,%xmm4
+ pxor %xmm9,%xmm4
+ movd 0(%r8),%xmm5
+ movd 0(%r9),%xmm0
+ movd 0(%r10),%xmm1
+ movd 0(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+ movdqa %xmm12,%xmm7
+.byte 102,15,56,0,238
+ movdqa %xmm12,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm12,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,0-128(%rax)
+ paddd %xmm15,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -128(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm12,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm14,%xmm0
+ pand %xmm13,%xmm3
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm8,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm9,%xmm3
+ movdqa %xmm8,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm8,%xmm3
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm9,%xmm15
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm15
+ paddd %xmm5,%xmm11
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm15
+ paddd %xmm7,%xmm15
+ movd 4(%r8),%xmm5
+ movd 4(%r9),%xmm0
+ movd 4(%r10),%xmm1
+ movd 4(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+ movdqa %xmm11,%xmm7
+
+ movdqa %xmm11,%xmm2
+.byte 102,15,56,0,238
+ psrld $6,%xmm7
+ movdqa %xmm11,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,16-128(%rax)
+ paddd %xmm14,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -96(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm11,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm13,%xmm0
+ pand %xmm12,%xmm4
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm15,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm8,%xmm4
+ movdqa %xmm15,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm15,%xmm4
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm8,%xmm14
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm14
+ paddd %xmm5,%xmm10
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm14
+ paddd %xmm7,%xmm14
+ movd 8(%r8),%xmm5
+ movd 8(%r9),%xmm0
+ movd 8(%r10),%xmm1
+ movd 8(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+ movdqa %xmm10,%xmm7
+.byte 102,15,56,0,238
+ movdqa %xmm10,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm10,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,32-128(%rax)
+ paddd %xmm13,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm10,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm12,%xmm0
+ pand %xmm11,%xmm3
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm14,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm15,%xmm3
+ movdqa %xmm14,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm14,%xmm3
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm15,%xmm13
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm13
+ paddd %xmm5,%xmm9
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm13
+ paddd %xmm7,%xmm13
+ movd 12(%r8),%xmm5
+ movd 12(%r9),%xmm0
+ movd 12(%r10),%xmm1
+ movd 12(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+ movdqa %xmm9,%xmm7
+
+ movdqa %xmm9,%xmm2
+.byte 102,15,56,0,238
+ psrld $6,%xmm7
+ movdqa %xmm9,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,48-128(%rax)
+ paddd %xmm12,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -32(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm9,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm11,%xmm0
+ pand %xmm10,%xmm4
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm13,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm14,%xmm4
+ movdqa %xmm13,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm13,%xmm4
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm14,%xmm12
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm12
+ paddd %xmm5,%xmm8
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm12
+ paddd %xmm7,%xmm12
+ movd 16(%r8),%xmm5
+ movd 16(%r9),%xmm0
+ movd 16(%r10),%xmm1
+ movd 16(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+ movdqa %xmm8,%xmm7
+.byte 102,15,56,0,238
+ movdqa %xmm8,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm8,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,64-128(%rax)
+ paddd %xmm11,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 0(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm8,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm10,%xmm0
+ pand %xmm9,%xmm3
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm12,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm13,%xmm3
+ movdqa %xmm12,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm12,%xmm3
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm13,%xmm11
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm11
+ paddd %xmm5,%xmm15
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm11
+ paddd %xmm7,%xmm11
+ movd 20(%r8),%xmm5
+ movd 20(%r9),%xmm0
+ movd 20(%r10),%xmm1
+ movd 20(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+ movdqa %xmm15,%xmm7
+
+ movdqa %xmm15,%xmm2
+.byte 102,15,56,0,238
+ psrld $6,%xmm7
+ movdqa %xmm15,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,80-128(%rax)
+ paddd %xmm10,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 32(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm15,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm9,%xmm0
+ pand %xmm8,%xmm4
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm11,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm12,%xmm4
+ movdqa %xmm11,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm11,%xmm4
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm12,%xmm10
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm10
+ paddd %xmm5,%xmm14
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm10
+ paddd %xmm7,%xmm10
+ movd 24(%r8),%xmm5
+ movd 24(%r9),%xmm0
+ movd 24(%r10),%xmm1
+ movd 24(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+ movdqa %xmm14,%xmm7
+.byte 102,15,56,0,238
+ movdqa %xmm14,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm14,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,96-128(%rax)
+ paddd %xmm9,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm14,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm8,%xmm0
+ pand %xmm15,%xmm3
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm10,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm11,%xmm3
+ movdqa %xmm10,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm10,%xmm3
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm11,%xmm9
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm9
+ paddd %xmm5,%xmm13
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm9
+ paddd %xmm7,%xmm9
+ movd 28(%r8),%xmm5
+ movd 28(%r9),%xmm0
+ movd 28(%r10),%xmm1
+ movd 28(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+ movdqa %xmm13,%xmm7
+
+ movdqa %xmm13,%xmm2
+.byte 102,15,56,0,238
+ psrld $6,%xmm7
+ movdqa %xmm13,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,112-128(%rax)
+ paddd %xmm8,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 96(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm13,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm15,%xmm0
+ pand %xmm14,%xmm4
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm9,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm10,%xmm4
+ movdqa %xmm9,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm9,%xmm4
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm10,%xmm8
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm8
+ paddd %xmm5,%xmm12
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm8
+ paddd %xmm7,%xmm8
+ leaq 256(%rbp),%rbp
+ movd 32(%r8),%xmm5
+ movd 32(%r9),%xmm0
+ movd 32(%r10),%xmm1
+ movd 32(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+ movdqa %xmm12,%xmm7
+.byte 102,15,56,0,238
+ movdqa %xmm12,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm12,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,128-128(%rax)
+ paddd %xmm15,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -128(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm12,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm14,%xmm0
+ pand %xmm13,%xmm3
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm8,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm9,%xmm3
+ movdqa %xmm8,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm8,%xmm3
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm9,%xmm15
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm15
+ paddd %xmm5,%xmm11
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm15
+ paddd %xmm7,%xmm15
+ movd 36(%r8),%xmm5
+ movd 36(%r9),%xmm0
+ movd 36(%r10),%xmm1
+ movd 36(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+ movdqa %xmm11,%xmm7
+
+ movdqa %xmm11,%xmm2
+.byte 102,15,56,0,238
+ psrld $6,%xmm7
+ movdqa %xmm11,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,144-128(%rax)
+ paddd %xmm14,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -96(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm11,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm13,%xmm0
+ pand %xmm12,%xmm4
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm15,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm8,%xmm4
+ movdqa %xmm15,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm15,%xmm4
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm8,%xmm14
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm14
+ paddd %xmm5,%xmm10
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm14
+ paddd %xmm7,%xmm14
+ movd 40(%r8),%xmm5
+ movd 40(%r9),%xmm0
+ movd 40(%r10),%xmm1
+ movd 40(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+ movdqa %xmm10,%xmm7
+.byte 102,15,56,0,238
+ movdqa %xmm10,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm10,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,160-128(%rax)
+ paddd %xmm13,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm10,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm12,%xmm0
+ pand %xmm11,%xmm3
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm14,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm15,%xmm3
+ movdqa %xmm14,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm14,%xmm3
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm15,%xmm13
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm13
+ paddd %xmm5,%xmm9
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm13
+ paddd %xmm7,%xmm13
+ movd 44(%r8),%xmm5
+ movd 44(%r9),%xmm0
+ movd 44(%r10),%xmm1
+ movd 44(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+ movdqa %xmm9,%xmm7
+
+ movdqa %xmm9,%xmm2
+.byte 102,15,56,0,238
+ psrld $6,%xmm7
+ movdqa %xmm9,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,176-128(%rax)
+ paddd %xmm12,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -32(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm9,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm11,%xmm0
+ pand %xmm10,%xmm4
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm13,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm14,%xmm4
+ movdqa %xmm13,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm13,%xmm4
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm14,%xmm12
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm12
+ paddd %xmm5,%xmm8
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm12
+ paddd %xmm7,%xmm12
+ movd 48(%r8),%xmm5
+ movd 48(%r9),%xmm0
+ movd 48(%r10),%xmm1
+ movd 48(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+ movdqa %xmm8,%xmm7
+.byte 102,15,56,0,238
+ movdqa %xmm8,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm8,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,192-128(%rax)
+ paddd %xmm11,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 0(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm8,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm10,%xmm0
+ pand %xmm9,%xmm3
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm12,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm13,%xmm3
+ movdqa %xmm12,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm12,%xmm3
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm13,%xmm11
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm11
+ paddd %xmm5,%xmm15
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm11
+ paddd %xmm7,%xmm11
+ movd 52(%r8),%xmm5
+ movd 52(%r9),%xmm0
+ movd 52(%r10),%xmm1
+ movd 52(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+ movdqa %xmm15,%xmm7
+
+ movdqa %xmm15,%xmm2
+.byte 102,15,56,0,238
+ psrld $6,%xmm7
+ movdqa %xmm15,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,208-128(%rax)
+ paddd %xmm10,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 32(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm15,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm9,%xmm0
+ pand %xmm8,%xmm4
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm11,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm12,%xmm4
+ movdqa %xmm11,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm11,%xmm4
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm12,%xmm10
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm10
+ paddd %xmm5,%xmm14
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm10
+ paddd %xmm7,%xmm10
+ movd 56(%r8),%xmm5
+ movd 56(%r9),%xmm0
+ movd 56(%r10),%xmm1
+ movd 56(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+ movdqa %xmm14,%xmm7
+.byte 102,15,56,0,238
+ movdqa %xmm14,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm14,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,224-128(%rax)
+ paddd %xmm9,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm14,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm8,%xmm0
+ pand %xmm15,%xmm3
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm10,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm11,%xmm3
+ movdqa %xmm10,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm10,%xmm3
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm11,%xmm9
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm9
+ paddd %xmm5,%xmm13
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm9
+ paddd %xmm7,%xmm9
+ movd 60(%r8),%xmm5
+ leaq 64(%r8),%r8
+ movd 60(%r9),%xmm0
+ leaq 64(%r9),%r9
+ movd 60(%r10),%xmm1
+ leaq 64(%r10),%r10
+ movd 60(%r11),%xmm2
+ leaq 64(%r11),%r11
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+ movdqa %xmm13,%xmm7
+
+ movdqa %xmm13,%xmm2
+.byte 102,15,56,0,238
+ psrld $6,%xmm7
+ movdqa %xmm13,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,240-128(%rax)
+ paddd %xmm8,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 96(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm13,%xmm0
+ prefetcht0 63(%r8)
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm15,%xmm0
+ pand %xmm14,%xmm4
+ pxor %xmm1,%xmm7
+
+ prefetcht0 63(%r9)
+ movdqa %xmm9,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm10,%xmm4
+ movdqa %xmm9,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm9,%xmm4
+
+ prefetcht0 63(%r10)
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ prefetcht0 63(%r11)
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm10,%xmm8
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm8
+ paddd %xmm5,%xmm12
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm8
+ paddd %xmm7,%xmm8
+ leaq 256(%rbp),%rbp
+ movdqu 0-128(%rax),%xmm5
+ movl $3,%ecx
+ jmp .Loop_16_xx
+.align 32
+.Loop_16_xx:
+ movdqa 16-128(%rax),%xmm6
+ paddd 144-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 224-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm12,%xmm7
+
+ movdqa %xmm12,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm12,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,0-128(%rax)
+ paddd %xmm15,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -128(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm12,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm14,%xmm0
+ pand %xmm13,%xmm3
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm8,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm9,%xmm3
+ movdqa %xmm8,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm8,%xmm3
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm9,%xmm15
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm15
+ paddd %xmm5,%xmm11
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm15
+ paddd %xmm7,%xmm15
+ movdqa 32-128(%rax),%xmm5
+ paddd 160-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 240-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm11,%xmm7
+
+ movdqa %xmm11,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm11,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,16-128(%rax)
+ paddd %xmm14,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -96(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm11,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm13,%xmm0
+ pand %xmm12,%xmm4
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm15,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm8,%xmm4
+ movdqa %xmm15,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm15,%xmm4
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm8,%xmm14
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm14
+ paddd %xmm6,%xmm10
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm14
+ paddd %xmm7,%xmm14
+ movdqa 48-128(%rax),%xmm6
+ paddd 176-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 0-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm10,%xmm7
+
+ movdqa %xmm10,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm10,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,32-128(%rax)
+ paddd %xmm13,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm10,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm12,%xmm0
+ pand %xmm11,%xmm3
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm14,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm15,%xmm3
+ movdqa %xmm14,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm14,%xmm3
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm15,%xmm13
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm13
+ paddd %xmm5,%xmm9
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm13
+ paddd %xmm7,%xmm13
+ movdqa 64-128(%rax),%xmm5
+ paddd 192-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 16-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm9,%xmm7
+
+ movdqa %xmm9,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm9,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,48-128(%rax)
+ paddd %xmm12,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -32(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm9,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm11,%xmm0
+ pand %xmm10,%xmm4
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm13,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm14,%xmm4
+ movdqa %xmm13,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm13,%xmm4
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm14,%xmm12
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm12
+ paddd %xmm6,%xmm8
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm12
+ paddd %xmm7,%xmm12
+ movdqa 80-128(%rax),%xmm6
+ paddd 208-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 32-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm8,%xmm7
+
+ movdqa %xmm8,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm8,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,64-128(%rax)
+ paddd %xmm11,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 0(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm8,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm10,%xmm0
+ pand %xmm9,%xmm3
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm12,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm13,%xmm3
+ movdqa %xmm12,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm12,%xmm3
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm13,%xmm11
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm11
+ paddd %xmm5,%xmm15
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm11
+ paddd %xmm7,%xmm11
+ movdqa 96-128(%rax),%xmm5
+ paddd 224-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 48-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm15,%xmm7
+
+ movdqa %xmm15,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm15,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,80-128(%rax)
+ paddd %xmm10,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 32(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm15,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm9,%xmm0
+ pand %xmm8,%xmm4
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm11,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm12,%xmm4
+ movdqa %xmm11,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm11,%xmm4
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm12,%xmm10
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm10
+ paddd %xmm6,%xmm14
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm10
+ paddd %xmm7,%xmm10
+ movdqa 112-128(%rax),%xmm6
+ paddd 240-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 64-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm14,%xmm7
+
+ movdqa %xmm14,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm14,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,96-128(%rax)
+ paddd %xmm9,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm14,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm8,%xmm0
+ pand %xmm15,%xmm3
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm10,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm11,%xmm3
+ movdqa %xmm10,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm10,%xmm3
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm11,%xmm9
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm9
+ paddd %xmm5,%xmm13
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm9
+ paddd %xmm7,%xmm9
+ movdqa 128-128(%rax),%xmm5
+ paddd 0-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 80-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm13,%xmm7
+
+ movdqa %xmm13,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm13,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,112-128(%rax)
+ paddd %xmm8,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 96(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm13,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm15,%xmm0
+ pand %xmm14,%xmm4
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm9,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm10,%xmm4
+ movdqa %xmm9,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm9,%xmm4
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm10,%xmm8
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm8
+ paddd %xmm6,%xmm12
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm8
+ paddd %xmm7,%xmm8
+ leaq 256(%rbp),%rbp
+ movdqa 144-128(%rax),%xmm6
+ paddd 16-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 96-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm12,%xmm7
+
+ movdqa %xmm12,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm12,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,128-128(%rax)
+ paddd %xmm15,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -128(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm12,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm14,%xmm0
+ pand %xmm13,%xmm3
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm8,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm9,%xmm3
+ movdqa %xmm8,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm8,%xmm3
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm9,%xmm15
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm15
+ paddd %xmm5,%xmm11
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm15
+ paddd %xmm7,%xmm15
+ movdqa 160-128(%rax),%xmm5
+ paddd 32-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 112-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm11,%xmm7
+
+ movdqa %xmm11,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm11,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,144-128(%rax)
+ paddd %xmm14,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -96(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm11,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm13,%xmm0
+ pand %xmm12,%xmm4
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm15,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm8,%xmm4
+ movdqa %xmm15,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm15,%xmm4
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm8,%xmm14
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm14
+ paddd %xmm6,%xmm10
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm14
+ paddd %xmm7,%xmm14
+ movdqa 176-128(%rax),%xmm6
+ paddd 48-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 128-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm10,%xmm7
+
+ movdqa %xmm10,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm10,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,160-128(%rax)
+ paddd %xmm13,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm10,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm12,%xmm0
+ pand %xmm11,%xmm3
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm14,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm15,%xmm3
+ movdqa %xmm14,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm14,%xmm3
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm15,%xmm13
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm13
+ paddd %xmm5,%xmm9
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm13
+ paddd %xmm7,%xmm13
+ movdqa 192-128(%rax),%xmm5
+ paddd 64-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 144-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm9,%xmm7
+
+ movdqa %xmm9,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm9,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,176-128(%rax)
+ paddd %xmm12,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -32(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm9,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm11,%xmm0
+ pand %xmm10,%xmm4
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm13,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm14,%xmm4
+ movdqa %xmm13,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm13,%xmm4
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm14,%xmm12
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm12
+ paddd %xmm6,%xmm8
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm12
+ paddd %xmm7,%xmm12
+ movdqa 208-128(%rax),%xmm6
+ paddd 80-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 160-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm8,%xmm7
+
+ movdqa %xmm8,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm8,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,192-128(%rax)
+ paddd %xmm11,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 0(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm8,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm10,%xmm0
+ pand %xmm9,%xmm3
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm12,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm13,%xmm3
+ movdqa %xmm12,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm12,%xmm3
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm13,%xmm11
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm11
+ paddd %xmm5,%xmm15
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm11
+ paddd %xmm7,%xmm11
+ movdqa 224-128(%rax),%xmm5
+ paddd 96-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 176-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm15,%xmm7
+
+ movdqa %xmm15,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm15,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,208-128(%rax)
+ paddd %xmm10,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 32(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm15,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm9,%xmm0
+ pand %xmm8,%xmm4
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm11,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm12,%xmm4
+ movdqa %xmm11,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm11,%xmm4
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm12,%xmm10
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm10
+ paddd %xmm6,%xmm14
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm10
+ paddd %xmm7,%xmm10
+ movdqa 240-128(%rax),%xmm6
+ paddd 112-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 192-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm14,%xmm7
+
+ movdqa %xmm14,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm14,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,224-128(%rax)
+ paddd %xmm9,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm14,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm8,%xmm0
+ pand %xmm15,%xmm3
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm10,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm11,%xmm3
+ movdqa %xmm10,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm10,%xmm3
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm11,%xmm9
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm9
+ paddd %xmm5,%xmm13
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm9
+ paddd %xmm7,%xmm9
+ movdqa 0-128(%rax),%xmm5
+ paddd 128-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 208-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm13,%xmm7
+
+ movdqa %xmm13,%xmm2
+
+ psrld $6,%xmm7
+ movdqa %xmm13,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,240-128(%rax)
+ paddd %xmm8,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 96(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm13,%xmm0
+
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm15,%xmm0
+ pand %xmm14,%xmm4
+ pxor %xmm1,%xmm7
+
+
+ movdqa %xmm9,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm10,%xmm4
+ movdqa %xmm9,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm9,%xmm4
+
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm10,%xmm8
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm8
+ paddd %xmm6,%xmm12
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm8
+ paddd %xmm7,%xmm8
+ leaq 256(%rbp),%rbp
+ decl %ecx
+ jnz .Loop_16_xx
+
+ movl $1,%ecx
+ leaq K256+128(%rip),%rbp
+
+ movdqa (%rbx),%xmm7
+ cmpl 0(%rbx),%ecx
+ pxor %xmm0,%xmm0
+ cmovgeq %rbp,%r8
+ cmpl 4(%rbx),%ecx
+ movdqa %xmm7,%xmm6
+ cmovgeq %rbp,%r9
+ cmpl 8(%rbx),%ecx
+ pcmpgtd %xmm0,%xmm6
+ cmovgeq %rbp,%r10
+ cmpl 12(%rbx),%ecx
+ paddd %xmm6,%xmm7
+ cmovgeq %rbp,%r11
+
+ movdqu 0-128(%rdi),%xmm0
+ pand %xmm6,%xmm8
+ movdqu 32-128(%rdi),%xmm1
+ pand %xmm6,%xmm9
+ movdqu 64-128(%rdi),%xmm2
+ pand %xmm6,%xmm10
+ movdqu 96-128(%rdi),%xmm5
+ pand %xmm6,%xmm11
+ paddd %xmm0,%xmm8
+ movdqu 128-128(%rdi),%xmm0
+ pand %xmm6,%xmm12
+ paddd %xmm1,%xmm9
+ movdqu 160-128(%rdi),%xmm1
+ pand %xmm6,%xmm13
+ paddd %xmm2,%xmm10
+ movdqu 192-128(%rdi),%xmm2
+ pand %xmm6,%xmm14
+ paddd %xmm5,%xmm11
+ movdqu 224-128(%rdi),%xmm5
+ pand %xmm6,%xmm15
+ paddd %xmm0,%xmm12
+ paddd %xmm1,%xmm13
+ movdqu %xmm8,0-128(%rdi)
+ paddd %xmm2,%xmm14
+ movdqu %xmm9,32-128(%rdi)
+ paddd %xmm5,%xmm15
+ movdqu %xmm10,64-128(%rdi)
+ movdqu %xmm11,96-128(%rdi)
+ movdqu %xmm12,128-128(%rdi)
+ movdqu %xmm13,160-128(%rdi)
+ movdqu %xmm14,192-128(%rdi)
+ movdqu %xmm15,224-128(%rdi)
+
+ movdqa %xmm7,(%rbx)
+ movdqa .Lpbswap(%rip),%xmm6
+ decl %edx
+ jnz .Loop
+
+ movl 280(%rsp),%edx
+ leaq 16(%rdi),%rdi
+ leaq 64(%rsi),%rsi
+ decl %edx
+ jnz .Loop_grande
+
+.Ldone:
+ movq 272(%rsp),%rax
+.cfi_def_cfa %rax,8
+ movq -16(%rax),%rbp
+.cfi_restore %rbp
+ movq -8(%rax),%rbx
+.cfi_restore %rbx
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
+.Lepilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size sha256_multi_block,.-sha256_multi_block
+.type sha256_multi_block_shaext,@function
+.align 32
+sha256_multi_block_shaext:
+.cfi_startproc
+_shaext_shortcut:
+ movq %rsp,%rax
+.cfi_def_cfa_register %rax
+ pushq %rbx
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_offset %rbp,-24
+ subq $288,%rsp
+ shll $1,%edx
+ andq $-256,%rsp
+ leaq 128(%rdi),%rdi
+ movq %rax,272(%rsp)
+.Lbody_shaext:
+ leaq 256(%rsp),%rbx
+ leaq K256_shaext+128(%rip),%rbp
+
+.Loop_grande_shaext:
+ movl %edx,280(%rsp)
+ xorl %edx,%edx
+ movq 0(%rsi),%r8
+ movl 8(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,0(%rbx)
+ cmovleq %rsp,%r8
+ movq 16(%rsi),%r9
+ movl 24(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,4(%rbx)
+ cmovleq %rsp,%r9
+ testl %edx,%edx
+ jz .Ldone_shaext
+
+ movq 0-128(%rdi),%xmm12
+ movq 32-128(%rdi),%xmm4
+ movq 64-128(%rdi),%xmm13
+ movq 96-128(%rdi),%xmm5
+ movq 128-128(%rdi),%xmm8
+ movq 160-128(%rdi),%xmm9
+ movq 192-128(%rdi),%xmm10
+ movq 224-128(%rdi),%xmm11
+
+ punpckldq %xmm4,%xmm12
+ punpckldq %xmm5,%xmm13
+ punpckldq %xmm9,%xmm8
+ punpckldq %xmm11,%xmm10
+ movdqa K256_shaext-16(%rip),%xmm3
+
+ movdqa %xmm12,%xmm14
+ movdqa %xmm13,%xmm15
+ punpcklqdq %xmm8,%xmm12
+ punpcklqdq %xmm10,%xmm13
+ punpckhqdq %xmm8,%xmm14
+ punpckhqdq %xmm10,%xmm15
+
+ pshufd $27,%xmm12,%xmm12
+ pshufd $27,%xmm13,%xmm13
+ pshufd $27,%xmm14,%xmm14
+ pshufd $27,%xmm15,%xmm15
+ jmp .Loop_shaext
+
+.align 32
+.Loop_shaext:
+ movdqu 0(%r8),%xmm4
+ movdqu 0(%r9),%xmm8
+ movdqu 16(%r8),%xmm5
+ movdqu 16(%r9),%xmm9
+ movdqu 32(%r8),%xmm6
+.byte 102,15,56,0,227
+ movdqu 32(%r9),%xmm10
+.byte 102,68,15,56,0,195
+ movdqu 48(%r8),%xmm7
+ leaq 64(%r8),%r8
+ movdqu 48(%r9),%xmm11
+ leaq 64(%r9),%r9
+
+ movdqa 0-128(%rbp),%xmm0
+.byte 102,15,56,0,235
+ paddd %xmm4,%xmm0
+ pxor %xmm12,%xmm4
+ movdqa %xmm0,%xmm1
+ movdqa 0-128(%rbp),%xmm2
+.byte 102,68,15,56,0,203
+ paddd %xmm8,%xmm2
+ movdqa %xmm13,80(%rsp)
+.byte 69,15,56,203,236
+ pxor %xmm14,%xmm8
+ movdqa %xmm2,%xmm0
+ movdqa %xmm15,112(%rsp)
+.byte 69,15,56,203,254
+ pshufd $0x0e,%xmm1,%xmm0
+ pxor %xmm12,%xmm4
+ movdqa %xmm12,64(%rsp)
+.byte 69,15,56,203,229
+ pshufd $0x0e,%xmm2,%xmm0
+ pxor %xmm14,%xmm8
+ movdqa %xmm14,96(%rsp)
+ movdqa 16-128(%rbp),%xmm1
+ paddd %xmm5,%xmm1
+.byte 102,15,56,0,243
+.byte 69,15,56,203,247
+
+ movdqa %xmm1,%xmm0
+ movdqa 16-128(%rbp),%xmm2
+ paddd %xmm9,%xmm2
+.byte 69,15,56,203,236
+ movdqa %xmm2,%xmm0
+ prefetcht0 127(%r8)
+.byte 102,15,56,0,251
+.byte 102,68,15,56,0,211
+ prefetcht0 127(%r9)
+.byte 69,15,56,203,254
+ pshufd $0x0e,%xmm1,%xmm0
+.byte 102,68,15,56,0,219
+.byte 15,56,204,229
+.byte 69,15,56,203,229
+ pshufd $0x0e,%xmm2,%xmm0
+ movdqa 32-128(%rbp),%xmm1
+ paddd %xmm6,%xmm1
+.byte 69,15,56,203,247
+
+ movdqa %xmm1,%xmm0
+ movdqa 32-128(%rbp),%xmm2
+ paddd %xmm10,%xmm2
+.byte 69,15,56,203,236
+.byte 69,15,56,204,193
+ movdqa %xmm2,%xmm0
+ movdqa %xmm7,%xmm3
+.byte 69,15,56,203,254
+ pshufd $0x0e,%xmm1,%xmm0
+.byte 102,15,58,15,222,4
+ paddd %xmm3,%xmm4
+ movdqa %xmm11,%xmm3
+.byte 102,65,15,58,15,218,4
+.byte 15,56,204,238
+.byte 69,15,56,203,229
+ pshufd $0x0e,%xmm2,%xmm0
+ movdqa 48-128(%rbp),%xmm1
+ paddd %xmm7,%xmm1
+.byte 69,15,56,203,247
+.byte 69,15,56,204,202
+
+ movdqa %xmm1,%xmm0
+ movdqa 48-128(%rbp),%xmm2
+ paddd %xmm3,%xmm8
+ paddd %xmm11,%xmm2
+.byte 15,56,205,231
+.byte 69,15,56,203,236
+ movdqa %xmm2,%xmm0
+ movdqa %xmm4,%xmm3
+.byte 102,15,58,15,223,4
+.byte 69,15,56,203,254
+.byte 69,15,56,205,195
+ pshufd $0x0e,%xmm1,%xmm0
+ paddd %xmm3,%xmm5
+ movdqa %xmm8,%xmm3
+.byte 102,65,15,58,15,219,4
+.byte 15,56,204,247
+.byte 69,15,56,203,229
+ pshufd $0x0e,%xmm2,%xmm0
+ movdqa 64-128(%rbp),%xmm1
+ paddd %xmm4,%xmm1
+.byte 69,15,56,203,247
+.byte 69,15,56,204,211
+ movdqa %xmm1,%xmm0
+ movdqa 64-128(%rbp),%xmm2
+ paddd %xmm3,%xmm9
+ paddd %xmm8,%xmm2
+.byte 15,56,205,236
+.byte 69,15,56,203,236
+ movdqa %xmm2,%xmm0
+ movdqa %xmm5,%xmm3
+.byte 102,15,58,15,220,4
+.byte 69,15,56,203,254
+.byte 69,15,56,205,200
+ pshufd $0x0e,%xmm1,%xmm0
+ paddd %xmm3,%xmm6
+ movdqa %xmm9,%xmm3
+.byte 102,65,15,58,15,216,4
+.byte 15,56,204,252
+.byte 69,15,56,203,229
+ pshufd $0x0e,%xmm2,%xmm0
+ movdqa 80-128(%rbp),%xmm1
+ paddd %xmm5,%xmm1
+.byte 69,15,56,203,247
+.byte 69,15,56,204,216
+ movdqa %xmm1,%xmm0
+ movdqa 80-128(%rbp),%xmm2
+ paddd %xmm3,%xmm10
+ paddd %xmm9,%xmm2
+.byte 15,56,205,245
+.byte 69,15,56,203,236
+ movdqa %xmm2,%xmm0
+ movdqa %xmm6,%xmm3
+.byte 102,15,58,15,221,4
+.byte 69,15,56,203,254
+.byte 69,15,56,205,209
+ pshufd $0x0e,%xmm1,%xmm0
+ paddd %xmm3,%xmm7
+ movdqa %xmm10,%xmm3
+.byte 102,65,15,58,15,217,4
+.byte 15,56,204,229
+.byte 69,15,56,203,229
+ pshufd $0x0e,%xmm2,%xmm0
+ movdqa 96-128(%rbp),%xmm1
+ paddd %xmm6,%xmm1
+.byte 69,15,56,203,247
+.byte 69,15,56,204,193
+ movdqa %xmm1,%xmm0
+ movdqa 96-128(%rbp),%xmm2
+ paddd %xmm3,%xmm11
+ paddd %xmm10,%xmm2
+.byte 15,56,205,254
+.byte 69,15,56,203,236
+ movdqa %xmm2,%xmm0
+ movdqa %xmm7,%xmm3
+.byte 102,15,58,15,222,4
+.byte 69,15,56,203,254
+.byte 69,15,56,205,218
+ pshufd $0x0e,%xmm1,%xmm0
+ paddd %xmm3,%xmm4
+ movdqa %xmm11,%xmm3
+.byte 102,65,15,58,15,218,4
+.byte 15,56,204,238
+.byte 69,15,56,203,229
+ pshufd $0x0e,%xmm2,%xmm0
+ movdqa 112-128(%rbp),%xmm1
+ paddd %xmm7,%xmm1
+.byte 69,15,56,203,247
+.byte 69,15,56,204,202
+ movdqa %xmm1,%xmm0
+ movdqa 112-128(%rbp),%xmm2
+ paddd %xmm3,%xmm8
+ paddd %xmm11,%xmm2
+.byte 15,56,205,231
+.byte 69,15,56,203,236
+ movdqa %xmm2,%xmm0
+ movdqa %xmm4,%xmm3
+.byte 102,15,58,15,223,4
+.byte 69,15,56,203,254
+.byte 69,15,56,205,195
+ pshufd $0x0e,%xmm1,%xmm0
+ paddd %xmm3,%xmm5
+ movdqa %xmm8,%xmm3
+.byte 102,65,15,58,15,219,4
+.byte 15,56,204,247
+.byte 69,15,56,203,229
+ pshufd $0x0e,%xmm2,%xmm0
+ movdqa 128-128(%rbp),%xmm1
+ paddd %xmm4,%xmm1
+.byte 69,15,56,203,247
+.byte 69,15,56,204,211
+ movdqa %xmm1,%xmm0
+ movdqa 128-128(%rbp),%xmm2
+ paddd %xmm3,%xmm9
+ paddd %xmm8,%xmm2
+.byte 15,56,205,236
+.byte 69,15,56,203,236
+ movdqa %xmm2,%xmm0
+ movdqa %xmm5,%xmm3
+.byte 102,15,58,15,220,4
+.byte 69,15,56,203,254
+.byte 69,15,56,205,200
+ pshufd $0x0e,%xmm1,%xmm0
+ paddd %xmm3,%xmm6
+ movdqa %xmm9,%xmm3
+.byte 102,65,15,58,15,216,4
+.byte 15,56,204,252
+.byte 69,15,56,203,229
+ pshufd $0x0e,%xmm2,%xmm0
+ movdqa 144-128(%rbp),%xmm1
+ paddd %xmm5,%xmm1
+.byte 69,15,56,203,247
+.byte 69,15,56,204,216
+ movdqa %xmm1,%xmm0
+ movdqa 144-128(%rbp),%xmm2
+ paddd %xmm3,%xmm10
+ paddd %xmm9,%xmm2
+.byte 15,56,205,245
+.byte 69,15,56,203,236
+ movdqa %xmm2,%xmm0
+ movdqa %xmm6,%xmm3
+.byte 102,15,58,15,221,4
+.byte 69,15,56,203,254
+.byte 69,15,56,205,209
+ pshufd $0x0e,%xmm1,%xmm0
+ paddd %xmm3,%xmm7
+ movdqa %xmm10,%xmm3
+.byte 102,65,15,58,15,217,4
+.byte 15,56,204,229
+.byte 69,15,56,203,229
+ pshufd $0x0e,%xmm2,%xmm0
+ movdqa 160-128(%rbp),%xmm1
+ paddd %xmm6,%xmm1
+.byte 69,15,56,203,247
+.byte 69,15,56,204,193
+ movdqa %xmm1,%xmm0
+ movdqa 160-128(%rbp),%xmm2
+ paddd %xmm3,%xmm11
+ paddd %xmm10,%xmm2
+.byte 15,56,205,254
+.byte 69,15,56,203,236
+ movdqa %xmm2,%xmm0
+ movdqa %xmm7,%xmm3
+.byte 102,15,58,15,222,4
+.byte 69,15,56,203,254
+.byte 69,15,56,205,218
+ pshufd $0x0e,%xmm1,%xmm0
+ paddd %xmm3,%xmm4
+ movdqa %xmm11,%xmm3
+.byte 102,65,15,58,15,218,4
+.byte 15,56,204,238
+.byte 69,15,56,203,229
+ pshufd $0x0e,%xmm2,%xmm0
+ movdqa 176-128(%rbp),%xmm1
+ paddd %xmm7,%xmm1
+.byte 69,15,56,203,247
+.byte 69,15,56,204,202
+ movdqa %xmm1,%xmm0
+ movdqa 176-128(%rbp),%xmm2
+ paddd %xmm3,%xmm8
+ paddd %xmm11,%xmm2
+.byte 15,56,205,231
+.byte 69,15,56,203,236
+ movdqa %xmm2,%xmm0
+ movdqa %xmm4,%xmm3
+.byte 102,15,58,15,223,4
+.byte 69,15,56,203,254
+.byte 69,15,56,205,195
+ pshufd $0x0e,%xmm1,%xmm0
+ paddd %xmm3,%xmm5
+ movdqa %xmm8,%xmm3
+.byte 102,65,15,58,15,219,4
+.byte 15,56,204,247
+.byte 69,15,56,203,229
+ pshufd $0x0e,%xmm2,%xmm0
+ movdqa 192-128(%rbp),%xmm1
+ paddd %xmm4,%xmm1
+.byte 69,15,56,203,247
+.byte 69,15,56,204,211
+ movdqa %xmm1,%xmm0
+ movdqa 192-128(%rbp),%xmm2
+ paddd %xmm3,%xmm9
+ paddd %xmm8,%xmm2
+.byte 15,56,205,236
+.byte 69,15,56,203,236
+ movdqa %xmm2,%xmm0
+ movdqa %xmm5,%xmm3
+.byte 102,15,58,15,220,4
+.byte 69,15,56,203,254
+.byte 69,15,56,205,200
+ pshufd $0x0e,%xmm1,%xmm0
+ paddd %xmm3,%xmm6
+ movdqa %xmm9,%xmm3
+.byte 102,65,15,58,15,216,4
+.byte 15,56,204,252
+.byte 69,15,56,203,229
+ pshufd $0x0e,%xmm2,%xmm0
+ movdqa 208-128(%rbp),%xmm1
+ paddd %xmm5,%xmm1
+.byte 69,15,56,203,247
+.byte 69,15,56,204,216
+ movdqa %xmm1,%xmm0
+ movdqa 208-128(%rbp),%xmm2
+ paddd %xmm3,%xmm10
+ paddd %xmm9,%xmm2
+.byte 15,56,205,245
+.byte 69,15,56,203,236
+ movdqa %xmm2,%xmm0
+ movdqa %xmm6,%xmm3
+.byte 102,15,58,15,221,4
+.byte 69,15,56,203,254
+.byte 69,15,56,205,209
+ pshufd $0x0e,%xmm1,%xmm0
+ paddd %xmm3,%xmm7
+ movdqa %xmm10,%xmm3
+.byte 102,65,15,58,15,217,4
+ nop
+.byte 69,15,56,203,229
+ pshufd $0x0e,%xmm2,%xmm0
+ movdqa 224-128(%rbp),%xmm1
+ paddd %xmm6,%xmm1
+.byte 69,15,56,203,247
+
+ movdqa %xmm1,%xmm0
+ movdqa 224-128(%rbp),%xmm2
+ paddd %xmm3,%xmm11
+ paddd %xmm10,%xmm2
+.byte 15,56,205,254
+ nop
+.byte 69,15,56,203,236
+ movdqa %xmm2,%xmm0
+ movl $1,%ecx
+ pxor %xmm6,%xmm6
+.byte 69,15,56,203,254
+.byte 69,15,56,205,218
+ pshufd $0x0e,%xmm1,%xmm0
+ movdqa 240-128(%rbp),%xmm1
+ paddd %xmm7,%xmm1
+ movq (%rbx),%xmm7
+ nop
+.byte 69,15,56,203,229
+ pshufd $0x0e,%xmm2,%xmm0
+ movdqa 240-128(%rbp),%xmm2
+ paddd %xmm11,%xmm2
+.byte 69,15,56,203,247
+
+ movdqa %xmm1,%xmm0
+ cmpl 0(%rbx),%ecx
+ cmovgeq %rsp,%r8
+ cmpl 4(%rbx),%ecx
+ cmovgeq %rsp,%r9
+ pshufd $0x00,%xmm7,%xmm9
+.byte 69,15,56,203,236
+ movdqa %xmm2,%xmm0
+ pshufd $0x55,%xmm7,%xmm10
+ movdqa %xmm7,%xmm11
+.byte 69,15,56,203,254
+ pshufd $0x0e,%xmm1,%xmm0
+ pcmpgtd %xmm6,%xmm9
+ pcmpgtd %xmm6,%xmm10
+.byte 69,15,56,203,229
+ pshufd $0x0e,%xmm2,%xmm0
+ pcmpgtd %xmm6,%xmm11
+ movdqa K256_shaext-16(%rip),%xmm3
+.byte 69,15,56,203,247
+
+ pand %xmm9,%xmm13
+ pand %xmm10,%xmm15
+ pand %xmm9,%xmm12
+ pand %xmm10,%xmm14
+ paddd %xmm7,%xmm11
+
+ paddd 80(%rsp),%xmm13
+ paddd 112(%rsp),%xmm15
+ paddd 64(%rsp),%xmm12
+ paddd 96(%rsp),%xmm14
+
+ movq %xmm11,(%rbx)
+ decl %edx
+ jnz .Loop_shaext
+
+ movl 280(%rsp),%edx
+
+ pshufd $27,%xmm12,%xmm12
+ pshufd $27,%xmm13,%xmm13
+ pshufd $27,%xmm14,%xmm14
+ pshufd $27,%xmm15,%xmm15
+
+ movdqa %xmm12,%xmm5
+ movdqa %xmm13,%xmm6
+ punpckldq %xmm14,%xmm12
+ punpckhdq %xmm14,%xmm5
+ punpckldq %xmm15,%xmm13
+ punpckhdq %xmm15,%xmm6
+
+ movq %xmm12,0-128(%rdi)
+ psrldq $8,%xmm12
+ movq %xmm5,128-128(%rdi)
+ psrldq $8,%xmm5
+ movq %xmm12,32-128(%rdi)
+ movq %xmm5,160-128(%rdi)
+
+ movq %xmm13,64-128(%rdi)
+ psrldq $8,%xmm13
+ movq %xmm6,192-128(%rdi)
+ psrldq $8,%xmm6
+ movq %xmm13,96-128(%rdi)
+ movq %xmm6,224-128(%rdi)
+
+ leaq 8(%rdi),%rdi
+ leaq 32(%rsi),%rsi
+ decl %edx
+ jnz .Loop_grande_shaext
+
+.Ldone_shaext:
+
+ movq -16(%rax),%rbp
+.cfi_restore %rbp
+ movq -8(%rax),%rbx
+.cfi_restore %rbx
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
+.Lepilogue_shaext:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size sha256_multi_block_shaext,.-sha256_multi_block_shaext
+.align 256
+K256:
+.long 1116352408,1116352408,1116352408,1116352408
+.long 1116352408,1116352408,1116352408,1116352408
+.long 1899447441,1899447441,1899447441,1899447441
+.long 1899447441,1899447441,1899447441,1899447441
+.long 3049323471,3049323471,3049323471,3049323471
+.long 3049323471,3049323471,3049323471,3049323471
+.long 3921009573,3921009573,3921009573,3921009573
+.long 3921009573,3921009573,3921009573,3921009573
+.long 961987163,961987163,961987163,961987163
+.long 961987163,961987163,961987163,961987163
+.long 1508970993,1508970993,1508970993,1508970993
+.long 1508970993,1508970993,1508970993,1508970993
+.long 2453635748,2453635748,2453635748,2453635748
+.long 2453635748,2453635748,2453635748,2453635748
+.long 2870763221,2870763221,2870763221,2870763221
+.long 2870763221,2870763221,2870763221,2870763221
+.long 3624381080,3624381080,3624381080,3624381080
+.long 3624381080,3624381080,3624381080,3624381080
+.long 310598401,310598401,310598401,310598401
+.long 310598401,310598401,310598401,310598401
+.long 607225278,607225278,607225278,607225278
+.long 607225278,607225278,607225278,607225278
+.long 1426881987,1426881987,1426881987,1426881987
+.long 1426881987,1426881987,1426881987,1426881987
+.long 1925078388,1925078388,1925078388,1925078388
+.long 1925078388,1925078388,1925078388,1925078388
+.long 2162078206,2162078206,2162078206,2162078206
+.long 2162078206,2162078206,2162078206,2162078206
+.long 2614888103,2614888103,2614888103,2614888103
+.long 2614888103,2614888103,2614888103,2614888103
+.long 3248222580,3248222580,3248222580,3248222580
+.long 3248222580,3248222580,3248222580,3248222580
+.long 3835390401,3835390401,3835390401,3835390401
+.long 3835390401,3835390401,3835390401,3835390401
+.long 4022224774,4022224774,4022224774,4022224774
+.long 4022224774,4022224774,4022224774,4022224774
+.long 264347078,264347078,264347078,264347078
+.long 264347078,264347078,264347078,264347078
+.long 604807628,604807628,604807628,604807628
+.long 604807628,604807628,604807628,604807628
+.long 770255983,770255983,770255983,770255983
+.long 770255983,770255983,770255983,770255983
+.long 1249150122,1249150122,1249150122,1249150122
+.long 1249150122,1249150122,1249150122,1249150122
+.long 1555081692,1555081692,1555081692,1555081692
+.long 1555081692,1555081692,1555081692,1555081692
+.long 1996064986,1996064986,1996064986,1996064986
+.long 1996064986,1996064986,1996064986,1996064986
+.long 2554220882,2554220882,2554220882,2554220882
+.long 2554220882,2554220882,2554220882,2554220882
+.long 2821834349,2821834349,2821834349,2821834349
+.long 2821834349,2821834349,2821834349,2821834349
+.long 2952996808,2952996808,2952996808,2952996808
+.long 2952996808,2952996808,2952996808,2952996808
+.long 3210313671,3210313671,3210313671,3210313671
+.long 3210313671,3210313671,3210313671,3210313671
+.long 3336571891,3336571891,3336571891,3336571891
+.long 3336571891,3336571891,3336571891,3336571891
+.long 3584528711,3584528711,3584528711,3584528711
+.long 3584528711,3584528711,3584528711,3584528711
+.long 113926993,113926993,113926993,113926993
+.long 113926993,113926993,113926993,113926993
+.long 338241895,338241895,338241895,338241895
+.long 338241895,338241895,338241895,338241895
+.long 666307205,666307205,666307205,666307205
+.long 666307205,666307205,666307205,666307205
+.long 773529912,773529912,773529912,773529912
+.long 773529912,773529912,773529912,773529912
+.long 1294757372,1294757372,1294757372,1294757372
+.long 1294757372,1294757372,1294757372,1294757372
+.long 1396182291,1396182291,1396182291,1396182291
+.long 1396182291,1396182291,1396182291,1396182291
+.long 1695183700,1695183700,1695183700,1695183700
+.long 1695183700,1695183700,1695183700,1695183700
+.long 1986661051,1986661051,1986661051,1986661051
+.long 1986661051,1986661051,1986661051,1986661051
+.long 2177026350,2177026350,2177026350,2177026350
+.long 2177026350,2177026350,2177026350,2177026350
+.long 2456956037,2456956037,2456956037,2456956037
+.long 2456956037,2456956037,2456956037,2456956037
+.long 2730485921,2730485921,2730485921,2730485921
+.long 2730485921,2730485921,2730485921,2730485921
+.long 2820302411,2820302411,2820302411,2820302411
+.long 2820302411,2820302411,2820302411,2820302411
+.long 3259730800,3259730800,3259730800,3259730800
+.long 3259730800,3259730800,3259730800,3259730800
+.long 3345764771,3345764771,3345764771,3345764771
+.long 3345764771,3345764771,3345764771,3345764771
+.long 3516065817,3516065817,3516065817,3516065817
+.long 3516065817,3516065817,3516065817,3516065817
+.long 3600352804,3600352804,3600352804,3600352804
+.long 3600352804,3600352804,3600352804,3600352804
+.long 4094571909,4094571909,4094571909,4094571909
+.long 4094571909,4094571909,4094571909,4094571909
+.long 275423344,275423344,275423344,275423344
+.long 275423344,275423344,275423344,275423344
+.long 430227734,430227734,430227734,430227734
+.long 430227734,430227734,430227734,430227734
+.long 506948616,506948616,506948616,506948616
+.long 506948616,506948616,506948616,506948616
+.long 659060556,659060556,659060556,659060556
+.long 659060556,659060556,659060556,659060556
+.long 883997877,883997877,883997877,883997877
+.long 883997877,883997877,883997877,883997877
+.long 958139571,958139571,958139571,958139571
+.long 958139571,958139571,958139571,958139571
+.long 1322822218,1322822218,1322822218,1322822218
+.long 1322822218,1322822218,1322822218,1322822218
+.long 1537002063,1537002063,1537002063,1537002063
+.long 1537002063,1537002063,1537002063,1537002063
+.long 1747873779,1747873779,1747873779,1747873779
+.long 1747873779,1747873779,1747873779,1747873779
+.long 1955562222,1955562222,1955562222,1955562222
+.long 1955562222,1955562222,1955562222,1955562222
+.long 2024104815,2024104815,2024104815,2024104815
+.long 2024104815,2024104815,2024104815,2024104815
+.long 2227730452,2227730452,2227730452,2227730452
+.long 2227730452,2227730452,2227730452,2227730452
+.long 2361852424,2361852424,2361852424,2361852424
+.long 2361852424,2361852424,2361852424,2361852424
+.long 2428436474,2428436474,2428436474,2428436474
+.long 2428436474,2428436474,2428436474,2428436474
+.long 2756734187,2756734187,2756734187,2756734187
+.long 2756734187,2756734187,2756734187,2756734187
+.long 3204031479,3204031479,3204031479,3204031479
+.long 3204031479,3204031479,3204031479,3204031479
+.long 3329325298,3329325298,3329325298,3329325298
+.long 3329325298,3329325298,3329325298,3329325298
+.Lpbswap:
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+K256_shaext:
+.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
+.long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
+.long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
+.long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
+.long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
+.long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
+.long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
+.long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
+.long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
+.long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
+.long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
+.long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
+.long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
+.long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
+.long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
+.long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
+.byte 83,72,65,50,53,54,32,109,117,108,116,105,45,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
diff --git a/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha256-x86_64.S b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha256-x86_64.S
new file mode 100644
index 0000000000..a5d3cf5068
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha256-x86_64.S
@@ -0,0 +1,3097 @@
+# WARNING: do not edit!
+# Generated from openssl/crypto/sha/asm/sha512-x86_64.pl
+#
+# Copyright 2005-2020 The OpenSSL Project Authors. All Rights Reserved.
+#
+# Licensed under the OpenSSL license (the "License"). You may not use
+# this file except in compliance with the License. You can obtain a copy
+# in the file LICENSE in the source distribution or at
+# https://www.openssl.org/source/license.html
+
+.text
+
+
+.globl sha256_block_data_order
+.type sha256_block_data_order,@function
+.align 16
+sha256_block_data_order:
+.cfi_startproc
+ leaq OPENSSL_ia32cap_P(%rip),%r11
+ movl 0(%r11),%r9d
+ movl 4(%r11),%r10d
+ movl 8(%r11),%r11d
+ testl $536870912,%r11d
+ jnz _shaext_shortcut
+ testl $512,%r10d
+ jnz .Lssse3_shortcut
+ movq %rsp,%rax
+.cfi_def_cfa_register %rax
+ pushq %rbx
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_offset %rbp,-24
+ pushq %r12
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_offset %r14,-48
+ pushq %r15
+.cfi_offset %r15,-56
+ shlq $4,%rdx
+ subq $64+32,%rsp
+ leaq (%rsi,%rdx,4),%rdx
+ andq $-64,%rsp
+ movq %rdi,64+0(%rsp)
+ movq %rsi,64+8(%rsp)
+ movq %rdx,64+16(%rsp)
+ movq %rax,88(%rsp)
+.cfi_escape 0x0f,0x06,0x77,0xd8,0x00,0x06,0x23,0x08
+.Lprologue:
+
+ movl 0(%rdi),%eax
+ movl 4(%rdi),%ebx
+ movl 8(%rdi),%ecx
+ movl 12(%rdi),%edx
+ movl 16(%rdi),%r8d
+ movl 20(%rdi),%r9d
+ movl 24(%rdi),%r10d
+ movl 28(%rdi),%r11d
+ jmp .Lloop
+
+.align 16
+.Lloop:
+ movl %ebx,%edi
+ leaq K256(%rip),%rbp
+ xorl %ecx,%edi
+ movl 0(%rsi),%r12d
+ movl %r8d,%r13d
+ movl %eax,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r9d,%r15d
+
+ xorl %r8d,%r13d
+ rorl $9,%r14d
+ xorl %r10d,%r15d
+
+ movl %r12d,0(%rsp)
+ xorl %eax,%r14d
+ andl %r8d,%r15d
+
+ rorl $5,%r13d
+ addl %r11d,%r12d
+ xorl %r10d,%r15d
+
+ rorl $11,%r14d
+ xorl %r8d,%r13d
+ addl %r15d,%r12d
+
+ movl %eax,%r15d
+ addl (%rbp),%r12d
+ xorl %eax,%r14d
+
+ xorl %ebx,%r15d
+ rorl $6,%r13d
+ movl %ebx,%r11d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r11d
+ addl %r12d,%edx
+ addl %r12d,%r11d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r11d
+ movl 4(%rsi),%r12d
+ movl %edx,%r13d
+ movl %r11d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r8d,%edi
+
+ xorl %edx,%r13d
+ rorl $9,%r14d
+ xorl %r9d,%edi
+
+ movl %r12d,4(%rsp)
+ xorl %r11d,%r14d
+ andl %edx,%edi
+
+ rorl $5,%r13d
+ addl %r10d,%r12d
+ xorl %r9d,%edi
+
+ rorl $11,%r14d
+ xorl %edx,%r13d
+ addl %edi,%r12d
+
+ movl %r11d,%edi
+ addl (%rbp),%r12d
+ xorl %r11d,%r14d
+
+ xorl %eax,%edi
+ rorl $6,%r13d
+ movl %eax,%r10d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r10d
+ addl %r12d,%ecx
+ addl %r12d,%r10d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r10d
+ movl 8(%rsi),%r12d
+ movl %ecx,%r13d
+ movl %r10d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %edx,%r15d
+
+ xorl %ecx,%r13d
+ rorl $9,%r14d
+ xorl %r8d,%r15d
+
+ movl %r12d,8(%rsp)
+ xorl %r10d,%r14d
+ andl %ecx,%r15d
+
+ rorl $5,%r13d
+ addl %r9d,%r12d
+ xorl %r8d,%r15d
+
+ rorl $11,%r14d
+ xorl %ecx,%r13d
+ addl %r15d,%r12d
+
+ movl %r10d,%r15d
+ addl (%rbp),%r12d
+ xorl %r10d,%r14d
+
+ xorl %r11d,%r15d
+ rorl $6,%r13d
+ movl %r11d,%r9d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r9d
+ addl %r12d,%ebx
+ addl %r12d,%r9d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r9d
+ movl 12(%rsi),%r12d
+ movl %ebx,%r13d
+ movl %r9d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %ecx,%edi
+
+ xorl %ebx,%r13d
+ rorl $9,%r14d
+ xorl %edx,%edi
+
+ movl %r12d,12(%rsp)
+ xorl %r9d,%r14d
+ andl %ebx,%edi
+
+ rorl $5,%r13d
+ addl %r8d,%r12d
+ xorl %edx,%edi
+
+ rorl $11,%r14d
+ xorl %ebx,%r13d
+ addl %edi,%r12d
+
+ movl %r9d,%edi
+ addl (%rbp),%r12d
+ xorl %r9d,%r14d
+
+ xorl %r10d,%edi
+ rorl $6,%r13d
+ movl %r10d,%r8d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r8d
+ addl %r12d,%eax
+ addl %r12d,%r8d
+
+ leaq 20(%rbp),%rbp
+ addl %r14d,%r8d
+ movl 16(%rsi),%r12d
+ movl %eax,%r13d
+ movl %r8d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %ebx,%r15d
+
+ xorl %eax,%r13d
+ rorl $9,%r14d
+ xorl %ecx,%r15d
+
+ movl %r12d,16(%rsp)
+ xorl %r8d,%r14d
+ andl %eax,%r15d
+
+ rorl $5,%r13d
+ addl %edx,%r12d
+ xorl %ecx,%r15d
+
+ rorl $11,%r14d
+ xorl %eax,%r13d
+ addl %r15d,%r12d
+
+ movl %r8d,%r15d
+ addl (%rbp),%r12d
+ xorl %r8d,%r14d
+
+ xorl %r9d,%r15d
+ rorl $6,%r13d
+ movl %r9d,%edx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%edx
+ addl %r12d,%r11d
+ addl %r12d,%edx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%edx
+ movl 20(%rsi),%r12d
+ movl %r11d,%r13d
+ movl %edx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %eax,%edi
+
+ xorl %r11d,%r13d
+ rorl $9,%r14d
+ xorl %ebx,%edi
+
+ movl %r12d,20(%rsp)
+ xorl %edx,%r14d
+ andl %r11d,%edi
+
+ rorl $5,%r13d
+ addl %ecx,%r12d
+ xorl %ebx,%edi
+
+ rorl $11,%r14d
+ xorl %r11d,%r13d
+ addl %edi,%r12d
+
+ movl %edx,%edi
+ addl (%rbp),%r12d
+ xorl %edx,%r14d
+
+ xorl %r8d,%edi
+ rorl $6,%r13d
+ movl %r8d,%ecx
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%ecx
+ addl %r12d,%r10d
+ addl %r12d,%ecx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%ecx
+ movl 24(%rsi),%r12d
+ movl %r10d,%r13d
+ movl %ecx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r11d,%r15d
+
+ xorl %r10d,%r13d
+ rorl $9,%r14d
+ xorl %eax,%r15d
+
+ movl %r12d,24(%rsp)
+ xorl %ecx,%r14d
+ andl %r10d,%r15d
+
+ rorl $5,%r13d
+ addl %ebx,%r12d
+ xorl %eax,%r15d
+
+ rorl $11,%r14d
+ xorl %r10d,%r13d
+ addl %r15d,%r12d
+
+ movl %ecx,%r15d
+ addl (%rbp),%r12d
+ xorl %ecx,%r14d
+
+ xorl %edx,%r15d
+ rorl $6,%r13d
+ movl %edx,%ebx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%ebx
+ addl %r12d,%r9d
+ addl %r12d,%ebx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%ebx
+ movl 28(%rsi),%r12d
+ movl %r9d,%r13d
+ movl %ebx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r10d,%edi
+
+ xorl %r9d,%r13d
+ rorl $9,%r14d
+ xorl %r11d,%edi
+
+ movl %r12d,28(%rsp)
+ xorl %ebx,%r14d
+ andl %r9d,%edi
+
+ rorl $5,%r13d
+ addl %eax,%r12d
+ xorl %r11d,%edi
+
+ rorl $11,%r14d
+ xorl %r9d,%r13d
+ addl %edi,%r12d
+
+ movl %ebx,%edi
+ addl (%rbp),%r12d
+ xorl %ebx,%r14d
+
+ xorl %ecx,%edi
+ rorl $6,%r13d
+ movl %ecx,%eax
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%eax
+ addl %r12d,%r8d
+ addl %r12d,%eax
+
+ leaq 20(%rbp),%rbp
+ addl %r14d,%eax
+ movl 32(%rsi),%r12d
+ movl %r8d,%r13d
+ movl %eax,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r9d,%r15d
+
+ xorl %r8d,%r13d
+ rorl $9,%r14d
+ xorl %r10d,%r15d
+
+ movl %r12d,32(%rsp)
+ xorl %eax,%r14d
+ andl %r8d,%r15d
+
+ rorl $5,%r13d
+ addl %r11d,%r12d
+ xorl %r10d,%r15d
+
+ rorl $11,%r14d
+ xorl %r8d,%r13d
+ addl %r15d,%r12d
+
+ movl %eax,%r15d
+ addl (%rbp),%r12d
+ xorl %eax,%r14d
+
+ xorl %ebx,%r15d
+ rorl $6,%r13d
+ movl %ebx,%r11d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r11d
+ addl %r12d,%edx
+ addl %r12d,%r11d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r11d
+ movl 36(%rsi),%r12d
+ movl %edx,%r13d
+ movl %r11d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r8d,%edi
+
+ xorl %edx,%r13d
+ rorl $9,%r14d
+ xorl %r9d,%edi
+
+ movl %r12d,36(%rsp)
+ xorl %r11d,%r14d
+ andl %edx,%edi
+
+ rorl $5,%r13d
+ addl %r10d,%r12d
+ xorl %r9d,%edi
+
+ rorl $11,%r14d
+ xorl %edx,%r13d
+ addl %edi,%r12d
+
+ movl %r11d,%edi
+ addl (%rbp),%r12d
+ xorl %r11d,%r14d
+
+ xorl %eax,%edi
+ rorl $6,%r13d
+ movl %eax,%r10d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r10d
+ addl %r12d,%ecx
+ addl %r12d,%r10d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r10d
+ movl 40(%rsi),%r12d
+ movl %ecx,%r13d
+ movl %r10d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %edx,%r15d
+
+ xorl %ecx,%r13d
+ rorl $9,%r14d
+ xorl %r8d,%r15d
+
+ movl %r12d,40(%rsp)
+ xorl %r10d,%r14d
+ andl %ecx,%r15d
+
+ rorl $5,%r13d
+ addl %r9d,%r12d
+ xorl %r8d,%r15d
+
+ rorl $11,%r14d
+ xorl %ecx,%r13d
+ addl %r15d,%r12d
+
+ movl %r10d,%r15d
+ addl (%rbp),%r12d
+ xorl %r10d,%r14d
+
+ xorl %r11d,%r15d
+ rorl $6,%r13d
+ movl %r11d,%r9d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r9d
+ addl %r12d,%ebx
+ addl %r12d,%r9d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r9d
+ movl 44(%rsi),%r12d
+ movl %ebx,%r13d
+ movl %r9d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %ecx,%edi
+
+ xorl %ebx,%r13d
+ rorl $9,%r14d
+ xorl %edx,%edi
+
+ movl %r12d,44(%rsp)
+ xorl %r9d,%r14d
+ andl %ebx,%edi
+
+ rorl $5,%r13d
+ addl %r8d,%r12d
+ xorl %edx,%edi
+
+ rorl $11,%r14d
+ xorl %ebx,%r13d
+ addl %edi,%r12d
+
+ movl %r9d,%edi
+ addl (%rbp),%r12d
+ xorl %r9d,%r14d
+
+ xorl %r10d,%edi
+ rorl $6,%r13d
+ movl %r10d,%r8d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r8d
+ addl %r12d,%eax
+ addl %r12d,%r8d
+
+ leaq 20(%rbp),%rbp
+ addl %r14d,%r8d
+ movl 48(%rsi),%r12d
+ movl %eax,%r13d
+ movl %r8d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %ebx,%r15d
+
+ xorl %eax,%r13d
+ rorl $9,%r14d
+ xorl %ecx,%r15d
+
+ movl %r12d,48(%rsp)
+ xorl %r8d,%r14d
+ andl %eax,%r15d
+
+ rorl $5,%r13d
+ addl %edx,%r12d
+ xorl %ecx,%r15d
+
+ rorl $11,%r14d
+ xorl %eax,%r13d
+ addl %r15d,%r12d
+
+ movl %r8d,%r15d
+ addl (%rbp),%r12d
+ xorl %r8d,%r14d
+
+ xorl %r9d,%r15d
+ rorl $6,%r13d
+ movl %r9d,%edx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%edx
+ addl %r12d,%r11d
+ addl %r12d,%edx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%edx
+ movl 52(%rsi),%r12d
+ movl %r11d,%r13d
+ movl %edx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %eax,%edi
+
+ xorl %r11d,%r13d
+ rorl $9,%r14d
+ xorl %ebx,%edi
+
+ movl %r12d,52(%rsp)
+ xorl %edx,%r14d
+ andl %r11d,%edi
+
+ rorl $5,%r13d
+ addl %ecx,%r12d
+ xorl %ebx,%edi
+
+ rorl $11,%r14d
+ xorl %r11d,%r13d
+ addl %edi,%r12d
+
+ movl %edx,%edi
+ addl (%rbp),%r12d
+ xorl %edx,%r14d
+
+ xorl %r8d,%edi
+ rorl $6,%r13d
+ movl %r8d,%ecx
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%ecx
+ addl %r12d,%r10d
+ addl %r12d,%ecx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%ecx
+ movl 56(%rsi),%r12d
+ movl %r10d,%r13d
+ movl %ecx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r11d,%r15d
+
+ xorl %r10d,%r13d
+ rorl $9,%r14d
+ xorl %eax,%r15d
+
+ movl %r12d,56(%rsp)
+ xorl %ecx,%r14d
+ andl %r10d,%r15d
+
+ rorl $5,%r13d
+ addl %ebx,%r12d
+ xorl %eax,%r15d
+
+ rorl $11,%r14d
+ xorl %r10d,%r13d
+ addl %r15d,%r12d
+
+ movl %ecx,%r15d
+ addl (%rbp),%r12d
+ xorl %ecx,%r14d
+
+ xorl %edx,%r15d
+ rorl $6,%r13d
+ movl %edx,%ebx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%ebx
+ addl %r12d,%r9d
+ addl %r12d,%ebx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%ebx
+ movl 60(%rsi),%r12d
+ movl %r9d,%r13d
+ movl %ebx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r10d,%edi
+
+ xorl %r9d,%r13d
+ rorl $9,%r14d
+ xorl %r11d,%edi
+
+ movl %r12d,60(%rsp)
+ xorl %ebx,%r14d
+ andl %r9d,%edi
+
+ rorl $5,%r13d
+ addl %eax,%r12d
+ xorl %r11d,%edi
+
+ rorl $11,%r14d
+ xorl %r9d,%r13d
+ addl %edi,%r12d
+
+ movl %ebx,%edi
+ addl (%rbp),%r12d
+ xorl %ebx,%r14d
+
+ xorl %ecx,%edi
+ rorl $6,%r13d
+ movl %ecx,%eax
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%eax
+ addl %r12d,%r8d
+ addl %r12d,%eax
+
+ leaq 20(%rbp),%rbp
+ jmp .Lrounds_16_xx
+.align 16
+.Lrounds_16_xx:
+ movl 4(%rsp),%r13d
+ movl 56(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%eax
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 36(%rsp),%r12d
+
+ addl 0(%rsp),%r12d
+ movl %r8d,%r13d
+ addl %r15d,%r12d
+ movl %eax,%r14d
+ rorl $14,%r13d
+ movl %r9d,%r15d
+
+ xorl %r8d,%r13d
+ rorl $9,%r14d
+ xorl %r10d,%r15d
+
+ movl %r12d,0(%rsp)
+ xorl %eax,%r14d
+ andl %r8d,%r15d
+
+ rorl $5,%r13d
+ addl %r11d,%r12d
+ xorl %r10d,%r15d
+
+ rorl $11,%r14d
+ xorl %r8d,%r13d
+ addl %r15d,%r12d
+
+ movl %eax,%r15d
+ addl (%rbp),%r12d
+ xorl %eax,%r14d
+
+ xorl %ebx,%r15d
+ rorl $6,%r13d
+ movl %ebx,%r11d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r11d
+ addl %r12d,%edx
+ addl %r12d,%r11d
+
+ leaq 4(%rbp),%rbp
+ movl 8(%rsp),%r13d
+ movl 60(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r11d
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 40(%rsp),%r12d
+
+ addl 4(%rsp),%r12d
+ movl %edx,%r13d
+ addl %edi,%r12d
+ movl %r11d,%r14d
+ rorl $14,%r13d
+ movl %r8d,%edi
+
+ xorl %edx,%r13d
+ rorl $9,%r14d
+ xorl %r9d,%edi
+
+ movl %r12d,4(%rsp)
+ xorl %r11d,%r14d
+ andl %edx,%edi
+
+ rorl $5,%r13d
+ addl %r10d,%r12d
+ xorl %r9d,%edi
+
+ rorl $11,%r14d
+ xorl %edx,%r13d
+ addl %edi,%r12d
+
+ movl %r11d,%edi
+ addl (%rbp),%r12d
+ xorl %r11d,%r14d
+
+ xorl %eax,%edi
+ rorl $6,%r13d
+ movl %eax,%r10d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r10d
+ addl %r12d,%ecx
+ addl %r12d,%r10d
+
+ leaq 4(%rbp),%rbp
+ movl 12(%rsp),%r13d
+ movl 0(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r10d
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 44(%rsp),%r12d
+
+ addl 8(%rsp),%r12d
+ movl %ecx,%r13d
+ addl %r15d,%r12d
+ movl %r10d,%r14d
+ rorl $14,%r13d
+ movl %edx,%r15d
+
+ xorl %ecx,%r13d
+ rorl $9,%r14d
+ xorl %r8d,%r15d
+
+ movl %r12d,8(%rsp)
+ xorl %r10d,%r14d
+ andl %ecx,%r15d
+
+ rorl $5,%r13d
+ addl %r9d,%r12d
+ xorl %r8d,%r15d
+
+ rorl $11,%r14d
+ xorl %ecx,%r13d
+ addl %r15d,%r12d
+
+ movl %r10d,%r15d
+ addl (%rbp),%r12d
+ xorl %r10d,%r14d
+
+ xorl %r11d,%r15d
+ rorl $6,%r13d
+ movl %r11d,%r9d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r9d
+ addl %r12d,%ebx
+ addl %r12d,%r9d
+
+ leaq 4(%rbp),%rbp
+ movl 16(%rsp),%r13d
+ movl 4(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r9d
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 48(%rsp),%r12d
+
+ addl 12(%rsp),%r12d
+ movl %ebx,%r13d
+ addl %edi,%r12d
+ movl %r9d,%r14d
+ rorl $14,%r13d
+ movl %ecx,%edi
+
+ xorl %ebx,%r13d
+ rorl $9,%r14d
+ xorl %edx,%edi
+
+ movl %r12d,12(%rsp)
+ xorl %r9d,%r14d
+ andl %ebx,%edi
+
+ rorl $5,%r13d
+ addl %r8d,%r12d
+ xorl %edx,%edi
+
+ rorl $11,%r14d
+ xorl %ebx,%r13d
+ addl %edi,%r12d
+
+ movl %r9d,%edi
+ addl (%rbp),%r12d
+ xorl %r9d,%r14d
+
+ xorl %r10d,%edi
+ rorl $6,%r13d
+ movl %r10d,%r8d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r8d
+ addl %r12d,%eax
+ addl %r12d,%r8d
+
+ leaq 20(%rbp),%rbp
+ movl 20(%rsp),%r13d
+ movl 8(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r8d
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 52(%rsp),%r12d
+
+ addl 16(%rsp),%r12d
+ movl %eax,%r13d
+ addl %r15d,%r12d
+ movl %r8d,%r14d
+ rorl $14,%r13d
+ movl %ebx,%r15d
+
+ xorl %eax,%r13d
+ rorl $9,%r14d
+ xorl %ecx,%r15d
+
+ movl %r12d,16(%rsp)
+ xorl %r8d,%r14d
+ andl %eax,%r15d
+
+ rorl $5,%r13d
+ addl %edx,%r12d
+ xorl %ecx,%r15d
+
+ rorl $11,%r14d
+ xorl %eax,%r13d
+ addl %r15d,%r12d
+
+ movl %r8d,%r15d
+ addl (%rbp),%r12d
+ xorl %r8d,%r14d
+
+ xorl %r9d,%r15d
+ rorl $6,%r13d
+ movl %r9d,%edx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%edx
+ addl %r12d,%r11d
+ addl %r12d,%edx
+
+ leaq 4(%rbp),%rbp
+ movl 24(%rsp),%r13d
+ movl 12(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%edx
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 56(%rsp),%r12d
+
+ addl 20(%rsp),%r12d
+ movl %r11d,%r13d
+ addl %edi,%r12d
+ movl %edx,%r14d
+ rorl $14,%r13d
+ movl %eax,%edi
+
+ xorl %r11d,%r13d
+ rorl $9,%r14d
+ xorl %ebx,%edi
+
+ movl %r12d,20(%rsp)
+ xorl %edx,%r14d
+ andl %r11d,%edi
+
+ rorl $5,%r13d
+ addl %ecx,%r12d
+ xorl %ebx,%edi
+
+ rorl $11,%r14d
+ xorl %r11d,%r13d
+ addl %edi,%r12d
+
+ movl %edx,%edi
+ addl (%rbp),%r12d
+ xorl %edx,%r14d
+
+ xorl %r8d,%edi
+ rorl $6,%r13d
+ movl %r8d,%ecx
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%ecx
+ addl %r12d,%r10d
+ addl %r12d,%ecx
+
+ leaq 4(%rbp),%rbp
+ movl 28(%rsp),%r13d
+ movl 16(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%ecx
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 60(%rsp),%r12d
+
+ addl 24(%rsp),%r12d
+ movl %r10d,%r13d
+ addl %r15d,%r12d
+ movl %ecx,%r14d
+ rorl $14,%r13d
+ movl %r11d,%r15d
+
+ xorl %r10d,%r13d
+ rorl $9,%r14d
+ xorl %eax,%r15d
+
+ movl %r12d,24(%rsp)
+ xorl %ecx,%r14d
+ andl %r10d,%r15d
+
+ rorl $5,%r13d
+ addl %ebx,%r12d
+ xorl %eax,%r15d
+
+ rorl $11,%r14d
+ xorl %r10d,%r13d
+ addl %r15d,%r12d
+
+ movl %ecx,%r15d
+ addl (%rbp),%r12d
+ xorl %ecx,%r14d
+
+ xorl %edx,%r15d
+ rorl $6,%r13d
+ movl %edx,%ebx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%ebx
+ addl %r12d,%r9d
+ addl %r12d,%ebx
+
+ leaq 4(%rbp),%rbp
+ movl 32(%rsp),%r13d
+ movl 20(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%ebx
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 0(%rsp),%r12d
+
+ addl 28(%rsp),%r12d
+ movl %r9d,%r13d
+ addl %edi,%r12d
+ movl %ebx,%r14d
+ rorl $14,%r13d
+ movl %r10d,%edi
+
+ xorl %r9d,%r13d
+ rorl $9,%r14d
+ xorl %r11d,%edi
+
+ movl %r12d,28(%rsp)
+ xorl %ebx,%r14d
+ andl %r9d,%edi
+
+ rorl $5,%r13d
+ addl %eax,%r12d
+ xorl %r11d,%edi
+
+ rorl $11,%r14d
+ xorl %r9d,%r13d
+ addl %edi,%r12d
+
+ movl %ebx,%edi
+ addl (%rbp),%r12d
+ xorl %ebx,%r14d
+
+ xorl %ecx,%edi
+ rorl $6,%r13d
+ movl %ecx,%eax
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%eax
+ addl %r12d,%r8d
+ addl %r12d,%eax
+
+ leaq 20(%rbp),%rbp
+ movl 36(%rsp),%r13d
+ movl 24(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%eax
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 4(%rsp),%r12d
+
+ addl 32(%rsp),%r12d
+ movl %r8d,%r13d
+ addl %r15d,%r12d
+ movl %eax,%r14d
+ rorl $14,%r13d
+ movl %r9d,%r15d
+
+ xorl %r8d,%r13d
+ rorl $9,%r14d
+ xorl %r10d,%r15d
+
+ movl %r12d,32(%rsp)
+ xorl %eax,%r14d
+ andl %r8d,%r15d
+
+ rorl $5,%r13d
+ addl %r11d,%r12d
+ xorl %r10d,%r15d
+
+ rorl $11,%r14d
+ xorl %r8d,%r13d
+ addl %r15d,%r12d
+
+ movl %eax,%r15d
+ addl (%rbp),%r12d
+ xorl %eax,%r14d
+
+ xorl %ebx,%r15d
+ rorl $6,%r13d
+ movl %ebx,%r11d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r11d
+ addl %r12d,%edx
+ addl %r12d,%r11d
+
+ leaq 4(%rbp),%rbp
+ movl 40(%rsp),%r13d
+ movl 28(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r11d
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 8(%rsp),%r12d
+
+ addl 36(%rsp),%r12d
+ movl %edx,%r13d
+ addl %edi,%r12d
+ movl %r11d,%r14d
+ rorl $14,%r13d
+ movl %r8d,%edi
+
+ xorl %edx,%r13d
+ rorl $9,%r14d
+ xorl %r9d,%edi
+
+ movl %r12d,36(%rsp)
+ xorl %r11d,%r14d
+ andl %edx,%edi
+
+ rorl $5,%r13d
+ addl %r10d,%r12d
+ xorl %r9d,%edi
+
+ rorl $11,%r14d
+ xorl %edx,%r13d
+ addl %edi,%r12d
+
+ movl %r11d,%edi
+ addl (%rbp),%r12d
+ xorl %r11d,%r14d
+
+ xorl %eax,%edi
+ rorl $6,%r13d
+ movl %eax,%r10d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r10d
+ addl %r12d,%ecx
+ addl %r12d,%r10d
+
+ leaq 4(%rbp),%rbp
+ movl 44(%rsp),%r13d
+ movl 32(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r10d
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 12(%rsp),%r12d
+
+ addl 40(%rsp),%r12d
+ movl %ecx,%r13d
+ addl %r15d,%r12d
+ movl %r10d,%r14d
+ rorl $14,%r13d
+ movl %edx,%r15d
+
+ xorl %ecx,%r13d
+ rorl $9,%r14d
+ xorl %r8d,%r15d
+
+ movl %r12d,40(%rsp)
+ xorl %r10d,%r14d
+ andl %ecx,%r15d
+
+ rorl $5,%r13d
+ addl %r9d,%r12d
+ xorl %r8d,%r15d
+
+ rorl $11,%r14d
+ xorl %ecx,%r13d
+ addl %r15d,%r12d
+
+ movl %r10d,%r15d
+ addl (%rbp),%r12d
+ xorl %r10d,%r14d
+
+ xorl %r11d,%r15d
+ rorl $6,%r13d
+ movl %r11d,%r9d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r9d
+ addl %r12d,%ebx
+ addl %r12d,%r9d
+
+ leaq 4(%rbp),%rbp
+ movl 48(%rsp),%r13d
+ movl 36(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r9d
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 16(%rsp),%r12d
+
+ addl 44(%rsp),%r12d
+ movl %ebx,%r13d
+ addl %edi,%r12d
+ movl %r9d,%r14d
+ rorl $14,%r13d
+ movl %ecx,%edi
+
+ xorl %ebx,%r13d
+ rorl $9,%r14d
+ xorl %edx,%edi
+
+ movl %r12d,44(%rsp)
+ xorl %r9d,%r14d
+ andl %ebx,%edi
+
+ rorl $5,%r13d
+ addl %r8d,%r12d
+ xorl %edx,%edi
+
+ rorl $11,%r14d
+ xorl %ebx,%r13d
+ addl %edi,%r12d
+
+ movl %r9d,%edi
+ addl (%rbp),%r12d
+ xorl %r9d,%r14d
+
+ xorl %r10d,%edi
+ rorl $6,%r13d
+ movl %r10d,%r8d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r8d
+ addl %r12d,%eax
+ addl %r12d,%r8d
+
+ leaq 20(%rbp),%rbp
+ movl 52(%rsp),%r13d
+ movl 40(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r8d
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 20(%rsp),%r12d
+
+ addl 48(%rsp),%r12d
+ movl %eax,%r13d
+ addl %r15d,%r12d
+ movl %r8d,%r14d
+ rorl $14,%r13d
+ movl %ebx,%r15d
+
+ xorl %eax,%r13d
+ rorl $9,%r14d
+ xorl %ecx,%r15d
+
+ movl %r12d,48(%rsp)
+ xorl %r8d,%r14d
+ andl %eax,%r15d
+
+ rorl $5,%r13d
+ addl %edx,%r12d
+ xorl %ecx,%r15d
+
+ rorl $11,%r14d
+ xorl %eax,%r13d
+ addl %r15d,%r12d
+
+ movl %r8d,%r15d
+ addl (%rbp),%r12d
+ xorl %r8d,%r14d
+
+ xorl %r9d,%r15d
+ rorl $6,%r13d
+ movl %r9d,%edx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%edx
+ addl %r12d,%r11d
+ addl %r12d,%edx
+
+ leaq 4(%rbp),%rbp
+ movl 56(%rsp),%r13d
+ movl 44(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%edx
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 24(%rsp),%r12d
+
+ addl 52(%rsp),%r12d
+ movl %r11d,%r13d
+ addl %edi,%r12d
+ movl %edx,%r14d
+ rorl $14,%r13d
+ movl %eax,%edi
+
+ xorl %r11d,%r13d
+ rorl $9,%r14d
+ xorl %ebx,%edi
+
+ movl %r12d,52(%rsp)
+ xorl %edx,%r14d
+ andl %r11d,%edi
+
+ rorl $5,%r13d
+ addl %ecx,%r12d
+ xorl %ebx,%edi
+
+ rorl $11,%r14d
+ xorl %r11d,%r13d
+ addl %edi,%r12d
+
+ movl %edx,%edi
+ addl (%rbp),%r12d
+ xorl %edx,%r14d
+
+ xorl %r8d,%edi
+ rorl $6,%r13d
+ movl %r8d,%ecx
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%ecx
+ addl %r12d,%r10d
+ addl %r12d,%ecx
+
+ leaq 4(%rbp),%rbp
+ movl 60(%rsp),%r13d
+ movl 48(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%ecx
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 28(%rsp),%r12d
+
+ addl 56(%rsp),%r12d
+ movl %r10d,%r13d
+ addl %r15d,%r12d
+ movl %ecx,%r14d
+ rorl $14,%r13d
+ movl %r11d,%r15d
+
+ xorl %r10d,%r13d
+ rorl $9,%r14d
+ xorl %eax,%r15d
+
+ movl %r12d,56(%rsp)
+ xorl %ecx,%r14d
+ andl %r10d,%r15d
+
+ rorl $5,%r13d
+ addl %ebx,%r12d
+ xorl %eax,%r15d
+
+ rorl $11,%r14d
+ xorl %r10d,%r13d
+ addl %r15d,%r12d
+
+ movl %ecx,%r15d
+ addl (%rbp),%r12d
+ xorl %ecx,%r14d
+
+ xorl %edx,%r15d
+ rorl $6,%r13d
+ movl %edx,%ebx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%ebx
+ addl %r12d,%r9d
+ addl %r12d,%ebx
+
+ leaq 4(%rbp),%rbp
+ movl 0(%rsp),%r13d
+ movl 52(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%ebx
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 32(%rsp),%r12d
+
+ addl 60(%rsp),%r12d
+ movl %r9d,%r13d
+ addl %edi,%r12d
+ movl %ebx,%r14d
+ rorl $14,%r13d
+ movl %r10d,%edi
+
+ xorl %r9d,%r13d
+ rorl $9,%r14d
+ xorl %r11d,%edi
+
+ movl %r12d,60(%rsp)
+ xorl %ebx,%r14d
+ andl %r9d,%edi
+
+ rorl $5,%r13d
+ addl %eax,%r12d
+ xorl %r11d,%edi
+
+ rorl $11,%r14d
+ xorl %r9d,%r13d
+ addl %edi,%r12d
+
+ movl %ebx,%edi
+ addl (%rbp),%r12d
+ xorl %ebx,%r14d
+
+ xorl %ecx,%edi
+ rorl $6,%r13d
+ movl %ecx,%eax
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%eax
+ addl %r12d,%r8d
+ addl %r12d,%eax
+
+ leaq 20(%rbp),%rbp
+ cmpb $0,3(%rbp)
+ jnz .Lrounds_16_xx
+
+ movq 64+0(%rsp),%rdi
+ addl %r14d,%eax
+ leaq 64(%rsi),%rsi
+
+ addl 0(%rdi),%eax
+ addl 4(%rdi),%ebx
+ addl 8(%rdi),%ecx
+ addl 12(%rdi),%edx
+ addl 16(%rdi),%r8d
+ addl 20(%rdi),%r9d
+ addl 24(%rdi),%r10d
+ addl 28(%rdi),%r11d
+
+ cmpq 64+16(%rsp),%rsi
+
+ movl %eax,0(%rdi)
+ movl %ebx,4(%rdi)
+ movl %ecx,8(%rdi)
+ movl %edx,12(%rdi)
+ movl %r8d,16(%rdi)
+ movl %r9d,20(%rdi)
+ movl %r10d,24(%rdi)
+ movl %r11d,28(%rdi)
+ jb .Lloop
+
+ movq 88(%rsp),%rsi
+.cfi_def_cfa %rsi,8
+ movq -48(%rsi),%r15
+.cfi_restore %r15
+ movq -40(%rsi),%r14
+.cfi_restore %r14
+ movq -32(%rsi),%r13
+.cfi_restore %r13
+ movq -24(%rsi),%r12
+.cfi_restore %r12
+ movq -16(%rsi),%rbp
+.cfi_restore %rbp
+ movq -8(%rsi),%rbx
+.cfi_restore %rbx
+ leaq (%rsi),%rsp
+.cfi_def_cfa_register %rsp
+.Lepilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size sha256_block_data_order,.-sha256_block_data_order
+.align 64
+.type K256,@object
+K256:
+.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
+.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
+.long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
+.long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
+.long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
+.long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
+.long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
+.long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
+.long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
+.long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
+.long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
+.long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
+.long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
+.long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
+.long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
+.long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
+.long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
+.long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
+.long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
+.long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
+.long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
+.long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
+.long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
+.long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
+.long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
+.long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
+.long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
+.long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
+.long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
+.long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
+.long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
+.long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
+
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.long 0x03020100,0x0b0a0908,0xffffffff,0xffffffff
+.long 0x03020100,0x0b0a0908,0xffffffff,0xffffffff
+.long 0xffffffff,0xffffffff,0x03020100,0x0b0a0908
+.long 0xffffffff,0xffffffff,0x03020100,0x0b0a0908
+.byte 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.type sha256_block_data_order_shaext,@function
+.align 64
+sha256_block_data_order_shaext:
+_shaext_shortcut:
+.cfi_startproc
+ leaq K256+128(%rip),%rcx
+ movdqu (%rdi),%xmm1
+ movdqu 16(%rdi),%xmm2
+ movdqa 512-128(%rcx),%xmm7
+
+ pshufd $0x1b,%xmm1,%xmm0
+ pshufd $0xb1,%xmm1,%xmm1
+ pshufd $0x1b,%xmm2,%xmm2
+ movdqa %xmm7,%xmm8
+.byte 102,15,58,15,202,8
+ punpcklqdq %xmm0,%xmm2
+ jmp .Loop_shaext
+
+.align 16
+.Loop_shaext:
+ movdqu (%rsi),%xmm3
+ movdqu 16(%rsi),%xmm4
+ movdqu 32(%rsi),%xmm5
+.byte 102,15,56,0,223
+ movdqu 48(%rsi),%xmm6
+
+ movdqa 0-128(%rcx),%xmm0
+ paddd %xmm3,%xmm0
+.byte 102,15,56,0,231
+ movdqa %xmm2,%xmm10
+.byte 15,56,203,209
+ pshufd $0x0e,%xmm0,%xmm0
+ nop
+ movdqa %xmm1,%xmm9
+.byte 15,56,203,202
+
+ movdqa 32-128(%rcx),%xmm0
+ paddd %xmm4,%xmm0
+.byte 102,15,56,0,239
+.byte 15,56,203,209
+ pshufd $0x0e,%xmm0,%xmm0
+ leaq 64(%rsi),%rsi
+.byte 15,56,204,220
+.byte 15,56,203,202
+
+ movdqa 64-128(%rcx),%xmm0
+ paddd %xmm5,%xmm0
+.byte 102,15,56,0,247
+.byte 15,56,203,209
+ pshufd $0x0e,%xmm0,%xmm0
+ movdqa %xmm6,%xmm7
+.byte 102,15,58,15,253,4
+ nop
+ paddd %xmm7,%xmm3
+.byte 15,56,204,229
+.byte 15,56,203,202
+
+ movdqa 96-128(%rcx),%xmm0
+ paddd %xmm6,%xmm0
+.byte 15,56,205,222
+.byte 15,56,203,209
+ pshufd $0x0e,%xmm0,%xmm0
+ movdqa %xmm3,%xmm7
+.byte 102,15,58,15,254,4
+ nop
+ paddd %xmm7,%xmm4
+.byte 15,56,204,238
+.byte 15,56,203,202
+ movdqa 128-128(%rcx),%xmm0
+ paddd %xmm3,%xmm0
+.byte 15,56,205,227
+.byte 15,56,203,209
+ pshufd $0x0e,%xmm0,%xmm0
+ movdqa %xmm4,%xmm7
+.byte 102,15,58,15,251,4
+ nop
+ paddd %xmm7,%xmm5
+.byte 15,56,204,243
+.byte 15,56,203,202
+ movdqa 160-128(%rcx),%xmm0
+ paddd %xmm4,%xmm0
+.byte 15,56,205,236
+.byte 15,56,203,209
+ pshufd $0x0e,%xmm0,%xmm0
+ movdqa %xmm5,%xmm7
+.byte 102,15,58,15,252,4
+ nop
+ paddd %xmm7,%xmm6
+.byte 15,56,204,220
+.byte 15,56,203,202
+ movdqa 192-128(%rcx),%xmm0
+ paddd %xmm5,%xmm0
+.byte 15,56,205,245
+.byte 15,56,203,209
+ pshufd $0x0e,%xmm0,%xmm0
+ movdqa %xmm6,%xmm7
+.byte 102,15,58,15,253,4
+ nop
+ paddd %xmm7,%xmm3
+.byte 15,56,204,229
+.byte 15,56,203,202
+ movdqa 224-128(%rcx),%xmm0
+ paddd %xmm6,%xmm0
+.byte 15,56,205,222
+.byte 15,56,203,209
+ pshufd $0x0e,%xmm0,%xmm0
+ movdqa %xmm3,%xmm7
+.byte 102,15,58,15,254,4
+ nop
+ paddd %xmm7,%xmm4
+.byte 15,56,204,238
+.byte 15,56,203,202
+ movdqa 256-128(%rcx),%xmm0
+ paddd %xmm3,%xmm0
+.byte 15,56,205,227
+.byte 15,56,203,209
+ pshufd $0x0e,%xmm0,%xmm0
+ movdqa %xmm4,%xmm7
+.byte 102,15,58,15,251,4
+ nop
+ paddd %xmm7,%xmm5
+.byte 15,56,204,243
+.byte 15,56,203,202
+ movdqa 288-128(%rcx),%xmm0
+ paddd %xmm4,%xmm0
+.byte 15,56,205,236
+.byte 15,56,203,209
+ pshufd $0x0e,%xmm0,%xmm0
+ movdqa %xmm5,%xmm7
+.byte 102,15,58,15,252,4
+ nop
+ paddd %xmm7,%xmm6
+.byte 15,56,204,220
+.byte 15,56,203,202
+ movdqa 320-128(%rcx),%xmm0
+ paddd %xmm5,%xmm0
+.byte 15,56,205,245
+.byte 15,56,203,209
+ pshufd $0x0e,%xmm0,%xmm0
+ movdqa %xmm6,%xmm7
+.byte 102,15,58,15,253,4
+ nop
+ paddd %xmm7,%xmm3
+.byte 15,56,204,229
+.byte 15,56,203,202
+ movdqa 352-128(%rcx),%xmm0
+ paddd %xmm6,%xmm0
+.byte 15,56,205,222
+.byte 15,56,203,209
+ pshufd $0x0e,%xmm0,%xmm0
+ movdqa %xmm3,%xmm7
+.byte 102,15,58,15,254,4
+ nop
+ paddd %xmm7,%xmm4
+.byte 15,56,204,238
+.byte 15,56,203,202
+ movdqa 384-128(%rcx),%xmm0
+ paddd %xmm3,%xmm0
+.byte 15,56,205,227
+.byte 15,56,203,209
+ pshufd $0x0e,%xmm0,%xmm0
+ movdqa %xmm4,%xmm7
+.byte 102,15,58,15,251,4
+ nop
+ paddd %xmm7,%xmm5
+.byte 15,56,204,243
+.byte 15,56,203,202
+ movdqa 416-128(%rcx),%xmm0
+ paddd %xmm4,%xmm0
+.byte 15,56,205,236
+.byte 15,56,203,209
+ pshufd $0x0e,%xmm0,%xmm0
+ movdqa %xmm5,%xmm7
+.byte 102,15,58,15,252,4
+.byte 15,56,203,202
+ paddd %xmm7,%xmm6
+
+ movdqa 448-128(%rcx),%xmm0
+ paddd %xmm5,%xmm0
+.byte 15,56,203,209
+ pshufd $0x0e,%xmm0,%xmm0
+.byte 15,56,205,245
+ movdqa %xmm8,%xmm7
+.byte 15,56,203,202
+
+ movdqa 480-128(%rcx),%xmm0
+ paddd %xmm6,%xmm0
+ nop
+.byte 15,56,203,209
+ pshufd $0x0e,%xmm0,%xmm0
+ decq %rdx
+ nop
+.byte 15,56,203,202
+
+ paddd %xmm10,%xmm2
+ paddd %xmm9,%xmm1
+ jnz .Loop_shaext
+
+ pshufd $0xb1,%xmm2,%xmm2
+ pshufd $0x1b,%xmm1,%xmm7
+ pshufd $0xb1,%xmm1,%xmm1
+ punpckhqdq %xmm2,%xmm1
+.byte 102,15,58,15,215,8
+
+ movdqu %xmm1,(%rdi)
+ movdqu %xmm2,16(%rdi)
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size sha256_block_data_order_shaext,.-sha256_block_data_order_shaext
+.type sha256_block_data_order_ssse3,@function
+.align 64
+sha256_block_data_order_ssse3:
+.cfi_startproc
+.Lssse3_shortcut:
+ movq %rsp,%rax
+.cfi_def_cfa_register %rax
+ pushq %rbx
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_offset %rbp,-24
+ pushq %r12
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_offset %r14,-48
+ pushq %r15
+.cfi_offset %r15,-56
+ shlq $4,%rdx
+ subq $96,%rsp
+ leaq (%rsi,%rdx,4),%rdx
+ andq $-64,%rsp
+ movq %rdi,64+0(%rsp)
+ movq %rsi,64+8(%rsp)
+ movq %rdx,64+16(%rsp)
+ movq %rax,88(%rsp)
+.cfi_escape 0x0f,0x06,0x77,0xd8,0x00,0x06,0x23,0x08
+.Lprologue_ssse3:
+
+ movl 0(%rdi),%eax
+ movl 4(%rdi),%ebx
+ movl 8(%rdi),%ecx
+ movl 12(%rdi),%edx
+ movl 16(%rdi),%r8d
+ movl 20(%rdi),%r9d
+ movl 24(%rdi),%r10d
+ movl 28(%rdi),%r11d
+
+
+ jmp .Lloop_ssse3
+.align 16
+.Lloop_ssse3:
+ movdqa K256+512(%rip),%xmm7
+ movdqu 0(%rsi),%xmm0
+ movdqu 16(%rsi),%xmm1
+ movdqu 32(%rsi),%xmm2
+.byte 102,15,56,0,199
+ movdqu 48(%rsi),%xmm3
+ leaq K256(%rip),%rbp
+.byte 102,15,56,0,207
+ movdqa 0(%rbp),%xmm4
+ movdqa 32(%rbp),%xmm5
+.byte 102,15,56,0,215
+ paddd %xmm0,%xmm4
+ movdqa 64(%rbp),%xmm6
+.byte 102,15,56,0,223
+ movdqa 96(%rbp),%xmm7
+ paddd %xmm1,%xmm5
+ paddd %xmm2,%xmm6
+ paddd %xmm3,%xmm7
+ movdqa %xmm4,0(%rsp)
+ movl %eax,%r14d
+ movdqa %xmm5,16(%rsp)
+ movl %ebx,%edi
+ movdqa %xmm6,32(%rsp)
+ xorl %ecx,%edi
+ movdqa %xmm7,48(%rsp)
+ movl %r8d,%r13d
+ jmp .Lssse3_00_47
+
+.align 16
+.Lssse3_00_47:
+ subq $-128,%rbp
+ rorl $14,%r13d
+ movdqa %xmm1,%xmm4
+ movl %r14d,%eax
+ movl %r9d,%r12d
+ movdqa %xmm3,%xmm7
+ rorl $9,%r14d
+ xorl %r8d,%r13d
+ xorl %r10d,%r12d
+ rorl $5,%r13d
+ xorl %eax,%r14d
+.byte 102,15,58,15,224,4
+ andl %r8d,%r12d
+ xorl %r8d,%r13d
+.byte 102,15,58,15,250,4
+ addl 0(%rsp),%r11d
+ movl %eax,%r15d
+ xorl %r10d,%r12d
+ rorl $11,%r14d
+ movdqa %xmm4,%xmm5
+ xorl %ebx,%r15d
+ addl %r12d,%r11d
+ movdqa %xmm4,%xmm6
+ rorl $6,%r13d
+ andl %r15d,%edi
+ psrld $3,%xmm4
+ xorl %eax,%r14d
+ addl %r13d,%r11d
+ xorl %ebx,%edi
+ paddd %xmm7,%xmm0
+ rorl $2,%r14d
+ addl %r11d,%edx
+ psrld $7,%xmm6
+ addl %edi,%r11d
+ movl %edx,%r13d
+ pshufd $250,%xmm3,%xmm7
+ addl %r11d,%r14d
+ rorl $14,%r13d
+ pslld $14,%xmm5
+ movl %r14d,%r11d
+ movl %r8d,%r12d
+ pxor %xmm6,%xmm4
+ rorl $9,%r14d
+ xorl %edx,%r13d
+ xorl %r9d,%r12d
+ rorl $5,%r13d
+ psrld $11,%xmm6
+ xorl %r11d,%r14d
+ pxor %xmm5,%xmm4
+ andl %edx,%r12d
+ xorl %edx,%r13d
+ pslld $11,%xmm5
+ addl 4(%rsp),%r10d
+ movl %r11d,%edi
+ pxor %xmm6,%xmm4
+ xorl %r9d,%r12d
+ rorl $11,%r14d
+ movdqa %xmm7,%xmm6
+ xorl %eax,%edi
+ addl %r12d,%r10d
+ pxor %xmm5,%xmm4
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r11d,%r14d
+ psrld $10,%xmm7
+ addl %r13d,%r10d
+ xorl %eax,%r15d
+ paddd %xmm4,%xmm0
+ rorl $2,%r14d
+ addl %r10d,%ecx
+ psrlq $17,%xmm6
+ addl %r15d,%r10d
+ movl %ecx,%r13d
+ addl %r10d,%r14d
+ pxor %xmm6,%xmm7
+ rorl $14,%r13d
+ movl %r14d,%r10d
+ movl %edx,%r12d
+ rorl $9,%r14d
+ psrlq $2,%xmm6
+ xorl %ecx,%r13d
+ xorl %r8d,%r12d
+ pxor %xmm6,%xmm7
+ rorl $5,%r13d
+ xorl %r10d,%r14d
+ andl %ecx,%r12d
+ pshufd $128,%xmm7,%xmm7
+ xorl %ecx,%r13d
+ addl 8(%rsp),%r9d
+ movl %r10d,%r15d
+ psrldq $8,%xmm7
+ xorl %r8d,%r12d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+ addl %r12d,%r9d
+ rorl $6,%r13d
+ paddd %xmm7,%xmm0
+ andl %r15d,%edi
+ xorl %r10d,%r14d
+ addl %r13d,%r9d
+ pshufd $80,%xmm0,%xmm7
+ xorl %r11d,%edi
+ rorl $2,%r14d
+ addl %r9d,%ebx
+ movdqa %xmm7,%xmm6
+ addl %edi,%r9d
+ movl %ebx,%r13d
+ psrld $10,%xmm7
+ addl %r9d,%r14d
+ rorl $14,%r13d
+ psrlq $17,%xmm6
+ movl %r14d,%r9d
+ movl %ecx,%r12d
+ pxor %xmm6,%xmm7
+ rorl $9,%r14d
+ xorl %ebx,%r13d
+ xorl %edx,%r12d
+ rorl $5,%r13d
+ xorl %r9d,%r14d
+ psrlq $2,%xmm6
+ andl %ebx,%r12d
+ xorl %ebx,%r13d
+ addl 12(%rsp),%r8d
+ pxor %xmm6,%xmm7
+ movl %r9d,%edi
+ xorl %edx,%r12d
+ rorl $11,%r14d
+ pshufd $8,%xmm7,%xmm7
+ xorl %r10d,%edi
+ addl %r12d,%r8d
+ movdqa 0(%rbp),%xmm6
+ rorl $6,%r13d
+ andl %edi,%r15d
+ pslldq $8,%xmm7
+ xorl %r9d,%r14d
+ addl %r13d,%r8d
+ xorl %r10d,%r15d
+ paddd %xmm7,%xmm0
+ rorl $2,%r14d
+ addl %r8d,%eax
+ addl %r15d,%r8d
+ paddd %xmm0,%xmm6
+ movl %eax,%r13d
+ addl %r8d,%r14d
+ movdqa %xmm6,0(%rsp)
+ rorl $14,%r13d
+ movdqa %xmm2,%xmm4
+ movl %r14d,%r8d
+ movl %ebx,%r12d
+ movdqa %xmm0,%xmm7
+ rorl $9,%r14d
+ xorl %eax,%r13d
+ xorl %ecx,%r12d
+ rorl $5,%r13d
+ xorl %r8d,%r14d
+.byte 102,15,58,15,225,4
+ andl %eax,%r12d
+ xorl %eax,%r13d
+.byte 102,15,58,15,251,4
+ addl 16(%rsp),%edx
+ movl %r8d,%r15d
+ xorl %ecx,%r12d
+ rorl $11,%r14d
+ movdqa %xmm4,%xmm5
+ xorl %r9d,%r15d
+ addl %r12d,%edx
+ movdqa %xmm4,%xmm6
+ rorl $6,%r13d
+ andl %r15d,%edi
+ psrld $3,%xmm4
+ xorl %r8d,%r14d
+ addl %r13d,%edx
+ xorl %r9d,%edi
+ paddd %xmm7,%xmm1
+ rorl $2,%r14d
+ addl %edx,%r11d
+ psrld $7,%xmm6
+ addl %edi,%edx
+ movl %r11d,%r13d
+ pshufd $250,%xmm0,%xmm7
+ addl %edx,%r14d
+ rorl $14,%r13d
+ pslld $14,%xmm5
+ movl %r14d,%edx
+ movl %eax,%r12d
+ pxor %xmm6,%xmm4
+ rorl $9,%r14d
+ xorl %r11d,%r13d
+ xorl %ebx,%r12d
+ rorl $5,%r13d
+ psrld $11,%xmm6
+ xorl %edx,%r14d
+ pxor %xmm5,%xmm4
+ andl %r11d,%r12d
+ xorl %r11d,%r13d
+ pslld $11,%xmm5
+ addl 20(%rsp),%ecx
+ movl %edx,%edi
+ pxor %xmm6,%xmm4
+ xorl %ebx,%r12d
+ rorl $11,%r14d
+ movdqa %xmm7,%xmm6
+ xorl %r8d,%edi
+ addl %r12d,%ecx
+ pxor %xmm5,%xmm4
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %edx,%r14d
+ psrld $10,%xmm7
+ addl %r13d,%ecx
+ xorl %r8d,%r15d
+ paddd %xmm4,%xmm1
+ rorl $2,%r14d
+ addl %ecx,%r10d
+ psrlq $17,%xmm6
+ addl %r15d,%ecx
+ movl %r10d,%r13d
+ addl %ecx,%r14d
+ pxor %xmm6,%xmm7
+ rorl $14,%r13d
+ movl %r14d,%ecx
+ movl %r11d,%r12d
+ rorl $9,%r14d
+ psrlq $2,%xmm6
+ xorl %r10d,%r13d
+ xorl %eax,%r12d
+ pxor %xmm6,%xmm7
+ rorl $5,%r13d
+ xorl %ecx,%r14d
+ andl %r10d,%r12d
+ pshufd $128,%xmm7,%xmm7
+ xorl %r10d,%r13d
+ addl 24(%rsp),%ebx
+ movl %ecx,%r15d
+ psrldq $8,%xmm7
+ xorl %eax,%r12d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+ addl %r12d,%ebx
+ rorl $6,%r13d
+ paddd %xmm7,%xmm1
+ andl %r15d,%edi
+ xorl %ecx,%r14d
+ addl %r13d,%ebx
+ pshufd $80,%xmm1,%xmm7
+ xorl %edx,%edi
+ rorl $2,%r14d
+ addl %ebx,%r9d
+ movdqa %xmm7,%xmm6
+ addl %edi,%ebx
+ movl %r9d,%r13d
+ psrld $10,%xmm7
+ addl %ebx,%r14d
+ rorl $14,%r13d
+ psrlq $17,%xmm6
+ movl %r14d,%ebx
+ movl %r10d,%r12d
+ pxor %xmm6,%xmm7
+ rorl $9,%r14d
+ xorl %r9d,%r13d
+ xorl %r11d,%r12d
+ rorl $5,%r13d
+ xorl %ebx,%r14d
+ psrlq $2,%xmm6
+ andl %r9d,%r12d
+ xorl %r9d,%r13d
+ addl 28(%rsp),%eax
+ pxor %xmm6,%xmm7
+ movl %ebx,%edi
+ xorl %r11d,%r12d
+ rorl $11,%r14d
+ pshufd $8,%xmm7,%xmm7
+ xorl %ecx,%edi
+ addl %r12d,%eax
+ movdqa 32(%rbp),%xmm6
+ rorl $6,%r13d
+ andl %edi,%r15d
+ pslldq $8,%xmm7
+ xorl %ebx,%r14d
+ addl %r13d,%eax
+ xorl %ecx,%r15d
+ paddd %xmm7,%xmm1
+ rorl $2,%r14d
+ addl %eax,%r8d
+ addl %r15d,%eax
+ paddd %xmm1,%xmm6
+ movl %r8d,%r13d
+ addl %eax,%r14d
+ movdqa %xmm6,16(%rsp)
+ rorl $14,%r13d
+ movdqa %xmm3,%xmm4
+ movl %r14d,%eax
+ movl %r9d,%r12d
+ movdqa %xmm1,%xmm7
+ rorl $9,%r14d
+ xorl %r8d,%r13d
+ xorl %r10d,%r12d
+ rorl $5,%r13d
+ xorl %eax,%r14d
+.byte 102,15,58,15,226,4
+ andl %r8d,%r12d
+ xorl %r8d,%r13d
+.byte 102,15,58,15,248,4
+ addl 32(%rsp),%r11d
+ movl %eax,%r15d
+ xorl %r10d,%r12d
+ rorl $11,%r14d
+ movdqa %xmm4,%xmm5
+ xorl %ebx,%r15d
+ addl %r12d,%r11d
+ movdqa %xmm4,%xmm6
+ rorl $6,%r13d
+ andl %r15d,%edi
+ psrld $3,%xmm4
+ xorl %eax,%r14d
+ addl %r13d,%r11d
+ xorl %ebx,%edi
+ paddd %xmm7,%xmm2
+ rorl $2,%r14d
+ addl %r11d,%edx
+ psrld $7,%xmm6
+ addl %edi,%r11d
+ movl %edx,%r13d
+ pshufd $250,%xmm1,%xmm7
+ addl %r11d,%r14d
+ rorl $14,%r13d
+ pslld $14,%xmm5
+ movl %r14d,%r11d
+ movl %r8d,%r12d
+ pxor %xmm6,%xmm4
+ rorl $9,%r14d
+ xorl %edx,%r13d
+ xorl %r9d,%r12d
+ rorl $5,%r13d
+ psrld $11,%xmm6
+ xorl %r11d,%r14d
+ pxor %xmm5,%xmm4
+ andl %edx,%r12d
+ xorl %edx,%r13d
+ pslld $11,%xmm5
+ addl 36(%rsp),%r10d
+ movl %r11d,%edi
+ pxor %xmm6,%xmm4
+ xorl %r9d,%r12d
+ rorl $11,%r14d
+ movdqa %xmm7,%xmm6
+ xorl %eax,%edi
+ addl %r12d,%r10d
+ pxor %xmm5,%xmm4
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r11d,%r14d
+ psrld $10,%xmm7
+ addl %r13d,%r10d
+ xorl %eax,%r15d
+ paddd %xmm4,%xmm2
+ rorl $2,%r14d
+ addl %r10d,%ecx
+ psrlq $17,%xmm6
+ addl %r15d,%r10d
+ movl %ecx,%r13d
+ addl %r10d,%r14d
+ pxor %xmm6,%xmm7
+ rorl $14,%r13d
+ movl %r14d,%r10d
+ movl %edx,%r12d
+ rorl $9,%r14d
+ psrlq $2,%xmm6
+ xorl %ecx,%r13d
+ xorl %r8d,%r12d
+ pxor %xmm6,%xmm7
+ rorl $5,%r13d
+ xorl %r10d,%r14d
+ andl %ecx,%r12d
+ pshufd $128,%xmm7,%xmm7
+ xorl %ecx,%r13d
+ addl 40(%rsp),%r9d
+ movl %r10d,%r15d
+ psrldq $8,%xmm7
+ xorl %r8d,%r12d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+ addl %r12d,%r9d
+ rorl $6,%r13d
+ paddd %xmm7,%xmm2
+ andl %r15d,%edi
+ xorl %r10d,%r14d
+ addl %r13d,%r9d
+ pshufd $80,%xmm2,%xmm7
+ xorl %r11d,%edi
+ rorl $2,%r14d
+ addl %r9d,%ebx
+ movdqa %xmm7,%xmm6
+ addl %edi,%r9d
+ movl %ebx,%r13d
+ psrld $10,%xmm7
+ addl %r9d,%r14d
+ rorl $14,%r13d
+ psrlq $17,%xmm6
+ movl %r14d,%r9d
+ movl %ecx,%r12d
+ pxor %xmm6,%xmm7
+ rorl $9,%r14d
+ xorl %ebx,%r13d
+ xorl %edx,%r12d
+ rorl $5,%r13d
+ xorl %r9d,%r14d
+ psrlq $2,%xmm6
+ andl %ebx,%r12d
+ xorl %ebx,%r13d
+ addl 44(%rsp),%r8d
+ pxor %xmm6,%xmm7
+ movl %r9d,%edi
+ xorl %edx,%r12d
+ rorl $11,%r14d
+ pshufd $8,%xmm7,%xmm7
+ xorl %r10d,%edi
+ addl %r12d,%r8d
+ movdqa 64(%rbp),%xmm6
+ rorl $6,%r13d
+ andl %edi,%r15d
+ pslldq $8,%xmm7
+ xorl %r9d,%r14d
+ addl %r13d,%r8d
+ xorl %r10d,%r15d
+ paddd %xmm7,%xmm2
+ rorl $2,%r14d
+ addl %r8d,%eax
+ addl %r15d,%r8d
+ paddd %xmm2,%xmm6
+ movl %eax,%r13d
+ addl %r8d,%r14d
+ movdqa %xmm6,32(%rsp)
+ rorl $14,%r13d
+ movdqa %xmm0,%xmm4
+ movl %r14d,%r8d
+ movl %ebx,%r12d
+ movdqa %xmm2,%xmm7
+ rorl $9,%r14d
+ xorl %eax,%r13d
+ xorl %ecx,%r12d
+ rorl $5,%r13d
+ xorl %r8d,%r14d
+.byte 102,15,58,15,227,4
+ andl %eax,%r12d
+ xorl %eax,%r13d
+.byte 102,15,58,15,249,4
+ addl 48(%rsp),%edx
+ movl %r8d,%r15d
+ xorl %ecx,%r12d
+ rorl $11,%r14d
+ movdqa %xmm4,%xmm5
+ xorl %r9d,%r15d
+ addl %r12d,%edx
+ movdqa %xmm4,%xmm6
+ rorl $6,%r13d
+ andl %r15d,%edi
+ psrld $3,%xmm4
+ xorl %r8d,%r14d
+ addl %r13d,%edx
+ xorl %r9d,%edi
+ paddd %xmm7,%xmm3
+ rorl $2,%r14d
+ addl %edx,%r11d
+ psrld $7,%xmm6
+ addl %edi,%edx
+ movl %r11d,%r13d
+ pshufd $250,%xmm2,%xmm7
+ addl %edx,%r14d
+ rorl $14,%r13d
+ pslld $14,%xmm5
+ movl %r14d,%edx
+ movl %eax,%r12d
+ pxor %xmm6,%xmm4
+ rorl $9,%r14d
+ xorl %r11d,%r13d
+ xorl %ebx,%r12d
+ rorl $5,%r13d
+ psrld $11,%xmm6
+ xorl %edx,%r14d
+ pxor %xmm5,%xmm4
+ andl %r11d,%r12d
+ xorl %r11d,%r13d
+ pslld $11,%xmm5
+ addl 52(%rsp),%ecx
+ movl %edx,%edi
+ pxor %xmm6,%xmm4
+ xorl %ebx,%r12d
+ rorl $11,%r14d
+ movdqa %xmm7,%xmm6
+ xorl %r8d,%edi
+ addl %r12d,%ecx
+ pxor %xmm5,%xmm4
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %edx,%r14d
+ psrld $10,%xmm7
+ addl %r13d,%ecx
+ xorl %r8d,%r15d
+ paddd %xmm4,%xmm3
+ rorl $2,%r14d
+ addl %ecx,%r10d
+ psrlq $17,%xmm6
+ addl %r15d,%ecx
+ movl %r10d,%r13d
+ addl %ecx,%r14d
+ pxor %xmm6,%xmm7
+ rorl $14,%r13d
+ movl %r14d,%ecx
+ movl %r11d,%r12d
+ rorl $9,%r14d
+ psrlq $2,%xmm6
+ xorl %r10d,%r13d
+ xorl %eax,%r12d
+ pxor %xmm6,%xmm7
+ rorl $5,%r13d
+ xorl %ecx,%r14d
+ andl %r10d,%r12d
+ pshufd $128,%xmm7,%xmm7
+ xorl %r10d,%r13d
+ addl 56(%rsp),%ebx
+ movl %ecx,%r15d
+ psrldq $8,%xmm7
+ xorl %eax,%r12d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+ addl %r12d,%ebx
+ rorl $6,%r13d
+ paddd %xmm7,%xmm3
+ andl %r15d,%edi
+ xorl %ecx,%r14d
+ addl %r13d,%ebx
+ pshufd $80,%xmm3,%xmm7
+ xorl %edx,%edi
+ rorl $2,%r14d
+ addl %ebx,%r9d
+ movdqa %xmm7,%xmm6
+ addl %edi,%ebx
+ movl %r9d,%r13d
+ psrld $10,%xmm7
+ addl %ebx,%r14d
+ rorl $14,%r13d
+ psrlq $17,%xmm6
+ movl %r14d,%ebx
+ movl %r10d,%r12d
+ pxor %xmm6,%xmm7
+ rorl $9,%r14d
+ xorl %r9d,%r13d
+ xorl %r11d,%r12d
+ rorl $5,%r13d
+ xorl %ebx,%r14d
+ psrlq $2,%xmm6
+ andl %r9d,%r12d
+ xorl %r9d,%r13d
+ addl 60(%rsp),%eax
+ pxor %xmm6,%xmm7
+ movl %ebx,%edi
+ xorl %r11d,%r12d
+ rorl $11,%r14d
+ pshufd $8,%xmm7,%xmm7
+ xorl %ecx,%edi
+ addl %r12d,%eax
+ movdqa 96(%rbp),%xmm6
+ rorl $6,%r13d
+ andl %edi,%r15d
+ pslldq $8,%xmm7
+ xorl %ebx,%r14d
+ addl %r13d,%eax
+ xorl %ecx,%r15d
+ paddd %xmm7,%xmm3
+ rorl $2,%r14d
+ addl %eax,%r8d
+ addl %r15d,%eax
+ paddd %xmm3,%xmm6
+ movl %r8d,%r13d
+ addl %eax,%r14d
+ movdqa %xmm6,48(%rsp)
+ cmpb $0,131(%rbp)
+ jne .Lssse3_00_47
+ rorl $14,%r13d
+ movl %r14d,%eax
+ movl %r9d,%r12d
+ rorl $9,%r14d
+ xorl %r8d,%r13d
+ xorl %r10d,%r12d
+ rorl $5,%r13d
+ xorl %eax,%r14d
+ andl %r8d,%r12d
+ xorl %r8d,%r13d
+ addl 0(%rsp),%r11d
+ movl %eax,%r15d
+ xorl %r10d,%r12d
+ rorl $11,%r14d
+ xorl %ebx,%r15d
+ addl %r12d,%r11d
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %eax,%r14d
+ addl %r13d,%r11d
+ xorl %ebx,%edi
+ rorl $2,%r14d
+ addl %r11d,%edx
+ addl %edi,%r11d
+ movl %edx,%r13d
+ addl %r11d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r11d
+ movl %r8d,%r12d
+ rorl $9,%r14d
+ xorl %edx,%r13d
+ xorl %r9d,%r12d
+ rorl $5,%r13d
+ xorl %r11d,%r14d
+ andl %edx,%r12d
+ xorl %edx,%r13d
+ addl 4(%rsp),%r10d
+ movl %r11d,%edi
+ xorl %r9d,%r12d
+ rorl $11,%r14d
+ xorl %eax,%edi
+ addl %r12d,%r10d
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r11d,%r14d
+ addl %r13d,%r10d
+ xorl %eax,%r15d
+ rorl $2,%r14d
+ addl %r10d,%ecx
+ addl %r15d,%r10d
+ movl %ecx,%r13d
+ addl %r10d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r10d
+ movl %edx,%r12d
+ rorl $9,%r14d
+ xorl %ecx,%r13d
+ xorl %r8d,%r12d
+ rorl $5,%r13d
+ xorl %r10d,%r14d
+ andl %ecx,%r12d
+ xorl %ecx,%r13d
+ addl 8(%rsp),%r9d
+ movl %r10d,%r15d
+ xorl %r8d,%r12d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+ addl %r12d,%r9d
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %r10d,%r14d
+ addl %r13d,%r9d
+ xorl %r11d,%edi
+ rorl $2,%r14d
+ addl %r9d,%ebx
+ addl %edi,%r9d
+ movl %ebx,%r13d
+ addl %r9d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r9d
+ movl %ecx,%r12d
+ rorl $9,%r14d
+ xorl %ebx,%r13d
+ xorl %edx,%r12d
+ rorl $5,%r13d
+ xorl %r9d,%r14d
+ andl %ebx,%r12d
+ xorl %ebx,%r13d
+ addl 12(%rsp),%r8d
+ movl %r9d,%edi
+ xorl %edx,%r12d
+ rorl $11,%r14d
+ xorl %r10d,%edi
+ addl %r12d,%r8d
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r9d,%r14d
+ addl %r13d,%r8d
+ xorl %r10d,%r15d
+ rorl $2,%r14d
+ addl %r8d,%eax
+ addl %r15d,%r8d
+ movl %eax,%r13d
+ addl %r8d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r8d
+ movl %ebx,%r12d
+ rorl $9,%r14d
+ xorl %eax,%r13d
+ xorl %ecx,%r12d
+ rorl $5,%r13d
+ xorl %r8d,%r14d
+ andl %eax,%r12d
+ xorl %eax,%r13d
+ addl 16(%rsp),%edx
+ movl %r8d,%r15d
+ xorl %ecx,%r12d
+ rorl $11,%r14d
+ xorl %r9d,%r15d
+ addl %r12d,%edx
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %r8d,%r14d
+ addl %r13d,%edx
+ xorl %r9d,%edi
+ rorl $2,%r14d
+ addl %edx,%r11d
+ addl %edi,%edx
+ movl %r11d,%r13d
+ addl %edx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%edx
+ movl %eax,%r12d
+ rorl $9,%r14d
+ xorl %r11d,%r13d
+ xorl %ebx,%r12d
+ rorl $5,%r13d
+ xorl %edx,%r14d
+ andl %r11d,%r12d
+ xorl %r11d,%r13d
+ addl 20(%rsp),%ecx
+ movl %edx,%edi
+ xorl %ebx,%r12d
+ rorl $11,%r14d
+ xorl %r8d,%edi
+ addl %r12d,%ecx
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %edx,%r14d
+ addl %r13d,%ecx
+ xorl %r8d,%r15d
+ rorl $2,%r14d
+ addl %ecx,%r10d
+ addl %r15d,%ecx
+ movl %r10d,%r13d
+ addl %ecx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%ecx
+ movl %r11d,%r12d
+ rorl $9,%r14d
+ xorl %r10d,%r13d
+ xorl %eax,%r12d
+ rorl $5,%r13d
+ xorl %ecx,%r14d
+ andl %r10d,%r12d
+ xorl %r10d,%r13d
+ addl 24(%rsp),%ebx
+ movl %ecx,%r15d
+ xorl %eax,%r12d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+ addl %r12d,%ebx
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %ecx,%r14d
+ addl %r13d,%ebx
+ xorl %edx,%edi
+ rorl $2,%r14d
+ addl %ebx,%r9d
+ addl %edi,%ebx
+ movl %r9d,%r13d
+ addl %ebx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%ebx
+ movl %r10d,%r12d
+ rorl $9,%r14d
+ xorl %r9d,%r13d
+ xorl %r11d,%r12d
+ rorl $5,%r13d
+ xorl %ebx,%r14d
+ andl %r9d,%r12d
+ xorl %r9d,%r13d
+ addl 28(%rsp),%eax
+ movl %ebx,%edi
+ xorl %r11d,%r12d
+ rorl $11,%r14d
+ xorl %ecx,%edi
+ addl %r12d,%eax
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %ebx,%r14d
+ addl %r13d,%eax
+ xorl %ecx,%r15d
+ rorl $2,%r14d
+ addl %eax,%r8d
+ addl %r15d,%eax
+ movl %r8d,%r13d
+ addl %eax,%r14d
+ rorl $14,%r13d
+ movl %r14d,%eax
+ movl %r9d,%r12d
+ rorl $9,%r14d
+ xorl %r8d,%r13d
+ xorl %r10d,%r12d
+ rorl $5,%r13d
+ xorl %eax,%r14d
+ andl %r8d,%r12d
+ xorl %r8d,%r13d
+ addl 32(%rsp),%r11d
+ movl %eax,%r15d
+ xorl %r10d,%r12d
+ rorl $11,%r14d
+ xorl %ebx,%r15d
+ addl %r12d,%r11d
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %eax,%r14d
+ addl %r13d,%r11d
+ xorl %ebx,%edi
+ rorl $2,%r14d
+ addl %r11d,%edx
+ addl %edi,%r11d
+ movl %edx,%r13d
+ addl %r11d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r11d
+ movl %r8d,%r12d
+ rorl $9,%r14d
+ xorl %edx,%r13d
+ xorl %r9d,%r12d
+ rorl $5,%r13d
+ xorl %r11d,%r14d
+ andl %edx,%r12d
+ xorl %edx,%r13d
+ addl 36(%rsp),%r10d
+ movl %r11d,%edi
+ xorl %r9d,%r12d
+ rorl $11,%r14d
+ xorl %eax,%edi
+ addl %r12d,%r10d
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r11d,%r14d
+ addl %r13d,%r10d
+ xorl %eax,%r15d
+ rorl $2,%r14d
+ addl %r10d,%ecx
+ addl %r15d,%r10d
+ movl %ecx,%r13d
+ addl %r10d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r10d
+ movl %edx,%r12d
+ rorl $9,%r14d
+ xorl %ecx,%r13d
+ xorl %r8d,%r12d
+ rorl $5,%r13d
+ xorl %r10d,%r14d
+ andl %ecx,%r12d
+ xorl %ecx,%r13d
+ addl 40(%rsp),%r9d
+ movl %r10d,%r15d
+ xorl %r8d,%r12d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+ addl %r12d,%r9d
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %r10d,%r14d
+ addl %r13d,%r9d
+ xorl %r11d,%edi
+ rorl $2,%r14d
+ addl %r9d,%ebx
+ addl %edi,%r9d
+ movl %ebx,%r13d
+ addl %r9d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r9d
+ movl %ecx,%r12d
+ rorl $9,%r14d
+ xorl %ebx,%r13d
+ xorl %edx,%r12d
+ rorl $5,%r13d
+ xorl %r9d,%r14d
+ andl %ebx,%r12d
+ xorl %ebx,%r13d
+ addl 44(%rsp),%r8d
+ movl %r9d,%edi
+ xorl %edx,%r12d
+ rorl $11,%r14d
+ xorl %r10d,%edi
+ addl %r12d,%r8d
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r9d,%r14d
+ addl %r13d,%r8d
+ xorl %r10d,%r15d
+ rorl $2,%r14d
+ addl %r8d,%eax
+ addl %r15d,%r8d
+ movl %eax,%r13d
+ addl %r8d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r8d
+ movl %ebx,%r12d
+ rorl $9,%r14d
+ xorl %eax,%r13d
+ xorl %ecx,%r12d
+ rorl $5,%r13d
+ xorl %r8d,%r14d
+ andl %eax,%r12d
+ xorl %eax,%r13d
+ addl 48(%rsp),%edx
+ movl %r8d,%r15d
+ xorl %ecx,%r12d
+ rorl $11,%r14d
+ xorl %r9d,%r15d
+ addl %r12d,%edx
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %r8d,%r14d
+ addl %r13d,%edx
+ xorl %r9d,%edi
+ rorl $2,%r14d
+ addl %edx,%r11d
+ addl %edi,%edx
+ movl %r11d,%r13d
+ addl %edx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%edx
+ movl %eax,%r12d
+ rorl $9,%r14d
+ xorl %r11d,%r13d
+ xorl %ebx,%r12d
+ rorl $5,%r13d
+ xorl %edx,%r14d
+ andl %r11d,%r12d
+ xorl %r11d,%r13d
+ addl 52(%rsp),%ecx
+ movl %edx,%edi
+ xorl %ebx,%r12d
+ rorl $11,%r14d
+ xorl %r8d,%edi
+ addl %r12d,%ecx
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %edx,%r14d
+ addl %r13d,%ecx
+ xorl %r8d,%r15d
+ rorl $2,%r14d
+ addl %ecx,%r10d
+ addl %r15d,%ecx
+ movl %r10d,%r13d
+ addl %ecx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%ecx
+ movl %r11d,%r12d
+ rorl $9,%r14d
+ xorl %r10d,%r13d
+ xorl %eax,%r12d
+ rorl $5,%r13d
+ xorl %ecx,%r14d
+ andl %r10d,%r12d
+ xorl %r10d,%r13d
+ addl 56(%rsp),%ebx
+ movl %ecx,%r15d
+ xorl %eax,%r12d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+ addl %r12d,%ebx
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %ecx,%r14d
+ addl %r13d,%ebx
+ xorl %edx,%edi
+ rorl $2,%r14d
+ addl %ebx,%r9d
+ addl %edi,%ebx
+ movl %r9d,%r13d
+ addl %ebx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%ebx
+ movl %r10d,%r12d
+ rorl $9,%r14d
+ xorl %r9d,%r13d
+ xorl %r11d,%r12d
+ rorl $5,%r13d
+ xorl %ebx,%r14d
+ andl %r9d,%r12d
+ xorl %r9d,%r13d
+ addl 60(%rsp),%eax
+ movl %ebx,%edi
+ xorl %r11d,%r12d
+ rorl $11,%r14d
+ xorl %ecx,%edi
+ addl %r12d,%eax
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %ebx,%r14d
+ addl %r13d,%eax
+ xorl %ecx,%r15d
+ rorl $2,%r14d
+ addl %eax,%r8d
+ addl %r15d,%eax
+ movl %r8d,%r13d
+ addl %eax,%r14d
+ movq 64+0(%rsp),%rdi
+ movl %r14d,%eax
+
+ addl 0(%rdi),%eax
+ leaq 64(%rsi),%rsi
+ addl 4(%rdi),%ebx
+ addl 8(%rdi),%ecx
+ addl 12(%rdi),%edx
+ addl 16(%rdi),%r8d
+ addl 20(%rdi),%r9d
+ addl 24(%rdi),%r10d
+ addl 28(%rdi),%r11d
+
+ cmpq 64+16(%rsp),%rsi
+
+ movl %eax,0(%rdi)
+ movl %ebx,4(%rdi)
+ movl %ecx,8(%rdi)
+ movl %edx,12(%rdi)
+ movl %r8d,16(%rdi)
+ movl %r9d,20(%rdi)
+ movl %r10d,24(%rdi)
+ movl %r11d,28(%rdi)
+ jb .Lloop_ssse3
+
+ movq 88(%rsp),%rsi
+.cfi_def_cfa %rsi,8
+ movq -48(%rsi),%r15
+.cfi_restore %r15
+ movq -40(%rsi),%r14
+.cfi_restore %r14
+ movq -32(%rsi),%r13
+.cfi_restore %r13
+ movq -24(%rsi),%r12
+.cfi_restore %r12
+ movq -16(%rsi),%rbp
+.cfi_restore %rbp
+ movq -8(%rsi),%rbx
+.cfi_restore %rbx
+ leaq (%rsi),%rsp
+.cfi_def_cfa_register %rsp
+.Lepilogue_ssse3:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size sha256_block_data_order_ssse3,.-sha256_block_data_order_ssse3
diff --git a/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha512-x86_64.S b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha512-x86_64.S
new file mode 100644
index 0000000000..11e67e5ba1
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/sha/sha512-x86_64.S
@@ -0,0 +1,1811 @@
+# WARNING: do not edit!
+# Generated from openssl/crypto/sha/asm/sha512-x86_64.pl
+#
+# Copyright 2005-2020 The OpenSSL Project Authors. All Rights Reserved.
+#
+# Licensed under the OpenSSL license (the "License"). You may not use
+# this file except in compliance with the License. You can obtain a copy
+# in the file LICENSE in the source distribution or at
+# https://www.openssl.org/source/license.html
+
+.text
+
+
+.globl sha512_block_data_order
+.type sha512_block_data_order,@function
+.align 16
+sha512_block_data_order:
+.cfi_startproc
+ movq %rsp,%rax
+.cfi_def_cfa_register %rax
+ pushq %rbx
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_offset %rbp,-24
+ pushq %r12
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_offset %r14,-48
+ pushq %r15
+.cfi_offset %r15,-56
+ shlq $4,%rdx
+ subq $128+32,%rsp
+ leaq (%rsi,%rdx,8),%rdx
+ andq $-64,%rsp
+ movq %rdi,128+0(%rsp)
+ movq %rsi,128+8(%rsp)
+ movq %rdx,128+16(%rsp)
+ movq %rax,152(%rsp)
+.cfi_escape 0x0f,0x06,0x77,0x98,0x01,0x06,0x23,0x08
+.Lprologue:
+
+ movq 0(%rdi),%rax
+ movq 8(%rdi),%rbx
+ movq 16(%rdi),%rcx
+ movq 24(%rdi),%rdx
+ movq 32(%rdi),%r8
+ movq 40(%rdi),%r9
+ movq 48(%rdi),%r10
+ movq 56(%rdi),%r11
+ jmp .Lloop
+
+.align 16
+.Lloop:
+ movq %rbx,%rdi
+ leaq K512(%rip),%rbp
+ xorq %rcx,%rdi
+ movq 0(%rsi),%r12
+ movq %r8,%r13
+ movq %rax,%r14
+ bswapq %r12
+ rorq $23,%r13
+ movq %r9,%r15
+
+ xorq %r8,%r13
+ rorq $5,%r14
+ xorq %r10,%r15
+
+ movq %r12,0(%rsp)
+ xorq %rax,%r14
+ andq %r8,%r15
+
+ rorq $4,%r13
+ addq %r11,%r12
+ xorq %r10,%r15
+
+ rorq $6,%r14
+ xorq %r8,%r13
+ addq %r15,%r12
+
+ movq %rax,%r15
+ addq (%rbp),%r12
+ xorq %rax,%r14
+
+ xorq %rbx,%r15
+ rorq $14,%r13
+ movq %rbx,%r11
+
+ andq %r15,%rdi
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %rdi,%r11
+ addq %r12,%rdx
+ addq %r12,%r11
+
+ leaq 8(%rbp),%rbp
+ addq %r14,%r11
+ movq 8(%rsi),%r12
+ movq %rdx,%r13
+ movq %r11,%r14
+ bswapq %r12
+ rorq $23,%r13
+ movq %r8,%rdi
+
+ xorq %rdx,%r13
+ rorq $5,%r14
+ xorq %r9,%rdi
+
+ movq %r12,8(%rsp)
+ xorq %r11,%r14
+ andq %rdx,%rdi
+
+ rorq $4,%r13
+ addq %r10,%r12
+ xorq %r9,%rdi
+
+ rorq $6,%r14
+ xorq %rdx,%r13
+ addq %rdi,%r12
+
+ movq %r11,%rdi
+ addq (%rbp),%r12
+ xorq %r11,%r14
+
+ xorq %rax,%rdi
+ rorq $14,%r13
+ movq %rax,%r10
+
+ andq %rdi,%r15
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %r15,%r10
+ addq %r12,%rcx
+ addq %r12,%r10
+
+ leaq 24(%rbp),%rbp
+ addq %r14,%r10
+ movq 16(%rsi),%r12
+ movq %rcx,%r13
+ movq %r10,%r14
+ bswapq %r12
+ rorq $23,%r13
+ movq %rdx,%r15
+
+ xorq %rcx,%r13
+ rorq $5,%r14
+ xorq %r8,%r15
+
+ movq %r12,16(%rsp)
+ xorq %r10,%r14
+ andq %rcx,%r15
+
+ rorq $4,%r13
+ addq %r9,%r12
+ xorq %r8,%r15
+
+ rorq $6,%r14
+ xorq %rcx,%r13
+ addq %r15,%r12
+
+ movq %r10,%r15
+ addq (%rbp),%r12
+ xorq %r10,%r14
+
+ xorq %r11,%r15
+ rorq $14,%r13
+ movq %r11,%r9
+
+ andq %r15,%rdi
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %rdi,%r9
+ addq %r12,%rbx
+ addq %r12,%r9
+
+ leaq 8(%rbp),%rbp
+ addq %r14,%r9
+ movq 24(%rsi),%r12
+ movq %rbx,%r13
+ movq %r9,%r14
+ bswapq %r12
+ rorq $23,%r13
+ movq %rcx,%rdi
+
+ xorq %rbx,%r13
+ rorq $5,%r14
+ xorq %rdx,%rdi
+
+ movq %r12,24(%rsp)
+ xorq %r9,%r14
+ andq %rbx,%rdi
+
+ rorq $4,%r13
+ addq %r8,%r12
+ xorq %rdx,%rdi
+
+ rorq $6,%r14
+ xorq %rbx,%r13
+ addq %rdi,%r12
+
+ movq %r9,%rdi
+ addq (%rbp),%r12
+ xorq %r9,%r14
+
+ xorq %r10,%rdi
+ rorq $14,%r13
+ movq %r10,%r8
+
+ andq %rdi,%r15
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %r15,%r8
+ addq %r12,%rax
+ addq %r12,%r8
+
+ leaq 24(%rbp),%rbp
+ addq %r14,%r8
+ movq 32(%rsi),%r12
+ movq %rax,%r13
+ movq %r8,%r14
+ bswapq %r12
+ rorq $23,%r13
+ movq %rbx,%r15
+
+ xorq %rax,%r13
+ rorq $5,%r14
+ xorq %rcx,%r15
+
+ movq %r12,32(%rsp)
+ xorq %r8,%r14
+ andq %rax,%r15
+
+ rorq $4,%r13
+ addq %rdx,%r12
+ xorq %rcx,%r15
+
+ rorq $6,%r14
+ xorq %rax,%r13
+ addq %r15,%r12
+
+ movq %r8,%r15
+ addq (%rbp),%r12
+ xorq %r8,%r14
+
+ xorq %r9,%r15
+ rorq $14,%r13
+ movq %r9,%rdx
+
+ andq %r15,%rdi
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %rdi,%rdx
+ addq %r12,%r11
+ addq %r12,%rdx
+
+ leaq 8(%rbp),%rbp
+ addq %r14,%rdx
+ movq 40(%rsi),%r12
+ movq %r11,%r13
+ movq %rdx,%r14
+ bswapq %r12
+ rorq $23,%r13
+ movq %rax,%rdi
+
+ xorq %r11,%r13
+ rorq $5,%r14
+ xorq %rbx,%rdi
+
+ movq %r12,40(%rsp)
+ xorq %rdx,%r14
+ andq %r11,%rdi
+
+ rorq $4,%r13
+ addq %rcx,%r12
+ xorq %rbx,%rdi
+
+ rorq $6,%r14
+ xorq %r11,%r13
+ addq %rdi,%r12
+
+ movq %rdx,%rdi
+ addq (%rbp),%r12
+ xorq %rdx,%r14
+
+ xorq %r8,%rdi
+ rorq $14,%r13
+ movq %r8,%rcx
+
+ andq %rdi,%r15
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %r15,%rcx
+ addq %r12,%r10
+ addq %r12,%rcx
+
+ leaq 24(%rbp),%rbp
+ addq %r14,%rcx
+ movq 48(%rsi),%r12
+ movq %r10,%r13
+ movq %rcx,%r14
+ bswapq %r12
+ rorq $23,%r13
+ movq %r11,%r15
+
+ xorq %r10,%r13
+ rorq $5,%r14
+ xorq %rax,%r15
+
+ movq %r12,48(%rsp)
+ xorq %rcx,%r14
+ andq %r10,%r15
+
+ rorq $4,%r13
+ addq %rbx,%r12
+ xorq %rax,%r15
+
+ rorq $6,%r14
+ xorq %r10,%r13
+ addq %r15,%r12
+
+ movq %rcx,%r15
+ addq (%rbp),%r12
+ xorq %rcx,%r14
+
+ xorq %rdx,%r15
+ rorq $14,%r13
+ movq %rdx,%rbx
+
+ andq %r15,%rdi
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %rdi,%rbx
+ addq %r12,%r9
+ addq %r12,%rbx
+
+ leaq 8(%rbp),%rbp
+ addq %r14,%rbx
+ movq 56(%rsi),%r12
+ movq %r9,%r13
+ movq %rbx,%r14
+ bswapq %r12
+ rorq $23,%r13
+ movq %r10,%rdi
+
+ xorq %r9,%r13
+ rorq $5,%r14
+ xorq %r11,%rdi
+
+ movq %r12,56(%rsp)
+ xorq %rbx,%r14
+ andq %r9,%rdi
+
+ rorq $4,%r13
+ addq %rax,%r12
+ xorq %r11,%rdi
+
+ rorq $6,%r14
+ xorq %r9,%r13
+ addq %rdi,%r12
+
+ movq %rbx,%rdi
+ addq (%rbp),%r12
+ xorq %rbx,%r14
+
+ xorq %rcx,%rdi
+ rorq $14,%r13
+ movq %rcx,%rax
+
+ andq %rdi,%r15
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %r15,%rax
+ addq %r12,%r8
+ addq %r12,%rax
+
+ leaq 24(%rbp),%rbp
+ addq %r14,%rax
+ movq 64(%rsi),%r12
+ movq %r8,%r13
+ movq %rax,%r14
+ bswapq %r12
+ rorq $23,%r13
+ movq %r9,%r15
+
+ xorq %r8,%r13
+ rorq $5,%r14
+ xorq %r10,%r15
+
+ movq %r12,64(%rsp)
+ xorq %rax,%r14
+ andq %r8,%r15
+
+ rorq $4,%r13
+ addq %r11,%r12
+ xorq %r10,%r15
+
+ rorq $6,%r14
+ xorq %r8,%r13
+ addq %r15,%r12
+
+ movq %rax,%r15
+ addq (%rbp),%r12
+ xorq %rax,%r14
+
+ xorq %rbx,%r15
+ rorq $14,%r13
+ movq %rbx,%r11
+
+ andq %r15,%rdi
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %rdi,%r11
+ addq %r12,%rdx
+ addq %r12,%r11
+
+ leaq 8(%rbp),%rbp
+ addq %r14,%r11
+ movq 72(%rsi),%r12
+ movq %rdx,%r13
+ movq %r11,%r14
+ bswapq %r12
+ rorq $23,%r13
+ movq %r8,%rdi
+
+ xorq %rdx,%r13
+ rorq $5,%r14
+ xorq %r9,%rdi
+
+ movq %r12,72(%rsp)
+ xorq %r11,%r14
+ andq %rdx,%rdi
+
+ rorq $4,%r13
+ addq %r10,%r12
+ xorq %r9,%rdi
+
+ rorq $6,%r14
+ xorq %rdx,%r13
+ addq %rdi,%r12
+
+ movq %r11,%rdi
+ addq (%rbp),%r12
+ xorq %r11,%r14
+
+ xorq %rax,%rdi
+ rorq $14,%r13
+ movq %rax,%r10
+
+ andq %rdi,%r15
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %r15,%r10
+ addq %r12,%rcx
+ addq %r12,%r10
+
+ leaq 24(%rbp),%rbp
+ addq %r14,%r10
+ movq 80(%rsi),%r12
+ movq %rcx,%r13
+ movq %r10,%r14
+ bswapq %r12
+ rorq $23,%r13
+ movq %rdx,%r15
+
+ xorq %rcx,%r13
+ rorq $5,%r14
+ xorq %r8,%r15
+
+ movq %r12,80(%rsp)
+ xorq %r10,%r14
+ andq %rcx,%r15
+
+ rorq $4,%r13
+ addq %r9,%r12
+ xorq %r8,%r15
+
+ rorq $6,%r14
+ xorq %rcx,%r13
+ addq %r15,%r12
+
+ movq %r10,%r15
+ addq (%rbp),%r12
+ xorq %r10,%r14
+
+ xorq %r11,%r15
+ rorq $14,%r13
+ movq %r11,%r9
+
+ andq %r15,%rdi
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %rdi,%r9
+ addq %r12,%rbx
+ addq %r12,%r9
+
+ leaq 8(%rbp),%rbp
+ addq %r14,%r9
+ movq 88(%rsi),%r12
+ movq %rbx,%r13
+ movq %r9,%r14
+ bswapq %r12
+ rorq $23,%r13
+ movq %rcx,%rdi
+
+ xorq %rbx,%r13
+ rorq $5,%r14
+ xorq %rdx,%rdi
+
+ movq %r12,88(%rsp)
+ xorq %r9,%r14
+ andq %rbx,%rdi
+
+ rorq $4,%r13
+ addq %r8,%r12
+ xorq %rdx,%rdi
+
+ rorq $6,%r14
+ xorq %rbx,%r13
+ addq %rdi,%r12
+
+ movq %r9,%rdi
+ addq (%rbp),%r12
+ xorq %r9,%r14
+
+ xorq %r10,%rdi
+ rorq $14,%r13
+ movq %r10,%r8
+
+ andq %rdi,%r15
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %r15,%r8
+ addq %r12,%rax
+ addq %r12,%r8
+
+ leaq 24(%rbp),%rbp
+ addq %r14,%r8
+ movq 96(%rsi),%r12
+ movq %rax,%r13
+ movq %r8,%r14
+ bswapq %r12
+ rorq $23,%r13
+ movq %rbx,%r15
+
+ xorq %rax,%r13
+ rorq $5,%r14
+ xorq %rcx,%r15
+
+ movq %r12,96(%rsp)
+ xorq %r8,%r14
+ andq %rax,%r15
+
+ rorq $4,%r13
+ addq %rdx,%r12
+ xorq %rcx,%r15
+
+ rorq $6,%r14
+ xorq %rax,%r13
+ addq %r15,%r12
+
+ movq %r8,%r15
+ addq (%rbp),%r12
+ xorq %r8,%r14
+
+ xorq %r9,%r15
+ rorq $14,%r13
+ movq %r9,%rdx
+
+ andq %r15,%rdi
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %rdi,%rdx
+ addq %r12,%r11
+ addq %r12,%rdx
+
+ leaq 8(%rbp),%rbp
+ addq %r14,%rdx
+ movq 104(%rsi),%r12
+ movq %r11,%r13
+ movq %rdx,%r14
+ bswapq %r12
+ rorq $23,%r13
+ movq %rax,%rdi
+
+ xorq %r11,%r13
+ rorq $5,%r14
+ xorq %rbx,%rdi
+
+ movq %r12,104(%rsp)
+ xorq %rdx,%r14
+ andq %r11,%rdi
+
+ rorq $4,%r13
+ addq %rcx,%r12
+ xorq %rbx,%rdi
+
+ rorq $6,%r14
+ xorq %r11,%r13
+ addq %rdi,%r12
+
+ movq %rdx,%rdi
+ addq (%rbp),%r12
+ xorq %rdx,%r14
+
+ xorq %r8,%rdi
+ rorq $14,%r13
+ movq %r8,%rcx
+
+ andq %rdi,%r15
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %r15,%rcx
+ addq %r12,%r10
+ addq %r12,%rcx
+
+ leaq 24(%rbp),%rbp
+ addq %r14,%rcx
+ movq 112(%rsi),%r12
+ movq %r10,%r13
+ movq %rcx,%r14
+ bswapq %r12
+ rorq $23,%r13
+ movq %r11,%r15
+
+ xorq %r10,%r13
+ rorq $5,%r14
+ xorq %rax,%r15
+
+ movq %r12,112(%rsp)
+ xorq %rcx,%r14
+ andq %r10,%r15
+
+ rorq $4,%r13
+ addq %rbx,%r12
+ xorq %rax,%r15
+
+ rorq $6,%r14
+ xorq %r10,%r13
+ addq %r15,%r12
+
+ movq %rcx,%r15
+ addq (%rbp),%r12
+ xorq %rcx,%r14
+
+ xorq %rdx,%r15
+ rorq $14,%r13
+ movq %rdx,%rbx
+
+ andq %r15,%rdi
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %rdi,%rbx
+ addq %r12,%r9
+ addq %r12,%rbx
+
+ leaq 8(%rbp),%rbp
+ addq %r14,%rbx
+ movq 120(%rsi),%r12
+ movq %r9,%r13
+ movq %rbx,%r14
+ bswapq %r12
+ rorq $23,%r13
+ movq %r10,%rdi
+
+ xorq %r9,%r13
+ rorq $5,%r14
+ xorq %r11,%rdi
+
+ movq %r12,120(%rsp)
+ xorq %rbx,%r14
+ andq %r9,%rdi
+
+ rorq $4,%r13
+ addq %rax,%r12
+ xorq %r11,%rdi
+
+ rorq $6,%r14
+ xorq %r9,%r13
+ addq %rdi,%r12
+
+ movq %rbx,%rdi
+ addq (%rbp),%r12
+ xorq %rbx,%r14
+
+ xorq %rcx,%rdi
+ rorq $14,%r13
+ movq %rcx,%rax
+
+ andq %rdi,%r15
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %r15,%rax
+ addq %r12,%r8
+ addq %r12,%rax
+
+ leaq 24(%rbp),%rbp
+ jmp .Lrounds_16_xx
+.align 16
+.Lrounds_16_xx:
+ movq 8(%rsp),%r13
+ movq 112(%rsp),%r15
+
+ movq %r13,%r12
+ rorq $7,%r13
+ addq %r14,%rax
+ movq %r15,%r14
+ rorq $42,%r15
+
+ xorq %r12,%r13
+ shrq $7,%r12
+ rorq $1,%r13
+ xorq %r14,%r15
+ shrq $6,%r14
+
+ rorq $19,%r15
+ xorq %r13,%r12
+ xorq %r14,%r15
+ addq 72(%rsp),%r12
+
+ addq 0(%rsp),%r12
+ movq %r8,%r13
+ addq %r15,%r12
+ movq %rax,%r14
+ rorq $23,%r13
+ movq %r9,%r15
+
+ xorq %r8,%r13
+ rorq $5,%r14
+ xorq %r10,%r15
+
+ movq %r12,0(%rsp)
+ xorq %rax,%r14
+ andq %r8,%r15
+
+ rorq $4,%r13
+ addq %r11,%r12
+ xorq %r10,%r15
+
+ rorq $6,%r14
+ xorq %r8,%r13
+ addq %r15,%r12
+
+ movq %rax,%r15
+ addq (%rbp),%r12
+ xorq %rax,%r14
+
+ xorq %rbx,%r15
+ rorq $14,%r13
+ movq %rbx,%r11
+
+ andq %r15,%rdi
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %rdi,%r11
+ addq %r12,%rdx
+ addq %r12,%r11
+
+ leaq 8(%rbp),%rbp
+ movq 16(%rsp),%r13
+ movq 120(%rsp),%rdi
+
+ movq %r13,%r12
+ rorq $7,%r13
+ addq %r14,%r11
+ movq %rdi,%r14
+ rorq $42,%rdi
+
+ xorq %r12,%r13
+ shrq $7,%r12
+ rorq $1,%r13
+ xorq %r14,%rdi
+ shrq $6,%r14
+
+ rorq $19,%rdi
+ xorq %r13,%r12
+ xorq %r14,%rdi
+ addq 80(%rsp),%r12
+
+ addq 8(%rsp),%r12
+ movq %rdx,%r13
+ addq %rdi,%r12
+ movq %r11,%r14
+ rorq $23,%r13
+ movq %r8,%rdi
+
+ xorq %rdx,%r13
+ rorq $5,%r14
+ xorq %r9,%rdi
+
+ movq %r12,8(%rsp)
+ xorq %r11,%r14
+ andq %rdx,%rdi
+
+ rorq $4,%r13
+ addq %r10,%r12
+ xorq %r9,%rdi
+
+ rorq $6,%r14
+ xorq %rdx,%r13
+ addq %rdi,%r12
+
+ movq %r11,%rdi
+ addq (%rbp),%r12
+ xorq %r11,%r14
+
+ xorq %rax,%rdi
+ rorq $14,%r13
+ movq %rax,%r10
+
+ andq %rdi,%r15
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %r15,%r10
+ addq %r12,%rcx
+ addq %r12,%r10
+
+ leaq 24(%rbp),%rbp
+ movq 24(%rsp),%r13
+ movq 0(%rsp),%r15
+
+ movq %r13,%r12
+ rorq $7,%r13
+ addq %r14,%r10
+ movq %r15,%r14
+ rorq $42,%r15
+
+ xorq %r12,%r13
+ shrq $7,%r12
+ rorq $1,%r13
+ xorq %r14,%r15
+ shrq $6,%r14
+
+ rorq $19,%r15
+ xorq %r13,%r12
+ xorq %r14,%r15
+ addq 88(%rsp),%r12
+
+ addq 16(%rsp),%r12
+ movq %rcx,%r13
+ addq %r15,%r12
+ movq %r10,%r14
+ rorq $23,%r13
+ movq %rdx,%r15
+
+ xorq %rcx,%r13
+ rorq $5,%r14
+ xorq %r8,%r15
+
+ movq %r12,16(%rsp)
+ xorq %r10,%r14
+ andq %rcx,%r15
+
+ rorq $4,%r13
+ addq %r9,%r12
+ xorq %r8,%r15
+
+ rorq $6,%r14
+ xorq %rcx,%r13
+ addq %r15,%r12
+
+ movq %r10,%r15
+ addq (%rbp),%r12
+ xorq %r10,%r14
+
+ xorq %r11,%r15
+ rorq $14,%r13
+ movq %r11,%r9
+
+ andq %r15,%rdi
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %rdi,%r9
+ addq %r12,%rbx
+ addq %r12,%r9
+
+ leaq 8(%rbp),%rbp
+ movq 32(%rsp),%r13
+ movq 8(%rsp),%rdi
+
+ movq %r13,%r12
+ rorq $7,%r13
+ addq %r14,%r9
+ movq %rdi,%r14
+ rorq $42,%rdi
+
+ xorq %r12,%r13
+ shrq $7,%r12
+ rorq $1,%r13
+ xorq %r14,%rdi
+ shrq $6,%r14
+
+ rorq $19,%rdi
+ xorq %r13,%r12
+ xorq %r14,%rdi
+ addq 96(%rsp),%r12
+
+ addq 24(%rsp),%r12
+ movq %rbx,%r13
+ addq %rdi,%r12
+ movq %r9,%r14
+ rorq $23,%r13
+ movq %rcx,%rdi
+
+ xorq %rbx,%r13
+ rorq $5,%r14
+ xorq %rdx,%rdi
+
+ movq %r12,24(%rsp)
+ xorq %r9,%r14
+ andq %rbx,%rdi
+
+ rorq $4,%r13
+ addq %r8,%r12
+ xorq %rdx,%rdi
+
+ rorq $6,%r14
+ xorq %rbx,%r13
+ addq %rdi,%r12
+
+ movq %r9,%rdi
+ addq (%rbp),%r12
+ xorq %r9,%r14
+
+ xorq %r10,%rdi
+ rorq $14,%r13
+ movq %r10,%r8
+
+ andq %rdi,%r15
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %r15,%r8
+ addq %r12,%rax
+ addq %r12,%r8
+
+ leaq 24(%rbp),%rbp
+ movq 40(%rsp),%r13
+ movq 16(%rsp),%r15
+
+ movq %r13,%r12
+ rorq $7,%r13
+ addq %r14,%r8
+ movq %r15,%r14
+ rorq $42,%r15
+
+ xorq %r12,%r13
+ shrq $7,%r12
+ rorq $1,%r13
+ xorq %r14,%r15
+ shrq $6,%r14
+
+ rorq $19,%r15
+ xorq %r13,%r12
+ xorq %r14,%r15
+ addq 104(%rsp),%r12
+
+ addq 32(%rsp),%r12
+ movq %rax,%r13
+ addq %r15,%r12
+ movq %r8,%r14
+ rorq $23,%r13
+ movq %rbx,%r15
+
+ xorq %rax,%r13
+ rorq $5,%r14
+ xorq %rcx,%r15
+
+ movq %r12,32(%rsp)
+ xorq %r8,%r14
+ andq %rax,%r15
+
+ rorq $4,%r13
+ addq %rdx,%r12
+ xorq %rcx,%r15
+
+ rorq $6,%r14
+ xorq %rax,%r13
+ addq %r15,%r12
+
+ movq %r8,%r15
+ addq (%rbp),%r12
+ xorq %r8,%r14
+
+ xorq %r9,%r15
+ rorq $14,%r13
+ movq %r9,%rdx
+
+ andq %r15,%rdi
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %rdi,%rdx
+ addq %r12,%r11
+ addq %r12,%rdx
+
+ leaq 8(%rbp),%rbp
+ movq 48(%rsp),%r13
+ movq 24(%rsp),%rdi
+
+ movq %r13,%r12
+ rorq $7,%r13
+ addq %r14,%rdx
+ movq %rdi,%r14
+ rorq $42,%rdi
+
+ xorq %r12,%r13
+ shrq $7,%r12
+ rorq $1,%r13
+ xorq %r14,%rdi
+ shrq $6,%r14
+
+ rorq $19,%rdi
+ xorq %r13,%r12
+ xorq %r14,%rdi
+ addq 112(%rsp),%r12
+
+ addq 40(%rsp),%r12
+ movq %r11,%r13
+ addq %rdi,%r12
+ movq %rdx,%r14
+ rorq $23,%r13
+ movq %rax,%rdi
+
+ xorq %r11,%r13
+ rorq $5,%r14
+ xorq %rbx,%rdi
+
+ movq %r12,40(%rsp)
+ xorq %rdx,%r14
+ andq %r11,%rdi
+
+ rorq $4,%r13
+ addq %rcx,%r12
+ xorq %rbx,%rdi
+
+ rorq $6,%r14
+ xorq %r11,%r13
+ addq %rdi,%r12
+
+ movq %rdx,%rdi
+ addq (%rbp),%r12
+ xorq %rdx,%r14
+
+ xorq %r8,%rdi
+ rorq $14,%r13
+ movq %r8,%rcx
+
+ andq %rdi,%r15
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %r15,%rcx
+ addq %r12,%r10
+ addq %r12,%rcx
+
+ leaq 24(%rbp),%rbp
+ movq 56(%rsp),%r13
+ movq 32(%rsp),%r15
+
+ movq %r13,%r12
+ rorq $7,%r13
+ addq %r14,%rcx
+ movq %r15,%r14
+ rorq $42,%r15
+
+ xorq %r12,%r13
+ shrq $7,%r12
+ rorq $1,%r13
+ xorq %r14,%r15
+ shrq $6,%r14
+
+ rorq $19,%r15
+ xorq %r13,%r12
+ xorq %r14,%r15
+ addq 120(%rsp),%r12
+
+ addq 48(%rsp),%r12
+ movq %r10,%r13
+ addq %r15,%r12
+ movq %rcx,%r14
+ rorq $23,%r13
+ movq %r11,%r15
+
+ xorq %r10,%r13
+ rorq $5,%r14
+ xorq %rax,%r15
+
+ movq %r12,48(%rsp)
+ xorq %rcx,%r14
+ andq %r10,%r15
+
+ rorq $4,%r13
+ addq %rbx,%r12
+ xorq %rax,%r15
+
+ rorq $6,%r14
+ xorq %r10,%r13
+ addq %r15,%r12
+
+ movq %rcx,%r15
+ addq (%rbp),%r12
+ xorq %rcx,%r14
+
+ xorq %rdx,%r15
+ rorq $14,%r13
+ movq %rdx,%rbx
+
+ andq %r15,%rdi
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %rdi,%rbx
+ addq %r12,%r9
+ addq %r12,%rbx
+
+ leaq 8(%rbp),%rbp
+ movq 64(%rsp),%r13
+ movq 40(%rsp),%rdi
+
+ movq %r13,%r12
+ rorq $7,%r13
+ addq %r14,%rbx
+ movq %rdi,%r14
+ rorq $42,%rdi
+
+ xorq %r12,%r13
+ shrq $7,%r12
+ rorq $1,%r13
+ xorq %r14,%rdi
+ shrq $6,%r14
+
+ rorq $19,%rdi
+ xorq %r13,%r12
+ xorq %r14,%rdi
+ addq 0(%rsp),%r12
+
+ addq 56(%rsp),%r12
+ movq %r9,%r13
+ addq %rdi,%r12
+ movq %rbx,%r14
+ rorq $23,%r13
+ movq %r10,%rdi
+
+ xorq %r9,%r13
+ rorq $5,%r14
+ xorq %r11,%rdi
+
+ movq %r12,56(%rsp)
+ xorq %rbx,%r14
+ andq %r9,%rdi
+
+ rorq $4,%r13
+ addq %rax,%r12
+ xorq %r11,%rdi
+
+ rorq $6,%r14
+ xorq %r9,%r13
+ addq %rdi,%r12
+
+ movq %rbx,%rdi
+ addq (%rbp),%r12
+ xorq %rbx,%r14
+
+ xorq %rcx,%rdi
+ rorq $14,%r13
+ movq %rcx,%rax
+
+ andq %rdi,%r15
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %r15,%rax
+ addq %r12,%r8
+ addq %r12,%rax
+
+ leaq 24(%rbp),%rbp
+ movq 72(%rsp),%r13
+ movq 48(%rsp),%r15
+
+ movq %r13,%r12
+ rorq $7,%r13
+ addq %r14,%rax
+ movq %r15,%r14
+ rorq $42,%r15
+
+ xorq %r12,%r13
+ shrq $7,%r12
+ rorq $1,%r13
+ xorq %r14,%r15
+ shrq $6,%r14
+
+ rorq $19,%r15
+ xorq %r13,%r12
+ xorq %r14,%r15
+ addq 8(%rsp),%r12
+
+ addq 64(%rsp),%r12
+ movq %r8,%r13
+ addq %r15,%r12
+ movq %rax,%r14
+ rorq $23,%r13
+ movq %r9,%r15
+
+ xorq %r8,%r13
+ rorq $5,%r14
+ xorq %r10,%r15
+
+ movq %r12,64(%rsp)
+ xorq %rax,%r14
+ andq %r8,%r15
+
+ rorq $4,%r13
+ addq %r11,%r12
+ xorq %r10,%r15
+
+ rorq $6,%r14
+ xorq %r8,%r13
+ addq %r15,%r12
+
+ movq %rax,%r15
+ addq (%rbp),%r12
+ xorq %rax,%r14
+
+ xorq %rbx,%r15
+ rorq $14,%r13
+ movq %rbx,%r11
+
+ andq %r15,%rdi
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %rdi,%r11
+ addq %r12,%rdx
+ addq %r12,%r11
+
+ leaq 8(%rbp),%rbp
+ movq 80(%rsp),%r13
+ movq 56(%rsp),%rdi
+
+ movq %r13,%r12
+ rorq $7,%r13
+ addq %r14,%r11
+ movq %rdi,%r14
+ rorq $42,%rdi
+
+ xorq %r12,%r13
+ shrq $7,%r12
+ rorq $1,%r13
+ xorq %r14,%rdi
+ shrq $6,%r14
+
+ rorq $19,%rdi
+ xorq %r13,%r12
+ xorq %r14,%rdi
+ addq 16(%rsp),%r12
+
+ addq 72(%rsp),%r12
+ movq %rdx,%r13
+ addq %rdi,%r12
+ movq %r11,%r14
+ rorq $23,%r13
+ movq %r8,%rdi
+
+ xorq %rdx,%r13
+ rorq $5,%r14
+ xorq %r9,%rdi
+
+ movq %r12,72(%rsp)
+ xorq %r11,%r14
+ andq %rdx,%rdi
+
+ rorq $4,%r13
+ addq %r10,%r12
+ xorq %r9,%rdi
+
+ rorq $6,%r14
+ xorq %rdx,%r13
+ addq %rdi,%r12
+
+ movq %r11,%rdi
+ addq (%rbp),%r12
+ xorq %r11,%r14
+
+ xorq %rax,%rdi
+ rorq $14,%r13
+ movq %rax,%r10
+
+ andq %rdi,%r15
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %r15,%r10
+ addq %r12,%rcx
+ addq %r12,%r10
+
+ leaq 24(%rbp),%rbp
+ movq 88(%rsp),%r13
+ movq 64(%rsp),%r15
+
+ movq %r13,%r12
+ rorq $7,%r13
+ addq %r14,%r10
+ movq %r15,%r14
+ rorq $42,%r15
+
+ xorq %r12,%r13
+ shrq $7,%r12
+ rorq $1,%r13
+ xorq %r14,%r15
+ shrq $6,%r14
+
+ rorq $19,%r15
+ xorq %r13,%r12
+ xorq %r14,%r15
+ addq 24(%rsp),%r12
+
+ addq 80(%rsp),%r12
+ movq %rcx,%r13
+ addq %r15,%r12
+ movq %r10,%r14
+ rorq $23,%r13
+ movq %rdx,%r15
+
+ xorq %rcx,%r13
+ rorq $5,%r14
+ xorq %r8,%r15
+
+ movq %r12,80(%rsp)
+ xorq %r10,%r14
+ andq %rcx,%r15
+
+ rorq $4,%r13
+ addq %r9,%r12
+ xorq %r8,%r15
+
+ rorq $6,%r14
+ xorq %rcx,%r13
+ addq %r15,%r12
+
+ movq %r10,%r15
+ addq (%rbp),%r12
+ xorq %r10,%r14
+
+ xorq %r11,%r15
+ rorq $14,%r13
+ movq %r11,%r9
+
+ andq %r15,%rdi
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %rdi,%r9
+ addq %r12,%rbx
+ addq %r12,%r9
+
+ leaq 8(%rbp),%rbp
+ movq 96(%rsp),%r13
+ movq 72(%rsp),%rdi
+
+ movq %r13,%r12
+ rorq $7,%r13
+ addq %r14,%r9
+ movq %rdi,%r14
+ rorq $42,%rdi
+
+ xorq %r12,%r13
+ shrq $7,%r12
+ rorq $1,%r13
+ xorq %r14,%rdi
+ shrq $6,%r14
+
+ rorq $19,%rdi
+ xorq %r13,%r12
+ xorq %r14,%rdi
+ addq 32(%rsp),%r12
+
+ addq 88(%rsp),%r12
+ movq %rbx,%r13
+ addq %rdi,%r12
+ movq %r9,%r14
+ rorq $23,%r13
+ movq %rcx,%rdi
+
+ xorq %rbx,%r13
+ rorq $5,%r14
+ xorq %rdx,%rdi
+
+ movq %r12,88(%rsp)
+ xorq %r9,%r14
+ andq %rbx,%rdi
+
+ rorq $4,%r13
+ addq %r8,%r12
+ xorq %rdx,%rdi
+
+ rorq $6,%r14
+ xorq %rbx,%r13
+ addq %rdi,%r12
+
+ movq %r9,%rdi
+ addq (%rbp),%r12
+ xorq %r9,%r14
+
+ xorq %r10,%rdi
+ rorq $14,%r13
+ movq %r10,%r8
+
+ andq %rdi,%r15
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %r15,%r8
+ addq %r12,%rax
+ addq %r12,%r8
+
+ leaq 24(%rbp),%rbp
+ movq 104(%rsp),%r13
+ movq 80(%rsp),%r15
+
+ movq %r13,%r12
+ rorq $7,%r13
+ addq %r14,%r8
+ movq %r15,%r14
+ rorq $42,%r15
+
+ xorq %r12,%r13
+ shrq $7,%r12
+ rorq $1,%r13
+ xorq %r14,%r15
+ shrq $6,%r14
+
+ rorq $19,%r15
+ xorq %r13,%r12
+ xorq %r14,%r15
+ addq 40(%rsp),%r12
+
+ addq 96(%rsp),%r12
+ movq %rax,%r13
+ addq %r15,%r12
+ movq %r8,%r14
+ rorq $23,%r13
+ movq %rbx,%r15
+
+ xorq %rax,%r13
+ rorq $5,%r14
+ xorq %rcx,%r15
+
+ movq %r12,96(%rsp)
+ xorq %r8,%r14
+ andq %rax,%r15
+
+ rorq $4,%r13
+ addq %rdx,%r12
+ xorq %rcx,%r15
+
+ rorq $6,%r14
+ xorq %rax,%r13
+ addq %r15,%r12
+
+ movq %r8,%r15
+ addq (%rbp),%r12
+ xorq %r8,%r14
+
+ xorq %r9,%r15
+ rorq $14,%r13
+ movq %r9,%rdx
+
+ andq %r15,%rdi
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %rdi,%rdx
+ addq %r12,%r11
+ addq %r12,%rdx
+
+ leaq 8(%rbp),%rbp
+ movq 112(%rsp),%r13
+ movq 88(%rsp),%rdi
+
+ movq %r13,%r12
+ rorq $7,%r13
+ addq %r14,%rdx
+ movq %rdi,%r14
+ rorq $42,%rdi
+
+ xorq %r12,%r13
+ shrq $7,%r12
+ rorq $1,%r13
+ xorq %r14,%rdi
+ shrq $6,%r14
+
+ rorq $19,%rdi
+ xorq %r13,%r12
+ xorq %r14,%rdi
+ addq 48(%rsp),%r12
+
+ addq 104(%rsp),%r12
+ movq %r11,%r13
+ addq %rdi,%r12
+ movq %rdx,%r14
+ rorq $23,%r13
+ movq %rax,%rdi
+
+ xorq %r11,%r13
+ rorq $5,%r14
+ xorq %rbx,%rdi
+
+ movq %r12,104(%rsp)
+ xorq %rdx,%r14
+ andq %r11,%rdi
+
+ rorq $4,%r13
+ addq %rcx,%r12
+ xorq %rbx,%rdi
+
+ rorq $6,%r14
+ xorq %r11,%r13
+ addq %rdi,%r12
+
+ movq %rdx,%rdi
+ addq (%rbp),%r12
+ xorq %rdx,%r14
+
+ xorq %r8,%rdi
+ rorq $14,%r13
+ movq %r8,%rcx
+
+ andq %rdi,%r15
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %r15,%rcx
+ addq %r12,%r10
+ addq %r12,%rcx
+
+ leaq 24(%rbp),%rbp
+ movq 120(%rsp),%r13
+ movq 96(%rsp),%r15
+
+ movq %r13,%r12
+ rorq $7,%r13
+ addq %r14,%rcx
+ movq %r15,%r14
+ rorq $42,%r15
+
+ xorq %r12,%r13
+ shrq $7,%r12
+ rorq $1,%r13
+ xorq %r14,%r15
+ shrq $6,%r14
+
+ rorq $19,%r15
+ xorq %r13,%r12
+ xorq %r14,%r15
+ addq 56(%rsp),%r12
+
+ addq 112(%rsp),%r12
+ movq %r10,%r13
+ addq %r15,%r12
+ movq %rcx,%r14
+ rorq $23,%r13
+ movq %r11,%r15
+
+ xorq %r10,%r13
+ rorq $5,%r14
+ xorq %rax,%r15
+
+ movq %r12,112(%rsp)
+ xorq %rcx,%r14
+ andq %r10,%r15
+
+ rorq $4,%r13
+ addq %rbx,%r12
+ xorq %rax,%r15
+
+ rorq $6,%r14
+ xorq %r10,%r13
+ addq %r15,%r12
+
+ movq %rcx,%r15
+ addq (%rbp),%r12
+ xorq %rcx,%r14
+
+ xorq %rdx,%r15
+ rorq $14,%r13
+ movq %rdx,%rbx
+
+ andq %r15,%rdi
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %rdi,%rbx
+ addq %r12,%r9
+ addq %r12,%rbx
+
+ leaq 8(%rbp),%rbp
+ movq 0(%rsp),%r13
+ movq 104(%rsp),%rdi
+
+ movq %r13,%r12
+ rorq $7,%r13
+ addq %r14,%rbx
+ movq %rdi,%r14
+ rorq $42,%rdi
+
+ xorq %r12,%r13
+ shrq $7,%r12
+ rorq $1,%r13
+ xorq %r14,%rdi
+ shrq $6,%r14
+
+ rorq $19,%rdi
+ xorq %r13,%r12
+ xorq %r14,%rdi
+ addq 64(%rsp),%r12
+
+ addq 120(%rsp),%r12
+ movq %r9,%r13
+ addq %rdi,%r12
+ movq %rbx,%r14
+ rorq $23,%r13
+ movq %r10,%rdi
+
+ xorq %r9,%r13
+ rorq $5,%r14
+ xorq %r11,%rdi
+
+ movq %r12,120(%rsp)
+ xorq %rbx,%r14
+ andq %r9,%rdi
+
+ rorq $4,%r13
+ addq %rax,%r12
+ xorq %r11,%rdi
+
+ rorq $6,%r14
+ xorq %r9,%r13
+ addq %rdi,%r12
+
+ movq %rbx,%rdi
+ addq (%rbp),%r12
+ xorq %rbx,%r14
+
+ xorq %rcx,%rdi
+ rorq $14,%r13
+ movq %rcx,%rax
+
+ andq %rdi,%r15
+ rorq $28,%r14
+ addq %r13,%r12
+
+ xorq %r15,%rax
+ addq %r12,%r8
+ addq %r12,%rax
+
+ leaq 24(%rbp),%rbp
+ cmpb $0,7(%rbp)
+ jnz .Lrounds_16_xx
+
+ movq 128+0(%rsp),%rdi
+ addq %r14,%rax
+ leaq 128(%rsi),%rsi
+
+ addq 0(%rdi),%rax
+ addq 8(%rdi),%rbx
+ addq 16(%rdi),%rcx
+ addq 24(%rdi),%rdx
+ addq 32(%rdi),%r8
+ addq 40(%rdi),%r9
+ addq 48(%rdi),%r10
+ addq 56(%rdi),%r11
+
+ cmpq 128+16(%rsp),%rsi
+
+ movq %rax,0(%rdi)
+ movq %rbx,8(%rdi)
+ movq %rcx,16(%rdi)
+ movq %rdx,24(%rdi)
+ movq %r8,32(%rdi)
+ movq %r9,40(%rdi)
+ movq %r10,48(%rdi)
+ movq %r11,56(%rdi)
+ jb .Lloop
+
+ movq 152(%rsp),%rsi
+.cfi_def_cfa %rsi,8
+ movq -48(%rsi),%r15
+.cfi_restore %r15
+ movq -40(%rsi),%r14
+.cfi_restore %r14
+ movq -32(%rsi),%r13
+.cfi_restore %r13
+ movq -24(%rsi),%r12
+.cfi_restore %r12
+ movq -16(%rsi),%rbp
+.cfi_restore %rbp
+ movq -8(%rsi),%rbx
+.cfi_restore %rbx
+ leaq (%rsi),%rsp
+.cfi_def_cfa_register %rsp
+.Lepilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size sha512_block_data_order,.-sha512_block_data_order
+.align 64
+.type K512,@object
+K512:
+.quad 0x428a2f98d728ae22,0x7137449123ef65cd
+.quad 0x428a2f98d728ae22,0x7137449123ef65cd
+.quad 0xb5c0fbcfec4d3b2f,0xe9b5dba58189dbbc
+.quad 0xb5c0fbcfec4d3b2f,0xe9b5dba58189dbbc
+.quad 0x3956c25bf348b538,0x59f111f1b605d019
+.quad 0x3956c25bf348b538,0x59f111f1b605d019
+.quad 0x923f82a4af194f9b,0xab1c5ed5da6d8118
+.quad 0x923f82a4af194f9b,0xab1c5ed5da6d8118
+.quad 0xd807aa98a3030242,0x12835b0145706fbe
+.quad 0xd807aa98a3030242,0x12835b0145706fbe
+.quad 0x243185be4ee4b28c,0x550c7dc3d5ffb4e2
+.quad 0x243185be4ee4b28c,0x550c7dc3d5ffb4e2
+.quad 0x72be5d74f27b896f,0x80deb1fe3b1696b1
+.quad 0x72be5d74f27b896f,0x80deb1fe3b1696b1
+.quad 0x9bdc06a725c71235,0xc19bf174cf692694
+.quad 0x9bdc06a725c71235,0xc19bf174cf692694
+.quad 0xe49b69c19ef14ad2,0xefbe4786384f25e3
+.quad 0xe49b69c19ef14ad2,0xefbe4786384f25e3
+.quad 0x0fc19dc68b8cd5b5,0x240ca1cc77ac9c65
+.quad 0x0fc19dc68b8cd5b5,0x240ca1cc77ac9c65
+.quad 0x2de92c6f592b0275,0x4a7484aa6ea6e483
+.quad 0x2de92c6f592b0275,0x4a7484aa6ea6e483
+.quad 0x5cb0a9dcbd41fbd4,0x76f988da831153b5
+.quad 0x5cb0a9dcbd41fbd4,0x76f988da831153b5
+.quad 0x983e5152ee66dfab,0xa831c66d2db43210
+.quad 0x983e5152ee66dfab,0xa831c66d2db43210
+.quad 0xb00327c898fb213f,0xbf597fc7beef0ee4
+.quad 0xb00327c898fb213f,0xbf597fc7beef0ee4
+.quad 0xc6e00bf33da88fc2,0xd5a79147930aa725
+.quad 0xc6e00bf33da88fc2,0xd5a79147930aa725
+.quad 0x06ca6351e003826f,0x142929670a0e6e70
+.quad 0x06ca6351e003826f,0x142929670a0e6e70
+.quad 0x27b70a8546d22ffc,0x2e1b21385c26c926
+.quad 0x27b70a8546d22ffc,0x2e1b21385c26c926
+.quad 0x4d2c6dfc5ac42aed,0x53380d139d95b3df
+.quad 0x4d2c6dfc5ac42aed,0x53380d139d95b3df
+.quad 0x650a73548baf63de,0x766a0abb3c77b2a8
+.quad 0x650a73548baf63de,0x766a0abb3c77b2a8
+.quad 0x81c2c92e47edaee6,0x92722c851482353b
+.quad 0x81c2c92e47edaee6,0x92722c851482353b
+.quad 0xa2bfe8a14cf10364,0xa81a664bbc423001
+.quad 0xa2bfe8a14cf10364,0xa81a664bbc423001
+.quad 0xc24b8b70d0f89791,0xc76c51a30654be30
+.quad 0xc24b8b70d0f89791,0xc76c51a30654be30
+.quad 0xd192e819d6ef5218,0xd69906245565a910
+.quad 0xd192e819d6ef5218,0xd69906245565a910
+.quad 0xf40e35855771202a,0x106aa07032bbd1b8
+.quad 0xf40e35855771202a,0x106aa07032bbd1b8
+.quad 0x19a4c116b8d2d0c8,0x1e376c085141ab53
+.quad 0x19a4c116b8d2d0c8,0x1e376c085141ab53
+.quad 0x2748774cdf8eeb99,0x34b0bcb5e19b48a8
+.quad 0x2748774cdf8eeb99,0x34b0bcb5e19b48a8
+.quad 0x391c0cb3c5c95a63,0x4ed8aa4ae3418acb
+.quad 0x391c0cb3c5c95a63,0x4ed8aa4ae3418acb
+.quad 0x5b9cca4f7763e373,0x682e6ff3d6b2b8a3
+.quad 0x5b9cca4f7763e373,0x682e6ff3d6b2b8a3
+.quad 0x748f82ee5defb2fc,0x78a5636f43172f60
+.quad 0x748f82ee5defb2fc,0x78a5636f43172f60
+.quad 0x84c87814a1f0ab72,0x8cc702081a6439ec
+.quad 0x84c87814a1f0ab72,0x8cc702081a6439ec
+.quad 0x90befffa23631e28,0xa4506cebde82bde9
+.quad 0x90befffa23631e28,0xa4506cebde82bde9
+.quad 0xbef9a3f7b2c67915,0xc67178f2e372532b
+.quad 0xbef9a3f7b2c67915,0xc67178f2e372532b
+.quad 0xca273eceea26619c,0xd186b8c721c0c207
+.quad 0xca273eceea26619c,0xd186b8c721c0c207
+.quad 0xeada7dd6cde0eb1e,0xf57d4f7fee6ed178
+.quad 0xeada7dd6cde0eb1e,0xf57d4f7fee6ed178
+.quad 0x06f067aa72176fba,0x0a637dc5a2c898a6
+.quad 0x06f067aa72176fba,0x0a637dc5a2c898a6
+.quad 0x113f9804bef90dae,0x1b710b35131c471b
+.quad 0x113f9804bef90dae,0x1b710b35131c471b
+.quad 0x28db77f523047d84,0x32caab7b40c72493
+.quad 0x28db77f523047d84,0x32caab7b40c72493
+.quad 0x3c9ebe0a15c9bebc,0x431d67c49c100d4c
+.quad 0x3c9ebe0a15c9bebc,0x431d67c49c100d4c
+.quad 0x4cc5d4becb3e42b6,0x597f299cfc657e2a
+.quad 0x4cc5d4becb3e42b6,0x597f299cfc657e2a
+.quad 0x5fcb6fab3ad6faec,0x6c44198c4a475817
+.quad 0x5fcb6fab3ad6faec,0x6c44198c4a475817
+
+.quad 0x0001020304050607,0x08090a0b0c0d0e0f
+.quad 0x0001020304050607,0x08090a0b0c0d0e0f
+.byte 83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
diff --git a/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/x86_64cpuid.S b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/x86_64cpuid.S
new file mode 100644
index 0000000000..cac5f8f32c
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/X64Gcc/crypto/x86_64cpuid.S
@@ -0,0 +1,491 @@
+# WARNING: do not edit!
+# Generated from openssl/crypto/x86_64cpuid.pl
+#
+# Copyright 2005-2020 The OpenSSL Project Authors. All Rights Reserved.
+#
+# Licensed under the OpenSSL license (the "License"). You may not use
+# this file except in compliance with the License. You can obtain a copy
+# in the file LICENSE in the source distribution or at
+# https://www.openssl.org/source/license.html
+
+
+.hidden OPENSSL_cpuid_setup
+.section .init
+ call OPENSSL_cpuid_setup
+
+.hidden OPENSSL_ia32cap_P
+.comm OPENSSL_ia32cap_P,16,4
+
+.text
+
+.globl OPENSSL_atomic_add
+.type OPENSSL_atomic_add,@function
+.align 16
+OPENSSL_atomic_add:
+.cfi_startproc
+ movl (%rdi),%eax
+.Lspin: leaq (%rsi,%rax,1),%r8
+.byte 0xf0
+ cmpxchgl %r8d,(%rdi)
+ jne .Lspin
+ movl %r8d,%eax
+.byte 0x48,0x98
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size OPENSSL_atomic_add,.-OPENSSL_atomic_add
+
+.globl OPENSSL_rdtsc
+.type OPENSSL_rdtsc,@function
+.align 16
+OPENSSL_rdtsc:
+.cfi_startproc
+ rdtsc
+ shlq $32,%rdx
+ orq %rdx,%rax
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size OPENSSL_rdtsc,.-OPENSSL_rdtsc
+
+.globl OPENSSL_ia32_cpuid
+.type OPENSSL_ia32_cpuid,@function
+.align 16
+OPENSSL_ia32_cpuid:
+.cfi_startproc
+ movq %rbx,%r8
+.cfi_register %rbx,%r8
+
+ xorl %eax,%eax
+ movq %rax,8(%rdi)
+ cpuid
+ movl %eax,%r11d
+
+ xorl %eax,%eax
+ cmpl $0x756e6547,%ebx
+ setne %al
+ movl %eax,%r9d
+ cmpl $0x49656e69,%edx
+ setne %al
+ orl %eax,%r9d
+ cmpl $0x6c65746e,%ecx
+ setne %al
+ orl %eax,%r9d
+ jz .Lintel
+
+ cmpl $0x68747541,%ebx
+ setne %al
+ movl %eax,%r10d
+ cmpl $0x69746E65,%edx
+ setne %al
+ orl %eax,%r10d
+ cmpl $0x444D4163,%ecx
+ setne %al
+ orl %eax,%r10d
+ jnz .Lintel
+
+
+ movl $0x80000000,%eax
+ cpuid
+ cmpl $0x80000001,%eax
+ jb .Lintel
+ movl %eax,%r10d
+ movl $0x80000001,%eax
+ cpuid
+ orl %ecx,%r9d
+ andl $0x00000801,%r9d
+
+ cmpl $0x80000008,%r10d
+ jb .Lintel
+
+ movl $0x80000008,%eax
+ cpuid
+ movzbq %cl,%r10
+ incq %r10
+
+ movl $1,%eax
+ cpuid
+ btl $28,%edx
+ jnc .Lgeneric
+ shrl $16,%ebx
+ cmpb %r10b,%bl
+ ja .Lgeneric
+ andl $0xefffffff,%edx
+ jmp .Lgeneric
+
+.Lintel:
+ cmpl $4,%r11d
+ movl $-1,%r10d
+ jb .Lnocacheinfo
+
+ movl $4,%eax
+ movl $0,%ecx
+ cpuid
+ movl %eax,%r10d
+ shrl $14,%r10d
+ andl $0xfff,%r10d
+
+.Lnocacheinfo:
+ movl $1,%eax
+ cpuid
+ movd %eax,%xmm0
+ andl $0xbfefffff,%edx
+ cmpl $0,%r9d
+ jne .Lnotintel
+ orl $0x40000000,%edx
+ andb $15,%ah
+ cmpb $15,%ah
+ jne .LnotP4
+ orl $0x00100000,%edx
+.LnotP4:
+ cmpb $6,%ah
+ jne .Lnotintel
+ andl $0x0fff0ff0,%eax
+ cmpl $0x00050670,%eax
+ je .Lknights
+ cmpl $0x00080650,%eax
+ jne .Lnotintel
+.Lknights:
+ andl $0xfbffffff,%ecx
+
+.Lnotintel:
+ btl $28,%edx
+ jnc .Lgeneric
+ andl $0xefffffff,%edx
+ cmpl $0,%r10d
+ je .Lgeneric
+
+ orl $0x10000000,%edx
+ shrl $16,%ebx
+ cmpb $1,%bl
+ ja .Lgeneric
+ andl $0xefffffff,%edx
+.Lgeneric:
+ andl $0x00000800,%r9d
+ andl $0xfffff7ff,%ecx
+ orl %ecx,%r9d
+
+ movl %edx,%r10d
+
+ cmpl $7,%r11d
+ jb .Lno_extended_info
+ movl $7,%eax
+ xorl %ecx,%ecx
+ cpuid
+ btl $26,%r9d
+ jc .Lnotknights
+ andl $0xfff7ffff,%ebx
+.Lnotknights:
+ movd %xmm0,%eax
+ andl $0x0fff0ff0,%eax
+ cmpl $0x00050650,%eax
+ jne .Lnotskylakex
+ andl $0xfffeffff,%ebx
+
+.Lnotskylakex:
+ movl %ebx,8(%rdi)
+ movl %ecx,12(%rdi)
+.Lno_extended_info:
+
+ btl $27,%r9d
+ jnc .Lclear_avx
+ xorl %ecx,%ecx
+.byte 0x0f,0x01,0xd0
+ andl $0xe6,%eax
+ cmpl $0xe6,%eax
+ je .Ldone
+ andl $0x3fdeffff,8(%rdi)
+
+
+
+
+ andl $6,%eax
+ cmpl $6,%eax
+ je .Ldone
+.Lclear_avx:
+ movl $0xefffe7ff,%eax
+ andl %eax,%r9d
+ movl $0x3fdeffdf,%eax
+ andl %eax,8(%rdi)
+.Ldone:
+ shlq $32,%r9
+ movl %r10d,%eax
+ movq %r8,%rbx
+.cfi_restore %rbx
+ orq %r9,%rax
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size OPENSSL_ia32_cpuid,.-OPENSSL_ia32_cpuid
+
+.globl OPENSSL_cleanse
+.type OPENSSL_cleanse,@function
+.align 16
+OPENSSL_cleanse:
+.cfi_startproc
+ xorq %rax,%rax
+ cmpq $15,%rsi
+ jae .Lot
+ cmpq $0,%rsi
+ je .Lret
+.Little:
+ movb %al,(%rdi)
+ subq $1,%rsi
+ leaq 1(%rdi),%rdi
+ jnz .Little
+.Lret:
+ .byte 0xf3,0xc3
+.align 16
+.Lot:
+ testq $7,%rdi
+ jz .Laligned
+ movb %al,(%rdi)
+ leaq -1(%rsi),%rsi
+ leaq 1(%rdi),%rdi
+ jmp .Lot
+.Laligned:
+ movq %rax,(%rdi)
+ leaq -8(%rsi),%rsi
+ testq $-8,%rsi
+ leaq 8(%rdi),%rdi
+ jnz .Laligned
+ cmpq $0,%rsi
+ jne .Little
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size OPENSSL_cleanse,.-OPENSSL_cleanse
+
+.globl CRYPTO_memcmp
+.type CRYPTO_memcmp,@function
+.align 16
+CRYPTO_memcmp:
+.cfi_startproc
+ xorq %rax,%rax
+ xorq %r10,%r10
+ cmpq $0,%rdx
+ je .Lno_data
+ cmpq $16,%rdx
+ jne .Loop_cmp
+ movq (%rdi),%r10
+ movq 8(%rdi),%r11
+ movq $1,%rdx
+ xorq (%rsi),%r10
+ xorq 8(%rsi),%r11
+ orq %r11,%r10
+ cmovnzq %rdx,%rax
+ .byte 0xf3,0xc3
+
+.align 16
+.Loop_cmp:
+ movb (%rdi),%r10b
+ leaq 1(%rdi),%rdi
+ xorb (%rsi),%r10b
+ leaq 1(%rsi),%rsi
+ orb %r10b,%al
+ decq %rdx
+ jnz .Loop_cmp
+ negq %rax
+ shrq $63,%rax
+.Lno_data:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size CRYPTO_memcmp,.-CRYPTO_memcmp
+.globl OPENSSL_wipe_cpu
+.type OPENSSL_wipe_cpu,@function
+.align 16
+OPENSSL_wipe_cpu:
+.cfi_startproc
+ pxor %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ pxor %xmm2,%xmm2
+ pxor %xmm3,%xmm3
+ pxor %xmm4,%xmm4
+ pxor %xmm5,%xmm5
+ pxor %xmm6,%xmm6
+ pxor %xmm7,%xmm7
+ pxor %xmm8,%xmm8
+ pxor %xmm9,%xmm9
+ pxor %xmm10,%xmm10
+ pxor %xmm11,%xmm11
+ pxor %xmm12,%xmm12
+ pxor %xmm13,%xmm13
+ pxor %xmm14,%xmm14
+ pxor %xmm15,%xmm15
+ xorq %rcx,%rcx
+ xorq %rdx,%rdx
+ xorq %rsi,%rsi
+ xorq %rdi,%rdi
+ xorq %r8,%r8
+ xorq %r9,%r9
+ xorq %r10,%r10
+ xorq %r11,%r11
+ leaq 8(%rsp),%rax
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size OPENSSL_wipe_cpu,.-OPENSSL_wipe_cpu
+.globl OPENSSL_instrument_bus
+.type OPENSSL_instrument_bus,@function
+.align 16
+OPENSSL_instrument_bus:
+.cfi_startproc
+ movq %rdi,%r10
+ movq %rsi,%rcx
+ movq %rsi,%r11
+
+ rdtsc
+ movl %eax,%r8d
+ movl $0,%r9d
+ clflush (%r10)
+.byte 0xf0
+ addl %r9d,(%r10)
+ jmp .Loop
+.align 16
+.Loop: rdtsc
+ movl %eax,%edx
+ subl %r8d,%eax
+ movl %edx,%r8d
+ movl %eax,%r9d
+ clflush (%r10)
+.byte 0xf0
+ addl %eax,(%r10)
+ leaq 4(%r10),%r10
+ subq $1,%rcx
+ jnz .Loop
+
+ movq %r11,%rax
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size OPENSSL_instrument_bus,.-OPENSSL_instrument_bus
+
+.globl OPENSSL_instrument_bus2
+.type OPENSSL_instrument_bus2,@function
+.align 16
+OPENSSL_instrument_bus2:
+.cfi_startproc
+ movq %rdi,%r10
+ movq %rsi,%rcx
+ movq %rdx,%r11
+ movq %rcx,8(%rsp)
+
+ rdtsc
+ movl %eax,%r8d
+ movl $0,%r9d
+
+ clflush (%r10)
+.byte 0xf0
+ addl %r9d,(%r10)
+
+ rdtsc
+ movl %eax,%edx
+ subl %r8d,%eax
+ movl %edx,%r8d
+ movl %eax,%r9d
+.Loop2:
+ clflush (%r10)
+.byte 0xf0
+ addl %eax,(%r10)
+
+ subq $1,%r11
+ jz .Ldone2
+
+ rdtsc
+ movl %eax,%edx
+ subl %r8d,%eax
+ movl %edx,%r8d
+ cmpl %r9d,%eax
+ movl %eax,%r9d
+ movl $0,%edx
+ setne %dl
+ subq %rdx,%rcx
+ leaq (%r10,%rdx,4),%r10
+ jnz .Loop2
+
+.Ldone2:
+ movq 8(%rsp),%rax
+ subq %rcx,%rax
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size OPENSSL_instrument_bus2,.-OPENSSL_instrument_bus2
+.globl OPENSSL_ia32_rdrand_bytes
+.type OPENSSL_ia32_rdrand_bytes,@function
+.align 16
+OPENSSL_ia32_rdrand_bytes:
+.cfi_startproc
+ xorq %rax,%rax
+ cmpq $0,%rsi
+ je .Ldone_rdrand_bytes
+
+ movq $8,%r11
+.Loop_rdrand_bytes:
+.byte 73,15,199,242
+ jc .Lbreak_rdrand_bytes
+ decq %r11
+ jnz .Loop_rdrand_bytes
+ jmp .Ldone_rdrand_bytes
+
+.align 16
+.Lbreak_rdrand_bytes:
+ cmpq $8,%rsi
+ jb .Ltail_rdrand_bytes
+ movq %r10,(%rdi)
+ leaq 8(%rdi),%rdi
+ addq $8,%rax
+ subq $8,%rsi
+ jz .Ldone_rdrand_bytes
+ movq $8,%r11
+ jmp .Loop_rdrand_bytes
+
+.align 16
+.Ltail_rdrand_bytes:
+ movb %r10b,(%rdi)
+ leaq 1(%rdi),%rdi
+ incq %rax
+ shrq $8,%r10
+ decq %rsi
+ jnz .Ltail_rdrand_bytes
+
+.Ldone_rdrand_bytes:
+ xorq %r10,%r10
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size OPENSSL_ia32_rdrand_bytes,.-OPENSSL_ia32_rdrand_bytes
+.globl OPENSSL_ia32_rdseed_bytes
+.type OPENSSL_ia32_rdseed_bytes,@function
+.align 16
+OPENSSL_ia32_rdseed_bytes:
+.cfi_startproc
+ xorq %rax,%rax
+ cmpq $0,%rsi
+ je .Ldone_rdseed_bytes
+
+ movq $8,%r11
+.Loop_rdseed_bytes:
+.byte 73,15,199,250
+ jc .Lbreak_rdseed_bytes
+ decq %r11
+ jnz .Loop_rdseed_bytes
+ jmp .Ldone_rdseed_bytes
+
+.align 16
+.Lbreak_rdseed_bytes:
+ cmpq $8,%rsi
+ jb .Ltail_rdseed_bytes
+ movq %r10,(%rdi)
+ leaq 8(%rdi),%rdi
+ addq $8,%rax
+ subq $8,%rsi
+ jz .Ldone_rdseed_bytes
+ movq $8,%r11
+ jmp .Loop_rdseed_bytes
+
+.align 16
+.Ltail_rdseed_bytes:
+ movb %r10b,(%rdi)
+ leaq 1(%rdi),%rdi
+ incq %rax
+ shrq $8,%r10
+ decq %rsi
+ jnz .Ltail_rdseed_bytes
+
+.Ldone_rdseed_bytes:
+ xorq %r10,%r10
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size OPENSSL_ia32_rdseed_bytes,.-OPENSSL_ia32_rdseed_bytes