Diffstat (limited to 'CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes')
-rw-r--r--   CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aes-x86_64.nasm            2969
-rw-r--r--   CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aesni-mb-x86_64.nasm       1846
-rw-r--r--   CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aesni-sha1-x86_64.nasm     3268
-rw-r--r--   CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aesni-sha256-x86_64.nasm   4708
-rw-r--r--   CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aesni-x86_64.nasm          5104
-rw-r--r--   CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/bsaes-x86_64.nasm          2823
-rw-r--r--   CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/vpaes-x86_64.nasm          1168
7 files changed, 21886 insertions, 0 deletions
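Note: the .nasm files added below are the perlasm-generated x86_64 AES implementations from OpenSSL, assembled for the X64/MSVC (Win64 ABI) toolchain. They export the C-callable entry points visible further down in the diff (AES_set_encrypt_key, AES_set_decrypt_key, AES_encrypt, AES_decrypt, AES_cbc_encrypt). For orientation only — this sketch is not part of the commit — a caller written against OpenSSL's legacy AES interface would reach these symbols roughly as follows (prototypes follow OpenSSL's <openssl/aes.h>; error handling omitted):

    #include <openssl/aes.h>   /* AES_KEY, AES_set_encrypt_key, AES_encrypt, AES_cbc_encrypt */

    /* Illustrative sketch only: expand a 128-bit key, encrypt one 16-byte block,
       then encrypt a multi-block buffer in CBC mode, using the entry points
       these generated files provide. */
    static void aes_demo(const unsigned char key[16],
                         const unsigned char in[16],
                         unsigned char out[16])
    {
        AES_KEY ks;
        unsigned char iv[16]  = {0};          /* CBC IV; all-zero only for illustration */
        unsigned char buf[32] = {0};

        AES_set_encrypt_key(key, 128, &ks);   /* returns 0 on success */
        AES_encrypt(in, out, &ks);            /* single-block primitive */

        AES_cbc_encrypt(buf, buf, sizeof(buf), &ks, iv, AES_ENCRYPT);
    }

In EDK II builds these symbols are normally consumed through OpensslLib/BaseCryptLib rather than called directly.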
diff --git a/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aes-x86_64.nasm b/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aes-x86_64.nasm
new file mode 100644
index 0000000000..5884b5bb2d
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aes-x86_64.nasm
@@ -0,0 +1,2969 @@
+default rel
+%define XMMWORD
+%define YMMWORD
+%define ZMMWORD
+section .text code align=64
+
+
+ALIGN 16
+_x86_64_AES_encrypt:
+
+ xor eax,DWORD[r15]
+ xor ebx,DWORD[4+r15]
+ xor ecx,DWORD[8+r15]
+ xor edx,DWORD[12+r15]
+
+ mov r13d,DWORD[240+r15]
+ sub r13d,1
+ jmp NEAR $L$enc_loop
+ALIGN 16
+$L$enc_loop:
+
+ movzx esi,al
+ movzx edi,bl
+ movzx ebp,cl
+ mov r10d,DWORD[rsi*8+r14]
+ mov r11d,DWORD[rdi*8+r14]
+ mov r12d,DWORD[rbp*8+r14]
+
+ movzx esi,bh
+ movzx edi,ch
+ movzx ebp,dl
+ xor r10d,DWORD[3+rsi*8+r14]
+ xor r11d,DWORD[3+rdi*8+r14]
+ mov r8d,DWORD[rbp*8+r14]
+
+ movzx esi,dh
+ shr ecx,16
+ movzx ebp,ah
+ xor r12d,DWORD[3+rsi*8+r14]
+ shr edx,16
+ xor r8d,DWORD[3+rbp*8+r14]
+
+ shr ebx,16
+ lea r15,[16+r15]
+ shr eax,16
+
+ movzx esi,cl
+ movzx edi,dl
+ movzx ebp,al
+ xor r10d,DWORD[2+rsi*8+r14]
+ xor r11d,DWORD[2+rdi*8+r14]
+ xor r12d,DWORD[2+rbp*8+r14]
+
+ movzx esi,dh
+ movzx edi,ah
+ movzx ebp,bl
+ xor r10d,DWORD[1+rsi*8+r14]
+ xor r11d,DWORD[1+rdi*8+r14]
+ xor r8d,DWORD[2+rbp*8+r14]
+
+ mov edx,DWORD[12+r15]
+ movzx edi,bh
+ movzx ebp,ch
+ mov eax,DWORD[r15]
+ xor r12d,DWORD[1+rdi*8+r14]
+ xor r8d,DWORD[1+rbp*8+r14]
+
+ mov ebx,DWORD[4+r15]
+ mov ecx,DWORD[8+r15]
+ xor eax,r10d
+ xor ebx,r11d
+ xor ecx,r12d
+ xor edx,r8d
+ sub r13d,1
+ jnz NEAR $L$enc_loop
+ movzx esi,al
+ movzx edi,bl
+ movzx ebp,cl
+ movzx r10d,BYTE[2+rsi*8+r14]
+ movzx r11d,BYTE[2+rdi*8+r14]
+ movzx r12d,BYTE[2+rbp*8+r14]
+
+ movzx esi,dl
+ movzx edi,bh
+ movzx ebp,ch
+ movzx r8d,BYTE[2+rsi*8+r14]
+ mov edi,DWORD[rdi*8+r14]
+ mov ebp,DWORD[rbp*8+r14]
+
+ and edi,0x0000ff00
+ and ebp,0x0000ff00
+
+ xor r10d,edi
+ xor r11d,ebp
+ shr ecx,16
+
+ movzx esi,dh
+ movzx edi,ah
+ shr edx,16
+ mov esi,DWORD[rsi*8+r14]
+ mov edi,DWORD[rdi*8+r14]
+
+ and esi,0x0000ff00
+ and edi,0x0000ff00
+ shr ebx,16
+ xor r12d,esi
+ xor r8d,edi
+ shr eax,16
+
+ movzx esi,cl
+ movzx edi,dl
+ movzx ebp,al
+ mov esi,DWORD[rsi*8+r14]
+ mov edi,DWORD[rdi*8+r14]
+ mov ebp,DWORD[rbp*8+r14]
+
+ and esi,0x00ff0000
+ and edi,0x00ff0000
+ and ebp,0x00ff0000
+
+ xor r10d,esi
+ xor r11d,edi
+ xor r12d,ebp
+
+ movzx esi,bl
+ movzx edi,dh
+ movzx ebp,ah
+ mov esi,DWORD[rsi*8+r14]
+ mov edi,DWORD[2+rdi*8+r14]
+ mov ebp,DWORD[2+rbp*8+r14]
+
+ and esi,0x00ff0000
+ and edi,0xff000000
+ and ebp,0xff000000
+
+ xor r8d,esi
+ xor r10d,edi
+ xor r11d,ebp
+
+ movzx esi,bh
+ movzx edi,ch
+ mov edx,DWORD[((16+12))+r15]
+ mov esi,DWORD[2+rsi*8+r14]
+ mov edi,DWORD[2+rdi*8+r14]
+ mov eax,DWORD[((16+0))+r15]
+
+ and esi,0xff000000
+ and edi,0xff000000
+
+ xor r12d,esi
+ xor r8d,edi
+
+ mov ebx,DWORD[((16+4))+r15]
+ mov ecx,DWORD[((16+8))+r15]
+ xor eax,r10d
+ xor ebx,r11d
+ xor ecx,r12d
+ xor edx,r8d
+DB 0xf3,0xc3
+
+
+
+ALIGN 16
+_x86_64_AES_encrypt_compact:
+
+ lea r8,[128+r14]
+ mov edi,DWORD[((0-128))+r8]
+ mov ebp,DWORD[((32-128))+r8]
+ mov r10d,DWORD[((64-128))+r8]
+ mov r11d,DWORD[((96-128))+r8]
+ mov edi,DWORD[((128-128))+r8]
+ mov ebp,DWORD[((160-128))+r8]
+ mov r10d,DWORD[((192-128))+r8]
+ mov r11d,DWORD[((224-128))+r8]
+ jmp NEAR $L$enc_loop_compact
+ALIGN 16
+$L$enc_loop_compact:
+ xor eax,DWORD[r15]
+ xor ebx,DWORD[4+r15]
+ xor ecx,DWORD[8+r15]
+ xor edx,DWORD[12+r15]
+ lea r15,[16+r15]
+ movzx r10d,al
+ movzx r11d,bl
+ movzx r12d,cl
+ movzx r8d,dl
+ movzx esi,bh
+ movzx edi,ch
+ shr ecx,16
+ movzx ebp,dh
+ movzx r10d,BYTE[r10*1+r14]
+ movzx r11d,BYTE[r11*1+r14]
+ movzx r12d,BYTE[r12*1+r14]
+ movzx r8d,BYTE[r8*1+r14]
+
+ movzx r9d,BYTE[rsi*1+r14]
+ movzx esi,ah
+ movzx r13d,BYTE[rdi*1+r14]
+ movzx edi,cl
+ movzx ebp,BYTE[rbp*1+r14]
+ movzx esi,BYTE[rsi*1+r14]
+
+ shl r9d,8
+ shr edx,16
+ shl r13d,8
+ xor r10d,r9d
+ shr eax,16
+ movzx r9d,dl
+ shr ebx,16
+ xor r11d,r13d
+ shl ebp,8
+ movzx r13d,al
+ movzx edi,BYTE[rdi*1+r14]
+ xor r12d,ebp
+
+ shl esi,8
+ movzx ebp,bl
+ shl edi,16
+ xor r8d,esi
+ movzx r9d,BYTE[r9*1+r14]
+ movzx esi,dh
+ movzx r13d,BYTE[r13*1+r14]
+ xor r10d,edi
+
+ shr ecx,8
+ movzx edi,ah
+ shl r9d,16
+ shr ebx,8
+ shl r13d,16
+ xor r11d,r9d
+ movzx ebp,BYTE[rbp*1+r14]
+ movzx esi,BYTE[rsi*1+r14]
+ movzx edi,BYTE[rdi*1+r14]
+ movzx edx,BYTE[rcx*1+r14]
+ movzx ecx,BYTE[rbx*1+r14]
+
+ shl ebp,16
+ xor r12d,r13d
+ shl esi,24
+ xor r8d,ebp
+ shl edi,24
+ xor r10d,esi
+ shl edx,24
+ xor r11d,edi
+ shl ecx,24
+ mov eax,r10d
+ mov ebx,r11d
+ xor ecx,r12d
+ xor edx,r8d
+ cmp r15,QWORD[16+rsp]
+ je NEAR $L$enc_compact_done
+ mov r10d,0x80808080
+ mov r11d,0x80808080
+ and r10d,eax
+ and r11d,ebx
+ mov esi,r10d
+ mov edi,r11d
+ shr r10d,7
+ lea r8d,[rax*1+rax]
+ shr r11d,7
+ lea r9d,[rbx*1+rbx]
+ sub esi,r10d
+ sub edi,r11d
+ and r8d,0xfefefefe
+ and r9d,0xfefefefe
+ and esi,0x1b1b1b1b
+ and edi,0x1b1b1b1b
+ mov r10d,eax
+ mov r11d,ebx
+ xor r8d,esi
+ xor r9d,edi
+
+ xor eax,r8d
+ xor ebx,r9d
+ mov r12d,0x80808080
+ rol eax,24
+ mov ebp,0x80808080
+ rol ebx,24
+ and r12d,ecx
+ and ebp,edx
+ xor eax,r8d
+ xor ebx,r9d
+ mov esi,r12d
+ ror r10d,16
+ mov edi,ebp
+ ror r11d,16
+ lea r8d,[rcx*1+rcx]
+ shr r12d,7
+ xor eax,r10d
+ shr ebp,7
+ xor ebx,r11d
+ ror r10d,8
+ lea r9d,[rdx*1+rdx]
+ ror r11d,8
+ sub esi,r12d
+ sub edi,ebp
+ xor eax,r10d
+ xor ebx,r11d
+
+ and r8d,0xfefefefe
+ and r9d,0xfefefefe
+ and esi,0x1b1b1b1b
+ and edi,0x1b1b1b1b
+ mov r12d,ecx
+ mov ebp,edx
+ xor r8d,esi
+ xor r9d,edi
+
+ ror r12d,16
+ xor ecx,r8d
+ ror ebp,16
+ xor edx,r9d
+ rol ecx,24
+ mov esi,DWORD[r14]
+ rol edx,24
+ xor ecx,r8d
+ mov edi,DWORD[64+r14]
+ xor edx,r9d
+ mov r8d,DWORD[128+r14]
+ xor ecx,r12d
+ ror r12d,8
+ xor edx,ebp
+ ror ebp,8
+ xor ecx,r12d
+ mov r9d,DWORD[192+r14]
+ xor edx,ebp
+ jmp NEAR $L$enc_loop_compact
+ALIGN 16
+$L$enc_compact_done:
+ xor eax,DWORD[r15]
+ xor ebx,DWORD[4+r15]
+ xor ecx,DWORD[8+r15]
+ xor edx,DWORD[12+r15]
+DB 0xf3,0xc3
+
+
+global AES_encrypt
+
+ALIGN 16
+global asm_AES_encrypt
+
+asm_AES_encrypt:
+AES_encrypt:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_AES_encrypt:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+
+DB 243,15,30,250
+ mov rax,rsp
+
+ push rbx
+
+ push rbp
+
+ push r12
+
+ push r13
+
+ push r14
+
+ push r15
+
+
+
+ lea rcx,[((-63))+rdx]
+ and rsp,-64
+ sub rcx,rsp
+ neg rcx
+ and rcx,0x3c0
+ sub rsp,rcx
+ sub rsp,32
+
+ mov QWORD[16+rsp],rsi
+ mov QWORD[24+rsp],rax
+
+$L$enc_prologue:
+
+ mov r15,rdx
+ mov r13d,DWORD[240+r15]
+
+ mov eax,DWORD[rdi]
+ mov ebx,DWORD[4+rdi]
+ mov ecx,DWORD[8+rdi]
+ mov edx,DWORD[12+rdi]
+
+ shl r13d,4
+ lea rbp,[r13*1+r15]
+ mov QWORD[rsp],r15
+ mov QWORD[8+rsp],rbp
+
+
+ lea r14,[(($L$AES_Te+2048))]
+ lea rbp,[768+rsp]
+ sub rbp,r14
+ and rbp,0x300
+ lea r14,[rbp*1+r14]
+
+ call _x86_64_AES_encrypt_compact
+
+ mov r9,QWORD[16+rsp]
+ mov rsi,QWORD[24+rsp]
+
+ mov DWORD[r9],eax
+ mov DWORD[4+r9],ebx
+ mov DWORD[8+r9],ecx
+ mov DWORD[12+r9],edx
+
+ mov r15,QWORD[((-48))+rsi]
+
+ mov r14,QWORD[((-40))+rsi]
+
+ mov r13,QWORD[((-32))+rsi]
+
+ mov r12,QWORD[((-24))+rsi]
+
+ mov rbp,QWORD[((-16))+rsi]
+
+ mov rbx,QWORD[((-8))+rsi]
+
+ lea rsp,[rsi]
+
+$L$enc_epilogue:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_AES_encrypt:
+
+ALIGN 16
+_x86_64_AES_decrypt:
+
+ xor eax,DWORD[r15]
+ xor ebx,DWORD[4+r15]
+ xor ecx,DWORD[8+r15]
+ xor edx,DWORD[12+r15]
+
+ mov r13d,DWORD[240+r15]
+ sub r13d,1
+ jmp NEAR $L$dec_loop
+ALIGN 16
+$L$dec_loop:
+
+ movzx esi,al
+ movzx edi,bl
+ movzx ebp,cl
+ mov r10d,DWORD[rsi*8+r14]
+ mov r11d,DWORD[rdi*8+r14]
+ mov r12d,DWORD[rbp*8+r14]
+
+ movzx esi,dh
+ movzx edi,ah
+ movzx ebp,dl
+ xor r10d,DWORD[3+rsi*8+r14]
+ xor r11d,DWORD[3+rdi*8+r14]
+ mov r8d,DWORD[rbp*8+r14]
+
+ movzx esi,bh
+ shr eax,16
+ movzx ebp,ch
+ xor r12d,DWORD[3+rsi*8+r14]
+ shr edx,16
+ xor r8d,DWORD[3+rbp*8+r14]
+
+ shr ebx,16
+ lea r15,[16+r15]
+ shr ecx,16
+
+ movzx esi,cl
+ movzx edi,dl
+ movzx ebp,al
+ xor r10d,DWORD[2+rsi*8+r14]
+ xor r11d,DWORD[2+rdi*8+r14]
+ xor r12d,DWORD[2+rbp*8+r14]
+
+ movzx esi,bh
+ movzx edi,ch
+ movzx ebp,bl
+ xor r10d,DWORD[1+rsi*8+r14]
+ xor r11d,DWORD[1+rdi*8+r14]
+ xor r8d,DWORD[2+rbp*8+r14]
+
+ movzx esi,dh
+ mov edx,DWORD[12+r15]
+ movzx ebp,ah
+ xor r12d,DWORD[1+rsi*8+r14]
+ mov eax,DWORD[r15]
+ xor r8d,DWORD[1+rbp*8+r14]
+
+ xor eax,r10d
+ mov ebx,DWORD[4+r15]
+ mov ecx,DWORD[8+r15]
+ xor ecx,r12d
+ xor ebx,r11d
+ xor edx,r8d
+ sub r13d,1
+ jnz NEAR $L$dec_loop
+ lea r14,[2048+r14]
+ movzx esi,al
+ movzx edi,bl
+ movzx ebp,cl
+ movzx r10d,BYTE[rsi*1+r14]
+ movzx r11d,BYTE[rdi*1+r14]
+ movzx r12d,BYTE[rbp*1+r14]
+
+ movzx esi,dl
+ movzx edi,dh
+ movzx ebp,ah
+ movzx r8d,BYTE[rsi*1+r14]
+ movzx edi,BYTE[rdi*1+r14]
+ movzx ebp,BYTE[rbp*1+r14]
+
+ shl edi,8
+ shl ebp,8
+
+ xor r10d,edi
+ xor r11d,ebp
+ shr edx,16
+
+ movzx esi,bh
+ movzx edi,ch
+ shr eax,16
+ movzx esi,BYTE[rsi*1+r14]
+ movzx edi,BYTE[rdi*1+r14]
+
+ shl esi,8
+ shl edi,8
+ shr ebx,16
+ xor r12d,esi
+ xor r8d,edi
+ shr ecx,16
+
+ movzx esi,cl
+ movzx edi,dl
+ movzx ebp,al
+ movzx esi,BYTE[rsi*1+r14]
+ movzx edi,BYTE[rdi*1+r14]
+ movzx ebp,BYTE[rbp*1+r14]
+
+ shl esi,16
+ shl edi,16
+ shl ebp,16
+
+ xor r10d,esi
+ xor r11d,edi
+ xor r12d,ebp
+
+ movzx esi,bl
+ movzx edi,bh
+ movzx ebp,ch
+ movzx esi,BYTE[rsi*1+r14]
+ movzx edi,BYTE[rdi*1+r14]
+ movzx ebp,BYTE[rbp*1+r14]
+
+ shl esi,16
+ shl edi,24
+ shl ebp,24
+
+ xor r8d,esi
+ xor r10d,edi
+ xor r11d,ebp
+
+ movzx esi,dh
+ movzx edi,ah
+ mov edx,DWORD[((16+12))+r15]
+ movzx esi,BYTE[rsi*1+r14]
+ movzx edi,BYTE[rdi*1+r14]
+ mov eax,DWORD[((16+0))+r15]
+
+ shl esi,24
+ shl edi,24
+
+ xor r12d,esi
+ xor r8d,edi
+
+ mov ebx,DWORD[((16+4))+r15]
+ mov ecx,DWORD[((16+8))+r15]
+ lea r14,[((-2048))+r14]
+ xor eax,r10d
+ xor ebx,r11d
+ xor ecx,r12d
+ xor edx,r8d
+DB 0xf3,0xc3
+
+
+
+ALIGN 16
+_x86_64_AES_decrypt_compact:
+
+ lea r8,[128+r14]
+ mov edi,DWORD[((0-128))+r8]
+ mov ebp,DWORD[((32-128))+r8]
+ mov r10d,DWORD[((64-128))+r8]
+ mov r11d,DWORD[((96-128))+r8]
+ mov edi,DWORD[((128-128))+r8]
+ mov ebp,DWORD[((160-128))+r8]
+ mov r10d,DWORD[((192-128))+r8]
+ mov r11d,DWORD[((224-128))+r8]
+ jmp NEAR $L$dec_loop_compact
+
+ALIGN 16
+$L$dec_loop_compact:
+ xor eax,DWORD[r15]
+ xor ebx,DWORD[4+r15]
+ xor ecx,DWORD[8+r15]
+ xor edx,DWORD[12+r15]
+ lea r15,[16+r15]
+ movzx r10d,al
+ movzx r11d,bl
+ movzx r12d,cl
+ movzx r8d,dl
+ movzx esi,dh
+ movzx edi,ah
+ shr edx,16
+ movzx ebp,bh
+ movzx r10d,BYTE[r10*1+r14]
+ movzx r11d,BYTE[r11*1+r14]
+ movzx r12d,BYTE[r12*1+r14]
+ movzx r8d,BYTE[r8*1+r14]
+
+ movzx r9d,BYTE[rsi*1+r14]
+ movzx esi,ch
+ movzx r13d,BYTE[rdi*1+r14]
+ movzx ebp,BYTE[rbp*1+r14]
+ movzx esi,BYTE[rsi*1+r14]
+
+ shr ecx,16
+ shl r13d,8
+ shl r9d,8
+ movzx edi,cl
+ shr eax,16
+ xor r10d,r9d
+ shr ebx,16
+ movzx r9d,dl
+
+ shl ebp,8
+ xor r11d,r13d
+ shl esi,8
+ movzx r13d,al
+ movzx edi,BYTE[rdi*1+r14]
+ xor r12d,ebp
+ movzx ebp,bl
+
+ shl edi,16
+ xor r8d,esi
+ movzx r9d,BYTE[r9*1+r14]
+ movzx esi,bh
+ movzx ebp,BYTE[rbp*1+r14]
+ xor r10d,edi
+ movzx r13d,BYTE[r13*1+r14]
+ movzx edi,ch
+
+ shl ebp,16
+ shl r9d,16
+ shl r13d,16
+ xor r8d,ebp
+ movzx ebp,dh
+ xor r11d,r9d
+ shr eax,8
+ xor r12d,r13d
+
+ movzx esi,BYTE[rsi*1+r14]
+ movzx ebx,BYTE[rdi*1+r14]
+ movzx ecx,BYTE[rbp*1+r14]
+ movzx edx,BYTE[rax*1+r14]
+
+ mov eax,r10d
+ shl esi,24
+ shl ebx,24
+ shl ecx,24
+ xor eax,esi
+ shl edx,24
+ xor ebx,r11d
+ xor ecx,r12d
+ xor edx,r8d
+ cmp r15,QWORD[16+rsp]
+ je NEAR $L$dec_compact_done
+
+ mov rsi,QWORD[((256+0))+r14]
+ shl rbx,32
+ shl rdx,32
+ mov rdi,QWORD[((256+8))+r14]
+ or rax,rbx
+ or rcx,rdx
+ mov rbp,QWORD[((256+16))+r14]
+ mov r9,rsi
+ mov r12,rsi
+ and r9,rax
+ and r12,rcx
+ mov rbx,r9
+ mov rdx,r12
+ shr r9,7
+ lea r8,[rax*1+rax]
+ shr r12,7
+ lea r11,[rcx*1+rcx]
+ sub rbx,r9
+ sub rdx,r12
+ and r8,rdi
+ and r11,rdi
+ and rbx,rbp
+ and rdx,rbp
+ xor r8,rbx
+ xor r11,rdx
+ mov r10,rsi
+ mov r13,rsi
+
+ and r10,r8
+ and r13,r11
+ mov rbx,r10
+ mov rdx,r13
+ shr r10,7
+ lea r9,[r8*1+r8]
+ shr r13,7
+ lea r12,[r11*1+r11]
+ sub rbx,r10
+ sub rdx,r13
+ and r9,rdi
+ and r12,rdi
+ and rbx,rbp
+ and rdx,rbp
+ xor r9,rbx
+ xor r12,rdx
+ mov r10,rsi
+ mov r13,rsi
+
+ and r10,r9
+ and r13,r12
+ mov rbx,r10
+ mov rdx,r13
+ shr r10,7
+ xor r8,rax
+ shr r13,7
+ xor r11,rcx
+ sub rbx,r10
+ sub rdx,r13
+ lea r10,[r9*1+r9]
+ lea r13,[r12*1+r12]
+ xor r9,rax
+ xor r12,rcx
+ and r10,rdi
+ and r13,rdi
+ and rbx,rbp
+ and rdx,rbp
+ xor r10,rbx
+ xor r13,rdx
+
+ xor rax,r10
+ xor rcx,r13
+ xor r8,r10
+ xor r11,r13
+ mov rbx,rax
+ mov rdx,rcx
+ xor r9,r10
+ shr rbx,32
+ xor r12,r13
+ shr rdx,32
+ xor r10,r8
+ rol eax,8
+ xor r13,r11
+ rol ecx,8
+ xor r10,r9
+ rol ebx,8
+ xor r13,r12
+
+ rol edx,8
+ xor eax,r10d
+ shr r10,32
+ xor ecx,r13d
+ shr r13,32
+ xor ebx,r10d
+ xor edx,r13d
+
+ mov r10,r8
+ rol r8d,24
+ mov r13,r11
+ rol r11d,24
+ shr r10,32
+ xor eax,r8d
+ shr r13,32
+ xor ecx,r11d
+ rol r10d,24
+ mov r8,r9
+ rol r13d,24
+ mov r11,r12
+ shr r8,32
+ xor ebx,r10d
+ shr r11,32
+ xor edx,r13d
+
+ mov rsi,QWORD[r14]
+ rol r9d,16
+ mov rdi,QWORD[64+r14]
+ rol r12d,16
+ mov rbp,QWORD[128+r14]
+ rol r8d,16
+ mov r10,QWORD[192+r14]
+ xor eax,r9d
+ rol r11d,16
+ xor ecx,r12d
+ mov r13,QWORD[256+r14]
+ xor ebx,r8d
+ xor edx,r11d
+ jmp NEAR $L$dec_loop_compact
+ALIGN 16
+$L$dec_compact_done:
+ xor eax,DWORD[r15]
+ xor ebx,DWORD[4+r15]
+ xor ecx,DWORD[8+r15]
+ xor edx,DWORD[12+r15]
+DB 0xf3,0xc3
+
+
+global AES_decrypt
+
+ALIGN 16
+global asm_AES_decrypt
+
+asm_AES_decrypt:
+AES_decrypt:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_AES_decrypt:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+
+DB 243,15,30,250
+ mov rax,rsp
+
+ push rbx
+
+ push rbp
+
+ push r12
+
+ push r13
+
+ push r14
+
+ push r15
+
+
+
+ lea rcx,[((-63))+rdx]
+ and rsp,-64
+ sub rcx,rsp
+ neg rcx
+ and rcx,0x3c0
+ sub rsp,rcx
+ sub rsp,32
+
+ mov QWORD[16+rsp],rsi
+ mov QWORD[24+rsp],rax
+
+$L$dec_prologue:
+
+ mov r15,rdx
+ mov r13d,DWORD[240+r15]
+
+ mov eax,DWORD[rdi]
+ mov ebx,DWORD[4+rdi]
+ mov ecx,DWORD[8+rdi]
+ mov edx,DWORD[12+rdi]
+
+ shl r13d,4
+ lea rbp,[r13*1+r15]
+ mov QWORD[rsp],r15
+ mov QWORD[8+rsp],rbp
+
+
+ lea r14,[(($L$AES_Td+2048))]
+ lea rbp,[768+rsp]
+ sub rbp,r14
+ and rbp,0x300
+ lea r14,[rbp*1+r14]
+ shr rbp,3
+ add r14,rbp
+
+ call _x86_64_AES_decrypt_compact
+
+ mov r9,QWORD[16+rsp]
+ mov rsi,QWORD[24+rsp]
+
+ mov DWORD[r9],eax
+ mov DWORD[4+r9],ebx
+ mov DWORD[8+r9],ecx
+ mov DWORD[12+r9],edx
+
+ mov r15,QWORD[((-48))+rsi]
+
+ mov r14,QWORD[((-40))+rsi]
+
+ mov r13,QWORD[((-32))+rsi]
+
+ mov r12,QWORD[((-24))+rsi]
+
+ mov rbp,QWORD[((-16))+rsi]
+
+ mov rbx,QWORD[((-8))+rsi]
+
+ lea rsp,[rsi]
+
+$L$dec_epilogue:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_AES_decrypt:
+global AES_set_encrypt_key
+
+ALIGN 16
+AES_set_encrypt_key:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_AES_set_encrypt_key:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+
+DB 243,15,30,250
+ push rbx
+
+ push rbp
+
+ push r12
+
+ push r13
+
+ push r14
+
+ push r15
+
+ sub rsp,8
+
+$L$enc_key_prologue:
+
+ call _x86_64_AES_set_encrypt_key
+
+ mov rbp,QWORD[40+rsp]
+
+ mov rbx,QWORD[48+rsp]
+
+ add rsp,56
+
+$L$enc_key_epilogue:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_AES_set_encrypt_key:
+
+
+ALIGN 16
+_x86_64_AES_set_encrypt_key:
+
+ mov ecx,esi
+ mov rsi,rdi
+ mov rdi,rdx
+
+ test rsi,-1
+ jz NEAR $L$badpointer
+ test rdi,-1
+ jz NEAR $L$badpointer
+
+ lea rbp,[$L$AES_Te]
+ lea rbp,[((2048+128))+rbp]
+
+
+ mov eax,DWORD[((0-128))+rbp]
+ mov ebx,DWORD[((32-128))+rbp]
+ mov r8d,DWORD[((64-128))+rbp]
+ mov edx,DWORD[((96-128))+rbp]
+ mov eax,DWORD[((128-128))+rbp]
+ mov ebx,DWORD[((160-128))+rbp]
+ mov r8d,DWORD[((192-128))+rbp]
+ mov edx,DWORD[((224-128))+rbp]
+
+ cmp ecx,128
+ je NEAR $L$10rounds
+ cmp ecx,192
+ je NEAR $L$12rounds
+ cmp ecx,256
+ je NEAR $L$14rounds
+ mov rax,-2
+ jmp NEAR $L$exit
+
+$L$10rounds:
+ mov rax,QWORD[rsi]
+ mov rdx,QWORD[8+rsi]
+ mov QWORD[rdi],rax
+ mov QWORD[8+rdi],rdx
+
+ shr rdx,32
+ xor ecx,ecx
+ jmp NEAR $L$10shortcut
+ALIGN 4
+$L$10loop:
+ mov eax,DWORD[rdi]
+ mov edx,DWORD[12+rdi]
+$L$10shortcut:
+ movzx esi,dl
+ movzx ebx,BYTE[((-128))+rsi*1+rbp]
+ movzx esi,dh
+ shl ebx,24
+ xor eax,ebx
+
+ movzx ebx,BYTE[((-128))+rsi*1+rbp]
+ shr edx,16
+ movzx esi,dl
+ xor eax,ebx
+
+ movzx ebx,BYTE[((-128))+rsi*1+rbp]
+ movzx esi,dh
+ shl ebx,8
+ xor eax,ebx
+
+ movzx ebx,BYTE[((-128))+rsi*1+rbp]
+ shl ebx,16
+ xor eax,ebx
+
+ xor eax,DWORD[((1024-128))+rcx*4+rbp]
+ mov DWORD[16+rdi],eax
+ xor eax,DWORD[4+rdi]
+ mov DWORD[20+rdi],eax
+ xor eax,DWORD[8+rdi]
+ mov DWORD[24+rdi],eax
+ xor eax,DWORD[12+rdi]
+ mov DWORD[28+rdi],eax
+ add ecx,1
+ lea rdi,[16+rdi]
+ cmp ecx,10
+ jl NEAR $L$10loop
+
+ mov DWORD[80+rdi],10
+ xor rax,rax
+ jmp NEAR $L$exit
+
+$L$12rounds:
+ mov rax,QWORD[rsi]
+ mov rbx,QWORD[8+rsi]
+ mov rdx,QWORD[16+rsi]
+ mov QWORD[rdi],rax
+ mov QWORD[8+rdi],rbx
+ mov QWORD[16+rdi],rdx
+
+ shr rdx,32
+ xor ecx,ecx
+ jmp NEAR $L$12shortcut
+ALIGN 4
+$L$12loop:
+ mov eax,DWORD[rdi]
+ mov edx,DWORD[20+rdi]
+$L$12shortcut:
+ movzx esi,dl
+ movzx ebx,BYTE[((-128))+rsi*1+rbp]
+ movzx esi,dh
+ shl ebx,24
+ xor eax,ebx
+
+ movzx ebx,BYTE[((-128))+rsi*1+rbp]
+ shr edx,16
+ movzx esi,dl
+ xor eax,ebx
+
+ movzx ebx,BYTE[((-128))+rsi*1+rbp]
+ movzx esi,dh
+ shl ebx,8
+ xor eax,ebx
+
+ movzx ebx,BYTE[((-128))+rsi*1+rbp]
+ shl ebx,16
+ xor eax,ebx
+
+ xor eax,DWORD[((1024-128))+rcx*4+rbp]
+ mov DWORD[24+rdi],eax
+ xor eax,DWORD[4+rdi]
+ mov DWORD[28+rdi],eax
+ xor eax,DWORD[8+rdi]
+ mov DWORD[32+rdi],eax
+ xor eax,DWORD[12+rdi]
+ mov DWORD[36+rdi],eax
+
+ cmp ecx,7
+ je NEAR $L$12break
+ add ecx,1
+
+ xor eax,DWORD[16+rdi]
+ mov DWORD[40+rdi],eax
+ xor eax,DWORD[20+rdi]
+ mov DWORD[44+rdi],eax
+
+ lea rdi,[24+rdi]
+ jmp NEAR $L$12loop
+$L$12break:
+ mov DWORD[72+rdi],12
+ xor rax,rax
+ jmp NEAR $L$exit
+
+$L$14rounds:
+ mov rax,QWORD[rsi]
+ mov rbx,QWORD[8+rsi]
+ mov rcx,QWORD[16+rsi]
+ mov rdx,QWORD[24+rsi]
+ mov QWORD[rdi],rax
+ mov QWORD[8+rdi],rbx
+ mov QWORD[16+rdi],rcx
+ mov QWORD[24+rdi],rdx
+
+ shr rdx,32
+ xor ecx,ecx
+ jmp NEAR $L$14shortcut
+ALIGN 4
+$L$14loop:
+ mov eax,DWORD[rdi]
+ mov edx,DWORD[28+rdi]
+$L$14shortcut:
+ movzx esi,dl
+ movzx ebx,BYTE[((-128))+rsi*1+rbp]
+ movzx esi,dh
+ shl ebx,24
+ xor eax,ebx
+
+ movzx ebx,BYTE[((-128))+rsi*1+rbp]
+ shr edx,16
+ movzx esi,dl
+ xor eax,ebx
+
+ movzx ebx,BYTE[((-128))+rsi*1+rbp]
+ movzx esi,dh
+ shl ebx,8
+ xor eax,ebx
+
+ movzx ebx,BYTE[((-128))+rsi*1+rbp]
+ shl ebx,16
+ xor eax,ebx
+
+ xor eax,DWORD[((1024-128))+rcx*4+rbp]
+ mov DWORD[32+rdi],eax
+ xor eax,DWORD[4+rdi]
+ mov DWORD[36+rdi],eax
+ xor eax,DWORD[8+rdi]
+ mov DWORD[40+rdi],eax
+ xor eax,DWORD[12+rdi]
+ mov DWORD[44+rdi],eax
+
+ cmp ecx,6
+ je NEAR $L$14break
+ add ecx,1
+
+ mov edx,eax
+ mov eax,DWORD[16+rdi]
+ movzx esi,dl
+ movzx ebx,BYTE[((-128))+rsi*1+rbp]
+ movzx esi,dh
+ xor eax,ebx
+
+ movzx ebx,BYTE[((-128))+rsi*1+rbp]
+ shr edx,16
+ shl ebx,8
+ movzx esi,dl
+ xor eax,ebx
+
+ movzx ebx,BYTE[((-128))+rsi*1+rbp]
+ movzx esi,dh
+ shl ebx,16
+ xor eax,ebx
+
+ movzx ebx,BYTE[((-128))+rsi*1+rbp]
+ shl ebx,24
+ xor eax,ebx
+
+ mov DWORD[48+rdi],eax
+ xor eax,DWORD[20+rdi]
+ mov DWORD[52+rdi],eax
+ xor eax,DWORD[24+rdi]
+ mov DWORD[56+rdi],eax
+ xor eax,DWORD[28+rdi]
+ mov DWORD[60+rdi],eax
+
+ lea rdi,[32+rdi]
+ jmp NEAR $L$14loop
+$L$14break:
+ mov DWORD[48+rdi],14
+ xor rax,rax
+ jmp NEAR $L$exit
+
+$L$badpointer:
+ mov rax,-1
+$L$exit:
+DB 0xf3,0xc3
+
+
+global AES_set_decrypt_key
+
+ALIGN 16
+AES_set_decrypt_key:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_AES_set_decrypt_key:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+
+DB 243,15,30,250
+ push rbx
+
+ push rbp
+
+ push r12
+
+ push r13
+
+ push r14
+
+ push r15
+
+ push rdx
+
+$L$dec_key_prologue:
+
+ call _x86_64_AES_set_encrypt_key
+ mov r8,QWORD[rsp]
+ cmp eax,0
+ jne NEAR $L$abort
+
+ mov r14d,DWORD[240+r8]
+ xor rdi,rdi
+ lea rcx,[r14*4+rdi]
+ mov rsi,r8
+ lea rdi,[rcx*4+r8]
+ALIGN 4
+$L$invert:
+ mov rax,QWORD[rsi]
+ mov rbx,QWORD[8+rsi]
+ mov rcx,QWORD[rdi]
+ mov rdx,QWORD[8+rdi]
+ mov QWORD[rdi],rax
+ mov QWORD[8+rdi],rbx
+ mov QWORD[rsi],rcx
+ mov QWORD[8+rsi],rdx
+ lea rsi,[16+rsi]
+ lea rdi,[((-16))+rdi]
+ cmp rdi,rsi
+ jne NEAR $L$invert
+
+ lea rax,[(($L$AES_Te+2048+1024))]
+
+ mov rsi,QWORD[40+rax]
+ mov rdi,QWORD[48+rax]
+ mov rbp,QWORD[56+rax]
+
+ mov r15,r8
+ sub r14d,1
+ALIGN 4
+$L$permute:
+ lea r15,[16+r15]
+ mov rax,QWORD[r15]
+ mov rcx,QWORD[8+r15]
+ mov r9,rsi
+ mov r12,rsi
+ and r9,rax
+ and r12,rcx
+ mov rbx,r9
+ mov rdx,r12
+ shr r9,7
+ lea r8,[rax*1+rax]
+ shr r12,7
+ lea r11,[rcx*1+rcx]
+ sub rbx,r9
+ sub rdx,r12
+ and r8,rdi
+ and r11,rdi
+ and rbx,rbp
+ and rdx,rbp
+ xor r8,rbx
+ xor r11,rdx
+ mov r10,rsi
+ mov r13,rsi
+
+ and r10,r8
+ and r13,r11
+ mov rbx,r10
+ mov rdx,r13
+ shr r10,7
+ lea r9,[r8*1+r8]
+ shr r13,7
+ lea r12,[r11*1+r11]
+ sub rbx,r10
+ sub rdx,r13
+ and r9,rdi
+ and r12,rdi
+ and rbx,rbp
+ and rdx,rbp
+ xor r9,rbx
+ xor r12,rdx
+ mov r10,rsi
+ mov r13,rsi
+
+ and r10,r9
+ and r13,r12
+ mov rbx,r10
+ mov rdx,r13
+ shr r10,7
+ xor r8,rax
+ shr r13,7
+ xor r11,rcx
+ sub rbx,r10
+ sub rdx,r13
+ lea r10,[r9*1+r9]
+ lea r13,[r12*1+r12]
+ xor r9,rax
+ xor r12,rcx
+ and r10,rdi
+ and r13,rdi
+ and rbx,rbp
+ and rdx,rbp
+ xor r10,rbx
+ xor r13,rdx
+
+ xor rax,r10
+ xor rcx,r13
+ xor r8,r10
+ xor r11,r13
+ mov rbx,rax
+ mov rdx,rcx
+ xor r9,r10
+ shr rbx,32
+ xor r12,r13
+ shr rdx,32
+ xor r10,r8
+ rol eax,8
+ xor r13,r11
+ rol ecx,8
+ xor r10,r9
+ rol ebx,8
+ xor r13,r12
+
+ rol edx,8
+ xor eax,r10d
+ shr r10,32
+ xor ecx,r13d
+ shr r13,32
+ xor ebx,r10d
+ xor edx,r13d
+
+ mov r10,r8
+ rol r8d,24
+ mov r13,r11
+ rol r11d,24
+ shr r10,32
+ xor eax,r8d
+ shr r13,32
+ xor ecx,r11d
+ rol r10d,24
+ mov r8,r9
+ rol r13d,24
+ mov r11,r12
+ shr r8,32
+ xor ebx,r10d
+ shr r11,32
+ xor edx,r13d
+
+
+ rol r9d,16
+
+ rol r12d,16
+
+ rol r8d,16
+
+ xor eax,r9d
+ rol r11d,16
+ xor ecx,r12d
+
+ xor ebx,r8d
+ xor edx,r11d
+ mov DWORD[r15],eax
+ mov DWORD[4+r15],ebx
+ mov DWORD[8+r15],ecx
+ mov DWORD[12+r15],edx
+ sub r14d,1
+ jnz NEAR $L$permute
+
+ xor rax,rax
+$L$abort:
+ mov r15,QWORD[8+rsp]
+
+ mov r14,QWORD[16+rsp]
+
+ mov r13,QWORD[24+rsp]
+
+ mov r12,QWORD[32+rsp]
+
+ mov rbp,QWORD[40+rsp]
+
+ mov rbx,QWORD[48+rsp]
+
+ add rsp,56
+
+$L$dec_key_epilogue:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_AES_set_decrypt_key:
+global AES_cbc_encrypt
+
+ALIGN 16
+EXTERN OPENSSL_ia32cap_P
+global asm_AES_cbc_encrypt
+
+asm_AES_cbc_encrypt:
+AES_cbc_encrypt:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_AES_cbc_encrypt:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD[40+rsp]
+ mov r9,QWORD[48+rsp]
+
+
+
+DB 243,15,30,250
+ cmp rdx,0
+ je NEAR $L$cbc_epilogue
+ pushfq
+
+
+
+ push rbx
+
+ push rbp
+
+ push r12
+
+ push r13
+
+ push r14
+
+ push r15
+
+$L$cbc_prologue:
+
+ cld
+ mov r9d,r9d
+
+ lea r14,[$L$AES_Te]
+ lea r10,[$L$AES_Td]
+ cmp r9,0
+ cmove r14,r10
+
+
+ mov r10d,DWORD[OPENSSL_ia32cap_P]
+ cmp rdx,512
+ jb NEAR $L$cbc_slow_prologue
+ test rdx,15
+ jnz NEAR $L$cbc_slow_prologue
+ bt r10d,28
+ jc NEAR $L$cbc_slow_prologue
+
+
+ lea r15,[((-88-248))+rsp]
+ and r15,-64
+
+
+ mov r10,r14
+ lea r11,[2304+r14]
+ mov r12,r15
+ and r10,0xFFF
+ and r11,0xFFF
+ and r12,0xFFF
+
+ cmp r12,r11
+ jb NEAR $L$cbc_te_break_out
+ sub r12,r11
+ sub r15,r12
+ jmp NEAR $L$cbc_te_ok
+$L$cbc_te_break_out:
+ sub r12,r10
+ and r12,0xFFF
+ add r12,320
+ sub r15,r12
+ALIGN 4
+$L$cbc_te_ok:
+
+ xchg r15,rsp
+
+
+ mov QWORD[16+rsp],r15
+
+$L$cbc_fast_body:
+ mov QWORD[24+rsp],rdi
+ mov QWORD[32+rsp],rsi
+ mov QWORD[40+rsp],rdx
+ mov QWORD[48+rsp],rcx
+ mov QWORD[56+rsp],r8
+ mov DWORD[((80+240))+rsp],0
+ mov rbp,r8
+ mov rbx,r9
+ mov r9,rsi
+ mov r8,rdi
+ mov r15,rcx
+
+ mov eax,DWORD[240+r15]
+
+ mov r10,r15
+ sub r10,r14
+ and r10,0xfff
+ cmp r10,2304
+ jb NEAR $L$cbc_do_ecopy
+ cmp r10,4096-248
+ jb NEAR $L$cbc_skip_ecopy
+ALIGN 4
+$L$cbc_do_ecopy:
+ mov rsi,r15
+ lea rdi,[80+rsp]
+ lea r15,[80+rsp]
+ mov ecx,240/8
+ DD 0x90A548F3
+ mov DWORD[rdi],eax
+$L$cbc_skip_ecopy:
+ mov QWORD[rsp],r15
+
+ mov ecx,18
+ALIGN 4
+$L$cbc_prefetch_te:
+ mov r10,QWORD[r14]
+ mov r11,QWORD[32+r14]
+ mov r12,QWORD[64+r14]
+ mov r13,QWORD[96+r14]
+ lea r14,[128+r14]
+ sub ecx,1
+ jnz NEAR $L$cbc_prefetch_te
+ lea r14,[((-2304))+r14]
+
+ cmp rbx,0
+ je NEAR $L$FAST_DECRYPT
+
+
+ mov eax,DWORD[rbp]
+ mov ebx,DWORD[4+rbp]
+ mov ecx,DWORD[8+rbp]
+ mov edx,DWORD[12+rbp]
+
+ALIGN 4
+$L$cbc_fast_enc_loop:
+ xor eax,DWORD[r8]
+ xor ebx,DWORD[4+r8]
+ xor ecx,DWORD[8+r8]
+ xor edx,DWORD[12+r8]
+ mov r15,QWORD[rsp]
+ mov QWORD[24+rsp],r8
+
+ call _x86_64_AES_encrypt
+
+ mov r8,QWORD[24+rsp]
+ mov r10,QWORD[40+rsp]
+ mov DWORD[r9],eax
+ mov DWORD[4+r9],ebx
+ mov DWORD[8+r9],ecx
+ mov DWORD[12+r9],edx
+
+ lea r8,[16+r8]
+ lea r9,[16+r9]
+ sub r10,16
+ test r10,-16
+ mov QWORD[40+rsp],r10
+ jnz NEAR $L$cbc_fast_enc_loop
+ mov rbp,QWORD[56+rsp]
+ mov DWORD[rbp],eax
+ mov DWORD[4+rbp],ebx
+ mov DWORD[8+rbp],ecx
+ mov DWORD[12+rbp],edx
+
+ jmp NEAR $L$cbc_fast_cleanup
+
+
+ALIGN 16
+$L$FAST_DECRYPT:
+ cmp r9,r8
+ je NEAR $L$cbc_fast_dec_in_place
+
+ mov QWORD[64+rsp],rbp
+ALIGN 4
+$L$cbc_fast_dec_loop:
+ mov eax,DWORD[r8]
+ mov ebx,DWORD[4+r8]
+ mov ecx,DWORD[8+r8]
+ mov edx,DWORD[12+r8]
+ mov r15,QWORD[rsp]
+ mov QWORD[24+rsp],r8
+
+ call _x86_64_AES_decrypt
+
+ mov rbp,QWORD[64+rsp]
+ mov r8,QWORD[24+rsp]
+ mov r10,QWORD[40+rsp]
+ xor eax,DWORD[rbp]
+ xor ebx,DWORD[4+rbp]
+ xor ecx,DWORD[8+rbp]
+ xor edx,DWORD[12+rbp]
+ mov rbp,r8
+
+ sub r10,16
+ mov QWORD[40+rsp],r10
+ mov QWORD[64+rsp],rbp
+
+ mov DWORD[r9],eax
+ mov DWORD[4+r9],ebx
+ mov DWORD[8+r9],ecx
+ mov DWORD[12+r9],edx
+
+ lea r8,[16+r8]
+ lea r9,[16+r9]
+ jnz NEAR $L$cbc_fast_dec_loop
+ mov r12,QWORD[56+rsp]
+ mov r10,QWORD[rbp]
+ mov r11,QWORD[8+rbp]
+ mov QWORD[r12],r10
+ mov QWORD[8+r12],r11
+ jmp NEAR $L$cbc_fast_cleanup
+
+ALIGN 16
+$L$cbc_fast_dec_in_place:
+ mov r10,QWORD[rbp]
+ mov r11,QWORD[8+rbp]
+ mov QWORD[((0+64))+rsp],r10
+ mov QWORD[((8+64))+rsp],r11
+ALIGN 4
+$L$cbc_fast_dec_in_place_loop:
+ mov eax,DWORD[r8]
+ mov ebx,DWORD[4+r8]
+ mov ecx,DWORD[8+r8]
+ mov edx,DWORD[12+r8]
+ mov r15,QWORD[rsp]
+ mov QWORD[24+rsp],r8
+
+ call _x86_64_AES_decrypt
+
+ mov r8,QWORD[24+rsp]
+ mov r10,QWORD[40+rsp]
+ xor eax,DWORD[((0+64))+rsp]
+ xor ebx,DWORD[((4+64))+rsp]
+ xor ecx,DWORD[((8+64))+rsp]
+ xor edx,DWORD[((12+64))+rsp]
+
+ mov r11,QWORD[r8]
+ mov r12,QWORD[8+r8]
+ sub r10,16
+ jz NEAR $L$cbc_fast_dec_in_place_done
+
+ mov QWORD[((0+64))+rsp],r11
+ mov QWORD[((8+64))+rsp],r12
+
+ mov DWORD[r9],eax
+ mov DWORD[4+r9],ebx
+ mov DWORD[8+r9],ecx
+ mov DWORD[12+r9],edx
+
+ lea r8,[16+r8]
+ lea r9,[16+r9]
+ mov QWORD[40+rsp],r10
+ jmp NEAR $L$cbc_fast_dec_in_place_loop
+$L$cbc_fast_dec_in_place_done:
+ mov rdi,QWORD[56+rsp]
+ mov QWORD[rdi],r11
+ mov QWORD[8+rdi],r12
+
+ mov DWORD[r9],eax
+ mov DWORD[4+r9],ebx
+ mov DWORD[8+r9],ecx
+ mov DWORD[12+r9],edx
+
+ALIGN 4
+$L$cbc_fast_cleanup:
+ cmp DWORD[((80+240))+rsp],0
+ lea rdi,[80+rsp]
+ je NEAR $L$cbc_exit
+ mov ecx,240/8
+ xor rax,rax
+ DD 0x90AB48F3
+
+ jmp NEAR $L$cbc_exit
+
+
+ALIGN 16
+$L$cbc_slow_prologue:
+
+
+ lea rbp,[((-88))+rsp]
+ and rbp,-64
+
+ lea r10,[((-88-63))+rcx]
+ sub r10,rbp
+ neg r10
+ and r10,0x3c0
+ sub rbp,r10
+
+ xchg rbp,rsp
+
+
+ mov QWORD[16+rsp],rbp
+
+$L$cbc_slow_body:
+
+
+
+
+ mov QWORD[56+rsp],r8
+ mov rbp,r8
+ mov rbx,r9
+ mov r9,rsi
+ mov r8,rdi
+ mov r15,rcx
+ mov r10,rdx
+
+ mov eax,DWORD[240+r15]
+ mov QWORD[rsp],r15
+ shl eax,4
+ lea rax,[rax*1+r15]
+ mov QWORD[8+rsp],rax
+
+
+ lea r14,[2048+r14]
+ lea rax,[((768-8))+rsp]
+ sub rax,r14
+ and rax,0x300
+ lea r14,[rax*1+r14]
+
+ cmp rbx,0
+ je NEAR $L$SLOW_DECRYPT
+
+
+ test r10,-16
+ mov eax,DWORD[rbp]
+ mov ebx,DWORD[4+rbp]
+ mov ecx,DWORD[8+rbp]
+ mov edx,DWORD[12+rbp]
+ jz NEAR $L$cbc_slow_enc_tail
+
+ALIGN 4
+$L$cbc_slow_enc_loop:
+ xor eax,DWORD[r8]
+ xor ebx,DWORD[4+r8]
+ xor ecx,DWORD[8+r8]
+ xor edx,DWORD[12+r8]
+ mov r15,QWORD[rsp]
+ mov QWORD[24+rsp],r8
+ mov QWORD[32+rsp],r9
+ mov QWORD[40+rsp],r10
+
+ call _x86_64_AES_encrypt_compact
+
+ mov r8,QWORD[24+rsp]
+ mov r9,QWORD[32+rsp]
+ mov r10,QWORD[40+rsp]
+ mov DWORD[r9],eax
+ mov DWORD[4+r9],ebx
+ mov DWORD[8+r9],ecx
+ mov DWORD[12+r9],edx
+
+ lea r8,[16+r8]
+ lea r9,[16+r9]
+ sub r10,16
+ test r10,-16
+ jnz NEAR $L$cbc_slow_enc_loop
+ test r10,15
+ jnz NEAR $L$cbc_slow_enc_tail
+ mov rbp,QWORD[56+rsp]
+ mov DWORD[rbp],eax
+ mov DWORD[4+rbp],ebx
+ mov DWORD[8+rbp],ecx
+ mov DWORD[12+rbp],edx
+
+ jmp NEAR $L$cbc_exit
+
+ALIGN 4
+$L$cbc_slow_enc_tail:
+ mov r11,rax
+ mov r12,rcx
+ mov rcx,r10
+ mov rsi,r8
+ mov rdi,r9
+ DD 0x9066A4F3
+ mov rcx,16
+ sub rcx,r10
+ xor rax,rax
+ DD 0x9066AAF3
+ mov r8,r9
+ mov r10,16
+ mov rax,r11
+ mov rcx,r12
+ jmp NEAR $L$cbc_slow_enc_loop
+
+ALIGN 16
+$L$SLOW_DECRYPT:
+ shr rax,3
+ add r14,rax
+
+ mov r11,QWORD[rbp]
+ mov r12,QWORD[8+rbp]
+ mov QWORD[((0+64))+rsp],r11
+ mov QWORD[((8+64))+rsp],r12
+
+ALIGN 4
+$L$cbc_slow_dec_loop:
+ mov eax,DWORD[r8]
+ mov ebx,DWORD[4+r8]
+ mov ecx,DWORD[8+r8]
+ mov edx,DWORD[12+r8]
+ mov r15,QWORD[rsp]
+ mov QWORD[24+rsp],r8
+ mov QWORD[32+rsp],r9
+ mov QWORD[40+rsp],r10
+
+ call _x86_64_AES_decrypt_compact
+
+ mov r8,QWORD[24+rsp]
+ mov r9,QWORD[32+rsp]
+ mov r10,QWORD[40+rsp]
+ xor eax,DWORD[((0+64))+rsp]
+ xor ebx,DWORD[((4+64))+rsp]
+ xor ecx,DWORD[((8+64))+rsp]
+ xor edx,DWORD[((12+64))+rsp]
+
+ mov r11,QWORD[r8]
+ mov r12,QWORD[8+r8]
+ sub r10,16
+ jc NEAR $L$cbc_slow_dec_partial
+ jz NEAR $L$cbc_slow_dec_done
+
+ mov QWORD[((0+64))+rsp],r11
+ mov QWORD[((8+64))+rsp],r12
+
+ mov DWORD[r9],eax
+ mov DWORD[4+r9],ebx
+ mov DWORD[8+r9],ecx
+ mov DWORD[12+r9],edx
+
+ lea r8,[16+r8]
+ lea r9,[16+r9]
+ jmp NEAR $L$cbc_slow_dec_loop
+$L$cbc_slow_dec_done:
+ mov rdi,QWORD[56+rsp]
+ mov QWORD[rdi],r11
+ mov QWORD[8+rdi],r12
+
+ mov DWORD[r9],eax
+ mov DWORD[4+r9],ebx
+ mov DWORD[8+r9],ecx
+ mov DWORD[12+r9],edx
+
+ jmp NEAR $L$cbc_exit
+
+ALIGN 4
+$L$cbc_slow_dec_partial:
+ mov rdi,QWORD[56+rsp]
+ mov QWORD[rdi],r11
+ mov QWORD[8+rdi],r12
+
+ mov DWORD[((0+64))+rsp],eax
+ mov DWORD[((4+64))+rsp],ebx
+ mov DWORD[((8+64))+rsp],ecx
+ mov DWORD[((12+64))+rsp],edx
+
+ mov rdi,r9
+ lea rsi,[64+rsp]
+ lea rcx,[16+r10]
+ DD 0x9066A4F3
+ jmp NEAR $L$cbc_exit
+
+ALIGN 16
+$L$cbc_exit:
+ mov rsi,QWORD[16+rsp]
+
+ mov r15,QWORD[rsi]
+
+ mov r14,QWORD[8+rsi]
+
+ mov r13,QWORD[16+rsi]
+
+ mov r12,QWORD[24+rsi]
+
+ mov rbp,QWORD[32+rsi]
+
+ mov rbx,QWORD[40+rsi]
+
+ lea rsp,[48+rsi]
+
+$L$cbc_popfq:
+ popfq
+
+
+
+$L$cbc_epilogue:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_AES_cbc_encrypt:
+ALIGN 64
+$L$AES_Te:
+ DD 0xa56363c6,0xa56363c6
+ DD 0x847c7cf8,0x847c7cf8
+ DD 0x997777ee,0x997777ee
+ DD 0x8d7b7bf6,0x8d7b7bf6
+ DD 0x0df2f2ff,0x0df2f2ff
+ DD 0xbd6b6bd6,0xbd6b6bd6
+ DD 0xb16f6fde,0xb16f6fde
+ DD 0x54c5c591,0x54c5c591
+ DD 0x50303060,0x50303060
+ DD 0x03010102,0x03010102
+ DD 0xa96767ce,0xa96767ce
+ DD 0x7d2b2b56,0x7d2b2b56
+ DD 0x19fefee7,0x19fefee7
+ DD 0x62d7d7b5,0x62d7d7b5
+ DD 0xe6abab4d,0xe6abab4d
+ DD 0x9a7676ec,0x9a7676ec
+ DD 0x45caca8f,0x45caca8f
+ DD 0x9d82821f,0x9d82821f
+ DD 0x40c9c989,0x40c9c989
+ DD 0x877d7dfa,0x877d7dfa
+ DD 0x15fafaef,0x15fafaef
+ DD 0xeb5959b2,0xeb5959b2
+ DD 0xc947478e,0xc947478e
+ DD 0x0bf0f0fb,0x0bf0f0fb
+ DD 0xecadad41,0xecadad41
+ DD 0x67d4d4b3,0x67d4d4b3
+ DD 0xfda2a25f,0xfda2a25f
+ DD 0xeaafaf45,0xeaafaf45
+ DD 0xbf9c9c23,0xbf9c9c23
+ DD 0xf7a4a453,0xf7a4a453
+ DD 0x967272e4,0x967272e4
+ DD 0x5bc0c09b,0x5bc0c09b
+ DD 0xc2b7b775,0xc2b7b775
+ DD 0x1cfdfde1,0x1cfdfde1
+ DD 0xae93933d,0xae93933d
+ DD 0x6a26264c,0x6a26264c
+ DD 0x5a36366c,0x5a36366c
+ DD 0x413f3f7e,0x413f3f7e
+ DD 0x02f7f7f5,0x02f7f7f5
+ DD 0x4fcccc83,0x4fcccc83
+ DD 0x5c343468,0x5c343468
+ DD 0xf4a5a551,0xf4a5a551
+ DD 0x34e5e5d1,0x34e5e5d1
+ DD 0x08f1f1f9,0x08f1f1f9
+ DD 0x937171e2,0x937171e2
+ DD 0x73d8d8ab,0x73d8d8ab
+ DD 0x53313162,0x53313162
+ DD 0x3f15152a,0x3f15152a
+ DD 0x0c040408,0x0c040408
+ DD 0x52c7c795,0x52c7c795
+ DD 0x65232346,0x65232346
+ DD 0x5ec3c39d,0x5ec3c39d
+ DD 0x28181830,0x28181830
+ DD 0xa1969637,0xa1969637
+ DD 0x0f05050a,0x0f05050a
+ DD 0xb59a9a2f,0xb59a9a2f
+ DD 0x0907070e,0x0907070e
+ DD 0x36121224,0x36121224
+ DD 0x9b80801b,0x9b80801b
+ DD 0x3de2e2df,0x3de2e2df
+ DD 0x26ebebcd,0x26ebebcd
+ DD 0x6927274e,0x6927274e
+ DD 0xcdb2b27f,0xcdb2b27f
+ DD 0x9f7575ea,0x9f7575ea
+ DD 0x1b090912,0x1b090912
+ DD 0x9e83831d,0x9e83831d
+ DD 0x742c2c58,0x742c2c58
+ DD 0x2e1a1a34,0x2e1a1a34
+ DD 0x2d1b1b36,0x2d1b1b36
+ DD 0xb26e6edc,0xb26e6edc
+ DD 0xee5a5ab4,0xee5a5ab4
+ DD 0xfba0a05b,0xfba0a05b
+ DD 0xf65252a4,0xf65252a4
+ DD 0x4d3b3b76,0x4d3b3b76
+ DD 0x61d6d6b7,0x61d6d6b7
+ DD 0xceb3b37d,0xceb3b37d
+ DD 0x7b292952,0x7b292952
+ DD 0x3ee3e3dd,0x3ee3e3dd
+ DD 0x712f2f5e,0x712f2f5e
+ DD 0x97848413,0x97848413
+ DD 0xf55353a6,0xf55353a6
+ DD 0x68d1d1b9,0x68d1d1b9
+ DD 0x00000000,0x00000000
+ DD 0x2cededc1,0x2cededc1
+ DD 0x60202040,0x60202040
+ DD 0x1ffcfce3,0x1ffcfce3
+ DD 0xc8b1b179,0xc8b1b179
+ DD 0xed5b5bb6,0xed5b5bb6
+ DD 0xbe6a6ad4,0xbe6a6ad4
+ DD 0x46cbcb8d,0x46cbcb8d
+ DD 0xd9bebe67,0xd9bebe67
+ DD 0x4b393972,0x4b393972
+ DD 0xde4a4a94,0xde4a4a94
+ DD 0xd44c4c98,0xd44c4c98
+ DD 0xe85858b0,0xe85858b0
+ DD 0x4acfcf85,0x4acfcf85
+ DD 0x6bd0d0bb,0x6bd0d0bb
+ DD 0x2aefefc5,0x2aefefc5
+ DD 0xe5aaaa4f,0xe5aaaa4f
+ DD 0x16fbfbed,0x16fbfbed
+ DD 0xc5434386,0xc5434386
+ DD 0xd74d4d9a,0xd74d4d9a
+ DD 0x55333366,0x55333366
+ DD 0x94858511,0x94858511
+ DD 0xcf45458a,0xcf45458a
+ DD 0x10f9f9e9,0x10f9f9e9
+ DD 0x06020204,0x06020204
+ DD 0x817f7ffe,0x817f7ffe
+ DD 0xf05050a0,0xf05050a0
+ DD 0x443c3c78,0x443c3c78
+ DD 0xba9f9f25,0xba9f9f25
+ DD 0xe3a8a84b,0xe3a8a84b
+ DD 0xf35151a2,0xf35151a2
+ DD 0xfea3a35d,0xfea3a35d
+ DD 0xc0404080,0xc0404080
+ DD 0x8a8f8f05,0x8a8f8f05
+ DD 0xad92923f,0xad92923f
+ DD 0xbc9d9d21,0xbc9d9d21
+ DD 0x48383870,0x48383870
+ DD 0x04f5f5f1,0x04f5f5f1
+ DD 0xdfbcbc63,0xdfbcbc63
+ DD 0xc1b6b677,0xc1b6b677
+ DD 0x75dadaaf,0x75dadaaf
+ DD 0x63212142,0x63212142
+ DD 0x30101020,0x30101020
+ DD 0x1affffe5,0x1affffe5
+ DD 0x0ef3f3fd,0x0ef3f3fd
+ DD 0x6dd2d2bf,0x6dd2d2bf
+ DD 0x4ccdcd81,0x4ccdcd81
+ DD 0x140c0c18,0x140c0c18
+ DD 0x35131326,0x35131326
+ DD 0x2fececc3,0x2fececc3
+ DD 0xe15f5fbe,0xe15f5fbe
+ DD 0xa2979735,0xa2979735
+ DD 0xcc444488,0xcc444488
+ DD 0x3917172e,0x3917172e
+ DD 0x57c4c493,0x57c4c493
+ DD 0xf2a7a755,0xf2a7a755
+ DD 0x827e7efc,0x827e7efc
+ DD 0x473d3d7a,0x473d3d7a
+ DD 0xac6464c8,0xac6464c8
+ DD 0xe75d5dba,0xe75d5dba
+ DD 0x2b191932,0x2b191932
+ DD 0x957373e6,0x957373e6
+ DD 0xa06060c0,0xa06060c0
+ DD 0x98818119,0x98818119
+ DD 0xd14f4f9e,0xd14f4f9e
+ DD 0x7fdcdca3,0x7fdcdca3
+ DD 0x66222244,0x66222244
+ DD 0x7e2a2a54,0x7e2a2a54
+ DD 0xab90903b,0xab90903b
+ DD 0x8388880b,0x8388880b
+ DD 0xca46468c,0xca46468c
+ DD 0x29eeeec7,0x29eeeec7
+ DD 0xd3b8b86b,0xd3b8b86b
+ DD 0x3c141428,0x3c141428
+ DD 0x79dedea7,0x79dedea7
+ DD 0xe25e5ebc,0xe25e5ebc
+ DD 0x1d0b0b16,0x1d0b0b16
+ DD 0x76dbdbad,0x76dbdbad
+ DD 0x3be0e0db,0x3be0e0db
+ DD 0x56323264,0x56323264
+ DD 0x4e3a3a74,0x4e3a3a74
+ DD 0x1e0a0a14,0x1e0a0a14
+ DD 0xdb494992,0xdb494992
+ DD 0x0a06060c,0x0a06060c
+ DD 0x6c242448,0x6c242448
+ DD 0xe45c5cb8,0xe45c5cb8
+ DD 0x5dc2c29f,0x5dc2c29f
+ DD 0x6ed3d3bd,0x6ed3d3bd
+ DD 0xefacac43,0xefacac43
+ DD 0xa66262c4,0xa66262c4
+ DD 0xa8919139,0xa8919139
+ DD 0xa4959531,0xa4959531
+ DD 0x37e4e4d3,0x37e4e4d3
+ DD 0x8b7979f2,0x8b7979f2
+ DD 0x32e7e7d5,0x32e7e7d5
+ DD 0x43c8c88b,0x43c8c88b
+ DD 0x5937376e,0x5937376e
+ DD 0xb76d6dda,0xb76d6dda
+ DD 0x8c8d8d01,0x8c8d8d01
+ DD 0x64d5d5b1,0x64d5d5b1
+ DD 0xd24e4e9c,0xd24e4e9c
+ DD 0xe0a9a949,0xe0a9a949
+ DD 0xb46c6cd8,0xb46c6cd8
+ DD 0xfa5656ac,0xfa5656ac
+ DD 0x07f4f4f3,0x07f4f4f3
+ DD 0x25eaeacf,0x25eaeacf
+ DD 0xaf6565ca,0xaf6565ca
+ DD 0x8e7a7af4,0x8e7a7af4
+ DD 0xe9aeae47,0xe9aeae47
+ DD 0x18080810,0x18080810
+ DD 0xd5baba6f,0xd5baba6f
+ DD 0x887878f0,0x887878f0
+ DD 0x6f25254a,0x6f25254a
+ DD 0x722e2e5c,0x722e2e5c
+ DD 0x241c1c38,0x241c1c38
+ DD 0xf1a6a657,0xf1a6a657
+ DD 0xc7b4b473,0xc7b4b473
+ DD 0x51c6c697,0x51c6c697
+ DD 0x23e8e8cb,0x23e8e8cb
+ DD 0x7cdddda1,0x7cdddda1
+ DD 0x9c7474e8,0x9c7474e8
+ DD 0x211f1f3e,0x211f1f3e
+ DD 0xdd4b4b96,0xdd4b4b96
+ DD 0xdcbdbd61,0xdcbdbd61
+ DD 0x868b8b0d,0x868b8b0d
+ DD 0x858a8a0f,0x858a8a0f
+ DD 0x907070e0,0x907070e0
+ DD 0x423e3e7c,0x423e3e7c
+ DD 0xc4b5b571,0xc4b5b571
+ DD 0xaa6666cc,0xaa6666cc
+ DD 0xd8484890,0xd8484890
+ DD 0x05030306,0x05030306
+ DD 0x01f6f6f7,0x01f6f6f7
+ DD 0x120e0e1c,0x120e0e1c
+ DD 0xa36161c2,0xa36161c2
+ DD 0x5f35356a,0x5f35356a
+ DD 0xf95757ae,0xf95757ae
+ DD 0xd0b9b969,0xd0b9b969
+ DD 0x91868617,0x91868617
+ DD 0x58c1c199,0x58c1c199
+ DD 0x271d1d3a,0x271d1d3a
+ DD 0xb99e9e27,0xb99e9e27
+ DD 0x38e1e1d9,0x38e1e1d9
+ DD 0x13f8f8eb,0x13f8f8eb
+ DD 0xb398982b,0xb398982b
+ DD 0x33111122,0x33111122
+ DD 0xbb6969d2,0xbb6969d2
+ DD 0x70d9d9a9,0x70d9d9a9
+ DD 0x898e8e07,0x898e8e07
+ DD 0xa7949433,0xa7949433
+ DD 0xb69b9b2d,0xb69b9b2d
+ DD 0x221e1e3c,0x221e1e3c
+ DD 0x92878715,0x92878715
+ DD 0x20e9e9c9,0x20e9e9c9
+ DD 0x49cece87,0x49cece87
+ DD 0xff5555aa,0xff5555aa
+ DD 0x78282850,0x78282850
+ DD 0x7adfdfa5,0x7adfdfa5
+ DD 0x8f8c8c03,0x8f8c8c03
+ DD 0xf8a1a159,0xf8a1a159
+ DD 0x80898909,0x80898909
+ DD 0x170d0d1a,0x170d0d1a
+ DD 0xdabfbf65,0xdabfbf65
+ DD 0x31e6e6d7,0x31e6e6d7
+ DD 0xc6424284,0xc6424284
+ DD 0xb86868d0,0xb86868d0
+ DD 0xc3414182,0xc3414182
+ DD 0xb0999929,0xb0999929
+ DD 0x772d2d5a,0x772d2d5a
+ DD 0x110f0f1e,0x110f0f1e
+ DD 0xcbb0b07b,0xcbb0b07b
+ DD 0xfc5454a8,0xfc5454a8
+ DD 0xd6bbbb6d,0xd6bbbb6d
+ DD 0x3a16162c,0x3a16162c
+DB 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+DB 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+DB 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+DB 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+DB 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+DB 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+DB 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+DB 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+DB 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+DB 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+DB 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+DB 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+DB 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+DB 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+DB 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+DB 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+DB 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+DB 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+DB 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+DB 0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+DB 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+DB 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+DB 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+DB 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+DB 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+DB 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+DB 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+DB 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+DB 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+DB 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+DB 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+DB 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+DB 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+DB 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+DB 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+DB 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+DB 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+DB 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+DB 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+DB 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+DB 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+DB 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+DB 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+DB 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+DB 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+DB 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+DB 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+DB 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+DB 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+DB 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+DB 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+DB 0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+DB 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+DB 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+DB 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+DB 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+DB 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+DB 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+DB 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+DB 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+DB 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+DB 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+DB 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+DB 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+DB 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+DB 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+DB 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+DB 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+DB 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+DB 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+DB 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+DB 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+DB 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+DB 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+DB 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+DB 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+DB 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+DB 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+DB 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+DB 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+DB 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+DB 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+DB 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+DB 0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+DB 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+DB 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+DB 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+DB 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+DB 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+DB 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+DB 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+DB 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+DB 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+DB 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+DB 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+DB 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+DB 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+DB 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+DB 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+DB 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+DB 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+DB 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+DB 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+DB 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+DB 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+DB 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+DB 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+DB 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+DB 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+DB 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+DB 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+DB 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+DB 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+DB 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+DB 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+DB 0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+DB 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+DB 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+DB 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+DB 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+DB 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+DB 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+DB 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+DB 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+DB 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+DB 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+DB 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+DB 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+ DD 0x00000001,0x00000002,0x00000004,0x00000008
+ DD 0x00000010,0x00000020,0x00000040,0x00000080
+ DD 0x0000001b,0x00000036,0x80808080,0x80808080
+ DD 0xfefefefe,0xfefefefe,0x1b1b1b1b,0x1b1b1b1b
+ALIGN 64
+$L$AES_Td:
+ DD 0x50a7f451,0x50a7f451
+ DD 0x5365417e,0x5365417e
+ DD 0xc3a4171a,0xc3a4171a
+ DD 0x965e273a,0x965e273a
+ DD 0xcb6bab3b,0xcb6bab3b
+ DD 0xf1459d1f,0xf1459d1f
+ DD 0xab58faac,0xab58faac
+ DD 0x9303e34b,0x9303e34b
+ DD 0x55fa3020,0x55fa3020
+ DD 0xf66d76ad,0xf66d76ad
+ DD 0x9176cc88,0x9176cc88
+ DD 0x254c02f5,0x254c02f5
+ DD 0xfcd7e54f,0xfcd7e54f
+ DD 0xd7cb2ac5,0xd7cb2ac5
+ DD 0x80443526,0x80443526
+ DD 0x8fa362b5,0x8fa362b5
+ DD 0x495ab1de,0x495ab1de
+ DD 0x671bba25,0x671bba25
+ DD 0x980eea45,0x980eea45
+ DD 0xe1c0fe5d,0xe1c0fe5d
+ DD 0x02752fc3,0x02752fc3
+ DD 0x12f04c81,0x12f04c81
+ DD 0xa397468d,0xa397468d
+ DD 0xc6f9d36b,0xc6f9d36b
+ DD 0xe75f8f03,0xe75f8f03
+ DD 0x959c9215,0x959c9215
+ DD 0xeb7a6dbf,0xeb7a6dbf
+ DD 0xda595295,0xda595295
+ DD 0x2d83bed4,0x2d83bed4
+ DD 0xd3217458,0xd3217458
+ DD 0x2969e049,0x2969e049
+ DD 0x44c8c98e,0x44c8c98e
+ DD 0x6a89c275,0x6a89c275
+ DD 0x78798ef4,0x78798ef4
+ DD 0x6b3e5899,0x6b3e5899
+ DD 0xdd71b927,0xdd71b927
+ DD 0xb64fe1be,0xb64fe1be
+ DD 0x17ad88f0,0x17ad88f0
+ DD 0x66ac20c9,0x66ac20c9
+ DD 0xb43ace7d,0xb43ace7d
+ DD 0x184adf63,0x184adf63
+ DD 0x82311ae5,0x82311ae5
+ DD 0x60335197,0x60335197
+ DD 0x457f5362,0x457f5362
+ DD 0xe07764b1,0xe07764b1
+ DD 0x84ae6bbb,0x84ae6bbb
+ DD 0x1ca081fe,0x1ca081fe
+ DD 0x942b08f9,0x942b08f9
+ DD 0x58684870,0x58684870
+ DD 0x19fd458f,0x19fd458f
+ DD 0x876cde94,0x876cde94
+ DD 0xb7f87b52,0xb7f87b52
+ DD 0x23d373ab,0x23d373ab
+ DD 0xe2024b72,0xe2024b72
+ DD 0x578f1fe3,0x578f1fe3
+ DD 0x2aab5566,0x2aab5566
+ DD 0x0728ebb2,0x0728ebb2
+ DD 0x03c2b52f,0x03c2b52f
+ DD 0x9a7bc586,0x9a7bc586
+ DD 0xa50837d3,0xa50837d3
+ DD 0xf2872830,0xf2872830
+ DD 0xb2a5bf23,0xb2a5bf23
+ DD 0xba6a0302,0xba6a0302
+ DD 0x5c8216ed,0x5c8216ed
+ DD 0x2b1ccf8a,0x2b1ccf8a
+ DD 0x92b479a7,0x92b479a7
+ DD 0xf0f207f3,0xf0f207f3
+ DD 0xa1e2694e,0xa1e2694e
+ DD 0xcdf4da65,0xcdf4da65
+ DD 0xd5be0506,0xd5be0506
+ DD 0x1f6234d1,0x1f6234d1
+ DD 0x8afea6c4,0x8afea6c4
+ DD 0x9d532e34,0x9d532e34
+ DD 0xa055f3a2,0xa055f3a2
+ DD 0x32e18a05,0x32e18a05
+ DD 0x75ebf6a4,0x75ebf6a4
+ DD 0x39ec830b,0x39ec830b
+ DD 0xaaef6040,0xaaef6040
+ DD 0x069f715e,0x069f715e
+ DD 0x51106ebd,0x51106ebd
+ DD 0xf98a213e,0xf98a213e
+ DD 0x3d06dd96,0x3d06dd96
+ DD 0xae053edd,0xae053edd
+ DD 0x46bde64d,0x46bde64d
+ DD 0xb58d5491,0xb58d5491
+ DD 0x055dc471,0x055dc471
+ DD 0x6fd40604,0x6fd40604
+ DD 0xff155060,0xff155060
+ DD 0x24fb9819,0x24fb9819
+ DD 0x97e9bdd6,0x97e9bdd6
+ DD 0xcc434089,0xcc434089
+ DD 0x779ed967,0x779ed967
+ DD 0xbd42e8b0,0xbd42e8b0
+ DD 0x888b8907,0x888b8907
+ DD 0x385b19e7,0x385b19e7
+ DD 0xdbeec879,0xdbeec879
+ DD 0x470a7ca1,0x470a7ca1
+ DD 0xe90f427c,0xe90f427c
+ DD 0xc91e84f8,0xc91e84f8
+ DD 0x00000000,0x00000000
+ DD 0x83868009,0x83868009
+ DD 0x48ed2b32,0x48ed2b32
+ DD 0xac70111e,0xac70111e
+ DD 0x4e725a6c,0x4e725a6c
+ DD 0xfbff0efd,0xfbff0efd
+ DD 0x5638850f,0x5638850f
+ DD 0x1ed5ae3d,0x1ed5ae3d
+ DD 0x27392d36,0x27392d36
+ DD 0x64d90f0a,0x64d90f0a
+ DD 0x21a65c68,0x21a65c68
+ DD 0xd1545b9b,0xd1545b9b
+ DD 0x3a2e3624,0x3a2e3624
+ DD 0xb1670a0c,0xb1670a0c
+ DD 0x0fe75793,0x0fe75793
+ DD 0xd296eeb4,0xd296eeb4
+ DD 0x9e919b1b,0x9e919b1b
+ DD 0x4fc5c080,0x4fc5c080
+ DD 0xa220dc61,0xa220dc61
+ DD 0x694b775a,0x694b775a
+ DD 0x161a121c,0x161a121c
+ DD 0x0aba93e2,0x0aba93e2
+ DD 0xe52aa0c0,0xe52aa0c0
+ DD 0x43e0223c,0x43e0223c
+ DD 0x1d171b12,0x1d171b12
+ DD 0x0b0d090e,0x0b0d090e
+ DD 0xadc78bf2,0xadc78bf2
+ DD 0xb9a8b62d,0xb9a8b62d
+ DD 0xc8a91e14,0xc8a91e14
+ DD 0x8519f157,0x8519f157
+ DD 0x4c0775af,0x4c0775af
+ DD 0xbbdd99ee,0xbbdd99ee
+ DD 0xfd607fa3,0xfd607fa3
+ DD 0x9f2601f7,0x9f2601f7
+ DD 0xbcf5725c,0xbcf5725c
+ DD 0xc53b6644,0xc53b6644
+ DD 0x347efb5b,0x347efb5b
+ DD 0x7629438b,0x7629438b
+ DD 0xdcc623cb,0xdcc623cb
+ DD 0x68fcedb6,0x68fcedb6
+ DD 0x63f1e4b8,0x63f1e4b8
+ DD 0xcadc31d7,0xcadc31d7
+ DD 0x10856342,0x10856342
+ DD 0x40229713,0x40229713
+ DD 0x2011c684,0x2011c684
+ DD 0x7d244a85,0x7d244a85
+ DD 0xf83dbbd2,0xf83dbbd2
+ DD 0x1132f9ae,0x1132f9ae
+ DD 0x6da129c7,0x6da129c7
+ DD 0x4b2f9e1d,0x4b2f9e1d
+ DD 0xf330b2dc,0xf330b2dc
+ DD 0xec52860d,0xec52860d
+ DD 0xd0e3c177,0xd0e3c177
+ DD 0x6c16b32b,0x6c16b32b
+ DD 0x99b970a9,0x99b970a9
+ DD 0xfa489411,0xfa489411
+ DD 0x2264e947,0x2264e947
+ DD 0xc48cfca8,0xc48cfca8
+ DD 0x1a3ff0a0,0x1a3ff0a0
+ DD 0xd82c7d56,0xd82c7d56
+ DD 0xef903322,0xef903322
+ DD 0xc74e4987,0xc74e4987
+ DD 0xc1d138d9,0xc1d138d9
+ DD 0xfea2ca8c,0xfea2ca8c
+ DD 0x360bd498,0x360bd498
+ DD 0xcf81f5a6,0xcf81f5a6
+ DD 0x28de7aa5,0x28de7aa5
+ DD 0x268eb7da,0x268eb7da
+ DD 0xa4bfad3f,0xa4bfad3f
+ DD 0xe49d3a2c,0xe49d3a2c
+ DD 0x0d927850,0x0d927850
+ DD 0x9bcc5f6a,0x9bcc5f6a
+ DD 0x62467e54,0x62467e54
+ DD 0xc2138df6,0xc2138df6
+ DD 0xe8b8d890,0xe8b8d890
+ DD 0x5ef7392e,0x5ef7392e
+ DD 0xf5afc382,0xf5afc382
+ DD 0xbe805d9f,0xbe805d9f
+ DD 0x7c93d069,0x7c93d069
+ DD 0xa92dd56f,0xa92dd56f
+ DD 0xb31225cf,0xb31225cf
+ DD 0x3b99acc8,0x3b99acc8
+ DD 0xa77d1810,0xa77d1810
+ DD 0x6e639ce8,0x6e639ce8
+ DD 0x7bbb3bdb,0x7bbb3bdb
+ DD 0x097826cd,0x097826cd
+ DD 0xf418596e,0xf418596e
+ DD 0x01b79aec,0x01b79aec
+ DD 0xa89a4f83,0xa89a4f83
+ DD 0x656e95e6,0x656e95e6
+ DD 0x7ee6ffaa,0x7ee6ffaa
+ DD 0x08cfbc21,0x08cfbc21
+ DD 0xe6e815ef,0xe6e815ef
+ DD 0xd99be7ba,0xd99be7ba
+ DD 0xce366f4a,0xce366f4a
+ DD 0xd4099fea,0xd4099fea
+ DD 0xd67cb029,0xd67cb029
+ DD 0xafb2a431,0xafb2a431
+ DD 0x31233f2a,0x31233f2a
+ DD 0x3094a5c6,0x3094a5c6
+ DD 0xc066a235,0xc066a235
+ DD 0x37bc4e74,0x37bc4e74
+ DD 0xa6ca82fc,0xa6ca82fc
+ DD 0xb0d090e0,0xb0d090e0
+ DD 0x15d8a733,0x15d8a733
+ DD 0x4a9804f1,0x4a9804f1
+ DD 0xf7daec41,0xf7daec41
+ DD 0x0e50cd7f,0x0e50cd7f
+ DD 0x2ff69117,0x2ff69117
+ DD 0x8dd64d76,0x8dd64d76
+ DD 0x4db0ef43,0x4db0ef43
+ DD 0x544daacc,0x544daacc
+ DD 0xdf0496e4,0xdf0496e4
+ DD 0xe3b5d19e,0xe3b5d19e
+ DD 0x1b886a4c,0x1b886a4c
+ DD 0xb81f2cc1,0xb81f2cc1
+ DD 0x7f516546,0x7f516546
+ DD 0x04ea5e9d,0x04ea5e9d
+ DD 0x5d358c01,0x5d358c01
+ DD 0x737487fa,0x737487fa
+ DD 0x2e410bfb,0x2e410bfb
+ DD 0x5a1d67b3,0x5a1d67b3
+ DD 0x52d2db92,0x52d2db92
+ DD 0x335610e9,0x335610e9
+ DD 0x1347d66d,0x1347d66d
+ DD 0x8c61d79a,0x8c61d79a
+ DD 0x7a0ca137,0x7a0ca137
+ DD 0x8e14f859,0x8e14f859
+ DD 0x893c13eb,0x893c13eb
+ DD 0xee27a9ce,0xee27a9ce
+ DD 0x35c961b7,0x35c961b7
+ DD 0xede51ce1,0xede51ce1
+ DD 0x3cb1477a,0x3cb1477a
+ DD 0x59dfd29c,0x59dfd29c
+ DD 0x3f73f255,0x3f73f255
+ DD 0x79ce1418,0x79ce1418
+ DD 0xbf37c773,0xbf37c773
+ DD 0xeacdf753,0xeacdf753
+ DD 0x5baafd5f,0x5baafd5f
+ DD 0x146f3ddf,0x146f3ddf
+ DD 0x86db4478,0x86db4478
+ DD 0x81f3afca,0x81f3afca
+ DD 0x3ec468b9,0x3ec468b9
+ DD 0x2c342438,0x2c342438
+ DD 0x5f40a3c2,0x5f40a3c2
+ DD 0x72c31d16,0x72c31d16
+ DD 0x0c25e2bc,0x0c25e2bc
+ DD 0x8b493c28,0x8b493c28
+ DD 0x41950dff,0x41950dff
+ DD 0x7101a839,0x7101a839
+ DD 0xdeb30c08,0xdeb30c08
+ DD 0x9ce4b4d8,0x9ce4b4d8
+ DD 0x90c15664,0x90c15664
+ DD 0x6184cb7b,0x6184cb7b
+ DD 0x70b632d5,0x70b632d5
+ DD 0x745c6c48,0x745c6c48
+ DD 0x4257b8d0,0x4257b8d0
+DB 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+DB 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+DB 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+DB 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+DB 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+DB 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+DB 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+DB 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+DB 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+DB 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+DB 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+DB 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+DB 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+DB 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+DB 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+DB 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+DB 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+DB 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+DB 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+DB 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+DB 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+DB 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+DB 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+DB 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+DB 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+DB 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+DB 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+DB 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+DB 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+DB 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+DB 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+DB 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+ DD 0x80808080,0x80808080,0xfefefefe,0xfefefefe
+ DD 0x1b1b1b1b,0x1b1b1b1b,0,0
+DB 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+DB 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+DB 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+DB 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+DB 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+DB 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+DB 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+DB 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+DB 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+DB 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+DB 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+DB 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+DB 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+DB 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+DB 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+DB 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+DB 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+DB 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+DB 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+DB 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+DB 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+DB 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+DB 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+DB 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+DB 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+DB 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+DB 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+DB 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+DB 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+DB 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+DB 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+DB 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+ DD 0x80808080,0x80808080,0xfefefefe,0xfefefefe
+ DD 0x1b1b1b1b,0x1b1b1b1b,0,0
+DB 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+DB 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+DB 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+DB 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+DB 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+DB 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+DB 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+DB 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+DB 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+DB 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+DB 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+DB 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+DB 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+DB 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+DB 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+DB 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+DB 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+DB 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+DB 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+DB 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+DB 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+DB 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+DB 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+DB 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+DB 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+DB 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+DB 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+DB 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+DB 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+DB 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+DB 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+DB 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+ DD 0x80808080,0x80808080,0xfefefefe,0xfefefefe
+ DD 0x1b1b1b1b,0x1b1b1b1b,0,0
+DB 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+DB 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+DB 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+DB 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+DB 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+DB 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+DB 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+DB 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+DB 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+DB 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+DB 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+DB 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+DB 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+DB 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+DB 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+DB 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+DB 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+DB 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+DB 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+DB 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+DB 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+DB 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+DB 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+DB 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+DB 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+DB 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+DB 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+DB 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+DB 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+DB 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+DB 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+DB 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+ DD 0x80808080,0x80808080,0xfefefefe,0xfefefefe
+ DD 0x1b1b1b1b,0x1b1b1b1b,0,0
+DB 65,69,83,32,102,111,114,32,120,56,54,95,54,52,44,32
+DB 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+DB 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+DB 62,0
+ALIGN 64
+EXTERN __imp_RtlVirtualUnwind
+
+ALIGN 16
+block_se_handler:
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD[120+r8]
+ mov rbx,QWORD[248+r8]
+
+ mov rsi,QWORD[8+r9]
+ mov r11,QWORD[56+r9]
+
+ mov r10d,DWORD[r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jb NEAR $L$in_block_prologue
+
+ mov rax,QWORD[152+r8]
+
+ mov r10d,DWORD[4+r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jae NEAR $L$in_block_prologue
+
+ mov rax,QWORD[24+rax]
+
+ mov rbx,QWORD[((-8))+rax]
+ mov rbp,QWORD[((-16))+rax]
+ mov r12,QWORD[((-24))+rax]
+ mov r13,QWORD[((-32))+rax]
+ mov r14,QWORD[((-40))+rax]
+ mov r15,QWORD[((-48))+rax]
+ mov QWORD[144+r8],rbx
+ mov QWORD[160+r8],rbp
+ mov QWORD[216+r8],r12
+ mov QWORD[224+r8],r13
+ mov QWORD[232+r8],r14
+ mov QWORD[240+r8],r15
+
+$L$in_block_prologue:
+ mov rdi,QWORD[8+rax]
+ mov rsi,QWORD[16+rax]
+ mov QWORD[152+r8],rax
+ mov QWORD[168+r8],rsi
+ mov QWORD[176+r8],rdi
+
+ jmp NEAR $L$common_seh_exit
+
+
+
+ALIGN 16
+key_se_handler:
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD[120+r8]
+ mov rbx,QWORD[248+r8]
+
+ mov rsi,QWORD[8+r9]
+ mov r11,QWORD[56+r9]
+
+ mov r10d,DWORD[r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jb NEAR $L$in_key_prologue
+
+ mov rax,QWORD[152+r8]
+
+ mov r10d,DWORD[4+r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jae NEAR $L$in_key_prologue
+
+ lea rax,[56+rax]
+
+ mov rbx,QWORD[((-8))+rax]
+ mov rbp,QWORD[((-16))+rax]
+ mov r12,QWORD[((-24))+rax]
+ mov r13,QWORD[((-32))+rax]
+ mov r14,QWORD[((-40))+rax]
+ mov r15,QWORD[((-48))+rax]
+ mov QWORD[144+r8],rbx
+ mov QWORD[160+r8],rbp
+ mov QWORD[216+r8],r12
+ mov QWORD[224+r8],r13
+ mov QWORD[232+r8],r14
+ mov QWORD[240+r8],r15
+
+$L$in_key_prologue:
+ mov rdi,QWORD[8+rax]
+ mov rsi,QWORD[16+rax]
+ mov QWORD[152+r8],rax
+ mov QWORD[168+r8],rsi
+ mov QWORD[176+r8],rdi
+
+ jmp NEAR $L$common_seh_exit
+
+
+
+ALIGN 16
+cbc_se_handler:
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD[120+r8]
+ mov rbx,QWORD[248+r8]
+
+ lea r10,[$L$cbc_prologue]
+ cmp rbx,r10
+ jb NEAR $L$in_cbc_prologue
+
+ lea r10,[$L$cbc_fast_body]
+ cmp rbx,r10
+ jb NEAR $L$in_cbc_frame_setup
+
+ lea r10,[$L$cbc_slow_prologue]
+ cmp rbx,r10
+ jb NEAR $L$in_cbc_body
+
+ lea r10,[$L$cbc_slow_body]
+ cmp rbx,r10
+ jb NEAR $L$in_cbc_frame_setup
+
+$L$in_cbc_body:
+ mov rax,QWORD[152+r8]
+
+ lea r10,[$L$cbc_epilogue]
+ cmp rbx,r10
+ jae NEAR $L$in_cbc_prologue
+
+ lea rax,[8+rax]
+
+ lea r10,[$L$cbc_popfq]
+ cmp rbx,r10
+ jae NEAR $L$in_cbc_prologue
+
+ mov rax,QWORD[8+rax]
+ lea rax,[56+rax]
+
+$L$in_cbc_frame_setup:
+ mov rbx,QWORD[((-16))+rax]
+ mov rbp,QWORD[((-24))+rax]
+ mov r12,QWORD[((-32))+rax]
+ mov r13,QWORD[((-40))+rax]
+ mov r14,QWORD[((-48))+rax]
+ mov r15,QWORD[((-56))+rax]
+ mov QWORD[144+r8],rbx
+ mov QWORD[160+r8],rbp
+ mov QWORD[216+r8],r12
+ mov QWORD[224+r8],r13
+ mov QWORD[232+r8],r14
+ mov QWORD[240+r8],r15
+
+$L$in_cbc_prologue:
+ mov rdi,QWORD[8+rax]
+ mov rsi,QWORD[16+rax]
+ mov QWORD[152+r8],rax
+ mov QWORD[168+r8],rsi
+ mov QWORD[176+r8],rdi
+
+$L$common_seh_exit:
+
+ mov rdi,QWORD[40+r9]
+ mov rsi,r8
+ mov ecx,154
+ DD 0xa548f3fc
+
+ mov rsi,r9
+ xor rcx,rcx
+ mov rdx,QWORD[8+rsi]
+ mov r8,QWORD[rsi]
+ mov r9,QWORD[16+rsi]
+ mov r10,QWORD[40+rsi]
+ lea r11,[56+rsi]
+ lea r12,[24+rsi]
+ mov QWORD[32+rsp],r10
+ mov QWORD[40+rsp],r11
+ mov QWORD[48+rsp],r12
+ mov QWORD[56+rsp],rcx
+ call QWORD[__imp_RtlVirtualUnwind]
+
+ mov eax,1
+ add rsp,64
+ popfq
+ pop r15
+ pop r14
+ pop r13
+ pop r12
+ pop rbp
+ pop rbx
+ pop rdi
+ pop rsi
+ DB 0F3h,0C3h ;repret
+
+
+section .pdata rdata align=4
+ALIGN 4
+ DD $L$SEH_begin_AES_encrypt wrt ..imagebase
+ DD $L$SEH_end_AES_encrypt wrt ..imagebase
+ DD $L$SEH_info_AES_encrypt wrt ..imagebase
+
+ DD $L$SEH_begin_AES_decrypt wrt ..imagebase
+ DD $L$SEH_end_AES_decrypt wrt ..imagebase
+ DD $L$SEH_info_AES_decrypt wrt ..imagebase
+
+ DD $L$SEH_begin_AES_set_encrypt_key wrt ..imagebase
+ DD $L$SEH_end_AES_set_encrypt_key wrt ..imagebase
+ DD $L$SEH_info_AES_set_encrypt_key wrt ..imagebase
+
+ DD $L$SEH_begin_AES_set_decrypt_key wrt ..imagebase
+ DD $L$SEH_end_AES_set_decrypt_key wrt ..imagebase
+ DD $L$SEH_info_AES_set_decrypt_key wrt ..imagebase
+
+ DD $L$SEH_begin_AES_cbc_encrypt wrt ..imagebase
+ DD $L$SEH_end_AES_cbc_encrypt wrt ..imagebase
+ DD $L$SEH_info_AES_cbc_encrypt wrt ..imagebase
+
+section .xdata rdata align=8
+ALIGN 8
+$L$SEH_info_AES_encrypt:
+DB 9,0,0,0
+ DD block_se_handler wrt ..imagebase
+ DD $L$enc_prologue wrt ..imagebase,$L$enc_epilogue wrt ..imagebase
+$L$SEH_info_AES_decrypt:
+DB 9,0,0,0
+ DD block_se_handler wrt ..imagebase
+ DD $L$dec_prologue wrt ..imagebase,$L$dec_epilogue wrt ..imagebase
+$L$SEH_info_AES_set_encrypt_key:
+DB 9,0,0,0
+ DD key_se_handler wrt ..imagebase
+ DD $L$enc_key_prologue wrt ..imagebase,$L$enc_key_epilogue wrt ..imagebase
+$L$SEH_info_AES_set_decrypt_key:
+DB 9,0,0,0
+ DD key_se_handler wrt ..imagebase
+ DD $L$dec_key_prologue wrt ..imagebase,$L$dec_key_epilogue wrt ..imagebase
+$L$SEH_info_AES_cbc_encrypt:
+DB 9,0,0,0
+ DD cbc_se_handler wrt ..imagebase
diff --git a/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aesni-mb-x86_64.nasm b/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aesni-mb-x86_64.nasm
new file mode 100644
index 0000000000..7908342cf4
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aesni-mb-x86_64.nasm
@@ -0,0 +1,1846 @@
+default rel
+%define XMMWORD
+%define YMMWORD
+%define ZMMWORD
+section .text code align=64
+
+
+EXTERN OPENSSL_ia32cap_P
+
+global aesni_multi_cbc_encrypt
+
+ALIGN 32
+aesni_multi_cbc_encrypt:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_multi_cbc_encrypt:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+
+ cmp edx,2
+ jb NEAR $L$enc_non_avx
+ mov ecx,DWORD[((OPENSSL_ia32cap_P+4))]
+ test ecx,268435456
+ jnz NEAR _avx_cbc_enc_shortcut
+ jmp NEAR $L$enc_non_avx
+ALIGN 16
+$L$enc_non_avx:
+ mov rax,rsp
+
+ push rbx
+
+ push rbp
+
+ push r12
+
+ push r13
+
+ push r14
+
+ push r15
+
+ lea rsp,[((-168))+rsp]
+ movaps XMMWORD[rsp],xmm6
+ movaps XMMWORD[16+rsp],xmm7
+ movaps XMMWORD[32+rsp],xmm8
+ movaps XMMWORD[48+rsp],xmm9
+ movaps XMMWORD[64+rsp],xmm10
+ movaps XMMWORD[80+rsp],xmm11
+ movaps XMMWORD[96+rsp],xmm12
+ movaps XMMWORD[(-104)+rax],xmm13
+ movaps XMMWORD[(-88)+rax],xmm14
+ movaps XMMWORD[(-72)+rax],xmm15
+
+
+
+
+
+
+ sub rsp,48
+ and rsp,-64
+ mov QWORD[16+rsp],rax
+
+
+$L$enc4x_body:
+ movdqu xmm12,XMMWORD[rsi]
+ lea rsi,[120+rsi]
+ lea rdi,[80+rdi]
+
+$L$enc4x_loop_grande:
+ mov DWORD[24+rsp],edx
+ xor edx,edx
+
+ mov ecx,DWORD[((-64))+rdi]
+ mov r8,QWORD[((-80))+rdi]
+ cmp ecx,edx
+ mov r12,QWORD[((-72))+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ movdqu xmm2,XMMWORD[((-56))+rdi]
+ mov DWORD[32+rsp],ecx
+ cmovle r8,rsp
+
+ mov ecx,DWORD[((-24))+rdi]
+ mov r9,QWORD[((-40))+rdi]
+ cmp ecx,edx
+ mov r13,QWORD[((-32))+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ movdqu xmm3,XMMWORD[((-16))+rdi]
+ mov DWORD[36+rsp],ecx
+ cmovle r9,rsp
+
+ mov ecx,DWORD[16+rdi]
+ mov r10,QWORD[rdi]
+ cmp ecx,edx
+ mov r14,QWORD[8+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ movdqu xmm4,XMMWORD[24+rdi]
+ mov DWORD[40+rsp],ecx
+ cmovle r10,rsp
+
+ mov ecx,DWORD[56+rdi]
+ mov r11,QWORD[40+rdi]
+ cmp ecx,edx
+ mov r15,QWORD[48+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ movdqu xmm5,XMMWORD[64+rdi]
+ mov DWORD[44+rsp],ecx
+ cmovle r11,rsp
+ test edx,edx
+ jz NEAR $L$enc4x_done
+
+ movups xmm1,XMMWORD[((16-120))+rsi]
+ pxor xmm2,xmm12
+ movups xmm0,XMMWORD[((32-120))+rsi]
+ pxor xmm3,xmm12
+ mov eax,DWORD[((240-120))+rsi]
+ pxor xmm4,xmm12
+ movdqu xmm6,XMMWORD[r8]
+ pxor xmm5,xmm12
+ movdqu xmm7,XMMWORD[r9]
+ pxor xmm2,xmm6
+ movdqu xmm8,XMMWORD[r10]
+ pxor xmm3,xmm7
+ movdqu xmm9,XMMWORD[r11]
+ pxor xmm4,xmm8
+ pxor xmm5,xmm9
+ movdqa xmm10,XMMWORD[32+rsp]
+ xor rbx,rbx
+ jmp NEAR $L$oop_enc4x
+
+ALIGN 32
+$L$oop_enc4x:
+ add rbx,16
+ lea rbp,[16+rsp]
+ mov ecx,1
+ sub rbp,rbx
+
+DB 102,15,56,220,209
+ prefetcht0 [31+rbx*1+r8]
+ prefetcht0 [31+rbx*1+r9]
+DB 102,15,56,220,217
+ prefetcht0 [31+rbx*1+r10]
+ prefetcht0 [31+rbx*1+r10]
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+ movups xmm1,XMMWORD[((48-120))+rsi]
+ cmp ecx,DWORD[32+rsp]
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+DB 102,15,56,220,224
+ cmovge r8,rbp
+ cmovg r12,rbp
+DB 102,15,56,220,232
+ movups xmm0,XMMWORD[((-56))+rsi]
+ cmp ecx,DWORD[36+rsp]
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+ cmovge r9,rbp
+ cmovg r13,rbp
+DB 102,15,56,220,233
+ movups xmm1,XMMWORD[((-40))+rsi]
+ cmp ecx,DWORD[40+rsp]
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+DB 102,15,56,220,224
+ cmovge r10,rbp
+ cmovg r14,rbp
+DB 102,15,56,220,232
+ movups xmm0,XMMWORD[((-24))+rsi]
+ cmp ecx,DWORD[44+rsp]
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+ cmovge r11,rbp
+ cmovg r15,rbp
+DB 102,15,56,220,233
+ movups xmm1,XMMWORD[((-8))+rsi]
+ movdqa xmm11,xmm10
+DB 102,15,56,220,208
+ prefetcht0 [15+rbx*1+r12]
+ prefetcht0 [15+rbx*1+r13]
+DB 102,15,56,220,216
+ prefetcht0 [15+rbx*1+r14]
+ prefetcht0 [15+rbx*1+r15]
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+ movups xmm0,XMMWORD[((128-120))+rsi]
+ pxor xmm12,xmm12
+
+DB 102,15,56,220,209
+ pcmpgtd xmm11,xmm12
+ movdqu xmm12,XMMWORD[((-120))+rsi]
+DB 102,15,56,220,217
+ paddd xmm10,xmm11
+ movdqa XMMWORD[32+rsp],xmm10
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+ movups xmm1,XMMWORD[((144-120))+rsi]
+
+ cmp eax,11
+
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+ movups xmm0,XMMWORD[((160-120))+rsi]
+
+ jb NEAR $L$enc4x_tail
+
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+ movups xmm1,XMMWORD[((176-120))+rsi]
+
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+ movups xmm0,XMMWORD[((192-120))+rsi]
+
+ je NEAR $L$enc4x_tail
+
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+ movups xmm1,XMMWORD[((208-120))+rsi]
+
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+ movups xmm0,XMMWORD[((224-120))+rsi]
+ jmp NEAR $L$enc4x_tail
+
+ALIGN 32
+$L$enc4x_tail:
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+ movdqu xmm6,XMMWORD[rbx*1+r8]
+ movdqu xmm1,XMMWORD[((16-120))+rsi]
+
+DB 102,15,56,221,208
+ movdqu xmm7,XMMWORD[rbx*1+r9]
+ pxor xmm6,xmm12
+DB 102,15,56,221,216
+ movdqu xmm8,XMMWORD[rbx*1+r10]
+ pxor xmm7,xmm12
+DB 102,15,56,221,224
+ movdqu xmm9,XMMWORD[rbx*1+r11]
+ pxor xmm8,xmm12
+DB 102,15,56,221,232
+ movdqu xmm0,XMMWORD[((32-120))+rsi]
+ pxor xmm9,xmm12
+
+ movups XMMWORD[(-16)+rbx*1+r12],xmm2
+ pxor xmm2,xmm6
+ movups XMMWORD[(-16)+rbx*1+r13],xmm3
+ pxor xmm3,xmm7
+ movups XMMWORD[(-16)+rbx*1+r14],xmm4
+ pxor xmm4,xmm8
+ movups XMMWORD[(-16)+rbx*1+r15],xmm5
+ pxor xmm5,xmm9
+
+ dec edx
+ jnz NEAR $L$oop_enc4x
+
+ mov rax,QWORD[16+rsp]
+
+ mov edx,DWORD[24+rsp]
+
+
+
+
+
+
+
+
+
+
+
+ lea rdi,[160+rdi]
+ dec edx
+ jnz NEAR $L$enc4x_loop_grande
+
+$L$enc4x_done:
+ movaps xmm6,XMMWORD[((-216))+rax]
+ movaps xmm7,XMMWORD[((-200))+rax]
+ movaps xmm8,XMMWORD[((-184))+rax]
+ movaps xmm9,XMMWORD[((-168))+rax]
+ movaps xmm10,XMMWORD[((-152))+rax]
+ movaps xmm11,XMMWORD[((-136))+rax]
+ movaps xmm12,XMMWORD[((-120))+rax]
+
+
+
+ mov r15,QWORD[((-48))+rax]
+
+ mov r14,QWORD[((-40))+rax]
+
+ mov r13,QWORD[((-32))+rax]
+
+ mov r12,QWORD[((-24))+rax]
+
+ mov rbp,QWORD[((-16))+rax]
+
+ mov rbx,QWORD[((-8))+rax]
+
+ lea rsp,[rax]
+
+$L$enc4x_epilogue:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_multi_cbc_encrypt:
+
+global aesni_multi_cbc_decrypt
+
+ALIGN 32
+aesni_multi_cbc_decrypt:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_multi_cbc_decrypt:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+
+ cmp edx,2
+ jb NEAR $L$dec_non_avx
+ mov ecx,DWORD[((OPENSSL_ia32cap_P+4))]
+ test ecx,268435456
+ jnz NEAR _avx_cbc_dec_shortcut
+ jmp NEAR $L$dec_non_avx
+ALIGN 16
+$L$dec_non_avx:
+ mov rax,rsp
+
+ push rbx
+
+ push rbp
+
+ push r12
+
+ push r13
+
+ push r14
+
+ push r15
+
+ lea rsp,[((-168))+rsp]
+ movaps XMMWORD[rsp],xmm6
+ movaps XMMWORD[16+rsp],xmm7
+ movaps XMMWORD[32+rsp],xmm8
+ movaps XMMWORD[48+rsp],xmm9
+ movaps XMMWORD[64+rsp],xmm10
+ movaps XMMWORD[80+rsp],xmm11
+ movaps XMMWORD[96+rsp],xmm12
+ movaps XMMWORD[(-104)+rax],xmm13
+ movaps XMMWORD[(-88)+rax],xmm14
+ movaps XMMWORD[(-72)+rax],xmm15
+
+
+
+
+
+
+ sub rsp,48
+ and rsp,-64
+ mov QWORD[16+rsp],rax
+
+
+$L$dec4x_body:
+ movdqu xmm12,XMMWORD[rsi]
+ lea rsi,[120+rsi]
+ lea rdi,[80+rdi]
+
+$L$dec4x_loop_grande:
+ mov DWORD[24+rsp],edx
+ xor edx,edx
+
+ mov ecx,DWORD[((-64))+rdi]
+ mov r8,QWORD[((-80))+rdi]
+ cmp ecx,edx
+ mov r12,QWORD[((-72))+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ movdqu xmm6,XMMWORD[((-56))+rdi]
+ mov DWORD[32+rsp],ecx
+ cmovle r8,rsp
+
+ mov ecx,DWORD[((-24))+rdi]
+ mov r9,QWORD[((-40))+rdi]
+ cmp ecx,edx
+ mov r13,QWORD[((-32))+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ movdqu xmm7,XMMWORD[((-16))+rdi]
+ mov DWORD[36+rsp],ecx
+ cmovle r9,rsp
+
+ mov ecx,DWORD[16+rdi]
+ mov r10,QWORD[rdi]
+ cmp ecx,edx
+ mov r14,QWORD[8+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ movdqu xmm8,XMMWORD[24+rdi]
+ mov DWORD[40+rsp],ecx
+ cmovle r10,rsp
+
+ mov ecx,DWORD[56+rdi]
+ mov r11,QWORD[40+rdi]
+ cmp ecx,edx
+ mov r15,QWORD[48+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ movdqu xmm9,XMMWORD[64+rdi]
+ mov DWORD[44+rsp],ecx
+ cmovle r11,rsp
+ test edx,edx
+ jz NEAR $L$dec4x_done
+
+ movups xmm1,XMMWORD[((16-120))+rsi]
+ movups xmm0,XMMWORD[((32-120))+rsi]
+ mov eax,DWORD[((240-120))+rsi]
+ movdqu xmm2,XMMWORD[r8]
+ movdqu xmm3,XMMWORD[r9]
+ pxor xmm2,xmm12
+ movdqu xmm4,XMMWORD[r10]
+ pxor xmm3,xmm12
+ movdqu xmm5,XMMWORD[r11]
+ pxor xmm4,xmm12
+ pxor xmm5,xmm12
+ movdqa xmm10,XMMWORD[32+rsp]
+ xor rbx,rbx
+ jmp NEAR $L$oop_dec4x
+
+ALIGN 32
+$L$oop_dec4x:
+ add rbx,16
+ lea rbp,[16+rsp]
+ mov ecx,1
+ sub rbp,rbx
+
+DB 102,15,56,222,209
+ prefetcht0 [31+rbx*1+r8]
+ prefetcht0 [31+rbx*1+r9]
+DB 102,15,56,222,217
+ prefetcht0 [31+rbx*1+r10]
+ prefetcht0 [31+rbx*1+r11]
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+ movups xmm1,XMMWORD[((48-120))+rsi]
+ cmp ecx,DWORD[32+rsp]
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+ cmovge r8,rbp
+ cmovg r12,rbp
+DB 102,15,56,222,232
+ movups xmm0,XMMWORD[((-56))+rsi]
+ cmp ecx,DWORD[36+rsp]
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+ cmovge r9,rbp
+ cmovg r13,rbp
+DB 102,15,56,222,233
+ movups xmm1,XMMWORD[((-40))+rsi]
+ cmp ecx,DWORD[40+rsp]
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+ cmovge r10,rbp
+ cmovg r14,rbp
+DB 102,15,56,222,232
+ movups xmm0,XMMWORD[((-24))+rsi]
+ cmp ecx,DWORD[44+rsp]
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+ cmovge r11,rbp
+ cmovg r15,rbp
+DB 102,15,56,222,233
+ movups xmm1,XMMWORD[((-8))+rsi]
+ movdqa xmm11,xmm10
+DB 102,15,56,222,208
+ prefetcht0 [15+rbx*1+r12]
+ prefetcht0 [15+rbx*1+r13]
+DB 102,15,56,222,216
+ prefetcht0 [15+rbx*1+r14]
+ prefetcht0 [15+rbx*1+r15]
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+ movups xmm0,XMMWORD[((128-120))+rsi]
+ pxor xmm12,xmm12
+
+DB 102,15,56,222,209
+ pcmpgtd xmm11,xmm12
+ movdqu xmm12,XMMWORD[((-120))+rsi]
+DB 102,15,56,222,217
+ paddd xmm10,xmm11
+ movdqa XMMWORD[32+rsp],xmm10
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+ movups xmm1,XMMWORD[((144-120))+rsi]
+
+ cmp eax,11
+
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+ movups xmm0,XMMWORD[((160-120))+rsi]
+
+ jb NEAR $L$dec4x_tail
+
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+ movups xmm1,XMMWORD[((176-120))+rsi]
+
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+ movups xmm0,XMMWORD[((192-120))+rsi]
+
+ je NEAR $L$dec4x_tail
+
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+ movups xmm1,XMMWORD[((208-120))+rsi]
+
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+ movups xmm0,XMMWORD[((224-120))+rsi]
+ jmp NEAR $L$dec4x_tail
+
+ALIGN 32
+$L$dec4x_tail:
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+ pxor xmm6,xmm0
+ pxor xmm7,xmm0
+DB 102,15,56,222,233
+ movdqu xmm1,XMMWORD[((16-120))+rsi]
+ pxor xmm8,xmm0
+ pxor xmm9,xmm0
+ movdqu xmm0,XMMWORD[((32-120))+rsi]
+
+DB 102,15,56,223,214
+DB 102,15,56,223,223
+ movdqu xmm6,XMMWORD[((-16))+rbx*1+r8]
+ movdqu xmm7,XMMWORD[((-16))+rbx*1+r9]
+DB 102,65,15,56,223,224
+DB 102,65,15,56,223,233
+ movdqu xmm8,XMMWORD[((-16))+rbx*1+r10]
+ movdqu xmm9,XMMWORD[((-16))+rbx*1+r11]
+
+ movups XMMWORD[(-16)+rbx*1+r12],xmm2
+ movdqu xmm2,XMMWORD[rbx*1+r8]
+ movups XMMWORD[(-16)+rbx*1+r13],xmm3
+ movdqu xmm3,XMMWORD[rbx*1+r9]
+ pxor xmm2,xmm12
+ movups XMMWORD[(-16)+rbx*1+r14],xmm4
+ movdqu xmm4,XMMWORD[rbx*1+r10]
+ pxor xmm3,xmm12
+ movups XMMWORD[(-16)+rbx*1+r15],xmm5
+ movdqu xmm5,XMMWORD[rbx*1+r11]
+ pxor xmm4,xmm12
+ pxor xmm5,xmm12
+
+ dec edx
+ jnz NEAR $L$oop_dec4x
+
+ mov rax,QWORD[16+rsp]
+
+ mov edx,DWORD[24+rsp]
+
+ lea rdi,[160+rdi]
+ dec edx
+ jnz NEAR $L$dec4x_loop_grande
+
+$L$dec4x_done:
+ movaps xmm6,XMMWORD[((-216))+rax]
+ movaps xmm7,XMMWORD[((-200))+rax]
+ movaps xmm8,XMMWORD[((-184))+rax]
+ movaps xmm9,XMMWORD[((-168))+rax]
+ movaps xmm10,XMMWORD[((-152))+rax]
+ movaps xmm11,XMMWORD[((-136))+rax]
+ movaps xmm12,XMMWORD[((-120))+rax]
+
+
+
+ mov r15,QWORD[((-48))+rax]
+
+ mov r14,QWORD[((-40))+rax]
+
+ mov r13,QWORD[((-32))+rax]
+
+ mov r12,QWORD[((-24))+rax]
+
+ mov rbp,QWORD[((-16))+rax]
+
+ mov rbx,QWORD[((-8))+rax]
+
+ lea rsp,[rax]
+
+$L$dec4x_epilogue:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_multi_cbc_decrypt:
+
+ALIGN 32
+aesni_multi_cbc_encrypt_avx:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_multi_cbc_encrypt_avx:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+
+_avx_cbc_enc_shortcut:
+ mov rax,rsp
+
+ push rbx
+
+ push rbp
+
+ push r12
+
+ push r13
+
+ push r14
+
+ push r15
+
+ lea rsp,[((-168))+rsp]
+ movaps XMMWORD[rsp],xmm6
+ movaps XMMWORD[16+rsp],xmm7
+ movaps XMMWORD[32+rsp],xmm8
+ movaps XMMWORD[48+rsp],xmm9
+ movaps XMMWORD[64+rsp],xmm10
+ movaps XMMWORD[80+rsp],xmm11
+ movaps XMMWORD[(-120)+rax],xmm12
+ movaps XMMWORD[(-104)+rax],xmm13
+ movaps XMMWORD[(-88)+rax],xmm14
+ movaps XMMWORD[(-72)+rax],xmm15
+
+
+
+
+
+
+
+
+ sub rsp,192
+ and rsp,-128
+ mov QWORD[16+rsp],rax
+
+
+$L$enc8x_body:
+ vzeroupper
+ vmovdqu xmm15,XMMWORD[rsi]
+ lea rsi,[120+rsi]
+ lea rdi,[160+rdi]
+ shr edx,1
+
+$L$enc8x_loop_grande:
+
+ xor edx,edx
+
+ mov ecx,DWORD[((-144))+rdi]
+
+ mov r8,QWORD[((-160))+rdi]
+ cmp ecx,edx
+
+ mov rbx,QWORD[((-152))+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ vmovdqu xmm2,XMMWORD[((-136))+rdi]
+ mov DWORD[32+rsp],ecx
+ cmovle r8,rsp
+ sub rbx,r8
+ mov QWORD[64+rsp],rbx
+
+ mov ecx,DWORD[((-104))+rdi]
+
+ mov r9,QWORD[((-120))+rdi]
+ cmp ecx,edx
+
+ mov rbp,QWORD[((-112))+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ vmovdqu xmm3,XMMWORD[((-96))+rdi]
+ mov DWORD[36+rsp],ecx
+ cmovle r9,rsp
+ sub rbp,r9
+ mov QWORD[72+rsp],rbp
+
+ mov ecx,DWORD[((-64))+rdi]
+
+ mov r10,QWORD[((-80))+rdi]
+ cmp ecx,edx
+
+ mov rbp,QWORD[((-72))+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ vmovdqu xmm4,XMMWORD[((-56))+rdi]
+ mov DWORD[40+rsp],ecx
+ cmovle r10,rsp
+ sub rbp,r10
+ mov QWORD[80+rsp],rbp
+
+ mov ecx,DWORD[((-24))+rdi]
+
+ mov r11,QWORD[((-40))+rdi]
+ cmp ecx,edx
+
+ mov rbp,QWORD[((-32))+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ vmovdqu xmm5,XMMWORD[((-16))+rdi]
+ mov DWORD[44+rsp],ecx
+ cmovle r11,rsp
+ sub rbp,r11
+ mov QWORD[88+rsp],rbp
+
+ mov ecx,DWORD[16+rdi]
+
+ mov r12,QWORD[rdi]
+ cmp ecx,edx
+
+ mov rbp,QWORD[8+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ vmovdqu xmm6,XMMWORD[24+rdi]
+ mov DWORD[48+rsp],ecx
+ cmovle r12,rsp
+ sub rbp,r12
+ mov QWORD[96+rsp],rbp
+
+ mov ecx,DWORD[56+rdi]
+
+ mov r13,QWORD[40+rdi]
+ cmp ecx,edx
+
+ mov rbp,QWORD[48+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ vmovdqu xmm7,XMMWORD[64+rdi]
+ mov DWORD[52+rsp],ecx
+ cmovle r13,rsp
+ sub rbp,r13
+ mov QWORD[104+rsp],rbp
+
+ mov ecx,DWORD[96+rdi]
+
+ mov r14,QWORD[80+rdi]
+ cmp ecx,edx
+
+ mov rbp,QWORD[88+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ vmovdqu xmm8,XMMWORD[104+rdi]
+ mov DWORD[56+rsp],ecx
+ cmovle r14,rsp
+ sub rbp,r14
+ mov QWORD[112+rsp],rbp
+
+ mov ecx,DWORD[136+rdi]
+
+ mov r15,QWORD[120+rdi]
+ cmp ecx,edx
+
+ mov rbp,QWORD[128+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ vmovdqu xmm9,XMMWORD[144+rdi]
+ mov DWORD[60+rsp],ecx
+ cmovle r15,rsp
+ sub rbp,r15
+ mov QWORD[120+rsp],rbp
+ test edx,edx
+ jz NEAR $L$enc8x_done
+
+ vmovups xmm1,XMMWORD[((16-120))+rsi]
+ vmovups xmm0,XMMWORD[((32-120))+rsi]
+ mov eax,DWORD[((240-120))+rsi]
+
+ vpxor xmm10,xmm15,XMMWORD[r8]
+ lea rbp,[128+rsp]
+ vpxor xmm11,xmm15,XMMWORD[r9]
+ vpxor xmm12,xmm15,XMMWORD[r10]
+ vpxor xmm13,xmm15,XMMWORD[r11]
+ vpxor xmm2,xmm2,xmm10
+ vpxor xmm10,xmm15,XMMWORD[r12]
+ vpxor xmm3,xmm3,xmm11
+ vpxor xmm11,xmm15,XMMWORD[r13]
+ vpxor xmm4,xmm4,xmm12
+ vpxor xmm12,xmm15,XMMWORD[r14]
+ vpxor xmm5,xmm5,xmm13
+ vpxor xmm13,xmm15,XMMWORD[r15]
+ vpxor xmm6,xmm6,xmm10
+ mov ecx,1
+ vpxor xmm7,xmm7,xmm11
+ vpxor xmm8,xmm8,xmm12
+ vpxor xmm9,xmm9,xmm13
+ jmp NEAR $L$oop_enc8x
+
+ALIGN 32
+$L$oop_enc8x:
+ vaesenc xmm2,xmm2,xmm1
+ cmp ecx,DWORD[((32+0))+rsp]
+ vaesenc xmm3,xmm3,xmm1
+ prefetcht0 [31+r8]
+ vaesenc xmm4,xmm4,xmm1
+ vaesenc xmm5,xmm5,xmm1
+ lea rbx,[rbx*1+r8]
+ cmovge r8,rsp
+ vaesenc xmm6,xmm6,xmm1
+ cmovg rbx,rsp
+ vaesenc xmm7,xmm7,xmm1
+ sub rbx,r8
+ vaesenc xmm8,xmm8,xmm1
+ vpxor xmm10,xmm15,XMMWORD[16+r8]
+ mov QWORD[((64+0))+rsp],rbx
+ vaesenc xmm9,xmm9,xmm1
+ vmovups xmm1,XMMWORD[((-72))+rsi]
+ lea r8,[16+rbx*1+r8]
+ vmovdqu XMMWORD[rbp],xmm10
+ vaesenc xmm2,xmm2,xmm0
+ cmp ecx,DWORD[((32+4))+rsp]
+ mov rbx,QWORD[((64+8))+rsp]
+ vaesenc xmm3,xmm3,xmm0
+ prefetcht0 [31+r9]
+ vaesenc xmm4,xmm4,xmm0
+ vaesenc xmm5,xmm5,xmm0
+ lea rbx,[rbx*1+r9]
+ cmovge r9,rsp
+ vaesenc xmm6,xmm6,xmm0
+ cmovg rbx,rsp
+ vaesenc xmm7,xmm7,xmm0
+ sub rbx,r9
+ vaesenc xmm8,xmm8,xmm0
+ vpxor xmm11,xmm15,XMMWORD[16+r9]
+ mov QWORD[((64+8))+rsp],rbx
+ vaesenc xmm9,xmm9,xmm0
+ vmovups xmm0,XMMWORD[((-56))+rsi]
+ lea r9,[16+rbx*1+r9]
+ vmovdqu XMMWORD[16+rbp],xmm11
+ vaesenc xmm2,xmm2,xmm1
+ cmp ecx,DWORD[((32+8))+rsp]
+ mov rbx,QWORD[((64+16))+rsp]
+ vaesenc xmm3,xmm3,xmm1
+ prefetcht0 [31+r10]
+ vaesenc xmm4,xmm4,xmm1
+ prefetcht0 [15+r8]
+ vaesenc xmm5,xmm5,xmm1
+ lea rbx,[rbx*1+r10]
+ cmovge r10,rsp
+ vaesenc xmm6,xmm6,xmm1
+ cmovg rbx,rsp
+ vaesenc xmm7,xmm7,xmm1
+ sub rbx,r10
+ vaesenc xmm8,xmm8,xmm1
+ vpxor xmm12,xmm15,XMMWORD[16+r10]
+ mov QWORD[((64+16))+rsp],rbx
+ vaesenc xmm9,xmm9,xmm1
+ vmovups xmm1,XMMWORD[((-40))+rsi]
+ lea r10,[16+rbx*1+r10]
+ vmovdqu XMMWORD[32+rbp],xmm12
+ vaesenc xmm2,xmm2,xmm0
+ cmp ecx,DWORD[((32+12))+rsp]
+ mov rbx,QWORD[((64+24))+rsp]
+ vaesenc xmm3,xmm3,xmm0
+ prefetcht0 [31+r11]
+ vaesenc xmm4,xmm4,xmm0
+ prefetcht0 [15+r9]
+ vaesenc xmm5,xmm5,xmm0
+ lea rbx,[rbx*1+r11]
+ cmovge r11,rsp
+ vaesenc xmm6,xmm6,xmm0
+ cmovg rbx,rsp
+ vaesenc xmm7,xmm7,xmm0
+ sub rbx,r11
+ vaesenc xmm8,xmm8,xmm0
+ vpxor xmm13,xmm15,XMMWORD[16+r11]
+ mov QWORD[((64+24))+rsp],rbx
+ vaesenc xmm9,xmm9,xmm0
+ vmovups xmm0,XMMWORD[((-24))+rsi]
+ lea r11,[16+rbx*1+r11]
+ vmovdqu XMMWORD[48+rbp],xmm13
+ vaesenc xmm2,xmm2,xmm1
+ cmp ecx,DWORD[((32+16))+rsp]
+ mov rbx,QWORD[((64+32))+rsp]
+ vaesenc xmm3,xmm3,xmm1
+ prefetcht0 [31+r12]
+ vaesenc xmm4,xmm4,xmm1
+ prefetcht0 [15+r10]
+ vaesenc xmm5,xmm5,xmm1
+ lea rbx,[rbx*1+r12]
+ cmovge r12,rsp
+ vaesenc xmm6,xmm6,xmm1
+ cmovg rbx,rsp
+ vaesenc xmm7,xmm7,xmm1
+ sub rbx,r12
+ vaesenc xmm8,xmm8,xmm1
+ vpxor xmm10,xmm15,XMMWORD[16+r12]
+ mov QWORD[((64+32))+rsp],rbx
+ vaesenc xmm9,xmm9,xmm1
+ vmovups xmm1,XMMWORD[((-8))+rsi]
+ lea r12,[16+rbx*1+r12]
+ vaesenc xmm2,xmm2,xmm0
+ cmp ecx,DWORD[((32+20))+rsp]
+ mov rbx,QWORD[((64+40))+rsp]
+ vaesenc xmm3,xmm3,xmm0
+ prefetcht0 [31+r13]
+ vaesenc xmm4,xmm4,xmm0
+ prefetcht0 [15+r11]
+ vaesenc xmm5,xmm5,xmm0
+ lea rbx,[r13*1+rbx]
+ cmovge r13,rsp
+ vaesenc xmm6,xmm6,xmm0
+ cmovg rbx,rsp
+ vaesenc xmm7,xmm7,xmm0
+ sub rbx,r13
+ vaesenc xmm8,xmm8,xmm0
+ vpxor xmm11,xmm15,XMMWORD[16+r13]
+ mov QWORD[((64+40))+rsp],rbx
+ vaesenc xmm9,xmm9,xmm0
+ vmovups xmm0,XMMWORD[8+rsi]
+ lea r13,[16+rbx*1+r13]
+ vaesenc xmm2,xmm2,xmm1
+ cmp ecx,DWORD[((32+24))+rsp]
+ mov rbx,QWORD[((64+48))+rsp]
+ vaesenc xmm3,xmm3,xmm1
+ prefetcht0 [31+r14]
+ vaesenc xmm4,xmm4,xmm1
+ prefetcht0 [15+r12]
+ vaesenc xmm5,xmm5,xmm1
+ lea rbx,[rbx*1+r14]
+ cmovge r14,rsp
+ vaesenc xmm6,xmm6,xmm1
+ cmovg rbx,rsp
+ vaesenc xmm7,xmm7,xmm1
+ sub rbx,r14
+ vaesenc xmm8,xmm8,xmm1
+ vpxor xmm12,xmm15,XMMWORD[16+r14]
+ mov QWORD[((64+48))+rsp],rbx
+ vaesenc xmm9,xmm9,xmm1
+ vmovups xmm1,XMMWORD[24+rsi]
+ lea r14,[16+rbx*1+r14]
+ vaesenc xmm2,xmm2,xmm0
+ cmp ecx,DWORD[((32+28))+rsp]
+ mov rbx,QWORD[((64+56))+rsp]
+ vaesenc xmm3,xmm3,xmm0
+ prefetcht0 [31+r15]
+ vaesenc xmm4,xmm4,xmm0
+ prefetcht0 [15+r13]
+ vaesenc xmm5,xmm5,xmm0
+ lea rbx,[rbx*1+r15]
+ cmovge r15,rsp
+ vaesenc xmm6,xmm6,xmm0
+ cmovg rbx,rsp
+ vaesenc xmm7,xmm7,xmm0
+ sub rbx,r15
+ vaesenc xmm8,xmm8,xmm0
+ vpxor xmm13,xmm15,XMMWORD[16+r15]
+ mov QWORD[((64+56))+rsp],rbx
+ vaesenc xmm9,xmm9,xmm0
+ vmovups xmm0,XMMWORD[40+rsi]
+ lea r15,[16+rbx*1+r15]
+ vmovdqu xmm14,XMMWORD[32+rsp]
+ prefetcht0 [15+r14]
+ prefetcht0 [15+r15]
+ cmp eax,11
+ jb NEAR $L$enc8x_tail
+
+ vaesenc xmm2,xmm2,xmm1
+ vaesenc xmm3,xmm3,xmm1
+ vaesenc xmm4,xmm4,xmm1
+ vaesenc xmm5,xmm5,xmm1
+ vaesenc xmm6,xmm6,xmm1
+ vaesenc xmm7,xmm7,xmm1
+ vaesenc xmm8,xmm8,xmm1
+ vaesenc xmm9,xmm9,xmm1
+ vmovups xmm1,XMMWORD[((176-120))+rsi]
+
+ vaesenc xmm2,xmm2,xmm0
+ vaesenc xmm3,xmm3,xmm0
+ vaesenc xmm4,xmm4,xmm0
+ vaesenc xmm5,xmm5,xmm0
+ vaesenc xmm6,xmm6,xmm0
+ vaesenc xmm7,xmm7,xmm0
+ vaesenc xmm8,xmm8,xmm0
+ vaesenc xmm9,xmm9,xmm0
+ vmovups xmm0,XMMWORD[((192-120))+rsi]
+ je NEAR $L$enc8x_tail
+
+ vaesenc xmm2,xmm2,xmm1
+ vaesenc xmm3,xmm3,xmm1
+ vaesenc xmm4,xmm4,xmm1
+ vaesenc xmm5,xmm5,xmm1
+ vaesenc xmm6,xmm6,xmm1
+ vaesenc xmm7,xmm7,xmm1
+ vaesenc xmm8,xmm8,xmm1
+ vaesenc xmm9,xmm9,xmm1
+ vmovups xmm1,XMMWORD[((208-120))+rsi]
+
+ vaesenc xmm2,xmm2,xmm0
+ vaesenc xmm3,xmm3,xmm0
+ vaesenc xmm4,xmm4,xmm0
+ vaesenc xmm5,xmm5,xmm0
+ vaesenc xmm6,xmm6,xmm0
+ vaesenc xmm7,xmm7,xmm0
+ vaesenc xmm8,xmm8,xmm0
+ vaesenc xmm9,xmm9,xmm0
+ vmovups xmm0,XMMWORD[((224-120))+rsi]
+
+$L$enc8x_tail:
+ vaesenc xmm2,xmm2,xmm1
+ vpxor xmm15,xmm15,xmm15
+ vaesenc xmm3,xmm3,xmm1
+ vaesenc xmm4,xmm4,xmm1
+ vpcmpgtd xmm15,xmm14,xmm15
+ vaesenc xmm5,xmm5,xmm1
+ vaesenc xmm6,xmm6,xmm1
+ vpaddd xmm15,xmm15,xmm14
+ vmovdqu xmm14,XMMWORD[48+rsp]
+ vaesenc xmm7,xmm7,xmm1
+ mov rbx,QWORD[64+rsp]
+ vaesenc xmm8,xmm8,xmm1
+ vaesenc xmm9,xmm9,xmm1
+ vmovups xmm1,XMMWORD[((16-120))+rsi]
+
+ vaesenclast xmm2,xmm2,xmm0
+ vmovdqa XMMWORD[32+rsp],xmm15
+ vpxor xmm15,xmm15,xmm15
+ vaesenclast xmm3,xmm3,xmm0
+ vaesenclast xmm4,xmm4,xmm0
+ vpcmpgtd xmm15,xmm14,xmm15
+ vaesenclast xmm5,xmm5,xmm0
+ vaesenclast xmm6,xmm6,xmm0
+ vpaddd xmm14,xmm14,xmm15
+ vmovdqu xmm15,XMMWORD[((-120))+rsi]
+ vaesenclast xmm7,xmm7,xmm0
+ vaesenclast xmm8,xmm8,xmm0
+ vmovdqa XMMWORD[48+rsp],xmm14
+ vaesenclast xmm9,xmm9,xmm0
+ vmovups xmm0,XMMWORD[((32-120))+rsi]
+
+ vmovups XMMWORD[(-16)+r8],xmm2
+ sub r8,rbx
+ vpxor xmm2,xmm2,XMMWORD[rbp]
+ vmovups XMMWORD[(-16)+r9],xmm3
+ sub r9,QWORD[72+rsp]
+ vpxor xmm3,xmm3,XMMWORD[16+rbp]
+ vmovups XMMWORD[(-16)+r10],xmm4
+ sub r10,QWORD[80+rsp]
+ vpxor xmm4,xmm4,XMMWORD[32+rbp]
+ vmovups XMMWORD[(-16)+r11],xmm5
+ sub r11,QWORD[88+rsp]
+ vpxor xmm5,xmm5,XMMWORD[48+rbp]
+ vmovups XMMWORD[(-16)+r12],xmm6
+ sub r12,QWORD[96+rsp]
+ vpxor xmm6,xmm6,xmm10
+ vmovups XMMWORD[(-16)+r13],xmm7
+ sub r13,QWORD[104+rsp]
+ vpxor xmm7,xmm7,xmm11
+ vmovups XMMWORD[(-16)+r14],xmm8
+ sub r14,QWORD[112+rsp]
+ vpxor xmm8,xmm8,xmm12
+ vmovups XMMWORD[(-16)+r15],xmm9
+ sub r15,QWORD[120+rsp]
+ vpxor xmm9,xmm9,xmm13
+
+ dec edx
+ jnz NEAR $L$oop_enc8x
+
+ mov rax,QWORD[16+rsp]
+
+
+
+
+
+
+$L$enc8x_done:
+ vzeroupper
+ movaps xmm6,XMMWORD[((-216))+rax]
+ movaps xmm7,XMMWORD[((-200))+rax]
+ movaps xmm8,XMMWORD[((-184))+rax]
+ movaps xmm9,XMMWORD[((-168))+rax]
+ movaps xmm10,XMMWORD[((-152))+rax]
+ movaps xmm11,XMMWORD[((-136))+rax]
+ movaps xmm12,XMMWORD[((-120))+rax]
+ movaps xmm13,XMMWORD[((-104))+rax]
+ movaps xmm14,XMMWORD[((-88))+rax]
+ movaps xmm15,XMMWORD[((-72))+rax]
+ mov r15,QWORD[((-48))+rax]
+
+ mov r14,QWORD[((-40))+rax]
+
+ mov r13,QWORD[((-32))+rax]
+
+ mov r12,QWORD[((-24))+rax]
+
+ mov rbp,QWORD[((-16))+rax]
+
+ mov rbx,QWORD[((-8))+rax]
+
+ lea rsp,[rax]
+
+$L$enc8x_epilogue:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_multi_cbc_encrypt_avx:
+
+
+ALIGN 32
+aesni_multi_cbc_decrypt_avx:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_multi_cbc_decrypt_avx:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+
+_avx_cbc_dec_shortcut:
+ mov rax,rsp
+
+ push rbx
+
+ push rbp
+
+ push r12
+
+ push r13
+
+ push r14
+
+ push r15
+
+ lea rsp,[((-168))+rsp]
+ movaps XMMWORD[rsp],xmm6
+ movaps XMMWORD[16+rsp],xmm7
+ movaps XMMWORD[32+rsp],xmm8
+ movaps XMMWORD[48+rsp],xmm9
+ movaps XMMWORD[64+rsp],xmm10
+ movaps XMMWORD[80+rsp],xmm11
+ movaps XMMWORD[(-120)+rax],xmm12
+ movaps XMMWORD[(-104)+rax],xmm13
+ movaps XMMWORD[(-88)+rax],xmm14
+ movaps XMMWORD[(-72)+rax],xmm15
+
+
+
+
+
+
+
+
+
+ sub rsp,256
+ and rsp,-256
+ sub rsp,192
+ mov QWORD[16+rsp],rax
+
+
+$L$dec8x_body:
+ vzeroupper
+ vmovdqu xmm15,XMMWORD[rsi]
+ lea rsi,[120+rsi]
+ lea rdi,[160+rdi]
+ shr edx,1
+
+$L$dec8x_loop_grande:
+
+ xor edx,edx
+
+ mov ecx,DWORD[((-144))+rdi]
+
+ mov r8,QWORD[((-160))+rdi]
+ cmp ecx,edx
+
+ mov rbx,QWORD[((-152))+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ vmovdqu xmm2,XMMWORD[((-136))+rdi]
+ mov DWORD[32+rsp],ecx
+ cmovle r8,rsp
+ sub rbx,r8
+ mov QWORD[64+rsp],rbx
+ vmovdqu XMMWORD[192+rsp],xmm2
+
+ mov ecx,DWORD[((-104))+rdi]
+
+ mov r9,QWORD[((-120))+rdi]
+ cmp ecx,edx
+
+ mov rbp,QWORD[((-112))+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ vmovdqu xmm3,XMMWORD[((-96))+rdi]
+ mov DWORD[36+rsp],ecx
+ cmovle r9,rsp
+ sub rbp,r9
+ mov QWORD[72+rsp],rbp
+ vmovdqu XMMWORD[208+rsp],xmm3
+
+ mov ecx,DWORD[((-64))+rdi]
+
+ mov r10,QWORD[((-80))+rdi]
+ cmp ecx,edx
+
+ mov rbp,QWORD[((-72))+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ vmovdqu xmm4,XMMWORD[((-56))+rdi]
+ mov DWORD[40+rsp],ecx
+ cmovle r10,rsp
+ sub rbp,r10
+ mov QWORD[80+rsp],rbp
+ vmovdqu XMMWORD[224+rsp],xmm4
+
+ mov ecx,DWORD[((-24))+rdi]
+
+ mov r11,QWORD[((-40))+rdi]
+ cmp ecx,edx
+
+ mov rbp,QWORD[((-32))+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ vmovdqu xmm5,XMMWORD[((-16))+rdi]
+ mov DWORD[44+rsp],ecx
+ cmovle r11,rsp
+ sub rbp,r11
+ mov QWORD[88+rsp],rbp
+ vmovdqu XMMWORD[240+rsp],xmm5
+
+ mov ecx,DWORD[16+rdi]
+
+ mov r12,QWORD[rdi]
+ cmp ecx,edx
+
+ mov rbp,QWORD[8+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ vmovdqu xmm6,XMMWORD[24+rdi]
+ mov DWORD[48+rsp],ecx
+ cmovle r12,rsp
+ sub rbp,r12
+ mov QWORD[96+rsp],rbp
+ vmovdqu XMMWORD[256+rsp],xmm6
+
+ mov ecx,DWORD[56+rdi]
+
+ mov r13,QWORD[40+rdi]
+ cmp ecx,edx
+
+ mov rbp,QWORD[48+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ vmovdqu xmm7,XMMWORD[64+rdi]
+ mov DWORD[52+rsp],ecx
+ cmovle r13,rsp
+ sub rbp,r13
+ mov QWORD[104+rsp],rbp
+ vmovdqu XMMWORD[272+rsp],xmm7
+
+ mov ecx,DWORD[96+rdi]
+
+ mov r14,QWORD[80+rdi]
+ cmp ecx,edx
+
+ mov rbp,QWORD[88+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ vmovdqu xmm8,XMMWORD[104+rdi]
+ mov DWORD[56+rsp],ecx
+ cmovle r14,rsp
+ sub rbp,r14
+ mov QWORD[112+rsp],rbp
+ vmovdqu XMMWORD[288+rsp],xmm8
+
+ mov ecx,DWORD[136+rdi]
+
+ mov r15,QWORD[120+rdi]
+ cmp ecx,edx
+
+ mov rbp,QWORD[128+rdi]
+ cmovg edx,ecx
+ test ecx,ecx
+
+ vmovdqu xmm9,XMMWORD[144+rdi]
+ mov DWORD[60+rsp],ecx
+ cmovle r15,rsp
+ sub rbp,r15
+ mov QWORD[120+rsp],rbp
+ vmovdqu XMMWORD[304+rsp],xmm9
+ test edx,edx
+ jz NEAR $L$dec8x_done
+
+ vmovups xmm1,XMMWORD[((16-120))+rsi]
+ vmovups xmm0,XMMWORD[((32-120))+rsi]
+ mov eax,DWORD[((240-120))+rsi]
+ lea rbp,[((192+128))+rsp]
+
+ vmovdqu xmm2,XMMWORD[r8]
+ vmovdqu xmm3,XMMWORD[r9]
+ vmovdqu xmm4,XMMWORD[r10]
+ vmovdqu xmm5,XMMWORD[r11]
+ vmovdqu xmm6,XMMWORD[r12]
+ vmovdqu xmm7,XMMWORD[r13]
+ vmovdqu xmm8,XMMWORD[r14]
+ vmovdqu xmm9,XMMWORD[r15]
+ vmovdqu XMMWORD[rbp],xmm2
+ vpxor xmm2,xmm2,xmm15
+ vmovdqu XMMWORD[16+rbp],xmm3
+ vpxor xmm3,xmm3,xmm15
+ vmovdqu XMMWORD[32+rbp],xmm4
+ vpxor xmm4,xmm4,xmm15
+ vmovdqu XMMWORD[48+rbp],xmm5
+ vpxor xmm5,xmm5,xmm15
+ vmovdqu XMMWORD[64+rbp],xmm6
+ vpxor xmm6,xmm6,xmm15
+ vmovdqu XMMWORD[80+rbp],xmm7
+ vpxor xmm7,xmm7,xmm15
+ vmovdqu XMMWORD[96+rbp],xmm8
+ vpxor xmm8,xmm8,xmm15
+ vmovdqu XMMWORD[112+rbp],xmm9
+ vpxor xmm9,xmm9,xmm15
+ xor rbp,0x80
+ mov ecx,1
+ jmp NEAR $L$oop_dec8x
+
+ALIGN 32
+$L$oop_dec8x:
+ vaesdec xmm2,xmm2,xmm1
+ cmp ecx,DWORD[((32+0))+rsp]
+ vaesdec xmm3,xmm3,xmm1
+ prefetcht0 [31+r8]
+ vaesdec xmm4,xmm4,xmm1
+ vaesdec xmm5,xmm5,xmm1
+ lea rbx,[rbx*1+r8]
+ cmovge r8,rsp
+ vaesdec xmm6,xmm6,xmm1
+ cmovg rbx,rsp
+ vaesdec xmm7,xmm7,xmm1
+ sub rbx,r8
+ vaesdec xmm8,xmm8,xmm1
+ vmovdqu xmm10,XMMWORD[16+r8]
+ mov QWORD[((64+0))+rsp],rbx
+ vaesdec xmm9,xmm9,xmm1
+ vmovups xmm1,XMMWORD[((-72))+rsi]
+ lea r8,[16+rbx*1+r8]
+ vmovdqu XMMWORD[128+rsp],xmm10
+ vaesdec xmm2,xmm2,xmm0
+ cmp ecx,DWORD[((32+4))+rsp]
+ mov rbx,QWORD[((64+8))+rsp]
+ vaesdec xmm3,xmm3,xmm0
+ prefetcht0 [31+r9]
+ vaesdec xmm4,xmm4,xmm0
+ vaesdec xmm5,xmm5,xmm0
+ lea rbx,[rbx*1+r9]
+ cmovge r9,rsp
+ vaesdec xmm6,xmm6,xmm0
+ cmovg rbx,rsp
+ vaesdec xmm7,xmm7,xmm0
+ sub rbx,r9
+ vaesdec xmm8,xmm8,xmm0
+ vmovdqu xmm11,XMMWORD[16+r9]
+ mov QWORD[((64+8))+rsp],rbx
+ vaesdec xmm9,xmm9,xmm0
+ vmovups xmm0,XMMWORD[((-56))+rsi]
+ lea r9,[16+rbx*1+r9]
+ vmovdqu XMMWORD[144+rsp],xmm11
+ vaesdec xmm2,xmm2,xmm1
+ cmp ecx,DWORD[((32+8))+rsp]
+ mov rbx,QWORD[((64+16))+rsp]
+ vaesdec xmm3,xmm3,xmm1
+ prefetcht0 [31+r10]
+ vaesdec xmm4,xmm4,xmm1
+ prefetcht0 [15+r8]
+ vaesdec xmm5,xmm5,xmm1
+ lea rbx,[rbx*1+r10]
+ cmovge r10,rsp
+ vaesdec xmm6,xmm6,xmm1
+ cmovg rbx,rsp
+ vaesdec xmm7,xmm7,xmm1
+ sub rbx,r10
+ vaesdec xmm8,xmm8,xmm1
+ vmovdqu xmm12,XMMWORD[16+r10]
+ mov QWORD[((64+16))+rsp],rbx
+ vaesdec xmm9,xmm9,xmm1
+ vmovups xmm1,XMMWORD[((-40))+rsi]
+ lea r10,[16+rbx*1+r10]
+ vmovdqu XMMWORD[160+rsp],xmm12
+ vaesdec xmm2,xmm2,xmm0
+ cmp ecx,DWORD[((32+12))+rsp]
+ mov rbx,QWORD[((64+24))+rsp]
+ vaesdec xmm3,xmm3,xmm0
+ prefetcht0 [31+r11]
+ vaesdec xmm4,xmm4,xmm0
+ prefetcht0 [15+r9]
+ vaesdec xmm5,xmm5,xmm0
+ lea rbx,[rbx*1+r11]
+ cmovge r11,rsp
+ vaesdec xmm6,xmm6,xmm0
+ cmovg rbx,rsp
+ vaesdec xmm7,xmm7,xmm0
+ sub rbx,r11
+ vaesdec xmm8,xmm8,xmm0
+ vmovdqu xmm13,XMMWORD[16+r11]
+ mov QWORD[((64+24))+rsp],rbx
+ vaesdec xmm9,xmm9,xmm0
+ vmovups xmm0,XMMWORD[((-24))+rsi]
+ lea r11,[16+rbx*1+r11]
+ vmovdqu XMMWORD[176+rsp],xmm13
+ vaesdec xmm2,xmm2,xmm1
+ cmp ecx,DWORD[((32+16))+rsp]
+ mov rbx,QWORD[((64+32))+rsp]
+ vaesdec xmm3,xmm3,xmm1
+ prefetcht0 [31+r12]
+ vaesdec xmm4,xmm4,xmm1
+ prefetcht0 [15+r10]
+ vaesdec xmm5,xmm5,xmm1
+ lea rbx,[rbx*1+r12]
+ cmovge r12,rsp
+ vaesdec xmm6,xmm6,xmm1
+ cmovg rbx,rsp
+ vaesdec xmm7,xmm7,xmm1
+ sub rbx,r12
+ vaesdec xmm8,xmm8,xmm1
+ vmovdqu xmm10,XMMWORD[16+r12]
+ mov QWORD[((64+32))+rsp],rbx
+ vaesdec xmm9,xmm9,xmm1
+ vmovups xmm1,XMMWORD[((-8))+rsi]
+ lea r12,[16+rbx*1+r12]
+ vaesdec xmm2,xmm2,xmm0
+ cmp ecx,DWORD[((32+20))+rsp]
+ mov rbx,QWORD[((64+40))+rsp]
+ vaesdec xmm3,xmm3,xmm0
+ prefetcht0 [31+r13]
+ vaesdec xmm4,xmm4,xmm0
+ prefetcht0 [15+r11]
+ vaesdec xmm5,xmm5,xmm0
+ lea rbx,[r13*1+rbx]
+ cmovge r13,rsp
+ vaesdec xmm6,xmm6,xmm0
+ cmovg rbx,rsp
+ vaesdec xmm7,xmm7,xmm0
+ sub rbx,r13
+ vaesdec xmm8,xmm8,xmm0
+ vmovdqu xmm11,XMMWORD[16+r13]
+ mov QWORD[((64+40))+rsp],rbx
+ vaesdec xmm9,xmm9,xmm0
+ vmovups xmm0,XMMWORD[8+rsi]
+ lea r13,[16+rbx*1+r13]
+ vaesdec xmm2,xmm2,xmm1
+ cmp ecx,DWORD[((32+24))+rsp]
+ mov rbx,QWORD[((64+48))+rsp]
+ vaesdec xmm3,xmm3,xmm1
+ prefetcht0 [31+r14]
+ vaesdec xmm4,xmm4,xmm1
+ prefetcht0 [15+r12]
+ vaesdec xmm5,xmm5,xmm1
+ lea rbx,[rbx*1+r14]
+ cmovge r14,rsp
+ vaesdec xmm6,xmm6,xmm1
+ cmovg rbx,rsp
+ vaesdec xmm7,xmm7,xmm1
+ sub rbx,r14
+ vaesdec xmm8,xmm8,xmm1
+ vmovdqu xmm12,XMMWORD[16+r14]
+ mov QWORD[((64+48))+rsp],rbx
+ vaesdec xmm9,xmm9,xmm1
+ vmovups xmm1,XMMWORD[24+rsi]
+ lea r14,[16+rbx*1+r14]
+ vaesdec xmm2,xmm2,xmm0
+ cmp ecx,DWORD[((32+28))+rsp]
+ mov rbx,QWORD[((64+56))+rsp]
+ vaesdec xmm3,xmm3,xmm0
+ prefetcht0 [31+r15]
+ vaesdec xmm4,xmm4,xmm0
+ prefetcht0 [15+r13]
+ vaesdec xmm5,xmm5,xmm0
+ lea rbx,[rbx*1+r15]
+ cmovge r15,rsp
+ vaesdec xmm6,xmm6,xmm0
+ cmovg rbx,rsp
+ vaesdec xmm7,xmm7,xmm0
+ sub rbx,r15
+ vaesdec xmm8,xmm8,xmm0
+ vmovdqu xmm13,XMMWORD[16+r15]
+ mov QWORD[((64+56))+rsp],rbx
+ vaesdec xmm9,xmm9,xmm0
+ vmovups xmm0,XMMWORD[40+rsi]
+ lea r15,[16+rbx*1+r15]
+ vmovdqu xmm14,XMMWORD[32+rsp]
+ prefetcht0 [15+r14]
+ prefetcht0 [15+r15]
+ cmp eax,11
+ jb NEAR $L$dec8x_tail
+
+ vaesdec xmm2,xmm2,xmm1
+ vaesdec xmm3,xmm3,xmm1
+ vaesdec xmm4,xmm4,xmm1
+ vaesdec xmm5,xmm5,xmm1
+ vaesdec xmm6,xmm6,xmm1
+ vaesdec xmm7,xmm7,xmm1
+ vaesdec xmm8,xmm8,xmm1
+ vaesdec xmm9,xmm9,xmm1
+ vmovups xmm1,XMMWORD[((176-120))+rsi]
+
+ vaesdec xmm2,xmm2,xmm0
+ vaesdec xmm3,xmm3,xmm0
+ vaesdec xmm4,xmm4,xmm0
+ vaesdec xmm5,xmm5,xmm0
+ vaesdec xmm6,xmm6,xmm0
+ vaesdec xmm7,xmm7,xmm0
+ vaesdec xmm8,xmm8,xmm0
+ vaesdec xmm9,xmm9,xmm0
+ vmovups xmm0,XMMWORD[((192-120))+rsi]
+ je NEAR $L$dec8x_tail
+
+ vaesdec xmm2,xmm2,xmm1
+ vaesdec xmm3,xmm3,xmm1
+ vaesdec xmm4,xmm4,xmm1
+ vaesdec xmm5,xmm5,xmm1
+ vaesdec xmm6,xmm6,xmm1
+ vaesdec xmm7,xmm7,xmm1
+ vaesdec xmm8,xmm8,xmm1
+ vaesdec xmm9,xmm9,xmm1
+ vmovups xmm1,XMMWORD[((208-120))+rsi]
+
+ vaesdec xmm2,xmm2,xmm0
+ vaesdec xmm3,xmm3,xmm0
+ vaesdec xmm4,xmm4,xmm0
+ vaesdec xmm5,xmm5,xmm0
+ vaesdec xmm6,xmm6,xmm0
+ vaesdec xmm7,xmm7,xmm0
+ vaesdec xmm8,xmm8,xmm0
+ vaesdec xmm9,xmm9,xmm0
+ vmovups xmm0,XMMWORD[((224-120))+rsi]
+
+$L$dec8x_tail:
+ vaesdec xmm2,xmm2,xmm1
+ vpxor xmm15,xmm15,xmm15
+ vaesdec xmm3,xmm3,xmm1
+ vaesdec xmm4,xmm4,xmm1
+ vpcmpgtd xmm15,xmm14,xmm15
+ vaesdec xmm5,xmm5,xmm1
+ vaesdec xmm6,xmm6,xmm1
+ vpaddd xmm15,xmm15,xmm14
+ vmovdqu xmm14,XMMWORD[48+rsp]
+ vaesdec xmm7,xmm7,xmm1
+ mov rbx,QWORD[64+rsp]
+ vaesdec xmm8,xmm8,xmm1
+ vaesdec xmm9,xmm9,xmm1
+ vmovups xmm1,XMMWORD[((16-120))+rsi]
+
+ vaesdeclast xmm2,xmm2,xmm0
+ vmovdqa XMMWORD[32+rsp],xmm15
+ vpxor xmm15,xmm15,xmm15
+ vaesdeclast xmm3,xmm3,xmm0
+ vpxor xmm2,xmm2,XMMWORD[rbp]
+ vaesdeclast xmm4,xmm4,xmm0
+ vpxor xmm3,xmm3,XMMWORD[16+rbp]
+ vpcmpgtd xmm15,xmm14,xmm15
+ vaesdeclast xmm5,xmm5,xmm0
+ vpxor xmm4,xmm4,XMMWORD[32+rbp]
+ vaesdeclast xmm6,xmm6,xmm0
+ vpxor xmm5,xmm5,XMMWORD[48+rbp]
+ vpaddd xmm14,xmm14,xmm15
+ vmovdqu xmm15,XMMWORD[((-120))+rsi]
+ vaesdeclast xmm7,xmm7,xmm0
+ vpxor xmm6,xmm6,XMMWORD[64+rbp]
+ vaesdeclast xmm8,xmm8,xmm0
+ vpxor xmm7,xmm7,XMMWORD[80+rbp]
+ vmovdqa XMMWORD[48+rsp],xmm14
+ vaesdeclast xmm9,xmm9,xmm0
+ vpxor xmm8,xmm8,XMMWORD[96+rbp]
+ vmovups xmm0,XMMWORD[((32-120))+rsi]
+
+ vmovups XMMWORD[(-16)+r8],xmm2
+ sub r8,rbx
+ vmovdqu xmm2,XMMWORD[((128+0))+rsp]
+ vpxor xmm9,xmm9,XMMWORD[112+rbp]
+ vmovups XMMWORD[(-16)+r9],xmm3
+ sub r9,QWORD[72+rsp]
+ vmovdqu XMMWORD[rbp],xmm2
+ vpxor xmm2,xmm2,xmm15
+ vmovdqu xmm3,XMMWORD[((128+16))+rsp]
+ vmovups XMMWORD[(-16)+r10],xmm4
+ sub r10,QWORD[80+rsp]
+ vmovdqu XMMWORD[16+rbp],xmm3
+ vpxor xmm3,xmm3,xmm15
+ vmovdqu xmm4,XMMWORD[((128+32))+rsp]
+ vmovups XMMWORD[(-16)+r11],xmm5
+ sub r11,QWORD[88+rsp]
+ vmovdqu XMMWORD[32+rbp],xmm4
+ vpxor xmm4,xmm4,xmm15
+ vmovdqu xmm5,XMMWORD[((128+48))+rsp]
+ vmovups XMMWORD[(-16)+r12],xmm6
+ sub r12,QWORD[96+rsp]
+ vmovdqu XMMWORD[48+rbp],xmm5
+ vpxor xmm5,xmm5,xmm15
+ vmovdqu XMMWORD[64+rbp],xmm10
+ vpxor xmm6,xmm15,xmm10
+ vmovups XMMWORD[(-16)+r13],xmm7
+ sub r13,QWORD[104+rsp]
+ vmovdqu XMMWORD[80+rbp],xmm11
+ vpxor xmm7,xmm15,xmm11
+ vmovups XMMWORD[(-16)+r14],xmm8
+ sub r14,QWORD[112+rsp]
+ vmovdqu XMMWORD[96+rbp],xmm12
+ vpxor xmm8,xmm15,xmm12
+ vmovups XMMWORD[(-16)+r15],xmm9
+ sub r15,QWORD[120+rsp]
+ vmovdqu XMMWORD[112+rbp],xmm13
+ vpxor xmm9,xmm15,xmm13
+
+ xor rbp,128
+ dec edx
+ jnz NEAR $L$oop_dec8x
+
+ mov rax,QWORD[16+rsp]
+
+
+
+
+
+
+$L$dec8x_done:
+ vzeroupper
+ movaps xmm6,XMMWORD[((-216))+rax]
+ movaps xmm7,XMMWORD[((-200))+rax]
+ movaps xmm8,XMMWORD[((-184))+rax]
+ movaps xmm9,XMMWORD[((-168))+rax]
+ movaps xmm10,XMMWORD[((-152))+rax]
+ movaps xmm11,XMMWORD[((-136))+rax]
+ movaps xmm12,XMMWORD[((-120))+rax]
+ movaps xmm13,XMMWORD[((-104))+rax]
+ movaps xmm14,XMMWORD[((-88))+rax]
+ movaps xmm15,XMMWORD[((-72))+rax]
+ mov r15,QWORD[((-48))+rax]
+
+ mov r14,QWORD[((-40))+rax]
+
+ mov r13,QWORD[((-32))+rax]
+
+ mov r12,QWORD[((-24))+rax]
+
+ mov rbp,QWORD[((-16))+rax]
+
+ mov rbx,QWORD[((-8))+rax]
+
+ lea rsp,[rax]
+
+$L$dec8x_epilogue:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_multi_cbc_decrypt_avx:
+EXTERN __imp_RtlVirtualUnwind
+
+ALIGN 16
+se_handler:
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD[120+r8]
+ mov rbx,QWORD[248+r8]
+
+ mov rsi,QWORD[8+r9]
+ mov r11,QWORD[56+r9]
+
+ mov r10d,DWORD[r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jb NEAR $L$in_prologue
+
+ mov rax,QWORD[152+r8]
+
+ mov r10d,DWORD[4+r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jae NEAR $L$in_prologue
+
+ mov rax,QWORD[16+rax]
+
+ mov rbx,QWORD[((-8))+rax]
+ mov rbp,QWORD[((-16))+rax]
+ mov r12,QWORD[((-24))+rax]
+ mov r13,QWORD[((-32))+rax]
+ mov r14,QWORD[((-40))+rax]
+ mov r15,QWORD[((-48))+rax]
+ mov QWORD[144+r8],rbx
+ mov QWORD[160+r8],rbp
+ mov QWORD[216+r8],r12
+ mov QWORD[224+r8],r13
+ mov QWORD[232+r8],r14
+ mov QWORD[240+r8],r15
+
+ lea rsi,[((-56-160))+rax]
+ lea rdi,[512+r8]
+ mov ecx,20
+ DD 0xa548f3fc
+
+$L$in_prologue:
+ mov rdi,QWORD[8+rax]
+ mov rsi,QWORD[16+rax]
+ mov QWORD[152+r8],rax
+ mov QWORD[168+r8],rsi
+ mov QWORD[176+r8],rdi
+
+ mov rdi,QWORD[40+r9]
+ mov rsi,r8
+ mov ecx,154
+ DD 0xa548f3fc
+
+ mov rsi,r9
+ xor rcx,rcx
+ mov rdx,QWORD[8+rsi]
+ mov r8,QWORD[rsi]
+ mov r9,QWORD[16+rsi]
+ mov r10,QWORD[40+rsi]
+ lea r11,[56+rsi]
+ lea r12,[24+rsi]
+ mov QWORD[32+rsp],r10
+ mov QWORD[40+rsp],r11
+ mov QWORD[48+rsp],r12
+ mov QWORD[56+rsp],rcx
+ call QWORD[__imp_RtlVirtualUnwind]
+
+ mov eax,1
+ add rsp,64
+ popfq
+ pop r15
+ pop r14
+ pop r13
+ pop r12
+ pop rbp
+ pop rbx
+ pop rdi
+ pop rsi
+ DB 0F3h,0C3h ;repret
+
+
+section .pdata rdata align=4
+ALIGN 4
+ DD $L$SEH_begin_aesni_multi_cbc_encrypt wrt ..imagebase
+ DD $L$SEH_end_aesni_multi_cbc_encrypt wrt ..imagebase
+ DD $L$SEH_info_aesni_multi_cbc_encrypt wrt ..imagebase
+ DD $L$SEH_begin_aesni_multi_cbc_decrypt wrt ..imagebase
+ DD $L$SEH_end_aesni_multi_cbc_decrypt wrt ..imagebase
+ DD $L$SEH_info_aesni_multi_cbc_decrypt wrt ..imagebase
+ DD $L$SEH_begin_aesni_multi_cbc_encrypt_avx wrt ..imagebase
+ DD $L$SEH_end_aesni_multi_cbc_encrypt_avx wrt ..imagebase
+ DD $L$SEH_info_aesni_multi_cbc_encrypt_avx wrt ..imagebase
+ DD $L$SEH_begin_aesni_multi_cbc_decrypt_avx wrt ..imagebase
+ DD $L$SEH_end_aesni_multi_cbc_decrypt_avx wrt ..imagebase
+ DD $L$SEH_info_aesni_multi_cbc_decrypt_avx wrt ..imagebase
+section .xdata rdata align=8
+ALIGN 8
+$L$SEH_info_aesni_multi_cbc_encrypt:
+DB 9,0,0,0
+ DD se_handler wrt ..imagebase
+ DD $L$enc4x_body wrt ..imagebase,$L$enc4x_epilogue wrt ..imagebase
+$L$SEH_info_aesni_multi_cbc_decrypt:
+DB 9,0,0,0
+ DD se_handler wrt ..imagebase
+ DD $L$dec4x_body wrt ..imagebase,$L$dec4x_epilogue wrt ..imagebase
+$L$SEH_info_aesni_multi_cbc_encrypt_avx:
+DB 9,0,0,0
+ DD se_handler wrt ..imagebase
+ DD $L$enc8x_body wrt ..imagebase,$L$enc8x_epilogue wrt ..imagebase
+$L$SEH_info_aesni_multi_cbc_decrypt_avx:
+DB 9,0,0,0
+ DD se_handler wrt ..imagebase
+ DD $L$dec8x_body wrt ..imagebase,$L$dec8x_epilogue wrt ..imagebase
diff --git a/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aesni-sha1-x86_64.nasm b/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aesni-sha1-x86_64.nasm
new file mode 100644
index 0000000000..f4ed3f7084
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aesni-sha1-x86_64.nasm
@@ -0,0 +1,3268 @@
+default rel
+%define XMMWORD
+%define YMMWORD
+%define ZMMWORD
+section .text code align=64
+
+EXTERN OPENSSL_ia32cap_P
+
+global aesni_cbc_sha1_enc
+
+ALIGN 32
+aesni_cbc_sha1_enc:
+
+
+ mov r10d,DWORD[((OPENSSL_ia32cap_P+0))]
+ mov r11,QWORD[((OPENSSL_ia32cap_P+4))]
+ bt r11,61
+ jc NEAR aesni_cbc_sha1_enc_shaext
+ and r11d,268435456
+ and r10d,1073741824
+ or r10d,r11d
+ cmp r10d,1342177280
+ je NEAR aesni_cbc_sha1_enc_avx
+ jmp NEAR aesni_cbc_sha1_enc_ssse3
+ DB 0F3h,0C3h ;repret
+
+
+
+ALIGN 32
+aesni_cbc_sha1_enc_ssse3:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_cbc_sha1_enc_ssse3:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD[40+rsp]
+ mov r9,QWORD[48+rsp]
+
+
+
+ mov r10,QWORD[56+rsp]
+
+
+ push rbx
+
+ push rbp
+
+ push r12
+
+ push r13
+
+ push r14
+
+ push r15
+
+ lea rsp,[((-264))+rsp]
+
+
+
+ movaps XMMWORD[(96+0)+rsp],xmm6
+ movaps XMMWORD[(96+16)+rsp],xmm7
+ movaps XMMWORD[(96+32)+rsp],xmm8
+ movaps XMMWORD[(96+48)+rsp],xmm9
+ movaps XMMWORD[(96+64)+rsp],xmm10
+ movaps XMMWORD[(96+80)+rsp],xmm11
+ movaps XMMWORD[(96+96)+rsp],xmm12
+ movaps XMMWORD[(96+112)+rsp],xmm13
+ movaps XMMWORD[(96+128)+rsp],xmm14
+ movaps XMMWORD[(96+144)+rsp],xmm15
+$L$prologue_ssse3:
+ mov r12,rdi
+ mov r13,rsi
+ mov r14,rdx
+ lea r15,[112+rcx]
+ movdqu xmm2,XMMWORD[r8]
+ mov QWORD[88+rsp],r8
+ shl r14,6
+ sub r13,r12
+ mov r8d,DWORD[((240-112))+r15]
+ add r14,r10
+
+ lea r11,[K_XX_XX]
+ mov eax,DWORD[r9]
+ mov ebx,DWORD[4+r9]
+ mov ecx,DWORD[8+r9]
+ mov edx,DWORD[12+r9]
+ mov esi,ebx
+ mov ebp,DWORD[16+r9]
+ mov edi,ecx
+ xor edi,edx
+ and esi,edi
+
+ movdqa xmm3,XMMWORD[64+r11]
+ movdqa xmm13,XMMWORD[r11]
+ movdqu xmm4,XMMWORD[r10]
+ movdqu xmm5,XMMWORD[16+r10]
+ movdqu xmm6,XMMWORD[32+r10]
+ movdqu xmm7,XMMWORD[48+r10]
+DB 102,15,56,0,227
+DB 102,15,56,0,235
+DB 102,15,56,0,243
+ add r10,64
+ paddd xmm4,xmm13
+DB 102,15,56,0,251
+ paddd xmm5,xmm13
+ paddd xmm6,xmm13
+ movdqa XMMWORD[rsp],xmm4
+ psubd xmm4,xmm13
+ movdqa XMMWORD[16+rsp],xmm5
+ psubd xmm5,xmm13
+ movdqa XMMWORD[32+rsp],xmm6
+ psubd xmm6,xmm13
+ movups xmm15,XMMWORD[((-112))+r15]
+ movups xmm0,XMMWORD[((16-112))+r15]
+ jmp NEAR $L$oop_ssse3
+ALIGN 32
+$L$oop_ssse3:
+ ror ebx,2
+ movups xmm14,XMMWORD[r12]
+ xorps xmm14,xmm15
+ xorps xmm2,xmm14
+ movups xmm1,XMMWORD[((-80))+r15]
+DB 102,15,56,220,208
+ pshufd xmm8,xmm4,238
+ xor esi,edx
+ movdqa xmm12,xmm7
+ paddd xmm13,xmm7
+ mov edi,eax
+ add ebp,DWORD[rsp]
+ punpcklqdq xmm8,xmm5
+ xor ebx,ecx
+ rol eax,5
+ add ebp,esi
+ psrldq xmm12,4
+ and edi,ebx
+ xor ebx,ecx
+ pxor xmm8,xmm4
+ add ebp,eax
+ ror eax,7
+ pxor xmm12,xmm6
+ xor edi,ecx
+ mov esi,ebp
+ add edx,DWORD[4+rsp]
+ pxor xmm8,xmm12
+ xor eax,ebx
+ rol ebp,5
+ movdqa XMMWORD[48+rsp],xmm13
+ add edx,edi
+ movups xmm0,XMMWORD[((-64))+r15]
+DB 102,15,56,220,209
+ and esi,eax
+ movdqa xmm3,xmm8
+ xor eax,ebx
+ add edx,ebp
+ ror ebp,7
+ movdqa xmm12,xmm8
+ xor esi,ebx
+ pslldq xmm3,12
+ paddd xmm8,xmm8
+ mov edi,edx
+ add ecx,DWORD[8+rsp]
+ psrld xmm12,31
+ xor ebp,eax
+ rol edx,5
+ add ecx,esi
+ movdqa xmm13,xmm3
+ and edi,ebp
+ xor ebp,eax
+ psrld xmm3,30
+ add ecx,edx
+ ror edx,7
+ por xmm8,xmm12
+ xor edi,eax
+ mov esi,ecx
+ add ebx,DWORD[12+rsp]
+ movups xmm1,XMMWORD[((-48))+r15]
+DB 102,15,56,220,208
+ pslld xmm13,2
+ pxor xmm8,xmm3
+ xor edx,ebp
+ movdqa xmm3,XMMWORD[r11]
+ rol ecx,5
+ add ebx,edi
+ and esi,edx
+ pxor xmm8,xmm13
+ xor edx,ebp
+ add ebx,ecx
+ ror ecx,7
+ pshufd xmm9,xmm5,238
+ xor esi,ebp
+ movdqa xmm13,xmm8
+ paddd xmm3,xmm8
+ mov edi,ebx
+ add eax,DWORD[16+rsp]
+ punpcklqdq xmm9,xmm6
+ xor ecx,edx
+ rol ebx,5
+ add eax,esi
+ psrldq xmm13,4
+ and edi,ecx
+ xor ecx,edx
+ pxor xmm9,xmm5
+ add eax,ebx
+ ror ebx,7
+ movups xmm0,XMMWORD[((-32))+r15]
+DB 102,15,56,220,209
+ pxor xmm13,xmm7
+ xor edi,edx
+ mov esi,eax
+ add ebp,DWORD[20+rsp]
+ pxor xmm9,xmm13
+ xor ebx,ecx
+ rol eax,5
+ movdqa XMMWORD[rsp],xmm3
+ add ebp,edi
+ and esi,ebx
+ movdqa xmm12,xmm9
+ xor ebx,ecx
+ add ebp,eax
+ ror eax,7
+ movdqa xmm13,xmm9
+ xor esi,ecx
+ pslldq xmm12,12
+ paddd xmm9,xmm9
+ mov edi,ebp
+ add edx,DWORD[24+rsp]
+ psrld xmm13,31
+ xor eax,ebx
+ rol ebp,5
+ add edx,esi
+ movups xmm1,XMMWORD[((-16))+r15]
+DB 102,15,56,220,208
+ movdqa xmm3,xmm12
+ and edi,eax
+ xor eax,ebx
+ psrld xmm12,30
+ add edx,ebp
+ ror ebp,7
+ por xmm9,xmm13
+ xor edi,ebx
+ mov esi,edx
+ add ecx,DWORD[28+rsp]
+ pslld xmm3,2
+ pxor xmm9,xmm12
+ xor ebp,eax
+ movdqa xmm12,XMMWORD[16+r11]
+ rol edx,5
+ add ecx,edi
+ and esi,ebp
+ pxor xmm9,xmm3
+ xor ebp,eax
+ add ecx,edx
+ ror edx,7
+ pshufd xmm10,xmm6,238
+ xor esi,eax
+ movdqa xmm3,xmm9
+ paddd xmm12,xmm9
+ mov edi,ecx
+ add ebx,DWORD[32+rsp]
+ movups xmm0,XMMWORD[r15]
+DB 102,15,56,220,209
+ punpcklqdq xmm10,xmm7
+ xor edx,ebp
+ rol ecx,5
+ add ebx,esi
+ psrldq xmm3,4
+ and edi,edx
+ xor edx,ebp
+ pxor xmm10,xmm6
+ add ebx,ecx
+ ror ecx,7
+ pxor xmm3,xmm8
+ xor edi,ebp
+ mov esi,ebx
+ add eax,DWORD[36+rsp]
+ pxor xmm10,xmm3
+ xor ecx,edx
+ rol ebx,5
+ movdqa XMMWORD[16+rsp],xmm12
+ add eax,edi
+ and esi,ecx
+ movdqa xmm13,xmm10
+ xor ecx,edx
+ add eax,ebx
+ ror ebx,7
+ movups xmm1,XMMWORD[16+r15]
+DB 102,15,56,220,208
+ movdqa xmm3,xmm10
+ xor esi,edx
+ pslldq xmm13,12
+ paddd xmm10,xmm10
+ mov edi,eax
+ add ebp,DWORD[40+rsp]
+ psrld xmm3,31
+ xor ebx,ecx
+ rol eax,5
+ add ebp,esi
+ movdqa xmm12,xmm13
+ and edi,ebx
+ xor ebx,ecx
+ psrld xmm13,30
+ add ebp,eax
+ ror eax,7
+ por xmm10,xmm3
+ xor edi,ecx
+ mov esi,ebp
+ add edx,DWORD[44+rsp]
+ pslld xmm12,2
+ pxor xmm10,xmm13
+ xor eax,ebx
+ movdqa xmm13,XMMWORD[16+r11]
+ rol ebp,5
+ add edx,edi
+ movups xmm0,XMMWORD[32+r15]
+DB 102,15,56,220,209
+ and esi,eax
+ pxor xmm10,xmm12
+ xor eax,ebx
+ add edx,ebp
+ ror ebp,7
+ pshufd xmm11,xmm7,238
+ xor esi,ebx
+ movdqa xmm12,xmm10
+ paddd xmm13,xmm10
+ mov edi,edx
+ add ecx,DWORD[48+rsp]
+ punpcklqdq xmm11,xmm8
+ xor ebp,eax
+ rol edx,5
+ add ecx,esi
+ psrldq xmm12,4
+ and edi,ebp
+ xor ebp,eax
+ pxor xmm11,xmm7
+ add ecx,edx
+ ror edx,7
+ pxor xmm12,xmm9
+ xor edi,eax
+ mov esi,ecx
+ add ebx,DWORD[52+rsp]
+ movups xmm1,XMMWORD[48+r15]
+DB 102,15,56,220,208
+ pxor xmm11,xmm12
+ xor edx,ebp
+ rol ecx,5
+ movdqa XMMWORD[32+rsp],xmm13
+ add ebx,edi
+ and esi,edx
+ movdqa xmm3,xmm11
+ xor edx,ebp
+ add ebx,ecx
+ ror ecx,7
+ movdqa xmm12,xmm11
+ xor esi,ebp
+ pslldq xmm3,12
+ paddd xmm11,xmm11
+ mov edi,ebx
+ add eax,DWORD[56+rsp]
+ psrld xmm12,31
+ xor ecx,edx
+ rol ebx,5
+ add eax,esi
+ movdqa xmm13,xmm3
+ and edi,ecx
+ xor ecx,edx
+ psrld xmm3,30
+ add eax,ebx
+ ror ebx,7
+ cmp r8d,11
+ jb NEAR $L$aesenclast1
+ movups xmm0,XMMWORD[64+r15]
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[80+r15]
+DB 102,15,56,220,208
+ je NEAR $L$aesenclast1
+ movups xmm0,XMMWORD[96+r15]
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[112+r15]
+DB 102,15,56,220,208
+$L$aesenclast1:
+DB 102,15,56,221,209
+ movups xmm0,XMMWORD[((16-112))+r15]
+ por xmm11,xmm12
+ xor edi,edx
+ mov esi,eax
+ add ebp,DWORD[60+rsp]
+ pslld xmm13,2
+ pxor xmm11,xmm3
+ xor ebx,ecx
+ movdqa xmm3,XMMWORD[16+r11]
+ rol eax,5
+ add ebp,edi
+ and esi,ebx
+ pxor xmm11,xmm13
+ pshufd xmm13,xmm10,238
+ xor ebx,ecx
+ add ebp,eax
+ ror eax,7
+ pxor xmm4,xmm8
+ xor esi,ecx
+ mov edi,ebp
+ add edx,DWORD[rsp]
+ punpcklqdq xmm13,xmm11
+ xor eax,ebx
+ rol ebp,5
+ pxor xmm4,xmm5
+ add edx,esi
+ movups xmm14,XMMWORD[16+r12]
+ xorps xmm14,xmm15
+ movups XMMWORD[r13*1+r12],xmm2
+ xorps xmm2,xmm14
+ movups xmm1,XMMWORD[((-80))+r15]
+DB 102,15,56,220,208
+ and edi,eax
+ movdqa xmm12,xmm3
+ xor eax,ebx
+ paddd xmm3,xmm11
+ add edx,ebp
+ pxor xmm4,xmm13
+ ror ebp,7
+ xor edi,ebx
+ mov esi,edx
+ add ecx,DWORD[4+rsp]
+ movdqa xmm13,xmm4
+ xor ebp,eax
+ rol edx,5
+ movdqa XMMWORD[48+rsp],xmm3
+ add ecx,edi
+ and esi,ebp
+ xor ebp,eax
+ pslld xmm4,2
+ add ecx,edx
+ ror edx,7
+ psrld xmm13,30
+ xor esi,eax
+ mov edi,ecx
+ add ebx,DWORD[8+rsp]
+ movups xmm0,XMMWORD[((-64))+r15]
+DB 102,15,56,220,209
+ por xmm4,xmm13
+ xor edx,ebp
+ rol ecx,5
+ pshufd xmm3,xmm11,238
+ add ebx,esi
+ and edi,edx
+ xor edx,ebp
+ add ebx,ecx
+ add eax,DWORD[12+rsp]
+ xor edi,ebp
+ mov esi,ebx
+ rol ebx,5
+ add eax,edi
+ xor esi,edx
+ ror ecx,7
+ add eax,ebx
+ pxor xmm5,xmm9
+ add ebp,DWORD[16+rsp]
+ movups xmm1,XMMWORD[((-48))+r15]
+DB 102,15,56,220,208
+ xor esi,ecx
+ punpcklqdq xmm3,xmm4
+ mov edi,eax
+ rol eax,5
+ pxor xmm5,xmm6
+ add ebp,esi
+ xor edi,ecx
+ movdqa xmm13,xmm12
+ ror ebx,7
+ paddd xmm12,xmm4
+ add ebp,eax
+ pxor xmm5,xmm3
+ add edx,DWORD[20+rsp]
+ xor edi,ebx
+ mov esi,ebp
+ rol ebp,5
+ movdqa xmm3,xmm5
+ add edx,edi
+ xor esi,ebx
+ movdqa XMMWORD[rsp],xmm12
+ ror eax,7
+ add edx,ebp
+ add ecx,DWORD[24+rsp]
+ pslld xmm5,2
+ xor esi,eax
+ mov edi,edx
+ psrld xmm3,30
+ rol edx,5
+ add ecx,esi
+ movups xmm0,XMMWORD[((-32))+r15]
+DB 102,15,56,220,209
+ xor edi,eax
+ ror ebp,7
+ por xmm5,xmm3
+ add ecx,edx
+ add ebx,DWORD[28+rsp]
+ pshufd xmm12,xmm4,238
+ xor edi,ebp
+ mov esi,ecx
+ rol ecx,5
+ add ebx,edi
+ xor esi,ebp
+ ror edx,7
+ add ebx,ecx
+ pxor xmm6,xmm10
+ add eax,DWORD[32+rsp]
+ xor esi,edx
+ punpcklqdq xmm12,xmm5
+ mov edi,ebx
+ rol ebx,5
+ pxor xmm6,xmm7
+ add eax,esi
+ xor edi,edx
+ movdqa xmm3,XMMWORD[32+r11]
+ ror ecx,7
+ paddd xmm13,xmm5
+ add eax,ebx
+ pxor xmm6,xmm12
+ add ebp,DWORD[36+rsp]
+ movups xmm1,XMMWORD[((-16))+r15]
+DB 102,15,56,220,208
+ xor edi,ecx
+ mov esi,eax
+ rol eax,5
+ movdqa xmm12,xmm6
+ add ebp,edi
+ xor esi,ecx
+ movdqa XMMWORD[16+rsp],xmm13
+ ror ebx,7
+ add ebp,eax
+ add edx,DWORD[40+rsp]
+ pslld xmm6,2
+ xor esi,ebx
+ mov edi,ebp
+ psrld xmm12,30
+ rol ebp,5
+ add edx,esi
+ xor edi,ebx
+ ror eax,7
+ por xmm6,xmm12
+ add edx,ebp
+ add ecx,DWORD[44+rsp]
+ pshufd xmm13,xmm5,238
+ xor edi,eax
+ mov esi,edx
+ rol edx,5
+ add ecx,edi
+ movups xmm0,XMMWORD[r15]
+DB 102,15,56,220,209
+ xor esi,eax
+ ror ebp,7
+ add ecx,edx
+ pxor xmm7,xmm11
+ add ebx,DWORD[48+rsp]
+ xor esi,ebp
+ punpcklqdq xmm13,xmm6
+ mov edi,ecx
+ rol ecx,5
+ pxor xmm7,xmm8
+ add ebx,esi
+ xor edi,ebp
+ movdqa xmm12,xmm3
+ ror edx,7
+ paddd xmm3,xmm6
+ add ebx,ecx
+ pxor xmm7,xmm13
+ add eax,DWORD[52+rsp]
+ xor edi,edx
+ mov esi,ebx
+ rol ebx,5
+ movdqa xmm13,xmm7
+ add eax,edi
+ xor esi,edx
+ movdqa XMMWORD[32+rsp],xmm3
+ ror ecx,7
+ add eax,ebx
+ add ebp,DWORD[56+rsp]
+ movups xmm1,XMMWORD[16+r15]
+DB 102,15,56,220,208
+ pslld xmm7,2
+ xor esi,ecx
+ mov edi,eax
+ psrld xmm13,30
+ rol eax,5
+ add ebp,esi
+ xor edi,ecx
+ ror ebx,7
+ por xmm7,xmm13
+ add ebp,eax
+ add edx,DWORD[60+rsp]
+ pshufd xmm3,xmm6,238
+ xor edi,ebx
+ mov esi,ebp
+ rol ebp,5
+ add edx,edi
+ xor esi,ebx
+ ror eax,7
+ add edx,ebp
+ pxor xmm8,xmm4
+ add ecx,DWORD[rsp]
+ xor esi,eax
+ punpcklqdq xmm3,xmm7
+ mov edi,edx
+ rol edx,5
+ pxor xmm8,xmm9
+ add ecx,esi
+ movups xmm0,XMMWORD[32+r15]
+DB 102,15,56,220,209
+ xor edi,eax
+ movdqa xmm13,xmm12
+ ror ebp,7
+ paddd xmm12,xmm7
+ add ecx,edx
+ pxor xmm8,xmm3
+ add ebx,DWORD[4+rsp]
+ xor edi,ebp
+ mov esi,ecx
+ rol ecx,5
+ movdqa xmm3,xmm8
+ add ebx,edi
+ xor esi,ebp
+ movdqa XMMWORD[48+rsp],xmm12
+ ror edx,7
+ add ebx,ecx
+ add eax,DWORD[8+rsp]
+ pslld xmm8,2
+ xor esi,edx
+ mov edi,ebx
+ psrld xmm3,30
+ rol ebx,5
+ add eax,esi
+ xor edi,edx
+ ror ecx,7
+ por xmm8,xmm3
+ add eax,ebx
+ add ebp,DWORD[12+rsp]
+ movups xmm1,XMMWORD[48+r15]
+DB 102,15,56,220,208
+ pshufd xmm12,xmm7,238
+ xor edi,ecx
+ mov esi,eax
+ rol eax,5
+ add ebp,edi
+ xor esi,ecx
+ ror ebx,7
+ add ebp,eax
+ pxor xmm9,xmm5
+ add edx,DWORD[16+rsp]
+ xor esi,ebx
+ punpcklqdq xmm12,xmm8
+ mov edi,ebp
+ rol ebp,5
+ pxor xmm9,xmm10
+ add edx,esi
+ xor edi,ebx
+ movdqa xmm3,xmm13
+ ror eax,7
+ paddd xmm13,xmm8
+ add edx,ebp
+ pxor xmm9,xmm12
+ add ecx,DWORD[20+rsp]
+ xor edi,eax
+ mov esi,edx
+ rol edx,5
+ movdqa xmm12,xmm9
+ add ecx,edi
+ cmp r8d,11
+ jb NEAR $L$aesenclast2
+ movups xmm0,XMMWORD[64+r15]
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[80+r15]
+DB 102,15,56,220,208
+ je NEAR $L$aesenclast2
+ movups xmm0,XMMWORD[96+r15]
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[112+r15]
+DB 102,15,56,220,208
+$L$aesenclast2:
+DB 102,15,56,221,209
+ movups xmm0,XMMWORD[((16-112))+r15]
+ xor esi,eax
+ movdqa XMMWORD[rsp],xmm13
+ ror ebp,7
+ add ecx,edx
+ add ebx,DWORD[24+rsp]
+ pslld xmm9,2
+ xor esi,ebp
+ mov edi,ecx
+ psrld xmm12,30
+ rol ecx,5
+ add ebx,esi
+ xor edi,ebp
+ ror edx,7
+ por xmm9,xmm12
+ add ebx,ecx
+ add eax,DWORD[28+rsp]
+ pshufd xmm13,xmm8,238
+ ror ecx,7
+ mov esi,ebx
+ xor edi,edx
+ rol ebx,5
+ add eax,edi
+ xor esi,ecx
+ xor ecx,edx
+ add eax,ebx
+ pxor xmm10,xmm6
+ add ebp,DWORD[32+rsp]
+ movups xmm14,XMMWORD[32+r12]
+ xorps xmm14,xmm15
+ movups XMMWORD[16+r12*1+r13],xmm2
+ xorps xmm2,xmm14
+ movups xmm1,XMMWORD[((-80))+r15]
+DB 102,15,56,220,208
+ and esi,ecx
+ xor ecx,edx
+ ror ebx,7
+ punpcklqdq xmm13,xmm9
+ mov edi,eax
+ xor esi,ecx
+ pxor xmm10,xmm11
+ rol eax,5
+ add ebp,esi
+ movdqa xmm12,xmm3
+ xor edi,ebx
+ paddd xmm3,xmm9
+ xor ebx,ecx
+ pxor xmm10,xmm13
+ add ebp,eax
+ add edx,DWORD[36+rsp]
+ and edi,ebx
+ xor ebx,ecx
+ ror eax,7
+ movdqa xmm13,xmm10
+ mov esi,ebp
+ xor edi,ebx
+ movdqa XMMWORD[16+rsp],xmm3
+ rol ebp,5
+ add edx,edi
+ movups xmm0,XMMWORD[((-64))+r15]
+DB 102,15,56,220,209
+ xor esi,eax
+ pslld xmm10,2
+ xor eax,ebx
+ add edx,ebp
+ psrld xmm13,30
+ add ecx,DWORD[40+rsp]
+ and esi,eax
+ xor eax,ebx
+ por xmm10,xmm13
+ ror ebp,7
+ mov edi,edx
+ xor esi,eax
+ rol edx,5
+ pshufd xmm3,xmm9,238
+ add ecx,esi
+ xor edi,ebp
+ xor ebp,eax
+ add ecx,edx
+ add ebx,DWORD[44+rsp]
+ and edi,ebp
+ xor ebp,eax
+ ror edx,7
+ movups xmm1,XMMWORD[((-48))+r15]
+DB 102,15,56,220,208
+ mov esi,ecx
+ xor edi,ebp
+ rol ecx,5
+ add ebx,edi
+ xor esi,edx
+ xor edx,ebp
+ add ebx,ecx
+ pxor xmm11,xmm7
+ add eax,DWORD[48+rsp]
+ and esi,edx
+ xor edx,ebp
+ ror ecx,7
+ punpcklqdq xmm3,xmm10
+ mov edi,ebx
+ xor esi,edx
+ pxor xmm11,xmm4
+ rol ebx,5
+ add eax,esi
+ movdqa xmm13,XMMWORD[48+r11]
+ xor edi,ecx
+ paddd xmm12,xmm10
+ xor ecx,edx
+ pxor xmm11,xmm3
+ add eax,ebx
+ add ebp,DWORD[52+rsp]
+ movups xmm0,XMMWORD[((-32))+r15]
+DB 102,15,56,220,209
+ and edi,ecx
+ xor ecx,edx
+ ror ebx,7
+ movdqa xmm3,xmm11
+ mov esi,eax
+ xor edi,ecx
+ movdqa XMMWORD[32+rsp],xmm12
+ rol eax,5
+ add ebp,edi
+ xor esi,ebx
+ pslld xmm11,2
+ xor ebx,ecx
+ add ebp,eax
+ psrld xmm3,30
+ add edx,DWORD[56+rsp]
+ and esi,ebx
+ xor ebx,ecx
+ por xmm11,xmm3
+ ror eax,7
+ mov edi,ebp
+ xor esi,ebx
+ rol ebp,5
+ pshufd xmm12,xmm10,238
+ add edx,esi
+ movups xmm1,XMMWORD[((-16))+r15]
+DB 102,15,56,220,208
+ xor edi,eax
+ xor eax,ebx
+ add edx,ebp
+ add ecx,DWORD[60+rsp]
+ and edi,eax
+ xor eax,ebx
+ ror ebp,7
+ mov esi,edx
+ xor edi,eax
+ rol edx,5
+ add ecx,edi
+ xor esi,ebp
+ xor ebp,eax
+ add ecx,edx
+ pxor xmm4,xmm8
+ add ebx,DWORD[rsp]
+ and esi,ebp
+ xor ebp,eax
+ ror edx,7
+ movups xmm0,XMMWORD[r15]
+DB 102,15,56,220,209
+ punpcklqdq xmm12,xmm11
+ mov edi,ecx
+ xor esi,ebp
+ pxor xmm4,xmm5
+ rol ecx,5
+ add ebx,esi
+ movdqa xmm3,xmm13
+ xor edi,edx
+ paddd xmm13,xmm11
+ xor edx,ebp
+ pxor xmm4,xmm12
+ add ebx,ecx
+ add eax,DWORD[4+rsp]
+ and edi,edx
+ xor edx,ebp
+ ror ecx,7
+ movdqa xmm12,xmm4
+ mov esi,ebx
+ xor edi,edx
+ movdqa XMMWORD[48+rsp],xmm13
+ rol ebx,5
+ add eax,edi
+ xor esi,ecx
+ pslld xmm4,2
+ xor ecx,edx
+ add eax,ebx
+ psrld xmm12,30
+ add ebp,DWORD[8+rsp]
+ movups xmm1,XMMWORD[16+r15]
+DB 102,15,56,220,208
+ and esi,ecx
+ xor ecx,edx
+ por xmm4,xmm12
+ ror ebx,7
+ mov edi,eax
+ xor esi,ecx
+ rol eax,5
+ pshufd xmm13,xmm11,238
+ add ebp,esi
+ xor edi,ebx
+ xor ebx,ecx
+ add ebp,eax
+ add edx,DWORD[12+rsp]
+ and edi,ebx
+ xor ebx,ecx
+ ror eax,7
+ mov esi,ebp
+ xor edi,ebx
+ rol ebp,5
+ add edx,edi
+ movups xmm0,XMMWORD[32+r15]
+DB 102,15,56,220,209
+ xor esi,eax
+ xor eax,ebx
+ add edx,ebp
+ pxor xmm5,xmm9
+ add ecx,DWORD[16+rsp]
+ and esi,eax
+ xor eax,ebx
+ ror ebp,7
+ punpcklqdq xmm13,xmm4
+ mov edi,edx
+ xor esi,eax
+ pxor xmm5,xmm6
+ rol edx,5
+ add ecx,esi
+ movdqa xmm12,xmm3
+ xor edi,ebp
+ paddd xmm3,xmm4
+ xor ebp,eax
+ pxor xmm5,xmm13
+ add ecx,edx
+ add ebx,DWORD[20+rsp]
+ and edi,ebp
+ xor ebp,eax
+ ror edx,7
+ movups xmm1,XMMWORD[48+r15]
+DB 102,15,56,220,208
+ movdqa xmm13,xmm5
+ mov esi,ecx
+ xor edi,ebp
+ movdqa XMMWORD[rsp],xmm3
+ rol ecx,5
+ add ebx,edi
+ xor esi,edx
+ pslld xmm5,2
+ xor edx,ebp
+ add ebx,ecx
+ psrld xmm13,30
+ add eax,DWORD[24+rsp]
+ and esi,edx
+ xor edx,ebp
+ por xmm5,xmm13
+ ror ecx,7
+ mov edi,ebx
+ xor esi,edx
+ rol ebx,5
+ pshufd xmm3,xmm4,238
+ add eax,esi
+ xor edi,ecx
+ xor ecx,edx
+ add eax,ebx
+ add ebp,DWORD[28+rsp]
+ cmp r8d,11
+ jb NEAR $L$aesenclast3
+ movups xmm0,XMMWORD[64+r15]
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[80+r15]
+DB 102,15,56,220,208
+ je NEAR $L$aesenclast3
+ movups xmm0,XMMWORD[96+r15]
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[112+r15]
+DB 102,15,56,220,208
+$L$aesenclast3:
+DB 102,15,56,221,209
+ movups xmm0,XMMWORD[((16-112))+r15]
+ and edi,ecx
+ xor ecx,edx
+ ror ebx,7
+ mov esi,eax
+ xor edi,ecx
+ rol eax,5
+ add ebp,edi
+ xor esi,ebx
+ xor ebx,ecx
+ add ebp,eax
+ pxor xmm6,xmm10
+ add edx,DWORD[32+rsp]
+ and esi,ebx
+ xor ebx,ecx
+ ror eax,7
+ punpcklqdq xmm3,xmm5
+ mov edi,ebp
+ xor esi,ebx
+ pxor xmm6,xmm7
+ rol ebp,5
+ add edx,esi
+ movups xmm14,XMMWORD[48+r12]
+ xorps xmm14,xmm15
+ movups XMMWORD[32+r12*1+r13],xmm2
+ xorps xmm2,xmm14
+ movups xmm1,XMMWORD[((-80))+r15]
+DB 102,15,56,220,208
+ movdqa xmm13,xmm12
+ xor edi,eax
+ paddd xmm12,xmm5
+ xor eax,ebx
+ pxor xmm6,xmm3
+ add edx,ebp
+ add ecx,DWORD[36+rsp]
+ and edi,eax
+ xor eax,ebx
+ ror ebp,7
+ movdqa xmm3,xmm6
+ mov esi,edx
+ xor edi,eax
+ movdqa XMMWORD[16+rsp],xmm12
+ rol edx,5
+ add ecx,edi
+ xor esi,ebp
+ pslld xmm6,2
+ xor ebp,eax
+ add ecx,edx
+ psrld xmm3,30
+ add ebx,DWORD[40+rsp]
+ and esi,ebp
+ xor ebp,eax
+ por xmm6,xmm3
+ ror edx,7
+ movups xmm0,XMMWORD[((-64))+r15]
+DB 102,15,56,220,209
+ mov edi,ecx
+ xor esi,ebp
+ rol ecx,5
+ pshufd xmm12,xmm5,238
+ add ebx,esi
+ xor edi,edx
+ xor edx,ebp
+ add ebx,ecx
+ add eax,DWORD[44+rsp]
+ and edi,edx
+ xor edx,ebp
+ ror ecx,7
+ mov esi,ebx
+ xor edi,edx
+ rol ebx,5
+ add eax,edi
+ xor esi,edx
+ add eax,ebx
+ pxor xmm7,xmm11
+ add ebp,DWORD[48+rsp]
+ movups xmm1,XMMWORD[((-48))+r15]
+DB 102,15,56,220,208
+ xor esi,ecx
+ punpcklqdq xmm12,xmm6
+ mov edi,eax
+ rol eax,5
+ pxor xmm7,xmm8
+ add ebp,esi
+ xor edi,ecx
+ movdqa xmm3,xmm13
+ ror ebx,7
+ paddd xmm13,xmm6
+ add ebp,eax
+ pxor xmm7,xmm12
+ add edx,DWORD[52+rsp]
+ xor edi,ebx
+ mov esi,ebp
+ rol ebp,5
+ movdqa xmm12,xmm7
+ add edx,edi
+ xor esi,ebx
+ movdqa XMMWORD[32+rsp],xmm13
+ ror eax,7
+ add edx,ebp
+ add ecx,DWORD[56+rsp]
+ pslld xmm7,2
+ xor esi,eax
+ mov edi,edx
+ psrld xmm12,30
+ rol edx,5
+ add ecx,esi
+ movups xmm0,XMMWORD[((-32))+r15]
+DB 102,15,56,220,209
+ xor edi,eax
+ ror ebp,7
+ por xmm7,xmm12
+ add ecx,edx
+ add ebx,DWORD[60+rsp]
+ xor edi,ebp
+ mov esi,ecx
+ rol ecx,5
+ add ebx,edi
+ xor esi,ebp
+ ror edx,7
+ add ebx,ecx
+ add eax,DWORD[rsp]
+ xor esi,edx
+ mov edi,ebx
+ rol ebx,5
+ paddd xmm3,xmm7
+ add eax,esi
+ xor edi,edx
+ movdqa XMMWORD[48+rsp],xmm3
+ ror ecx,7
+ add eax,ebx
+ add ebp,DWORD[4+rsp]
+ movups xmm1,XMMWORD[((-16))+r15]
+DB 102,15,56,220,208
+ xor edi,ecx
+ mov esi,eax
+ rol eax,5
+ add ebp,edi
+ xor esi,ecx
+ ror ebx,7
+ add ebp,eax
+ add edx,DWORD[8+rsp]
+ xor esi,ebx
+ mov edi,ebp
+ rol ebp,5
+ add edx,esi
+ xor edi,ebx
+ ror eax,7
+ add edx,ebp
+ add ecx,DWORD[12+rsp]
+ xor edi,eax
+ mov esi,edx
+ rol edx,5
+ add ecx,edi
+ movups xmm0,XMMWORD[r15]
+DB 102,15,56,220,209
+ xor esi,eax
+ ror ebp,7
+ add ecx,edx
+ cmp r10,r14
+ je NEAR $L$done_ssse3
+ movdqa xmm3,XMMWORD[64+r11]
+ movdqa xmm13,XMMWORD[r11]
+ movdqu xmm4,XMMWORD[r10]
+ movdqu xmm5,XMMWORD[16+r10]
+ movdqu xmm6,XMMWORD[32+r10]
+ movdqu xmm7,XMMWORD[48+r10]
+DB 102,15,56,0,227
+ add r10,64
+ add ebx,DWORD[16+rsp]
+ xor esi,ebp
+ mov edi,ecx
+DB 102,15,56,0,235
+ rol ecx,5
+ add ebx,esi
+ xor edi,ebp
+ ror edx,7
+ paddd xmm4,xmm13
+ add ebx,ecx
+ add eax,DWORD[20+rsp]
+ xor edi,edx
+ mov esi,ebx
+ movdqa XMMWORD[rsp],xmm4
+ rol ebx,5
+ add eax,edi
+ xor esi,edx
+ ror ecx,7
+ psubd xmm4,xmm13
+ add eax,ebx
+ add ebp,DWORD[24+rsp]
+ movups xmm1,XMMWORD[16+r15]
+DB 102,15,56,220,208
+ xor esi,ecx
+ mov edi,eax
+ rol eax,5
+ add ebp,esi
+ xor edi,ecx
+ ror ebx,7
+ add ebp,eax
+ add edx,DWORD[28+rsp]
+ xor edi,ebx
+ mov esi,ebp
+ rol ebp,5
+ add edx,edi
+ xor esi,ebx
+ ror eax,7
+ add edx,ebp
+ add ecx,DWORD[32+rsp]
+ xor esi,eax
+ mov edi,edx
+DB 102,15,56,0,243
+ rol edx,5
+ add ecx,esi
+ movups xmm0,XMMWORD[32+r15]
+DB 102,15,56,220,209
+ xor edi,eax
+ ror ebp,7
+ paddd xmm5,xmm13
+ add ecx,edx
+ add ebx,DWORD[36+rsp]
+ xor edi,ebp
+ mov esi,ecx
+ movdqa XMMWORD[16+rsp],xmm5
+ rol ecx,5
+ add ebx,edi
+ xor esi,ebp
+ ror edx,7
+ psubd xmm5,xmm13
+ add ebx,ecx
+ add eax,DWORD[40+rsp]
+ xor esi,edx
+ mov edi,ebx
+ rol ebx,5
+ add eax,esi
+ xor edi,edx
+ ror ecx,7
+ add eax,ebx
+ add ebp,DWORD[44+rsp]
+ movups xmm1,XMMWORD[48+r15]
+DB 102,15,56,220,208
+ xor edi,ecx
+ mov esi,eax
+ rol eax,5
+ add ebp,edi
+ xor esi,ecx
+ ror ebx,7
+ add ebp,eax
+ add edx,DWORD[48+rsp]
+ xor esi,ebx
+ mov edi,ebp
+DB 102,15,56,0,251
+ rol ebp,5
+ add edx,esi
+ xor edi,ebx
+ ror eax,7
+ paddd xmm6,xmm13
+ add edx,ebp
+ add ecx,DWORD[52+rsp]
+ xor edi,eax
+ mov esi,edx
+ movdqa XMMWORD[32+rsp],xmm6
+ rol edx,5
+ add ecx,edi
+ cmp r8d,11
+ jb NEAR $L$aesenclast4
+ movups xmm0,XMMWORD[64+r15]
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[80+r15]
+DB 102,15,56,220,208
+ je NEAR $L$aesenclast4
+ movups xmm0,XMMWORD[96+r15]
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[112+r15]
+DB 102,15,56,220,208
+$L$aesenclast4:
+DB 102,15,56,221,209
+ movups xmm0,XMMWORD[((16-112))+r15]
+ xor esi,eax
+ ror ebp,7
+ psubd xmm6,xmm13
+ add ecx,edx
+ add ebx,DWORD[56+rsp]
+ xor esi,ebp
+ mov edi,ecx
+ rol ecx,5
+ add ebx,esi
+ xor edi,ebp
+ ror edx,7
+ add ebx,ecx
+ add eax,DWORD[60+rsp]
+ xor edi,edx
+ mov esi,ebx
+ rol ebx,5
+ add eax,edi
+ ror ecx,7
+ add eax,ebx
+ movups XMMWORD[48+r12*1+r13],xmm2
+ lea r12,[64+r12]
+
+ add eax,DWORD[r9]
+ add esi,DWORD[4+r9]
+ add ecx,DWORD[8+r9]
+ add edx,DWORD[12+r9]
+ mov DWORD[r9],eax
+ add ebp,DWORD[16+r9]
+ mov DWORD[4+r9],esi
+ mov ebx,esi
+ mov DWORD[8+r9],ecx
+ mov edi,ecx
+ mov DWORD[12+r9],edx
+ xor edi,edx
+ mov DWORD[16+r9],ebp
+ and esi,edi
+ jmp NEAR $L$oop_ssse3
+
+$L$done_ssse3:
+ add ebx,DWORD[16+rsp]
+ xor esi,ebp
+ mov edi,ecx
+ rol ecx,5
+ add ebx,esi
+ xor edi,ebp
+ ror edx,7
+ add ebx,ecx
+ add eax,DWORD[20+rsp]
+ xor edi,edx
+ mov esi,ebx
+ rol ebx,5
+ add eax,edi
+ xor esi,edx
+ ror ecx,7
+ add eax,ebx
+ add ebp,DWORD[24+rsp]
+ movups xmm1,XMMWORD[16+r15]
+DB 102,15,56,220,208
+ xor esi,ecx
+ mov edi,eax
+ rol eax,5
+ add ebp,esi
+ xor edi,ecx
+ ror ebx,7
+ add ebp,eax
+ add edx,DWORD[28+rsp]
+ xor edi,ebx
+ mov esi,ebp
+ rol ebp,5
+ add edx,edi
+ xor esi,ebx
+ ror eax,7
+ add edx,ebp
+ add ecx,DWORD[32+rsp]
+ xor esi,eax
+ mov edi,edx
+ rol edx,5
+ add ecx,esi
+ movups xmm0,XMMWORD[32+r15]
+DB 102,15,56,220,209
+ xor edi,eax
+ ror ebp,7
+ add ecx,edx
+ add ebx,DWORD[36+rsp]
+ xor edi,ebp
+ mov esi,ecx
+ rol ecx,5
+ add ebx,edi
+ xor esi,ebp
+ ror edx,7
+ add ebx,ecx
+ add eax,DWORD[40+rsp]
+ xor esi,edx
+ mov edi,ebx
+ rol ebx,5
+ add eax,esi
+ xor edi,edx
+ ror ecx,7
+ add eax,ebx
+ add ebp,DWORD[44+rsp]
+ movups xmm1,XMMWORD[48+r15]
+DB 102,15,56,220,208
+ xor edi,ecx
+ mov esi,eax
+ rol eax,5
+ add ebp,edi
+ xor esi,ecx
+ ror ebx,7
+ add ebp,eax
+ add edx,DWORD[48+rsp]
+ xor esi,ebx
+ mov edi,ebp
+ rol ebp,5
+ add edx,esi
+ xor edi,ebx
+ ror eax,7
+ add edx,ebp
+ add ecx,DWORD[52+rsp]
+ xor edi,eax
+ mov esi,edx
+ rol edx,5
+ add ecx,edi
+ cmp r8d,11
+ jb NEAR $L$aesenclast5
+ movups xmm0,XMMWORD[64+r15]
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[80+r15]
+DB 102,15,56,220,208
+ je NEAR $L$aesenclast5
+ movups xmm0,XMMWORD[96+r15]
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[112+r15]
+DB 102,15,56,220,208
+$L$aesenclast5:
+DB 102,15,56,221,209
+ movups xmm0,XMMWORD[((16-112))+r15]
+ xor esi,eax
+ ror ebp,7
+ add ecx,edx
+ add ebx,DWORD[56+rsp]
+ xor esi,ebp
+ mov edi,ecx
+ rol ecx,5
+ add ebx,esi
+ xor edi,ebp
+ ror edx,7
+ add ebx,ecx
+ add eax,DWORD[60+rsp]
+ xor edi,edx
+ mov esi,ebx
+ rol ebx,5
+ add eax,edi
+ ror ecx,7
+ add eax,ebx
+ movups XMMWORD[48+r12*1+r13],xmm2
+ mov r8,QWORD[88+rsp]
+
+ add eax,DWORD[r9]
+ add esi,DWORD[4+r9]
+ add ecx,DWORD[8+r9]
+ mov DWORD[r9],eax
+ add edx,DWORD[12+r9]
+ mov DWORD[4+r9],esi
+ add ebp,DWORD[16+r9]
+ mov DWORD[8+r9],ecx
+ mov DWORD[12+r9],edx
+ mov DWORD[16+r9],ebp
+ movups XMMWORD[r8],xmm2
+ movaps xmm6,XMMWORD[((96+0))+rsp]
+ movaps xmm7,XMMWORD[((96+16))+rsp]
+ movaps xmm8,XMMWORD[((96+32))+rsp]
+ movaps xmm9,XMMWORD[((96+48))+rsp]
+ movaps xmm10,XMMWORD[((96+64))+rsp]
+ movaps xmm11,XMMWORD[((96+80))+rsp]
+ movaps xmm12,XMMWORD[((96+96))+rsp]
+ movaps xmm13,XMMWORD[((96+112))+rsp]
+ movaps xmm14,XMMWORD[((96+128))+rsp]
+ movaps xmm15,XMMWORD[((96+144))+rsp]
+ lea rsi,[264+rsp]
+
+ mov r15,QWORD[rsi]
+
+ mov r14,QWORD[8+rsi]
+
+ mov r13,QWORD[16+rsi]
+
+ mov r12,QWORD[24+rsi]
+
+ mov rbp,QWORD[32+rsi]
+
+ mov rbx,QWORD[40+rsi]
+
+ lea rsp,[48+rsi]
+
+$L$epilogue_ssse3:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_cbc_sha1_enc_ssse3:
+
+ALIGN 32
+aesni_cbc_sha1_enc_avx:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_cbc_sha1_enc_avx:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD[40+rsp]
+ mov r9,QWORD[48+rsp]
+
+
+
+ mov r10,QWORD[56+rsp]
+
+
+ push rbx
+
+ push rbp
+
+ push r12
+
+ push r13
+
+ push r14
+
+ push r15
+
+ lea rsp,[((-264))+rsp]
+
+
+
+ movaps XMMWORD[(96+0)+rsp],xmm6
+ movaps XMMWORD[(96+16)+rsp],xmm7
+ movaps XMMWORD[(96+32)+rsp],xmm8
+ movaps XMMWORD[(96+48)+rsp],xmm9
+ movaps XMMWORD[(96+64)+rsp],xmm10
+ movaps XMMWORD[(96+80)+rsp],xmm11
+ movaps XMMWORD[(96+96)+rsp],xmm12
+ movaps XMMWORD[(96+112)+rsp],xmm13
+ movaps XMMWORD[(96+128)+rsp],xmm14
+ movaps XMMWORD[(96+144)+rsp],xmm15
+$L$prologue_avx:
+ vzeroall
+ mov r12,rdi
+ mov r13,rsi
+ mov r14,rdx
+ lea r15,[112+rcx]
+ vmovdqu xmm12,XMMWORD[r8]
+ mov QWORD[88+rsp],r8
+ shl r14,6
+ sub r13,r12
+ mov r8d,DWORD[((240-112))+r15]
+ add r14,r10
+
+ lea r11,[K_XX_XX]
+ mov eax,DWORD[r9]
+ mov ebx,DWORD[4+r9]
+ mov ecx,DWORD[8+r9]
+ mov edx,DWORD[12+r9]
+ mov esi,ebx
+ mov ebp,DWORD[16+r9]
+ mov edi,ecx
+ xor edi,edx
+ and esi,edi
+
+ vmovdqa xmm6,XMMWORD[64+r11]
+ vmovdqa xmm10,XMMWORD[r11]
+ vmovdqu xmm0,XMMWORD[r10]
+ vmovdqu xmm1,XMMWORD[16+r10]
+ vmovdqu xmm2,XMMWORD[32+r10]
+ vmovdqu xmm3,XMMWORD[48+r10]
+ vpshufb xmm0,xmm0,xmm6
+ add r10,64
+ vpshufb xmm1,xmm1,xmm6
+ vpshufb xmm2,xmm2,xmm6
+ vpshufb xmm3,xmm3,xmm6
+ vpaddd xmm4,xmm0,xmm10
+ vpaddd xmm5,xmm1,xmm10
+ vpaddd xmm6,xmm2,xmm10
+ vmovdqa XMMWORD[rsp],xmm4
+ vmovdqa XMMWORD[16+rsp],xmm5
+ vmovdqa XMMWORD[32+rsp],xmm6
+ vmovups xmm15,XMMWORD[((-112))+r15]
+ vmovups xmm14,XMMWORD[((16-112))+r15]
+ jmp NEAR $L$oop_avx
+ALIGN 32
+$L$oop_avx:
+ shrd ebx,ebx,2
+ vmovdqu xmm13,XMMWORD[r12]
+ vpxor xmm13,xmm13,xmm15
+ vpxor xmm12,xmm12,xmm13
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[((-80))+r15]
+ xor esi,edx
+ vpalignr xmm4,xmm1,xmm0,8
+ mov edi,eax
+ add ebp,DWORD[rsp]
+ vpaddd xmm9,xmm10,xmm3
+ xor ebx,ecx
+ shld eax,eax,5
+ vpsrldq xmm8,xmm3,4
+ add ebp,esi
+ and edi,ebx
+ vpxor xmm4,xmm4,xmm0
+ xor ebx,ecx
+ add ebp,eax
+ vpxor xmm8,xmm8,xmm2
+ shrd eax,eax,7
+ xor edi,ecx
+ mov esi,ebp
+ add edx,DWORD[4+rsp]
+ vpxor xmm4,xmm4,xmm8
+ xor eax,ebx
+ shld ebp,ebp,5
+ vmovdqa XMMWORD[48+rsp],xmm9
+ add edx,edi
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[((-64))+r15]
+ and esi,eax
+ vpsrld xmm8,xmm4,31
+ xor eax,ebx
+ add edx,ebp
+ shrd ebp,ebp,7
+ xor esi,ebx
+ vpslldq xmm9,xmm4,12
+ vpaddd xmm4,xmm4,xmm4
+ mov edi,edx
+ add ecx,DWORD[8+rsp]
+ xor ebp,eax
+ shld edx,edx,5
+ vpor xmm4,xmm4,xmm8
+ vpsrld xmm8,xmm9,30
+ add ecx,esi
+ and edi,ebp
+ xor ebp,eax
+ add ecx,edx
+ vpslld xmm9,xmm9,2
+ vpxor xmm4,xmm4,xmm8
+ shrd edx,edx,7
+ xor edi,eax
+ mov esi,ecx
+ add ebx,DWORD[12+rsp]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[((-48))+r15]
+ vpxor xmm4,xmm4,xmm9
+ xor edx,ebp
+ shld ecx,ecx,5
+ add ebx,edi
+ and esi,edx
+ xor edx,ebp
+ add ebx,ecx
+ shrd ecx,ecx,7
+ xor esi,ebp
+ vpalignr xmm5,xmm2,xmm1,8
+ mov edi,ebx
+ add eax,DWORD[16+rsp]
+ vpaddd xmm9,xmm10,xmm4
+ xor ecx,edx
+ shld ebx,ebx,5
+ vpsrldq xmm8,xmm4,4
+ add eax,esi
+ and edi,ecx
+ vpxor xmm5,xmm5,xmm1
+ xor ecx,edx
+ add eax,ebx
+ vpxor xmm8,xmm8,xmm3
+ shrd ebx,ebx,7
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[((-32))+r15]
+ xor edi,edx
+ mov esi,eax
+ add ebp,DWORD[20+rsp]
+ vpxor xmm5,xmm5,xmm8
+ xor ebx,ecx
+ shld eax,eax,5
+ vmovdqa XMMWORD[rsp],xmm9
+ add ebp,edi
+ and esi,ebx
+ vpsrld xmm8,xmm5,31
+ xor ebx,ecx
+ add ebp,eax
+ shrd eax,eax,7
+ xor esi,ecx
+ vpslldq xmm9,xmm5,12
+ vpaddd xmm5,xmm5,xmm5
+ mov edi,ebp
+ add edx,DWORD[24+rsp]
+ xor eax,ebx
+ shld ebp,ebp,5
+ vpor xmm5,xmm5,xmm8
+ vpsrld xmm8,xmm9,30
+ add edx,esi
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[((-16))+r15]
+ and edi,eax
+ xor eax,ebx
+ add edx,ebp
+ vpslld xmm9,xmm9,2
+ vpxor xmm5,xmm5,xmm8
+ shrd ebp,ebp,7
+ xor edi,ebx
+ mov esi,edx
+ add ecx,DWORD[28+rsp]
+ vpxor xmm5,xmm5,xmm9
+ xor ebp,eax
+ shld edx,edx,5
+ vmovdqa xmm10,XMMWORD[16+r11]
+ add ecx,edi
+ and esi,ebp
+ xor ebp,eax
+ add ecx,edx
+ shrd edx,edx,7
+ xor esi,eax
+ vpalignr xmm6,xmm3,xmm2,8
+ mov edi,ecx
+ add ebx,DWORD[32+rsp]
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[r15]
+ vpaddd xmm9,xmm10,xmm5
+ xor edx,ebp
+ shld ecx,ecx,5
+ vpsrldq xmm8,xmm5,4
+ add ebx,esi
+ and edi,edx
+ vpxor xmm6,xmm6,xmm2
+ xor edx,ebp
+ add ebx,ecx
+ vpxor xmm8,xmm8,xmm4
+ shrd ecx,ecx,7
+ xor edi,ebp
+ mov esi,ebx
+ add eax,DWORD[36+rsp]
+ vpxor xmm6,xmm6,xmm8
+ xor ecx,edx
+ shld ebx,ebx,5
+ vmovdqa XMMWORD[16+rsp],xmm9
+ add eax,edi
+ and esi,ecx
+ vpsrld xmm8,xmm6,31
+ xor ecx,edx
+ add eax,ebx
+ shrd ebx,ebx,7
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[16+r15]
+ xor esi,edx
+ vpslldq xmm9,xmm6,12
+ vpaddd xmm6,xmm6,xmm6
+ mov edi,eax
+ add ebp,DWORD[40+rsp]
+ xor ebx,ecx
+ shld eax,eax,5
+ vpor xmm6,xmm6,xmm8
+ vpsrld xmm8,xmm9,30
+ add ebp,esi
+ and edi,ebx
+ xor ebx,ecx
+ add ebp,eax
+ vpslld xmm9,xmm9,2
+ vpxor xmm6,xmm6,xmm8
+ shrd eax,eax,7
+ xor edi,ecx
+ mov esi,ebp
+ add edx,DWORD[44+rsp]
+ vpxor xmm6,xmm6,xmm9
+ xor eax,ebx
+ shld ebp,ebp,5
+ add edx,edi
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[32+r15]
+ and esi,eax
+ xor eax,ebx
+ add edx,ebp
+ shrd ebp,ebp,7
+ xor esi,ebx
+ vpalignr xmm7,xmm4,xmm3,8
+ mov edi,edx
+ add ecx,DWORD[48+rsp]
+ vpaddd xmm9,xmm10,xmm6
+ xor ebp,eax
+ shld edx,edx,5
+ vpsrldq xmm8,xmm6,4
+ add ecx,esi
+ and edi,ebp
+ vpxor xmm7,xmm7,xmm3
+ xor ebp,eax
+ add ecx,edx
+ vpxor xmm8,xmm8,xmm5
+ shrd edx,edx,7
+ xor edi,eax
+ mov esi,ecx
+ add ebx,DWORD[52+rsp]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[48+r15]
+ vpxor xmm7,xmm7,xmm8
+ xor edx,ebp
+ shld ecx,ecx,5
+ vmovdqa XMMWORD[32+rsp],xmm9
+ add ebx,edi
+ and esi,edx
+ vpsrld xmm8,xmm7,31
+ xor edx,ebp
+ add ebx,ecx
+ shrd ecx,ecx,7
+ xor esi,ebp
+ vpslldq xmm9,xmm7,12
+ vpaddd xmm7,xmm7,xmm7
+ mov edi,ebx
+ add eax,DWORD[56+rsp]
+ xor ecx,edx
+ shld ebx,ebx,5
+ vpor xmm7,xmm7,xmm8
+ vpsrld xmm8,xmm9,30
+ add eax,esi
+ and edi,ecx
+ xor ecx,edx
+ add eax,ebx
+ vpslld xmm9,xmm9,2
+ vpxor xmm7,xmm7,xmm8
+ shrd ebx,ebx,7
+ cmp r8d,11
+ jb NEAR $L$vaesenclast6
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[64+r15]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[80+r15]
+ je NEAR $L$vaesenclast6
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[96+r15]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[112+r15]
+$L$vaesenclast6:
+ vaesenclast xmm12,xmm12,xmm15
+ vmovups xmm15,XMMWORD[((-112))+r15]
+ vmovups xmm14,XMMWORD[((16-112))+r15]
+ xor edi,edx
+ mov esi,eax
+ add ebp,DWORD[60+rsp]
+ vpxor xmm7,xmm7,xmm9
+ xor ebx,ecx
+ shld eax,eax,5
+ add ebp,edi
+ and esi,ebx
+ xor ebx,ecx
+ add ebp,eax
+ vpalignr xmm8,xmm7,xmm6,8
+ vpxor xmm0,xmm0,xmm4
+ shrd eax,eax,7
+ xor esi,ecx
+ mov edi,ebp
+ add edx,DWORD[rsp]
+ vpxor xmm0,xmm0,xmm1
+ xor eax,ebx
+ shld ebp,ebp,5
+ vpaddd xmm9,xmm10,xmm7
+ add edx,esi
+ vmovdqu xmm13,XMMWORD[16+r12]
+ vpxor xmm13,xmm13,xmm15
+ vmovups XMMWORD[r13*1+r12],xmm12
+ vpxor xmm12,xmm12,xmm13
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[((-80))+r15]
+ and edi,eax
+ vpxor xmm0,xmm0,xmm8
+ xor eax,ebx
+ add edx,ebp
+ shrd ebp,ebp,7
+ xor edi,ebx
+ vpsrld xmm8,xmm0,30
+ vmovdqa XMMWORD[48+rsp],xmm9
+ mov esi,edx
+ add ecx,DWORD[4+rsp]
+ xor ebp,eax
+ shld edx,edx,5
+ vpslld xmm0,xmm0,2
+ add ecx,edi
+ and esi,ebp
+ xor ebp,eax
+ add ecx,edx
+ shrd edx,edx,7
+ xor esi,eax
+ mov edi,ecx
+ add ebx,DWORD[8+rsp]
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[((-64))+r15]
+ vpor xmm0,xmm0,xmm8
+ xor edx,ebp
+ shld ecx,ecx,5
+ add ebx,esi
+ and edi,edx
+ xor edx,ebp
+ add ebx,ecx
+ add eax,DWORD[12+rsp]
+ xor edi,ebp
+ mov esi,ebx
+ shld ebx,ebx,5
+ add eax,edi
+ xor esi,edx
+ shrd ecx,ecx,7
+ add eax,ebx
+ vpalignr xmm8,xmm0,xmm7,8
+ vpxor xmm1,xmm1,xmm5
+ add ebp,DWORD[16+rsp]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[((-48))+r15]
+ xor esi,ecx
+ mov edi,eax
+ shld eax,eax,5
+ vpxor xmm1,xmm1,xmm2
+ add ebp,esi
+ xor edi,ecx
+ vpaddd xmm9,xmm10,xmm0
+ shrd ebx,ebx,7
+ add ebp,eax
+ vpxor xmm1,xmm1,xmm8
+ add edx,DWORD[20+rsp]
+ xor edi,ebx
+ mov esi,ebp
+ shld ebp,ebp,5
+ vpsrld xmm8,xmm1,30
+ vmovdqa XMMWORD[rsp],xmm9
+ add edx,edi
+ xor esi,ebx
+ shrd eax,eax,7
+ add edx,ebp
+ vpslld xmm1,xmm1,2
+ add ecx,DWORD[24+rsp]
+ xor esi,eax
+ mov edi,edx
+ shld edx,edx,5
+ add ecx,esi
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[((-32))+r15]
+ xor edi,eax
+ shrd ebp,ebp,7
+ add ecx,edx
+ vpor xmm1,xmm1,xmm8
+ add ebx,DWORD[28+rsp]
+ xor edi,ebp
+ mov esi,ecx
+ shld ecx,ecx,5
+ add ebx,edi
+ xor esi,ebp
+ shrd edx,edx,7
+ add ebx,ecx
+ vpalignr xmm8,xmm1,xmm0,8
+ vpxor xmm2,xmm2,xmm6
+ add eax,DWORD[32+rsp]
+ xor esi,edx
+ mov edi,ebx
+ shld ebx,ebx,5
+ vpxor xmm2,xmm2,xmm3
+ add eax,esi
+ xor edi,edx
+ vpaddd xmm9,xmm10,xmm1
+ vmovdqa xmm10,XMMWORD[32+r11]
+ shrd ecx,ecx,7
+ add eax,ebx
+ vpxor xmm2,xmm2,xmm8
+ add ebp,DWORD[36+rsp]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[((-16))+r15]
+ xor edi,ecx
+ mov esi,eax
+ shld eax,eax,5
+ vpsrld xmm8,xmm2,30
+ vmovdqa XMMWORD[16+rsp],xmm9
+ add ebp,edi
+ xor esi,ecx
+ shrd ebx,ebx,7
+ add ebp,eax
+ vpslld xmm2,xmm2,2
+ add edx,DWORD[40+rsp]
+ xor esi,ebx
+ mov edi,ebp
+ shld ebp,ebp,5
+ add edx,esi
+ xor edi,ebx
+ shrd eax,eax,7
+ add edx,ebp
+ vpor xmm2,xmm2,xmm8
+ add ecx,DWORD[44+rsp]
+ xor edi,eax
+ mov esi,edx
+ shld edx,edx,5
+ add ecx,edi
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[r15]
+ xor esi,eax
+ shrd ebp,ebp,7
+ add ecx,edx
+ vpalignr xmm8,xmm2,xmm1,8
+ vpxor xmm3,xmm3,xmm7
+ add ebx,DWORD[48+rsp]
+ xor esi,ebp
+ mov edi,ecx
+ shld ecx,ecx,5
+ vpxor xmm3,xmm3,xmm4
+ add ebx,esi
+ xor edi,ebp
+ vpaddd xmm9,xmm10,xmm2
+ shrd edx,edx,7
+ add ebx,ecx
+ vpxor xmm3,xmm3,xmm8
+ add eax,DWORD[52+rsp]
+ xor edi,edx
+ mov esi,ebx
+ shld ebx,ebx,5
+ vpsrld xmm8,xmm3,30
+ vmovdqa XMMWORD[32+rsp],xmm9
+ add eax,edi
+ xor esi,edx
+ shrd ecx,ecx,7
+ add eax,ebx
+ vpslld xmm3,xmm3,2
+ add ebp,DWORD[56+rsp]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[16+r15]
+ xor esi,ecx
+ mov edi,eax
+ shld eax,eax,5
+ add ebp,esi
+ xor edi,ecx
+ shrd ebx,ebx,7
+ add ebp,eax
+ vpor xmm3,xmm3,xmm8
+ add edx,DWORD[60+rsp]
+ xor edi,ebx
+ mov esi,ebp
+ shld ebp,ebp,5
+ add edx,edi
+ xor esi,ebx
+ shrd eax,eax,7
+ add edx,ebp
+ vpalignr xmm8,xmm3,xmm2,8
+ vpxor xmm4,xmm4,xmm0
+ add ecx,DWORD[rsp]
+ xor esi,eax
+ mov edi,edx
+ shld edx,edx,5
+ vpxor xmm4,xmm4,xmm5
+ add ecx,esi
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[32+r15]
+ xor edi,eax
+ vpaddd xmm9,xmm10,xmm3
+ shrd ebp,ebp,7
+ add ecx,edx
+ vpxor xmm4,xmm4,xmm8
+ add ebx,DWORD[4+rsp]
+ xor edi,ebp
+ mov esi,ecx
+ shld ecx,ecx,5
+ vpsrld xmm8,xmm4,30
+ vmovdqa XMMWORD[48+rsp],xmm9
+ add ebx,edi
+ xor esi,ebp
+ shrd edx,edx,7
+ add ebx,ecx
+ vpslld xmm4,xmm4,2
+ add eax,DWORD[8+rsp]
+ xor esi,edx
+ mov edi,ebx
+ shld ebx,ebx,5
+ add eax,esi
+ xor edi,edx
+ shrd ecx,ecx,7
+ add eax,ebx
+ vpor xmm4,xmm4,xmm8
+ add ebp,DWORD[12+rsp]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[48+r15]
+ xor edi,ecx
+ mov esi,eax
+ shld eax,eax,5
+ add ebp,edi
+ xor esi,ecx
+ shrd ebx,ebx,7
+ add ebp,eax
+ vpalignr xmm8,xmm4,xmm3,8
+ vpxor xmm5,xmm5,xmm1
+ add edx,DWORD[16+rsp]
+ xor esi,ebx
+ mov edi,ebp
+ shld ebp,ebp,5
+ vpxor xmm5,xmm5,xmm6
+ add edx,esi
+ xor edi,ebx
+ vpaddd xmm9,xmm10,xmm4
+ shrd eax,eax,7
+ add edx,ebp
+ vpxor xmm5,xmm5,xmm8
+ add ecx,DWORD[20+rsp]
+ xor edi,eax
+ mov esi,edx
+ shld edx,edx,5
+ vpsrld xmm8,xmm5,30
+ vmovdqa XMMWORD[rsp],xmm9
+ add ecx,edi
+ cmp r8d,11
+ jb NEAR $L$vaesenclast7
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[64+r15]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[80+r15]
+ je NEAR $L$vaesenclast7
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[96+r15]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[112+r15]
+$L$vaesenclast7:
+ vaesenclast xmm12,xmm12,xmm15
+ vmovups xmm15,XMMWORD[((-112))+r15]
+ vmovups xmm14,XMMWORD[((16-112))+r15]
+ xor esi,eax
+ shrd ebp,ebp,7
+ add ecx,edx
+ vpslld xmm5,xmm5,2
+ add ebx,DWORD[24+rsp]
+ xor esi,ebp
+ mov edi,ecx
+ shld ecx,ecx,5
+ add ebx,esi
+ xor edi,ebp
+ shrd edx,edx,7
+ add ebx,ecx
+ vpor xmm5,xmm5,xmm8
+ add eax,DWORD[28+rsp]
+ shrd ecx,ecx,7
+ mov esi,ebx
+ xor edi,edx
+ shld ebx,ebx,5
+ add eax,edi
+ xor esi,ecx
+ xor ecx,edx
+ add eax,ebx
+ vpalignr xmm8,xmm5,xmm4,8
+ vpxor xmm6,xmm6,xmm2
+ add ebp,DWORD[32+rsp]
+ vmovdqu xmm13,XMMWORD[32+r12]
+ vpxor xmm13,xmm13,xmm15
+ vmovups XMMWORD[16+r12*1+r13],xmm12
+ vpxor xmm12,xmm12,xmm13
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[((-80))+r15]
+ and esi,ecx
+ xor ecx,edx
+ shrd ebx,ebx,7
+ vpxor xmm6,xmm6,xmm7
+ mov edi,eax
+ xor esi,ecx
+ vpaddd xmm9,xmm10,xmm5
+ shld eax,eax,5
+ add ebp,esi
+ vpxor xmm6,xmm6,xmm8
+ xor edi,ebx
+ xor ebx,ecx
+ add ebp,eax
+ add edx,DWORD[36+rsp]
+ vpsrld xmm8,xmm6,30
+ vmovdqa XMMWORD[16+rsp],xmm9
+ and edi,ebx
+ xor ebx,ecx
+ shrd eax,eax,7
+ mov esi,ebp
+ vpslld xmm6,xmm6,2
+ xor edi,ebx
+ shld ebp,ebp,5
+ add edx,edi
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[((-64))+r15]
+ xor esi,eax
+ xor eax,ebx
+ add edx,ebp
+ add ecx,DWORD[40+rsp]
+ and esi,eax
+ vpor xmm6,xmm6,xmm8
+ xor eax,ebx
+ shrd ebp,ebp,7
+ mov edi,edx
+ xor esi,eax
+ shld edx,edx,5
+ add ecx,esi
+ xor edi,ebp
+ xor ebp,eax
+ add ecx,edx
+ add ebx,DWORD[44+rsp]
+ and edi,ebp
+ xor ebp,eax
+ shrd edx,edx,7
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[((-48))+r15]
+ mov esi,ecx
+ xor edi,ebp
+ shld ecx,ecx,5
+ add ebx,edi
+ xor esi,edx
+ xor edx,ebp
+ add ebx,ecx
+ vpalignr xmm8,xmm6,xmm5,8
+ vpxor xmm7,xmm7,xmm3
+ add eax,DWORD[48+rsp]
+ and esi,edx
+ xor edx,ebp
+ shrd ecx,ecx,7
+ vpxor xmm7,xmm7,xmm0
+ mov edi,ebx
+ xor esi,edx
+ vpaddd xmm9,xmm10,xmm6
+ vmovdqa xmm10,XMMWORD[48+r11]
+ shld ebx,ebx,5
+ add eax,esi
+ vpxor xmm7,xmm7,xmm8
+ xor edi,ecx
+ xor ecx,edx
+ add eax,ebx
+ add ebp,DWORD[52+rsp]
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[((-32))+r15]
+ vpsrld xmm8,xmm7,30
+ vmovdqa XMMWORD[32+rsp],xmm9
+ and edi,ecx
+ xor ecx,edx
+ shrd ebx,ebx,7
+ mov esi,eax
+ vpslld xmm7,xmm7,2
+ xor edi,ecx
+ shld eax,eax,5
+ add ebp,edi
+ xor esi,ebx
+ xor ebx,ecx
+ add ebp,eax
+ add edx,DWORD[56+rsp]
+ and esi,ebx
+ vpor xmm7,xmm7,xmm8
+ xor ebx,ecx
+ shrd eax,eax,7
+ mov edi,ebp
+ xor esi,ebx
+ shld ebp,ebp,5
+ add edx,esi
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[((-16))+r15]
+ xor edi,eax
+ xor eax,ebx
+ add edx,ebp
+ add ecx,DWORD[60+rsp]
+ and edi,eax
+ xor eax,ebx
+ shrd ebp,ebp,7
+ mov esi,edx
+ xor edi,eax
+ shld edx,edx,5
+ add ecx,edi
+ xor esi,ebp
+ xor ebp,eax
+ add ecx,edx
+ vpalignr xmm8,xmm7,xmm6,8
+ vpxor xmm0,xmm0,xmm4
+ add ebx,DWORD[rsp]
+ and esi,ebp
+ xor ebp,eax
+ shrd edx,edx,7
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[r15]
+ vpxor xmm0,xmm0,xmm1
+ mov edi,ecx
+ xor esi,ebp
+ vpaddd xmm9,xmm10,xmm7
+ shld ecx,ecx,5
+ add ebx,esi
+ vpxor xmm0,xmm0,xmm8
+ xor edi,edx
+ xor edx,ebp
+ add ebx,ecx
+ add eax,DWORD[4+rsp]
+ vpsrld xmm8,xmm0,30
+ vmovdqa XMMWORD[48+rsp],xmm9
+ and edi,edx
+ xor edx,ebp
+ shrd ecx,ecx,7
+ mov esi,ebx
+ vpslld xmm0,xmm0,2
+ xor edi,edx
+ shld ebx,ebx,5
+ add eax,edi
+ xor esi,ecx
+ xor ecx,edx
+ add eax,ebx
+ add ebp,DWORD[8+rsp]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[16+r15]
+ and esi,ecx
+ vpor xmm0,xmm0,xmm8
+ xor ecx,edx
+ shrd ebx,ebx,7
+ mov edi,eax
+ xor esi,ecx
+ shld eax,eax,5
+ add ebp,esi
+ xor edi,ebx
+ xor ebx,ecx
+ add ebp,eax
+ add edx,DWORD[12+rsp]
+ and edi,ebx
+ xor ebx,ecx
+ shrd eax,eax,7
+ mov esi,ebp
+ xor edi,ebx
+ shld ebp,ebp,5
+ add edx,edi
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[32+r15]
+ xor esi,eax
+ xor eax,ebx
+ add edx,ebp
+ vpalignr xmm8,xmm0,xmm7,8
+ vpxor xmm1,xmm1,xmm5
+ add ecx,DWORD[16+rsp]
+ and esi,eax
+ xor eax,ebx
+ shrd ebp,ebp,7
+ vpxor xmm1,xmm1,xmm2
+ mov edi,edx
+ xor esi,eax
+ vpaddd xmm9,xmm10,xmm0
+ shld edx,edx,5
+ add ecx,esi
+ vpxor xmm1,xmm1,xmm8
+ xor edi,ebp
+ xor ebp,eax
+ add ecx,edx
+ add ebx,DWORD[20+rsp]
+ vpsrld xmm8,xmm1,30
+ vmovdqa XMMWORD[rsp],xmm9
+ and edi,ebp
+ xor ebp,eax
+ shrd edx,edx,7
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[48+r15]
+ mov esi,ecx
+ vpslld xmm1,xmm1,2
+ xor edi,ebp
+ shld ecx,ecx,5
+ add ebx,edi
+ xor esi,edx
+ xor edx,ebp
+ add ebx,ecx
+ add eax,DWORD[24+rsp]
+ and esi,edx
+ vpor xmm1,xmm1,xmm8
+ xor edx,ebp
+ shrd ecx,ecx,7
+ mov edi,ebx
+ xor esi,edx
+ shld ebx,ebx,5
+ add eax,esi
+ xor edi,ecx
+ xor ecx,edx
+ add eax,ebx
+ add ebp,DWORD[28+rsp]
+ cmp r8d,11
+ jb NEAR $L$vaesenclast8
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[64+r15]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[80+r15]
+ je NEAR $L$vaesenclast8
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[96+r15]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[112+r15]
+$L$vaesenclast8:
+ vaesenclast xmm12,xmm12,xmm15
+ vmovups xmm15,XMMWORD[((-112))+r15]
+ vmovups xmm14,XMMWORD[((16-112))+r15]
+ and edi,ecx
+ xor ecx,edx
+ shrd ebx,ebx,7
+ mov esi,eax
+ xor edi,ecx
+ shld eax,eax,5
+ add ebp,edi
+ xor esi,ebx
+ xor ebx,ecx
+ add ebp,eax
+ vpalignr xmm8,xmm1,xmm0,8
+ vpxor xmm2,xmm2,xmm6
+ add edx,DWORD[32+rsp]
+ and esi,ebx
+ xor ebx,ecx
+ shrd eax,eax,7
+ vpxor xmm2,xmm2,xmm3
+ mov edi,ebp
+ xor esi,ebx
+ vpaddd xmm9,xmm10,xmm1
+ shld ebp,ebp,5
+ add edx,esi
+ vmovdqu xmm13,XMMWORD[48+r12]
+ vpxor xmm13,xmm13,xmm15
+ vmovups XMMWORD[32+r12*1+r13],xmm12
+ vpxor xmm12,xmm12,xmm13
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[((-80))+r15]
+ vpxor xmm2,xmm2,xmm8
+ xor edi,eax
+ xor eax,ebx
+ add edx,ebp
+ add ecx,DWORD[36+rsp]
+ vpsrld xmm8,xmm2,30
+ vmovdqa XMMWORD[16+rsp],xmm9
+ and edi,eax
+ xor eax,ebx
+ shrd ebp,ebp,7
+ mov esi,edx
+ vpslld xmm2,xmm2,2
+ xor edi,eax
+ shld edx,edx,5
+ add ecx,edi
+ xor esi,ebp
+ xor ebp,eax
+ add ecx,edx
+ add ebx,DWORD[40+rsp]
+ and esi,ebp
+ vpor xmm2,xmm2,xmm8
+ xor ebp,eax
+ shrd edx,edx,7
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[((-64))+r15]
+ mov edi,ecx
+ xor esi,ebp
+ shld ecx,ecx,5
+ add ebx,esi
+ xor edi,edx
+ xor edx,ebp
+ add ebx,ecx
+ add eax,DWORD[44+rsp]
+ and edi,edx
+ xor edx,ebp
+ shrd ecx,ecx,7
+ mov esi,ebx
+ xor edi,edx
+ shld ebx,ebx,5
+ add eax,edi
+ xor esi,edx
+ add eax,ebx
+ vpalignr xmm8,xmm2,xmm1,8
+ vpxor xmm3,xmm3,xmm7
+ add ebp,DWORD[48+rsp]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[((-48))+r15]
+ xor esi,ecx
+ mov edi,eax
+ shld eax,eax,5
+ vpxor xmm3,xmm3,xmm4
+ add ebp,esi
+ xor edi,ecx
+ vpaddd xmm9,xmm10,xmm2
+ shrd ebx,ebx,7
+ add ebp,eax
+ vpxor xmm3,xmm3,xmm8
+ add edx,DWORD[52+rsp]
+ xor edi,ebx
+ mov esi,ebp
+ shld ebp,ebp,5
+ vpsrld xmm8,xmm3,30
+ vmovdqa XMMWORD[32+rsp],xmm9
+ add edx,edi
+ xor esi,ebx
+ shrd eax,eax,7
+ add edx,ebp
+ vpslld xmm3,xmm3,2
+ add ecx,DWORD[56+rsp]
+ xor esi,eax
+ mov edi,edx
+ shld edx,edx,5
+ add ecx,esi
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[((-32))+r15]
+ xor edi,eax
+ shrd ebp,ebp,7
+ add ecx,edx
+ vpor xmm3,xmm3,xmm8
+ add ebx,DWORD[60+rsp]
+ xor edi,ebp
+ mov esi,ecx
+ shld ecx,ecx,5
+ add ebx,edi
+ xor esi,ebp
+ shrd edx,edx,7
+ add ebx,ecx
+ add eax,DWORD[rsp]
+ vpaddd xmm9,xmm10,xmm3
+ xor esi,edx
+ mov edi,ebx
+ shld ebx,ebx,5
+ add eax,esi
+ vmovdqa XMMWORD[48+rsp],xmm9
+ xor edi,edx
+ shrd ecx,ecx,7
+ add eax,ebx
+ add ebp,DWORD[4+rsp]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[((-16))+r15]
+ xor edi,ecx
+ mov esi,eax
+ shld eax,eax,5
+ add ebp,edi
+ xor esi,ecx
+ shrd ebx,ebx,7
+ add ebp,eax
+ add edx,DWORD[8+rsp]
+ xor esi,ebx
+ mov edi,ebp
+ shld ebp,ebp,5
+ add edx,esi
+ xor edi,ebx
+ shrd eax,eax,7
+ add edx,ebp
+ add ecx,DWORD[12+rsp]
+ xor edi,eax
+ mov esi,edx
+ shld edx,edx,5
+ add ecx,edi
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[r15]
+ xor esi,eax
+ shrd ebp,ebp,7
+ add ecx,edx
+ cmp r10,r14
+ je NEAR $L$done_avx
+ vmovdqa xmm9,XMMWORD[64+r11]
+ vmovdqa xmm10,XMMWORD[r11]
+ vmovdqu xmm0,XMMWORD[r10]
+ vmovdqu xmm1,XMMWORD[16+r10]
+ vmovdqu xmm2,XMMWORD[32+r10]
+ vmovdqu xmm3,XMMWORD[48+r10]
+ vpshufb xmm0,xmm0,xmm9
+ add r10,64
+ add ebx,DWORD[16+rsp]
+ xor esi,ebp
+ vpshufb xmm1,xmm1,xmm9
+ mov edi,ecx
+ shld ecx,ecx,5
+ vpaddd xmm8,xmm0,xmm10
+ add ebx,esi
+ xor edi,ebp
+ shrd edx,edx,7
+ add ebx,ecx
+ vmovdqa XMMWORD[rsp],xmm8
+ add eax,DWORD[20+rsp]
+ xor edi,edx
+ mov esi,ebx
+ shld ebx,ebx,5
+ add eax,edi
+ xor esi,edx
+ shrd ecx,ecx,7
+ add eax,ebx
+ add ebp,DWORD[24+rsp]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[16+r15]
+ xor esi,ecx
+ mov edi,eax
+ shld eax,eax,5
+ add ebp,esi
+ xor edi,ecx
+ shrd ebx,ebx,7
+ add ebp,eax
+ add edx,DWORD[28+rsp]
+ xor edi,ebx
+ mov esi,ebp
+ shld ebp,ebp,5
+ add edx,edi
+ xor esi,ebx
+ shrd eax,eax,7
+ add edx,ebp
+ add ecx,DWORD[32+rsp]
+ xor esi,eax
+ vpshufb xmm2,xmm2,xmm9
+ mov edi,edx
+ shld edx,edx,5
+ vpaddd xmm8,xmm1,xmm10
+ add ecx,esi
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[32+r15]
+ xor edi,eax
+ shrd ebp,ebp,7
+ add ecx,edx
+ vmovdqa XMMWORD[16+rsp],xmm8
+ add ebx,DWORD[36+rsp]
+ xor edi,ebp
+ mov esi,ecx
+ shld ecx,ecx,5
+ add ebx,edi
+ xor esi,ebp
+ shrd edx,edx,7
+ add ebx,ecx
+ add eax,DWORD[40+rsp]
+ xor esi,edx
+ mov edi,ebx
+ shld ebx,ebx,5
+ add eax,esi
+ xor edi,edx
+ shrd ecx,ecx,7
+ add eax,ebx
+ add ebp,DWORD[44+rsp]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[48+r15]
+ xor edi,ecx
+ mov esi,eax
+ shld eax,eax,5
+ add ebp,edi
+ xor esi,ecx
+ shrd ebx,ebx,7
+ add ebp,eax
+ add edx,DWORD[48+rsp]
+ xor esi,ebx
+ vpshufb xmm3,xmm3,xmm9
+ mov edi,ebp
+ shld ebp,ebp,5
+ vpaddd xmm8,xmm2,xmm10
+ add edx,esi
+ xor edi,ebx
+ shrd eax,eax,7
+ add edx,ebp
+ vmovdqa XMMWORD[32+rsp],xmm8
+ add ecx,DWORD[52+rsp]
+ xor edi,eax
+ mov esi,edx
+ shld edx,edx,5
+ add ecx,edi
+ cmp r8d,11
+ jb NEAR $L$vaesenclast9
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[64+r15]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[80+r15]
+ je NEAR $L$vaesenclast9
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[96+r15]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[112+r15]
+$L$vaesenclast9:
+ vaesenclast xmm12,xmm12,xmm15
+ vmovups xmm15,XMMWORD[((-112))+r15]
+ vmovups xmm14,XMMWORD[((16-112))+r15]
+ xor esi,eax
+ shrd ebp,ebp,7
+ add ecx,edx
+ add ebx,DWORD[56+rsp]
+ xor esi,ebp
+ mov edi,ecx
+ shld ecx,ecx,5
+ add ebx,esi
+ xor edi,ebp
+ shrd edx,edx,7
+ add ebx,ecx
+ add eax,DWORD[60+rsp]
+ xor edi,edx
+ mov esi,ebx
+ shld ebx,ebx,5
+ add eax,edi
+ shrd ecx,ecx,7
+ add eax,ebx
+ vmovups XMMWORD[48+r12*1+r13],xmm12
+ lea r12,[64+r12]
+
+ add eax,DWORD[r9]
+ add esi,DWORD[4+r9]
+ add ecx,DWORD[8+r9]
+ add edx,DWORD[12+r9]
+ mov DWORD[r9],eax
+ add ebp,DWORD[16+r9]
+ mov DWORD[4+r9],esi
+ mov ebx,esi
+ mov DWORD[8+r9],ecx
+ mov edi,ecx
+ mov DWORD[12+r9],edx
+ xor edi,edx
+ mov DWORD[16+r9],ebp
+ and esi,edi
+ jmp NEAR $L$oop_avx
+
+$L$done_avx:
+ add ebx,DWORD[16+rsp]
+ xor esi,ebp
+ mov edi,ecx
+ shld ecx,ecx,5
+ add ebx,esi
+ xor edi,ebp
+ shrd edx,edx,7
+ add ebx,ecx
+ add eax,DWORD[20+rsp]
+ xor edi,edx
+ mov esi,ebx
+ shld ebx,ebx,5
+ add eax,edi
+ xor esi,edx
+ shrd ecx,ecx,7
+ add eax,ebx
+ add ebp,DWORD[24+rsp]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[16+r15]
+ xor esi,ecx
+ mov edi,eax
+ shld eax,eax,5
+ add ebp,esi
+ xor edi,ecx
+ shrd ebx,ebx,7
+ add ebp,eax
+ add edx,DWORD[28+rsp]
+ xor edi,ebx
+ mov esi,ebp
+ shld ebp,ebp,5
+ add edx,edi
+ xor esi,ebx
+ shrd eax,eax,7
+ add edx,ebp
+ add ecx,DWORD[32+rsp]
+ xor esi,eax
+ mov edi,edx
+ shld edx,edx,5
+ add ecx,esi
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[32+r15]
+ xor edi,eax
+ shrd ebp,ebp,7
+ add ecx,edx
+ add ebx,DWORD[36+rsp]
+ xor edi,ebp
+ mov esi,ecx
+ shld ecx,ecx,5
+ add ebx,edi
+ xor esi,ebp
+ shrd edx,edx,7
+ add ebx,ecx
+ add eax,DWORD[40+rsp]
+ xor esi,edx
+ mov edi,ebx
+ shld ebx,ebx,5
+ add eax,esi
+ xor edi,edx
+ shrd ecx,ecx,7
+ add eax,ebx
+ add ebp,DWORD[44+rsp]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[48+r15]
+ xor edi,ecx
+ mov esi,eax
+ shld eax,eax,5
+ add ebp,edi
+ xor esi,ecx
+ shrd ebx,ebx,7
+ add ebp,eax
+ add edx,DWORD[48+rsp]
+ xor esi,ebx
+ mov edi,ebp
+ shld ebp,ebp,5
+ add edx,esi
+ xor edi,ebx
+ shrd eax,eax,7
+ add edx,ebp
+ add ecx,DWORD[52+rsp]
+ xor edi,eax
+ mov esi,edx
+ shld edx,edx,5
+ add ecx,edi
+ cmp r8d,11
+ jb NEAR $L$vaesenclast10
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[64+r15]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[80+r15]
+ je NEAR $L$vaesenclast10
+ vaesenc xmm12,xmm12,xmm15
+ vmovups xmm14,XMMWORD[96+r15]
+ vaesenc xmm12,xmm12,xmm14
+ vmovups xmm15,XMMWORD[112+r15]
+$L$vaesenclast10:
+ vaesenclast xmm12,xmm12,xmm15
+ vmovups xmm15,XMMWORD[((-112))+r15]
+ vmovups xmm14,XMMWORD[((16-112))+r15]
+ xor esi,eax
+ shrd ebp,ebp,7
+ add ecx,edx
+ add ebx,DWORD[56+rsp]
+ xor esi,ebp
+ mov edi,ecx
+ shld ecx,ecx,5
+ add ebx,esi
+ xor edi,ebp
+ shrd edx,edx,7
+ add ebx,ecx
+ add eax,DWORD[60+rsp]
+ xor edi,edx
+ mov esi,ebx
+ shld ebx,ebx,5
+ add eax,edi
+ shrd ecx,ecx,7
+ add eax,ebx
+ vmovups XMMWORD[48+r12*1+r13],xmm12
+ mov r8,QWORD[88+rsp]
+
+ add eax,DWORD[r9]
+ add esi,DWORD[4+r9]
+ add ecx,DWORD[8+r9]
+ mov DWORD[r9],eax
+ add edx,DWORD[12+r9]
+ mov DWORD[4+r9],esi
+ add ebp,DWORD[16+r9]
+ mov DWORD[8+r9],ecx
+ mov DWORD[12+r9],edx
+ mov DWORD[16+r9],ebp
+ vmovups XMMWORD[r8],xmm12
+ vzeroall
+ movaps xmm6,XMMWORD[((96+0))+rsp]
+ movaps xmm7,XMMWORD[((96+16))+rsp]
+ movaps xmm8,XMMWORD[((96+32))+rsp]
+ movaps xmm9,XMMWORD[((96+48))+rsp]
+ movaps xmm10,XMMWORD[((96+64))+rsp]
+ movaps xmm11,XMMWORD[((96+80))+rsp]
+ movaps xmm12,XMMWORD[((96+96))+rsp]
+ movaps xmm13,XMMWORD[((96+112))+rsp]
+ movaps xmm14,XMMWORD[((96+128))+rsp]
+ movaps xmm15,XMMWORD[((96+144))+rsp]
+ lea rsi,[264+rsp]
+
+ mov r15,QWORD[rsi]
+
+ mov r14,QWORD[8+rsi]
+
+ mov r13,QWORD[16+rsi]
+
+ mov r12,QWORD[24+rsi]
+
+ mov rbp,QWORD[32+rsi]
+
+ mov rbx,QWORD[40+rsi]
+
+ lea rsp,[48+rsi]
+
+$L$epilogue_avx:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_cbc_sha1_enc_avx:
+ALIGN 64
+K_XX_XX:
+ DD 0x5a827999,0x5a827999,0x5a827999,0x5a827999
+ DD 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1
+ DD 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc
+ DD 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6
+ DD 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+DB 0xf,0xe,0xd,0xc,0xb,0xa,0x9,0x8,0x7,0x6,0x5,0x4,0x3,0x2,0x1,0x0
+
+DB 65,69,83,78,73,45,67,66,67,43,83,72,65,49,32,115
+DB 116,105,116,99,104,32,102,111,114,32,120,56,54,95,54,52
+DB 44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32
+DB 60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111
+DB 114,103,62,0
+ALIGN 64
+
+ALIGN 32
+aesni_cbc_sha1_enc_shaext:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_cbc_sha1_enc_shaext:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD[40+rsp]
+ mov r9,QWORD[48+rsp]
+
+
+
+ mov r10,QWORD[56+rsp]
+ lea rsp,[((-168))+rsp]
+ movaps XMMWORD[(-8-160)+rax],xmm6
+ movaps XMMWORD[(-8-144)+rax],xmm7
+ movaps XMMWORD[(-8-128)+rax],xmm8
+ movaps XMMWORD[(-8-112)+rax],xmm9
+ movaps XMMWORD[(-8-96)+rax],xmm10
+ movaps XMMWORD[(-8-80)+rax],xmm11
+ movaps XMMWORD[(-8-64)+rax],xmm12
+ movaps XMMWORD[(-8-48)+rax],xmm13
+ movaps XMMWORD[(-8-32)+rax],xmm14
+ movaps XMMWORD[(-8-16)+rax],xmm15
+$L$prologue_shaext:
+ movdqu xmm8,XMMWORD[r9]
+ movd xmm9,DWORD[16+r9]
+ movdqa xmm7,XMMWORD[((K_XX_XX+80))]
+
+ mov r11d,DWORD[240+rcx]
+ sub rsi,rdi
+ movups xmm15,XMMWORD[rcx]
+ movups xmm2,XMMWORD[r8]
+ movups xmm0,XMMWORD[16+rcx]
+ lea rcx,[112+rcx]
+
+ pshufd xmm8,xmm8,27
+ pshufd xmm9,xmm9,27
+ jmp NEAR $L$oop_shaext
+
+ALIGN 16
+$L$oop_shaext:
+ movups xmm14,XMMWORD[rdi]
+ xorps xmm14,xmm15
+ xorps xmm2,xmm14
+ movups xmm1,XMMWORD[((-80))+rcx]
+DB 102,15,56,220,208
+ movdqu xmm3,XMMWORD[r10]
+ movdqa xmm12,xmm9
+DB 102,15,56,0,223
+ movdqu xmm4,XMMWORD[16+r10]
+ movdqa xmm11,xmm8
+ movups xmm0,XMMWORD[((-64))+rcx]
+DB 102,15,56,220,209
+DB 102,15,56,0,231
+
+ paddd xmm9,xmm3
+ movdqu xmm5,XMMWORD[32+r10]
+ lea r10,[64+r10]
+ pxor xmm3,xmm12
+ movups xmm1,XMMWORD[((-48))+rcx]
+DB 102,15,56,220,208
+ pxor xmm3,xmm12
+ movdqa xmm10,xmm8
+DB 102,15,56,0,239
+DB 69,15,58,204,193,0
+DB 68,15,56,200,212
+ movups xmm0,XMMWORD[((-32))+rcx]
+DB 102,15,56,220,209
+DB 15,56,201,220
+ movdqu xmm6,XMMWORD[((-16))+r10]
+ movdqa xmm9,xmm8
+DB 102,15,56,0,247
+ movups xmm1,XMMWORD[((-16))+rcx]
+DB 102,15,56,220,208
+DB 69,15,58,204,194,0
+DB 68,15,56,200,205
+ pxor xmm3,xmm5
+DB 15,56,201,229
+ movups xmm0,XMMWORD[rcx]
+DB 102,15,56,220,209
+ movdqa xmm10,xmm8
+DB 69,15,58,204,193,0
+DB 68,15,56,200,214
+ movups xmm1,XMMWORD[16+rcx]
+DB 102,15,56,220,208
+DB 15,56,202,222
+ pxor xmm4,xmm6
+DB 15,56,201,238
+ movups xmm0,XMMWORD[32+rcx]
+DB 102,15,56,220,209
+ movdqa xmm9,xmm8
+DB 69,15,58,204,194,0
+DB 68,15,56,200,203
+ movups xmm1,XMMWORD[48+rcx]
+DB 102,15,56,220,208
+DB 15,56,202,227
+ pxor xmm5,xmm3
+DB 15,56,201,243
+ cmp r11d,11
+ jb NEAR $L$aesenclast11
+ movups xmm0,XMMWORD[64+rcx]
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[80+rcx]
+DB 102,15,56,220,208
+ je NEAR $L$aesenclast11
+ movups xmm0,XMMWORD[96+rcx]
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[112+rcx]
+DB 102,15,56,220,208
+$L$aesenclast11:
+DB 102,15,56,221,209
+ movups xmm0,XMMWORD[((16-112))+rcx]
+ movdqa xmm10,xmm8
+DB 69,15,58,204,193,0
+DB 68,15,56,200,212
+ movups xmm14,XMMWORD[16+rdi]
+ xorps xmm14,xmm15
+ movups XMMWORD[rdi*1+rsi],xmm2
+ xorps xmm2,xmm14
+ movups xmm1,XMMWORD[((-80))+rcx]
+DB 102,15,56,220,208
+DB 15,56,202,236
+ pxor xmm6,xmm4
+DB 15,56,201,220
+ movups xmm0,XMMWORD[((-64))+rcx]
+DB 102,15,56,220,209
+ movdqa xmm9,xmm8
+DB 69,15,58,204,194,1
+DB 68,15,56,200,205
+ movups xmm1,XMMWORD[((-48))+rcx]
+DB 102,15,56,220,208
+DB 15,56,202,245
+ pxor xmm3,xmm5
+DB 15,56,201,229
+ movups xmm0,XMMWORD[((-32))+rcx]
+DB 102,15,56,220,209
+ movdqa xmm10,xmm8
+DB 69,15,58,204,193,1
+DB 68,15,56,200,214
+ movups xmm1,XMMWORD[((-16))+rcx]
+DB 102,15,56,220,208
+DB 15,56,202,222
+ pxor xmm4,xmm6
+DB 15,56,201,238
+ movups xmm0,XMMWORD[rcx]
+DB 102,15,56,220,209
+ movdqa xmm9,xmm8
+DB 69,15,58,204,194,1
+DB 68,15,56,200,203
+ movups xmm1,XMMWORD[16+rcx]
+DB 102,15,56,220,208
+DB 15,56,202,227
+ pxor xmm5,xmm3
+DB 15,56,201,243
+ movups xmm0,XMMWORD[32+rcx]
+DB 102,15,56,220,209
+ movdqa xmm10,xmm8
+DB 69,15,58,204,193,1
+DB 68,15,56,200,212
+ movups xmm1,XMMWORD[48+rcx]
+DB 102,15,56,220,208
+DB 15,56,202,236
+ pxor xmm6,xmm4
+DB 15,56,201,220
+ cmp r11d,11
+ jb NEAR $L$aesenclast12
+ movups xmm0,XMMWORD[64+rcx]
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[80+rcx]
+DB 102,15,56,220,208
+ je NEAR $L$aesenclast12
+ movups xmm0,XMMWORD[96+rcx]
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[112+rcx]
+DB 102,15,56,220,208
+$L$aesenclast12:
+DB 102,15,56,221,209
+ movups xmm0,XMMWORD[((16-112))+rcx]
+ movdqa xmm9,xmm8
+DB 69,15,58,204,194,1
+DB 68,15,56,200,205
+ movups xmm14,XMMWORD[32+rdi]
+ xorps xmm14,xmm15
+ movups XMMWORD[16+rdi*1+rsi],xmm2
+ xorps xmm2,xmm14
+ movups xmm1,XMMWORD[((-80))+rcx]
+DB 102,15,56,220,208
+DB 15,56,202,245
+ pxor xmm3,xmm5
+DB 15,56,201,229
+ movups xmm0,XMMWORD[((-64))+rcx]
+DB 102,15,56,220,209
+ movdqa xmm10,xmm8
+DB 69,15,58,204,193,2
+DB 68,15,56,200,214
+ movups xmm1,XMMWORD[((-48))+rcx]
+DB 102,15,56,220,208
+DB 15,56,202,222
+ pxor xmm4,xmm6
+DB 15,56,201,238
+ movups xmm0,XMMWORD[((-32))+rcx]
+DB 102,15,56,220,209
+ movdqa xmm9,xmm8
+DB 69,15,58,204,194,2
+DB 68,15,56,200,203
+ movups xmm1,XMMWORD[((-16))+rcx]
+DB 102,15,56,220,208
+DB 15,56,202,227
+ pxor xmm5,xmm3
+DB 15,56,201,243
+ movups xmm0,XMMWORD[rcx]
+DB 102,15,56,220,209
+ movdqa xmm10,xmm8
+DB 69,15,58,204,193,2
+DB 68,15,56,200,212
+ movups xmm1,XMMWORD[16+rcx]
+DB 102,15,56,220,208
+DB 15,56,202,236
+ pxor xmm6,xmm4
+DB 15,56,201,220
+ movups xmm0,XMMWORD[32+rcx]
+DB 102,15,56,220,209
+ movdqa xmm9,xmm8
+DB 69,15,58,204,194,2
+DB 68,15,56,200,205
+ movups xmm1,XMMWORD[48+rcx]
+DB 102,15,56,220,208
+DB 15,56,202,245
+ pxor xmm3,xmm5
+DB 15,56,201,229
+ cmp r11d,11
+ jb NEAR $L$aesenclast13
+ movups xmm0,XMMWORD[64+rcx]
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[80+rcx]
+DB 102,15,56,220,208
+ je NEAR $L$aesenclast13
+ movups xmm0,XMMWORD[96+rcx]
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[112+rcx]
+DB 102,15,56,220,208
+$L$aesenclast13:
+DB 102,15,56,221,209
+ movups xmm0,XMMWORD[((16-112))+rcx]
+ movdqa xmm10,xmm8
+DB 69,15,58,204,193,2
+DB 68,15,56,200,214
+ movups xmm14,XMMWORD[48+rdi]
+ xorps xmm14,xmm15
+ movups XMMWORD[32+rdi*1+rsi],xmm2
+ xorps xmm2,xmm14
+ movups xmm1,XMMWORD[((-80))+rcx]
+DB 102,15,56,220,208
+DB 15,56,202,222
+ pxor xmm4,xmm6
+DB 15,56,201,238
+ movups xmm0,XMMWORD[((-64))+rcx]
+DB 102,15,56,220,209
+ movdqa xmm9,xmm8
+DB 69,15,58,204,194,3
+DB 68,15,56,200,203
+ movups xmm1,XMMWORD[((-48))+rcx]
+DB 102,15,56,220,208
+DB 15,56,202,227
+ pxor xmm5,xmm3
+DB 15,56,201,243
+ movups xmm0,XMMWORD[((-32))+rcx]
+DB 102,15,56,220,209
+ movdqa xmm10,xmm8
+DB 69,15,58,204,193,3
+DB 68,15,56,200,212
+DB 15,56,202,236
+ pxor xmm6,xmm4
+ movups xmm1,XMMWORD[((-16))+rcx]
+DB 102,15,56,220,208
+ movdqa xmm9,xmm8
+DB 69,15,58,204,194,3
+DB 68,15,56,200,205
+DB 15,56,202,245
+ movups xmm0,XMMWORD[rcx]
+DB 102,15,56,220,209
+ movdqa xmm5,xmm12
+ movdqa xmm10,xmm8
+DB 69,15,58,204,193,3
+DB 68,15,56,200,214
+ movups xmm1,XMMWORD[16+rcx]
+DB 102,15,56,220,208
+ movdqa xmm9,xmm8
+DB 69,15,58,204,194,3
+DB 68,15,56,200,205
+ movups xmm0,XMMWORD[32+rcx]
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[48+rcx]
+DB 102,15,56,220,208
+ cmp r11d,11
+ jb NEAR $L$aesenclast14
+ movups xmm0,XMMWORD[64+rcx]
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[80+rcx]
+DB 102,15,56,220,208
+ je NEAR $L$aesenclast14
+ movups xmm0,XMMWORD[96+rcx]
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[112+rcx]
+DB 102,15,56,220,208
+$L$aesenclast14:
+DB 102,15,56,221,209
+ movups xmm0,XMMWORD[((16-112))+rcx]
+ dec rdx
+
+ paddd xmm8,xmm11
+ movups XMMWORD[48+rdi*1+rsi],xmm2
+ lea rdi,[64+rdi]
+ jnz NEAR $L$oop_shaext
+
+ pshufd xmm8,xmm8,27
+ pshufd xmm9,xmm9,27
+ movups XMMWORD[r8],xmm2
+ movdqu XMMWORD[r9],xmm8
+ movd DWORD[16+r9],xmm9
+ movaps xmm6,XMMWORD[((-8-160))+rax]
+ movaps xmm7,XMMWORD[((-8-144))+rax]
+ movaps xmm8,XMMWORD[((-8-128))+rax]
+ movaps xmm9,XMMWORD[((-8-112))+rax]
+ movaps xmm10,XMMWORD[((-8-96))+rax]
+ movaps xmm11,XMMWORD[((-8-80))+rax]
+ movaps xmm12,XMMWORD[((-8-64))+rax]
+ movaps xmm13,XMMWORD[((-8-48))+rax]
+ movaps xmm14,XMMWORD[((-8-32))+rax]
+ movaps xmm15,XMMWORD[((-8-16))+rax]
+ mov rsp,rax
+$L$epilogue_shaext:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_cbc_sha1_enc_shaext:
+EXTERN __imp_RtlVirtualUnwind
+
+ALIGN 16
+ssse3_handler:
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD[120+r8]
+ mov rbx,QWORD[248+r8]
+
+ mov rsi,QWORD[8+r9]
+ mov r11,QWORD[56+r9]
+
+ mov r10d,DWORD[r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jb NEAR $L$common_seh_tail
+
+ mov rax,QWORD[152+r8]
+
+ mov r10d,DWORD[4+r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jae NEAR $L$common_seh_tail
+ lea r10,[aesni_cbc_sha1_enc_shaext]
+ cmp rbx,r10
+ jb NEAR $L$seh_no_shaext
+
+ lea rsi,[rax]
+ lea rdi,[512+r8]
+ mov ecx,20
+ DD 0xa548f3fc
+ lea rax,[168+rax]
+ jmp NEAR $L$common_seh_tail
+$L$seh_no_shaext:
+ lea rsi,[96+rax]
+ lea rdi,[512+r8]
+ mov ecx,20
+ DD 0xa548f3fc
+ lea rax,[264+rax]
+
+ mov r15,QWORD[rax]
+ mov r14,QWORD[8+rax]
+ mov r13,QWORD[16+rax]
+ mov r12,QWORD[24+rax]
+ mov rbp,QWORD[32+rax]
+ mov rbx,QWORD[40+rax]
+ lea rax,[48+rax]
+ mov QWORD[144+r8],rbx
+ mov QWORD[160+r8],rbp
+ mov QWORD[216+r8],r12
+ mov QWORD[224+r8],r13
+ mov QWORD[232+r8],r14
+ mov QWORD[240+r8],r15
+
+$L$common_seh_tail:
+ mov rdi,QWORD[8+rax]
+ mov rsi,QWORD[16+rax]
+ mov QWORD[152+r8],rax
+ mov QWORD[168+r8],rsi
+ mov QWORD[176+r8],rdi
+
+ mov rdi,QWORD[40+r9]
+ mov rsi,r8
+ mov ecx,154
+ DD 0xa548f3fc
+
+ mov rsi,r9
+ xor rcx,rcx
+ mov rdx,QWORD[8+rsi]
+ mov r8,QWORD[rsi]
+ mov r9,QWORD[16+rsi]
+ mov r10,QWORD[40+rsi]
+ lea r11,[56+rsi]
+ lea r12,[24+rsi]
+ mov QWORD[32+rsp],r10
+ mov QWORD[40+rsp],r11
+ mov QWORD[48+rsp],r12
+ mov QWORD[56+rsp],rcx
+ call QWORD[__imp_RtlVirtualUnwind]
+
+ mov eax,1
+ add rsp,64
+ popfq
+ pop r15
+ pop r14
+ pop r13
+ pop r12
+ pop rbp
+ pop rbx
+ pop rdi
+ pop rsi
+ DB 0F3h,0C3h ;repret
+
+
+section .pdata rdata align=4
+ALIGN 4
+ DD $L$SEH_begin_aesni_cbc_sha1_enc_ssse3 wrt ..imagebase
+ DD $L$SEH_end_aesni_cbc_sha1_enc_ssse3 wrt ..imagebase
+ DD $L$SEH_info_aesni_cbc_sha1_enc_ssse3 wrt ..imagebase
+ DD $L$SEH_begin_aesni_cbc_sha1_enc_avx wrt ..imagebase
+ DD $L$SEH_end_aesni_cbc_sha1_enc_avx wrt ..imagebase
+ DD $L$SEH_info_aesni_cbc_sha1_enc_avx wrt ..imagebase
+ DD $L$SEH_begin_aesni_cbc_sha1_enc_shaext wrt ..imagebase
+ DD $L$SEH_end_aesni_cbc_sha1_enc_shaext wrt ..imagebase
+ DD $L$SEH_info_aesni_cbc_sha1_enc_shaext wrt ..imagebase
+section .xdata rdata align=8
+ALIGN 8
+$L$SEH_info_aesni_cbc_sha1_enc_ssse3:
+DB 9,0,0,0
+ DD ssse3_handler wrt ..imagebase
+ DD $L$prologue_ssse3 wrt ..imagebase,$L$epilogue_ssse3 wrt ..imagebase
+$L$SEH_info_aesni_cbc_sha1_enc_avx:
+DB 9,0,0,0
+ DD ssse3_handler wrt ..imagebase
+ DD $L$prologue_avx wrt ..imagebase,$L$epilogue_avx wrt ..imagebase
+$L$SEH_info_aesni_cbc_sha1_enc_shaext:
+DB 9,0,0,0
+ DD ssse3_handler wrt ..imagebase
+ DD $L$prologue_shaext wrt ..imagebase,$L$epilogue_shaext wrt ..imagebase
diff --git a/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aesni-sha256-x86_64.nasm b/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aesni-sha256-x86_64.nasm
new file mode 100644
index 0000000000..b2a9c65f5d
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aesni-sha256-x86_64.nasm
@@ -0,0 +1,4708 @@
+default rel
+%define XMMWORD
+%define YMMWORD
+%define ZMMWORD
+section .text code align=64
+
+
+EXTERN OPENSSL_ia32cap_P
+global aesni_cbc_sha256_enc
+
+ALIGN 16
+aesni_cbc_sha256_enc:
+
+ lea r11,[OPENSSL_ia32cap_P]
+ mov eax,1
+ cmp rcx,0
+ je NEAR $L$probe
+ mov eax,DWORD[r11]
+ mov r10,QWORD[4+r11]
+ bt r10,61
+ jc NEAR aesni_cbc_sha256_enc_shaext
+ mov r11,r10
+ shr r11,32
+
+ test r10d,2048
+ jnz NEAR aesni_cbc_sha256_enc_xop
+ and r11d,296
+ cmp r11d,296
+ je NEAR aesni_cbc_sha256_enc_avx2
+ and r10d,268435456
+ jnz NEAR aesni_cbc_sha256_enc_avx
+ ud2
+ xor eax,eax
+ cmp rcx,0
+ je NEAR $L$probe
+ ud2
+$L$probe:
+ DB 0F3h,0C3h ;repret
+
+
+
+ALIGN 64
+
+K256:
+ DD 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
+ DD 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
+ DD 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
+ DD 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
+ DD 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
+ DD 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
+ DD 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
+ DD 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
+ DD 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
+ DD 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
+ DD 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
+ DD 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
+ DD 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
+ DD 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
+ DD 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
+ DD 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
+ DD 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
+ DD 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
+ DD 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
+ DD 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
+ DD 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
+ DD 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
+ DD 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
+ DD 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
+ DD 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
+ DD 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
+ DD 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
+ DD 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
+ DD 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
+ DD 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
+ DD 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
+ DD 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
+
+ DD 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+ DD 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+ DD 0,0,0,0,0,0,0,0,-1,-1,-1,-1
+ DD 0,0,0,0,0,0,0,0
+DB 65,69,83,78,73,45,67,66,67,43,83,72,65,50,53,54
+DB 32,115,116,105,116,99,104,32,102,111,114,32,120,56,54,95
+DB 54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98
+DB 121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108
+DB 46,111,114,103,62,0
+ALIGN 64
+
+ALIGN 64
+aesni_cbc_sha256_enc_xop:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_cbc_sha256_enc_xop:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD[40+rsp]
+ mov r9,QWORD[48+rsp]
+
+
+
+$L$xop_shortcut:
+ mov r10,QWORD[56+rsp]
+ mov rax,rsp
+
+ push rbx
+
+ push rbp
+
+ push r12
+
+ push r13
+
+ push r14
+
+ push r15
+
+ sub rsp,288
+ and rsp,-64
+
+ shl rdx,6
+ sub rsi,rdi
+ sub r10,rdi
+ add rdx,rdi
+
+
+ mov QWORD[((64+8))+rsp],rsi
+ mov QWORD[((64+16))+rsp],rdx
+
+ mov QWORD[((64+32))+rsp],r8
+ mov QWORD[((64+40))+rsp],r9
+ mov QWORD[((64+48))+rsp],r10
+ mov QWORD[120+rsp],rax
+
+ movaps XMMWORD[128+rsp],xmm6
+ movaps XMMWORD[144+rsp],xmm7
+ movaps XMMWORD[160+rsp],xmm8
+ movaps XMMWORD[176+rsp],xmm9
+ movaps XMMWORD[192+rsp],xmm10
+ movaps XMMWORD[208+rsp],xmm11
+ movaps XMMWORD[224+rsp],xmm12
+ movaps XMMWORD[240+rsp],xmm13
+ movaps XMMWORD[256+rsp],xmm14
+ movaps XMMWORD[272+rsp],xmm15
+$L$prologue_xop:
+ vzeroall
+
+ mov r12,rdi
+ lea rdi,[128+rcx]
+ lea r13,[((K256+544))]
+ mov r14d,DWORD[((240-128))+rdi]
+ mov r15,r9
+ mov rsi,r10
+ vmovdqu xmm8,XMMWORD[r8]
+ sub r14,9
+
+ mov eax,DWORD[r15]
+ mov ebx,DWORD[4+r15]
+ mov ecx,DWORD[8+r15]
+ mov edx,DWORD[12+r15]
+ mov r8d,DWORD[16+r15]
+ mov r9d,DWORD[20+r15]
+ mov r10d,DWORD[24+r15]
+ mov r11d,DWORD[28+r15]
+
+ vmovdqa xmm14,XMMWORD[r14*8+r13]
+ vmovdqa xmm13,XMMWORD[16+r14*8+r13]
+ vmovdqa xmm12,XMMWORD[32+r14*8+r13]
+ vmovdqu xmm10,XMMWORD[((0-128))+rdi]
+ jmp NEAR $L$loop_xop
+ALIGN 16
+$L$loop_xop:
+ vmovdqa xmm7,XMMWORD[((K256+512))]
+ vmovdqu xmm0,XMMWORD[r12*1+rsi]
+ vmovdqu xmm1,XMMWORD[16+r12*1+rsi]
+ vmovdqu xmm2,XMMWORD[32+r12*1+rsi]
+ vmovdqu xmm3,XMMWORD[48+r12*1+rsi]
+ vpshufb xmm0,xmm0,xmm7
+ lea rbp,[K256]
+ vpshufb xmm1,xmm1,xmm7
+ vpshufb xmm2,xmm2,xmm7
+ vpaddd xmm4,xmm0,XMMWORD[rbp]
+ vpshufb xmm3,xmm3,xmm7
+ vpaddd xmm5,xmm1,XMMWORD[32+rbp]
+ vpaddd xmm6,xmm2,XMMWORD[64+rbp]
+ vpaddd xmm7,xmm3,XMMWORD[96+rbp]
+ vmovdqa XMMWORD[rsp],xmm4
+ mov r14d,eax
+ vmovdqa XMMWORD[16+rsp],xmm5
+ mov esi,ebx
+ vmovdqa XMMWORD[32+rsp],xmm6
+ xor esi,ecx
+ vmovdqa XMMWORD[48+rsp],xmm7
+ mov r13d,r8d
+ jmp NEAR $L$xop_00_47
+
+ALIGN 16
+$L$xop_00_47:
+ sub rbp,-16*2*4
+ vmovdqu xmm9,XMMWORD[r12]
+ mov QWORD[((64+0))+rsp],r12
+ vpalignr xmm4,xmm1,xmm0,4
+ ror r13d,14
+ mov eax,r14d
+ vpalignr xmm7,xmm3,xmm2,4
+ mov r12d,r9d
+ xor r13d,r8d
+DB 143,232,120,194,236,14
+ ror r14d,9
+ xor r12d,r10d
+ vpsrld xmm4,xmm4,3
+ ror r13d,5
+ xor r14d,eax
+ vpaddd xmm0,xmm0,xmm7
+ and r12d,r8d
+ vpxor xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((16-128))+rdi]
+ xor r13d,r8d
+ add r11d,DWORD[rsp]
+ mov r15d,eax
+DB 143,232,120,194,245,11
+ ror r14d,11
+ xor r12d,r10d
+ vpxor xmm4,xmm4,xmm5
+ xor r15d,ebx
+ ror r13d,6
+ add r11d,r12d
+ and esi,r15d
+DB 143,232,120,194,251,13
+ xor r14d,eax
+ add r11d,r13d
+ vpxor xmm4,xmm4,xmm6
+ xor esi,ebx
+ add edx,r11d
+ vpsrld xmm6,xmm3,10
+ ror r14d,2
+ add r11d,esi
+ vpaddd xmm0,xmm0,xmm4
+ mov r13d,edx
+ add r14d,r11d
+DB 143,232,120,194,239,2
+ ror r13d,14
+ mov r11d,r14d
+ vpxor xmm7,xmm7,xmm6
+ mov r12d,r8d
+ xor r13d,edx
+ ror r14d,9
+ xor r12d,r9d
+ vpxor xmm7,xmm7,xmm5
+ ror r13d,5
+ xor r14d,r11d
+ and r12d,edx
+ vpxor xmm9,xmm9,xmm8
+ xor r13d,edx
+ vpsrldq xmm7,xmm7,8
+ add r10d,DWORD[4+rsp]
+ mov esi,r11d
+ ror r14d,11
+ xor r12d,r9d
+ vpaddd xmm0,xmm0,xmm7
+ xor esi,eax
+ ror r13d,6
+ add r10d,r12d
+ and r15d,esi
+DB 143,232,120,194,248,13
+ xor r14d,r11d
+ add r10d,r13d
+ vpsrld xmm6,xmm0,10
+ xor r15d,eax
+ add ecx,r10d
+DB 143,232,120,194,239,2
+ ror r14d,2
+ add r10d,r15d
+ vpxor xmm7,xmm7,xmm6
+ mov r13d,ecx
+ add r14d,r10d
+ ror r13d,14
+ mov r10d,r14d
+ vpxor xmm7,xmm7,xmm5
+ mov r12d,edx
+ xor r13d,ecx
+ ror r14d,9
+ xor r12d,r8d
+ vpslldq xmm7,xmm7,8
+ ror r13d,5
+ xor r14d,r10d
+ and r12d,ecx
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((32-128))+rdi]
+ xor r13d,ecx
+ vpaddd xmm0,xmm0,xmm7
+ add r9d,DWORD[8+rsp]
+ mov r15d,r10d
+ ror r14d,11
+ xor r12d,r8d
+ vpaddd xmm6,xmm0,XMMWORD[rbp]
+ xor r15d,r11d
+ ror r13d,6
+ add r9d,r12d
+ and esi,r15d
+ xor r14d,r10d
+ add r9d,r13d
+ xor esi,r11d
+ add ebx,r9d
+ ror r14d,2
+ add r9d,esi
+ mov r13d,ebx
+ add r14d,r9d
+ ror r13d,14
+ mov r9d,r14d
+ mov r12d,ecx
+ xor r13d,ebx
+ ror r14d,9
+ xor r12d,edx
+ ror r13d,5
+ xor r14d,r9d
+ and r12d,ebx
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((48-128))+rdi]
+ xor r13d,ebx
+ add r8d,DWORD[12+rsp]
+ mov esi,r9d
+ ror r14d,11
+ xor r12d,edx
+ xor esi,r10d
+ ror r13d,6
+ add r8d,r12d
+ and r15d,esi
+ xor r14d,r9d
+ add r8d,r13d
+ xor r15d,r10d
+ add eax,r8d
+ ror r14d,2
+ add r8d,r15d
+ mov r13d,eax
+ add r14d,r8d
+ vmovdqa XMMWORD[rsp],xmm6
+ vpalignr xmm4,xmm2,xmm1,4
+ ror r13d,14
+ mov r8d,r14d
+ vpalignr xmm7,xmm0,xmm3,4
+ mov r12d,ebx
+ xor r13d,eax
+DB 143,232,120,194,236,14
+ ror r14d,9
+ xor r12d,ecx
+ vpsrld xmm4,xmm4,3
+ ror r13d,5
+ xor r14d,r8d
+ vpaddd xmm1,xmm1,xmm7
+ and r12d,eax
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((64-128))+rdi]
+ xor r13d,eax
+ add edx,DWORD[16+rsp]
+ mov r15d,r8d
+DB 143,232,120,194,245,11
+ ror r14d,11
+ xor r12d,ecx
+ vpxor xmm4,xmm4,xmm5
+ xor r15d,r9d
+ ror r13d,6
+ add edx,r12d
+ and esi,r15d
+DB 143,232,120,194,248,13
+ xor r14d,r8d
+ add edx,r13d
+ vpxor xmm4,xmm4,xmm6
+ xor esi,r9d
+ add r11d,edx
+ vpsrld xmm6,xmm0,10
+ ror r14d,2
+ add edx,esi
+ vpaddd xmm1,xmm1,xmm4
+ mov r13d,r11d
+ add r14d,edx
+DB 143,232,120,194,239,2
+ ror r13d,14
+ mov edx,r14d
+ vpxor xmm7,xmm7,xmm6
+ mov r12d,eax
+ xor r13d,r11d
+ ror r14d,9
+ xor r12d,ebx
+ vpxor xmm7,xmm7,xmm5
+ ror r13d,5
+ xor r14d,edx
+ and r12d,r11d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((80-128))+rdi]
+ xor r13d,r11d
+ vpsrldq xmm7,xmm7,8
+ add ecx,DWORD[20+rsp]
+ mov esi,edx
+ ror r14d,11
+ xor r12d,ebx
+ vpaddd xmm1,xmm1,xmm7
+ xor esi,r8d
+ ror r13d,6
+ add ecx,r12d
+ and r15d,esi
+DB 143,232,120,194,249,13
+ xor r14d,edx
+ add ecx,r13d
+ vpsrld xmm6,xmm1,10
+ xor r15d,r8d
+ add r10d,ecx
+DB 143,232,120,194,239,2
+ ror r14d,2
+ add ecx,r15d
+ vpxor xmm7,xmm7,xmm6
+ mov r13d,r10d
+ add r14d,ecx
+ ror r13d,14
+ mov ecx,r14d
+ vpxor xmm7,xmm7,xmm5
+ mov r12d,r11d
+ xor r13d,r10d
+ ror r14d,9
+ xor r12d,eax
+ vpslldq xmm7,xmm7,8
+ ror r13d,5
+ xor r14d,ecx
+ and r12d,r10d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((96-128))+rdi]
+ xor r13d,r10d
+ vpaddd xmm1,xmm1,xmm7
+ add ebx,DWORD[24+rsp]
+ mov r15d,ecx
+ ror r14d,11
+ xor r12d,eax
+ vpaddd xmm6,xmm1,XMMWORD[32+rbp]
+ xor r15d,edx
+ ror r13d,6
+ add ebx,r12d
+ and esi,r15d
+ xor r14d,ecx
+ add ebx,r13d
+ xor esi,edx
+ add r9d,ebx
+ ror r14d,2
+ add ebx,esi
+ mov r13d,r9d
+ add r14d,ebx
+ ror r13d,14
+ mov ebx,r14d
+ mov r12d,r10d
+ xor r13d,r9d
+ ror r14d,9
+ xor r12d,r11d
+ ror r13d,5
+ xor r14d,ebx
+ and r12d,r9d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((112-128))+rdi]
+ xor r13d,r9d
+ add eax,DWORD[28+rsp]
+ mov esi,ebx
+ ror r14d,11
+ xor r12d,r11d
+ xor esi,ecx
+ ror r13d,6
+ add eax,r12d
+ and r15d,esi
+ xor r14d,ebx
+ add eax,r13d
+ xor r15d,ecx
+ add r8d,eax
+ ror r14d,2
+ add eax,r15d
+ mov r13d,r8d
+ add r14d,eax
+ vmovdqa XMMWORD[16+rsp],xmm6
+ vpalignr xmm4,xmm3,xmm2,4
+ ror r13d,14
+ mov eax,r14d
+ vpalignr xmm7,xmm1,xmm0,4
+ mov r12d,r9d
+ xor r13d,r8d
+DB 143,232,120,194,236,14
+ ror r14d,9
+ xor r12d,r10d
+ vpsrld xmm4,xmm4,3
+ ror r13d,5
+ xor r14d,eax
+ vpaddd xmm2,xmm2,xmm7
+ and r12d,r8d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((128-128))+rdi]
+ xor r13d,r8d
+ add r11d,DWORD[32+rsp]
+ mov r15d,eax
+DB 143,232,120,194,245,11
+ ror r14d,11
+ xor r12d,r10d
+ vpxor xmm4,xmm4,xmm5
+ xor r15d,ebx
+ ror r13d,6
+ add r11d,r12d
+ and esi,r15d
+DB 143,232,120,194,249,13
+ xor r14d,eax
+ add r11d,r13d
+ vpxor xmm4,xmm4,xmm6
+ xor esi,ebx
+ add edx,r11d
+ vpsrld xmm6,xmm1,10
+ ror r14d,2
+ add r11d,esi
+ vpaddd xmm2,xmm2,xmm4
+ mov r13d,edx
+ add r14d,r11d
+DB 143,232,120,194,239,2
+ ror r13d,14
+ mov r11d,r14d
+ vpxor xmm7,xmm7,xmm6
+ mov r12d,r8d
+ xor r13d,edx
+ ror r14d,9
+ xor r12d,r9d
+ vpxor xmm7,xmm7,xmm5
+ ror r13d,5
+ xor r14d,r11d
+ and r12d,edx
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((144-128))+rdi]
+ xor r13d,edx
+ vpsrldq xmm7,xmm7,8
+ add r10d,DWORD[36+rsp]
+ mov esi,r11d
+ ror r14d,11
+ xor r12d,r9d
+ vpaddd xmm2,xmm2,xmm7
+ xor esi,eax
+ ror r13d,6
+ add r10d,r12d
+ and r15d,esi
+DB 143,232,120,194,250,13
+ xor r14d,r11d
+ add r10d,r13d
+ vpsrld xmm6,xmm2,10
+ xor r15d,eax
+ add ecx,r10d
+DB 143,232,120,194,239,2
+ ror r14d,2
+ add r10d,r15d
+ vpxor xmm7,xmm7,xmm6
+ mov r13d,ecx
+ add r14d,r10d
+ ror r13d,14
+ mov r10d,r14d
+ vpxor xmm7,xmm7,xmm5
+ mov r12d,edx
+ xor r13d,ecx
+ ror r14d,9
+ xor r12d,r8d
+ vpslldq xmm7,xmm7,8
+ ror r13d,5
+ xor r14d,r10d
+ and r12d,ecx
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((160-128))+rdi]
+ xor r13d,ecx
+ vpaddd xmm2,xmm2,xmm7
+ add r9d,DWORD[40+rsp]
+ mov r15d,r10d
+ ror r14d,11
+ xor r12d,r8d
+ vpaddd xmm6,xmm2,XMMWORD[64+rbp]
+ xor r15d,r11d
+ ror r13d,6
+ add r9d,r12d
+ and esi,r15d
+ xor r14d,r10d
+ add r9d,r13d
+ xor esi,r11d
+ add ebx,r9d
+ ror r14d,2
+ add r9d,esi
+ mov r13d,ebx
+ add r14d,r9d
+ ror r13d,14
+ mov r9d,r14d
+ mov r12d,ecx
+ xor r13d,ebx
+ ror r14d,9
+ xor r12d,edx
+ ror r13d,5
+ xor r14d,r9d
+ and r12d,ebx
+ vaesenclast xmm11,xmm9,xmm10
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((176-128))+rdi]
+ xor r13d,ebx
+ add r8d,DWORD[44+rsp]
+ mov esi,r9d
+ ror r14d,11
+ xor r12d,edx
+ xor esi,r10d
+ ror r13d,6
+ add r8d,r12d
+ and r15d,esi
+ xor r14d,r9d
+ add r8d,r13d
+ xor r15d,r10d
+ add eax,r8d
+ ror r14d,2
+ add r8d,r15d
+ mov r13d,eax
+ add r14d,r8d
+ vmovdqa XMMWORD[32+rsp],xmm6
+ vpalignr xmm4,xmm0,xmm3,4
+ ror r13d,14
+ mov r8d,r14d
+ vpalignr xmm7,xmm2,xmm1,4
+ mov r12d,ebx
+ xor r13d,eax
+DB 143,232,120,194,236,14
+ ror r14d,9
+ xor r12d,ecx
+ vpsrld xmm4,xmm4,3
+ ror r13d,5
+ xor r14d,r8d
+ vpaddd xmm3,xmm3,xmm7
+ and r12d,eax
+ vpand xmm8,xmm11,xmm12
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((192-128))+rdi]
+ xor r13d,eax
+ add edx,DWORD[48+rsp]
+ mov r15d,r8d
+DB 143,232,120,194,245,11
+ ror r14d,11
+ xor r12d,ecx
+ vpxor xmm4,xmm4,xmm5
+ xor r15d,r9d
+ ror r13d,6
+ add edx,r12d
+ and esi,r15d
+DB 143,232,120,194,250,13
+ xor r14d,r8d
+ add edx,r13d
+ vpxor xmm4,xmm4,xmm6
+ xor esi,r9d
+ add r11d,edx
+ vpsrld xmm6,xmm2,10
+ ror r14d,2
+ add edx,esi
+ vpaddd xmm3,xmm3,xmm4
+ mov r13d,r11d
+ add r14d,edx
+DB 143,232,120,194,239,2
+ ror r13d,14
+ mov edx,r14d
+ vpxor xmm7,xmm7,xmm6
+ mov r12d,eax
+ xor r13d,r11d
+ ror r14d,9
+ xor r12d,ebx
+ vpxor xmm7,xmm7,xmm5
+ ror r13d,5
+ xor r14d,edx
+ and r12d,r11d
+ vaesenclast xmm11,xmm9,xmm10
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((208-128))+rdi]
+ xor r13d,r11d
+ vpsrldq xmm7,xmm7,8
+ add ecx,DWORD[52+rsp]
+ mov esi,edx
+ ror r14d,11
+ xor r12d,ebx
+ vpaddd xmm3,xmm3,xmm7
+ xor esi,r8d
+ ror r13d,6
+ add ecx,r12d
+ and r15d,esi
+DB 143,232,120,194,251,13
+ xor r14d,edx
+ add ecx,r13d
+ vpsrld xmm6,xmm3,10
+ xor r15d,r8d
+ add r10d,ecx
+DB 143,232,120,194,239,2
+ ror r14d,2
+ add ecx,r15d
+ vpxor xmm7,xmm7,xmm6
+ mov r13d,r10d
+ add r14d,ecx
+ ror r13d,14
+ mov ecx,r14d
+ vpxor xmm7,xmm7,xmm5
+ mov r12d,r11d
+ xor r13d,r10d
+ ror r14d,9
+ xor r12d,eax
+ vpslldq xmm7,xmm7,8
+ ror r13d,5
+ xor r14d,ecx
+ and r12d,r10d
+ vpand xmm11,xmm11,xmm13
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((224-128))+rdi]
+ xor r13d,r10d
+ vpaddd xmm3,xmm3,xmm7
+ add ebx,DWORD[56+rsp]
+ mov r15d,ecx
+ ror r14d,11
+ xor r12d,eax
+ vpaddd xmm6,xmm3,XMMWORD[96+rbp]
+ xor r15d,edx
+ ror r13d,6
+ add ebx,r12d
+ and esi,r15d
+ xor r14d,ecx
+ add ebx,r13d
+ xor esi,edx
+ add r9d,ebx
+ ror r14d,2
+ add ebx,esi
+ mov r13d,r9d
+ add r14d,ebx
+ ror r13d,14
+ mov ebx,r14d
+ mov r12d,r10d
+ xor r13d,r9d
+ ror r14d,9
+ xor r12d,r11d
+ ror r13d,5
+ xor r14d,ebx
+ and r12d,r9d
+ vpor xmm8,xmm8,xmm11
+ vaesenclast xmm11,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((0-128))+rdi]
+ xor r13d,r9d
+ add eax,DWORD[60+rsp]
+ mov esi,ebx
+ ror r14d,11
+ xor r12d,r11d
+ xor esi,ecx
+ ror r13d,6
+ add eax,r12d
+ and r15d,esi
+ xor r14d,ebx
+ add eax,r13d
+ xor r15d,ecx
+ add r8d,eax
+ ror r14d,2
+ add eax,r15d
+ mov r13d,r8d
+ add r14d,eax
+ vmovdqa XMMWORD[48+rsp],xmm6
+ mov r12,QWORD[((64+0))+rsp]
+ vpand xmm11,xmm11,xmm14
+ mov r15,QWORD[((64+8))+rsp]
+ vpor xmm8,xmm8,xmm11
+ vmovdqu XMMWORD[r12*1+r15],xmm8
+ lea r12,[16+r12]
+ cmp BYTE[131+rbp],0
+ jne NEAR $L$xop_00_47
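+; Tail rounds without schedule update follow; the eight working variables are
+; then added back into the SHA-256 state at r15, the ciphertext block is
+; flushed, and the outer $L$loop_xop repeats while the data pointer r12 is
+; below the end pointer saved at 64+16(rsp).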
+ vmovdqu xmm9,XMMWORD[r12]
+ mov QWORD[((64+0))+rsp],r12
+ ror r13d,14
+ mov eax,r14d
+ mov r12d,r9d
+ xor r13d,r8d
+ ror r14d,9
+ xor r12d,r10d
+ ror r13d,5
+ xor r14d,eax
+ and r12d,r8d
+ vpxor xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((16-128))+rdi]
+ xor r13d,r8d
+ add r11d,DWORD[rsp]
+ mov r15d,eax
+ ror r14d,11
+ xor r12d,r10d
+ xor r15d,ebx
+ ror r13d,6
+ add r11d,r12d
+ and esi,r15d
+ xor r14d,eax
+ add r11d,r13d
+ xor esi,ebx
+ add edx,r11d
+ ror r14d,2
+ add r11d,esi
+ mov r13d,edx
+ add r14d,r11d
+ ror r13d,14
+ mov r11d,r14d
+ mov r12d,r8d
+ xor r13d,edx
+ ror r14d,9
+ xor r12d,r9d
+ ror r13d,5
+ xor r14d,r11d
+ and r12d,edx
+ vpxor xmm9,xmm9,xmm8
+ xor r13d,edx
+ add r10d,DWORD[4+rsp]
+ mov esi,r11d
+ ror r14d,11
+ xor r12d,r9d
+ xor esi,eax
+ ror r13d,6
+ add r10d,r12d
+ and r15d,esi
+ xor r14d,r11d
+ add r10d,r13d
+ xor r15d,eax
+ add ecx,r10d
+ ror r14d,2
+ add r10d,r15d
+ mov r13d,ecx
+ add r14d,r10d
+ ror r13d,14
+ mov r10d,r14d
+ mov r12d,edx
+ xor r13d,ecx
+ ror r14d,9
+ xor r12d,r8d
+ ror r13d,5
+ xor r14d,r10d
+ and r12d,ecx
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((32-128))+rdi]
+ xor r13d,ecx
+ add r9d,DWORD[8+rsp]
+ mov r15d,r10d
+ ror r14d,11
+ xor r12d,r8d
+ xor r15d,r11d
+ ror r13d,6
+ add r9d,r12d
+ and esi,r15d
+ xor r14d,r10d
+ add r9d,r13d
+ xor esi,r11d
+ add ebx,r9d
+ ror r14d,2
+ add r9d,esi
+ mov r13d,ebx
+ add r14d,r9d
+ ror r13d,14
+ mov r9d,r14d
+ mov r12d,ecx
+ xor r13d,ebx
+ ror r14d,9
+ xor r12d,edx
+ ror r13d,5
+ xor r14d,r9d
+ and r12d,ebx
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((48-128))+rdi]
+ xor r13d,ebx
+ add r8d,DWORD[12+rsp]
+ mov esi,r9d
+ ror r14d,11
+ xor r12d,edx
+ xor esi,r10d
+ ror r13d,6
+ add r8d,r12d
+ and r15d,esi
+ xor r14d,r9d
+ add r8d,r13d
+ xor r15d,r10d
+ add eax,r8d
+ ror r14d,2
+ add r8d,r15d
+ mov r13d,eax
+ add r14d,r8d
+ ror r13d,14
+ mov r8d,r14d
+ mov r12d,ebx
+ xor r13d,eax
+ ror r14d,9
+ xor r12d,ecx
+ ror r13d,5
+ xor r14d,r8d
+ and r12d,eax
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((64-128))+rdi]
+ xor r13d,eax
+ add edx,DWORD[16+rsp]
+ mov r15d,r8d
+ ror r14d,11
+ xor r12d,ecx
+ xor r15d,r9d
+ ror r13d,6
+ add edx,r12d
+ and esi,r15d
+ xor r14d,r8d
+ add edx,r13d
+ xor esi,r9d
+ add r11d,edx
+ ror r14d,2
+ add edx,esi
+ mov r13d,r11d
+ add r14d,edx
+ ror r13d,14
+ mov edx,r14d
+ mov r12d,eax
+ xor r13d,r11d
+ ror r14d,9
+ xor r12d,ebx
+ ror r13d,5
+ xor r14d,edx
+ and r12d,r11d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((80-128))+rdi]
+ xor r13d,r11d
+ add ecx,DWORD[20+rsp]
+ mov esi,edx
+ ror r14d,11
+ xor r12d,ebx
+ xor esi,r8d
+ ror r13d,6
+ add ecx,r12d
+ and r15d,esi
+ xor r14d,edx
+ add ecx,r13d
+ xor r15d,r8d
+ add r10d,ecx
+ ror r14d,2
+ add ecx,r15d
+ mov r13d,r10d
+ add r14d,ecx
+ ror r13d,14
+ mov ecx,r14d
+ mov r12d,r11d
+ xor r13d,r10d
+ ror r14d,9
+ xor r12d,eax
+ ror r13d,5
+ xor r14d,ecx
+ and r12d,r10d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((96-128))+rdi]
+ xor r13d,r10d
+ add ebx,DWORD[24+rsp]
+ mov r15d,ecx
+ ror r14d,11
+ xor r12d,eax
+ xor r15d,edx
+ ror r13d,6
+ add ebx,r12d
+ and esi,r15d
+ xor r14d,ecx
+ add ebx,r13d
+ xor esi,edx
+ add r9d,ebx
+ ror r14d,2
+ add ebx,esi
+ mov r13d,r9d
+ add r14d,ebx
+ ror r13d,14
+ mov ebx,r14d
+ mov r12d,r10d
+ xor r13d,r9d
+ ror r14d,9
+ xor r12d,r11d
+ ror r13d,5
+ xor r14d,ebx
+ and r12d,r9d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((112-128))+rdi]
+ xor r13d,r9d
+ add eax,DWORD[28+rsp]
+ mov esi,ebx
+ ror r14d,11
+ xor r12d,r11d
+ xor esi,ecx
+ ror r13d,6
+ add eax,r12d
+ and r15d,esi
+ xor r14d,ebx
+ add eax,r13d
+ xor r15d,ecx
+ add r8d,eax
+ ror r14d,2
+ add eax,r15d
+ mov r13d,r8d
+ add r14d,eax
+ ror r13d,14
+ mov eax,r14d
+ mov r12d,r9d
+ xor r13d,r8d
+ ror r14d,9
+ xor r12d,r10d
+ ror r13d,5
+ xor r14d,eax
+ and r12d,r8d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((128-128))+rdi]
+ xor r13d,r8d
+ add r11d,DWORD[32+rsp]
+ mov r15d,eax
+ ror r14d,11
+ xor r12d,r10d
+ xor r15d,ebx
+ ror r13d,6
+ add r11d,r12d
+ and esi,r15d
+ xor r14d,eax
+ add r11d,r13d
+ xor esi,ebx
+ add edx,r11d
+ ror r14d,2
+ add r11d,esi
+ mov r13d,edx
+ add r14d,r11d
+ ror r13d,14
+ mov r11d,r14d
+ mov r12d,r8d
+ xor r13d,edx
+ ror r14d,9
+ xor r12d,r9d
+ ror r13d,5
+ xor r14d,r11d
+ and r12d,edx
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((144-128))+rdi]
+ xor r13d,edx
+ add r10d,DWORD[36+rsp]
+ mov esi,r11d
+ ror r14d,11
+ xor r12d,r9d
+ xor esi,eax
+ ror r13d,6
+ add r10d,r12d
+ and r15d,esi
+ xor r14d,r11d
+ add r10d,r13d
+ xor r15d,eax
+ add ecx,r10d
+ ror r14d,2
+ add r10d,r15d
+ mov r13d,ecx
+ add r14d,r10d
+ ror r13d,14
+ mov r10d,r14d
+ mov r12d,edx
+ xor r13d,ecx
+ ror r14d,9
+ xor r12d,r8d
+ ror r13d,5
+ xor r14d,r10d
+ and r12d,ecx
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((160-128))+rdi]
+ xor r13d,ecx
+ add r9d,DWORD[40+rsp]
+ mov r15d,r10d
+ ror r14d,11
+ xor r12d,r8d
+ xor r15d,r11d
+ ror r13d,6
+ add r9d,r12d
+ and esi,r15d
+ xor r14d,r10d
+ add r9d,r13d
+ xor esi,r11d
+ add ebx,r9d
+ ror r14d,2
+ add r9d,esi
+ mov r13d,ebx
+ add r14d,r9d
+ ror r13d,14
+ mov r9d,r14d
+ mov r12d,ecx
+ xor r13d,ebx
+ ror r14d,9
+ xor r12d,edx
+ ror r13d,5
+ xor r14d,r9d
+ and r12d,ebx
+ vaesenclast xmm11,xmm9,xmm10
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((176-128))+rdi]
+ xor r13d,ebx
+ add r8d,DWORD[44+rsp]
+ mov esi,r9d
+ ror r14d,11
+ xor r12d,edx
+ xor esi,r10d
+ ror r13d,6
+ add r8d,r12d
+ and r15d,esi
+ xor r14d,r9d
+ add r8d,r13d
+ xor r15d,r10d
+ add eax,r8d
+ ror r14d,2
+ add r8d,r15d
+ mov r13d,eax
+ add r14d,r8d
+ ror r13d,14
+ mov r8d,r14d
+ mov r12d,ebx
+ xor r13d,eax
+ ror r14d,9
+ xor r12d,ecx
+ ror r13d,5
+ xor r14d,r8d
+ and r12d,eax
+ vpand xmm8,xmm11,xmm12
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((192-128))+rdi]
+ xor r13d,eax
+ add edx,DWORD[48+rsp]
+ mov r15d,r8d
+ ror r14d,11
+ xor r12d,ecx
+ xor r15d,r9d
+ ror r13d,6
+ add edx,r12d
+ and esi,r15d
+ xor r14d,r8d
+ add edx,r13d
+ xor esi,r9d
+ add r11d,edx
+ ror r14d,2
+ add edx,esi
+ mov r13d,r11d
+ add r14d,edx
+ ror r13d,14
+ mov edx,r14d
+ mov r12d,eax
+ xor r13d,r11d
+ ror r14d,9
+ xor r12d,ebx
+ ror r13d,5
+ xor r14d,edx
+ and r12d,r11d
+ vaesenclast xmm11,xmm9,xmm10
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((208-128))+rdi]
+ xor r13d,r11d
+ add ecx,DWORD[52+rsp]
+ mov esi,edx
+ ror r14d,11
+ xor r12d,ebx
+ xor esi,r8d
+ ror r13d,6
+ add ecx,r12d
+ and r15d,esi
+ xor r14d,edx
+ add ecx,r13d
+ xor r15d,r8d
+ add r10d,ecx
+ ror r14d,2
+ add ecx,r15d
+ mov r13d,r10d
+ add r14d,ecx
+ ror r13d,14
+ mov ecx,r14d
+ mov r12d,r11d
+ xor r13d,r10d
+ ror r14d,9
+ xor r12d,eax
+ ror r13d,5
+ xor r14d,ecx
+ and r12d,r10d
+ vpand xmm11,xmm11,xmm13
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((224-128))+rdi]
+ xor r13d,r10d
+ add ebx,DWORD[56+rsp]
+ mov r15d,ecx
+ ror r14d,11
+ xor r12d,eax
+ xor r15d,edx
+ ror r13d,6
+ add ebx,r12d
+ and esi,r15d
+ xor r14d,ecx
+ add ebx,r13d
+ xor esi,edx
+ add r9d,ebx
+ ror r14d,2
+ add ebx,esi
+ mov r13d,r9d
+ add r14d,ebx
+ ror r13d,14
+ mov ebx,r14d
+ mov r12d,r10d
+ xor r13d,r9d
+ ror r14d,9
+ xor r12d,r11d
+ ror r13d,5
+ xor r14d,ebx
+ and r12d,r9d
+ vpor xmm8,xmm8,xmm11
+ vaesenclast xmm11,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((0-128))+rdi]
+ xor r13d,r9d
+ add eax,DWORD[60+rsp]
+ mov esi,ebx
+ ror r14d,11
+ xor r12d,r11d
+ xor esi,ecx
+ ror r13d,6
+ add eax,r12d
+ and r15d,esi
+ xor r14d,ebx
+ add eax,r13d
+ xor r15d,ecx
+ add r8d,eax
+ ror r14d,2
+ add eax,r15d
+ mov r13d,r8d
+ add r14d,eax
+ mov r12,QWORD[((64+0))+rsp]
+ mov r13,QWORD[((64+8))+rsp]
+ mov r15,QWORD[((64+40))+rsp]
+ mov rsi,QWORD[((64+48))+rsp]
+
+ vpand xmm11,xmm11,xmm14
+ mov eax,r14d
+ vpor xmm8,xmm8,xmm11
+ vmovdqu XMMWORD[r13*1+r12],xmm8
+ lea r12,[16+r12]
+
+ add eax,DWORD[r15]
+ add ebx,DWORD[4+r15]
+ add ecx,DWORD[8+r15]
+ add edx,DWORD[12+r15]
+ add r8d,DWORD[16+r15]
+ add r9d,DWORD[20+r15]
+ add r10d,DWORD[24+r15]
+ add r11d,DWORD[28+r15]
+
+ cmp r12,QWORD[((64+16))+rsp]
+
+ mov DWORD[r15],eax
+ mov DWORD[4+r15],ebx
+ mov DWORD[8+r15],ecx
+ mov DWORD[12+r15],edx
+ mov DWORD[16+r15],r8d
+ mov DWORD[20+r15],r9d
+ mov DWORD[24+r15],r10d
+ mov DWORD[28+r15],r11d
+
+ jb NEAR $L$loop_xop
+
+ mov r8,QWORD[((64+32))+rsp]
+ mov rsi,QWORD[120+rsp]
+
+ vmovdqu XMMWORD[r8],xmm8
+ vzeroall
+ movaps xmm6,XMMWORD[128+rsp]
+ movaps xmm7,XMMWORD[144+rsp]
+ movaps xmm8,XMMWORD[160+rsp]
+ movaps xmm9,XMMWORD[176+rsp]
+ movaps xmm10,XMMWORD[192+rsp]
+ movaps xmm11,XMMWORD[208+rsp]
+ movaps xmm12,XMMWORD[224+rsp]
+ movaps xmm13,XMMWORD[240+rsp]
+ movaps xmm14,XMMWORD[256+rsp]
+ movaps xmm15,XMMWORD[272+rsp]
+ mov r15,QWORD[((-48))+rsi]
+
+ mov r14,QWORD[((-40))+rsi]
+
+ mov r13,QWORD[((-32))+rsi]
+
+ mov r12,QWORD[((-24))+rsi]
+
+ mov rbp,QWORD[((-16))+rsi]
+
+ mov rbx,QWORD[((-8))+rsi]
+
+ lea rsp,[rsi]
+
+$L$epilogue_xop:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_cbc_sha256_enc_xop:
+
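+; aesni_cbc_sha256_enc_avx: AVX code path.  Same overall structure as the XOP
+; variant above, but the vector sigma rotations are composed from
+; vpslld/vpsrld/vpsrlq/vpxor and the scalar rotates use shrd, so no XOP
+; instructions are needed.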
+ALIGN 64
+aesni_cbc_sha256_enc_avx:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_cbc_sha256_enc_avx:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD[40+rsp]
+ mov r9,QWORD[48+rsp]
+
+
+
+$L$avx_shortcut:
+ mov r10,QWORD[56+rsp]
+ mov rax,rsp
+
+ push rbx
+
+ push rbp
+
+ push r12
+
+ push r13
+
+ push r14
+
+ push r15
+
+ sub rsp,288
+ and rsp,-64
+
+ shl rdx,6
+ sub rsi,rdi
+ sub r10,rdi
+ add rdx,rdi
+
+
+ mov QWORD[((64+8))+rsp],rsi
+ mov QWORD[((64+16))+rsp],rdx
+
+ mov QWORD[((64+32))+rsp],r8
+ mov QWORD[((64+40))+rsp],r9
+ mov QWORD[((64+48))+rsp],r10
+ mov QWORD[120+rsp],rax
+
+ movaps XMMWORD[128+rsp],xmm6
+ movaps XMMWORD[144+rsp],xmm7
+ movaps XMMWORD[160+rsp],xmm8
+ movaps XMMWORD[176+rsp],xmm9
+ movaps XMMWORD[192+rsp],xmm10
+ movaps XMMWORD[208+rsp],xmm11
+ movaps XMMWORD[224+rsp],xmm12
+ movaps XMMWORD[240+rsp],xmm13
+ movaps XMMWORD[256+rsp],xmm14
+ movaps XMMWORD[272+rsp],xmm15
+$L$prologue_avx:
+ vzeroall
+
+ mov r12,rdi
+ lea rdi,[128+rcx]
+ lea r13,[((K256+544))]
+ mov r14d,DWORD[((240-128))+rdi]
+ mov r15,r9
+ mov rsi,r10
+ vmovdqu xmm8,XMMWORD[r8]
+ sub r14,9
+
+ mov eax,DWORD[r15]
+ mov ebx,DWORD[4+r15]
+ mov ecx,DWORD[8+r15]
+ mov edx,DWORD[12+r15]
+ mov r8d,DWORD[16+r15]
+ mov r9d,DWORD[20+r15]
+ mov r10d,DWORD[24+r15]
+ mov r11d,DWORD[28+r15]
+
+ vmovdqa xmm14,XMMWORD[r14*8+r13]
+ vmovdqa xmm13,XMMWORD[16+r14*8+r13]
+ vmovdqa xmm12,XMMWORD[32+r14*8+r13]
+ vmovdqu xmm10,XMMWORD[((0-128))+rdi]
+ jmp NEAR $L$loop_avx
+ALIGN 16
+$L$loop_avx:
+ vmovdqa xmm7,XMMWORD[((K256+512))]
+ vmovdqu xmm0,XMMWORD[r12*1+rsi]
+ vmovdqu xmm1,XMMWORD[16+r12*1+rsi]
+ vmovdqu xmm2,XMMWORD[32+r12*1+rsi]
+ vmovdqu xmm3,XMMWORD[48+r12*1+rsi]
+ vpshufb xmm0,xmm0,xmm7
+ lea rbp,[K256]
+ vpshufb xmm1,xmm1,xmm7
+ vpshufb xmm2,xmm2,xmm7
+ vpaddd xmm4,xmm0,XMMWORD[rbp]
+ vpshufb xmm3,xmm3,xmm7
+ vpaddd xmm5,xmm1,XMMWORD[32+rbp]
+ vpaddd xmm6,xmm2,XMMWORD[64+rbp]
+ vpaddd xmm7,xmm3,XMMWORD[96+rbp]
+ vmovdqa XMMWORD[rsp],xmm4
+ mov r14d,eax
+ vmovdqa XMMWORD[16+rsp],xmm5
+ mov esi,ebx
+ vmovdqa XMMWORD[32+rsp],xmm6
+ xor esi,ecx
+ vmovdqa XMMWORD[48+rsp],xmm7
+ mov r13d,r8d
+ jmp NEAR $L$avx_00_47
+
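+; $L$avx_00_47: AVX counterpart of $L$xop_00_47; schedule update, scalar
+; rounds and the interleaved AES-NI rounds follow the same pattern, with the
+; key-length masks in xmm12/xmm13/xmm14 again selecting the correct
+; vaesenclast output before the ciphertext block is stored.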
+ALIGN 16
+$L$avx_00_47:
+ sub rbp,-16*2*4
+ vmovdqu xmm9,XMMWORD[r12]
+ mov QWORD[((64+0))+rsp],r12
+ vpalignr xmm4,xmm1,xmm0,4
+ shrd r13d,r13d,14
+ mov eax,r14d
+ mov r12d,r9d
+ vpalignr xmm7,xmm3,xmm2,4
+ xor r13d,r8d
+ shrd r14d,r14d,9
+ xor r12d,r10d
+ vpsrld xmm6,xmm4,7
+ shrd r13d,r13d,5
+ xor r14d,eax
+ and r12d,r8d
+ vpaddd xmm0,xmm0,xmm7
+ vpxor xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((16-128))+rdi]
+ xor r13d,r8d
+ add r11d,DWORD[rsp]
+ mov r15d,eax
+ vpsrld xmm7,xmm4,3
+ shrd r14d,r14d,11
+ xor r12d,r10d
+ xor r15d,ebx
+ vpslld xmm5,xmm4,14
+ shrd r13d,r13d,6
+ add r11d,r12d
+ and esi,r15d
+ vpxor xmm4,xmm7,xmm6
+ xor r14d,eax
+ add r11d,r13d
+ xor esi,ebx
+ vpshufd xmm7,xmm3,250
+ add edx,r11d
+ shrd r14d,r14d,2
+ add r11d,esi
+ vpsrld xmm6,xmm6,11
+ mov r13d,edx
+ add r14d,r11d
+ shrd r13d,r13d,14
+ vpxor xmm4,xmm4,xmm5
+ mov r11d,r14d
+ mov r12d,r8d
+ xor r13d,edx
+ vpslld xmm5,xmm5,11
+ shrd r14d,r14d,9
+ xor r12d,r9d
+ shrd r13d,r13d,5
+ vpxor xmm4,xmm4,xmm6
+ xor r14d,r11d
+ and r12d,edx
+ vpxor xmm9,xmm9,xmm8
+ xor r13d,edx
+ vpsrld xmm6,xmm7,10
+ add r10d,DWORD[4+rsp]
+ mov esi,r11d
+ shrd r14d,r14d,11
+ vpxor xmm4,xmm4,xmm5
+ xor r12d,r9d
+ xor esi,eax
+ shrd r13d,r13d,6
+ vpsrlq xmm7,xmm7,17
+ add r10d,r12d
+ and r15d,esi
+ xor r14d,r11d
+ vpaddd xmm0,xmm0,xmm4
+ add r10d,r13d
+ xor r15d,eax
+ add ecx,r10d
+ vpxor xmm6,xmm6,xmm7
+ shrd r14d,r14d,2
+ add r10d,r15d
+ mov r13d,ecx
+ vpsrlq xmm7,xmm7,2
+ add r14d,r10d
+ shrd r13d,r13d,14
+ mov r10d,r14d
+ vpxor xmm6,xmm6,xmm7
+ mov r12d,edx
+ xor r13d,ecx
+ shrd r14d,r14d,9
+ vpshufd xmm6,xmm6,132
+ xor r12d,r8d
+ shrd r13d,r13d,5
+ xor r14d,r10d
+ vpsrldq xmm6,xmm6,8
+ and r12d,ecx
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((32-128))+rdi]
+ xor r13d,ecx
+ add r9d,DWORD[8+rsp]
+ vpaddd xmm0,xmm0,xmm6
+ mov r15d,r10d
+ shrd r14d,r14d,11
+ xor r12d,r8d
+ vpshufd xmm7,xmm0,80
+ xor r15d,r11d
+ shrd r13d,r13d,6
+ add r9d,r12d
+ vpsrld xmm6,xmm7,10
+ and esi,r15d
+ xor r14d,r10d
+ add r9d,r13d
+ vpsrlq xmm7,xmm7,17
+ xor esi,r11d
+ add ebx,r9d
+ shrd r14d,r14d,2
+ vpxor xmm6,xmm6,xmm7
+ add r9d,esi
+ mov r13d,ebx
+ add r14d,r9d
+ vpsrlq xmm7,xmm7,2
+ shrd r13d,r13d,14
+ mov r9d,r14d
+ mov r12d,ecx
+ vpxor xmm6,xmm6,xmm7
+ xor r13d,ebx
+ shrd r14d,r14d,9
+ xor r12d,edx
+ vpshufd xmm6,xmm6,232
+ shrd r13d,r13d,5
+ xor r14d,r9d
+ and r12d,ebx
+ vpslldq xmm6,xmm6,8
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((48-128))+rdi]
+ xor r13d,ebx
+ add r8d,DWORD[12+rsp]
+ mov esi,r9d
+ vpaddd xmm0,xmm0,xmm6
+ shrd r14d,r14d,11
+ xor r12d,edx
+ xor esi,r10d
+ vpaddd xmm6,xmm0,XMMWORD[rbp]
+ shrd r13d,r13d,6
+ add r8d,r12d
+ and r15d,esi
+ xor r14d,r9d
+ add r8d,r13d
+ xor r15d,r10d
+ add eax,r8d
+ shrd r14d,r14d,2
+ add r8d,r15d
+ mov r13d,eax
+ add r14d,r8d
+ vmovdqa XMMWORD[rsp],xmm6
+ vpalignr xmm4,xmm2,xmm1,4
+ shrd r13d,r13d,14
+ mov r8d,r14d
+ mov r12d,ebx
+ vpalignr xmm7,xmm0,xmm3,4
+ xor r13d,eax
+ shrd r14d,r14d,9
+ xor r12d,ecx
+ vpsrld xmm6,xmm4,7
+ shrd r13d,r13d,5
+ xor r14d,r8d
+ and r12d,eax
+ vpaddd xmm1,xmm1,xmm7
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((64-128))+rdi]
+ xor r13d,eax
+ add edx,DWORD[16+rsp]
+ mov r15d,r8d
+ vpsrld xmm7,xmm4,3
+ shrd r14d,r14d,11
+ xor r12d,ecx
+ xor r15d,r9d
+ vpslld xmm5,xmm4,14
+ shrd r13d,r13d,6
+ add edx,r12d
+ and esi,r15d
+ vpxor xmm4,xmm7,xmm6
+ xor r14d,r8d
+ add edx,r13d
+ xor esi,r9d
+ vpshufd xmm7,xmm0,250
+ add r11d,edx
+ shrd r14d,r14d,2
+ add edx,esi
+ vpsrld xmm6,xmm6,11
+ mov r13d,r11d
+ add r14d,edx
+ shrd r13d,r13d,14
+ vpxor xmm4,xmm4,xmm5
+ mov edx,r14d
+ mov r12d,eax
+ xor r13d,r11d
+ vpslld xmm5,xmm5,11
+ shrd r14d,r14d,9
+ xor r12d,ebx
+ shrd r13d,r13d,5
+ vpxor xmm4,xmm4,xmm6
+ xor r14d,edx
+ and r12d,r11d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((80-128))+rdi]
+ xor r13d,r11d
+ vpsrld xmm6,xmm7,10
+ add ecx,DWORD[20+rsp]
+ mov esi,edx
+ shrd r14d,r14d,11
+ vpxor xmm4,xmm4,xmm5
+ xor r12d,ebx
+ xor esi,r8d
+ shrd r13d,r13d,6
+ vpsrlq xmm7,xmm7,17
+ add ecx,r12d
+ and r15d,esi
+ xor r14d,edx
+ vpaddd xmm1,xmm1,xmm4
+ add ecx,r13d
+ xor r15d,r8d
+ add r10d,ecx
+ vpxor xmm6,xmm6,xmm7
+ shrd r14d,r14d,2
+ add ecx,r15d
+ mov r13d,r10d
+ vpsrlq xmm7,xmm7,2
+ add r14d,ecx
+ shrd r13d,r13d,14
+ mov ecx,r14d
+ vpxor xmm6,xmm6,xmm7
+ mov r12d,r11d
+ xor r13d,r10d
+ shrd r14d,r14d,9
+ vpshufd xmm6,xmm6,132
+ xor r12d,eax
+ shrd r13d,r13d,5
+ xor r14d,ecx
+ vpsrldq xmm6,xmm6,8
+ and r12d,r10d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((96-128))+rdi]
+ xor r13d,r10d
+ add ebx,DWORD[24+rsp]
+ vpaddd xmm1,xmm1,xmm6
+ mov r15d,ecx
+ shrd r14d,r14d,11
+ xor r12d,eax
+ vpshufd xmm7,xmm1,80
+ xor r15d,edx
+ shrd r13d,r13d,6
+ add ebx,r12d
+ vpsrld xmm6,xmm7,10
+ and esi,r15d
+ xor r14d,ecx
+ add ebx,r13d
+ vpsrlq xmm7,xmm7,17
+ xor esi,edx
+ add r9d,ebx
+ shrd r14d,r14d,2
+ vpxor xmm6,xmm6,xmm7
+ add ebx,esi
+ mov r13d,r9d
+ add r14d,ebx
+ vpsrlq xmm7,xmm7,2
+ shrd r13d,r13d,14
+ mov ebx,r14d
+ mov r12d,r10d
+ vpxor xmm6,xmm6,xmm7
+ xor r13d,r9d
+ shrd r14d,r14d,9
+ xor r12d,r11d
+ vpshufd xmm6,xmm6,232
+ shrd r13d,r13d,5
+ xor r14d,ebx
+ and r12d,r9d
+ vpslldq xmm6,xmm6,8
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((112-128))+rdi]
+ xor r13d,r9d
+ add eax,DWORD[28+rsp]
+ mov esi,ebx
+ vpaddd xmm1,xmm1,xmm6
+ shrd r14d,r14d,11
+ xor r12d,r11d
+ xor esi,ecx
+ vpaddd xmm6,xmm1,XMMWORD[32+rbp]
+ shrd r13d,r13d,6
+ add eax,r12d
+ and r15d,esi
+ xor r14d,ebx
+ add eax,r13d
+ xor r15d,ecx
+ add r8d,eax
+ shrd r14d,r14d,2
+ add eax,r15d
+ mov r13d,r8d
+ add r14d,eax
+ vmovdqa XMMWORD[16+rsp],xmm6
+ vpalignr xmm4,xmm3,xmm2,4
+ shrd r13d,r13d,14
+ mov eax,r14d
+ mov r12d,r9d
+ vpalignr xmm7,xmm1,xmm0,4
+ xor r13d,r8d
+ shrd r14d,r14d,9
+ xor r12d,r10d
+ vpsrld xmm6,xmm4,7
+ shrd r13d,r13d,5
+ xor r14d,eax
+ and r12d,r8d
+ vpaddd xmm2,xmm2,xmm7
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((128-128))+rdi]
+ xor r13d,r8d
+ add r11d,DWORD[32+rsp]
+ mov r15d,eax
+ vpsrld xmm7,xmm4,3
+ shrd r14d,r14d,11
+ xor r12d,r10d
+ xor r15d,ebx
+ vpslld xmm5,xmm4,14
+ shrd r13d,r13d,6
+ add r11d,r12d
+ and esi,r15d
+ vpxor xmm4,xmm7,xmm6
+ xor r14d,eax
+ add r11d,r13d
+ xor esi,ebx
+ vpshufd xmm7,xmm1,250
+ add edx,r11d
+ shrd r14d,r14d,2
+ add r11d,esi
+ vpsrld xmm6,xmm6,11
+ mov r13d,edx
+ add r14d,r11d
+ shrd r13d,r13d,14
+ vpxor xmm4,xmm4,xmm5
+ mov r11d,r14d
+ mov r12d,r8d
+ xor r13d,edx
+ vpslld xmm5,xmm5,11
+ shrd r14d,r14d,9
+ xor r12d,r9d
+ shrd r13d,r13d,5
+ vpxor xmm4,xmm4,xmm6
+ xor r14d,r11d
+ and r12d,edx
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((144-128))+rdi]
+ xor r13d,edx
+ vpsrld xmm6,xmm7,10
+ add r10d,DWORD[36+rsp]
+ mov esi,r11d
+ shrd r14d,r14d,11
+ vpxor xmm4,xmm4,xmm5
+ xor r12d,r9d
+ xor esi,eax
+ shrd r13d,r13d,6
+ vpsrlq xmm7,xmm7,17
+ add r10d,r12d
+ and r15d,esi
+ xor r14d,r11d
+ vpaddd xmm2,xmm2,xmm4
+ add r10d,r13d
+ xor r15d,eax
+ add ecx,r10d
+ vpxor xmm6,xmm6,xmm7
+ shrd r14d,r14d,2
+ add r10d,r15d
+ mov r13d,ecx
+ vpsrlq xmm7,xmm7,2
+ add r14d,r10d
+ shrd r13d,r13d,14
+ mov r10d,r14d
+ vpxor xmm6,xmm6,xmm7
+ mov r12d,edx
+ xor r13d,ecx
+ shrd r14d,r14d,9
+ vpshufd xmm6,xmm6,132
+ xor r12d,r8d
+ shrd r13d,r13d,5
+ xor r14d,r10d
+ vpsrldq xmm6,xmm6,8
+ and r12d,ecx
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((160-128))+rdi]
+ xor r13d,ecx
+ add r9d,DWORD[40+rsp]
+ vpaddd xmm2,xmm2,xmm6
+ mov r15d,r10d
+ shrd r14d,r14d,11
+ xor r12d,r8d
+ vpshufd xmm7,xmm2,80
+ xor r15d,r11d
+ shrd r13d,r13d,6
+ add r9d,r12d
+ vpsrld xmm6,xmm7,10
+ and esi,r15d
+ xor r14d,r10d
+ add r9d,r13d
+ vpsrlq xmm7,xmm7,17
+ xor esi,r11d
+ add ebx,r9d
+ shrd r14d,r14d,2
+ vpxor xmm6,xmm6,xmm7
+ add r9d,esi
+ mov r13d,ebx
+ add r14d,r9d
+ vpsrlq xmm7,xmm7,2
+ shrd r13d,r13d,14
+ mov r9d,r14d
+ mov r12d,ecx
+ vpxor xmm6,xmm6,xmm7
+ xor r13d,ebx
+ shrd r14d,r14d,9
+ xor r12d,edx
+ vpshufd xmm6,xmm6,232
+ shrd r13d,r13d,5
+ xor r14d,r9d
+ and r12d,ebx
+ vpslldq xmm6,xmm6,8
+ vaesenclast xmm11,xmm9,xmm10
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((176-128))+rdi]
+ xor r13d,ebx
+ add r8d,DWORD[44+rsp]
+ mov esi,r9d
+ vpaddd xmm2,xmm2,xmm6
+ shrd r14d,r14d,11
+ xor r12d,edx
+ xor esi,r10d
+ vpaddd xmm6,xmm2,XMMWORD[64+rbp]
+ shrd r13d,r13d,6
+ add r8d,r12d
+ and r15d,esi
+ xor r14d,r9d
+ add r8d,r13d
+ xor r15d,r10d
+ add eax,r8d
+ shrd r14d,r14d,2
+ add r8d,r15d
+ mov r13d,eax
+ add r14d,r8d
+ vmovdqa XMMWORD[32+rsp],xmm6
+ vpalignr xmm4,xmm0,xmm3,4
+ shrd r13d,r13d,14
+ mov r8d,r14d
+ mov r12d,ebx
+ vpalignr xmm7,xmm2,xmm1,4
+ xor r13d,eax
+ shrd r14d,r14d,9
+ xor r12d,ecx
+ vpsrld xmm6,xmm4,7
+ shrd r13d,r13d,5
+ xor r14d,r8d
+ and r12d,eax
+ vpaddd xmm3,xmm3,xmm7
+ vpand xmm8,xmm11,xmm12
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((192-128))+rdi]
+ xor r13d,eax
+ add edx,DWORD[48+rsp]
+ mov r15d,r8d
+ vpsrld xmm7,xmm4,3
+ shrd r14d,r14d,11
+ xor r12d,ecx
+ xor r15d,r9d
+ vpslld xmm5,xmm4,14
+ shrd r13d,r13d,6
+ add edx,r12d
+ and esi,r15d
+ vpxor xmm4,xmm7,xmm6
+ xor r14d,r8d
+ add edx,r13d
+ xor esi,r9d
+ vpshufd xmm7,xmm2,250
+ add r11d,edx
+ shrd r14d,r14d,2
+ add edx,esi
+ vpsrld xmm6,xmm6,11
+ mov r13d,r11d
+ add r14d,edx
+ shrd r13d,r13d,14
+ vpxor xmm4,xmm4,xmm5
+ mov edx,r14d
+ mov r12d,eax
+ xor r13d,r11d
+ vpslld xmm5,xmm5,11
+ shrd r14d,r14d,9
+ xor r12d,ebx
+ shrd r13d,r13d,5
+ vpxor xmm4,xmm4,xmm6
+ xor r14d,edx
+ and r12d,r11d
+ vaesenclast xmm11,xmm9,xmm10
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((208-128))+rdi]
+ xor r13d,r11d
+ vpsrld xmm6,xmm7,10
+ add ecx,DWORD[52+rsp]
+ mov esi,edx
+ shrd r14d,r14d,11
+ vpxor xmm4,xmm4,xmm5
+ xor r12d,ebx
+ xor esi,r8d
+ shrd r13d,r13d,6
+ vpsrlq xmm7,xmm7,17
+ add ecx,r12d
+ and r15d,esi
+ xor r14d,edx
+ vpaddd xmm3,xmm3,xmm4
+ add ecx,r13d
+ xor r15d,r8d
+ add r10d,ecx
+ vpxor xmm6,xmm6,xmm7
+ shrd r14d,r14d,2
+ add ecx,r15d
+ mov r13d,r10d
+ vpsrlq xmm7,xmm7,2
+ add r14d,ecx
+ shrd r13d,r13d,14
+ mov ecx,r14d
+ vpxor xmm6,xmm6,xmm7
+ mov r12d,r11d
+ xor r13d,r10d
+ shrd r14d,r14d,9
+ vpshufd xmm6,xmm6,132
+ xor r12d,eax
+ shrd r13d,r13d,5
+ xor r14d,ecx
+ vpsrldq xmm6,xmm6,8
+ and r12d,r10d
+ vpand xmm11,xmm11,xmm13
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((224-128))+rdi]
+ xor r13d,r10d
+ add ebx,DWORD[56+rsp]
+ vpaddd xmm3,xmm3,xmm6
+ mov r15d,ecx
+ shrd r14d,r14d,11
+ xor r12d,eax
+ vpshufd xmm7,xmm3,80
+ xor r15d,edx
+ shrd r13d,r13d,6
+ add ebx,r12d
+ vpsrld xmm6,xmm7,10
+ and esi,r15d
+ xor r14d,ecx
+ add ebx,r13d
+ vpsrlq xmm7,xmm7,17
+ xor esi,edx
+ add r9d,ebx
+ shrd r14d,r14d,2
+ vpxor xmm6,xmm6,xmm7
+ add ebx,esi
+ mov r13d,r9d
+ add r14d,ebx
+ vpsrlq xmm7,xmm7,2
+ shrd r13d,r13d,14
+ mov ebx,r14d
+ mov r12d,r10d
+ vpxor xmm6,xmm6,xmm7
+ xor r13d,r9d
+ shrd r14d,r14d,9
+ xor r12d,r11d
+ vpshufd xmm6,xmm6,232
+ shrd r13d,r13d,5
+ xor r14d,ebx
+ and r12d,r9d
+ vpslldq xmm6,xmm6,8
+ vpor xmm8,xmm8,xmm11
+ vaesenclast xmm11,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((0-128))+rdi]
+ xor r13d,r9d
+ add eax,DWORD[60+rsp]
+ mov esi,ebx
+ vpaddd xmm3,xmm3,xmm6
+ shrd r14d,r14d,11
+ xor r12d,r11d
+ xor esi,ecx
+ vpaddd xmm6,xmm3,XMMWORD[96+rbp]
+ shrd r13d,r13d,6
+ add eax,r12d
+ and r15d,esi
+ xor r14d,ebx
+ add eax,r13d
+ xor r15d,ecx
+ add r8d,eax
+ shrd r14d,r14d,2
+ add eax,r15d
+ mov r13d,r8d
+ add r14d,eax
+ vmovdqa XMMWORD[48+rsp],xmm6
+ mov r12,QWORD[((64+0))+rsp]
+ vpand xmm11,xmm11,xmm14
+ mov r15,QWORD[((64+8))+rsp]
+ vpor xmm8,xmm8,xmm11
+ vmovdqu XMMWORD[r12*1+r15],xmm8
+ lea r12,[16+r12]
+ cmp BYTE[131+rbp],0
+ jne NEAR $L$avx_00_47
+ vmovdqu xmm9,XMMWORD[r12]
+ mov QWORD[((64+0))+rsp],r12
+ shrd r13d,r13d,14
+ mov eax,r14d
+ mov r12d,r9d
+ xor r13d,r8d
+ shrd r14d,r14d,9
+ xor r12d,r10d
+ shrd r13d,r13d,5
+ xor r14d,eax
+ and r12d,r8d
+ vpxor xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((16-128))+rdi]
+ xor r13d,r8d
+ add r11d,DWORD[rsp]
+ mov r15d,eax
+ shrd r14d,r14d,11
+ xor r12d,r10d
+ xor r15d,ebx
+ shrd r13d,r13d,6
+ add r11d,r12d
+ and esi,r15d
+ xor r14d,eax
+ add r11d,r13d
+ xor esi,ebx
+ add edx,r11d
+ shrd r14d,r14d,2
+ add r11d,esi
+ mov r13d,edx
+ add r14d,r11d
+ shrd r13d,r13d,14
+ mov r11d,r14d
+ mov r12d,r8d
+ xor r13d,edx
+ shrd r14d,r14d,9
+ xor r12d,r9d
+ shrd r13d,r13d,5
+ xor r14d,r11d
+ and r12d,edx
+ vpxor xmm9,xmm9,xmm8
+ xor r13d,edx
+ add r10d,DWORD[4+rsp]
+ mov esi,r11d
+ shrd r14d,r14d,11
+ xor r12d,r9d
+ xor esi,eax
+ shrd r13d,r13d,6
+ add r10d,r12d
+ and r15d,esi
+ xor r14d,r11d
+ add r10d,r13d
+ xor r15d,eax
+ add ecx,r10d
+ shrd r14d,r14d,2
+ add r10d,r15d
+ mov r13d,ecx
+ add r14d,r10d
+ shrd r13d,r13d,14
+ mov r10d,r14d
+ mov r12d,edx
+ xor r13d,ecx
+ shrd r14d,r14d,9
+ xor r12d,r8d
+ shrd r13d,r13d,5
+ xor r14d,r10d
+ and r12d,ecx
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((32-128))+rdi]
+ xor r13d,ecx
+ add r9d,DWORD[8+rsp]
+ mov r15d,r10d
+ shrd r14d,r14d,11
+ xor r12d,r8d
+ xor r15d,r11d
+ shrd r13d,r13d,6
+ add r9d,r12d
+ and esi,r15d
+ xor r14d,r10d
+ add r9d,r13d
+ xor esi,r11d
+ add ebx,r9d
+ shrd r14d,r14d,2
+ add r9d,esi
+ mov r13d,ebx
+ add r14d,r9d
+ shrd r13d,r13d,14
+ mov r9d,r14d
+ mov r12d,ecx
+ xor r13d,ebx
+ shrd r14d,r14d,9
+ xor r12d,edx
+ shrd r13d,r13d,5
+ xor r14d,r9d
+ and r12d,ebx
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((48-128))+rdi]
+ xor r13d,ebx
+ add r8d,DWORD[12+rsp]
+ mov esi,r9d
+ shrd r14d,r14d,11
+ xor r12d,edx
+ xor esi,r10d
+ shrd r13d,r13d,6
+ add r8d,r12d
+ and r15d,esi
+ xor r14d,r9d
+ add r8d,r13d
+ xor r15d,r10d
+ add eax,r8d
+ shrd r14d,r14d,2
+ add r8d,r15d
+ mov r13d,eax
+ add r14d,r8d
+ shrd r13d,r13d,14
+ mov r8d,r14d
+ mov r12d,ebx
+ xor r13d,eax
+ shrd r14d,r14d,9
+ xor r12d,ecx
+ shrd r13d,r13d,5
+ xor r14d,r8d
+ and r12d,eax
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((64-128))+rdi]
+ xor r13d,eax
+ add edx,DWORD[16+rsp]
+ mov r15d,r8d
+ shrd r14d,r14d,11
+ xor r12d,ecx
+ xor r15d,r9d
+ shrd r13d,r13d,6
+ add edx,r12d
+ and esi,r15d
+ xor r14d,r8d
+ add edx,r13d
+ xor esi,r9d
+ add r11d,edx
+ shrd r14d,r14d,2
+ add edx,esi
+ mov r13d,r11d
+ add r14d,edx
+ shrd r13d,r13d,14
+ mov edx,r14d
+ mov r12d,eax
+ xor r13d,r11d
+ shrd r14d,r14d,9
+ xor r12d,ebx
+ shrd r13d,r13d,5
+ xor r14d,edx
+ and r12d,r11d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((80-128))+rdi]
+ xor r13d,r11d
+ add ecx,DWORD[20+rsp]
+ mov esi,edx
+ shrd r14d,r14d,11
+ xor r12d,ebx
+ xor esi,r8d
+ shrd r13d,r13d,6
+ add ecx,r12d
+ and r15d,esi
+ xor r14d,edx
+ add ecx,r13d
+ xor r15d,r8d
+ add r10d,ecx
+ shrd r14d,r14d,2
+ add ecx,r15d
+ mov r13d,r10d
+ add r14d,ecx
+ shrd r13d,r13d,14
+ mov ecx,r14d
+ mov r12d,r11d
+ xor r13d,r10d
+ shrd r14d,r14d,9
+ xor r12d,eax
+ shrd r13d,r13d,5
+ xor r14d,ecx
+ and r12d,r10d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((96-128))+rdi]
+ xor r13d,r10d
+ add ebx,DWORD[24+rsp]
+ mov r15d,ecx
+ shrd r14d,r14d,11
+ xor r12d,eax
+ xor r15d,edx
+ shrd r13d,r13d,6
+ add ebx,r12d
+ and esi,r15d
+ xor r14d,ecx
+ add ebx,r13d
+ xor esi,edx
+ add r9d,ebx
+ shrd r14d,r14d,2
+ add ebx,esi
+ mov r13d,r9d
+ add r14d,ebx
+ shrd r13d,r13d,14
+ mov ebx,r14d
+ mov r12d,r10d
+ xor r13d,r9d
+ shrd r14d,r14d,9
+ xor r12d,r11d
+ shrd r13d,r13d,5
+ xor r14d,ebx
+ and r12d,r9d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((112-128))+rdi]
+ xor r13d,r9d
+ add eax,DWORD[28+rsp]
+ mov esi,ebx
+ shrd r14d,r14d,11
+ xor r12d,r11d
+ xor esi,ecx
+ shrd r13d,r13d,6
+ add eax,r12d
+ and r15d,esi
+ xor r14d,ebx
+ add eax,r13d
+ xor r15d,ecx
+ add r8d,eax
+ shrd r14d,r14d,2
+ add eax,r15d
+ mov r13d,r8d
+ add r14d,eax
+ shrd r13d,r13d,14
+ mov eax,r14d
+ mov r12d,r9d
+ xor r13d,r8d
+ shrd r14d,r14d,9
+ xor r12d,r10d
+ shrd r13d,r13d,5
+ xor r14d,eax
+ and r12d,r8d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((128-128))+rdi]
+ xor r13d,r8d
+ add r11d,DWORD[32+rsp]
+ mov r15d,eax
+ shrd r14d,r14d,11
+ xor r12d,r10d
+ xor r15d,ebx
+ shrd r13d,r13d,6
+ add r11d,r12d
+ and esi,r15d
+ xor r14d,eax
+ add r11d,r13d
+ xor esi,ebx
+ add edx,r11d
+ shrd r14d,r14d,2
+ add r11d,esi
+ mov r13d,edx
+ add r14d,r11d
+ shrd r13d,r13d,14
+ mov r11d,r14d
+ mov r12d,r8d
+ xor r13d,edx
+ shrd r14d,r14d,9
+ xor r12d,r9d
+ shrd r13d,r13d,5
+ xor r14d,r11d
+ and r12d,edx
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((144-128))+rdi]
+ xor r13d,edx
+ add r10d,DWORD[36+rsp]
+ mov esi,r11d
+ shrd r14d,r14d,11
+ xor r12d,r9d
+ xor esi,eax
+ shrd r13d,r13d,6
+ add r10d,r12d
+ and r15d,esi
+ xor r14d,r11d
+ add r10d,r13d
+ xor r15d,eax
+ add ecx,r10d
+ shrd r14d,r14d,2
+ add r10d,r15d
+ mov r13d,ecx
+ add r14d,r10d
+ shrd r13d,r13d,14
+ mov r10d,r14d
+ mov r12d,edx
+ xor r13d,ecx
+ shrd r14d,r14d,9
+ xor r12d,r8d
+ shrd r13d,r13d,5
+ xor r14d,r10d
+ and r12d,ecx
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((160-128))+rdi]
+ xor r13d,ecx
+ add r9d,DWORD[40+rsp]
+ mov r15d,r10d
+ shrd r14d,r14d,11
+ xor r12d,r8d
+ xor r15d,r11d
+ shrd r13d,r13d,6
+ add r9d,r12d
+ and esi,r15d
+ xor r14d,r10d
+ add r9d,r13d
+ xor esi,r11d
+ add ebx,r9d
+ shrd r14d,r14d,2
+ add r9d,esi
+ mov r13d,ebx
+ add r14d,r9d
+ shrd r13d,r13d,14
+ mov r9d,r14d
+ mov r12d,ecx
+ xor r13d,ebx
+ shrd r14d,r14d,9
+ xor r12d,edx
+ shrd r13d,r13d,5
+ xor r14d,r9d
+ and r12d,ebx
+ vaesenclast xmm11,xmm9,xmm10
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((176-128))+rdi]
+ xor r13d,ebx
+ add r8d,DWORD[44+rsp]
+ mov esi,r9d
+ shrd r14d,r14d,11
+ xor r12d,edx
+ xor esi,r10d
+ shrd r13d,r13d,6
+ add r8d,r12d
+ and r15d,esi
+ xor r14d,r9d
+ add r8d,r13d
+ xor r15d,r10d
+ add eax,r8d
+ shrd r14d,r14d,2
+ add r8d,r15d
+ mov r13d,eax
+ add r14d,r8d
+ shrd r13d,r13d,14
+ mov r8d,r14d
+ mov r12d,ebx
+ xor r13d,eax
+ shrd r14d,r14d,9
+ xor r12d,ecx
+ shrd r13d,r13d,5
+ xor r14d,r8d
+ and r12d,eax
+ vpand xmm8,xmm11,xmm12
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((192-128))+rdi]
+ xor r13d,eax
+ add edx,DWORD[48+rsp]
+ mov r15d,r8d
+ shrd r14d,r14d,11
+ xor r12d,ecx
+ xor r15d,r9d
+ shrd r13d,r13d,6
+ add edx,r12d
+ and esi,r15d
+ xor r14d,r8d
+ add edx,r13d
+ xor esi,r9d
+ add r11d,edx
+ shrd r14d,r14d,2
+ add edx,esi
+ mov r13d,r11d
+ add r14d,edx
+ shrd r13d,r13d,14
+ mov edx,r14d
+ mov r12d,eax
+ xor r13d,r11d
+ shrd r14d,r14d,9
+ xor r12d,ebx
+ shrd r13d,r13d,5
+ xor r14d,edx
+ and r12d,r11d
+ vaesenclast xmm11,xmm9,xmm10
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((208-128))+rdi]
+ xor r13d,r11d
+ add ecx,DWORD[52+rsp]
+ mov esi,edx
+ shrd r14d,r14d,11
+ xor r12d,ebx
+ xor esi,r8d
+ shrd r13d,r13d,6
+ add ecx,r12d
+ and r15d,esi
+ xor r14d,edx
+ add ecx,r13d
+ xor r15d,r8d
+ add r10d,ecx
+ shrd r14d,r14d,2
+ add ecx,r15d
+ mov r13d,r10d
+ add r14d,ecx
+ shrd r13d,r13d,14
+ mov ecx,r14d
+ mov r12d,r11d
+ xor r13d,r10d
+ shrd r14d,r14d,9
+ xor r12d,eax
+ shrd r13d,r13d,5
+ xor r14d,ecx
+ and r12d,r10d
+ vpand xmm11,xmm11,xmm13
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((224-128))+rdi]
+ xor r13d,r10d
+ add ebx,DWORD[56+rsp]
+ mov r15d,ecx
+ shrd r14d,r14d,11
+ xor r12d,eax
+ xor r15d,edx
+ shrd r13d,r13d,6
+ add ebx,r12d
+ and esi,r15d
+ xor r14d,ecx
+ add ebx,r13d
+ xor esi,edx
+ add r9d,ebx
+ shrd r14d,r14d,2
+ add ebx,esi
+ mov r13d,r9d
+ add r14d,ebx
+ shrd r13d,r13d,14
+ mov ebx,r14d
+ mov r12d,r10d
+ xor r13d,r9d
+ shrd r14d,r14d,9
+ xor r12d,r11d
+ shrd r13d,r13d,5
+ xor r14d,ebx
+ and r12d,r9d
+ vpor xmm8,xmm8,xmm11
+ vaesenclast xmm11,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((0-128))+rdi]
+ xor r13d,r9d
+ add eax,DWORD[60+rsp]
+ mov esi,ebx
+ shrd r14d,r14d,11
+ xor r12d,r11d
+ xor esi,ecx
+ shrd r13d,r13d,6
+ add eax,r12d
+ and r15d,esi
+ xor r14d,ebx
+ add eax,r13d
+ xor r15d,ecx
+ add r8d,eax
+ shrd r14d,r14d,2
+ add eax,r15d
+ mov r13d,r8d
+ add r14d,eax
+ mov r12,QWORD[((64+0))+rsp]
+ mov r13,QWORD[((64+8))+rsp]
+ mov r15,QWORD[((64+40))+rsp]
+ mov rsi,QWORD[((64+48))+rsp]
+
+ vpand xmm11,xmm11,xmm14
+ mov eax,r14d
+ vpor xmm8,xmm8,xmm11
+ vmovdqu XMMWORD[r13*1+r12],xmm8
+ lea r12,[16+r12]
+
+ add eax,DWORD[r15]
+ add ebx,DWORD[4+r15]
+ add ecx,DWORD[8+r15]
+ add edx,DWORD[12+r15]
+ add r8d,DWORD[16+r15]
+ add r9d,DWORD[20+r15]
+ add r10d,DWORD[24+r15]
+ add r11d,DWORD[28+r15]
+
+ cmp r12,QWORD[((64+16))+rsp]
+
+ mov DWORD[r15],eax
+ mov DWORD[4+r15],ebx
+ mov DWORD[8+r15],ecx
+ mov DWORD[12+r15],edx
+ mov DWORD[16+r15],r8d
+ mov DWORD[20+r15],r9d
+ mov DWORD[24+r15],r10d
+ mov DWORD[28+r15],r11d
+ jb NEAR $L$loop_avx
+
+ mov r8,QWORD[((64+32))+rsp]
+ mov rsi,QWORD[120+rsp]
+
+ vmovdqu XMMWORD[r8],xmm8
+ vzeroall
+ movaps xmm6,XMMWORD[128+rsp]
+ movaps xmm7,XMMWORD[144+rsp]
+ movaps xmm8,XMMWORD[160+rsp]
+ movaps xmm9,XMMWORD[176+rsp]
+ movaps xmm10,XMMWORD[192+rsp]
+ movaps xmm11,XMMWORD[208+rsp]
+ movaps xmm12,XMMWORD[224+rsp]
+ movaps xmm13,XMMWORD[240+rsp]
+ movaps xmm14,XMMWORD[256+rsp]
+ movaps xmm15,XMMWORD[272+rsp]
+ mov r15,QWORD[((-48))+rsi]
+
+ mov r14,QWORD[((-40))+rsi]
+
+ mov r13,QWORD[((-32))+rsi]
+
+ mov r12,QWORD[((-24))+rsi]
+
+ mov rbp,QWORD[((-16))+rsi]
+
+ mov rbx,QWORD[((-8))+rsi]
+
+ lea rsp,[rsi]
+
+$L$epilogue_avx:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_cbc_sha256_enc_avx:
+
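+; aesni_cbc_sha256_enc_avx2: AVX2 code path.  The message schedule lives in
+; 256-bit ymm registers (two 16-byte chunks per register via vinserti128,
+; which is why every K256 row is stored twice) and the scalar rounds use the
+; BMI rorx/andn forms.  xmm15 caches the input pointer and the output-input
+; offset, presumably because the general-purpose registers are exhausted.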
+ALIGN 64
+aesni_cbc_sha256_enc_avx2:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_cbc_sha256_enc_avx2:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD[40+rsp]
+ mov r9,QWORD[48+rsp]
+
+
+
+$L$avx2_shortcut:
+ mov r10,QWORD[56+rsp]
+ mov rax,rsp
+
+ push rbx
+
+ push rbp
+
+ push r12
+
+ push r13
+
+ push r14
+
+ push r15
+
+ sub rsp,736
+ and rsp,-256*4
+ add rsp,448
+
+ shl rdx,6
+ sub rsi,rdi
+ sub r10,rdi
+ add rdx,rdi
+
+
+
+ mov QWORD[((64+16))+rsp],rdx
+
+ mov QWORD[((64+32))+rsp],r8
+ mov QWORD[((64+40))+rsp],r9
+ mov QWORD[((64+48))+rsp],r10
+ mov QWORD[120+rsp],rax
+
+ movaps XMMWORD[128+rsp],xmm6
+ movaps XMMWORD[144+rsp],xmm7
+ movaps XMMWORD[160+rsp],xmm8
+ movaps XMMWORD[176+rsp],xmm9
+ movaps XMMWORD[192+rsp],xmm10
+ movaps XMMWORD[208+rsp],xmm11
+ movaps XMMWORD[224+rsp],xmm12
+ movaps XMMWORD[240+rsp],xmm13
+ movaps XMMWORD[256+rsp],xmm14
+ movaps XMMWORD[272+rsp],xmm15
+$L$prologue_avx2:
+ vzeroall
+
+ mov r13,rdi
+ vpinsrq xmm15,xmm15,rsi,1
+ lea rdi,[128+rcx]
+ lea r12,[((K256+544))]
+ mov r14d,DWORD[((240-128))+rdi]
+ mov r15,r9
+ mov rsi,r10
+ vmovdqu xmm8,XMMWORD[r8]
+ lea r14,[((-9))+r14]
+
+ vmovdqa xmm14,XMMWORD[r14*8+r12]
+ vmovdqa xmm13,XMMWORD[16+r14*8+r12]
+ vmovdqa xmm12,XMMWORD[32+r14*8+r12]
+
+ sub r13,-16*4
+ mov eax,DWORD[r15]
+ lea r12,[r13*1+rsi]
+ mov ebx,DWORD[4+r15]
+ cmp r13,rdx
+ mov ecx,DWORD[8+r15]
+ cmove r12,rsp
+ mov edx,DWORD[12+r15]
+ mov r8d,DWORD[16+r15]
+ mov r9d,DWORD[20+r15]
+ mov r10d,DWORD[24+r15]
+ mov r11d,DWORD[28+r15]
+ vmovdqu xmm10,XMMWORD[((0-128))+rdi]
+ jmp NEAR $L$oop_avx2
+ALIGN 16
+$L$oop_avx2:
+ vmovdqa ymm7,YMMWORD[((K256+512))]
+ vmovdqu xmm0,XMMWORD[((-64+0))+r13*1+rsi]
+ vmovdqu xmm1,XMMWORD[((-64+16))+r13*1+rsi]
+ vmovdqu xmm2,XMMWORD[((-64+32))+r13*1+rsi]
+ vmovdqu xmm3,XMMWORD[((-64+48))+r13*1+rsi]
+
+ vinserti128 ymm0,ymm0,XMMWORD[r12],1
+ vinserti128 ymm1,ymm1,XMMWORD[16+r12],1
+ vpshufb ymm0,ymm0,ymm7
+ vinserti128 ymm2,ymm2,XMMWORD[32+r12],1
+ vpshufb ymm1,ymm1,ymm7
+ vinserti128 ymm3,ymm3,XMMWORD[48+r12],1
+
+ lea rbp,[K256]
+ vpshufb ymm2,ymm2,ymm7
+ lea r13,[((-64))+r13]
+ vpaddd ymm4,ymm0,YMMWORD[rbp]
+ vpshufb ymm3,ymm3,ymm7
+ vpaddd ymm5,ymm1,YMMWORD[32+rbp]
+ vpaddd ymm6,ymm2,YMMWORD[64+rbp]
+ vpaddd ymm7,ymm3,YMMWORD[96+rbp]
+ vmovdqa YMMWORD[rsp],ymm4
+ xor r14d,r14d
+ vmovdqa YMMWORD[32+rsp],ymm5
+ lea rsp,[((-64))+rsp]
+ mov esi,ebx
+ vmovdqa YMMWORD[rsp],ymm6
+ xor esi,ecx
+ vmovdqa YMMWORD[32+rsp],ymm7
+ mov r12d,r9d
+ sub rbp,-16*2*4
+ jmp NEAR $L$avx2_00_47
+
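+; $L$avx2_00_47: as in the xmm paths, but rsp slides down by 64 bytes as each
+; new group of schedule values is stored (lea rsp,[-64+rsp]) and rbp advances
+; through the doubled K256 table until the sentinel byte at 3+rbp reads zero.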
+ALIGN 16
+$L$avx2_00_47:
+ vmovdqu xmm9,XMMWORD[r13]
+ vpinsrq xmm15,xmm15,r13,0
+ lea rsp,[((-64))+rsp]
+ vpalignr ymm4,ymm1,ymm0,4
+ add r11d,DWORD[((0+128))+rsp]
+ and r12d,r8d
+ rorx r13d,r8d,25
+ vpalignr ymm7,ymm3,ymm2,4
+ rorx r15d,r8d,11
+ lea eax,[r14*1+rax]
+ lea r11d,[r12*1+r11]
+ vpsrld ymm6,ymm4,7
+ andn r12d,r8d,r10d
+ xor r13d,r15d
+ rorx r14d,r8d,6
+ vpaddd ymm0,ymm0,ymm7
+ lea r11d,[r12*1+r11]
+ xor r13d,r14d
+ mov r15d,eax
+ vpsrld ymm7,ymm4,3
+ rorx r12d,eax,22
+ lea r11d,[r13*1+r11]
+ xor r15d,ebx
+ vpslld ymm5,ymm4,14
+ rorx r14d,eax,13
+ rorx r13d,eax,2
+ lea edx,[r11*1+rdx]
+ vpxor ymm4,ymm7,ymm6
+ and esi,r15d
+ vpxor xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((16-128))+rdi]
+ xor r14d,r12d
+ xor esi,ebx
+ vpshufd ymm7,ymm3,250
+ xor r14d,r13d
+ lea r11d,[rsi*1+r11]
+ mov r12d,r8d
+ vpsrld ymm6,ymm6,11
+ add r10d,DWORD[((4+128))+rsp]
+ and r12d,edx
+ rorx r13d,edx,25
+ vpxor ymm4,ymm4,ymm5
+ rorx esi,edx,11
+ lea r11d,[r14*1+r11]
+ lea r10d,[r12*1+r10]
+ vpslld ymm5,ymm5,11
+ andn r12d,edx,r9d
+ xor r13d,esi
+ rorx r14d,edx,6
+ vpxor ymm4,ymm4,ymm6
+ lea r10d,[r12*1+r10]
+ xor r13d,r14d
+ mov esi,r11d
+ vpsrld ymm6,ymm7,10
+ rorx r12d,r11d,22
+ lea r10d,[r13*1+r10]
+ xor esi,eax
+ vpxor ymm4,ymm4,ymm5
+ rorx r14d,r11d,13
+ rorx r13d,r11d,2
+ lea ecx,[r10*1+rcx]
+ vpsrlq ymm7,ymm7,17
+ and r15d,esi
+ vpxor xmm9,xmm9,xmm8
+ xor r14d,r12d
+ xor r15d,eax
+ vpaddd ymm0,ymm0,ymm4
+ xor r14d,r13d
+ lea r10d,[r15*1+r10]
+ mov r12d,edx
+ vpxor ymm6,ymm6,ymm7
+ add r9d,DWORD[((8+128))+rsp]
+ and r12d,ecx
+ rorx r13d,ecx,25
+ vpsrlq ymm7,ymm7,2
+ rorx r15d,ecx,11
+ lea r10d,[r14*1+r10]
+ lea r9d,[r12*1+r9]
+ vpxor ymm6,ymm6,ymm7
+ andn r12d,ecx,r8d
+ xor r13d,r15d
+ rorx r14d,ecx,6
+ vpshufd ymm6,ymm6,132
+ lea r9d,[r12*1+r9]
+ xor r13d,r14d
+ mov r15d,r10d
+ vpsrldq ymm6,ymm6,8
+ rorx r12d,r10d,22
+ lea r9d,[r13*1+r9]
+ xor r15d,r11d
+ vpaddd ymm0,ymm0,ymm6
+ rorx r14d,r10d,13
+ rorx r13d,r10d,2
+ lea ebx,[r9*1+rbx]
+ vpshufd ymm7,ymm0,80
+ and esi,r15d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((32-128))+rdi]
+ xor r14d,r12d
+ xor esi,r11d
+ vpsrld ymm6,ymm7,10
+ xor r14d,r13d
+ lea r9d,[rsi*1+r9]
+ mov r12d,ecx
+ vpsrlq ymm7,ymm7,17
+ add r8d,DWORD[((12+128))+rsp]
+ and r12d,ebx
+ rorx r13d,ebx,25
+ vpxor ymm6,ymm6,ymm7
+ rorx esi,ebx,11
+ lea r9d,[r14*1+r9]
+ lea r8d,[r12*1+r8]
+ vpsrlq ymm7,ymm7,2
+ andn r12d,ebx,edx
+ xor r13d,esi
+ rorx r14d,ebx,6
+ vpxor ymm6,ymm6,ymm7
+ lea r8d,[r12*1+r8]
+ xor r13d,r14d
+ mov esi,r9d
+ vpshufd ymm6,ymm6,232
+ rorx r12d,r9d,22
+ lea r8d,[r13*1+r8]
+ xor esi,r10d
+ vpslldq ymm6,ymm6,8
+ rorx r14d,r9d,13
+ rorx r13d,r9d,2
+ lea eax,[r8*1+rax]
+ vpaddd ymm0,ymm0,ymm6
+ and r15d,esi
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((48-128))+rdi]
+ xor r14d,r12d
+ xor r15d,r10d
+ vpaddd ymm6,ymm0,YMMWORD[rbp]
+ xor r14d,r13d
+ lea r8d,[r15*1+r8]
+ mov r12d,ebx
+ vmovdqa YMMWORD[rsp],ymm6
+ vpalignr ymm4,ymm2,ymm1,4
+ add edx,DWORD[((32+128))+rsp]
+ and r12d,eax
+ rorx r13d,eax,25
+ vpalignr ymm7,ymm0,ymm3,4
+ rorx r15d,eax,11
+ lea r8d,[r14*1+r8]
+ lea edx,[r12*1+rdx]
+ vpsrld ymm6,ymm4,7
+ andn r12d,eax,ecx
+ xor r13d,r15d
+ rorx r14d,eax,6
+ vpaddd ymm1,ymm1,ymm7
+ lea edx,[r12*1+rdx]
+ xor r13d,r14d
+ mov r15d,r8d
+ vpsrld ymm7,ymm4,3
+ rorx r12d,r8d,22
+ lea edx,[r13*1+rdx]
+ xor r15d,r9d
+ vpslld ymm5,ymm4,14
+ rorx r14d,r8d,13
+ rorx r13d,r8d,2
+ lea r11d,[rdx*1+r11]
+ vpxor ymm4,ymm7,ymm6
+ and esi,r15d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((64-128))+rdi]
+ xor r14d,r12d
+ xor esi,r9d
+ vpshufd ymm7,ymm0,250
+ xor r14d,r13d
+ lea edx,[rsi*1+rdx]
+ mov r12d,eax
+ vpsrld ymm6,ymm6,11
+ add ecx,DWORD[((36+128))+rsp]
+ and r12d,r11d
+ rorx r13d,r11d,25
+ vpxor ymm4,ymm4,ymm5
+ rorx esi,r11d,11
+ lea edx,[r14*1+rdx]
+ lea ecx,[r12*1+rcx]
+ vpslld ymm5,ymm5,11
+ andn r12d,r11d,ebx
+ xor r13d,esi
+ rorx r14d,r11d,6
+ vpxor ymm4,ymm4,ymm6
+ lea ecx,[r12*1+rcx]
+ xor r13d,r14d
+ mov esi,edx
+ vpsrld ymm6,ymm7,10
+ rorx r12d,edx,22
+ lea ecx,[r13*1+rcx]
+ xor esi,r8d
+ vpxor ymm4,ymm4,ymm5
+ rorx r14d,edx,13
+ rorx r13d,edx,2
+ lea r10d,[rcx*1+r10]
+ vpsrlq ymm7,ymm7,17
+ and r15d,esi
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((80-128))+rdi]
+ xor r14d,r12d
+ xor r15d,r8d
+ vpaddd ymm1,ymm1,ymm4
+ xor r14d,r13d
+ lea ecx,[r15*1+rcx]
+ mov r12d,r11d
+ vpxor ymm6,ymm6,ymm7
+ add ebx,DWORD[((40+128))+rsp]
+ and r12d,r10d
+ rorx r13d,r10d,25
+ vpsrlq ymm7,ymm7,2
+ rorx r15d,r10d,11
+ lea ecx,[r14*1+rcx]
+ lea ebx,[r12*1+rbx]
+ vpxor ymm6,ymm6,ymm7
+ andn r12d,r10d,eax
+ xor r13d,r15d
+ rorx r14d,r10d,6
+ vpshufd ymm6,ymm6,132
+ lea ebx,[r12*1+rbx]
+ xor r13d,r14d
+ mov r15d,ecx
+ vpsrldq ymm6,ymm6,8
+ rorx r12d,ecx,22
+ lea ebx,[r13*1+rbx]
+ xor r15d,edx
+ vpaddd ymm1,ymm1,ymm6
+ rorx r14d,ecx,13
+ rorx r13d,ecx,2
+ lea r9d,[rbx*1+r9]
+ vpshufd ymm7,ymm1,80
+ and esi,r15d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((96-128))+rdi]
+ xor r14d,r12d
+ xor esi,edx
+ vpsrld ymm6,ymm7,10
+ xor r14d,r13d
+ lea ebx,[rsi*1+rbx]
+ mov r12d,r10d
+ vpsrlq ymm7,ymm7,17
+ add eax,DWORD[((44+128))+rsp]
+ and r12d,r9d
+ rorx r13d,r9d,25
+ vpxor ymm6,ymm6,ymm7
+ rorx esi,r9d,11
+ lea ebx,[r14*1+rbx]
+ lea eax,[r12*1+rax]
+ vpsrlq ymm7,ymm7,2
+ andn r12d,r9d,r11d
+ xor r13d,esi
+ rorx r14d,r9d,6
+ vpxor ymm6,ymm6,ymm7
+ lea eax,[r12*1+rax]
+ xor r13d,r14d
+ mov esi,ebx
+ vpshufd ymm6,ymm6,232
+ rorx r12d,ebx,22
+ lea eax,[r13*1+rax]
+ xor esi,ecx
+ vpslldq ymm6,ymm6,8
+ rorx r14d,ebx,13
+ rorx r13d,ebx,2
+ lea r8d,[rax*1+r8]
+ vpaddd ymm1,ymm1,ymm6
+ and r15d,esi
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((112-128))+rdi]
+ xor r14d,r12d
+ xor r15d,ecx
+ vpaddd ymm6,ymm1,YMMWORD[32+rbp]
+ xor r14d,r13d
+ lea eax,[r15*1+rax]
+ mov r12d,r9d
+ vmovdqa YMMWORD[32+rsp],ymm6
+ lea rsp,[((-64))+rsp]
+ vpalignr ymm4,ymm3,ymm2,4
+ add r11d,DWORD[((0+128))+rsp]
+ and r12d,r8d
+ rorx r13d,r8d,25
+ vpalignr ymm7,ymm1,ymm0,4
+ rorx r15d,r8d,11
+ lea eax,[r14*1+rax]
+ lea r11d,[r12*1+r11]
+ vpsrld ymm6,ymm4,7
+ andn r12d,r8d,r10d
+ xor r13d,r15d
+ rorx r14d,r8d,6
+ vpaddd ymm2,ymm2,ymm7
+ lea r11d,[r12*1+r11]
+ xor r13d,r14d
+ mov r15d,eax
+ vpsrld ymm7,ymm4,3
+ rorx r12d,eax,22
+ lea r11d,[r13*1+r11]
+ xor r15d,ebx
+ vpslld ymm5,ymm4,14
+ rorx r14d,eax,13
+ rorx r13d,eax,2
+ lea edx,[r11*1+rdx]
+ vpxor ymm4,ymm7,ymm6
+ and esi,r15d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((128-128))+rdi]
+ xor r14d,r12d
+ xor esi,ebx
+ vpshufd ymm7,ymm1,250
+ xor r14d,r13d
+ lea r11d,[rsi*1+r11]
+ mov r12d,r8d
+ vpsrld ymm6,ymm6,11
+ add r10d,DWORD[((4+128))+rsp]
+ and r12d,edx
+ rorx r13d,edx,25
+ vpxor ymm4,ymm4,ymm5
+ rorx esi,edx,11
+ lea r11d,[r14*1+r11]
+ lea r10d,[r12*1+r10]
+ vpslld ymm5,ymm5,11
+ andn r12d,edx,r9d
+ xor r13d,esi
+ rorx r14d,edx,6
+ vpxor ymm4,ymm4,ymm6
+ lea r10d,[r12*1+r10]
+ xor r13d,r14d
+ mov esi,r11d
+ vpsrld ymm6,ymm7,10
+ rorx r12d,r11d,22
+ lea r10d,[r13*1+r10]
+ xor esi,eax
+ vpxor ymm4,ymm4,ymm5
+ rorx r14d,r11d,13
+ rorx r13d,r11d,2
+ lea ecx,[r10*1+rcx]
+ vpsrlq ymm7,ymm7,17
+ and r15d,esi
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((144-128))+rdi]
+ xor r14d,r12d
+ xor r15d,eax
+ vpaddd ymm2,ymm2,ymm4
+ xor r14d,r13d
+ lea r10d,[r15*1+r10]
+ mov r12d,edx
+ vpxor ymm6,ymm6,ymm7
+ add r9d,DWORD[((8+128))+rsp]
+ and r12d,ecx
+ rorx r13d,ecx,25
+ vpsrlq ymm7,ymm7,2
+ rorx r15d,ecx,11
+ lea r10d,[r14*1+r10]
+ lea r9d,[r12*1+r9]
+ vpxor ymm6,ymm6,ymm7
+ andn r12d,ecx,r8d
+ xor r13d,r15d
+ rorx r14d,ecx,6
+ vpshufd ymm6,ymm6,132
+ lea r9d,[r12*1+r9]
+ xor r13d,r14d
+ mov r15d,r10d
+ vpsrldq ymm6,ymm6,8
+ rorx r12d,r10d,22
+ lea r9d,[r13*1+r9]
+ xor r15d,r11d
+ vpaddd ymm2,ymm2,ymm6
+ rorx r14d,r10d,13
+ rorx r13d,r10d,2
+ lea ebx,[r9*1+rbx]
+ vpshufd ymm7,ymm2,80
+ and esi,r15d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((160-128))+rdi]
+ xor r14d,r12d
+ xor esi,r11d
+ vpsrld ymm6,ymm7,10
+ xor r14d,r13d
+ lea r9d,[rsi*1+r9]
+ mov r12d,ecx
+ vpsrlq ymm7,ymm7,17
+ add r8d,DWORD[((12+128))+rsp]
+ and r12d,ebx
+ rorx r13d,ebx,25
+ vpxor ymm6,ymm6,ymm7
+ rorx esi,ebx,11
+ lea r9d,[r14*1+r9]
+ lea r8d,[r12*1+r8]
+ vpsrlq ymm7,ymm7,2
+ andn r12d,ebx,edx
+ xor r13d,esi
+ rorx r14d,ebx,6
+ vpxor ymm6,ymm6,ymm7
+ lea r8d,[r12*1+r8]
+ xor r13d,r14d
+ mov esi,r9d
+ vpshufd ymm6,ymm6,232
+ rorx r12d,r9d,22
+ lea r8d,[r13*1+r8]
+ xor esi,r10d
+ vpslldq ymm6,ymm6,8
+ rorx r14d,r9d,13
+ rorx r13d,r9d,2
+ lea eax,[r8*1+rax]
+ vpaddd ymm2,ymm2,ymm6
+ and r15d,esi
+ vaesenclast xmm11,xmm9,xmm10
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((176-128))+rdi]
+ xor r14d,r12d
+ xor r15d,r10d
+ vpaddd ymm6,ymm2,YMMWORD[64+rbp]
+ xor r14d,r13d
+ lea r8d,[r15*1+r8]
+ mov r12d,ebx
+ vmovdqa YMMWORD[rsp],ymm6
+ vpalignr ymm4,ymm0,ymm3,4
+ add edx,DWORD[((32+128))+rsp]
+ and r12d,eax
+ rorx r13d,eax,25
+ vpalignr ymm7,ymm2,ymm1,4
+ rorx r15d,eax,11
+ lea r8d,[r14*1+r8]
+ lea edx,[r12*1+rdx]
+ vpsrld ymm6,ymm4,7
+ andn r12d,eax,ecx
+ xor r13d,r15d
+ rorx r14d,eax,6
+ vpaddd ymm3,ymm3,ymm7
+ lea edx,[r12*1+rdx]
+ xor r13d,r14d
+ mov r15d,r8d
+ vpsrld ymm7,ymm4,3
+ rorx r12d,r8d,22
+ lea edx,[r13*1+rdx]
+ xor r15d,r9d
+ vpslld ymm5,ymm4,14
+ rorx r14d,r8d,13
+ rorx r13d,r8d,2
+ lea r11d,[rdx*1+r11]
+ vpxor ymm4,ymm7,ymm6
+ and esi,r15d
+ vpand xmm8,xmm11,xmm12
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((192-128))+rdi]
+ xor r14d,r12d
+ xor esi,r9d
+ vpshufd ymm7,ymm2,250
+ xor r14d,r13d
+ lea edx,[rsi*1+rdx]
+ mov r12d,eax
+ vpsrld ymm6,ymm6,11
+ add ecx,DWORD[((36+128))+rsp]
+ and r12d,r11d
+ rorx r13d,r11d,25
+ vpxor ymm4,ymm4,ymm5
+ rorx esi,r11d,11
+ lea edx,[r14*1+rdx]
+ lea ecx,[r12*1+rcx]
+ vpslld ymm5,ymm5,11
+ andn r12d,r11d,ebx
+ xor r13d,esi
+ rorx r14d,r11d,6
+ vpxor ymm4,ymm4,ymm6
+ lea ecx,[r12*1+rcx]
+ xor r13d,r14d
+ mov esi,edx
+ vpsrld ymm6,ymm7,10
+ rorx r12d,edx,22
+ lea ecx,[r13*1+rcx]
+ xor esi,r8d
+ vpxor ymm4,ymm4,ymm5
+ rorx r14d,edx,13
+ rorx r13d,edx,2
+ lea r10d,[rcx*1+r10]
+ vpsrlq ymm7,ymm7,17
+ and r15d,esi
+ vaesenclast xmm11,xmm9,xmm10
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((208-128))+rdi]
+ xor r14d,r12d
+ xor r15d,r8d
+ vpaddd ymm3,ymm3,ymm4
+ xor r14d,r13d
+ lea ecx,[r15*1+rcx]
+ mov r12d,r11d
+ vpxor ymm6,ymm6,ymm7
+ add ebx,DWORD[((40+128))+rsp]
+ and r12d,r10d
+ rorx r13d,r10d,25
+ vpsrlq ymm7,ymm7,2
+ rorx r15d,r10d,11
+ lea ecx,[r14*1+rcx]
+ lea ebx,[r12*1+rbx]
+ vpxor ymm6,ymm6,ymm7
+ andn r12d,r10d,eax
+ xor r13d,r15d
+ rorx r14d,r10d,6
+ vpshufd ymm6,ymm6,132
+ lea ebx,[r12*1+rbx]
+ xor r13d,r14d
+ mov r15d,ecx
+ vpsrldq ymm6,ymm6,8
+ rorx r12d,ecx,22
+ lea ebx,[r13*1+rbx]
+ xor r15d,edx
+ vpaddd ymm3,ymm3,ymm6
+ rorx r14d,ecx,13
+ rorx r13d,ecx,2
+ lea r9d,[rbx*1+r9]
+ vpshufd ymm7,ymm3,80
+ and esi,r15d
+ vpand xmm11,xmm11,xmm13
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((224-128))+rdi]
+ xor r14d,r12d
+ xor esi,edx
+ vpsrld ymm6,ymm7,10
+ xor r14d,r13d
+ lea ebx,[rsi*1+rbx]
+ mov r12d,r10d
+ vpsrlq ymm7,ymm7,17
+ add eax,DWORD[((44+128))+rsp]
+ and r12d,r9d
+ rorx r13d,r9d,25
+ vpxor ymm6,ymm6,ymm7
+ rorx esi,r9d,11
+ lea ebx,[r14*1+rbx]
+ lea eax,[r12*1+rax]
+ vpsrlq ymm7,ymm7,2
+ andn r12d,r9d,r11d
+ xor r13d,esi
+ rorx r14d,r9d,6
+ vpxor ymm6,ymm6,ymm7
+ lea eax,[r12*1+rax]
+ xor r13d,r14d
+ mov esi,ebx
+ vpshufd ymm6,ymm6,232
+ rorx r12d,ebx,22
+ lea eax,[r13*1+rax]
+ xor esi,ecx
+ vpslldq ymm6,ymm6,8
+ rorx r14d,ebx,13
+ rorx r13d,ebx,2
+ lea r8d,[rax*1+r8]
+ vpaddd ymm3,ymm3,ymm6
+ and r15d,esi
+ vpor xmm8,xmm8,xmm11
+ vaesenclast xmm11,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((0-128))+rdi]
+ xor r14d,r12d
+ xor r15d,ecx
+ vpaddd ymm6,ymm3,YMMWORD[96+rbp]
+ xor r14d,r13d
+ lea eax,[r15*1+rax]
+ mov r12d,r9d
+ vmovdqa YMMWORD[32+rsp],ymm6
+ vmovq r13,xmm15
+ vpextrq r15,xmm15,1
+ vpand xmm11,xmm11,xmm14
+ vpor xmm8,xmm8,xmm11
+ vmovdqu XMMWORD[r13*1+r15],xmm8
+ lea r13,[16+r13]
+ lea rbp,[128+rbp]
+ cmp BYTE[3+rbp],0
+ jne NEAR $L$avx2_00_47
+ vmovdqu xmm9,XMMWORD[r13]
+ vpinsrq xmm15,xmm15,r13,0
+ add r11d,DWORD[((0+64))+rsp]
+ and r12d,r8d
+ rorx r13d,r8d,25
+ rorx r15d,r8d,11
+ lea eax,[r14*1+rax]
+ lea r11d,[r12*1+r11]
+ andn r12d,r8d,r10d
+ xor r13d,r15d
+ rorx r14d,r8d,6
+ lea r11d,[r12*1+r11]
+ xor r13d,r14d
+ mov r15d,eax
+ rorx r12d,eax,22
+ lea r11d,[r13*1+r11]
+ xor r15d,ebx
+ rorx r14d,eax,13
+ rorx r13d,eax,2
+ lea edx,[r11*1+rdx]
+ and esi,r15d
+ vpxor xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((16-128))+rdi]
+ xor r14d,r12d
+ xor esi,ebx
+ xor r14d,r13d
+ lea r11d,[rsi*1+r11]
+ mov r12d,r8d
+ add r10d,DWORD[((4+64))+rsp]
+ and r12d,edx
+ rorx r13d,edx,25
+ rorx esi,edx,11
+ lea r11d,[r14*1+r11]
+ lea r10d,[r12*1+r10]
+ andn r12d,edx,r9d
+ xor r13d,esi
+ rorx r14d,edx,6
+ lea r10d,[r12*1+r10]
+ xor r13d,r14d
+ mov esi,r11d
+ rorx r12d,r11d,22
+ lea r10d,[r13*1+r10]
+ xor esi,eax
+ rorx r14d,r11d,13
+ rorx r13d,r11d,2
+ lea ecx,[r10*1+rcx]
+ and r15d,esi
+ vpxor xmm9,xmm9,xmm8
+ xor r14d,r12d
+ xor r15d,eax
+ xor r14d,r13d
+ lea r10d,[r15*1+r10]
+ mov r12d,edx
+ add r9d,DWORD[((8+64))+rsp]
+ and r12d,ecx
+ rorx r13d,ecx,25
+ rorx r15d,ecx,11
+ lea r10d,[r14*1+r10]
+ lea r9d,[r12*1+r9]
+ andn r12d,ecx,r8d
+ xor r13d,r15d
+ rorx r14d,ecx,6
+ lea r9d,[r12*1+r9]
+ xor r13d,r14d
+ mov r15d,r10d
+ rorx r12d,r10d,22
+ lea r9d,[r13*1+r9]
+ xor r15d,r11d
+ rorx r14d,r10d,13
+ rorx r13d,r10d,2
+ lea ebx,[r9*1+rbx]
+ and esi,r15d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((32-128))+rdi]
+ xor r14d,r12d
+ xor esi,r11d
+ xor r14d,r13d
+ lea r9d,[rsi*1+r9]
+ mov r12d,ecx
+ add r8d,DWORD[((12+64))+rsp]
+ and r12d,ebx
+ rorx r13d,ebx,25
+ rorx esi,ebx,11
+ lea r9d,[r14*1+r9]
+ lea r8d,[r12*1+r8]
+ andn r12d,ebx,edx
+ xor r13d,esi
+ rorx r14d,ebx,6
+ lea r8d,[r12*1+r8]
+ xor r13d,r14d
+ mov esi,r9d
+ rorx r12d,r9d,22
+ lea r8d,[r13*1+r8]
+ xor esi,r10d
+ rorx r14d,r9d,13
+ rorx r13d,r9d,2
+ lea eax,[r8*1+rax]
+ and r15d,esi
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((48-128))+rdi]
+ xor r14d,r12d
+ xor r15d,r10d
+ xor r14d,r13d
+ lea r8d,[r15*1+r8]
+ mov r12d,ebx
+ add edx,DWORD[((32+64))+rsp]
+ and r12d,eax
+ rorx r13d,eax,25
+ rorx r15d,eax,11
+ lea r8d,[r14*1+r8]
+ lea edx,[r12*1+rdx]
+ andn r12d,eax,ecx
+ xor r13d,r15d
+ rorx r14d,eax,6
+ lea edx,[r12*1+rdx]
+ xor r13d,r14d
+ mov r15d,r8d
+ rorx r12d,r8d,22
+ lea edx,[r13*1+rdx]
+ xor r15d,r9d
+ rorx r14d,r8d,13
+ rorx r13d,r8d,2
+ lea r11d,[rdx*1+r11]
+ and esi,r15d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((64-128))+rdi]
+ xor r14d,r12d
+ xor esi,r9d
+ xor r14d,r13d
+ lea edx,[rsi*1+rdx]
+ mov r12d,eax
+ add ecx,DWORD[((36+64))+rsp]
+ and r12d,r11d
+ rorx r13d,r11d,25
+ rorx esi,r11d,11
+ lea edx,[r14*1+rdx]
+ lea ecx,[r12*1+rcx]
+ andn r12d,r11d,ebx
+ xor r13d,esi
+ rorx r14d,r11d,6
+ lea ecx,[r12*1+rcx]
+ xor r13d,r14d
+ mov esi,edx
+ rorx r12d,edx,22
+ lea ecx,[r13*1+rcx]
+ xor esi,r8d
+ rorx r14d,edx,13
+ rorx r13d,edx,2
+ lea r10d,[rcx*1+r10]
+ and r15d,esi
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((80-128))+rdi]
+ xor r14d,r12d
+ xor r15d,r8d
+ xor r14d,r13d
+ lea ecx,[r15*1+rcx]
+ mov r12d,r11d
+ add ebx,DWORD[((40+64))+rsp]
+ and r12d,r10d
+ rorx r13d,r10d,25
+ rorx r15d,r10d,11
+ lea ecx,[r14*1+rcx]
+ lea ebx,[r12*1+rbx]
+ andn r12d,r10d,eax
+ xor r13d,r15d
+ rorx r14d,r10d,6
+ lea ebx,[r12*1+rbx]
+ xor r13d,r14d
+ mov r15d,ecx
+ rorx r12d,ecx,22
+ lea ebx,[r13*1+rbx]
+ xor r15d,edx
+ rorx r14d,ecx,13
+ rorx r13d,ecx,2
+ lea r9d,[rbx*1+r9]
+ and esi,r15d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((96-128))+rdi]
+ xor r14d,r12d
+ xor esi,edx
+ xor r14d,r13d
+ lea ebx,[rsi*1+rbx]
+ mov r12d,r10d
+ add eax,DWORD[((44+64))+rsp]
+ and r12d,r9d
+ rorx r13d,r9d,25
+ rorx esi,r9d,11
+ lea ebx,[r14*1+rbx]
+ lea eax,[r12*1+rax]
+ andn r12d,r9d,r11d
+ xor r13d,esi
+ rorx r14d,r9d,6
+ lea eax,[r12*1+rax]
+ xor r13d,r14d
+ mov esi,ebx
+ rorx r12d,ebx,22
+ lea eax,[r13*1+rax]
+ xor esi,ecx
+ rorx r14d,ebx,13
+ rorx r13d,ebx,2
+ lea r8d,[rax*1+r8]
+ and r15d,esi
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((112-128))+rdi]
+ xor r14d,r12d
+ xor r15d,ecx
+ xor r14d,r13d
+ lea eax,[r15*1+rax]
+ mov r12d,r9d
+ add r11d,DWORD[rsp]
+ and r12d,r8d
+ rorx r13d,r8d,25
+ rorx r15d,r8d,11
+ lea eax,[r14*1+rax]
+ lea r11d,[r12*1+r11]
+ andn r12d,r8d,r10d
+ xor r13d,r15d
+ rorx r14d,r8d,6
+ lea r11d,[r12*1+r11]
+ xor r13d,r14d
+ mov r15d,eax
+ rorx r12d,eax,22
+ lea r11d,[r13*1+r11]
+ xor r15d,ebx
+ rorx r14d,eax,13
+ rorx r13d,eax,2
+ lea edx,[r11*1+rdx]
+ and esi,r15d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((128-128))+rdi]
+ xor r14d,r12d
+ xor esi,ebx
+ xor r14d,r13d
+ lea r11d,[rsi*1+r11]
+ mov r12d,r8d
+ add r10d,DWORD[4+rsp]
+ and r12d,edx
+ rorx r13d,edx,25
+ rorx esi,edx,11
+ lea r11d,[r14*1+r11]
+ lea r10d,[r12*1+r10]
+ andn r12d,edx,r9d
+ xor r13d,esi
+ rorx r14d,edx,6
+ lea r10d,[r12*1+r10]
+ xor r13d,r14d
+ mov esi,r11d
+ rorx r12d,r11d,22
+ lea r10d,[r13*1+r10]
+ xor esi,eax
+ rorx r14d,r11d,13
+ rorx r13d,r11d,2
+ lea ecx,[r10*1+rcx]
+ and r15d,esi
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((144-128))+rdi]
+ xor r14d,r12d
+ xor r15d,eax
+ xor r14d,r13d
+ lea r10d,[r15*1+r10]
+ mov r12d,edx
+ add r9d,DWORD[8+rsp]
+ and r12d,ecx
+ rorx r13d,ecx,25
+ rorx r15d,ecx,11
+ lea r10d,[r14*1+r10]
+ lea r9d,[r12*1+r9]
+ andn r12d,ecx,r8d
+ xor r13d,r15d
+ rorx r14d,ecx,6
+ lea r9d,[r12*1+r9]
+ xor r13d,r14d
+ mov r15d,r10d
+ rorx r12d,r10d,22
+ lea r9d,[r13*1+r9]
+ xor r15d,r11d
+ rorx r14d,r10d,13
+ rorx r13d,r10d,2
+ lea ebx,[r9*1+rbx]
+ and esi,r15d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((160-128))+rdi]
+ xor r14d,r12d
+ xor esi,r11d
+ xor r14d,r13d
+ lea r9d,[rsi*1+r9]
+ mov r12d,ecx
+ add r8d,DWORD[12+rsp]
+ and r12d,ebx
+ rorx r13d,ebx,25
+ rorx esi,ebx,11
+ lea r9d,[r14*1+r9]
+ lea r8d,[r12*1+r8]
+ andn r12d,ebx,edx
+ xor r13d,esi
+ rorx r14d,ebx,6
+ lea r8d,[r12*1+r8]
+ xor r13d,r14d
+ mov esi,r9d
+ rorx r12d,r9d,22
+ lea r8d,[r13*1+r8]
+ xor esi,r10d
+ rorx r14d,r9d,13
+ rorx r13d,r9d,2
+ lea eax,[r8*1+rax]
+ and r15d,esi
+ vaesenclast xmm11,xmm9,xmm10
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((176-128))+rdi]
+ xor r14d,r12d
+ xor r15d,r10d
+ xor r14d,r13d
+ lea r8d,[r15*1+r8]
+ mov r12d,ebx
+ add edx,DWORD[32+rsp]
+ and r12d,eax
+ rorx r13d,eax,25
+ rorx r15d,eax,11
+ lea r8d,[r14*1+r8]
+ lea edx,[r12*1+rdx]
+ andn r12d,eax,ecx
+ xor r13d,r15d
+ rorx r14d,eax,6
+ lea edx,[r12*1+rdx]
+ xor r13d,r14d
+ mov r15d,r8d
+ rorx r12d,r8d,22
+ lea edx,[r13*1+rdx]
+ xor r15d,r9d
+ rorx r14d,r8d,13
+ rorx r13d,r8d,2
+ lea r11d,[rdx*1+r11]
+ and esi,r15d
+ vpand xmm8,xmm11,xmm12
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((192-128))+rdi]
+ xor r14d,r12d
+ xor esi,r9d
+ xor r14d,r13d
+ lea edx,[rsi*1+rdx]
+ mov r12d,eax
+ add ecx,DWORD[36+rsp]
+ and r12d,r11d
+ rorx r13d,r11d,25
+ rorx esi,r11d,11
+ lea edx,[r14*1+rdx]
+ lea ecx,[r12*1+rcx]
+ andn r12d,r11d,ebx
+ xor r13d,esi
+ rorx r14d,r11d,6
+ lea ecx,[r12*1+rcx]
+ xor r13d,r14d
+ mov esi,edx
+ rorx r12d,edx,22
+ lea ecx,[r13*1+rcx]
+ xor esi,r8d
+ rorx r14d,edx,13
+ rorx r13d,edx,2
+ lea r10d,[rcx*1+r10]
+ and r15d,esi
+ vaesenclast xmm11,xmm9,xmm10
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((208-128))+rdi]
+ xor r14d,r12d
+ xor r15d,r8d
+ xor r14d,r13d
+ lea ecx,[r15*1+rcx]
+ mov r12d,r11d
+ add ebx,DWORD[40+rsp]
+ and r12d,r10d
+ rorx r13d,r10d,25
+ rorx r15d,r10d,11
+ lea ecx,[r14*1+rcx]
+ lea ebx,[r12*1+rbx]
+ andn r12d,r10d,eax
+ xor r13d,r15d
+ rorx r14d,r10d,6
+ lea ebx,[r12*1+rbx]
+ xor r13d,r14d
+ mov r15d,ecx
+ rorx r12d,ecx,22
+ lea ebx,[r13*1+rbx]
+ xor r15d,edx
+ rorx r14d,ecx,13
+ rorx r13d,ecx,2
+ lea r9d,[rbx*1+r9]
+ and esi,r15d
+ vpand xmm11,xmm11,xmm13
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((224-128))+rdi]
+ xor r14d,r12d
+ xor esi,edx
+ xor r14d,r13d
+ lea ebx,[rsi*1+rbx]
+ mov r12d,r10d
+ add eax,DWORD[44+rsp]
+ and r12d,r9d
+ rorx r13d,r9d,25
+ rorx esi,r9d,11
+ lea ebx,[r14*1+rbx]
+ lea eax,[r12*1+rax]
+ andn r12d,r9d,r11d
+ xor r13d,esi
+ rorx r14d,r9d,6
+ lea eax,[r12*1+rax]
+ xor r13d,r14d
+ mov esi,ebx
+ rorx r12d,ebx,22
+ lea eax,[r13*1+rax]
+ xor esi,ecx
+ rorx r14d,ebx,13
+ rorx r13d,ebx,2
+ lea r8d,[rax*1+r8]
+ and r15d,esi
+ vpor xmm8,xmm8,xmm11
+ vaesenclast xmm11,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((0-128))+rdi]
+ xor r14d,r12d
+ xor r15d,ecx
+ xor r14d,r13d
+ lea eax,[r15*1+rax]
+ mov r12d,r9d
+ vpextrq r12,xmm15,1
+ vmovq r13,xmm15
+ mov r15,QWORD[552+rsp]
+ add eax,r14d
+ lea rbp,[448+rsp]
+
+ vpand xmm11,xmm11,xmm14
+ vpor xmm8,xmm8,xmm11
+ vmovdqu XMMWORD[r13*1+r12],xmm8
+ lea r13,[16+r13]
+
+ add eax,DWORD[r15]
+ add ebx,DWORD[4+r15]
+ add ecx,DWORD[8+r15]
+ add edx,DWORD[12+r15]
+ add r8d,DWORD[16+r15]
+ add r9d,DWORD[20+r15]
+ add r10d,DWORD[24+r15]
+ add r11d,DWORD[28+r15]
+
+ mov DWORD[r15],eax
+ mov DWORD[4+r15],ebx
+ mov DWORD[8+r15],ecx
+ mov DWORD[12+r15],edx
+ mov DWORD[16+r15],r8d
+ mov DWORD[20+r15],r9d
+ mov DWORD[24+r15],r10d
+ mov DWORD[28+r15],r11d
+
+ cmp r13,QWORD[80+rbp]
+ je NEAR $L$done_avx2
+
+ xor r14d,r14d
+ mov esi,ebx
+ mov r12d,r9d
+ xor esi,ecx
+ jmp NEAR $L$ower_avx2
+ALIGN 16
+$L$ower_avx2:
+ vmovdqu xmm9,XMMWORD[r13]
+ vpinsrq xmm15,xmm15,r13,0
+ add r11d,DWORD[((0+16))+rbp]
+ and r12d,r8d
+ rorx r13d,r8d,25
+ rorx r15d,r8d,11
+ lea eax,[r14*1+rax]
+ lea r11d,[r12*1+r11]
+ andn r12d,r8d,r10d
+ xor r13d,r15d
+ rorx r14d,r8d,6
+ lea r11d,[r12*1+r11]
+ xor r13d,r14d
+ mov r15d,eax
+ rorx r12d,eax,22
+ lea r11d,[r13*1+r11]
+ xor r15d,ebx
+ rorx r14d,eax,13
+ rorx r13d,eax,2
+ lea edx,[r11*1+rdx]
+ and esi,r15d
+ vpxor xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((16-128))+rdi]
+ xor r14d,r12d
+ xor esi,ebx
+ xor r14d,r13d
+ lea r11d,[rsi*1+r11]
+ mov r12d,r8d
+ add r10d,DWORD[((4+16))+rbp]
+ and r12d,edx
+ rorx r13d,edx,25
+ rorx esi,edx,11
+ lea r11d,[r14*1+r11]
+ lea r10d,[r12*1+r10]
+ andn r12d,edx,r9d
+ xor r13d,esi
+ rorx r14d,edx,6
+ lea r10d,[r12*1+r10]
+ xor r13d,r14d
+ mov esi,r11d
+ rorx r12d,r11d,22
+ lea r10d,[r13*1+r10]
+ xor esi,eax
+ rorx r14d,r11d,13
+ rorx r13d,r11d,2
+ lea ecx,[r10*1+rcx]
+ and r15d,esi
+ vpxor xmm9,xmm9,xmm8
+ xor r14d,r12d
+ xor r15d,eax
+ xor r14d,r13d
+ lea r10d,[r15*1+r10]
+ mov r12d,edx
+ add r9d,DWORD[((8+16))+rbp]
+ and r12d,ecx
+ rorx r13d,ecx,25
+ rorx r15d,ecx,11
+ lea r10d,[r14*1+r10]
+ lea r9d,[r12*1+r9]
+ andn r12d,ecx,r8d
+ xor r13d,r15d
+ rorx r14d,ecx,6
+ lea r9d,[r12*1+r9]
+ xor r13d,r14d
+ mov r15d,r10d
+ rorx r12d,r10d,22
+ lea r9d,[r13*1+r9]
+ xor r15d,r11d
+ rorx r14d,r10d,13
+ rorx r13d,r10d,2
+ lea ebx,[r9*1+rbx]
+ and esi,r15d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((32-128))+rdi]
+ xor r14d,r12d
+ xor esi,r11d
+ xor r14d,r13d
+ lea r9d,[rsi*1+r9]
+ mov r12d,ecx
+ add r8d,DWORD[((12+16))+rbp]
+ and r12d,ebx
+ rorx r13d,ebx,25
+ rorx esi,ebx,11
+ lea r9d,[r14*1+r9]
+ lea r8d,[r12*1+r8]
+ andn r12d,ebx,edx
+ xor r13d,esi
+ rorx r14d,ebx,6
+ lea r8d,[r12*1+r8]
+ xor r13d,r14d
+ mov esi,r9d
+ rorx r12d,r9d,22
+ lea r8d,[r13*1+r8]
+ xor esi,r10d
+ rorx r14d,r9d,13
+ rorx r13d,r9d,2
+ lea eax,[r8*1+rax]
+ and r15d,esi
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((48-128))+rdi]
+ xor r14d,r12d
+ xor r15d,r10d
+ xor r14d,r13d
+ lea r8d,[r15*1+r8]
+ mov r12d,ebx
+ add edx,DWORD[((32+16))+rbp]
+ and r12d,eax
+ rorx r13d,eax,25
+ rorx r15d,eax,11
+ lea r8d,[r14*1+r8]
+ lea edx,[r12*1+rdx]
+ andn r12d,eax,ecx
+ xor r13d,r15d
+ rorx r14d,eax,6
+ lea edx,[r12*1+rdx]
+ xor r13d,r14d
+ mov r15d,r8d
+ rorx r12d,r8d,22
+ lea edx,[r13*1+rdx]
+ xor r15d,r9d
+ rorx r14d,r8d,13
+ rorx r13d,r8d,2
+ lea r11d,[rdx*1+r11]
+ and esi,r15d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((64-128))+rdi]
+ xor r14d,r12d
+ xor esi,r9d
+ xor r14d,r13d
+ lea edx,[rsi*1+rdx]
+ mov r12d,eax
+ add ecx,DWORD[((36+16))+rbp]
+ and r12d,r11d
+ rorx r13d,r11d,25
+ rorx esi,r11d,11
+ lea edx,[r14*1+rdx]
+ lea ecx,[r12*1+rcx]
+ andn r12d,r11d,ebx
+ xor r13d,esi
+ rorx r14d,r11d,6
+ lea ecx,[r12*1+rcx]
+ xor r13d,r14d
+ mov esi,edx
+ rorx r12d,edx,22
+ lea ecx,[r13*1+rcx]
+ xor esi,r8d
+ rorx r14d,edx,13
+ rorx r13d,edx,2
+ lea r10d,[rcx*1+r10]
+ and r15d,esi
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((80-128))+rdi]
+ xor r14d,r12d
+ xor r15d,r8d
+ xor r14d,r13d
+ lea ecx,[r15*1+rcx]
+ mov r12d,r11d
+ add ebx,DWORD[((40+16))+rbp]
+ and r12d,r10d
+ rorx r13d,r10d,25
+ rorx r15d,r10d,11
+ lea ecx,[r14*1+rcx]
+ lea ebx,[r12*1+rbx]
+ andn r12d,r10d,eax
+ xor r13d,r15d
+ rorx r14d,r10d,6
+ lea ebx,[r12*1+rbx]
+ xor r13d,r14d
+ mov r15d,ecx
+ rorx r12d,ecx,22
+ lea ebx,[r13*1+rbx]
+ xor r15d,edx
+ rorx r14d,ecx,13
+ rorx r13d,ecx,2
+ lea r9d,[rbx*1+r9]
+ and esi,r15d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((96-128))+rdi]
+ xor r14d,r12d
+ xor esi,edx
+ xor r14d,r13d
+ lea ebx,[rsi*1+rbx]
+ mov r12d,r10d
+ add eax,DWORD[((44+16))+rbp]
+ and r12d,r9d
+ rorx r13d,r9d,25
+ rorx esi,r9d,11
+ lea ebx,[r14*1+rbx]
+ lea eax,[r12*1+rax]
+ andn r12d,r9d,r11d
+ xor r13d,esi
+ rorx r14d,r9d,6
+ lea eax,[r12*1+rax]
+ xor r13d,r14d
+ mov esi,ebx
+ rorx r12d,ebx,22
+ lea eax,[r13*1+rax]
+ xor esi,ecx
+ rorx r14d,ebx,13
+ rorx r13d,ebx,2
+ lea r8d,[rax*1+r8]
+ and r15d,esi
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((112-128))+rdi]
+ xor r14d,r12d
+ xor r15d,ecx
+ xor r14d,r13d
+ lea eax,[r15*1+rax]
+ mov r12d,r9d
+ lea rbp,[((-64))+rbp]
+ add r11d,DWORD[((0+16))+rbp]
+ and r12d,r8d
+ rorx r13d,r8d,25
+ rorx r15d,r8d,11
+ lea eax,[r14*1+rax]
+ lea r11d,[r12*1+r11]
+ andn r12d,r8d,r10d
+ xor r13d,r15d
+ rorx r14d,r8d,6
+ lea r11d,[r12*1+r11]
+ xor r13d,r14d
+ mov r15d,eax
+ rorx r12d,eax,22
+ lea r11d,[r13*1+r11]
+ xor r15d,ebx
+ rorx r14d,eax,13
+ rorx r13d,eax,2
+ lea edx,[r11*1+rdx]
+ and esi,r15d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((128-128))+rdi]
+ xor r14d,r12d
+ xor esi,ebx
+ xor r14d,r13d
+ lea r11d,[rsi*1+r11]
+ mov r12d,r8d
+ add r10d,DWORD[((4+16))+rbp]
+ and r12d,edx
+ rorx r13d,edx,25
+ rorx esi,edx,11
+ lea r11d,[r14*1+r11]
+ lea r10d,[r12*1+r10]
+ andn r12d,edx,r9d
+ xor r13d,esi
+ rorx r14d,edx,6
+ lea r10d,[r12*1+r10]
+ xor r13d,r14d
+ mov esi,r11d
+ rorx r12d,r11d,22
+ lea r10d,[r13*1+r10]
+ xor esi,eax
+ rorx r14d,r11d,13
+ rorx r13d,r11d,2
+ lea ecx,[r10*1+rcx]
+ and r15d,esi
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((144-128))+rdi]
+ xor r14d,r12d
+ xor r15d,eax
+ xor r14d,r13d
+ lea r10d,[r15*1+r10]
+ mov r12d,edx
+ add r9d,DWORD[((8+16))+rbp]
+ and r12d,ecx
+ rorx r13d,ecx,25
+ rorx r15d,ecx,11
+ lea r10d,[r14*1+r10]
+ lea r9d,[r12*1+r9]
+ andn r12d,ecx,r8d
+ xor r13d,r15d
+ rorx r14d,ecx,6
+ lea r9d,[r12*1+r9]
+ xor r13d,r14d
+ mov r15d,r10d
+ rorx r12d,r10d,22
+ lea r9d,[r13*1+r9]
+ xor r15d,r11d
+ rorx r14d,r10d,13
+ rorx r13d,r10d,2
+ lea ebx,[r9*1+rbx]
+ and esi,r15d
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((160-128))+rdi]
+ xor r14d,r12d
+ xor esi,r11d
+ xor r14d,r13d
+ lea r9d,[rsi*1+r9]
+ mov r12d,ecx
+ add r8d,DWORD[((12+16))+rbp]
+ and r12d,ebx
+ rorx r13d,ebx,25
+ rorx esi,ebx,11
+ lea r9d,[r14*1+r9]
+ lea r8d,[r12*1+r8]
+ andn r12d,ebx,edx
+ xor r13d,esi
+ rorx r14d,ebx,6
+ lea r8d,[r12*1+r8]
+ xor r13d,r14d
+ mov esi,r9d
+ rorx r12d,r9d,22
+ lea r8d,[r13*1+r8]
+ xor esi,r10d
+ rorx r14d,r9d,13
+ rorx r13d,r9d,2
+ lea eax,[r8*1+rax]
+ and r15d,esi
+ vaesenclast xmm11,xmm9,xmm10
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((176-128))+rdi]
+ xor r14d,r12d
+ xor r15d,r10d
+ xor r14d,r13d
+ lea r8d,[r15*1+r8]
+ mov r12d,ebx
+ add edx,DWORD[((32+16))+rbp]
+ and r12d,eax
+ rorx r13d,eax,25
+ rorx r15d,eax,11
+ lea r8d,[r14*1+r8]
+ lea edx,[r12*1+rdx]
+ andn r12d,eax,ecx
+ xor r13d,r15d
+ rorx r14d,eax,6
+ lea edx,[r12*1+rdx]
+ xor r13d,r14d
+ mov r15d,r8d
+ rorx r12d,r8d,22
+ lea edx,[r13*1+rdx]
+ xor r15d,r9d
+ rorx r14d,r8d,13
+ rorx r13d,r8d,2
+ lea r11d,[rdx*1+r11]
+ and esi,r15d
+ vpand xmm8,xmm11,xmm12
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((192-128))+rdi]
+ xor r14d,r12d
+ xor esi,r9d
+ xor r14d,r13d
+ lea edx,[rsi*1+rdx]
+ mov r12d,eax
+ add ecx,DWORD[((36+16))+rbp]
+ and r12d,r11d
+ rorx r13d,r11d,25
+ rorx esi,r11d,11
+ lea edx,[r14*1+rdx]
+ lea ecx,[r12*1+rcx]
+ andn r12d,r11d,ebx
+ xor r13d,esi
+ rorx r14d,r11d,6
+ lea ecx,[r12*1+rcx]
+ xor r13d,r14d
+ mov esi,edx
+ rorx r12d,edx,22
+ lea ecx,[r13*1+rcx]
+ xor esi,r8d
+ rorx r14d,edx,13
+ rorx r13d,edx,2
+ lea r10d,[rcx*1+r10]
+ and r15d,esi
+ vaesenclast xmm11,xmm9,xmm10
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((208-128))+rdi]
+ xor r14d,r12d
+ xor r15d,r8d
+ xor r14d,r13d
+ lea ecx,[r15*1+rcx]
+ mov r12d,r11d
+ add ebx,DWORD[((40+16))+rbp]
+ and r12d,r10d
+ rorx r13d,r10d,25
+ rorx r15d,r10d,11
+ lea ecx,[r14*1+rcx]
+ lea ebx,[r12*1+rbx]
+ andn r12d,r10d,eax
+ xor r13d,r15d
+ rorx r14d,r10d,6
+ lea ebx,[r12*1+rbx]
+ xor r13d,r14d
+ mov r15d,ecx
+ rorx r12d,ecx,22
+ lea ebx,[r13*1+rbx]
+ xor r15d,edx
+ rorx r14d,ecx,13
+ rorx r13d,ecx,2
+ lea r9d,[rbx*1+r9]
+ and esi,r15d
+ vpand xmm11,xmm11,xmm13
+ vaesenc xmm9,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((224-128))+rdi]
+ xor r14d,r12d
+ xor esi,edx
+ xor r14d,r13d
+ lea ebx,[rsi*1+rbx]
+ mov r12d,r10d
+ add eax,DWORD[((44+16))+rbp]
+ and r12d,r9d
+ rorx r13d,r9d,25
+ rorx esi,r9d,11
+ lea ebx,[r14*1+rbx]
+ lea eax,[r12*1+rax]
+ andn r12d,r9d,r11d
+ xor r13d,esi
+ rorx r14d,r9d,6
+ lea eax,[r12*1+rax]
+ xor r13d,r14d
+ mov esi,ebx
+ rorx r12d,ebx,22
+ lea eax,[r13*1+rax]
+ xor esi,ecx
+ rorx r14d,ebx,13
+ rorx r13d,ebx,2
+ lea r8d,[rax*1+r8]
+ and r15d,esi
+ vpor xmm8,xmm8,xmm11
+ vaesenclast xmm11,xmm9,xmm10
+ vmovdqu xmm10,XMMWORD[((0-128))+rdi]
+ xor r14d,r12d
+ xor r15d,ecx
+ xor r14d,r13d
+ lea eax,[r15*1+rax]
+ mov r12d,r9d
+ vmovq r13,xmm15
+ vpextrq r15,xmm15,1
+ vpand xmm11,xmm11,xmm14
+ vpor xmm8,xmm8,xmm11
+ lea rbp,[((-64))+rbp]
+ vmovdqu XMMWORD[r13*1+r15],xmm8
+ lea r13,[16+r13]
+ cmp rbp,rsp
+ jae NEAR $L$ower_avx2
+
+ mov r15,QWORD[552+rsp]
+ lea r13,[64+r13]
+ mov rsi,QWORD[560+rsp]
+ add eax,r14d
+ lea rsp,[448+rsp]
+
+ add eax,DWORD[r15]
+ add ebx,DWORD[4+r15]
+ add ecx,DWORD[8+r15]
+ add edx,DWORD[12+r15]
+ add r8d,DWORD[16+r15]
+ add r9d,DWORD[20+r15]
+ add r10d,DWORD[24+r15]
+ lea r12,[r13*1+rsi]
+ add r11d,DWORD[28+r15]
+
+ cmp r13,QWORD[((64+16))+rsp]
+
+ mov DWORD[r15],eax
+ cmove r12,rsp
+ mov DWORD[4+r15],ebx
+ mov DWORD[8+r15],ecx
+ mov DWORD[12+r15],edx
+ mov DWORD[16+r15],r8d
+ mov DWORD[20+r15],r9d
+ mov DWORD[24+r15],r10d
+ mov DWORD[28+r15],r11d
+
+ jbe NEAR $L$oop_avx2
+ lea rbp,[rsp]
+
+
+
+
+$L$done_avx2:
+ mov r8,QWORD[((64+32))+rbp]
+ mov rsi,QWORD[((64+56))+rbp]
+
+ vmovdqu XMMWORD[r8],xmm8
+ vzeroall
+ movaps xmm6,XMMWORD[128+rbp]
+ movaps xmm7,XMMWORD[144+rbp]
+ movaps xmm8,XMMWORD[160+rbp]
+ movaps xmm9,XMMWORD[176+rbp]
+ movaps xmm10,XMMWORD[192+rbp]
+ movaps xmm11,XMMWORD[208+rbp]
+ movaps xmm12,XMMWORD[224+rbp]
+ movaps xmm13,XMMWORD[240+rbp]
+ movaps xmm14,XMMWORD[256+rbp]
+ movaps xmm15,XMMWORD[272+rbp]
+ mov r15,QWORD[((-48))+rsi]
+
+ mov r14,QWORD[((-40))+rsi]
+
+ mov r13,QWORD[((-32))+rsi]
+
+ mov r12,QWORD[((-24))+rsi]
+
+ mov rbp,QWORD[((-16))+rsi]
+
+ mov rbx,QWORD[((-8))+rsi]
+
+ lea rsp,[rsi]
+
+$L$epilogue_avx2:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_cbc_sha256_enc_avx2:
+
+ALIGN 32
+aesni_cbc_sha256_enc_shaext:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_cbc_sha256_enc_shaext:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD[40+rsp]
+ mov r9,QWORD[48+rsp]
+
+
+
+ mov r10,QWORD[56+rsp]
+ lea rsp,[((-168))+rsp]
+ movaps XMMWORD[(-8-160)+rax],xmm6
+ movaps XMMWORD[(-8-144)+rax],xmm7
+ movaps XMMWORD[(-8-128)+rax],xmm8
+ movaps XMMWORD[(-8-112)+rax],xmm9
+ movaps XMMWORD[(-8-96)+rax],xmm10
+ movaps XMMWORD[(-8-80)+rax],xmm11
+ movaps XMMWORD[(-8-64)+rax],xmm12
+ movaps XMMWORD[(-8-48)+rax],xmm13
+ movaps XMMWORD[(-8-32)+rax],xmm14
+ movaps XMMWORD[(-8-16)+rax],xmm15
+$L$prologue_shaext:
+ lea rax,[((K256+128))]
+ movdqu xmm1,XMMWORD[r9]
+ movdqu xmm2,XMMWORD[16+r9]
+ movdqa xmm3,XMMWORD[((512-128))+rax]
+
+ mov r11d,DWORD[240+rcx]
+ sub rsi,rdi
+ movups xmm15,XMMWORD[rcx]
+ movups xmm6,XMMWORD[r8]
+ movups xmm4,XMMWORD[16+rcx]
+ lea rcx,[112+rcx]
+
+ pshufd xmm0,xmm1,0x1b
+ pshufd xmm1,xmm1,0xb1
+ pshufd xmm2,xmm2,0x1b
+ movdqa xmm7,xmm3
+DB 102,15,58,15,202,8
+ punpcklqdq xmm2,xmm0
+
+ jmp NEAR $L$oop_shaext
+
+ALIGN 16
+$L$oop_shaext:
+ movdqu xmm10,XMMWORD[r10]
+ movdqu xmm11,XMMWORD[16+r10]
+ movdqu xmm12,XMMWORD[32+r10]
+DB 102,68,15,56,0,211
+ movdqu xmm13,XMMWORD[48+r10]
+
+ movdqa xmm0,XMMWORD[((0-128))+rax]
+ paddd xmm0,xmm10
+DB 102,68,15,56,0,219
+ movdqa xmm9,xmm2
+ movdqa xmm8,xmm1
+ movups xmm14,XMMWORD[rdi]
+ xorps xmm14,xmm15
+ xorps xmm6,xmm14
+ movups xmm5,XMMWORD[((-80))+rcx]
+ aesenc xmm6,xmm4
+DB 15,56,203,209
+ pshufd xmm0,xmm0,0x0e
+ movups xmm4,XMMWORD[((-64))+rcx]
+ aesenc xmm6,xmm5
+DB 15,56,203,202
+
+ movdqa xmm0,XMMWORD[((32-128))+rax]
+ paddd xmm0,xmm11
+DB 102,68,15,56,0,227
+ lea r10,[64+r10]
+ movups xmm5,XMMWORD[((-48))+rcx]
+ aesenc xmm6,xmm4
+DB 15,56,203,209
+ pshufd xmm0,xmm0,0x0e
+ movups xmm4,XMMWORD[((-32))+rcx]
+ aesenc xmm6,xmm5
+DB 15,56,203,202
+
+ movdqa xmm0,XMMWORD[((64-128))+rax]
+ paddd xmm0,xmm12
+DB 102,68,15,56,0,235
+DB 69,15,56,204,211
+ movups xmm5,XMMWORD[((-16))+rcx]
+ aesenc xmm6,xmm4
+DB 15,56,203,209
+ pshufd xmm0,xmm0,0x0e
+ movdqa xmm3,xmm13
+DB 102,65,15,58,15,220,4
+ paddd xmm10,xmm3
+ movups xmm4,XMMWORD[rcx]
+ aesenc xmm6,xmm5
+DB 15,56,203,202
+
+ movdqa xmm0,XMMWORD[((96-128))+rax]
+ paddd xmm0,xmm13
+DB 69,15,56,205,213
+DB 69,15,56,204,220
+ movups xmm5,XMMWORD[16+rcx]
+ aesenc xmm6,xmm4
+DB 15,56,203,209
+ pshufd xmm0,xmm0,0x0e
+ movups xmm4,XMMWORD[32+rcx]
+ aesenc xmm6,xmm5
+ movdqa xmm3,xmm10
+DB 102,65,15,58,15,221,4
+ paddd xmm11,xmm3
+DB 15,56,203,202
+ movdqa xmm0,XMMWORD[((128-128))+rax]
+ paddd xmm0,xmm10
+DB 69,15,56,205,218
+DB 69,15,56,204,229
+ movups xmm5,XMMWORD[48+rcx]
+ aesenc xmm6,xmm4
+DB 15,56,203,209
+ pshufd xmm0,xmm0,0x0e
+ movdqa xmm3,xmm11
+DB 102,65,15,58,15,218,4
+ paddd xmm12,xmm3
+ cmp r11d,11
+ jb NEAR $L$aesenclast1
+ movups xmm4,XMMWORD[64+rcx]
+ aesenc xmm6,xmm5
+ movups xmm5,XMMWORD[80+rcx]
+ aesenc xmm6,xmm4
+ je NEAR $L$aesenclast1
+ movups xmm4,XMMWORD[96+rcx]
+ aesenc xmm6,xmm5
+ movups xmm5,XMMWORD[112+rcx]
+ aesenc xmm6,xmm4
+$L$aesenclast1:
+ aesenclast xmm6,xmm5
+ movups xmm4,XMMWORD[((16-112))+rcx]
+ nop
+DB 15,56,203,202
+ movups xmm14,XMMWORD[16+rdi]
+ xorps xmm14,xmm15
+ movups XMMWORD[rdi*1+rsi],xmm6
+ xorps xmm6,xmm14
+ movups xmm5,XMMWORD[((-80))+rcx]
+ aesenc xmm6,xmm4
+ movdqa xmm0,XMMWORD[((160-128))+rax]
+ paddd xmm0,xmm11
+DB 69,15,56,205,227
+DB 69,15,56,204,234
+ movups xmm4,XMMWORD[((-64))+rcx]
+ aesenc xmm6,xmm5
+DB 15,56,203,209
+ pshufd xmm0,xmm0,0x0e
+ movdqa xmm3,xmm12
+DB 102,65,15,58,15,219,4
+ paddd xmm13,xmm3
+ movups xmm5,XMMWORD[((-48))+rcx]
+ aesenc xmm6,xmm4
+DB 15,56,203,202
+ movdqa xmm0,XMMWORD[((192-128))+rax]
+ paddd xmm0,xmm12
+DB 69,15,56,205,236
+DB 69,15,56,204,211
+ movups xmm4,XMMWORD[((-32))+rcx]
+ aesenc xmm6,xmm5
+DB 15,56,203,209
+ pshufd xmm0,xmm0,0x0e
+ movdqa xmm3,xmm13
+DB 102,65,15,58,15,220,4
+ paddd xmm10,xmm3
+ movups xmm5,XMMWORD[((-16))+rcx]
+ aesenc xmm6,xmm4
+DB 15,56,203,202
+ movdqa xmm0,XMMWORD[((224-128))+rax]
+ paddd xmm0,xmm13
+DB 69,15,56,205,213
+DB 69,15,56,204,220
+ movups xmm4,XMMWORD[rcx]
+ aesenc xmm6,xmm5
+DB 15,56,203,209
+ pshufd xmm0,xmm0,0x0e
+ movdqa xmm3,xmm10
+DB 102,65,15,58,15,221,4
+ paddd xmm11,xmm3
+ movups xmm5,XMMWORD[16+rcx]
+ aesenc xmm6,xmm4
+DB 15,56,203,202
+ movdqa xmm0,XMMWORD[((256-128))+rax]
+ paddd xmm0,xmm10
+DB 69,15,56,205,218
+DB 69,15,56,204,229
+ movups xmm4,XMMWORD[32+rcx]
+ aesenc xmm6,xmm5
+DB 15,56,203,209
+ pshufd xmm0,xmm0,0x0e
+ movdqa xmm3,xmm11
+DB 102,65,15,58,15,218,4
+ paddd xmm12,xmm3
+ movups xmm5,XMMWORD[48+rcx]
+ aesenc xmm6,xmm4
+ cmp r11d,11
+ jb NEAR $L$aesenclast2
+ movups xmm4,XMMWORD[64+rcx]
+ aesenc xmm6,xmm5
+ movups xmm5,XMMWORD[80+rcx]
+ aesenc xmm6,xmm4
+ je NEAR $L$aesenclast2
+ movups xmm4,XMMWORD[96+rcx]
+ aesenc xmm6,xmm5
+ movups xmm5,XMMWORD[112+rcx]
+ aesenc xmm6,xmm4
+$L$aesenclast2:
+ aesenclast xmm6,xmm5
+ movups xmm4,XMMWORD[((16-112))+rcx]
+ nop
+DB 15,56,203,202
+ movups xmm14,XMMWORD[32+rdi]
+ xorps xmm14,xmm15
+ movups XMMWORD[16+rdi*1+rsi],xmm6
+ xorps xmm6,xmm14
+ movups xmm5,XMMWORD[((-80))+rcx]
+ aesenc xmm6,xmm4
+ movdqa xmm0,XMMWORD[((288-128))+rax]
+ paddd xmm0,xmm11
+DB 69,15,56,205,227
+DB 69,15,56,204,234
+ movups xmm4,XMMWORD[((-64))+rcx]
+ aesenc xmm6,xmm5
+DB 15,56,203,209
+ pshufd xmm0,xmm0,0x0e
+ movdqa xmm3,xmm12
+DB 102,65,15,58,15,219,4
+ paddd xmm13,xmm3
+ movups xmm5,XMMWORD[((-48))+rcx]
+ aesenc xmm6,xmm4
+DB 15,56,203,202
+ movdqa xmm0,XMMWORD[((320-128))+rax]
+ paddd xmm0,xmm12
+DB 69,15,56,205,236
+DB 69,15,56,204,211
+ movups xmm4,XMMWORD[((-32))+rcx]
+ aesenc xmm6,xmm5
+DB 15,56,203,209
+ pshufd xmm0,xmm0,0x0e
+ movdqa xmm3,xmm13
+DB 102,65,15,58,15,220,4
+ paddd xmm10,xmm3
+ movups xmm5,XMMWORD[((-16))+rcx]
+ aesenc xmm6,xmm4
+DB 15,56,203,202
+ movdqa xmm0,XMMWORD[((352-128))+rax]
+ paddd xmm0,xmm13
+DB 69,15,56,205,213
+DB 69,15,56,204,220
+ movups xmm4,XMMWORD[rcx]
+ aesenc xmm6,xmm5
+DB 15,56,203,209
+ pshufd xmm0,xmm0,0x0e
+ movdqa xmm3,xmm10
+DB 102,65,15,58,15,221,4
+ paddd xmm11,xmm3
+ movups xmm5,XMMWORD[16+rcx]
+ aesenc xmm6,xmm4
+DB 15,56,203,202
+ movdqa xmm0,XMMWORD[((384-128))+rax]
+ paddd xmm0,xmm10
+DB 69,15,56,205,218
+DB 69,15,56,204,229
+ movups xmm4,XMMWORD[32+rcx]
+ aesenc xmm6,xmm5
+DB 15,56,203,209
+ pshufd xmm0,xmm0,0x0e
+ movdqa xmm3,xmm11
+DB 102,65,15,58,15,218,4
+ paddd xmm12,xmm3
+ movups xmm5,XMMWORD[48+rcx]
+ aesenc xmm6,xmm4
+DB 15,56,203,202
+ movdqa xmm0,XMMWORD[((416-128))+rax]
+ paddd xmm0,xmm11
+DB 69,15,56,205,227
+DB 69,15,56,204,234
+ cmp r11d,11
+ jb NEAR $L$aesenclast3
+ movups xmm4,XMMWORD[64+rcx]
+ aesenc xmm6,xmm5
+ movups xmm5,XMMWORD[80+rcx]
+ aesenc xmm6,xmm4
+ je NEAR $L$aesenclast3
+ movups xmm4,XMMWORD[96+rcx]
+ aesenc xmm6,xmm5
+ movups xmm5,XMMWORD[112+rcx]
+ aesenc xmm6,xmm4
+$L$aesenclast3:
+ aesenclast xmm6,xmm5
+ movups xmm4,XMMWORD[((16-112))+rcx]
+ nop
+DB 15,56,203,209
+ pshufd xmm0,xmm0,0x0e
+ movdqa xmm3,xmm12
+DB 102,65,15,58,15,219,4
+ paddd xmm13,xmm3
+ movups xmm14,XMMWORD[48+rdi]
+ xorps xmm14,xmm15
+ movups XMMWORD[32+rdi*1+rsi],xmm6
+ xorps xmm6,xmm14
+ movups xmm5,XMMWORD[((-80))+rcx]
+ aesenc xmm6,xmm4
+ movups xmm4,XMMWORD[((-64))+rcx]
+ aesenc xmm6,xmm5
+DB 15,56,203,202
+
+ movdqa xmm0,XMMWORD[((448-128))+rax]
+ paddd xmm0,xmm12
+DB 69,15,56,205,236
+ movdqa xmm3,xmm7
+ movups xmm5,XMMWORD[((-48))+rcx]
+ aesenc xmm6,xmm4
+DB 15,56,203,209
+ pshufd xmm0,xmm0,0x0e
+ movups xmm4,XMMWORD[((-32))+rcx]
+ aesenc xmm6,xmm5
+DB 15,56,203,202
+
+ movdqa xmm0,XMMWORD[((480-128))+rax]
+ paddd xmm0,xmm13
+ movups xmm5,XMMWORD[((-16))+rcx]
+ aesenc xmm6,xmm4
+ movups xmm4,XMMWORD[rcx]
+ aesenc xmm6,xmm5
+DB 15,56,203,209
+ pshufd xmm0,xmm0,0x0e
+ movups xmm5,XMMWORD[16+rcx]
+ aesenc xmm6,xmm4
+DB 15,56,203,202
+
+ movups xmm4,XMMWORD[32+rcx]
+ aesenc xmm6,xmm5
+ movups xmm5,XMMWORD[48+rcx]
+ aesenc xmm6,xmm4
+ cmp r11d,11
+ jb NEAR $L$aesenclast4
+ movups xmm4,XMMWORD[64+rcx]
+ aesenc xmm6,xmm5
+ movups xmm5,XMMWORD[80+rcx]
+ aesenc xmm6,xmm4
+ je NEAR $L$aesenclast4
+ movups xmm4,XMMWORD[96+rcx]
+ aesenc xmm6,xmm5
+ movups xmm5,XMMWORD[112+rcx]
+ aesenc xmm6,xmm4
+$L$aesenclast4:
+ aesenclast xmm6,xmm5
+ movups xmm4,XMMWORD[((16-112))+rcx]
+ nop
+
+ paddd xmm2,xmm9
+ paddd xmm1,xmm8
+
+ dec rdx
+ movups XMMWORD[48+rdi*1+rsi],xmm6
+ lea rdi,[64+rdi]
+ jnz NEAR $L$oop_shaext
+
+ pshufd xmm2,xmm2,0xb1
+ pshufd xmm3,xmm1,0x1b
+ pshufd xmm1,xmm1,0xb1
+ punpckhqdq xmm1,xmm2
+DB 102,15,58,15,211,8
+
+ movups XMMWORD[r8],xmm6
+ movdqu XMMWORD[r9],xmm1
+ movdqu XMMWORD[16+r9],xmm2
+ movaps xmm6,XMMWORD[rsp]
+ movaps xmm7,XMMWORD[16+rsp]
+ movaps xmm8,XMMWORD[32+rsp]
+ movaps xmm9,XMMWORD[48+rsp]
+ movaps xmm10,XMMWORD[64+rsp]
+ movaps xmm11,XMMWORD[80+rsp]
+ movaps xmm12,XMMWORD[96+rsp]
+ movaps xmm13,XMMWORD[112+rsp]
+ movaps xmm14,XMMWORD[128+rsp]
+ movaps xmm15,XMMWORD[144+rsp]
+ lea rsp,[((8+160))+rsp]
+$L$epilogue_shaext:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_cbc_sha256_enc_shaext:
+EXTERN __imp_RtlVirtualUnwind
+
+ALIGN 16
+se_handler:
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD[120+r8]
+ mov rbx,QWORD[248+r8]
+
+ mov rsi,QWORD[8+r9]
+ mov r11,QWORD[56+r9]
+
+ mov r10d,DWORD[r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jb NEAR $L$in_prologue
+
+ mov rax,QWORD[152+r8]
+
+ mov r10d,DWORD[4+r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jae NEAR $L$in_prologue
+ lea r10,[aesni_cbc_sha256_enc_shaext]
+ cmp rbx,r10
+ jb NEAR $L$not_in_shaext
+
+ lea rsi,[rax]
+ lea rdi,[512+r8]
+ mov ecx,20
+ DD 0xa548f3fc
+ lea rax,[168+rax]
+ jmp NEAR $L$in_prologue
+$L$not_in_shaext:
+ lea r10,[$L$avx2_shortcut]
+ cmp rbx,r10
+ jb NEAR $L$not_in_avx2
+
+ and rax,-256*4
+ add rax,448
+$L$not_in_avx2:
+ mov rsi,rax
+ mov rax,QWORD[((64+56))+rax]
+
+ mov rbx,QWORD[((-8))+rax]
+ mov rbp,QWORD[((-16))+rax]
+ mov r12,QWORD[((-24))+rax]
+ mov r13,QWORD[((-32))+rax]
+ mov r14,QWORD[((-40))+rax]
+ mov r15,QWORD[((-48))+rax]
+ mov QWORD[144+r8],rbx
+ mov QWORD[160+r8],rbp
+ mov QWORD[216+r8],r12
+ mov QWORD[224+r8],r13
+ mov QWORD[232+r8],r14
+ mov QWORD[240+r8],r15
+
+ lea rsi,[((64+64))+rsi]
+ lea rdi,[512+r8]
+ mov ecx,20
+ DD 0xa548f3fc
+
+$L$in_prologue:
+ mov rdi,QWORD[8+rax]
+ mov rsi,QWORD[16+rax]
+ mov QWORD[152+r8],rax
+ mov QWORD[168+r8],rsi
+ mov QWORD[176+r8],rdi
+
+ mov rdi,QWORD[40+r9]
+ mov rsi,r8
+ mov ecx,154
+ DD 0xa548f3fc
+
+ mov rsi,r9
+ xor rcx,rcx
+ mov rdx,QWORD[8+rsi]
+ mov r8,QWORD[rsi]
+ mov r9,QWORD[16+rsi]
+ mov r10,QWORD[40+rsi]
+ lea r11,[56+rsi]
+ lea r12,[24+rsi]
+ mov QWORD[32+rsp],r10
+ mov QWORD[40+rsp],r11
+ mov QWORD[48+rsp],r12
+ mov QWORD[56+rsp],rcx
+ call QWORD[__imp_RtlVirtualUnwind]
+
+ mov eax,1
+ add rsp,64
+ popfq
+ pop r15
+ pop r14
+ pop r13
+ pop r12
+ pop rbp
+ pop rbx
+ pop rdi
+ pop rsi
+ DB 0F3h,0C3h ;repret
+
+
+section .pdata rdata align=4
+ DD $L$SEH_begin_aesni_cbc_sha256_enc_xop wrt ..imagebase
+ DD $L$SEH_end_aesni_cbc_sha256_enc_xop wrt ..imagebase
+ DD $L$SEH_info_aesni_cbc_sha256_enc_xop wrt ..imagebase
+
+ DD $L$SEH_begin_aesni_cbc_sha256_enc_avx wrt ..imagebase
+ DD $L$SEH_end_aesni_cbc_sha256_enc_avx wrt ..imagebase
+ DD $L$SEH_info_aesni_cbc_sha256_enc_avx wrt ..imagebase
+ DD $L$SEH_begin_aesni_cbc_sha256_enc_avx2 wrt ..imagebase
+ DD $L$SEH_end_aesni_cbc_sha256_enc_avx2 wrt ..imagebase
+ DD $L$SEH_info_aesni_cbc_sha256_enc_avx2 wrt ..imagebase
+ DD $L$SEH_begin_aesni_cbc_sha256_enc_shaext wrt ..imagebase
+ DD $L$SEH_end_aesni_cbc_sha256_enc_shaext wrt ..imagebase
+ DD $L$SEH_info_aesni_cbc_sha256_enc_shaext wrt ..imagebase
+section .xdata rdata align=8
+ALIGN 8
+$L$SEH_info_aesni_cbc_sha256_enc_xop:
+DB 9,0,0,0
+ DD se_handler wrt ..imagebase
+ DD $L$prologue_xop wrt ..imagebase,$L$epilogue_xop wrt ..imagebase
+
+$L$SEH_info_aesni_cbc_sha256_enc_avx:
+DB 9,0,0,0
+ DD se_handler wrt ..imagebase
+ DD $L$prologue_avx wrt ..imagebase,$L$epilogue_avx wrt ..imagebase
+$L$SEH_info_aesni_cbc_sha256_enc_avx2:
+DB 9,0,0,0
+ DD se_handler wrt ..imagebase
+ DD $L$prologue_avx2 wrt ..imagebase,$L$epilogue_avx2 wrt ..imagebase
+$L$SEH_info_aesni_cbc_sha256_enc_shaext:
+DB 9,0,0,0
+ DD se_handler wrt ..imagebase
+ DD $L$prologue_shaext wrt ..imagebase,$L$epilogue_shaext wrt ..imagebase
diff --git a/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aesni-x86_64.nasm b/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aesni-x86_64.nasm
new file mode 100644
index 0000000000..75a9780a38
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/aesni-x86_64.nasm
@@ -0,0 +1,5104 @@
+default rel
+%define XMMWORD
+%define YMMWORD
+%define ZMMWORD
+section .text code align=64
+
+EXTERN OPENSSL_ia32cap_P
+global aesni_encrypt
+
+ALIGN 16
+aesni_encrypt:
+
+DB 243,15,30,250
+ movups xmm2,XMMWORD[rcx]
+ mov eax,DWORD[240+r8]
+ movups xmm0,XMMWORD[r8]
+ movups xmm1,XMMWORD[16+r8]
+ lea r8,[32+r8]
+ xorps xmm2,xmm0
+$L$oop_enc1_1:
+DB 102,15,56,220,209
+ dec eax
+ movups xmm1,XMMWORD[r8]
+ lea r8,[16+r8]
+ jnz NEAR $L$oop_enc1_1
+DB 102,15,56,221,209
+ pxor xmm0,xmm0
+ pxor xmm1,xmm1
+ movups XMMWORD[rdx],xmm2
+ pxor xmm2,xmm2
+ DB 0F3h,0C3h ;repret
+
+
+
+global aesni_decrypt
+
+ALIGN 16
+aesni_decrypt:
+
+DB 243,15,30,250
+ movups xmm2,XMMWORD[rcx]
+ mov eax,DWORD[240+r8]
+ movups xmm0,XMMWORD[r8]
+ movups xmm1,XMMWORD[16+r8]
+ lea r8,[32+r8]
+ xorps xmm2,xmm0
+$L$oop_dec1_2:
+DB 102,15,56,222,209
+ dec eax
+ movups xmm1,XMMWORD[r8]
+ lea r8,[16+r8]
+ jnz NEAR $L$oop_dec1_2
+DB 102,15,56,223,209
+ pxor xmm0,xmm0
+ pxor xmm1,xmm1
+ movups XMMWORD[rdx],xmm2
+ pxor xmm2,xmm2
+ DB 0F3h,0C3h ;repret
+
+
+
+ALIGN 16
+_aesni_encrypt2:
+
+ movups xmm0,XMMWORD[rcx]
+ shl eax,4
+ movups xmm1,XMMWORD[16+rcx]
+ xorps xmm2,xmm0
+ xorps xmm3,xmm0
+ movups xmm0,XMMWORD[32+rcx]
+ lea rcx,[32+rax*1+rcx]
+ neg rax
+ add rax,16
+
+$L$enc_loop2:
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+ movups xmm1,XMMWORD[rax*1+rcx]
+ add rax,32
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+ movups xmm0,XMMWORD[((-16))+rax*1+rcx]
+ jnz NEAR $L$enc_loop2
+
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,221,208
+DB 102,15,56,221,216
+ DB 0F3h,0C3h ;repret
+
+
+
+ALIGN 16
+_aesni_decrypt2:
+
+ movups xmm0,XMMWORD[rcx]
+ shl eax,4
+ movups xmm1,XMMWORD[16+rcx]
+ xorps xmm2,xmm0
+ xorps xmm3,xmm0
+ movups xmm0,XMMWORD[32+rcx]
+ lea rcx,[32+rax*1+rcx]
+ neg rax
+ add rax,16
+
+$L$dec_loop2:
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+ movups xmm1,XMMWORD[rax*1+rcx]
+ add rax,32
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+ movups xmm0,XMMWORD[((-16))+rax*1+rcx]
+ jnz NEAR $L$dec_loop2
+
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,223,208
+DB 102,15,56,223,216
+ DB 0F3h,0C3h ;repret
+
+
+
+ALIGN 16
+_aesni_encrypt3:
+
+ movups xmm0,XMMWORD[rcx]
+ shl eax,4
+ movups xmm1,XMMWORD[16+rcx]
+ xorps xmm2,xmm0
+ xorps xmm3,xmm0
+ xorps xmm4,xmm0
+ movups xmm0,XMMWORD[32+rcx]
+ lea rcx,[32+rax*1+rcx]
+ neg rax
+ add rax,16
+
+$L$enc_loop3:
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+ movups xmm1,XMMWORD[rax*1+rcx]
+ add rax,32
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+DB 102,15,56,220,224
+ movups xmm0,XMMWORD[((-16))+rax*1+rcx]
+ jnz NEAR $L$enc_loop3
+
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+DB 102,15,56,221,208
+DB 102,15,56,221,216
+DB 102,15,56,221,224
+ DB 0F3h,0C3h ;repret
+
+
+
+ALIGN 16
+_aesni_decrypt3:
+
+ movups xmm0,XMMWORD[rcx]
+ shl eax,4
+ movups xmm1,XMMWORD[16+rcx]
+ xorps xmm2,xmm0
+ xorps xmm3,xmm0
+ xorps xmm4,xmm0
+ movups xmm0,XMMWORD[32+rcx]
+ lea rcx,[32+rax*1+rcx]
+ neg rax
+ add rax,16
+
+$L$dec_loop3:
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+ movups xmm1,XMMWORD[rax*1+rcx]
+ add rax,32
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+ movups xmm0,XMMWORD[((-16))+rax*1+rcx]
+ jnz NEAR $L$dec_loop3
+
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,223,208
+DB 102,15,56,223,216
+DB 102,15,56,223,224
+ DB 0F3h,0C3h ;repret
+
+
+
+ALIGN 16
+_aesni_encrypt4:
+
+ movups xmm0,XMMWORD[rcx]
+ shl eax,4
+ movups xmm1,XMMWORD[16+rcx]
+ xorps xmm2,xmm0
+ xorps xmm3,xmm0
+ xorps xmm4,xmm0
+ xorps xmm5,xmm0
+ movups xmm0,XMMWORD[32+rcx]
+ lea rcx,[32+rax*1+rcx]
+ neg rax
+DB 0x0f,0x1f,0x00
+ add rax,16
+
+$L$enc_loop4:
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+ movups xmm1,XMMWORD[rax*1+rcx]
+ add rax,32
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+ movups xmm0,XMMWORD[((-16))+rax*1+rcx]
+ jnz NEAR $L$enc_loop4
+
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+DB 102,15,56,221,208
+DB 102,15,56,221,216
+DB 102,15,56,221,224
+DB 102,15,56,221,232
+ DB 0F3h,0C3h ;repret
+
+
+
+ALIGN 16
+_aesni_decrypt4:
+
+ movups xmm0,XMMWORD[rcx]
+ shl eax,4
+ movups xmm1,XMMWORD[16+rcx]
+ xorps xmm2,xmm0
+ xorps xmm3,xmm0
+ xorps xmm4,xmm0
+ xorps xmm5,xmm0
+ movups xmm0,XMMWORD[32+rcx]
+ lea rcx,[32+rax*1+rcx]
+ neg rax
+DB 0x0f,0x1f,0x00
+ add rax,16
+
+$L$dec_loop4:
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+ movups xmm1,XMMWORD[rax*1+rcx]
+ add rax,32
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+ movups xmm0,XMMWORD[((-16))+rax*1+rcx]
+ jnz NEAR $L$dec_loop4
+
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+DB 102,15,56,223,208
+DB 102,15,56,223,216
+DB 102,15,56,223,224
+DB 102,15,56,223,232
+ DB 0F3h,0C3h ;repret
+
+
+
+ALIGN 16
+_aesni_encrypt6:
+
+ movups xmm0,XMMWORD[rcx]
+ shl eax,4
+ movups xmm1,XMMWORD[16+rcx]
+ xorps xmm2,xmm0
+ pxor xmm3,xmm0
+ pxor xmm4,xmm0
+DB 102,15,56,220,209
+ lea rcx,[32+rax*1+rcx]
+ neg rax
+DB 102,15,56,220,217
+ pxor xmm5,xmm0
+ pxor xmm6,xmm0
+DB 102,15,56,220,225
+ pxor xmm7,xmm0
+ movups xmm0,XMMWORD[rax*1+rcx]
+ add rax,16
+ jmp NEAR $L$enc_loop6_enter
+ALIGN 16
+$L$enc_loop6:
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+$L$enc_loop6_enter:
+DB 102,15,56,220,233
+DB 102,15,56,220,241
+DB 102,15,56,220,249
+ movups xmm1,XMMWORD[rax*1+rcx]
+ add rax,32
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+DB 102,15,56,220,240
+DB 102,15,56,220,248
+ movups xmm0,XMMWORD[((-16))+rax*1+rcx]
+ jnz NEAR $L$enc_loop6
+
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+DB 102,15,56,220,241
+DB 102,15,56,220,249
+DB 102,15,56,221,208
+DB 102,15,56,221,216
+DB 102,15,56,221,224
+DB 102,15,56,221,232
+DB 102,15,56,221,240
+DB 102,15,56,221,248
+ DB 0F3h,0C3h ;repret
+
+
+
+ALIGN 16
+_aesni_decrypt6:
+
+ movups xmm0,XMMWORD[rcx]
+ shl eax,4
+ movups xmm1,XMMWORD[16+rcx]
+ xorps xmm2,xmm0
+ pxor xmm3,xmm0
+ pxor xmm4,xmm0
+DB 102,15,56,222,209
+ lea rcx,[32+rax*1+rcx]
+ neg rax
+DB 102,15,56,222,217
+ pxor xmm5,xmm0
+ pxor xmm6,xmm0
+DB 102,15,56,222,225
+ pxor xmm7,xmm0
+ movups xmm0,XMMWORD[rax*1+rcx]
+ add rax,16
+ jmp NEAR $L$dec_loop6_enter
+ALIGN 16
+$L$dec_loop6:
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+$L$dec_loop6_enter:
+DB 102,15,56,222,233
+DB 102,15,56,222,241
+DB 102,15,56,222,249
+ movups xmm1,XMMWORD[rax*1+rcx]
+ add rax,32
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+DB 102,15,56,222,240
+DB 102,15,56,222,248
+ movups xmm0,XMMWORD[((-16))+rax*1+rcx]
+ jnz NEAR $L$dec_loop6
+
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+DB 102,15,56,222,241
+DB 102,15,56,222,249
+DB 102,15,56,223,208
+DB 102,15,56,223,216
+DB 102,15,56,223,224
+DB 102,15,56,223,232
+DB 102,15,56,223,240
+DB 102,15,56,223,248
+ DB 0F3h,0C3h ;repret
+
+
+
+ALIGN 16
+_aesni_encrypt8:
+
+ movups xmm0,XMMWORD[rcx]
+ shl eax,4
+ movups xmm1,XMMWORD[16+rcx]
+ xorps xmm2,xmm0
+ xorps xmm3,xmm0
+ pxor xmm4,xmm0
+ pxor xmm5,xmm0
+ pxor xmm6,xmm0
+ lea rcx,[32+rax*1+rcx]
+ neg rax
+DB 102,15,56,220,209
+ pxor xmm7,xmm0
+ pxor xmm8,xmm0
+DB 102,15,56,220,217
+ pxor xmm9,xmm0
+ movups xmm0,XMMWORD[rax*1+rcx]
+ add rax,16
+ jmp NEAR $L$enc_loop8_inner
+ALIGN 16
+$L$enc_loop8:
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+$L$enc_loop8_inner:
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+DB 102,15,56,220,241
+DB 102,15,56,220,249
+DB 102,68,15,56,220,193
+DB 102,68,15,56,220,201
+$L$enc_loop8_enter:
+ movups xmm1,XMMWORD[rax*1+rcx]
+ add rax,32
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+DB 102,15,56,220,240
+DB 102,15,56,220,248
+DB 102,68,15,56,220,192
+DB 102,68,15,56,220,200
+ movups xmm0,XMMWORD[((-16))+rax*1+rcx]
+ jnz NEAR $L$enc_loop8
+
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+DB 102,15,56,220,241
+DB 102,15,56,220,249
+DB 102,68,15,56,220,193
+DB 102,68,15,56,220,201
+DB 102,15,56,221,208
+DB 102,15,56,221,216
+DB 102,15,56,221,224
+DB 102,15,56,221,232
+DB 102,15,56,221,240
+DB 102,15,56,221,248
+DB 102,68,15,56,221,192
+DB 102,68,15,56,221,200
+ DB 0F3h,0C3h ;repret
+
+
+
+ALIGN 16
+_aesni_decrypt8:
+
+ movups xmm0,XMMWORD[rcx]
+ shl eax,4
+ movups xmm1,XMMWORD[16+rcx]
+ xorps xmm2,xmm0
+ xorps xmm3,xmm0
+ pxor xmm4,xmm0
+ pxor xmm5,xmm0
+ pxor xmm6,xmm0
+ lea rcx,[32+rax*1+rcx]
+ neg rax
+DB 102,15,56,222,209
+ pxor xmm7,xmm0
+ pxor xmm8,xmm0
+DB 102,15,56,222,217
+ pxor xmm9,xmm0
+ movups xmm0,XMMWORD[rax*1+rcx]
+ add rax,16
+ jmp NEAR $L$dec_loop8_inner
+ALIGN 16
+$L$dec_loop8:
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+$L$dec_loop8_inner:
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+DB 102,15,56,222,241
+DB 102,15,56,222,249
+DB 102,68,15,56,222,193
+DB 102,68,15,56,222,201
+$L$dec_loop8_enter:
+ movups xmm1,XMMWORD[rax*1+rcx]
+ add rax,32
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+DB 102,15,56,222,240
+DB 102,15,56,222,248
+DB 102,68,15,56,222,192
+DB 102,68,15,56,222,200
+ movups xmm0,XMMWORD[((-16))+rax*1+rcx]
+ jnz NEAR $L$dec_loop8
+
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+DB 102,15,56,222,241
+DB 102,15,56,222,249
+DB 102,68,15,56,222,193
+DB 102,68,15,56,222,201
+DB 102,15,56,223,208
+DB 102,15,56,223,216
+DB 102,15,56,223,224
+DB 102,15,56,223,232
+DB 102,15,56,223,240
+DB 102,15,56,223,248
+DB 102,68,15,56,223,192
+DB 102,68,15,56,223,200
+ DB 0F3h,0C3h ;repret
+
+
+global aesni_ecb_encrypt
+
+ALIGN 16
+aesni_ecb_encrypt:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_ecb_encrypt:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD[40+rsp]
+
+
+
+DB 243,15,30,250
+ lea rsp,[((-88))+rsp]
+ movaps XMMWORD[rsp],xmm6
+ movaps XMMWORD[16+rsp],xmm7
+ movaps XMMWORD[32+rsp],xmm8
+ movaps XMMWORD[48+rsp],xmm9
+$L$ecb_enc_body:
+ and rdx,-16
+ jz NEAR $L$ecb_ret
+
+ mov eax,DWORD[240+rcx]
+ movups xmm0,XMMWORD[rcx]
+ mov r11,rcx
+ mov r10d,eax
+ test r8d,r8d
+ jz NEAR $L$ecb_decrypt
+
+ cmp rdx,0x80
+ jb NEAR $L$ecb_enc_tail
+
+ movdqu xmm2,XMMWORD[rdi]
+ movdqu xmm3,XMMWORD[16+rdi]
+ movdqu xmm4,XMMWORD[32+rdi]
+ movdqu xmm5,XMMWORD[48+rdi]
+ movdqu xmm6,XMMWORD[64+rdi]
+ movdqu xmm7,XMMWORD[80+rdi]
+ movdqu xmm8,XMMWORD[96+rdi]
+ movdqu xmm9,XMMWORD[112+rdi]
+ lea rdi,[128+rdi]
+ sub rdx,0x80
+ jmp NEAR $L$ecb_enc_loop8_enter
+ALIGN 16
+$L$ecb_enc_loop8:
+ movups XMMWORD[rsi],xmm2
+ mov rcx,r11
+ movdqu xmm2,XMMWORD[rdi]
+ mov eax,r10d
+ movups XMMWORD[16+rsi],xmm3
+ movdqu xmm3,XMMWORD[16+rdi]
+ movups XMMWORD[32+rsi],xmm4
+ movdqu xmm4,XMMWORD[32+rdi]
+ movups XMMWORD[48+rsi],xmm5
+ movdqu xmm5,XMMWORD[48+rdi]
+ movups XMMWORD[64+rsi],xmm6
+ movdqu xmm6,XMMWORD[64+rdi]
+ movups XMMWORD[80+rsi],xmm7
+ movdqu xmm7,XMMWORD[80+rdi]
+ movups XMMWORD[96+rsi],xmm8
+ movdqu xmm8,XMMWORD[96+rdi]
+ movups XMMWORD[112+rsi],xmm9
+ lea rsi,[128+rsi]
+ movdqu xmm9,XMMWORD[112+rdi]
+ lea rdi,[128+rdi]
+$L$ecb_enc_loop8_enter:
+
+ call _aesni_encrypt8
+
+ sub rdx,0x80
+ jnc NEAR $L$ecb_enc_loop8
+
+ movups XMMWORD[rsi],xmm2
+ mov rcx,r11
+ movups XMMWORD[16+rsi],xmm3
+ mov eax,r10d
+ movups XMMWORD[32+rsi],xmm4
+ movups XMMWORD[48+rsi],xmm5
+ movups XMMWORD[64+rsi],xmm6
+ movups XMMWORD[80+rsi],xmm7
+ movups XMMWORD[96+rsi],xmm8
+ movups XMMWORD[112+rsi],xmm9
+ lea rsi,[128+rsi]
+ add rdx,0x80
+ jz NEAR $L$ecb_ret
+
+$L$ecb_enc_tail:
+ movups xmm2,XMMWORD[rdi]
+ cmp rdx,0x20
+ jb NEAR $L$ecb_enc_one
+ movups xmm3,XMMWORD[16+rdi]
+ je NEAR $L$ecb_enc_two
+ movups xmm4,XMMWORD[32+rdi]
+ cmp rdx,0x40
+ jb NEAR $L$ecb_enc_three
+ movups xmm5,XMMWORD[48+rdi]
+ je NEAR $L$ecb_enc_four
+ movups xmm6,XMMWORD[64+rdi]
+ cmp rdx,0x60
+ jb NEAR $L$ecb_enc_five
+ movups xmm7,XMMWORD[80+rdi]
+ je NEAR $L$ecb_enc_six
+ movdqu xmm8,XMMWORD[96+rdi]
+ xorps xmm9,xmm9
+ call _aesni_encrypt8
+ movups XMMWORD[rsi],xmm2
+ movups XMMWORD[16+rsi],xmm3
+ movups XMMWORD[32+rsi],xmm4
+ movups XMMWORD[48+rsi],xmm5
+ movups XMMWORD[64+rsi],xmm6
+ movups XMMWORD[80+rsi],xmm7
+ movups XMMWORD[96+rsi],xmm8
+ jmp NEAR $L$ecb_ret
+ALIGN 16
+$L$ecb_enc_one:
+ movups xmm0,XMMWORD[rcx]
+ movups xmm1,XMMWORD[16+rcx]
+ lea rcx,[32+rcx]
+ xorps xmm2,xmm0
+$L$oop_enc1_3:
+DB 102,15,56,220,209
+ dec eax
+ movups xmm1,XMMWORD[rcx]
+ lea rcx,[16+rcx]
+ jnz NEAR $L$oop_enc1_3
+DB 102,15,56,221,209
+ movups XMMWORD[rsi],xmm2
+ jmp NEAR $L$ecb_ret
+ALIGN 16
+$L$ecb_enc_two:
+ call _aesni_encrypt2
+ movups XMMWORD[rsi],xmm2
+ movups XMMWORD[16+rsi],xmm3
+ jmp NEAR $L$ecb_ret
+ALIGN 16
+$L$ecb_enc_three:
+ call _aesni_encrypt3
+ movups XMMWORD[rsi],xmm2
+ movups XMMWORD[16+rsi],xmm3
+ movups XMMWORD[32+rsi],xmm4
+ jmp NEAR $L$ecb_ret
+ALIGN 16
+$L$ecb_enc_four:
+ call _aesni_encrypt4
+ movups XMMWORD[rsi],xmm2
+ movups XMMWORD[16+rsi],xmm3
+ movups XMMWORD[32+rsi],xmm4
+ movups XMMWORD[48+rsi],xmm5
+ jmp NEAR $L$ecb_ret
+ALIGN 16
+$L$ecb_enc_five:
+ xorps xmm7,xmm7
+ call _aesni_encrypt6
+ movups XMMWORD[rsi],xmm2
+ movups XMMWORD[16+rsi],xmm3
+ movups XMMWORD[32+rsi],xmm4
+ movups XMMWORD[48+rsi],xmm5
+ movups XMMWORD[64+rsi],xmm6
+ jmp NEAR $L$ecb_ret
+ALIGN 16
+$L$ecb_enc_six:
+ call _aesni_encrypt6
+ movups XMMWORD[rsi],xmm2
+ movups XMMWORD[16+rsi],xmm3
+ movups XMMWORD[32+rsi],xmm4
+ movups XMMWORD[48+rsi],xmm5
+ movups XMMWORD[64+rsi],xmm6
+ movups XMMWORD[80+rsi],xmm7
+ jmp NEAR $L$ecb_ret
+
+ALIGN 16
+$L$ecb_decrypt:
+ cmp rdx,0x80
+ jb NEAR $L$ecb_dec_tail
+
+ movdqu xmm2,XMMWORD[rdi]
+ movdqu xmm3,XMMWORD[16+rdi]
+ movdqu xmm4,XMMWORD[32+rdi]
+ movdqu xmm5,XMMWORD[48+rdi]
+ movdqu xmm6,XMMWORD[64+rdi]
+ movdqu xmm7,XMMWORD[80+rdi]
+ movdqu xmm8,XMMWORD[96+rdi]
+ movdqu xmm9,XMMWORD[112+rdi]
+ lea rdi,[128+rdi]
+ sub rdx,0x80
+ jmp NEAR $L$ecb_dec_loop8_enter
+ALIGN 16
+$L$ecb_dec_loop8:
+ movups XMMWORD[rsi],xmm2
+ mov rcx,r11
+ movdqu xmm2,XMMWORD[rdi]
+ mov eax,r10d
+ movups XMMWORD[16+rsi],xmm3
+ movdqu xmm3,XMMWORD[16+rdi]
+ movups XMMWORD[32+rsi],xmm4
+ movdqu xmm4,XMMWORD[32+rdi]
+ movups XMMWORD[48+rsi],xmm5
+ movdqu xmm5,XMMWORD[48+rdi]
+ movups XMMWORD[64+rsi],xmm6
+ movdqu xmm6,XMMWORD[64+rdi]
+ movups XMMWORD[80+rsi],xmm7
+ movdqu xmm7,XMMWORD[80+rdi]
+ movups XMMWORD[96+rsi],xmm8
+ movdqu xmm8,XMMWORD[96+rdi]
+ movups XMMWORD[112+rsi],xmm9
+ lea rsi,[128+rsi]
+ movdqu xmm9,XMMWORD[112+rdi]
+ lea rdi,[128+rdi]
+$L$ecb_dec_loop8_enter:
+
+ call _aesni_decrypt8
+
+ movups xmm0,XMMWORD[r11]
+ sub rdx,0x80
+ jnc NEAR $L$ecb_dec_loop8
+
+ movups XMMWORD[rsi],xmm2
+ pxor xmm2,xmm2
+ mov rcx,r11
+ movups XMMWORD[16+rsi],xmm3
+ pxor xmm3,xmm3
+ mov eax,r10d
+ movups XMMWORD[32+rsi],xmm4
+ pxor xmm4,xmm4
+ movups XMMWORD[48+rsi],xmm5
+ pxor xmm5,xmm5
+ movups XMMWORD[64+rsi],xmm6
+ pxor xmm6,xmm6
+ movups XMMWORD[80+rsi],xmm7
+ pxor xmm7,xmm7
+ movups XMMWORD[96+rsi],xmm8
+ pxor xmm8,xmm8
+ movups XMMWORD[112+rsi],xmm9
+ pxor xmm9,xmm9
+ lea rsi,[128+rsi]
+ add rdx,0x80
+ jz NEAR $L$ecb_ret
+
+$L$ecb_dec_tail:
+ movups xmm2,XMMWORD[rdi]
+ cmp rdx,0x20
+ jb NEAR $L$ecb_dec_one
+ movups xmm3,XMMWORD[16+rdi]
+ je NEAR $L$ecb_dec_two
+ movups xmm4,XMMWORD[32+rdi]
+ cmp rdx,0x40
+ jb NEAR $L$ecb_dec_three
+ movups xmm5,XMMWORD[48+rdi]
+ je NEAR $L$ecb_dec_four
+ movups xmm6,XMMWORD[64+rdi]
+ cmp rdx,0x60
+ jb NEAR $L$ecb_dec_five
+ movups xmm7,XMMWORD[80+rdi]
+ je NEAR $L$ecb_dec_six
+ movups xmm8,XMMWORD[96+rdi]
+ movups xmm0,XMMWORD[rcx]
+ xorps xmm9,xmm9
+ call _aesni_decrypt8
+ movups XMMWORD[rsi],xmm2
+ pxor xmm2,xmm2
+ movups XMMWORD[16+rsi],xmm3
+ pxor xmm3,xmm3
+ movups XMMWORD[32+rsi],xmm4
+ pxor xmm4,xmm4
+ movups XMMWORD[48+rsi],xmm5
+ pxor xmm5,xmm5
+ movups XMMWORD[64+rsi],xmm6
+ pxor xmm6,xmm6
+ movups XMMWORD[80+rsi],xmm7
+ pxor xmm7,xmm7
+ movups XMMWORD[96+rsi],xmm8
+ pxor xmm8,xmm8
+ pxor xmm9,xmm9
+ jmp NEAR $L$ecb_ret
+ALIGN 16
+$L$ecb_dec_one:
+ movups xmm0,XMMWORD[rcx]
+ movups xmm1,XMMWORD[16+rcx]
+ lea rcx,[32+rcx]
+ xorps xmm2,xmm0
+$L$oop_dec1_4:
+DB 102,15,56,222,209
+ dec eax
+ movups xmm1,XMMWORD[rcx]
+ lea rcx,[16+rcx]
+ jnz NEAR $L$oop_dec1_4
+DB 102,15,56,223,209
+ movups XMMWORD[rsi],xmm2
+ pxor xmm2,xmm2
+ jmp NEAR $L$ecb_ret
+ALIGN 16
+$L$ecb_dec_two:
+ call _aesni_decrypt2
+ movups XMMWORD[rsi],xmm2
+ pxor xmm2,xmm2
+ movups XMMWORD[16+rsi],xmm3
+ pxor xmm3,xmm3
+ jmp NEAR $L$ecb_ret
+ALIGN 16
+$L$ecb_dec_three:
+ call _aesni_decrypt3
+ movups XMMWORD[rsi],xmm2
+ pxor xmm2,xmm2
+ movups XMMWORD[16+rsi],xmm3
+ pxor xmm3,xmm3
+ movups XMMWORD[32+rsi],xmm4
+ pxor xmm4,xmm4
+ jmp NEAR $L$ecb_ret
+ALIGN 16
+$L$ecb_dec_four:
+ call _aesni_decrypt4
+ movups XMMWORD[rsi],xmm2
+ pxor xmm2,xmm2
+ movups XMMWORD[16+rsi],xmm3
+ pxor xmm3,xmm3
+ movups XMMWORD[32+rsi],xmm4
+ pxor xmm4,xmm4
+ movups XMMWORD[48+rsi],xmm5
+ pxor xmm5,xmm5
+ jmp NEAR $L$ecb_ret
+ALIGN 16
+$L$ecb_dec_five:
+ xorps xmm7,xmm7
+ call _aesni_decrypt6
+ movups XMMWORD[rsi],xmm2
+ pxor xmm2,xmm2
+ movups XMMWORD[16+rsi],xmm3
+ pxor xmm3,xmm3
+ movups XMMWORD[32+rsi],xmm4
+ pxor xmm4,xmm4
+ movups XMMWORD[48+rsi],xmm5
+ pxor xmm5,xmm5
+ movups XMMWORD[64+rsi],xmm6
+ pxor xmm6,xmm6
+ pxor xmm7,xmm7
+ jmp NEAR $L$ecb_ret
+ALIGN 16
+$L$ecb_dec_six:
+ call _aesni_decrypt6
+ movups XMMWORD[rsi],xmm2
+ pxor xmm2,xmm2
+ movups XMMWORD[16+rsi],xmm3
+ pxor xmm3,xmm3
+ movups XMMWORD[32+rsi],xmm4
+ pxor xmm4,xmm4
+ movups XMMWORD[48+rsi],xmm5
+ pxor xmm5,xmm5
+ movups XMMWORD[64+rsi],xmm6
+ pxor xmm6,xmm6
+ movups XMMWORD[80+rsi],xmm7
+ pxor xmm7,xmm7
+
+$L$ecb_ret:
+ xorps xmm0,xmm0
+ pxor xmm1,xmm1
+ movaps xmm6,XMMWORD[rsp]
+ movaps XMMWORD[rsp],xmm0
+ movaps xmm7,XMMWORD[16+rsp]
+ movaps XMMWORD[16+rsp],xmm0
+ movaps xmm8,XMMWORD[32+rsp]
+ movaps XMMWORD[32+rsp],xmm0
+ movaps xmm9,XMMWORD[48+rsp]
+ movaps XMMWORD[48+rsp],xmm0
+ lea rsp,[88+rsp]
+$L$ecb_enc_ret:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_ecb_encrypt:
+global aesni_ccm64_encrypt_blocks
+
+ALIGN 16
+aesni_ccm64_encrypt_blocks:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_ccm64_encrypt_blocks:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD[40+rsp]
+ mov r9,QWORD[48+rsp]
+
+
+
+DB 243,15,30,250
+ lea rsp,[((-88))+rsp]
+ movaps XMMWORD[rsp],xmm6
+ movaps XMMWORD[16+rsp],xmm7
+ movaps XMMWORD[32+rsp],xmm8
+ movaps XMMWORD[48+rsp],xmm9
+$L$ccm64_enc_body:
+ mov eax,DWORD[240+rcx]
+ movdqu xmm6,XMMWORD[r8]
+ movdqa xmm9,XMMWORD[$L$increment64]
+ movdqa xmm7,XMMWORD[$L$bswap_mask]
+
+ shl eax,4
+ mov r10d,16
+ lea r11,[rcx]
+ movdqu xmm3,XMMWORD[r9]
+ movdqa xmm2,xmm6
+ lea rcx,[32+rax*1+rcx]
+DB 102,15,56,0,247
+ sub r10,rax
+ jmp NEAR $L$ccm64_enc_outer
+ALIGN 16
+$L$ccm64_enc_outer:
+ movups xmm0,XMMWORD[r11]
+ mov rax,r10
+ movups xmm8,XMMWORD[rdi]
+
+ xorps xmm2,xmm0
+ movups xmm1,XMMWORD[16+r11]
+ xorps xmm0,xmm8
+ xorps xmm3,xmm0
+ movups xmm0,XMMWORD[32+r11]
+
+$L$ccm64_enc2_loop:
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+ movups xmm1,XMMWORD[rax*1+rcx]
+ add rax,32
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+ movups xmm0,XMMWORD[((-16))+rax*1+rcx]
+ jnz NEAR $L$ccm64_enc2_loop
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+ paddq xmm6,xmm9
+ dec rdx
+DB 102,15,56,221,208
+DB 102,15,56,221,216
+
+ lea rdi,[16+rdi]
+ xorps xmm8,xmm2
+ movdqa xmm2,xmm6
+ movups XMMWORD[rsi],xmm8
+DB 102,15,56,0,215
+ lea rsi,[16+rsi]
+ jnz NEAR $L$ccm64_enc_outer
+
+ pxor xmm0,xmm0
+ pxor xmm1,xmm1
+ pxor xmm2,xmm2
+ movups XMMWORD[r9],xmm3
+ pxor xmm3,xmm3
+ pxor xmm8,xmm8
+ pxor xmm6,xmm6
+ movaps xmm6,XMMWORD[rsp]
+ movaps XMMWORD[rsp],xmm0
+ movaps xmm7,XMMWORD[16+rsp]
+ movaps XMMWORD[16+rsp],xmm0
+ movaps xmm8,XMMWORD[32+rsp]
+ movaps XMMWORD[32+rsp],xmm0
+ movaps xmm9,XMMWORD[48+rsp]
+ movaps XMMWORD[48+rsp],xmm0
+ lea rsp,[88+rsp]
+$L$ccm64_enc_ret:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_ccm64_encrypt_blocks:
+global aesni_ccm64_decrypt_blocks
+
+ALIGN 16
+aesni_ccm64_decrypt_blocks:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_ccm64_decrypt_blocks:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD[40+rsp]
+ mov r9,QWORD[48+rsp]
+
+
+
+DB 243,15,30,250
+ lea rsp,[((-88))+rsp]
+ movaps XMMWORD[rsp],xmm6
+ movaps XMMWORD[16+rsp],xmm7
+ movaps XMMWORD[32+rsp],xmm8
+ movaps XMMWORD[48+rsp],xmm9
+$L$ccm64_dec_body:
+ mov eax,DWORD[240+rcx]
+ movups xmm6,XMMWORD[r8]
+ movdqu xmm3,XMMWORD[r9]
+ movdqa xmm9,XMMWORD[$L$increment64]
+ movdqa xmm7,XMMWORD[$L$bswap_mask]
+
+ movaps xmm2,xmm6
+ mov r10d,eax
+ mov r11,rcx
+DB 102,15,56,0,247
+ movups xmm0,XMMWORD[rcx]
+ movups xmm1,XMMWORD[16+rcx]
+ lea rcx,[32+rcx]
+ xorps xmm2,xmm0
+$L$oop_enc1_5:
+DB 102,15,56,220,209
+ dec eax
+ movups xmm1,XMMWORD[rcx]
+ lea rcx,[16+rcx]
+ jnz NEAR $L$oop_enc1_5
+DB 102,15,56,221,209
+ shl r10d,4
+ mov eax,16
+ movups xmm8,XMMWORD[rdi]
+ paddq xmm6,xmm9
+ lea rdi,[16+rdi]
+ sub rax,r10
+ lea rcx,[32+r10*1+r11]
+ mov r10,rax
+ jmp NEAR $L$ccm64_dec_outer
+ALIGN 16
+$L$ccm64_dec_outer:
+ xorps xmm8,xmm2
+ movdqa xmm2,xmm6
+ movups XMMWORD[rsi],xmm8
+ lea rsi,[16+rsi]
+DB 102,15,56,0,215
+
+ sub rdx,1
+ jz NEAR $L$ccm64_dec_break
+
+ movups xmm0,XMMWORD[r11]
+ mov rax,r10
+ movups xmm1,XMMWORD[16+r11]
+ xorps xmm8,xmm0
+ xorps xmm2,xmm0
+ xorps xmm3,xmm8
+ movups xmm0,XMMWORD[32+r11]
+ jmp NEAR $L$ccm64_dec2_loop
+ALIGN 16
+$L$ccm64_dec2_loop:
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+ movups xmm1,XMMWORD[rax*1+rcx]
+ add rax,32
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+ movups xmm0,XMMWORD[((-16))+rax*1+rcx]
+ jnz NEAR $L$ccm64_dec2_loop
+ movups xmm8,XMMWORD[rdi]
+ paddq xmm6,xmm9
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,221,208
+DB 102,15,56,221,216
+ lea rdi,[16+rdi]
+ jmp NEAR $L$ccm64_dec_outer
+
+ALIGN 16
+$L$ccm64_dec_break:
+
+ mov eax,DWORD[240+r11]
+ movups xmm0,XMMWORD[r11]
+ movups xmm1,XMMWORD[16+r11]
+ xorps xmm8,xmm0
+ lea r11,[32+r11]
+ xorps xmm3,xmm8
+$L$oop_enc1_6:
+DB 102,15,56,220,217
+ dec eax
+ movups xmm1,XMMWORD[r11]
+ lea r11,[16+r11]
+ jnz NEAR $L$oop_enc1_6
+DB 102,15,56,221,217
+ pxor xmm0,xmm0
+ pxor xmm1,xmm1
+ pxor xmm2,xmm2
+ movups XMMWORD[r9],xmm3
+ pxor xmm3,xmm3
+ pxor xmm8,xmm8
+ pxor xmm6,xmm6
+ movaps xmm6,XMMWORD[rsp]
+ movaps XMMWORD[rsp],xmm0
+ movaps xmm7,XMMWORD[16+rsp]
+ movaps XMMWORD[16+rsp],xmm0
+ movaps xmm8,XMMWORD[32+rsp]
+ movaps XMMWORD[32+rsp],xmm0
+ movaps xmm9,XMMWORD[48+rsp]
+ movaps XMMWORD[48+rsp],xmm0
+ lea rsp,[88+rsp]
+$L$ccm64_dec_ret:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_ccm64_decrypt_blocks:
+global aesni_ctr32_encrypt_blocks
+
+ALIGN 16
+aesni_ctr32_encrypt_blocks:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_ctr32_encrypt_blocks:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD[40+rsp]
+
+
+
+DB 243,15,30,250
+ cmp rdx,1
+ jne NEAR $L$ctr32_bulk
+
+
+
+ movups xmm2,XMMWORD[r8]
+ movups xmm3,XMMWORD[rdi]
+ mov edx,DWORD[240+rcx]
+ movups xmm0,XMMWORD[rcx]
+ movups xmm1,XMMWORD[16+rcx]
+ lea rcx,[32+rcx]
+ xorps xmm2,xmm0
+$L$oop_enc1_7:
+DB 102,15,56,220,209
+ dec edx
+ movups xmm1,XMMWORD[rcx]
+ lea rcx,[16+rcx]
+ jnz NEAR $L$oop_enc1_7
+DB 102,15,56,221,209
+ pxor xmm0,xmm0
+ pxor xmm1,xmm1
+ xorps xmm2,xmm3
+ pxor xmm3,xmm3
+ movups XMMWORD[rsi],xmm2
+ xorps xmm2,xmm2
+ jmp NEAR $L$ctr32_epilogue
+
+ALIGN 16
+$L$ctr32_bulk:
+ lea r11,[rsp]
+
+ push rbp
+
+ sub rsp,288
+ and rsp,-16
+ movaps XMMWORD[(-168)+r11],xmm6
+ movaps XMMWORD[(-152)+r11],xmm7
+ movaps XMMWORD[(-136)+r11],xmm8
+ movaps XMMWORD[(-120)+r11],xmm9
+ movaps XMMWORD[(-104)+r11],xmm10
+ movaps XMMWORD[(-88)+r11],xmm11
+ movaps XMMWORD[(-72)+r11],xmm12
+ movaps XMMWORD[(-56)+r11],xmm13
+ movaps XMMWORD[(-40)+r11],xmm14
+ movaps XMMWORD[(-24)+r11],xmm15
+$L$ctr32_body:
+
+
+
+
+ movdqu xmm2,XMMWORD[r8]
+ movdqu xmm0,XMMWORD[rcx]
+ mov r8d,DWORD[12+r8]
+ pxor xmm2,xmm0
+ mov ebp,DWORD[12+rcx]
+ movdqa XMMWORD[rsp],xmm2
+ bswap r8d
+ movdqa xmm3,xmm2
+ movdqa xmm4,xmm2
+ movdqa xmm5,xmm2
+ movdqa XMMWORD[64+rsp],xmm2
+ movdqa XMMWORD[80+rsp],xmm2
+ movdqa XMMWORD[96+rsp],xmm2
+ mov r10,rdx
+ movdqa XMMWORD[112+rsp],xmm2
+
+ lea rax,[1+r8]
+ lea rdx,[2+r8]
+ bswap eax
+ bswap edx
+ xor eax,ebp
+ xor edx,ebp
+DB 102,15,58,34,216,3
+ lea rax,[3+r8]
+ movdqa XMMWORD[16+rsp],xmm3
+DB 102,15,58,34,226,3
+ bswap eax
+ mov rdx,r10
+ lea r10,[4+r8]
+ movdqa XMMWORD[32+rsp],xmm4
+ xor eax,ebp
+ bswap r10d
+DB 102,15,58,34,232,3
+ xor r10d,ebp
+ movdqa XMMWORD[48+rsp],xmm5
+ lea r9,[5+r8]
+ mov DWORD[((64+12))+rsp],r10d
+ bswap r9d
+ lea r10,[6+r8]
+ mov eax,DWORD[240+rcx]
+ xor r9d,ebp
+ bswap r10d
+ mov DWORD[((80+12))+rsp],r9d
+ xor r10d,ebp
+ lea r9,[7+r8]
+ mov DWORD[((96+12))+rsp],r10d
+ bswap r9d
+ mov r10d,DWORD[((OPENSSL_ia32cap_P+4))]
+ xor r9d,ebp
+ and r10d,71303168
+ mov DWORD[((112+12))+rsp],r9d
+
+ movups xmm1,XMMWORD[16+rcx]
+
+ movdqa xmm6,XMMWORD[64+rsp]
+ movdqa xmm7,XMMWORD[80+rsp]
+
+ cmp rdx,8
+ jb NEAR $L$ctr32_tail
+
+ sub rdx,6
+ cmp r10d,4194304
+ je NEAR $L$ctr32_6x
+
+ lea rcx,[128+rcx]
+ sub rdx,2
+ jmp NEAR $L$ctr32_loop8
+
+ALIGN 16
+$L$ctr32_6x:
+ shl eax,4
+ mov r10d,48
+ bswap ebp
+ lea rcx,[32+rax*1+rcx]
+ sub r10,rax
+ jmp NEAR $L$ctr32_loop6
+
+ALIGN 16
+$L$ctr32_loop6:
+ add r8d,6
+ movups xmm0,XMMWORD[((-48))+r10*1+rcx]
+DB 102,15,56,220,209
+ mov eax,r8d
+ xor eax,ebp
+DB 102,15,56,220,217
+DB 0x0f,0x38,0xf1,0x44,0x24,12
+ lea eax,[1+r8]
+DB 102,15,56,220,225
+ xor eax,ebp
+DB 0x0f,0x38,0xf1,0x44,0x24,28
+DB 102,15,56,220,233
+ lea eax,[2+r8]
+ xor eax,ebp
+DB 102,15,56,220,241
+DB 0x0f,0x38,0xf1,0x44,0x24,44
+ lea eax,[3+r8]
+DB 102,15,56,220,249
+ movups xmm1,XMMWORD[((-32))+r10*1+rcx]
+ xor eax,ebp
+
+DB 102,15,56,220,208
+DB 0x0f,0x38,0xf1,0x44,0x24,60
+ lea eax,[4+r8]
+DB 102,15,56,220,216
+ xor eax,ebp
+DB 0x0f,0x38,0xf1,0x44,0x24,76
+DB 102,15,56,220,224
+ lea eax,[5+r8]
+ xor eax,ebp
+DB 102,15,56,220,232
+DB 0x0f,0x38,0xf1,0x44,0x24,92
+ mov rax,r10
+DB 102,15,56,220,240
+DB 102,15,56,220,248
+ movups xmm0,XMMWORD[((-16))+r10*1+rcx]
+
+ call $L$enc_loop6
+
+ movdqu xmm8,XMMWORD[rdi]
+ movdqu xmm9,XMMWORD[16+rdi]
+ movdqu xmm10,XMMWORD[32+rdi]
+ movdqu xmm11,XMMWORD[48+rdi]
+ movdqu xmm12,XMMWORD[64+rdi]
+ movdqu xmm13,XMMWORD[80+rdi]
+ lea rdi,[96+rdi]
+ movups xmm1,XMMWORD[((-64))+r10*1+rcx]
+ pxor xmm8,xmm2
+ movaps xmm2,XMMWORD[rsp]
+ pxor xmm9,xmm3
+ movaps xmm3,XMMWORD[16+rsp]
+ pxor xmm10,xmm4
+ movaps xmm4,XMMWORD[32+rsp]
+ pxor xmm11,xmm5
+ movaps xmm5,XMMWORD[48+rsp]
+ pxor xmm12,xmm6
+ movaps xmm6,XMMWORD[64+rsp]
+ pxor xmm13,xmm7
+ movaps xmm7,XMMWORD[80+rsp]
+ movdqu XMMWORD[rsi],xmm8
+ movdqu XMMWORD[16+rsi],xmm9
+ movdqu XMMWORD[32+rsi],xmm10
+ movdqu XMMWORD[48+rsi],xmm11
+ movdqu XMMWORD[64+rsi],xmm12
+ movdqu XMMWORD[80+rsi],xmm13
+ lea rsi,[96+rsi]
+
+ sub rdx,6
+ jnc NEAR $L$ctr32_loop6
+
+ add rdx,6
+ jz NEAR $L$ctr32_done
+
+ lea eax,[((-48))+r10]
+ lea rcx,[((-80))+r10*1+rcx]
+ neg eax
+ shr eax,4
+ jmp NEAR $L$ctr32_tail
+
+ALIGN 32
+$L$ctr32_loop8:
+ add r8d,8
+ movdqa xmm8,XMMWORD[96+rsp]
+DB 102,15,56,220,209
+ mov r9d,r8d
+ movdqa xmm9,XMMWORD[112+rsp]
+DB 102,15,56,220,217
+ bswap r9d
+ movups xmm0,XMMWORD[((32-128))+rcx]
+DB 102,15,56,220,225
+ xor r9d,ebp
+ nop
+DB 102,15,56,220,233
+ mov DWORD[((0+12))+rsp],r9d
+ lea r9,[1+r8]
+DB 102,15,56,220,241
+DB 102,15,56,220,249
+DB 102,68,15,56,220,193
+DB 102,68,15,56,220,201
+ movups xmm1,XMMWORD[((48-128))+rcx]
+ bswap r9d
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+ xor r9d,ebp
+DB 0x66,0x90
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+ mov DWORD[((16+12))+rsp],r9d
+ lea r9,[2+r8]
+DB 102,15,56,220,240
+DB 102,15,56,220,248
+DB 102,68,15,56,220,192
+DB 102,68,15,56,220,200
+ movups xmm0,XMMWORD[((64-128))+rcx]
+ bswap r9d
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+ xor r9d,ebp
+DB 0x66,0x90
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+ mov DWORD[((32+12))+rsp],r9d
+ lea r9,[3+r8]
+DB 102,15,56,220,241
+DB 102,15,56,220,249
+DB 102,68,15,56,220,193
+DB 102,68,15,56,220,201
+ movups xmm1,XMMWORD[((80-128))+rcx]
+ bswap r9d
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+ xor r9d,ebp
+DB 0x66,0x90
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+ mov DWORD[((48+12))+rsp],r9d
+ lea r9,[4+r8]
+DB 102,15,56,220,240
+DB 102,15,56,220,248
+DB 102,68,15,56,220,192
+DB 102,68,15,56,220,200
+ movups xmm0,XMMWORD[((96-128))+rcx]
+ bswap r9d
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+ xor r9d,ebp
+DB 0x66,0x90
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+ mov DWORD[((64+12))+rsp],r9d
+ lea r9,[5+r8]
+DB 102,15,56,220,241
+DB 102,15,56,220,249
+DB 102,68,15,56,220,193
+DB 102,68,15,56,220,201
+ movups xmm1,XMMWORD[((112-128))+rcx]
+ bswap r9d
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+ xor r9d,ebp
+DB 0x66,0x90
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+ mov DWORD[((80+12))+rsp],r9d
+ lea r9,[6+r8]
+DB 102,15,56,220,240
+DB 102,15,56,220,248
+DB 102,68,15,56,220,192
+DB 102,68,15,56,220,200
+ movups xmm0,XMMWORD[((128-128))+rcx]
+ bswap r9d
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+ xor r9d,ebp
+DB 0x66,0x90
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+ mov DWORD[((96+12))+rsp],r9d
+ lea r9,[7+r8]
+DB 102,15,56,220,241
+DB 102,15,56,220,249
+DB 102,68,15,56,220,193
+DB 102,68,15,56,220,201
+ movups xmm1,XMMWORD[((144-128))+rcx]
+ bswap r9d
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+DB 102,15,56,220,224
+ xor r9d,ebp
+ movdqu xmm10,XMMWORD[rdi]
+DB 102,15,56,220,232
+ mov DWORD[((112+12))+rsp],r9d
+ cmp eax,11
+DB 102,15,56,220,240
+DB 102,15,56,220,248
+DB 102,68,15,56,220,192
+DB 102,68,15,56,220,200
+ movups xmm0,XMMWORD[((160-128))+rcx]
+
+ jb NEAR $L$ctr32_enc_done
+
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+DB 102,15,56,220,241
+DB 102,15,56,220,249
+DB 102,68,15,56,220,193
+DB 102,68,15,56,220,201
+ movups xmm1,XMMWORD[((176-128))+rcx]
+
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+DB 102,15,56,220,240
+DB 102,15,56,220,248
+DB 102,68,15,56,220,192
+DB 102,68,15,56,220,200
+ movups xmm0,XMMWORD[((192-128))+rcx]
+ je NEAR $L$ctr32_enc_done
+
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+DB 102,15,56,220,241
+DB 102,15,56,220,249
+DB 102,68,15,56,220,193
+DB 102,68,15,56,220,201
+ movups xmm1,XMMWORD[((208-128))+rcx]
+
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+DB 102,15,56,220,240
+DB 102,15,56,220,248
+DB 102,68,15,56,220,192
+DB 102,68,15,56,220,200
+ movups xmm0,XMMWORD[((224-128))+rcx]
+ jmp NEAR $L$ctr32_enc_done
+
+ALIGN 16
+$L$ctr32_enc_done:
+ movdqu xmm11,XMMWORD[16+rdi]
+ pxor xmm10,xmm0
+ movdqu xmm12,XMMWORD[32+rdi]
+ pxor xmm11,xmm0
+ movdqu xmm13,XMMWORD[48+rdi]
+ pxor xmm12,xmm0
+ movdqu xmm14,XMMWORD[64+rdi]
+ pxor xmm13,xmm0
+ movdqu xmm15,XMMWORD[80+rdi]
+ pxor xmm14,xmm0
+ pxor xmm15,xmm0
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+DB 102,15,56,220,241
+DB 102,15,56,220,249
+DB 102,68,15,56,220,193
+DB 102,68,15,56,220,201
+ movdqu xmm1,XMMWORD[96+rdi]
+ lea rdi,[128+rdi]
+
+DB 102,65,15,56,221,210
+ pxor xmm1,xmm0
+ movdqu xmm10,XMMWORD[((112-128))+rdi]
+DB 102,65,15,56,221,219
+ pxor xmm10,xmm0
+ movdqa xmm11,XMMWORD[rsp]
+DB 102,65,15,56,221,228
+DB 102,65,15,56,221,237
+ movdqa xmm12,XMMWORD[16+rsp]
+ movdqa xmm13,XMMWORD[32+rsp]
+DB 102,65,15,56,221,246
+DB 102,65,15,56,221,255
+ movdqa xmm14,XMMWORD[48+rsp]
+ movdqa xmm15,XMMWORD[64+rsp]
+DB 102,68,15,56,221,193
+ movdqa xmm0,XMMWORD[80+rsp]
+ movups xmm1,XMMWORD[((16-128))+rcx]
+DB 102,69,15,56,221,202
+
+ movups XMMWORD[rsi],xmm2
+ movdqa xmm2,xmm11
+ movups XMMWORD[16+rsi],xmm3
+ movdqa xmm3,xmm12
+ movups XMMWORD[32+rsi],xmm4
+ movdqa xmm4,xmm13
+ movups XMMWORD[48+rsi],xmm5
+ movdqa xmm5,xmm14
+ movups XMMWORD[64+rsi],xmm6
+ movdqa xmm6,xmm15
+ movups XMMWORD[80+rsi],xmm7
+ movdqa xmm7,xmm0
+ movups XMMWORD[96+rsi],xmm8
+ movups XMMWORD[112+rsi],xmm9
+ lea rsi,[128+rsi]
+
+ sub rdx,8
+ jnc NEAR $L$ctr32_loop8
+
+ add rdx,8
+ jz NEAR $L$ctr32_done
+ lea rcx,[((-128))+rcx]
+
+$L$ctr32_tail:
+
+
+ lea rcx,[16+rcx]
+ cmp rdx,4
+ jb NEAR $L$ctr32_loop3
+ je NEAR $L$ctr32_loop4
+
+
+ shl eax,4
+ movdqa xmm8,XMMWORD[96+rsp]
+ pxor xmm9,xmm9
+
+ movups xmm0,XMMWORD[16+rcx]
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+ lea rcx,[((32-16))+rax*1+rcx]
+ neg rax
+DB 102,15,56,220,225
+ add rax,16
+ movups xmm10,XMMWORD[rdi]
+DB 102,15,56,220,233
+DB 102,15,56,220,241
+ movups xmm11,XMMWORD[16+rdi]
+ movups xmm12,XMMWORD[32+rdi]
+DB 102,15,56,220,249
+DB 102,68,15,56,220,193
+
+ call $L$enc_loop8_enter
+
+ movdqu xmm13,XMMWORD[48+rdi]
+ pxor xmm2,xmm10
+ movdqu xmm10,XMMWORD[64+rdi]
+ pxor xmm3,xmm11
+ movdqu XMMWORD[rsi],xmm2
+ pxor xmm4,xmm12
+ movdqu XMMWORD[16+rsi],xmm3
+ pxor xmm5,xmm13
+ movdqu XMMWORD[32+rsi],xmm4
+ pxor xmm6,xmm10
+ movdqu XMMWORD[48+rsi],xmm5
+ movdqu XMMWORD[64+rsi],xmm6
+ cmp rdx,6
+ jb NEAR $L$ctr32_done
+
+ movups xmm11,XMMWORD[80+rdi]
+ xorps xmm7,xmm11
+ movups XMMWORD[80+rsi],xmm7
+ je NEAR $L$ctr32_done
+
+ movups xmm12,XMMWORD[96+rdi]
+ xorps xmm8,xmm12
+ movups XMMWORD[96+rsi],xmm8
+ jmp NEAR $L$ctr32_done
+
+ALIGN 32
+$L$ctr32_loop4:
+DB 102,15,56,220,209
+ lea rcx,[16+rcx]
+ dec eax
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+ movups xmm1,XMMWORD[rcx]
+ jnz NEAR $L$ctr32_loop4
+DB 102,15,56,221,209
+DB 102,15,56,221,217
+ movups xmm10,XMMWORD[rdi]
+ movups xmm11,XMMWORD[16+rdi]
+DB 102,15,56,221,225
+DB 102,15,56,221,233
+ movups xmm12,XMMWORD[32+rdi]
+ movups xmm13,XMMWORD[48+rdi]
+
+ xorps xmm2,xmm10
+ movups XMMWORD[rsi],xmm2
+ xorps xmm3,xmm11
+ movups XMMWORD[16+rsi],xmm3
+ pxor xmm4,xmm12
+ movdqu XMMWORD[32+rsi],xmm4
+ pxor xmm5,xmm13
+ movdqu XMMWORD[48+rsi],xmm5
+ jmp NEAR $L$ctr32_done
+
+ALIGN 32
+$L$ctr32_loop3:
+DB 102,15,56,220,209
+ lea rcx,[16+rcx]
+ dec eax
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+ movups xmm1,XMMWORD[rcx]
+ jnz NEAR $L$ctr32_loop3
+DB 102,15,56,221,209
+DB 102,15,56,221,217
+DB 102,15,56,221,225
+
+ movups xmm10,XMMWORD[rdi]
+ xorps xmm2,xmm10
+ movups XMMWORD[rsi],xmm2
+ cmp rdx,2
+ jb NEAR $L$ctr32_done
+
+ movups xmm11,XMMWORD[16+rdi]
+ xorps xmm3,xmm11
+ movups XMMWORD[16+rsi],xmm3
+ je NEAR $L$ctr32_done
+
+ movups xmm12,XMMWORD[32+rdi]
+ xorps xmm4,xmm12
+ movups XMMWORD[32+rsi],xmm4
+
+$L$ctr32_done:
+ xorps xmm0,xmm0
+ xor ebp,ebp
+ pxor xmm1,xmm1
+ pxor xmm2,xmm2
+ pxor xmm3,xmm3
+ pxor xmm4,xmm4
+ pxor xmm5,xmm5
+ movaps xmm6,XMMWORD[((-168))+r11]
+ movaps XMMWORD[(-168)+r11],xmm0
+ movaps xmm7,XMMWORD[((-152))+r11]
+ movaps XMMWORD[(-152)+r11],xmm0
+ movaps xmm8,XMMWORD[((-136))+r11]
+ movaps XMMWORD[(-136)+r11],xmm0
+ movaps xmm9,XMMWORD[((-120))+r11]
+ movaps XMMWORD[(-120)+r11],xmm0
+ movaps xmm10,XMMWORD[((-104))+r11]
+ movaps XMMWORD[(-104)+r11],xmm0
+ movaps xmm11,XMMWORD[((-88))+r11]
+ movaps XMMWORD[(-88)+r11],xmm0
+ movaps xmm12,XMMWORD[((-72))+r11]
+ movaps XMMWORD[(-72)+r11],xmm0
+ movaps xmm13,XMMWORD[((-56))+r11]
+ movaps XMMWORD[(-56)+r11],xmm0
+ movaps xmm14,XMMWORD[((-40))+r11]
+ movaps XMMWORD[(-40)+r11],xmm0
+ movaps xmm15,XMMWORD[((-24))+r11]
+ movaps XMMWORD[(-24)+r11],xmm0
+ movaps XMMWORD[rsp],xmm0
+ movaps XMMWORD[16+rsp],xmm0
+ movaps XMMWORD[32+rsp],xmm0
+ movaps XMMWORD[48+rsp],xmm0
+ movaps XMMWORD[64+rsp],xmm0
+ movaps XMMWORD[80+rsp],xmm0
+ movaps XMMWORD[96+rsp],xmm0
+ movaps XMMWORD[112+rsp],xmm0
+ mov rbp,QWORD[((-8))+r11]
+
+ lea rsp,[r11]
+
+$L$ctr32_epilogue:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_ctr32_encrypt_blocks:
+global aesni_xts_encrypt
+
+ALIGN 16
+aesni_xts_encrypt:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_xts_encrypt:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD[40+rsp]
+ mov r9,QWORD[48+rsp]
+
+
+
+DB 243,15,30,250
+ lea r11,[rsp]
+
+ push rbp
+
+ sub rsp,272
+ and rsp,-16
+ movaps XMMWORD[(-168)+r11],xmm6
+ movaps XMMWORD[(-152)+r11],xmm7
+ movaps XMMWORD[(-136)+r11],xmm8
+ movaps XMMWORD[(-120)+r11],xmm9
+ movaps XMMWORD[(-104)+r11],xmm10
+ movaps XMMWORD[(-88)+r11],xmm11
+ movaps XMMWORD[(-72)+r11],xmm12
+ movaps XMMWORD[(-56)+r11],xmm13
+ movaps XMMWORD[(-40)+r11],xmm14
+ movaps XMMWORD[(-24)+r11],xmm15
+$L$xts_enc_body:
+ movups xmm2,XMMWORD[r9]
+ mov eax,DWORD[240+r8]
+ mov r10d,DWORD[240+rcx]
+ movups xmm0,XMMWORD[r8]
+ movups xmm1,XMMWORD[16+r8]
+ lea r8,[32+r8]
+ xorps xmm2,xmm0
+$L$oop_enc1_8:
+DB 102,15,56,220,209
+ dec eax
+ movups xmm1,XMMWORD[r8]
+ lea r8,[16+r8]
+ jnz NEAR $L$oop_enc1_8
+DB 102,15,56,221,209
+ movups xmm0,XMMWORD[rcx]
+ mov rbp,rcx
+ mov eax,r10d
+ shl r10d,4
+ mov r9,rdx
+ and rdx,-16
+
+ movups xmm1,XMMWORD[16+r10*1+rcx]
+
+ movdqa xmm8,XMMWORD[$L$xts_magic]
+ movdqa xmm15,xmm2
+ pshufd xmm9,xmm2,0x5f
+ pxor xmm1,xmm0
+ movdqa xmm14,xmm9
+ paddd xmm9,xmm9
+ movdqa xmm10,xmm15
+ psrad xmm14,31
+ paddq xmm15,xmm15
+ pand xmm14,xmm8
+ pxor xmm10,xmm0
+ pxor xmm15,xmm14
+ movdqa xmm14,xmm9
+ paddd xmm9,xmm9
+ movdqa xmm11,xmm15
+ psrad xmm14,31
+ paddq xmm15,xmm15
+ pand xmm14,xmm8
+ pxor xmm11,xmm0
+ pxor xmm15,xmm14
+ movdqa xmm14,xmm9
+ paddd xmm9,xmm9
+ movdqa xmm12,xmm15
+ psrad xmm14,31
+ paddq xmm15,xmm15
+ pand xmm14,xmm8
+ pxor xmm12,xmm0
+ pxor xmm15,xmm14
+ movdqa xmm14,xmm9
+ paddd xmm9,xmm9
+ movdqa xmm13,xmm15
+ psrad xmm14,31
+ paddq xmm15,xmm15
+ pand xmm14,xmm8
+ pxor xmm13,xmm0
+ pxor xmm15,xmm14
+ movdqa xmm14,xmm15
+ psrad xmm9,31
+ paddq xmm15,xmm15
+ pand xmm9,xmm8
+ pxor xmm14,xmm0
+ pxor xmm15,xmm9
+ movaps XMMWORD[96+rsp],xmm1
+
+ sub rdx,16*6
+ jc NEAR $L$xts_enc_short
+
+ mov eax,16+96
+ lea rcx,[32+r10*1+rbp]
+ sub rax,r10
+ movups xmm1,XMMWORD[16+rbp]
+ mov r10,rax
+ lea r8,[$L$xts_magic]
+ jmp NEAR $L$xts_enc_grandloop
+
+ALIGN 32
+$L$xts_enc_grandloop:
+ movdqu xmm2,XMMWORD[rdi]
+ movdqa xmm8,xmm0
+ movdqu xmm3,XMMWORD[16+rdi]
+ pxor xmm2,xmm10
+ movdqu xmm4,XMMWORD[32+rdi]
+ pxor xmm3,xmm11
+DB 102,15,56,220,209
+ movdqu xmm5,XMMWORD[48+rdi]
+ pxor xmm4,xmm12
+DB 102,15,56,220,217
+ movdqu xmm6,XMMWORD[64+rdi]
+ pxor xmm5,xmm13
+DB 102,15,56,220,225
+ movdqu xmm7,XMMWORD[80+rdi]
+ pxor xmm8,xmm15
+ movdqa xmm9,XMMWORD[96+rsp]
+ pxor xmm6,xmm14
+DB 102,15,56,220,233
+ movups xmm0,XMMWORD[32+rbp]
+ lea rdi,[96+rdi]
+ pxor xmm7,xmm8
+
+ pxor xmm10,xmm9
+DB 102,15,56,220,241
+ pxor xmm11,xmm9
+ movdqa XMMWORD[rsp],xmm10
+DB 102,15,56,220,249
+ movups xmm1,XMMWORD[48+rbp]
+ pxor xmm12,xmm9
+
+DB 102,15,56,220,208
+ pxor xmm13,xmm9
+ movdqa XMMWORD[16+rsp],xmm11
+DB 102,15,56,220,216
+ pxor xmm14,xmm9
+ movdqa XMMWORD[32+rsp],xmm12
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+ pxor xmm8,xmm9
+ movdqa XMMWORD[64+rsp],xmm14
+DB 102,15,56,220,240
+DB 102,15,56,220,248
+ movups xmm0,XMMWORD[64+rbp]
+ movdqa XMMWORD[80+rsp],xmm8
+ pshufd xmm9,xmm15,0x5f
+ jmp NEAR $L$xts_enc_loop6
+ALIGN 32
+$L$xts_enc_loop6:
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+DB 102,15,56,220,241
+DB 102,15,56,220,249
+ movups xmm1,XMMWORD[((-64))+rax*1+rcx]
+ add rax,32
+
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+DB 102,15,56,220,240
+DB 102,15,56,220,248
+ movups xmm0,XMMWORD[((-80))+rax*1+rcx]
+ jnz NEAR $L$xts_enc_loop6
+
+ movdqa xmm8,XMMWORD[r8]
+ movdqa xmm14,xmm9
+ paddd xmm9,xmm9
+DB 102,15,56,220,209
+ paddq xmm15,xmm15
+ psrad xmm14,31
+DB 102,15,56,220,217
+ pand xmm14,xmm8
+ movups xmm10,XMMWORD[rbp]
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+DB 102,15,56,220,241
+ pxor xmm15,xmm14
+ movaps xmm11,xmm10
+DB 102,15,56,220,249
+ movups xmm1,XMMWORD[((-64))+rcx]
+
+ movdqa xmm14,xmm9
+DB 102,15,56,220,208
+ paddd xmm9,xmm9
+ pxor xmm10,xmm15
+DB 102,15,56,220,216
+ psrad xmm14,31
+ paddq xmm15,xmm15
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+ pand xmm14,xmm8
+ movaps xmm12,xmm11
+DB 102,15,56,220,240
+ pxor xmm15,xmm14
+ movdqa xmm14,xmm9
+DB 102,15,56,220,248
+ movups xmm0,XMMWORD[((-48))+rcx]
+
+ paddd xmm9,xmm9
+DB 102,15,56,220,209
+ pxor xmm11,xmm15
+ psrad xmm14,31
+DB 102,15,56,220,217
+ paddq xmm15,xmm15
+ pand xmm14,xmm8
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+ movdqa XMMWORD[48+rsp],xmm13
+ pxor xmm15,xmm14
+DB 102,15,56,220,241
+ movaps xmm13,xmm12
+ movdqa xmm14,xmm9
+DB 102,15,56,220,249
+ movups xmm1,XMMWORD[((-32))+rcx]
+
+ paddd xmm9,xmm9
+DB 102,15,56,220,208
+ pxor xmm12,xmm15
+ psrad xmm14,31
+DB 102,15,56,220,216
+ paddq xmm15,xmm15
+ pand xmm14,xmm8
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+DB 102,15,56,220,240
+ pxor xmm15,xmm14
+ movaps xmm14,xmm13
+DB 102,15,56,220,248
+
+ movdqa xmm0,xmm9
+ paddd xmm9,xmm9
+DB 102,15,56,220,209
+ pxor xmm13,xmm15
+ psrad xmm0,31
+DB 102,15,56,220,217
+ paddq xmm15,xmm15
+ pand xmm0,xmm8
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+ pxor xmm15,xmm0
+ movups xmm0,XMMWORD[rbp]
+DB 102,15,56,220,241
+DB 102,15,56,220,249
+ movups xmm1,XMMWORD[16+rbp]
+
+ pxor xmm14,xmm15
+DB 102,15,56,221,84,36,0
+ psrad xmm9,31
+ paddq xmm15,xmm15
+DB 102,15,56,221,92,36,16
+DB 102,15,56,221,100,36,32
+ pand xmm9,xmm8
+ mov rax,r10
+DB 102,15,56,221,108,36,48
+DB 102,15,56,221,116,36,64
+DB 102,15,56,221,124,36,80
+ pxor xmm15,xmm9
+
+ lea rsi,[96+rsi]
+ movups XMMWORD[(-96)+rsi],xmm2
+ movups XMMWORD[(-80)+rsi],xmm3
+ movups XMMWORD[(-64)+rsi],xmm4
+ movups XMMWORD[(-48)+rsi],xmm5
+ movups XMMWORD[(-32)+rsi],xmm6
+ movups XMMWORD[(-16)+rsi],xmm7
+ sub rdx,16*6
+ jnc NEAR $L$xts_enc_grandloop
+
+ mov eax,16+96
+ sub eax,r10d
+ mov rcx,rbp
+ shr eax,4
+
+$L$xts_enc_short:
+
+ mov r10d,eax
+ pxor xmm10,xmm0
+ add rdx,16*6
+ jz NEAR $L$xts_enc_done
+
+ pxor xmm11,xmm0
+ cmp rdx,0x20
+ jb NEAR $L$xts_enc_one
+ pxor xmm12,xmm0
+ je NEAR $L$xts_enc_two
+
+ pxor xmm13,xmm0
+ cmp rdx,0x40
+ jb NEAR $L$xts_enc_three
+ pxor xmm14,xmm0
+ je NEAR $L$xts_enc_four
+
+ movdqu xmm2,XMMWORD[rdi]
+ movdqu xmm3,XMMWORD[16+rdi]
+ movdqu xmm4,XMMWORD[32+rdi]
+ pxor xmm2,xmm10
+ movdqu xmm5,XMMWORD[48+rdi]
+ pxor xmm3,xmm11
+ movdqu xmm6,XMMWORD[64+rdi]
+ lea rdi,[80+rdi]
+ pxor xmm4,xmm12
+ pxor xmm5,xmm13
+ pxor xmm6,xmm14
+ pxor xmm7,xmm7
+
+ call _aesni_encrypt6
+
+ xorps xmm2,xmm10
+ movdqa xmm10,xmm15
+ xorps xmm3,xmm11
+ xorps xmm4,xmm12
+ movdqu XMMWORD[rsi],xmm2
+ xorps xmm5,xmm13
+ movdqu XMMWORD[16+rsi],xmm3
+ xorps xmm6,xmm14
+ movdqu XMMWORD[32+rsi],xmm4
+ movdqu XMMWORD[48+rsi],xmm5
+ movdqu XMMWORD[64+rsi],xmm6
+ lea rsi,[80+rsi]
+ jmp NEAR $L$xts_enc_done
+
+ALIGN 16
+$L$xts_enc_one:
+ movups xmm2,XMMWORD[rdi]
+ lea rdi,[16+rdi]
+ xorps xmm2,xmm10
+ movups xmm0,XMMWORD[rcx]
+ movups xmm1,XMMWORD[16+rcx]
+ lea rcx,[32+rcx]
+ xorps xmm2,xmm0
+$L$oop_enc1_9:
+DB 102,15,56,220,209
+ dec eax
+ movups xmm1,XMMWORD[rcx]
+ lea rcx,[16+rcx]
+ jnz NEAR $L$oop_enc1_9
+DB 102,15,56,221,209
+ xorps xmm2,xmm10
+ movdqa xmm10,xmm11
+ movups XMMWORD[rsi],xmm2
+ lea rsi,[16+rsi]
+ jmp NEAR $L$xts_enc_done
+
+ALIGN 16
+$L$xts_enc_two:
+ movups xmm2,XMMWORD[rdi]
+ movups xmm3,XMMWORD[16+rdi]
+ lea rdi,[32+rdi]
+ xorps xmm2,xmm10
+ xorps xmm3,xmm11
+
+ call _aesni_encrypt2
+
+ xorps xmm2,xmm10
+ movdqa xmm10,xmm12
+ xorps xmm3,xmm11
+ movups XMMWORD[rsi],xmm2
+ movups XMMWORD[16+rsi],xmm3
+ lea rsi,[32+rsi]
+ jmp NEAR $L$xts_enc_done
+
+ALIGN 16
+$L$xts_enc_three:
+ movups xmm2,XMMWORD[rdi]
+ movups xmm3,XMMWORD[16+rdi]
+ movups xmm4,XMMWORD[32+rdi]
+ lea rdi,[48+rdi]
+ xorps xmm2,xmm10
+ xorps xmm3,xmm11
+ xorps xmm4,xmm12
+
+ call _aesni_encrypt3
+
+ xorps xmm2,xmm10
+ movdqa xmm10,xmm13
+ xorps xmm3,xmm11
+ xorps xmm4,xmm12
+ movups XMMWORD[rsi],xmm2
+ movups XMMWORD[16+rsi],xmm3
+ movups XMMWORD[32+rsi],xmm4
+ lea rsi,[48+rsi]
+ jmp NEAR $L$xts_enc_done
+
+ALIGN 16
+$L$xts_enc_four:
+ movups xmm2,XMMWORD[rdi]
+ movups xmm3,XMMWORD[16+rdi]
+ movups xmm4,XMMWORD[32+rdi]
+ xorps xmm2,xmm10
+ movups xmm5,XMMWORD[48+rdi]
+ lea rdi,[64+rdi]
+ xorps xmm3,xmm11
+ xorps xmm4,xmm12
+ xorps xmm5,xmm13
+
+ call _aesni_encrypt4
+
+ pxor xmm2,xmm10
+ movdqa xmm10,xmm14
+ pxor xmm3,xmm11
+ pxor xmm4,xmm12
+ movdqu XMMWORD[rsi],xmm2
+ pxor xmm5,xmm13
+ movdqu XMMWORD[16+rsi],xmm3
+ movdqu XMMWORD[32+rsi],xmm4
+ movdqu XMMWORD[48+rsi],xmm5
+ lea rsi,[64+rsi]
+ jmp NEAR $L$xts_enc_done
+
+ALIGN 16
+$L$xts_enc_done:
+ and r9,15
+ jz NEAR $L$xts_enc_ret
+ mov rdx,r9
+
+$L$xts_enc_steal:
+ movzx eax,BYTE[rdi]
+ movzx ecx,BYTE[((-16))+rsi]
+ lea rdi,[1+rdi]
+ mov BYTE[((-16))+rsi],al
+ mov BYTE[rsi],cl
+ lea rsi,[1+rsi]
+ sub rdx,1
+ jnz NEAR $L$xts_enc_steal
+
+ sub rsi,r9
+ mov rcx,rbp
+ mov eax,r10d
+
+ movups xmm2,XMMWORD[((-16))+rsi]
+ xorps xmm2,xmm10
+ movups xmm0,XMMWORD[rcx]
+ movups xmm1,XMMWORD[16+rcx]
+ lea rcx,[32+rcx]
+ xorps xmm2,xmm0
+$L$oop_enc1_10:
+DB 102,15,56,220,209
+ dec eax
+ movups xmm1,XMMWORD[rcx]
+ lea rcx,[16+rcx]
+ jnz NEAR $L$oop_enc1_10
+DB 102,15,56,221,209
+ xorps xmm2,xmm10
+ movups XMMWORD[(-16)+rsi],xmm2
+
+$L$xts_enc_ret:
+ xorps xmm0,xmm0
+ pxor xmm1,xmm1
+ pxor xmm2,xmm2
+ pxor xmm3,xmm3
+ pxor xmm4,xmm4
+ pxor xmm5,xmm5
+ movaps xmm6,XMMWORD[((-168))+r11]
+ movaps XMMWORD[(-168)+r11],xmm0
+ movaps xmm7,XMMWORD[((-152))+r11]
+ movaps XMMWORD[(-152)+r11],xmm0
+ movaps xmm8,XMMWORD[((-136))+r11]
+ movaps XMMWORD[(-136)+r11],xmm0
+ movaps xmm9,XMMWORD[((-120))+r11]
+ movaps XMMWORD[(-120)+r11],xmm0
+ movaps xmm10,XMMWORD[((-104))+r11]
+ movaps XMMWORD[(-104)+r11],xmm0
+ movaps xmm11,XMMWORD[((-88))+r11]
+ movaps XMMWORD[(-88)+r11],xmm0
+ movaps xmm12,XMMWORD[((-72))+r11]
+ movaps XMMWORD[(-72)+r11],xmm0
+ movaps xmm13,XMMWORD[((-56))+r11]
+ movaps XMMWORD[(-56)+r11],xmm0
+ movaps xmm14,XMMWORD[((-40))+r11]
+ movaps XMMWORD[(-40)+r11],xmm0
+ movaps xmm15,XMMWORD[((-24))+r11]
+ movaps XMMWORD[(-24)+r11],xmm0
+ movaps XMMWORD[rsp],xmm0
+ movaps XMMWORD[16+rsp],xmm0
+ movaps XMMWORD[32+rsp],xmm0
+ movaps XMMWORD[48+rsp],xmm0
+ movaps XMMWORD[64+rsp],xmm0
+ movaps XMMWORD[80+rsp],xmm0
+ movaps XMMWORD[96+rsp],xmm0
+ mov rbp,QWORD[((-8))+r11]
+
+ lea rsp,[r11]
+
+$L$xts_enc_epilogue:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_xts_encrypt:
+global aesni_xts_decrypt
+
+ALIGN 16
+aesni_xts_decrypt:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_xts_decrypt:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD[40+rsp]
+ mov r9,QWORD[48+rsp]
+
+
+
+DB 243,15,30,250
+ lea r11,[rsp]
+
+ push rbp
+
+ sub rsp,272
+ and rsp,-16
+ movaps XMMWORD[(-168)+r11],xmm6
+ movaps XMMWORD[(-152)+r11],xmm7
+ movaps XMMWORD[(-136)+r11],xmm8
+ movaps XMMWORD[(-120)+r11],xmm9
+ movaps XMMWORD[(-104)+r11],xmm10
+ movaps XMMWORD[(-88)+r11],xmm11
+ movaps XMMWORD[(-72)+r11],xmm12
+ movaps XMMWORD[(-56)+r11],xmm13
+ movaps XMMWORD[(-40)+r11],xmm14
+ movaps XMMWORD[(-24)+r11],xmm15
+$L$xts_dec_body:
+ movups xmm2,XMMWORD[r9]
+ mov eax,DWORD[240+r8]
+ mov r10d,DWORD[240+rcx]
+ movups xmm0,XMMWORD[r8]
+ movups xmm1,XMMWORD[16+r8]
+ lea r8,[32+r8]
+ xorps xmm2,xmm0
+$L$oop_enc1_11:
+DB 102,15,56,220,209
+ dec eax
+ movups xmm1,XMMWORD[r8]
+ lea r8,[16+r8]
+ jnz NEAR $L$oop_enc1_11
+DB 102,15,56,221,209
+ xor eax,eax
+ test rdx,15
+ setnz al
+ shl rax,4
+ sub rdx,rax
+
+ movups xmm0,XMMWORD[rcx]
+ mov rbp,rcx
+ mov eax,r10d
+ shl r10d,4
+ mov r9,rdx
+ and rdx,-16
+
+ movups xmm1,XMMWORD[16+r10*1+rcx]
+
+ movdqa xmm8,XMMWORD[$L$xts_magic]
+ movdqa xmm15,xmm2
+ pshufd xmm9,xmm2,0x5f
+ pxor xmm1,xmm0
+ movdqa xmm14,xmm9
+ paddd xmm9,xmm9
+ movdqa xmm10,xmm15
+ psrad xmm14,31
+ paddq xmm15,xmm15
+ pand xmm14,xmm8
+ pxor xmm10,xmm0
+ pxor xmm15,xmm14
+ movdqa xmm14,xmm9
+ paddd xmm9,xmm9
+ movdqa xmm11,xmm15
+ psrad xmm14,31
+ paddq xmm15,xmm15
+ pand xmm14,xmm8
+ pxor xmm11,xmm0
+ pxor xmm15,xmm14
+ movdqa xmm14,xmm9
+ paddd xmm9,xmm9
+ movdqa xmm12,xmm15
+ psrad xmm14,31
+ paddq xmm15,xmm15
+ pand xmm14,xmm8
+ pxor xmm12,xmm0
+ pxor xmm15,xmm14
+ movdqa xmm14,xmm9
+ paddd xmm9,xmm9
+ movdqa xmm13,xmm15
+ psrad xmm14,31
+ paddq xmm15,xmm15
+ pand xmm14,xmm8
+ pxor xmm13,xmm0
+ pxor xmm15,xmm14
+ movdqa xmm14,xmm15
+ psrad xmm9,31
+ paddq xmm15,xmm15
+ pand xmm9,xmm8
+ pxor xmm14,xmm0
+ pxor xmm15,xmm9
+ movaps XMMWORD[96+rsp],xmm1
+
+ sub rdx,16*6
+ jc NEAR $L$xts_dec_short
+
+ mov eax,16+96
+ lea rcx,[32+r10*1+rbp]
+ sub rax,r10
+ movups xmm1,XMMWORD[16+rbp]
+ mov r10,rax
+ lea r8,[$L$xts_magic]
+ jmp NEAR $L$xts_dec_grandloop
+
+ALIGN 32
+$L$xts_dec_grandloop:
+ movdqu xmm2,XMMWORD[rdi]
+ movdqa xmm8,xmm0
+ movdqu xmm3,XMMWORD[16+rdi]
+ pxor xmm2,xmm10
+ movdqu xmm4,XMMWORD[32+rdi]
+ pxor xmm3,xmm11
+DB 102,15,56,222,209
+ movdqu xmm5,XMMWORD[48+rdi]
+ pxor xmm4,xmm12
+DB 102,15,56,222,217
+ movdqu xmm6,XMMWORD[64+rdi]
+ pxor xmm5,xmm13
+DB 102,15,56,222,225
+ movdqu xmm7,XMMWORD[80+rdi]
+ pxor xmm8,xmm15
+ movdqa xmm9,XMMWORD[96+rsp]
+ pxor xmm6,xmm14
+DB 102,15,56,222,233
+ movups xmm0,XMMWORD[32+rbp]
+ lea rdi,[96+rdi]
+ pxor xmm7,xmm8
+
+ pxor xmm10,xmm9
+DB 102,15,56,222,241
+ pxor xmm11,xmm9
+ movdqa XMMWORD[rsp],xmm10
+DB 102,15,56,222,249
+ movups xmm1,XMMWORD[48+rbp]
+ pxor xmm12,xmm9
+
+DB 102,15,56,222,208
+ pxor xmm13,xmm9
+ movdqa XMMWORD[16+rsp],xmm11
+DB 102,15,56,222,216
+ pxor xmm14,xmm9
+ movdqa XMMWORD[32+rsp],xmm12
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+ pxor xmm8,xmm9
+ movdqa XMMWORD[64+rsp],xmm14
+DB 102,15,56,222,240
+DB 102,15,56,222,248
+ movups xmm0,XMMWORD[64+rbp]
+ movdqa XMMWORD[80+rsp],xmm8
+ pshufd xmm9,xmm15,0x5f
+ jmp NEAR $L$xts_dec_loop6
+ALIGN 32
+$L$xts_dec_loop6:
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+DB 102,15,56,222,241
+DB 102,15,56,222,249
+ movups xmm1,XMMWORD[((-64))+rax*1+rcx]
+ add rax,32
+
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+DB 102,15,56,222,240
+DB 102,15,56,222,248
+ movups xmm0,XMMWORD[((-80))+rax*1+rcx]
+ jnz NEAR $L$xts_dec_loop6
+
+ movdqa xmm8,XMMWORD[r8]
+ movdqa xmm14,xmm9
+ paddd xmm9,xmm9
+DB 102,15,56,222,209
+ paddq xmm15,xmm15
+ psrad xmm14,31
+DB 102,15,56,222,217
+ pand xmm14,xmm8
+ movups xmm10,XMMWORD[rbp]
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+DB 102,15,56,222,241
+ pxor xmm15,xmm14
+ movaps xmm11,xmm10
+DB 102,15,56,222,249
+ movups xmm1,XMMWORD[((-64))+rcx]
+
+ movdqa xmm14,xmm9
+DB 102,15,56,222,208
+ paddd xmm9,xmm9
+ pxor xmm10,xmm15
+DB 102,15,56,222,216
+ psrad xmm14,31
+ paddq xmm15,xmm15
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+ pand xmm14,xmm8
+ movaps xmm12,xmm11
+DB 102,15,56,222,240
+ pxor xmm15,xmm14
+ movdqa xmm14,xmm9
+DB 102,15,56,222,248
+ movups xmm0,XMMWORD[((-48))+rcx]
+
+ paddd xmm9,xmm9
+DB 102,15,56,222,209
+ pxor xmm11,xmm15
+ psrad xmm14,31
+DB 102,15,56,222,217
+ paddq xmm15,xmm15
+ pand xmm14,xmm8
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+ movdqa XMMWORD[48+rsp],xmm13
+ pxor xmm15,xmm14
+DB 102,15,56,222,241
+ movaps xmm13,xmm12
+ movdqa xmm14,xmm9
+DB 102,15,56,222,249
+ movups xmm1,XMMWORD[((-32))+rcx]
+
+ paddd xmm9,xmm9
+DB 102,15,56,222,208
+ pxor xmm12,xmm15
+ psrad xmm14,31
+DB 102,15,56,222,216
+ paddq xmm15,xmm15
+ pand xmm14,xmm8
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+DB 102,15,56,222,240
+ pxor xmm15,xmm14
+ movaps xmm14,xmm13
+DB 102,15,56,222,248
+
+ movdqa xmm0,xmm9
+ paddd xmm9,xmm9
+DB 102,15,56,222,209
+ pxor xmm13,xmm15
+ psrad xmm0,31
+DB 102,15,56,222,217
+ paddq xmm15,xmm15
+ pand xmm0,xmm8
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+ pxor xmm15,xmm0
+ movups xmm0,XMMWORD[rbp]
+DB 102,15,56,222,241
+DB 102,15,56,222,249
+ movups xmm1,XMMWORD[16+rbp]
+
+ pxor xmm14,xmm15
+DB 102,15,56,223,84,36,0
+ psrad xmm9,31
+ paddq xmm15,xmm15
+DB 102,15,56,223,92,36,16
+DB 102,15,56,223,100,36,32
+ pand xmm9,xmm8
+ mov rax,r10
+DB 102,15,56,223,108,36,48
+DB 102,15,56,223,116,36,64
+DB 102,15,56,223,124,36,80
+ pxor xmm15,xmm9
+
+ lea rsi,[96+rsi]
+ movups XMMWORD[(-96)+rsi],xmm2
+ movups XMMWORD[(-80)+rsi],xmm3
+ movups XMMWORD[(-64)+rsi],xmm4
+ movups XMMWORD[(-48)+rsi],xmm5
+ movups XMMWORD[(-32)+rsi],xmm6
+ movups XMMWORD[(-16)+rsi],xmm7
+ sub rdx,16*6
+ jnc NEAR $L$xts_dec_grandloop
+
+ mov eax,16+96
+ sub eax,r10d
+ mov rcx,rbp
+ shr eax,4
+
+$L$xts_dec_short:
+
+ mov r10d,eax
+ pxor xmm10,xmm0
+ pxor xmm11,xmm0
+ add rdx,16*6
+ jz NEAR $L$xts_dec_done
+
+ pxor xmm12,xmm0
+ cmp rdx,0x20
+ jb NEAR $L$xts_dec_one
+ pxor xmm13,xmm0
+ je NEAR $L$xts_dec_two
+
+ pxor xmm14,xmm0
+ cmp rdx,0x40
+ jb NEAR $L$xts_dec_three
+ je NEAR $L$xts_dec_four
+
+ movdqu xmm2,XMMWORD[rdi]
+ movdqu xmm3,XMMWORD[16+rdi]
+ movdqu xmm4,XMMWORD[32+rdi]
+ pxor xmm2,xmm10
+ movdqu xmm5,XMMWORD[48+rdi]
+ pxor xmm3,xmm11
+ movdqu xmm6,XMMWORD[64+rdi]
+ lea rdi,[80+rdi]
+ pxor xmm4,xmm12
+ pxor xmm5,xmm13
+ pxor xmm6,xmm14
+
+ call _aesni_decrypt6
+
+ xorps xmm2,xmm10
+ xorps xmm3,xmm11
+ xorps xmm4,xmm12
+ movdqu XMMWORD[rsi],xmm2
+ xorps xmm5,xmm13
+ movdqu XMMWORD[16+rsi],xmm3
+ xorps xmm6,xmm14
+ movdqu XMMWORD[32+rsi],xmm4
+ pxor xmm14,xmm14
+ movdqu XMMWORD[48+rsi],xmm5
+ pcmpgtd xmm14,xmm15
+ movdqu XMMWORD[64+rsi],xmm6
+ lea rsi,[80+rsi]
+ pshufd xmm11,xmm14,0x13
+ and r9,15
+ jz NEAR $L$xts_dec_ret
+
+ movdqa xmm10,xmm15
+ paddq xmm15,xmm15
+ pand xmm11,xmm8
+ pxor xmm11,xmm15
+ jmp NEAR $L$xts_dec_done2
+
+ALIGN 16
+$L$xts_dec_one:
+ movups xmm2,XMMWORD[rdi]
+ lea rdi,[16+rdi]
+ xorps xmm2,xmm10
+ movups xmm0,XMMWORD[rcx]
+ movups xmm1,XMMWORD[16+rcx]
+ lea rcx,[32+rcx]
+ xorps xmm2,xmm0
+$L$oop_dec1_12:
+DB 102,15,56,222,209
+ dec eax
+ movups xmm1,XMMWORD[rcx]
+ lea rcx,[16+rcx]
+ jnz NEAR $L$oop_dec1_12
+DB 102,15,56,223,209
+ xorps xmm2,xmm10
+ movdqa xmm10,xmm11
+ movups XMMWORD[rsi],xmm2
+ movdqa xmm11,xmm12
+ lea rsi,[16+rsi]
+ jmp NEAR $L$xts_dec_done
+
+ALIGN 16
+$L$xts_dec_two:
+ movups xmm2,XMMWORD[rdi]
+ movups xmm3,XMMWORD[16+rdi]
+ lea rdi,[32+rdi]
+ xorps xmm2,xmm10
+ xorps xmm3,xmm11
+
+ call _aesni_decrypt2
+
+ xorps xmm2,xmm10
+ movdqa xmm10,xmm12
+ xorps xmm3,xmm11
+ movdqa xmm11,xmm13
+ movups XMMWORD[rsi],xmm2
+ movups XMMWORD[16+rsi],xmm3
+ lea rsi,[32+rsi]
+ jmp NEAR $L$xts_dec_done
+
+ALIGN 16
+$L$xts_dec_three:
+ movups xmm2,XMMWORD[rdi]
+ movups xmm3,XMMWORD[16+rdi]
+ movups xmm4,XMMWORD[32+rdi]
+ lea rdi,[48+rdi]
+ xorps xmm2,xmm10
+ xorps xmm3,xmm11
+ xorps xmm4,xmm12
+
+ call _aesni_decrypt3
+
+ xorps xmm2,xmm10
+ movdqa xmm10,xmm13
+ xorps xmm3,xmm11
+ movdqa xmm11,xmm14
+ xorps xmm4,xmm12
+ movups XMMWORD[rsi],xmm2
+ movups XMMWORD[16+rsi],xmm3
+ movups XMMWORD[32+rsi],xmm4
+ lea rsi,[48+rsi]
+ jmp NEAR $L$xts_dec_done
+
+ALIGN 16
+$L$xts_dec_four:
+ movups xmm2,XMMWORD[rdi]
+ movups xmm3,XMMWORD[16+rdi]
+ movups xmm4,XMMWORD[32+rdi]
+ xorps xmm2,xmm10
+ movups xmm5,XMMWORD[48+rdi]
+ lea rdi,[64+rdi]
+ xorps xmm3,xmm11
+ xorps xmm4,xmm12
+ xorps xmm5,xmm13
+
+ call _aesni_decrypt4
+
+ pxor xmm2,xmm10
+ movdqa xmm10,xmm14
+ pxor xmm3,xmm11
+ movdqa xmm11,xmm15
+ pxor xmm4,xmm12
+ movdqu XMMWORD[rsi],xmm2
+ pxor xmm5,xmm13
+ movdqu XMMWORD[16+rsi],xmm3
+ movdqu XMMWORD[32+rsi],xmm4
+ movdqu XMMWORD[48+rsi],xmm5
+ lea rsi,[64+rsi]
+ jmp NEAR $L$xts_dec_done
+
+ALIGN 16
+$L$xts_dec_done:
+ and r9,15
+ jz NEAR $L$xts_dec_ret
+$L$xts_dec_done2:
+ mov rdx,r9
+ mov rcx,rbp
+ mov eax,r10d
+
+ movups xmm2,XMMWORD[rdi]
+ xorps xmm2,xmm11
+ movups xmm0,XMMWORD[rcx]
+ movups xmm1,XMMWORD[16+rcx]
+ lea rcx,[32+rcx]
+ xorps xmm2,xmm0
+$L$oop_dec1_13:
+DB 102,15,56,222,209
+ dec eax
+ movups xmm1,XMMWORD[rcx]
+ lea rcx,[16+rcx]
+ jnz NEAR $L$oop_dec1_13
+DB 102,15,56,223,209
+ xorps xmm2,xmm11
+ movups XMMWORD[rsi],xmm2
+
+$L$xts_dec_steal:
+ movzx eax,BYTE[16+rdi]
+ movzx ecx,BYTE[rsi]
+ lea rdi,[1+rdi]
+ mov BYTE[rsi],al
+ mov BYTE[16+rsi],cl
+ lea rsi,[1+rsi]
+ sub rdx,1
+ jnz NEAR $L$xts_dec_steal
+
+ sub rsi,r9
+ mov rcx,rbp
+ mov eax,r10d
+
+ movups xmm2,XMMWORD[rsi]
+ xorps xmm2,xmm10
+ movups xmm0,XMMWORD[rcx]
+ movups xmm1,XMMWORD[16+rcx]
+ lea rcx,[32+rcx]
+ xorps xmm2,xmm0
+$L$oop_dec1_14:
+DB 102,15,56,222,209
+ dec eax
+ movups xmm1,XMMWORD[rcx]
+ lea rcx,[16+rcx]
+ jnz NEAR $L$oop_dec1_14
+DB 102,15,56,223,209
+ xorps xmm2,xmm10
+ movups XMMWORD[rsi],xmm2
+
+$L$xts_dec_ret:
+ xorps xmm0,xmm0
+ pxor xmm1,xmm1
+ pxor xmm2,xmm2
+ pxor xmm3,xmm3
+ pxor xmm4,xmm4
+ pxor xmm5,xmm5
+ movaps xmm6,XMMWORD[((-168))+r11]
+ movaps XMMWORD[(-168)+r11],xmm0
+ movaps xmm7,XMMWORD[((-152))+r11]
+ movaps XMMWORD[(-152)+r11],xmm0
+ movaps xmm8,XMMWORD[((-136))+r11]
+ movaps XMMWORD[(-136)+r11],xmm0
+ movaps xmm9,XMMWORD[((-120))+r11]
+ movaps XMMWORD[(-120)+r11],xmm0
+ movaps xmm10,XMMWORD[((-104))+r11]
+ movaps XMMWORD[(-104)+r11],xmm0
+ movaps xmm11,XMMWORD[((-88))+r11]
+ movaps XMMWORD[(-88)+r11],xmm0
+ movaps xmm12,XMMWORD[((-72))+r11]
+ movaps XMMWORD[(-72)+r11],xmm0
+ movaps xmm13,XMMWORD[((-56))+r11]
+ movaps XMMWORD[(-56)+r11],xmm0
+ movaps xmm14,XMMWORD[((-40))+r11]
+ movaps XMMWORD[(-40)+r11],xmm0
+ movaps xmm15,XMMWORD[((-24))+r11]
+ movaps XMMWORD[(-24)+r11],xmm0
+ movaps XMMWORD[rsp],xmm0
+ movaps XMMWORD[16+rsp],xmm0
+ movaps XMMWORD[32+rsp],xmm0
+ movaps XMMWORD[48+rsp],xmm0
+ movaps XMMWORD[64+rsp],xmm0
+ movaps XMMWORD[80+rsp],xmm0
+ movaps XMMWORD[96+rsp],xmm0
+ mov rbp,QWORD[((-8))+r11]
+
+ lea rsp,[r11]
+
+$L$xts_dec_epilogue:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_xts_decrypt:
+global aesni_ocb_encrypt
+
+ALIGN 32
+aesni_ocb_encrypt:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_ocb_encrypt:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD[40+rsp]
+ mov r9,QWORD[48+rsp]
+
+
+
+DB 243,15,30,250
+ lea rax,[rsp]
+ push rbx
+
+ push rbp
+
+ push r12
+
+ push r13
+
+ push r14
+
+ lea rsp,[((-160))+rsp]
+ movaps XMMWORD[rsp],xmm6
+ movaps XMMWORD[16+rsp],xmm7
+ movaps XMMWORD[32+rsp],xmm8
+ movaps XMMWORD[48+rsp],xmm9
+ movaps XMMWORD[64+rsp],xmm10
+ movaps XMMWORD[80+rsp],xmm11
+ movaps XMMWORD[96+rsp],xmm12
+ movaps XMMWORD[112+rsp],xmm13
+ movaps XMMWORD[128+rsp],xmm14
+ movaps XMMWORD[144+rsp],xmm15
+$L$ocb_enc_body:
+ mov rbx,QWORD[56+rax]
+ mov rbp,QWORD[((56+8))+rax]
+
+ mov r10d,DWORD[240+rcx]
+ mov r11,rcx
+ shl r10d,4
+ movups xmm9,XMMWORD[rcx]
+ movups xmm1,XMMWORD[16+r10*1+rcx]
+
+ movdqu xmm15,XMMWORD[r9]
+ pxor xmm9,xmm1
+ pxor xmm15,xmm1
+
+ mov eax,16+32
+ lea rcx,[32+r10*1+r11]
+ movups xmm1,XMMWORD[16+r11]
+ sub rax,r10
+ mov r10,rax
+
+ movdqu xmm10,XMMWORD[rbx]
+ movdqu xmm8,XMMWORD[rbp]
+
+ test r8,1
+ jnz NEAR $L$ocb_enc_odd
+
+ bsf r12,r8
+ add r8,1
+ shl r12,4
+ movdqu xmm7,XMMWORD[r12*1+rbx]
+ movdqu xmm2,XMMWORD[rdi]
+ lea rdi,[16+rdi]
+
+ call __ocb_encrypt1
+
+ movdqa xmm15,xmm7
+ movups XMMWORD[rsi],xmm2
+ lea rsi,[16+rsi]
+ sub rdx,1
+ jz NEAR $L$ocb_enc_done
+
+$L$ocb_enc_odd:
+ lea r12,[1+r8]
+ lea r13,[3+r8]
+ lea r14,[5+r8]
+ lea r8,[6+r8]
+ bsf r12,r12
+ bsf r13,r13
+ bsf r14,r14
+ shl r12,4
+ shl r13,4
+ shl r14,4
+
+ sub rdx,6
+ jc NEAR $L$ocb_enc_short
+ jmp NEAR $L$ocb_enc_grandloop
+
+ALIGN 32
+$L$ocb_enc_grandloop:
+ movdqu xmm2,XMMWORD[rdi]
+ movdqu xmm3,XMMWORD[16+rdi]
+ movdqu xmm4,XMMWORD[32+rdi]
+ movdqu xmm5,XMMWORD[48+rdi]
+ movdqu xmm6,XMMWORD[64+rdi]
+ movdqu xmm7,XMMWORD[80+rdi]
+ lea rdi,[96+rdi]
+
+ call __ocb_encrypt6
+
+ movups XMMWORD[rsi],xmm2
+ movups XMMWORD[16+rsi],xmm3
+ movups XMMWORD[32+rsi],xmm4
+ movups XMMWORD[48+rsi],xmm5
+ movups XMMWORD[64+rsi],xmm6
+ movups XMMWORD[80+rsi],xmm7
+ lea rsi,[96+rsi]
+ sub rdx,6
+ jnc NEAR $L$ocb_enc_grandloop
+
+$L$ocb_enc_short:
+ add rdx,6
+ jz NEAR $L$ocb_enc_done
+
+ movdqu xmm2,XMMWORD[rdi]
+ cmp rdx,2
+ jb NEAR $L$ocb_enc_one
+ movdqu xmm3,XMMWORD[16+rdi]
+ je NEAR $L$ocb_enc_two
+
+ movdqu xmm4,XMMWORD[32+rdi]
+ cmp rdx,4
+ jb NEAR $L$ocb_enc_three
+ movdqu xmm5,XMMWORD[48+rdi]
+ je NEAR $L$ocb_enc_four
+
+ movdqu xmm6,XMMWORD[64+rdi]
+ pxor xmm7,xmm7
+
+ call __ocb_encrypt6
+
+ movdqa xmm15,xmm14
+ movups XMMWORD[rsi],xmm2
+ movups XMMWORD[16+rsi],xmm3
+ movups XMMWORD[32+rsi],xmm4
+ movups XMMWORD[48+rsi],xmm5
+ movups XMMWORD[64+rsi],xmm6
+
+ jmp NEAR $L$ocb_enc_done
+
+ALIGN 16
+$L$ocb_enc_one:
+ movdqa xmm7,xmm10
+
+ call __ocb_encrypt1
+
+ movdqa xmm15,xmm7
+ movups XMMWORD[rsi],xmm2
+ jmp NEAR $L$ocb_enc_done
+
+ALIGN 16
+$L$ocb_enc_two:
+ pxor xmm4,xmm4
+ pxor xmm5,xmm5
+
+ call __ocb_encrypt4
+
+ movdqa xmm15,xmm11
+ movups XMMWORD[rsi],xmm2
+ movups XMMWORD[16+rsi],xmm3
+
+ jmp NEAR $L$ocb_enc_done
+
+ALIGN 16
+$L$ocb_enc_three:
+ pxor xmm5,xmm5
+
+ call __ocb_encrypt4
+
+ movdqa xmm15,xmm12
+ movups XMMWORD[rsi],xmm2
+ movups XMMWORD[16+rsi],xmm3
+ movups XMMWORD[32+rsi],xmm4
+
+ jmp NEAR $L$ocb_enc_done
+
+ALIGN 16
+$L$ocb_enc_four:
+ call __ocb_encrypt4
+
+ movdqa xmm15,xmm13
+ movups XMMWORD[rsi],xmm2
+ movups XMMWORD[16+rsi],xmm3
+ movups XMMWORD[32+rsi],xmm4
+ movups XMMWORD[48+rsi],xmm5
+
+$L$ocb_enc_done:
+ pxor xmm15,xmm0
+ movdqu XMMWORD[rbp],xmm8
+ movdqu XMMWORD[r9],xmm15
+
+ xorps xmm0,xmm0
+ pxor xmm1,xmm1
+ pxor xmm2,xmm2
+ pxor xmm3,xmm3
+ pxor xmm4,xmm4
+ pxor xmm5,xmm5
+ movaps xmm6,XMMWORD[rsp]
+ movaps XMMWORD[rsp],xmm0
+ movaps xmm7,XMMWORD[16+rsp]
+ movaps XMMWORD[16+rsp],xmm0
+ movaps xmm8,XMMWORD[32+rsp]
+ movaps XMMWORD[32+rsp],xmm0
+ movaps xmm9,XMMWORD[48+rsp]
+ movaps XMMWORD[48+rsp],xmm0
+ movaps xmm10,XMMWORD[64+rsp]
+ movaps XMMWORD[64+rsp],xmm0
+ movaps xmm11,XMMWORD[80+rsp]
+ movaps XMMWORD[80+rsp],xmm0
+ movaps xmm12,XMMWORD[96+rsp]
+ movaps XMMWORD[96+rsp],xmm0
+ movaps xmm13,XMMWORD[112+rsp]
+ movaps XMMWORD[112+rsp],xmm0
+ movaps xmm14,XMMWORD[128+rsp]
+ movaps XMMWORD[128+rsp],xmm0
+ movaps xmm15,XMMWORD[144+rsp]
+ movaps XMMWORD[144+rsp],xmm0
+ lea rax,[((160+40))+rsp]
+$L$ocb_enc_pop:
+ mov r14,QWORD[((-40))+rax]
+
+ mov r13,QWORD[((-32))+rax]
+
+ mov r12,QWORD[((-24))+rax]
+
+ mov rbp,QWORD[((-16))+rax]
+
+ mov rbx,QWORD[((-8))+rax]
+
+ lea rsp,[rax]
+
+$L$ocb_enc_epilogue:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_ocb_encrypt:
+
+
+ALIGN 32
+__ocb_encrypt6:
+
+ pxor xmm15,xmm9
+ movdqu xmm11,XMMWORD[r12*1+rbx]
+ movdqa xmm12,xmm10
+ movdqu xmm13,XMMWORD[r13*1+rbx]
+ movdqa xmm14,xmm10
+ pxor xmm10,xmm15
+ movdqu xmm15,XMMWORD[r14*1+rbx]
+ pxor xmm11,xmm10
+ pxor xmm8,xmm2
+ pxor xmm2,xmm10
+ pxor xmm12,xmm11
+ pxor xmm8,xmm3
+ pxor xmm3,xmm11
+ pxor xmm13,xmm12
+ pxor xmm8,xmm4
+ pxor xmm4,xmm12
+ pxor xmm14,xmm13
+ pxor xmm8,xmm5
+ pxor xmm5,xmm13
+ pxor xmm15,xmm14
+ pxor xmm8,xmm6
+ pxor xmm6,xmm14
+ pxor xmm8,xmm7
+ pxor xmm7,xmm15
+ movups xmm0,XMMWORD[32+r11]
+
+ lea r12,[1+r8]
+ lea r13,[3+r8]
+ lea r14,[5+r8]
+ add r8,6
+ pxor xmm10,xmm9
+ bsf r12,r12
+ bsf r13,r13
+ bsf r14,r14
+
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+ pxor xmm11,xmm9
+ pxor xmm12,xmm9
+DB 102,15,56,220,241
+ pxor xmm13,xmm9
+ pxor xmm14,xmm9
+DB 102,15,56,220,249
+ movups xmm1,XMMWORD[48+r11]
+ pxor xmm15,xmm9
+
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+DB 102,15,56,220,240
+DB 102,15,56,220,248
+ movups xmm0,XMMWORD[64+r11]
+ shl r12,4
+ shl r13,4
+ jmp NEAR $L$ocb_enc_loop6
+
+ALIGN 32
+$L$ocb_enc_loop6:
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+DB 102,15,56,220,241
+DB 102,15,56,220,249
+ movups xmm1,XMMWORD[rax*1+rcx]
+ add rax,32
+
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+DB 102,15,56,220,240
+DB 102,15,56,220,248
+ movups xmm0,XMMWORD[((-16))+rax*1+rcx]
+ jnz NEAR $L$ocb_enc_loop6
+
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+DB 102,15,56,220,241
+DB 102,15,56,220,249
+ movups xmm1,XMMWORD[16+r11]
+ shl r14,4
+
+DB 102,65,15,56,221,210
+ movdqu xmm10,XMMWORD[rbx]
+ mov rax,r10
+DB 102,65,15,56,221,219
+DB 102,65,15,56,221,228
+DB 102,65,15,56,221,237
+DB 102,65,15,56,221,246
+DB 102,65,15,56,221,255
+ DB 0F3h,0C3h ;repret
+
+
+
+
+ALIGN 32
+__ocb_encrypt4:
+
+ pxor xmm15,xmm9
+ movdqu xmm11,XMMWORD[r12*1+rbx]
+ movdqa xmm12,xmm10
+ movdqu xmm13,XMMWORD[r13*1+rbx]
+ pxor xmm10,xmm15
+ pxor xmm11,xmm10
+ pxor xmm8,xmm2
+ pxor xmm2,xmm10
+ pxor xmm12,xmm11
+ pxor xmm8,xmm3
+ pxor xmm3,xmm11
+ pxor xmm13,xmm12
+ pxor xmm8,xmm4
+ pxor xmm4,xmm12
+ pxor xmm8,xmm5
+ pxor xmm5,xmm13
+ movups xmm0,XMMWORD[32+r11]
+
+ pxor xmm10,xmm9
+ pxor xmm11,xmm9
+ pxor xmm12,xmm9
+ pxor xmm13,xmm9
+
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+ movups xmm1,XMMWORD[48+r11]
+
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+ movups xmm0,XMMWORD[64+r11]
+ jmp NEAR $L$ocb_enc_loop4
+
+ALIGN 32
+$L$ocb_enc_loop4:
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+ movups xmm1,XMMWORD[rax*1+rcx]
+ add rax,32
+
+DB 102,15,56,220,208
+DB 102,15,56,220,216
+DB 102,15,56,220,224
+DB 102,15,56,220,232
+ movups xmm0,XMMWORD[((-16))+rax*1+rcx]
+ jnz NEAR $L$ocb_enc_loop4
+
+DB 102,15,56,220,209
+DB 102,15,56,220,217
+DB 102,15,56,220,225
+DB 102,15,56,220,233
+ movups xmm1,XMMWORD[16+r11]
+ mov rax,r10
+
+DB 102,65,15,56,221,210
+DB 102,65,15,56,221,219
+DB 102,65,15,56,221,228
+DB 102,65,15,56,221,237
+ DB 0F3h,0C3h ;repret
+
+
+
+
+ALIGN 32
+__ocb_encrypt1:
+
+ pxor xmm7,xmm15
+ pxor xmm7,xmm9
+ pxor xmm8,xmm2
+ pxor xmm2,xmm7
+ movups xmm0,XMMWORD[32+r11]
+
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[48+r11]
+ pxor xmm7,xmm9
+
+DB 102,15,56,220,208
+ movups xmm0,XMMWORD[64+r11]
+ jmp NEAR $L$ocb_enc_loop1
+
+ALIGN 32
+$L$ocb_enc_loop1:
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[rax*1+rcx]
+ add rax,32
+
+DB 102,15,56,220,208
+ movups xmm0,XMMWORD[((-16))+rax*1+rcx]
+ jnz NEAR $L$ocb_enc_loop1
+
+DB 102,15,56,220,209
+ movups xmm1,XMMWORD[16+r11]
+ mov rax,r10
+
+DB 102,15,56,221,215
+ DB 0F3h,0C3h ;repret
+
+
+
+global aesni_ocb_decrypt
+
+ALIGN 32
+aesni_ocb_decrypt:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_ocb_decrypt:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD[40+rsp]
+ mov r9,QWORD[48+rsp]
+
+
+
+DB 243,15,30,250
+ lea rax,[rsp]
+ push rbx
+
+ push rbp
+
+ push r12
+
+ push r13
+
+ push r14
+
+ lea rsp,[((-160))+rsp]
+ movaps XMMWORD[rsp],xmm6
+ movaps XMMWORD[16+rsp],xmm7
+ movaps XMMWORD[32+rsp],xmm8
+ movaps XMMWORD[48+rsp],xmm9
+ movaps XMMWORD[64+rsp],xmm10
+ movaps XMMWORD[80+rsp],xmm11
+ movaps XMMWORD[96+rsp],xmm12
+ movaps XMMWORD[112+rsp],xmm13
+ movaps XMMWORD[128+rsp],xmm14
+ movaps XMMWORD[144+rsp],xmm15
+$L$ocb_dec_body:
+ mov rbx,QWORD[56+rax]
+ mov rbp,QWORD[((56+8))+rax]
+
+ mov r10d,DWORD[240+rcx]
+ mov r11,rcx
+ shl r10d,4
+ movups xmm9,XMMWORD[rcx]
+ movups xmm1,XMMWORD[16+r10*1+rcx]
+
+ movdqu xmm15,XMMWORD[r9]
+ pxor xmm9,xmm1
+ pxor xmm15,xmm1
+
+ mov eax,16+32
+ lea rcx,[32+r10*1+r11]
+ movups xmm1,XMMWORD[16+r11]
+ sub rax,r10
+ mov r10,rax
+
+ movdqu xmm10,XMMWORD[rbx]
+ movdqu xmm8,XMMWORD[rbp]
+
+ test r8,1
+ jnz NEAR $L$ocb_dec_odd
+
+ bsf r12,r8
+ add r8,1
+ shl r12,4
+ movdqu xmm7,XMMWORD[r12*1+rbx]
+ movdqu xmm2,XMMWORD[rdi]
+ lea rdi,[16+rdi]
+
+ call __ocb_decrypt1
+
+ movdqa xmm15,xmm7
+ movups XMMWORD[rsi],xmm2
+ xorps xmm8,xmm2
+ lea rsi,[16+rsi]
+ sub rdx,1
+ jz NEAR $L$ocb_dec_done
+
+$L$ocb_dec_odd:
+ lea r12,[1+r8]
+ lea r13,[3+r8]
+ lea r14,[5+r8]
+ lea r8,[6+r8]
+ bsf r12,r12
+ bsf r13,r13
+ bsf r14,r14
+ shl r12,4
+ shl r13,4
+ shl r14,4
+
+ sub rdx,6
+ jc NEAR $L$ocb_dec_short
+ jmp NEAR $L$ocb_dec_grandloop
+
+ALIGN 32
+$L$ocb_dec_grandloop:
+ movdqu xmm2,XMMWORD[rdi]
+ movdqu xmm3,XMMWORD[16+rdi]
+ movdqu xmm4,XMMWORD[32+rdi]
+ movdqu xmm5,XMMWORD[48+rdi]
+ movdqu xmm6,XMMWORD[64+rdi]
+ movdqu xmm7,XMMWORD[80+rdi]
+ lea rdi,[96+rdi]
+
+ call __ocb_decrypt6
+
+ movups XMMWORD[rsi],xmm2
+ pxor xmm8,xmm2
+ movups XMMWORD[16+rsi],xmm3
+ pxor xmm8,xmm3
+ movups XMMWORD[32+rsi],xmm4
+ pxor xmm8,xmm4
+ movups XMMWORD[48+rsi],xmm5
+ pxor xmm8,xmm5
+ movups XMMWORD[64+rsi],xmm6
+ pxor xmm8,xmm6
+ movups XMMWORD[80+rsi],xmm7
+ pxor xmm8,xmm7
+ lea rsi,[96+rsi]
+ sub rdx,6
+ jnc NEAR $L$ocb_dec_grandloop
+
+$L$ocb_dec_short:
+ add rdx,6
+ jz NEAR $L$ocb_dec_done
+
+ movdqu xmm2,XMMWORD[rdi]
+ cmp rdx,2
+ jb NEAR $L$ocb_dec_one
+ movdqu xmm3,XMMWORD[16+rdi]
+ je NEAR $L$ocb_dec_two
+
+ movdqu xmm4,XMMWORD[32+rdi]
+ cmp rdx,4
+ jb NEAR $L$ocb_dec_three
+ movdqu xmm5,XMMWORD[48+rdi]
+ je NEAR $L$ocb_dec_four
+
+ movdqu xmm6,XMMWORD[64+rdi]
+ pxor xmm7,xmm7
+
+ call __ocb_decrypt6
+
+ movdqa xmm15,xmm14
+ movups XMMWORD[rsi],xmm2
+ pxor xmm8,xmm2
+ movups XMMWORD[16+rsi],xmm3
+ pxor xmm8,xmm3
+ movups XMMWORD[32+rsi],xmm4
+ pxor xmm8,xmm4
+ movups XMMWORD[48+rsi],xmm5
+ pxor xmm8,xmm5
+ movups XMMWORD[64+rsi],xmm6
+ pxor xmm8,xmm6
+
+ jmp NEAR $L$ocb_dec_done
+
+ALIGN 16
+$L$ocb_dec_one:
+ movdqa xmm7,xmm10
+
+ call __ocb_decrypt1
+
+ movdqa xmm15,xmm7
+ movups XMMWORD[rsi],xmm2
+ xorps xmm8,xmm2
+ jmp NEAR $L$ocb_dec_done
+
+ALIGN 16
+$L$ocb_dec_two:
+ pxor xmm4,xmm4
+ pxor xmm5,xmm5
+
+ call __ocb_decrypt4
+
+ movdqa xmm15,xmm11
+ movups XMMWORD[rsi],xmm2
+ xorps xmm8,xmm2
+ movups XMMWORD[16+rsi],xmm3
+ xorps xmm8,xmm3
+
+ jmp NEAR $L$ocb_dec_done
+
+ALIGN 16
+$L$ocb_dec_three:
+ pxor xmm5,xmm5
+
+ call __ocb_decrypt4
+
+ movdqa xmm15,xmm12
+ movups XMMWORD[rsi],xmm2
+ xorps xmm8,xmm2
+ movups XMMWORD[16+rsi],xmm3
+ xorps xmm8,xmm3
+ movups XMMWORD[32+rsi],xmm4
+ xorps xmm8,xmm4
+
+ jmp NEAR $L$ocb_dec_done
+
+ALIGN 16
+$L$ocb_dec_four:
+ call __ocb_decrypt4
+
+ movdqa xmm15,xmm13
+ movups XMMWORD[rsi],xmm2
+ pxor xmm8,xmm2
+ movups XMMWORD[16+rsi],xmm3
+ pxor xmm8,xmm3
+ movups XMMWORD[32+rsi],xmm4
+ pxor xmm8,xmm4
+ movups XMMWORD[48+rsi],xmm5
+ pxor xmm8,xmm5
+
+$L$ocb_dec_done:
+ pxor xmm15,xmm0
+ movdqu XMMWORD[rbp],xmm8
+ movdqu XMMWORD[r9],xmm15
+
+ xorps xmm0,xmm0
+ pxor xmm1,xmm1
+ pxor xmm2,xmm2
+ pxor xmm3,xmm3
+ pxor xmm4,xmm4
+ pxor xmm5,xmm5
+ movaps xmm6,XMMWORD[rsp]
+ movaps XMMWORD[rsp],xmm0
+ movaps xmm7,XMMWORD[16+rsp]
+ movaps XMMWORD[16+rsp],xmm0
+ movaps xmm8,XMMWORD[32+rsp]
+ movaps XMMWORD[32+rsp],xmm0
+ movaps xmm9,XMMWORD[48+rsp]
+ movaps XMMWORD[48+rsp],xmm0
+ movaps xmm10,XMMWORD[64+rsp]
+ movaps XMMWORD[64+rsp],xmm0
+ movaps xmm11,XMMWORD[80+rsp]
+ movaps XMMWORD[80+rsp],xmm0
+ movaps xmm12,XMMWORD[96+rsp]
+ movaps XMMWORD[96+rsp],xmm0
+ movaps xmm13,XMMWORD[112+rsp]
+ movaps XMMWORD[112+rsp],xmm0
+ movaps xmm14,XMMWORD[128+rsp]
+ movaps XMMWORD[128+rsp],xmm0
+ movaps xmm15,XMMWORD[144+rsp]
+ movaps XMMWORD[144+rsp],xmm0
+ lea rax,[((160+40))+rsp]
+$L$ocb_dec_pop:
+ mov r14,QWORD[((-40))+rax]
+
+ mov r13,QWORD[((-32))+rax]
+
+ mov r12,QWORD[((-24))+rax]
+
+ mov rbp,QWORD[((-16))+rax]
+
+ mov rbx,QWORD[((-8))+rax]
+
+ lea rsp,[rax]
+
+$L$ocb_dec_epilogue:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_ocb_decrypt:
+
+
+ALIGN 32
+__ocb_decrypt6:
+
+ pxor xmm15,xmm9
+ movdqu xmm11,XMMWORD[r12*1+rbx]
+ movdqa xmm12,xmm10
+ movdqu xmm13,XMMWORD[r13*1+rbx]
+ movdqa xmm14,xmm10
+ pxor xmm10,xmm15
+ movdqu xmm15,XMMWORD[r14*1+rbx]
+ pxor xmm11,xmm10
+ pxor xmm2,xmm10
+ pxor xmm12,xmm11
+ pxor xmm3,xmm11
+ pxor xmm13,xmm12
+ pxor xmm4,xmm12
+ pxor xmm14,xmm13
+ pxor xmm5,xmm13
+ pxor xmm15,xmm14
+ pxor xmm6,xmm14
+ pxor xmm7,xmm15
+ movups xmm0,XMMWORD[32+r11]
+
+ lea r12,[1+r8]
+ lea r13,[3+r8]
+ lea r14,[5+r8]
+ add r8,6
+ pxor xmm10,xmm9
+ bsf r12,r12
+ bsf r13,r13
+ bsf r14,r14
+
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+ pxor xmm11,xmm9
+ pxor xmm12,xmm9
+DB 102,15,56,222,241
+ pxor xmm13,xmm9
+ pxor xmm14,xmm9
+DB 102,15,56,222,249
+ movups xmm1,XMMWORD[48+r11]
+ pxor xmm15,xmm9
+
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+DB 102,15,56,222,240
+DB 102,15,56,222,248
+ movups xmm0,XMMWORD[64+r11]
+ shl r12,4
+ shl r13,4
+ jmp NEAR $L$ocb_dec_loop6
+
+ALIGN 32
+$L$ocb_dec_loop6:
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+DB 102,15,56,222,241
+DB 102,15,56,222,249
+ movups xmm1,XMMWORD[rax*1+rcx]
+ add rax,32
+
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+DB 102,15,56,222,240
+DB 102,15,56,222,248
+ movups xmm0,XMMWORD[((-16))+rax*1+rcx]
+ jnz NEAR $L$ocb_dec_loop6
+
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+DB 102,15,56,222,241
+DB 102,15,56,222,249
+ movups xmm1,XMMWORD[16+r11]
+ shl r14,4
+
+DB 102,65,15,56,223,210
+ movdqu xmm10,XMMWORD[rbx]
+ mov rax,r10
+DB 102,65,15,56,223,219
+DB 102,65,15,56,223,228
+DB 102,65,15,56,223,237
+DB 102,65,15,56,223,246
+DB 102,65,15,56,223,255
+ DB 0F3h,0C3h ;repret
+
+
+
+
+ALIGN 32
+__ocb_decrypt4:
+
+ pxor xmm15,xmm9
+ movdqu xmm11,XMMWORD[r12*1+rbx]
+ movdqa xmm12,xmm10
+ movdqu xmm13,XMMWORD[r13*1+rbx]
+ pxor xmm10,xmm15
+ pxor xmm11,xmm10
+ pxor xmm2,xmm10
+ pxor xmm12,xmm11
+ pxor xmm3,xmm11
+ pxor xmm13,xmm12
+ pxor xmm4,xmm12
+ pxor xmm5,xmm13
+ movups xmm0,XMMWORD[32+r11]
+
+ pxor xmm10,xmm9
+ pxor xmm11,xmm9
+ pxor xmm12,xmm9
+ pxor xmm13,xmm9
+
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+ movups xmm1,XMMWORD[48+r11]
+
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+ movups xmm0,XMMWORD[64+r11]
+ jmp NEAR $L$ocb_dec_loop4
+
+ALIGN 32
+$L$ocb_dec_loop4:
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+ movups xmm1,XMMWORD[rax*1+rcx]
+ add rax,32
+
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+ movups xmm0,XMMWORD[((-16))+rax*1+rcx]
+ jnz NEAR $L$ocb_dec_loop4
+
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+ movups xmm1,XMMWORD[16+r11]
+ mov rax,r10
+
+DB 102,65,15,56,223,210
+DB 102,65,15,56,223,219
+DB 102,65,15,56,223,228
+DB 102,65,15,56,223,237
+ DB 0F3h,0C3h ;repret
+
+
+
+
+ALIGN 32
+__ocb_decrypt1:
+
+ pxor xmm7,xmm15
+ pxor xmm7,xmm9
+ pxor xmm2,xmm7
+ movups xmm0,XMMWORD[32+r11]
+
+DB 102,15,56,222,209
+ movups xmm1,XMMWORD[48+r11]
+ pxor xmm7,xmm9
+
+DB 102,15,56,222,208
+ movups xmm0,XMMWORD[64+r11]
+ jmp NEAR $L$ocb_dec_loop1
+
+ALIGN 32
+$L$ocb_dec_loop1:
+DB 102,15,56,222,209
+ movups xmm1,XMMWORD[rax*1+rcx]
+ add rax,32
+
+DB 102,15,56,222,208
+ movups xmm0,XMMWORD[((-16))+rax*1+rcx]
+ jnz NEAR $L$ocb_dec_loop1
+
+DB 102,15,56,222,209
+ movups xmm1,XMMWORD[16+r11]
+ mov rax,r10
+
+DB 102,15,56,223,215
+ DB 0F3h,0C3h ;repret
+
+
+global aesni_cbc_encrypt
+
+ALIGN 16
+aesni_cbc_encrypt:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_aesni_cbc_encrypt:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD[40+rsp]
+ mov r9,QWORD[48+rsp]
+
+
+
+DB 243,15,30,250
+ test rdx,rdx
+ jz NEAR $L$cbc_ret
+
+ mov r10d,DWORD[240+rcx]
+ mov r11,rcx
+ test r9d,r9d
+ jz NEAR $L$cbc_decrypt
+
+ movups xmm2,XMMWORD[r8]
+ mov eax,r10d
+ cmp rdx,16
+ jb NEAR $L$cbc_enc_tail
+ sub rdx,16
+ jmp NEAR $L$cbc_enc_loop
+ALIGN 16
+$L$cbc_enc_loop:
+ movups xmm3,XMMWORD[rdi]
+ lea rdi,[16+rdi]
+
+ movups xmm0,XMMWORD[rcx]
+ movups xmm1,XMMWORD[16+rcx]
+ xorps xmm3,xmm0
+ lea rcx,[32+rcx]
+ xorps xmm2,xmm3
+$L$oop_enc1_15:
+DB 102,15,56,220,209
+ dec eax
+ movups xmm1,XMMWORD[rcx]
+ lea rcx,[16+rcx]
+ jnz NEAR $L$oop_enc1_15
+DB 102,15,56,221,209
+ mov eax,r10d
+ mov rcx,r11
+ movups XMMWORD[rsi],xmm2
+ lea rsi,[16+rsi]
+ sub rdx,16
+ jnc NEAR $L$cbc_enc_loop
+ add rdx,16
+ jnz NEAR $L$cbc_enc_tail
+ pxor xmm0,xmm0
+ pxor xmm1,xmm1
+ movups XMMWORD[r8],xmm2
+ pxor xmm2,xmm2
+ pxor xmm3,xmm3
+ jmp NEAR $L$cbc_ret
+
+$L$cbc_enc_tail:
+ mov rcx,rdx
+ xchg rsi,rdi
+ DD 0x9066A4F3
+ mov ecx,16
+ sub rcx,rdx
+ xor eax,eax
+ DD 0x9066AAF3
+ lea rdi,[((-16))+rdi]
+ mov eax,r10d
+ mov rsi,rdi
+ mov rcx,r11
+ xor rdx,rdx
+ jmp NEAR $L$cbc_enc_loop
+
+ALIGN 16
+$L$cbc_decrypt:
+ cmp rdx,16
+ jne NEAR $L$cbc_decrypt_bulk
+
+
+
+ movdqu xmm2,XMMWORD[rdi]
+ movdqu xmm3,XMMWORD[r8]
+ movdqa xmm4,xmm2
+ movups xmm0,XMMWORD[rcx]
+ movups xmm1,XMMWORD[16+rcx]
+ lea rcx,[32+rcx]
+ xorps xmm2,xmm0
+$L$oop_dec1_16:
+DB 102,15,56,222,209
+ dec r10d
+ movups xmm1,XMMWORD[rcx]
+ lea rcx,[16+rcx]
+ jnz NEAR $L$oop_dec1_16
+DB 102,15,56,223,209
+ pxor xmm0,xmm0
+ pxor xmm1,xmm1
+ movdqu XMMWORD[r8],xmm4
+ xorps xmm2,xmm3
+ pxor xmm3,xmm3
+ movups XMMWORD[rsi],xmm2
+ pxor xmm2,xmm2
+ jmp NEAR $L$cbc_ret
+ALIGN 16
+$L$cbc_decrypt_bulk:
+ lea r11,[rsp]
+
+ push rbp
+
+ sub rsp,176
+ and rsp,-16
+ movaps XMMWORD[16+rsp],xmm6
+ movaps XMMWORD[32+rsp],xmm7
+ movaps XMMWORD[48+rsp],xmm8
+ movaps XMMWORD[64+rsp],xmm9
+ movaps XMMWORD[80+rsp],xmm10
+ movaps XMMWORD[96+rsp],xmm11
+ movaps XMMWORD[112+rsp],xmm12
+ movaps XMMWORD[128+rsp],xmm13
+ movaps XMMWORD[144+rsp],xmm14
+ movaps XMMWORD[160+rsp],xmm15
+$L$cbc_decrypt_body:
+ mov rbp,rcx
+ movups xmm10,XMMWORD[r8]
+ mov eax,r10d
+ cmp rdx,0x50
+ jbe NEAR $L$cbc_dec_tail
+
+ movups xmm0,XMMWORD[rcx]
+ movdqu xmm2,XMMWORD[rdi]
+ movdqu xmm3,XMMWORD[16+rdi]
+ movdqa xmm11,xmm2
+ movdqu xmm4,XMMWORD[32+rdi]
+ movdqa xmm12,xmm3
+ movdqu xmm5,XMMWORD[48+rdi]
+ movdqa xmm13,xmm4
+ movdqu xmm6,XMMWORD[64+rdi]
+ movdqa xmm14,xmm5
+ movdqu xmm7,XMMWORD[80+rdi]
+ movdqa xmm15,xmm6
+ mov r9d,DWORD[((OPENSSL_ia32cap_P+4))]
+ cmp rdx,0x70
+ jbe NEAR $L$cbc_dec_six_or_seven
+
+ and r9d,71303168
+ sub rdx,0x50
+ cmp r9d,4194304
+ je NEAR $L$cbc_dec_loop6_enter
+ sub rdx,0x20
+ lea rcx,[112+rcx]
+ jmp NEAR $L$cbc_dec_loop8_enter
+ALIGN 16
+$L$cbc_dec_loop8:
+ movups XMMWORD[rsi],xmm9
+ lea rsi,[16+rsi]
+$L$cbc_dec_loop8_enter:
+ movdqu xmm8,XMMWORD[96+rdi]
+ pxor xmm2,xmm0
+ movdqu xmm9,XMMWORD[112+rdi]
+ pxor xmm3,xmm0
+ movups xmm1,XMMWORD[((16-112))+rcx]
+ pxor xmm4,xmm0
+ mov rbp,-1
+ cmp rdx,0x70
+ pxor xmm5,xmm0
+ pxor xmm6,xmm0
+ pxor xmm7,xmm0
+ pxor xmm8,xmm0
+
+DB 102,15,56,222,209
+ pxor xmm9,xmm0
+ movups xmm0,XMMWORD[((32-112))+rcx]
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+DB 102,15,56,222,241
+DB 102,15,56,222,249
+DB 102,68,15,56,222,193
+ adc rbp,0
+ and rbp,128
+DB 102,68,15,56,222,201
+ add rbp,rdi
+ movups xmm1,XMMWORD[((48-112))+rcx]
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+DB 102,15,56,222,240
+DB 102,15,56,222,248
+DB 102,68,15,56,222,192
+DB 102,68,15,56,222,200
+ movups xmm0,XMMWORD[((64-112))+rcx]
+ nop
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+DB 102,15,56,222,241
+DB 102,15,56,222,249
+DB 102,68,15,56,222,193
+DB 102,68,15,56,222,201
+ movups xmm1,XMMWORD[((80-112))+rcx]
+ nop
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+DB 102,15,56,222,240
+DB 102,15,56,222,248
+DB 102,68,15,56,222,192
+DB 102,68,15,56,222,200
+ movups xmm0,XMMWORD[((96-112))+rcx]
+ nop
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+DB 102,15,56,222,241
+DB 102,15,56,222,249
+DB 102,68,15,56,222,193
+DB 102,68,15,56,222,201
+ movups xmm1,XMMWORD[((112-112))+rcx]
+ nop
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+DB 102,15,56,222,240
+DB 102,15,56,222,248
+DB 102,68,15,56,222,192
+DB 102,68,15,56,222,200
+ movups xmm0,XMMWORD[((128-112))+rcx]
+ nop
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+DB 102,15,56,222,241
+DB 102,15,56,222,249
+DB 102,68,15,56,222,193
+DB 102,68,15,56,222,201
+ movups xmm1,XMMWORD[((144-112))+rcx]
+ cmp eax,11
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+DB 102,15,56,222,240
+DB 102,15,56,222,248
+DB 102,68,15,56,222,192
+DB 102,68,15,56,222,200
+ movups xmm0,XMMWORD[((160-112))+rcx]
+ jb NEAR $L$cbc_dec_done
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+DB 102,15,56,222,241
+DB 102,15,56,222,249
+DB 102,68,15,56,222,193
+DB 102,68,15,56,222,201
+ movups xmm1,XMMWORD[((176-112))+rcx]
+ nop
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+DB 102,15,56,222,240
+DB 102,15,56,222,248
+DB 102,68,15,56,222,192
+DB 102,68,15,56,222,200
+ movups xmm0,XMMWORD[((192-112))+rcx]
+ je NEAR $L$cbc_dec_done
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+DB 102,15,56,222,241
+DB 102,15,56,222,249
+DB 102,68,15,56,222,193
+DB 102,68,15,56,222,201
+ movups xmm1,XMMWORD[((208-112))+rcx]
+ nop
+DB 102,15,56,222,208
+DB 102,15,56,222,216
+DB 102,15,56,222,224
+DB 102,15,56,222,232
+DB 102,15,56,222,240
+DB 102,15,56,222,248
+DB 102,68,15,56,222,192
+DB 102,68,15,56,222,200
+ movups xmm0,XMMWORD[((224-112))+rcx]
+ jmp NEAR $L$cbc_dec_done
+ALIGN 16
+$L$cbc_dec_done:
+DB 102,15,56,222,209
+DB 102,15,56,222,217
+ pxor xmm10,xmm0
+ pxor xmm11,xmm0
+DB 102,15,56,222,225
+DB 102,15,56,222,233
+ pxor xmm12,xmm0
+ pxor xmm13,xmm0
+DB 102,15,56,222,241
+DB 102,15,56,222,249
+ pxor xmm14,xmm0
+ pxor xmm15,xmm0
+DB 102,68,15,56,222,193
+DB 102,68,15,56,222,201
+ movdqu xmm1,XMMWORD[80+rdi]
+
+DB 102,65,15,56,223,210
+ movdqu xmm10,XMMWORD[96+rdi]
+ pxor xmm1,xmm0
+DB 102,65,15,56,223,219
+ pxor xmm10,xmm0
+ movdqu xmm0,XMMWORD[112+rdi]
+DB 102,65,15,56,223,228
+ lea rdi,[128+rdi]
+ movdqu xmm11,XMMWORD[rbp]
+DB 102,65,15,56,223,237
+DB 102,65,15,56,223,246
+ movdqu xmm12,XMMWORD[16+rbp]
+ movdqu xmm13,XMMWORD[32+rbp]
+DB 102,65,15,56,223,255
+DB 102,68,15,56,223,193
+ movdqu xmm14,XMMWORD[48+rbp]
+ movdqu xmm15,XMMWORD[64+rbp]
+DB 102,69,15,56,223,202
+ movdqa xmm10,xmm0
+ movdqu xmm1,XMMWORD[80+rbp]
+ movups xmm0,XMMWORD[((-112))+rcx]
+
+ movups XMMWORD[rsi],xmm2
+ movdqa xmm2,xmm11
+ movups XMMWORD[16+rsi],xmm3
+ movdqa xmm3,xmm12
+ movups XMMWORD[32+rsi],xmm4
+ movdqa xmm4,xmm13
+ movups XMMWORD[48+rsi],xmm5
+ movdqa xmm5,xmm14
+ movups XMMWORD[64+rsi],xmm6
+ movdqa xmm6,xmm15
+ movups XMMWORD[80+rsi],xmm7
+ movdqa xmm7,xmm1
+ movups XMMWORD[96+rsi],xmm8
+ lea rsi,[112+rsi]
+
+ sub rdx,0x80
+ ja NEAR $L$cbc_dec_loop8
+
+ movaps xmm2,xmm9
+ lea rcx,[((-112))+rcx]
+ add rdx,0x70
+ jle NEAR $L$cbc_dec_clear_tail_collected
+ movups XMMWORD[rsi],xmm9
+ lea rsi,[16+rsi]
+ cmp rdx,0x50
+ jbe NEAR $L$cbc_dec_tail
+
+ movaps xmm2,xmm11
+$L$cbc_dec_six_or_seven:
+ cmp rdx,0x60
+ ja NEAR $L$cbc_dec_seven
+
+ movaps xmm8,xmm7
+ call _aesni_decrypt6
+ pxor xmm2,xmm10
+ movaps xmm10,xmm8
+ pxor xmm3,xmm11
+ movdqu XMMWORD[rsi],xmm2
+ pxor xmm4,xmm12
+ movdqu XMMWORD[16+rsi],xmm3
+ pxor xmm3,xmm3
+ pxor xmm5,xmm13
+ movdqu XMMWORD[32+rsi],xmm4
+ pxor xmm4,xmm4
+ pxor xmm6,xmm14
+ movdqu XMMWORD[48+rsi],xmm5
+ pxor xmm5,xmm5
+ pxor xmm7,xmm15
+ movdqu XMMWORD[64+rsi],xmm6
+ pxor xmm6,xmm6
+ lea rsi,[80+rsi]
+ movdqa xmm2,xmm7
+ pxor xmm7,xmm7
+ jmp NEAR $L$cbc_dec_tail_collected
+
+ALIGN 16
+$L$cbc_dec_seven:
+ movups xmm8,XMMWORD[96+rdi]
+ xorps xmm9,xmm9
+ call _aesni_decrypt8
+ movups xmm9,XMMWORD[80+rdi]
+ pxor xmm2,xmm10
+ movups xmm10,XMMWORD[96+rdi]
+ pxor xmm3,xmm11
+ movdqu XMMWORD[rsi],xmm2
+ pxor xmm4,xmm12
+ movdqu XMMWORD[16+rsi],xmm3
+ pxor xmm3,xmm3
+ pxor xmm5,xmm13
+ movdqu XMMWORD[32+rsi],xmm4
+ pxor xmm4,xmm4
+ pxor xmm6,xmm14
+ movdqu XMMWORD[48+rsi],xmm5
+ pxor xmm5,xmm5
+ pxor xmm7,xmm15
+ movdqu XMMWORD[64+rsi],xmm6
+ pxor xmm6,xmm6
+ pxor xmm8,xmm9
+ movdqu XMMWORD[80+rsi],xmm7
+ pxor xmm7,xmm7
+ lea rsi,[96+rsi]
+ movdqa xmm2,xmm8
+ pxor xmm8,xmm8
+ pxor xmm9,xmm9
+ jmp NEAR $L$cbc_dec_tail_collected
+
+ALIGN 16
+$L$cbc_dec_loop6:
+ movups XMMWORD[rsi],xmm7
+ lea rsi,[16+rsi]
+ movdqu xmm2,XMMWORD[rdi]
+ movdqu xmm3,XMMWORD[16+rdi]
+ movdqa xmm11,xmm2
+ movdqu xmm4,XMMWORD[32+rdi]
+ movdqa xmm12,xmm3
+ movdqu xmm5,XMMWORD[48+rdi]
+ movdqa xmm13,xmm4
+ movdqu xmm6,XMMWORD[64+rdi]
+ movdqa xmm14,xmm5
+ movdqu xmm7,XMMWORD[80+rdi]
+ movdqa xmm15,xmm6
+$L$cbc_dec_loop6_enter:
+ lea rdi,[96+rdi]
+ movdqa xmm8,xmm7
+
+ call _aesni_decrypt6
+
+ pxor xmm2,xmm10
+ movdqa xmm10,xmm8
+ pxor xmm3,xmm11
+ movdqu XMMWORD[rsi],xmm2
+ pxor xmm4,xmm12
+ movdqu XMMWORD[16+rsi],xmm3
+ pxor xmm5,xmm13
+ movdqu XMMWORD[32+rsi],xmm4
+ pxor xmm6,xmm14
+ mov rcx,rbp
+ movdqu XMMWORD[48+rsi],xmm5
+ pxor xmm7,xmm15
+ mov eax,r10d
+ movdqu XMMWORD[64+rsi],xmm6
+ lea rsi,[80+rsi]
+ sub rdx,0x60
+ ja NEAR $L$cbc_dec_loop6
+
+ movdqa xmm2,xmm7
+ add rdx,0x50
+ jle NEAR $L$cbc_dec_clear_tail_collected
+ movups XMMWORD[rsi],xmm7
+ lea rsi,[16+rsi]
+
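+; $L$cbc_dec_tail: handles the last few (at most five) blocks one size at a
+; time; the next IV (the last ciphertext block) is kept in xmm10 and written
+; back through the IV pointer in r8 at $L$cbc_dec_tail_collected, and any
+; partial final block is staged on the stack and copied out with rep movsb.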
+$L$cbc_dec_tail:
+ movups xmm2,XMMWORD[rdi]
+ sub rdx,0x10
+ jbe NEAR $L$cbc_dec_one
+
+ movups xmm3,XMMWORD[16+rdi]
+ movaps xmm11,xmm2
+ sub rdx,0x10
+ jbe NEAR $L$cbc_dec_two
+
+ movups xmm4,XMMWORD[32+rdi]
+ movaps xmm12,xmm3
+ sub rdx,0x10
+ jbe NEAR $L$cbc_dec_three
+
+ movups xmm5,XMMWORD[48+rdi]
+ movaps xmm13,xmm4
+ sub rdx,0x10
+ jbe NEAR $L$cbc_dec_four
+
+ movups xmm6,XMMWORD[64+rdi]
+ movaps xmm14,xmm5
+ movaps xmm15,xmm6
+ xorps xmm7,xmm7
+ call _aesni_decrypt6
+ pxor xmm2,xmm10
+ movaps xmm10,xmm15
+ pxor xmm3,xmm11
+ movdqu XMMWORD[rsi],xmm2
+ pxor xmm4,xmm12
+ movdqu XMMWORD[16+rsi],xmm3
+ pxor xmm3,xmm3
+ pxor xmm5,xmm13
+ movdqu XMMWORD[32+rsi],xmm4
+ pxor xmm4,xmm4
+ pxor xmm6,xmm14
+ movdqu XMMWORD[48+rsi],xmm5
+ pxor xmm5,xmm5
+ lea rsi,[64+rsi]
+ movdqa xmm2,xmm6
+ pxor xmm6,xmm6
+ pxor xmm7,xmm7
+ sub rdx,0x10
+ jmp NEAR $L$cbc_dec_tail_collected
+
+ALIGN 16
+$L$cbc_dec_one:
+ movaps xmm11,xmm2
+ movups xmm0,XMMWORD[rcx]
+ movups xmm1,XMMWORD[16+rcx]
+ lea rcx,[32+rcx]
+ xorps xmm2,xmm0
+$L$oop_dec1_17:
+DB 102,15,56,222,209
+ dec eax
+ movups xmm1,XMMWORD[rcx]
+ lea rcx,[16+rcx]
+ jnz NEAR $L$oop_dec1_17
+DB 102,15,56,223,209
+ xorps xmm2,xmm10
+ movaps xmm10,xmm11
+ jmp NEAR $L$cbc_dec_tail_collected
+ALIGN 16
+$L$cbc_dec_two:
+ movaps xmm12,xmm3
+ call _aesni_decrypt2
+ pxor xmm2,xmm10
+ movaps xmm10,xmm12
+ pxor xmm3,xmm11
+ movdqu XMMWORD[rsi],xmm2
+ movdqa xmm2,xmm3
+ pxor xmm3,xmm3
+ lea rsi,[16+rsi]
+ jmp NEAR $L$cbc_dec_tail_collected
+ALIGN 16
+$L$cbc_dec_three:
+ movaps xmm13,xmm4
+ call _aesni_decrypt3
+ pxor xmm2,xmm10
+ movaps xmm10,xmm13
+ pxor xmm3,xmm11
+ movdqu XMMWORD[rsi],xmm2
+ pxor xmm4,xmm12
+ movdqu XMMWORD[16+rsi],xmm3
+ pxor xmm3,xmm3
+ movdqa xmm2,xmm4
+ pxor xmm4,xmm4
+ lea rsi,[32+rsi]
+ jmp NEAR $L$cbc_dec_tail_collected
+ALIGN 16
+$L$cbc_dec_four:
+ movaps xmm14,xmm5
+ call _aesni_decrypt4
+ pxor xmm2,xmm10
+ movaps xmm10,xmm14
+ pxor xmm3,xmm11
+ movdqu XMMWORD[rsi],xmm2
+ pxor xmm4,xmm12
+ movdqu XMMWORD[16+rsi],xmm3
+ pxor xmm3,xmm3
+ pxor xmm5,xmm13
+ movdqu XMMWORD[32+rsi],xmm4
+ pxor xmm4,xmm4
+ movdqa xmm2,xmm5
+ pxor xmm5,xmm5
+ lea rsi,[48+rsi]
+ jmp NEAR $L$cbc_dec_tail_collected
+
+ALIGN 16
+$L$cbc_dec_clear_tail_collected:
+ pxor xmm3,xmm3
+ pxor xmm4,xmm4
+ pxor xmm5,xmm5
+$L$cbc_dec_tail_collected:
+ movups XMMWORD[r8],xmm10
+ and rdx,15
+ jnz NEAR $L$cbc_dec_tail_partial
+ movups XMMWORD[rsi],xmm2
+ pxor xmm2,xmm2
+ jmp NEAR $L$cbc_dec_ret
+ALIGN 16
+$L$cbc_dec_tail_partial:
+ movaps XMMWORD[rsp],xmm2
+ pxor xmm2,xmm2
+ mov rcx,16
+ mov rdi,rsi
+ sub rcx,rdx
+ lea rsi,[rsp]
+ DD 0x9066A4F3 ;rep movsb + 66h nop
+ movdqa XMMWORD[rsp],xmm2
+
+$L$cbc_dec_ret:
+ xorps xmm0,xmm0
+ pxor xmm1,xmm1
+ movaps xmm6,XMMWORD[16+rsp]
+ movaps XMMWORD[16+rsp],xmm0
+ movaps xmm7,XMMWORD[32+rsp]
+ movaps XMMWORD[32+rsp],xmm0
+ movaps xmm8,XMMWORD[48+rsp]
+ movaps XMMWORD[48+rsp],xmm0
+ movaps xmm9,XMMWORD[64+rsp]
+ movaps XMMWORD[64+rsp],xmm0
+ movaps xmm10,XMMWORD[80+rsp]
+ movaps XMMWORD[80+rsp],xmm0
+ movaps xmm11,XMMWORD[96+rsp]
+ movaps XMMWORD[96+rsp],xmm0
+ movaps xmm12,XMMWORD[112+rsp]
+ movaps XMMWORD[112+rsp],xmm0
+ movaps xmm13,XMMWORD[128+rsp]
+ movaps XMMWORD[128+rsp],xmm0
+ movaps xmm14,XMMWORD[144+rsp]
+ movaps XMMWORD[144+rsp],xmm0
+ movaps xmm15,XMMWORD[160+rsp]
+ movaps XMMWORD[160+rsp],xmm0
+ mov rbp,QWORD[((-8))+r11]
+
+ lea rsp,[r11]
+
+$L$cbc_ret:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_aesni_cbc_encrypt:
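+; aesni_set_decrypt_key: expands the user key with __aesni_set_encrypt_key,
+; then turns the result into a decryption schedule by swapping the first and
+; last round keys and applying AESIMC (the DB 102,15,56,219,... byte
+; sequences) to the inner round keys while walking the schedule from both ends.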
+global aesni_set_decrypt_key
+
+ALIGN 16
+aesni_set_decrypt_key:
+
+DB 0x48,0x83,0xEC,0x08 ;sub rsp,8
+
+ call __aesni_set_encrypt_key
+ shl edx,4
+ test eax,eax
+ jnz NEAR $L$dec_key_ret
+ lea rcx,[16+rdx*1+r8]
+
+ movups xmm0,XMMWORD[r8]
+ movups xmm1,XMMWORD[rcx]
+ movups XMMWORD[rcx],xmm0
+ movups XMMWORD[r8],xmm1
+ lea r8,[16+r8]
+ lea rcx,[((-16))+rcx]
+
+$L$dec_key_inverse:
+ movups xmm0,XMMWORD[r8]
+ movups xmm1,XMMWORD[rcx]
+DB 102,15,56,219,192
+DB 102,15,56,219,201
+ lea r8,[16+r8]
+ lea rcx,[((-16))+rcx]
+ movups XMMWORD[16+rcx],xmm0
+ movups XMMWORD[(-16)+r8],xmm1
+ cmp rcx,r8
+ ja NEAR $L$dec_key_inverse
+
+ movups xmm0,XMMWORD[r8]
+DB 102,15,56,219,192
+ pxor xmm1,xmm1
+ movups XMMWORD[rcx],xmm0
+ pxor xmm0,xmm0
+$L$dec_key_ret:
+ add rsp,8
+
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_set_decrypt_key:
+
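+; aesni_set_encrypt_key: expands a 128/192/256-bit user key into the round-key
+; schedule.  The main paths drive the expansion with AESKEYGENASSIST
+; (DB 102,15,58,223,...,rcon); the $L$*rounds_alt paths, selected from bits of
+; OPENSSL_ia32cap_P, build the same schedule with PSHUFB and AESENCLAST using
+; the $L$key_rotate*/$L$key_rcon1* constants instead.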
+global aesni_set_encrypt_key
+
+ALIGN 16
+aesni_set_encrypt_key:
+__aesni_set_encrypt_key:
+
+DB 0x48,0x83,0xEC,0x08 ;sub rsp,8
+
+ mov rax,-1
+ test rcx,rcx
+ jz NEAR $L$enc_key_ret
+ test r8,r8
+ jz NEAR $L$enc_key_ret
+
+ mov r10d,268437504
+ movups xmm0,XMMWORD[rcx]
+ xorps xmm4,xmm4
+ and r10d,DWORD[((OPENSSL_ia32cap_P+4))]
+ lea rax,[16+r8]
+ cmp edx,256
+ je NEAR $L$14rounds
+ cmp edx,192
+ je NEAR $L$12rounds
+ cmp edx,128
+ jne NEAR $L$bad_keybits
+
+$L$10rounds:
+ mov edx,9
+ cmp r10d,268435456
+ je NEAR $L$10rounds_alt
+
+ movups XMMWORD[r8],xmm0
+DB 102,15,58,223,200,1
+ call $L$key_expansion_128_cold
+DB 102,15,58,223,200,2
+ call $L$key_expansion_128
+DB 102,15,58,223,200,4
+ call $L$key_expansion_128
+DB 102,15,58,223,200,8
+ call $L$key_expansion_128
+DB 102,15,58,223,200,16
+ call $L$key_expansion_128
+DB 102,15,58,223,200,32
+ call $L$key_expansion_128
+DB 102,15,58,223,200,64
+ call $L$key_expansion_128
+DB 102,15,58,223,200,128
+ call $L$key_expansion_128
+DB 102,15,58,223,200,27
+ call $L$key_expansion_128
+DB 102,15,58,223,200,54
+ call $L$key_expansion_128
+ movups XMMWORD[rax],xmm0
+ mov DWORD[80+rax],edx
+ xor eax,eax
+ jmp NEAR $L$enc_key_ret
+
+ALIGN 16
+$L$10rounds_alt:
+ movdqa xmm5,XMMWORD[$L$key_rotate]
+ mov r10d,8
+ movdqa xmm4,XMMWORD[$L$key_rcon1]
+ movdqa xmm2,xmm0
+ movdqu XMMWORD[r8],xmm0
+ jmp NEAR $L$oop_key128
+
+ALIGN 16
+$L$oop_key128:
+DB 102,15,56,0,197
+DB 102,15,56,221,196
+ pslld xmm4,1
+ lea rax,[16+rax]
+
+ movdqa xmm3,xmm2
+ pslldq xmm2,4
+ pxor xmm3,xmm2
+ pslldq xmm2,4
+ pxor xmm3,xmm2
+ pslldq xmm2,4
+ pxor xmm2,xmm3
+
+ pxor xmm0,xmm2
+ movdqu XMMWORD[(-16)+rax],xmm0
+ movdqa xmm2,xmm0
+
+ dec r10d
+ jnz NEAR $L$oop_key128
+
+ movdqa xmm4,XMMWORD[$L$key_rcon1b]
+
+DB 102,15,56,0,197
+DB 102,15,56,221,196
+ pslld xmm4,1
+
+ movdqa xmm3,xmm2
+ pslldq xmm2,4
+ pxor xmm3,xmm2
+ pslldq xmm2,4
+ pxor xmm3,xmm2
+ pslldq xmm2,4
+ pxor xmm2,xmm3
+
+ pxor xmm0,xmm2
+ movdqu XMMWORD[rax],xmm0
+
+ movdqa xmm2,xmm0
+DB 102,15,56,0,197
+DB 102,15,56,221,196
+
+ movdqa xmm3,xmm2
+ pslldq xmm2,4
+ pxor xmm3,xmm2
+ pslldq xmm2,4
+ pxor xmm3,xmm2
+ pslldq xmm2,4
+ pxor xmm2,xmm3
+
+ pxor xmm0,xmm2
+ movdqu XMMWORD[16+rax],xmm0
+
+ mov DWORD[96+rax],edx
+ xor eax,eax
+ jmp NEAR $L$enc_key_ret
+
+ALIGN 16
+$L$12rounds:
+ movq xmm2,QWORD[16+rcx]
+ mov edx,11
+ cmp r10d,268435456
+ je NEAR $L$12rounds_alt
+
+ movups XMMWORD[r8],xmm0
+DB 102,15,58,223,202,1
+ call $L$key_expansion_192a_cold
+DB 102,15,58,223,202,2
+ call $L$key_expansion_192b
+DB 102,15,58,223,202,4
+ call $L$key_expansion_192a
+DB 102,15,58,223,202,8
+ call $L$key_expansion_192b
+DB 102,15,58,223,202,16
+ call $L$key_expansion_192a
+DB 102,15,58,223,202,32
+ call $L$key_expansion_192b
+DB 102,15,58,223,202,64
+ call $L$key_expansion_192a
+DB 102,15,58,223,202,128
+ call $L$key_expansion_192b
+ movups XMMWORD[rax],xmm0
+ mov DWORD[48+rax],edx
+ xor rax,rax
+ jmp NEAR $L$enc_key_ret
+
+ALIGN 16
+$L$12rounds_alt:
+ movdqa xmm5,XMMWORD[$L$key_rotate192]
+ movdqa xmm4,XMMWORD[$L$key_rcon1]
+ mov r10d,8
+ movdqu XMMWORD[r8],xmm0
+ jmp NEAR $L$oop_key192
+
+ALIGN 16
+$L$oop_key192:
+ movq QWORD[rax],xmm2
+ movdqa xmm1,xmm2
+DB 102,15,56,0,213
+DB 102,15,56,221,212
+ pslld xmm4,1
+ lea rax,[24+rax]
+
+ movdqa xmm3,xmm0
+ pslldq xmm0,4
+ pxor xmm3,xmm0
+ pslldq xmm0,4
+ pxor xmm3,xmm0
+ pslldq xmm0,4
+ pxor xmm0,xmm3
+
+ pshufd xmm3,xmm0,0xff
+ pxor xmm3,xmm1
+ pslldq xmm1,4
+ pxor xmm3,xmm1
+
+ pxor xmm0,xmm2
+ pxor xmm2,xmm3
+ movdqu XMMWORD[(-16)+rax],xmm0
+
+ dec r10d
+ jnz NEAR $L$oop_key192
+
+ mov DWORD[32+rax],edx
+ xor eax,eax
+ jmp NEAR $L$enc_key_ret
+
+ALIGN 16
+$L$14rounds:
+ movups xmm2,XMMWORD[16+rcx]
+ mov edx,13
+ lea rax,[16+rax]
+ cmp r10d,268435456
+ je NEAR $L$14rounds_alt
+
+ movups XMMWORD[r8],xmm0
+ movups XMMWORD[16+r8],xmm2
+DB 102,15,58,223,202,1
+ call $L$key_expansion_256a_cold
+DB 102,15,58,223,200,1
+ call $L$key_expansion_256b
+DB 102,15,58,223,202,2
+ call $L$key_expansion_256a
+DB 102,15,58,223,200,2
+ call $L$key_expansion_256b
+DB 102,15,58,223,202,4
+ call $L$key_expansion_256a
+DB 102,15,58,223,200,4
+ call $L$key_expansion_256b
+DB 102,15,58,223,202,8
+ call $L$key_expansion_256a
+DB 102,15,58,223,200,8
+ call $L$key_expansion_256b
+DB 102,15,58,223,202,16
+ call $L$key_expansion_256a
+DB 102,15,58,223,200,16
+ call $L$key_expansion_256b
+DB 102,15,58,223,202,32
+ call $L$key_expansion_256a
+DB 102,15,58,223,200,32
+ call $L$key_expansion_256b
+DB 102,15,58,223,202,64
+ call $L$key_expansion_256a
+ movups XMMWORD[rax],xmm0
+ mov DWORD[16+rax],edx
+ xor rax,rax
+ jmp NEAR $L$enc_key_ret
+
+ALIGN 16
+$L$14rounds_alt:
+ movdqa xmm5,XMMWORD[$L$key_rotate]
+ movdqa xmm4,XMMWORD[$L$key_rcon1]
+ mov r10d,7
+ movdqu XMMWORD[r8],xmm0
+ movdqa xmm1,xmm2
+ movdqu XMMWORD[16+r8],xmm2
+ jmp NEAR $L$oop_key256
+
+ALIGN 16
+$L$oop_key256:
+DB 102,15,56,0,213
+DB 102,15,56,221,212
+
+ movdqa xmm3,xmm0
+ pslldq xmm0,4
+ pxor xmm3,xmm0
+ pslldq xmm0,4
+ pxor xmm3,xmm0
+ pslldq xmm0,4
+ pxor xmm0,xmm3
+ pslld xmm4,1
+
+ pxor xmm0,xmm2
+ movdqu XMMWORD[rax],xmm0
+
+ dec r10d
+ jz NEAR $L$done_key256
+
+ pshufd xmm2,xmm0,0xff
+ pxor xmm3,xmm3
+DB 102,15,56,221,211
+
+ movdqa xmm3,xmm1
+ pslldq xmm1,4
+ pxor xmm3,xmm1
+ pslldq xmm1,4
+ pxor xmm3,xmm1
+ pslldq xmm1,4
+ pxor xmm1,xmm3
+
+ pxor xmm2,xmm1
+ movdqu XMMWORD[16+rax],xmm2
+ lea rax,[32+rax]
+ movdqa xmm1,xmm2
+
+ jmp NEAR $L$oop_key256
+
+$L$done_key256:
+ mov DWORD[16+rax],edx
+ xor eax,eax
+ jmp NEAR $L$enc_key_ret
+
+ALIGN 16
+$L$bad_keybits:
+ mov rax,-2
+$L$enc_key_ret:
+ pxor xmm0,xmm0
+ pxor xmm1,xmm1
+ pxor xmm2,xmm2
+ pxor xmm3,xmm3
+ pxor xmm4,xmm4
+ pxor xmm5,xmm5
+ add rsp,8
+
+ DB 0F3h,0C3h ;repret
+$L$SEH_end_set_encrypt_key:
+
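+; The $L$key_expansion_* helpers below combine the AESKEYGENASSIST result in
+; xmm1 with the previous round key (xmm0, and xmm2 for 192/256-bit keys) via
+; SHUFPS/PSLLDQ/XOR steps to produce and store the next round key(s).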
+ALIGN 16
+$L$key_expansion_128:
+ movups XMMWORD[rax],xmm0
+ lea rax,[16+rax]
+$L$key_expansion_128_cold:
+ shufps xmm4,xmm0,16
+ xorps xmm0,xmm4
+ shufps xmm4,xmm0,140
+ xorps xmm0,xmm4
+ shufps xmm1,xmm1,255
+ xorps xmm0,xmm1
+ DB 0F3h,0C3h ;repret
+
+ALIGN 16
+$L$key_expansion_192a:
+ movups XMMWORD[rax],xmm0
+ lea rax,[16+rax]
+$L$key_expansion_192a_cold:
+ movaps xmm5,xmm2
+$L$key_expansion_192b_warm:
+ shufps xmm4,xmm0,16
+ movdqa xmm3,xmm2
+ xorps xmm0,xmm4
+ shufps xmm4,xmm0,140
+ pslldq xmm3,4
+ xorps xmm0,xmm4
+ pshufd xmm1,xmm1,85
+ pxor xmm2,xmm3
+ pxor xmm0,xmm1
+ pshufd xmm3,xmm0,255
+ pxor xmm2,xmm3
+ DB 0F3h,0C3h ;repret
+
+ALIGN 16
+$L$key_expansion_192b:
+ movaps xmm3,xmm0
+ shufps xmm5,xmm0,68
+ movups XMMWORD[rax],xmm5
+ shufps xmm3,xmm2,78
+ movups XMMWORD[16+rax],xmm3
+ lea rax,[32+rax]
+ jmp NEAR $L$key_expansion_192b_warm
+
+ALIGN 16
+$L$key_expansion_256a:
+ movups XMMWORD[rax],xmm2
+ lea rax,[16+rax]
+$L$key_expansion_256a_cold:
+ shufps xmm4,xmm0,16
+ xorps xmm0,xmm4
+ shufps xmm4,xmm0,140
+ xorps xmm0,xmm4
+ shufps xmm1,xmm1,255
+ xorps xmm0,xmm1
+ DB 0F3h,0C3h ;repret
+
+ALIGN 16
+$L$key_expansion_256b:
+ movups XMMWORD[rax],xmm0
+ lea rax,[16+rax]
+
+ shufps xmm4,xmm2,16
+ xorps xmm2,xmm4
+ shufps xmm4,xmm2,140
+ xorps xmm2,xmm4
+ shufps xmm1,xmm1,170
+ xorps xmm2,xmm1
+ DB 0F3h,0C3h ;repret
+
+
+
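+; Constant pool for the AES-NI routines: byte-swap mask, counter increments,
+; the XTS tweak constant ($L$xts_magic, the 0x87 GF(2^128) reduction
+; polynomial), and the PSHUFB/rcon vectors used by the *rounds_alt key
+; schedules, followed by the CRYPTOGAMS attribution string.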
+ALIGN 64
+$L$bswap_mask:
+DB 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
+$L$increment32:
+ DD 6,6,6,0
+$L$increment64:
+ DD 1,0,0,0
+$L$xts_magic:
+ DD 0x87,0,1,0
+$L$increment1:
+DB 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
+$L$key_rotate:
+ DD 0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d
+$L$key_rotate192:
+ DD 0x04070605,0x04070605,0x04070605,0x04070605
+$L$key_rcon1:
+ DD 1,1,1,1
+$L$key_rcon1b:
+ DD 0x1b,0x1b,0x1b,0x1b
+
+DB 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69
+DB 83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83
+DB 32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
+DB 115,108,46,111,114,103,62,0
+ALIGN 64
+EXTERN __imp_RtlVirtualUnwind
+
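+; Win64 structured-exception (SEH) handlers.  Each one locates the saved
+; register/XMM area of the interrupted routine, copies it back into the
+; CONTEXT record (the DD 0xa548f3fc sequences are cld; rep movsq), and then
+; chains to RtlVirtualUnwind via $L$common_seh_tail.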
+ALIGN 16
+ecb_ccm64_se_handler:
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD[120+r8]
+ mov rbx,QWORD[248+r8]
+
+ mov rsi,QWORD[8+r9]
+ mov r11,QWORD[56+r9]
+
+ mov r10d,DWORD[r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jb NEAR $L$common_seh_tail
+
+ mov rax,QWORD[152+r8]
+
+ mov r10d,DWORD[4+r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jae NEAR $L$common_seh_tail
+
+ lea rsi,[rax]
+ lea rdi,[512+r8]
+ mov ecx,8
+ DD 0xa548f3fc ;cld; rep movsq
+ lea rax,[88+rax]
+
+ jmp NEAR $L$common_seh_tail
+
+
+
+ALIGN 16
+ctr_xts_se_handler:
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD[120+r8]
+ mov rbx,QWORD[248+r8]
+
+ mov rsi,QWORD[8+r9]
+ mov r11,QWORD[56+r9]
+
+ mov r10d,DWORD[r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jb NEAR $L$common_seh_tail
+
+ mov rax,QWORD[152+r8]
+
+ mov r10d,DWORD[4+r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jae NEAR $L$common_seh_tail
+
+ mov rax,QWORD[208+r8]
+
+ lea rsi,[((-168))+rax]
+ lea rdi,[512+r8]
+ mov ecx,20
+ DD 0xa548f3fc ;cld; rep movsq
+
+ mov rbp,QWORD[((-8))+rax]
+ mov QWORD[160+r8],rbp
+ jmp NEAR $L$common_seh_tail
+
+
+
+ALIGN 16
+ocb_se_handler:
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD[120+r8]
+ mov rbx,QWORD[248+r8]
+
+ mov rsi,QWORD[8+r9]
+ mov r11,QWORD[56+r9]
+
+ mov r10d,DWORD[r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jb NEAR $L$common_seh_tail
+
+ mov r10d,DWORD[4+r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jae NEAR $L$common_seh_tail
+
+ mov r10d,DWORD[8+r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jae NEAR $L$ocb_no_xmm
+
+ mov rax,QWORD[152+r8]
+
+ lea rsi,[rax]
+ lea rdi,[512+r8]
+ mov ecx,20
+ DD 0xa548f3fc ;cld; rep movsq
+ lea rax,[((160+40))+rax]
+
+$L$ocb_no_xmm:
+ mov rbx,QWORD[((-8))+rax]
+ mov rbp,QWORD[((-16))+rax]
+ mov r12,QWORD[((-24))+rax]
+ mov r13,QWORD[((-32))+rax]
+ mov r14,QWORD[((-40))+rax]
+
+ mov QWORD[144+r8],rbx
+ mov QWORD[160+r8],rbp
+ mov QWORD[216+r8],r12
+ mov QWORD[224+r8],r13
+ mov QWORD[232+r8],r14
+
+ jmp NEAR $L$common_seh_tail
+
+
+ALIGN 16
+cbc_se_handler:
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD[152+r8]
+ mov rbx,QWORD[248+r8]
+
+ lea r10,[$L$cbc_decrypt_bulk]
+ cmp rbx,r10
+ jb NEAR $L$common_seh_tail
+
+ mov rax,QWORD[120+r8]
+
+ lea r10,[$L$cbc_decrypt_body]
+ cmp rbx,r10
+ jb NEAR $L$common_seh_tail
+
+ mov rax,QWORD[152+r8]
+
+ lea r10,[$L$cbc_ret]
+ cmp rbx,r10
+ jae NEAR $L$common_seh_tail
+
+ lea rsi,[16+rax]
+ lea rdi,[512+r8]
+ mov ecx,20
+ DD 0xa548f3fc ;cld; rep movsq
+
+ mov rax,QWORD[208+r8]
+
+ mov rbp,QWORD[((-8))+rax]
+ mov QWORD[160+r8],rbp
+
+$L$common_seh_tail:
+ mov rdi,QWORD[8+rax]
+ mov rsi,QWORD[16+rax]
+ mov QWORD[152+r8],rax
+ mov QWORD[168+r8],rsi
+ mov QWORD[176+r8],rdi
+
+ mov rdi,QWORD[40+r9]
+ mov rsi,r8
+ mov ecx,154
+ DD 0xa548f3fc ;cld; rep movsq
+
+ mov rsi,r9
+ xor rcx,rcx
+ mov rdx,QWORD[8+rsi]
+ mov r8,QWORD[rsi]
+ mov r9,QWORD[16+rsi]
+ mov r10,QWORD[40+rsi]
+ lea r11,[56+rsi]
+ lea r12,[24+rsi]
+ mov QWORD[32+rsp],r10
+ mov QWORD[40+rsp],r11
+ mov QWORD[48+rsp],r12
+ mov QWORD[56+rsp],rcx
+ call QWORD[__imp_RtlVirtualUnwind]
+
+ mov eax,1
+ add rsp,64
+ popfq
+ pop r15
+ pop r14
+ pop r13
+ pop r12
+ pop rbp
+ pop rbx
+ pop rdi
+ pop rsi
+ DB 0F3h,0C3h ;repret
+
+
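+; .pdata/.xdata: Win64 exception tables that associate each routine's code
+; range with its unwind handler and the body/epilogue labels used above.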
+section .pdata rdata align=4
+ALIGN 4
+ DD $L$SEH_begin_aesni_ecb_encrypt wrt ..imagebase
+ DD $L$SEH_end_aesni_ecb_encrypt wrt ..imagebase
+ DD $L$SEH_info_ecb wrt ..imagebase
+
+ DD $L$SEH_begin_aesni_ccm64_encrypt_blocks wrt ..imagebase
+ DD $L$SEH_end_aesni_ccm64_encrypt_blocks wrt ..imagebase
+ DD $L$SEH_info_ccm64_enc wrt ..imagebase
+
+ DD $L$SEH_begin_aesni_ccm64_decrypt_blocks wrt ..imagebase
+ DD $L$SEH_end_aesni_ccm64_decrypt_blocks wrt ..imagebase
+ DD $L$SEH_info_ccm64_dec wrt ..imagebase
+
+ DD $L$SEH_begin_aesni_ctr32_encrypt_blocks wrt ..imagebase
+ DD $L$SEH_end_aesni_ctr32_encrypt_blocks wrt ..imagebase
+ DD $L$SEH_info_ctr32 wrt ..imagebase
+
+ DD $L$SEH_begin_aesni_xts_encrypt wrt ..imagebase
+ DD $L$SEH_end_aesni_xts_encrypt wrt ..imagebase
+ DD $L$SEH_info_xts_enc wrt ..imagebase
+
+ DD $L$SEH_begin_aesni_xts_decrypt wrt ..imagebase
+ DD $L$SEH_end_aesni_xts_decrypt wrt ..imagebase
+ DD $L$SEH_info_xts_dec wrt ..imagebase
+
+ DD $L$SEH_begin_aesni_ocb_encrypt wrt ..imagebase
+ DD $L$SEH_end_aesni_ocb_encrypt wrt ..imagebase
+ DD $L$SEH_info_ocb_enc wrt ..imagebase
+
+ DD $L$SEH_begin_aesni_ocb_decrypt wrt ..imagebase
+ DD $L$SEH_end_aesni_ocb_decrypt wrt ..imagebase
+ DD $L$SEH_info_ocb_dec wrt ..imagebase
+ DD $L$SEH_begin_aesni_cbc_encrypt wrt ..imagebase
+ DD $L$SEH_end_aesni_cbc_encrypt wrt ..imagebase
+ DD $L$SEH_info_cbc wrt ..imagebase
+
+ DD aesni_set_decrypt_key wrt ..imagebase
+ DD $L$SEH_end_set_decrypt_key wrt ..imagebase
+ DD $L$SEH_info_key wrt ..imagebase
+
+ DD aesni_set_encrypt_key wrt ..imagebase
+ DD $L$SEH_end_set_encrypt_key wrt ..imagebase
+ DD $L$SEH_info_key wrt ..imagebase
+section .xdata rdata align=8
+ALIGN 8
+$L$SEH_info_ecb:
+DB 9,0,0,0
+ DD ecb_ccm64_se_handler wrt ..imagebase
+ DD $L$ecb_enc_body wrt ..imagebase,$L$ecb_enc_ret wrt ..imagebase
+$L$SEH_info_ccm64_enc:
+DB 9,0,0,0
+ DD ecb_ccm64_se_handler wrt ..imagebase
+ DD $L$ccm64_enc_body wrt ..imagebase,$L$ccm64_enc_ret wrt ..imagebase
+$L$SEH_info_ccm64_dec:
+DB 9,0,0,0
+ DD ecb_ccm64_se_handler wrt ..imagebase
+ DD $L$ccm64_dec_body wrt ..imagebase,$L$ccm64_dec_ret wrt ..imagebase
+$L$SEH_info_ctr32:
+DB 9,0,0,0
+ DD ctr_xts_se_handler wrt ..imagebase
+ DD $L$ctr32_body wrt ..imagebase,$L$ctr32_epilogue wrt ..imagebase
+$L$SEH_info_xts_enc:
+DB 9,0,0,0
+ DD ctr_xts_se_handler wrt ..imagebase
+ DD $L$xts_enc_body wrt ..imagebase,$L$xts_enc_epilogue wrt ..imagebase
+$L$SEH_info_xts_dec:
+DB 9,0,0,0
+ DD ctr_xts_se_handler wrt ..imagebase
+ DD $L$xts_dec_body wrt ..imagebase,$L$xts_dec_epilogue wrt ..imagebase
+$L$SEH_info_ocb_enc:
+DB 9,0,0,0
+ DD ocb_se_handler wrt ..imagebase
+ DD $L$ocb_enc_body wrt ..imagebase,$L$ocb_enc_epilogue wrt ..imagebase
+ DD $L$ocb_enc_pop wrt ..imagebase
+ DD 0
+$L$SEH_info_ocb_dec:
+DB 9,0,0,0
+ DD ocb_se_handler wrt ..imagebase
+ DD $L$ocb_dec_body wrt ..imagebase,$L$ocb_dec_epilogue wrt ..imagebase
+ DD $L$ocb_dec_pop wrt ..imagebase
+ DD 0
+$L$SEH_info_cbc:
+DB 9,0,0,0
+ DD cbc_se_handler wrt ..imagebase
+$L$SEH_info_key:
+DB 0x01,0x04,0x01,0x00
+DB 0x04,0x02,0x00,0x00
diff --git a/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/bsaes-x86_64.nasm b/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/bsaes-x86_64.nasm
new file mode 100644
index 0000000000..3ef944cab2
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/bsaes-x86_64.nasm
@@ -0,0 +1,2823 @@
+default rel
+%define XMMWORD
+%define YMMWORD
+%define ZMMWORD
+section .text code align=64
+
+
+EXTERN asm_AES_encrypt
+EXTERN asm_AES_decrypt
+
+
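+; _bsaes_encrypt8: bit-sliced AES encryption of 8 blocks held in
+; xmm15,xmm0..xmm6.  The PSRLQ/PAND/PXOR/PSLLQ ladders transpose the blocks
+; into bit-sliced form (and back at $L$enc_done), the XOR/AND/OR network at
+; $L$enc_sbox computes the AES S-box over GF(2), and the PSHUFB/PSHUFD steps
+; carry out ShiftRows and MixColumns.  rax points at the converted key
+; schedule and r10d holds the round count.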
+ALIGN 64
+_bsaes_encrypt8:
+
+ lea r11,[$L$BS0]
+
+ movdqa xmm8,XMMWORD[rax]
+ lea rax,[16+rax]
+ movdqa xmm7,XMMWORD[80+r11]
+ pxor xmm15,xmm8
+ pxor xmm0,xmm8
+ pxor xmm1,xmm8
+ pxor xmm2,xmm8
+DB 102,68,15,56,0,255
+DB 102,15,56,0,199
+ pxor xmm3,xmm8
+ pxor xmm4,xmm8
+DB 102,15,56,0,207
+DB 102,15,56,0,215
+ pxor xmm5,xmm8
+ pxor xmm6,xmm8
+DB 102,15,56,0,223
+DB 102,15,56,0,231
+DB 102,15,56,0,239
+DB 102,15,56,0,247
+_bsaes_encrypt8_bitslice:
+ movdqa xmm7,XMMWORD[r11]
+ movdqa xmm8,XMMWORD[16+r11]
+ movdqa xmm9,xmm5
+ psrlq xmm5,1
+ movdqa xmm10,xmm3
+ psrlq xmm3,1
+ pxor xmm5,xmm6
+ pxor xmm3,xmm4
+ pand xmm5,xmm7
+ pand xmm3,xmm7
+ pxor xmm6,xmm5
+ psllq xmm5,1
+ pxor xmm4,xmm3
+ psllq xmm3,1
+ pxor xmm5,xmm9
+ pxor xmm3,xmm10
+ movdqa xmm9,xmm1
+ psrlq xmm1,1
+ movdqa xmm10,xmm15
+ psrlq xmm15,1
+ pxor xmm1,xmm2
+ pxor xmm15,xmm0
+ pand xmm1,xmm7
+ pand xmm15,xmm7
+ pxor xmm2,xmm1
+ psllq xmm1,1
+ pxor xmm0,xmm15
+ psllq xmm15,1
+ pxor xmm1,xmm9
+ pxor xmm15,xmm10
+ movdqa xmm7,XMMWORD[32+r11]
+ movdqa xmm9,xmm4
+ psrlq xmm4,2
+ movdqa xmm10,xmm3
+ psrlq xmm3,2
+ pxor xmm4,xmm6
+ pxor xmm3,xmm5
+ pand xmm4,xmm8
+ pand xmm3,xmm8
+ pxor xmm6,xmm4
+ psllq xmm4,2
+ pxor xmm5,xmm3
+ psllq xmm3,2
+ pxor xmm4,xmm9
+ pxor xmm3,xmm10
+ movdqa xmm9,xmm0
+ psrlq xmm0,2
+ movdqa xmm10,xmm15
+ psrlq xmm15,2
+ pxor xmm0,xmm2
+ pxor xmm15,xmm1
+ pand xmm0,xmm8
+ pand xmm15,xmm8
+ pxor xmm2,xmm0
+ psllq xmm0,2
+ pxor xmm1,xmm15
+ psllq xmm15,2
+ pxor xmm0,xmm9
+ pxor xmm15,xmm10
+ movdqa xmm9,xmm2
+ psrlq xmm2,4
+ movdqa xmm10,xmm1
+ psrlq xmm1,4
+ pxor xmm2,xmm6
+ pxor xmm1,xmm5
+ pand xmm2,xmm7
+ pand xmm1,xmm7
+ pxor xmm6,xmm2
+ psllq xmm2,4
+ pxor xmm5,xmm1
+ psllq xmm1,4
+ pxor xmm2,xmm9
+ pxor xmm1,xmm10
+ movdqa xmm9,xmm0
+ psrlq xmm0,4
+ movdqa xmm10,xmm15
+ psrlq xmm15,4
+ pxor xmm0,xmm4
+ pxor xmm15,xmm3
+ pand xmm0,xmm7
+ pand xmm15,xmm7
+ pxor xmm4,xmm0
+ psllq xmm0,4
+ pxor xmm3,xmm15
+ psllq xmm15,4
+ pxor xmm0,xmm9
+ pxor xmm15,xmm10
+ dec r10d
+ jmp NEAR $L$enc_sbox
+ALIGN 16
+$L$enc_loop:
+ pxor xmm15,XMMWORD[rax]
+ pxor xmm0,XMMWORD[16+rax]
+ pxor xmm1,XMMWORD[32+rax]
+ pxor xmm2,XMMWORD[48+rax]
+DB 102,68,15,56,0,255
+DB 102,15,56,0,199
+ pxor xmm3,XMMWORD[64+rax]
+ pxor xmm4,XMMWORD[80+rax]
+DB 102,15,56,0,207
+DB 102,15,56,0,215
+ pxor xmm5,XMMWORD[96+rax]
+ pxor xmm6,XMMWORD[112+rax]
+DB 102,15,56,0,223
+DB 102,15,56,0,231
+DB 102,15,56,0,239
+DB 102,15,56,0,247
+ lea rax,[128+rax]
+$L$enc_sbox:
+ pxor xmm4,xmm5
+ pxor xmm1,xmm0
+ pxor xmm2,xmm15
+ pxor xmm5,xmm1
+ pxor xmm4,xmm15
+
+ pxor xmm5,xmm2
+ pxor xmm2,xmm6
+ pxor xmm6,xmm4
+ pxor xmm2,xmm3
+ pxor xmm3,xmm4
+ pxor xmm2,xmm0
+
+ pxor xmm1,xmm6
+ pxor xmm0,xmm4
+ movdqa xmm10,xmm6
+ movdqa xmm9,xmm0
+ movdqa xmm8,xmm4
+ movdqa xmm12,xmm1
+ movdqa xmm11,xmm5
+
+ pxor xmm10,xmm3
+ pxor xmm9,xmm1
+ pxor xmm8,xmm2
+ movdqa xmm13,xmm10
+ pxor xmm12,xmm3
+ movdqa xmm7,xmm9
+ pxor xmm11,xmm15
+ movdqa xmm14,xmm10
+
+ por xmm9,xmm8
+ por xmm10,xmm11
+ pxor xmm14,xmm7
+ pand xmm13,xmm11
+ pxor xmm11,xmm8
+ pand xmm7,xmm8
+ pand xmm14,xmm11
+ movdqa xmm11,xmm2
+ pxor xmm11,xmm15
+ pand xmm12,xmm11
+ pxor xmm10,xmm12
+ pxor xmm9,xmm12
+ movdqa xmm12,xmm6
+ movdqa xmm11,xmm4
+ pxor xmm12,xmm0
+ pxor xmm11,xmm5
+ movdqa xmm8,xmm12
+ pand xmm12,xmm11
+ por xmm8,xmm11
+ pxor xmm7,xmm12
+ pxor xmm10,xmm14
+ pxor xmm9,xmm13
+ pxor xmm8,xmm14
+ movdqa xmm11,xmm1
+ pxor xmm7,xmm13
+ movdqa xmm12,xmm3
+ pxor xmm8,xmm13
+ movdqa xmm13,xmm0
+ pand xmm11,xmm2
+ movdqa xmm14,xmm6
+ pand xmm12,xmm15
+ pand xmm13,xmm4
+ por xmm14,xmm5
+ pxor xmm10,xmm11
+ pxor xmm9,xmm12
+ pxor xmm8,xmm13
+ pxor xmm7,xmm14
+
+
+
+
+
+ movdqa xmm11,xmm10
+ pand xmm10,xmm8
+ pxor xmm11,xmm9
+
+ movdqa xmm13,xmm7
+ movdqa xmm14,xmm11
+ pxor xmm13,xmm10
+ pand xmm14,xmm13
+
+ movdqa xmm12,xmm8
+ pxor xmm14,xmm9
+ pxor xmm12,xmm7
+
+ pxor xmm10,xmm9
+
+ pand xmm12,xmm10
+
+ movdqa xmm9,xmm13
+ pxor xmm12,xmm7
+
+ pxor xmm9,xmm12
+ pxor xmm8,xmm12
+
+ pand xmm9,xmm7
+
+ pxor xmm13,xmm9
+ pxor xmm8,xmm9
+
+ pand xmm13,xmm14
+
+ pxor xmm13,xmm11
+ movdqa xmm11,xmm5
+ movdqa xmm7,xmm4
+ movdqa xmm9,xmm14
+ pxor xmm9,xmm13
+ pand xmm9,xmm5
+ pxor xmm5,xmm4
+ pand xmm4,xmm14
+ pand xmm5,xmm13
+ pxor xmm5,xmm4
+ pxor xmm4,xmm9
+ pxor xmm11,xmm15
+ pxor xmm7,xmm2
+ pxor xmm14,xmm12
+ pxor xmm13,xmm8
+ movdqa xmm10,xmm14
+ movdqa xmm9,xmm12
+ pxor xmm10,xmm13
+ pxor xmm9,xmm8
+ pand xmm10,xmm11
+ pand xmm9,xmm15
+ pxor xmm11,xmm7
+ pxor xmm15,xmm2
+ pand xmm7,xmm14
+ pand xmm2,xmm12
+ pand xmm11,xmm13
+ pand xmm15,xmm8
+ pxor xmm7,xmm11
+ pxor xmm15,xmm2
+ pxor xmm11,xmm10
+ pxor xmm2,xmm9
+ pxor xmm5,xmm11
+ pxor xmm15,xmm11
+ pxor xmm4,xmm7
+ pxor xmm2,xmm7
+
+ movdqa xmm11,xmm6
+ movdqa xmm7,xmm0
+ pxor xmm11,xmm3
+ pxor xmm7,xmm1
+ movdqa xmm10,xmm14
+ movdqa xmm9,xmm12
+ pxor xmm10,xmm13
+ pxor xmm9,xmm8
+ pand xmm10,xmm11
+ pand xmm9,xmm3
+ pxor xmm11,xmm7
+ pxor xmm3,xmm1
+ pand xmm7,xmm14
+ pand xmm1,xmm12
+ pand xmm11,xmm13
+ pand xmm3,xmm8
+ pxor xmm7,xmm11
+ pxor xmm3,xmm1
+ pxor xmm11,xmm10
+ pxor xmm1,xmm9
+ pxor xmm14,xmm12
+ pxor xmm13,xmm8
+ movdqa xmm10,xmm14
+ pxor xmm10,xmm13
+ pand xmm10,xmm6
+ pxor xmm6,xmm0
+ pand xmm0,xmm14
+ pand xmm6,xmm13
+ pxor xmm6,xmm0
+ pxor xmm0,xmm10
+ pxor xmm6,xmm11
+ pxor xmm3,xmm11
+ pxor xmm0,xmm7
+ pxor xmm1,xmm7
+ pxor xmm6,xmm15
+ pxor xmm0,xmm5
+ pxor xmm3,xmm6
+ pxor xmm5,xmm15
+ pxor xmm15,xmm0
+
+ pxor xmm0,xmm4
+ pxor xmm4,xmm1
+ pxor xmm1,xmm2
+ pxor xmm2,xmm4
+ pxor xmm3,xmm4
+
+ pxor xmm5,xmm2
+ dec r10d
+ jl NEAR $L$enc_done
+ pshufd xmm7,xmm15,0x93
+ pshufd xmm8,xmm0,0x93
+ pxor xmm15,xmm7
+ pshufd xmm9,xmm3,0x93
+ pxor xmm0,xmm8
+ pshufd xmm10,xmm5,0x93
+ pxor xmm3,xmm9
+ pshufd xmm11,xmm2,0x93
+ pxor xmm5,xmm10
+ pshufd xmm12,xmm6,0x93
+ pxor xmm2,xmm11
+ pshufd xmm13,xmm1,0x93
+ pxor xmm6,xmm12
+ pshufd xmm14,xmm4,0x93
+ pxor xmm1,xmm13
+ pxor xmm4,xmm14
+
+ pxor xmm8,xmm15
+ pxor xmm7,xmm4
+ pxor xmm8,xmm4
+ pshufd xmm15,xmm15,0x4E
+ pxor xmm9,xmm0
+ pshufd xmm0,xmm0,0x4E
+ pxor xmm12,xmm2
+ pxor xmm15,xmm7
+ pxor xmm13,xmm6
+ pxor xmm0,xmm8
+ pxor xmm11,xmm5
+ pshufd xmm7,xmm2,0x4E
+ pxor xmm14,xmm1
+ pshufd xmm8,xmm6,0x4E
+ pxor xmm10,xmm3
+ pshufd xmm2,xmm5,0x4E
+ pxor xmm10,xmm4
+ pshufd xmm6,xmm4,0x4E
+ pxor xmm11,xmm4
+ pshufd xmm5,xmm1,0x4E
+ pxor xmm7,xmm11
+ pshufd xmm1,xmm3,0x4E
+ pxor xmm8,xmm12
+ pxor xmm2,xmm10
+ pxor xmm6,xmm14
+ pxor xmm5,xmm13
+ movdqa xmm3,xmm7
+ pxor xmm1,xmm9
+ movdqa xmm4,xmm8
+ movdqa xmm7,XMMWORD[48+r11]
+ jnz NEAR $L$enc_loop
+ movdqa xmm7,XMMWORD[64+r11]
+ jmp NEAR $L$enc_loop
+ALIGN 16
+$L$enc_done:
+ movdqa xmm7,XMMWORD[r11]
+ movdqa xmm8,XMMWORD[16+r11]
+ movdqa xmm9,xmm1
+ psrlq xmm1,1
+ movdqa xmm10,xmm2
+ psrlq xmm2,1
+ pxor xmm1,xmm4
+ pxor xmm2,xmm6
+ pand xmm1,xmm7
+ pand xmm2,xmm7
+ pxor xmm4,xmm1
+ psllq xmm1,1
+ pxor xmm6,xmm2
+ psllq xmm2,1
+ pxor xmm1,xmm9
+ pxor xmm2,xmm10
+ movdqa xmm9,xmm3
+ psrlq xmm3,1
+ movdqa xmm10,xmm15
+ psrlq xmm15,1
+ pxor xmm3,xmm5
+ pxor xmm15,xmm0
+ pand xmm3,xmm7
+ pand xmm15,xmm7
+ pxor xmm5,xmm3
+ psllq xmm3,1
+ pxor xmm0,xmm15
+ psllq xmm15,1
+ pxor xmm3,xmm9
+ pxor xmm15,xmm10
+ movdqa xmm7,XMMWORD[32+r11]
+ movdqa xmm9,xmm6
+ psrlq xmm6,2
+ movdqa xmm10,xmm2
+ psrlq xmm2,2
+ pxor xmm6,xmm4
+ pxor xmm2,xmm1
+ pand xmm6,xmm8
+ pand xmm2,xmm8
+ pxor xmm4,xmm6
+ psllq xmm6,2
+ pxor xmm1,xmm2
+ psllq xmm2,2
+ pxor xmm6,xmm9
+ pxor xmm2,xmm10
+ movdqa xmm9,xmm0
+ psrlq xmm0,2
+ movdqa xmm10,xmm15
+ psrlq xmm15,2
+ pxor xmm0,xmm5
+ pxor xmm15,xmm3
+ pand xmm0,xmm8
+ pand xmm15,xmm8
+ pxor xmm5,xmm0
+ psllq xmm0,2
+ pxor xmm3,xmm15
+ psllq xmm15,2
+ pxor xmm0,xmm9
+ pxor xmm15,xmm10
+ movdqa xmm9,xmm5
+ psrlq xmm5,4
+ movdqa xmm10,xmm3
+ psrlq xmm3,4
+ pxor xmm5,xmm4
+ pxor xmm3,xmm1
+ pand xmm5,xmm7
+ pand xmm3,xmm7
+ pxor xmm4,xmm5
+ psllq xmm5,4
+ pxor xmm1,xmm3
+ psllq xmm3,4
+ pxor xmm5,xmm9
+ pxor xmm3,xmm10
+ movdqa xmm9,xmm0
+ psrlq xmm0,4
+ movdqa xmm10,xmm15
+ psrlq xmm15,4
+ pxor xmm0,xmm6
+ pxor xmm15,xmm2
+ pand xmm0,xmm7
+ pand xmm15,xmm7
+ pxor xmm6,xmm0
+ psllq xmm0,4
+ pxor xmm2,xmm15
+ psllq xmm15,4
+ pxor xmm0,xmm9
+ pxor xmm15,xmm10
+ movdqa xmm7,XMMWORD[rax]
+ pxor xmm3,xmm7
+ pxor xmm5,xmm7
+ pxor xmm2,xmm7
+ pxor xmm6,xmm7
+ pxor xmm1,xmm7
+ pxor xmm4,xmm7
+ pxor xmm15,xmm7
+ pxor xmm0,xmm7
+ DB 0F3h,0C3h ;repret
+
+
+
+
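+; _bsaes_decrypt8: the inverse of _bsaes_encrypt8: the same bit-slicing
+; transposition, but with the inverse S-box network and the inverse
+; ShiftRows/MixColumns ordering.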
+ALIGN 64
+_bsaes_decrypt8:
+
+ lea r11,[$L$BS0]
+
+ movdqa xmm8,XMMWORD[rax]
+ lea rax,[16+rax]
+ movdqa xmm7,XMMWORD[((-48))+r11]
+ pxor xmm15,xmm8
+ pxor xmm0,xmm8
+ pxor xmm1,xmm8
+ pxor xmm2,xmm8
+DB 102,68,15,56,0,255
+DB 102,15,56,0,199
+ pxor xmm3,xmm8
+ pxor xmm4,xmm8
+DB 102,15,56,0,207
+DB 102,15,56,0,215
+ pxor xmm5,xmm8
+ pxor xmm6,xmm8
+DB 102,15,56,0,223
+DB 102,15,56,0,231
+DB 102,15,56,0,239
+DB 102,15,56,0,247
+ movdqa xmm7,XMMWORD[r11]
+ movdqa xmm8,XMMWORD[16+r11]
+ movdqa xmm9,xmm5
+ psrlq xmm5,1
+ movdqa xmm10,xmm3
+ psrlq xmm3,1
+ pxor xmm5,xmm6
+ pxor xmm3,xmm4
+ pand xmm5,xmm7
+ pand xmm3,xmm7
+ pxor xmm6,xmm5
+ psllq xmm5,1
+ pxor xmm4,xmm3
+ psllq xmm3,1
+ pxor xmm5,xmm9
+ pxor xmm3,xmm10
+ movdqa xmm9,xmm1
+ psrlq xmm1,1
+ movdqa xmm10,xmm15
+ psrlq xmm15,1
+ pxor xmm1,xmm2
+ pxor xmm15,xmm0
+ pand xmm1,xmm7
+ pand xmm15,xmm7
+ pxor xmm2,xmm1
+ psllq xmm1,1
+ pxor xmm0,xmm15
+ psllq xmm15,1
+ pxor xmm1,xmm9
+ pxor xmm15,xmm10
+ movdqa xmm7,XMMWORD[32+r11]
+ movdqa xmm9,xmm4
+ psrlq xmm4,2
+ movdqa xmm10,xmm3
+ psrlq xmm3,2
+ pxor xmm4,xmm6
+ pxor xmm3,xmm5
+ pand xmm4,xmm8
+ pand xmm3,xmm8
+ pxor xmm6,xmm4
+ psllq xmm4,2
+ pxor xmm5,xmm3
+ psllq xmm3,2
+ pxor xmm4,xmm9
+ pxor xmm3,xmm10
+ movdqa xmm9,xmm0
+ psrlq xmm0,2
+ movdqa xmm10,xmm15
+ psrlq xmm15,2
+ pxor xmm0,xmm2
+ pxor xmm15,xmm1
+ pand xmm0,xmm8
+ pand xmm15,xmm8
+ pxor xmm2,xmm0
+ psllq xmm0,2
+ pxor xmm1,xmm15
+ psllq xmm15,2
+ pxor xmm0,xmm9
+ pxor xmm15,xmm10
+ movdqa xmm9,xmm2
+ psrlq xmm2,4
+ movdqa xmm10,xmm1
+ psrlq xmm1,4
+ pxor xmm2,xmm6
+ pxor xmm1,xmm5
+ pand xmm2,xmm7
+ pand xmm1,xmm7
+ pxor xmm6,xmm2
+ psllq xmm2,4
+ pxor xmm5,xmm1
+ psllq xmm1,4
+ pxor xmm2,xmm9
+ pxor xmm1,xmm10
+ movdqa xmm9,xmm0
+ psrlq xmm0,4
+ movdqa xmm10,xmm15
+ psrlq xmm15,4
+ pxor xmm0,xmm4
+ pxor xmm15,xmm3
+ pand xmm0,xmm7
+ pand xmm15,xmm7
+ pxor xmm4,xmm0
+ psllq xmm0,4
+ pxor xmm3,xmm15
+ psllq xmm15,4
+ pxor xmm0,xmm9
+ pxor xmm15,xmm10
+ dec r10d
+ jmp NEAR $L$dec_sbox
+ALIGN 16
+$L$dec_loop:
+ pxor xmm15,XMMWORD[rax]
+ pxor xmm0,XMMWORD[16+rax]
+ pxor xmm1,XMMWORD[32+rax]
+ pxor xmm2,XMMWORD[48+rax]
+DB 102,68,15,56,0,255
+DB 102,15,56,0,199
+ pxor xmm3,XMMWORD[64+rax]
+ pxor xmm4,XMMWORD[80+rax]
+DB 102,15,56,0,207
+DB 102,15,56,0,215
+ pxor xmm5,XMMWORD[96+rax]
+ pxor xmm6,XMMWORD[112+rax]
+DB 102,15,56,0,223
+DB 102,15,56,0,231
+DB 102,15,56,0,239
+DB 102,15,56,0,247
+ lea rax,[128+rax]
+$L$dec_sbox:
+ pxor xmm2,xmm3
+
+ pxor xmm3,xmm6
+ pxor xmm1,xmm6
+ pxor xmm5,xmm3
+ pxor xmm6,xmm5
+ pxor xmm0,xmm6
+
+ pxor xmm15,xmm0
+ pxor xmm1,xmm4
+ pxor xmm2,xmm15
+ pxor xmm4,xmm15
+ pxor xmm0,xmm2
+ movdqa xmm10,xmm2
+ movdqa xmm9,xmm6
+ movdqa xmm8,xmm0
+ movdqa xmm12,xmm3
+ movdqa xmm11,xmm4
+
+ pxor xmm10,xmm15
+ pxor xmm9,xmm3
+ pxor xmm8,xmm5
+ movdqa xmm13,xmm10
+ pxor xmm12,xmm15
+ movdqa xmm7,xmm9
+ pxor xmm11,xmm1
+ movdqa xmm14,xmm10
+
+ por xmm9,xmm8
+ por xmm10,xmm11
+ pxor xmm14,xmm7
+ pand xmm13,xmm11
+ pxor xmm11,xmm8
+ pand xmm7,xmm8
+ pand xmm14,xmm11
+ movdqa xmm11,xmm5
+ pxor xmm11,xmm1
+ pand xmm12,xmm11
+ pxor xmm10,xmm12
+ pxor xmm9,xmm12
+ movdqa xmm12,xmm2
+ movdqa xmm11,xmm0
+ pxor xmm12,xmm6
+ pxor xmm11,xmm4
+ movdqa xmm8,xmm12
+ pand xmm12,xmm11
+ por xmm8,xmm11
+ pxor xmm7,xmm12
+ pxor xmm10,xmm14
+ pxor xmm9,xmm13
+ pxor xmm8,xmm14
+ movdqa xmm11,xmm3
+ pxor xmm7,xmm13
+ movdqa xmm12,xmm15
+ pxor xmm8,xmm13
+ movdqa xmm13,xmm6
+ pand xmm11,xmm5
+ movdqa xmm14,xmm2
+ pand xmm12,xmm1
+ pand xmm13,xmm0
+ por xmm14,xmm4
+ pxor xmm10,xmm11
+ pxor xmm9,xmm12
+ pxor xmm8,xmm13
+ pxor xmm7,xmm14
+
+
+
+
+
+ movdqa xmm11,xmm10
+ pand xmm10,xmm8
+ pxor xmm11,xmm9
+
+ movdqa xmm13,xmm7
+ movdqa xmm14,xmm11
+ pxor xmm13,xmm10
+ pand xmm14,xmm13
+
+ movdqa xmm12,xmm8
+ pxor xmm14,xmm9
+ pxor xmm12,xmm7
+
+ pxor xmm10,xmm9
+
+ pand xmm12,xmm10
+
+ movdqa xmm9,xmm13
+ pxor xmm12,xmm7
+
+ pxor xmm9,xmm12
+ pxor xmm8,xmm12
+
+ pand xmm9,xmm7
+
+ pxor xmm13,xmm9
+ pxor xmm8,xmm9
+
+ pand xmm13,xmm14
+
+ pxor xmm13,xmm11
+ movdqa xmm11,xmm4
+ movdqa xmm7,xmm0
+ movdqa xmm9,xmm14
+ pxor xmm9,xmm13
+ pand xmm9,xmm4
+ pxor xmm4,xmm0
+ pand xmm0,xmm14
+ pand xmm4,xmm13
+ pxor xmm4,xmm0
+ pxor xmm0,xmm9
+ pxor xmm11,xmm1
+ pxor xmm7,xmm5
+ pxor xmm14,xmm12
+ pxor xmm13,xmm8
+ movdqa xmm10,xmm14
+ movdqa xmm9,xmm12
+ pxor xmm10,xmm13
+ pxor xmm9,xmm8
+ pand xmm10,xmm11
+ pand xmm9,xmm1
+ pxor xmm11,xmm7
+ pxor xmm1,xmm5
+ pand xmm7,xmm14
+ pand xmm5,xmm12
+ pand xmm11,xmm13
+ pand xmm1,xmm8
+ pxor xmm7,xmm11
+ pxor xmm1,xmm5
+ pxor xmm11,xmm10
+ pxor xmm5,xmm9
+ pxor xmm4,xmm11
+ pxor xmm1,xmm11
+ pxor xmm0,xmm7
+ pxor xmm5,xmm7
+
+ movdqa xmm11,xmm2
+ movdqa xmm7,xmm6
+ pxor xmm11,xmm15
+ pxor xmm7,xmm3
+ movdqa xmm10,xmm14
+ movdqa xmm9,xmm12
+ pxor xmm10,xmm13
+ pxor xmm9,xmm8
+ pand xmm10,xmm11
+ pand xmm9,xmm15
+ pxor xmm11,xmm7
+ pxor xmm15,xmm3
+ pand xmm7,xmm14
+ pand xmm3,xmm12
+ pand xmm11,xmm13
+ pand xmm15,xmm8
+ pxor xmm7,xmm11
+ pxor xmm15,xmm3
+ pxor xmm11,xmm10
+ pxor xmm3,xmm9
+ pxor xmm14,xmm12
+ pxor xmm13,xmm8
+ movdqa xmm10,xmm14
+ pxor xmm10,xmm13
+ pand xmm10,xmm2
+ pxor xmm2,xmm6
+ pand xmm6,xmm14
+ pand xmm2,xmm13
+ pxor xmm2,xmm6
+ pxor xmm6,xmm10
+ pxor xmm2,xmm11
+ pxor xmm15,xmm11
+ pxor xmm6,xmm7
+ pxor xmm3,xmm7
+ pxor xmm0,xmm6
+ pxor xmm5,xmm4
+
+ pxor xmm3,xmm0
+ pxor xmm1,xmm6
+ pxor xmm4,xmm6
+ pxor xmm3,xmm1
+ pxor xmm6,xmm15
+ pxor xmm3,xmm4
+ pxor xmm2,xmm5
+ pxor xmm5,xmm0
+ pxor xmm2,xmm3
+
+ pxor xmm3,xmm15
+ pxor xmm6,xmm2
+ dec r10d
+ jl NEAR $L$dec_done
+
+ pshufd xmm7,xmm15,0x4E
+ pshufd xmm13,xmm2,0x4E
+ pxor xmm7,xmm15
+ pshufd xmm14,xmm4,0x4E
+ pxor xmm13,xmm2
+ pshufd xmm8,xmm0,0x4E
+ pxor xmm14,xmm4
+ pshufd xmm9,xmm5,0x4E
+ pxor xmm8,xmm0
+ pshufd xmm10,xmm3,0x4E
+ pxor xmm9,xmm5
+ pxor xmm15,xmm13
+ pxor xmm0,xmm13
+ pshufd xmm11,xmm1,0x4E
+ pxor xmm10,xmm3
+ pxor xmm5,xmm7
+ pxor xmm3,xmm8
+ pshufd xmm12,xmm6,0x4E
+ pxor xmm11,xmm1
+ pxor xmm0,xmm14
+ pxor xmm1,xmm9
+ pxor xmm12,xmm6
+
+ pxor xmm5,xmm14
+ pxor xmm3,xmm13
+ pxor xmm1,xmm13
+ pxor xmm6,xmm10
+ pxor xmm2,xmm11
+ pxor xmm1,xmm14
+ pxor xmm6,xmm14
+ pxor xmm4,xmm12
+ pshufd xmm7,xmm15,0x93
+ pshufd xmm8,xmm0,0x93
+ pxor xmm15,xmm7
+ pshufd xmm9,xmm5,0x93
+ pxor xmm0,xmm8
+ pshufd xmm10,xmm3,0x93
+ pxor xmm5,xmm9
+ pshufd xmm11,xmm1,0x93
+ pxor xmm3,xmm10
+ pshufd xmm12,xmm6,0x93
+ pxor xmm1,xmm11
+ pshufd xmm13,xmm2,0x93
+ pxor xmm6,xmm12
+ pshufd xmm14,xmm4,0x93
+ pxor xmm2,xmm13
+ pxor xmm4,xmm14
+
+ pxor xmm8,xmm15
+ pxor xmm7,xmm4
+ pxor xmm8,xmm4
+ pshufd xmm15,xmm15,0x4E
+ pxor xmm9,xmm0
+ pshufd xmm0,xmm0,0x4E
+ pxor xmm12,xmm1
+ pxor xmm15,xmm7
+ pxor xmm13,xmm6
+ pxor xmm0,xmm8
+ pxor xmm11,xmm3
+ pshufd xmm7,xmm1,0x4E
+ pxor xmm14,xmm2
+ pshufd xmm8,xmm6,0x4E
+ pxor xmm10,xmm5
+ pshufd xmm1,xmm3,0x4E
+ pxor xmm10,xmm4
+ pshufd xmm6,xmm4,0x4E
+ pxor xmm11,xmm4
+ pshufd xmm3,xmm2,0x4E
+ pxor xmm7,xmm11
+ pshufd xmm2,xmm5,0x4E
+ pxor xmm8,xmm12
+ pxor xmm10,xmm1
+ pxor xmm6,xmm14
+ pxor xmm13,xmm3
+ movdqa xmm3,xmm7
+ pxor xmm2,xmm9
+ movdqa xmm5,xmm13
+ movdqa xmm4,xmm8
+ movdqa xmm1,xmm2
+ movdqa xmm2,xmm10
+ movdqa xmm7,XMMWORD[((-16))+r11]
+ jnz NEAR $L$dec_loop
+ movdqa xmm7,XMMWORD[((-32))+r11]
+ jmp NEAR $L$dec_loop
+ALIGN 16
+$L$dec_done:
+ movdqa xmm7,XMMWORD[r11]
+ movdqa xmm8,XMMWORD[16+r11]
+ movdqa xmm9,xmm2
+ psrlq xmm2,1
+ movdqa xmm10,xmm1
+ psrlq xmm1,1
+ pxor xmm2,xmm4
+ pxor xmm1,xmm6
+ pand xmm2,xmm7
+ pand xmm1,xmm7
+ pxor xmm4,xmm2
+ psllq xmm2,1
+ pxor xmm6,xmm1
+ psllq xmm1,1
+ pxor xmm2,xmm9
+ pxor xmm1,xmm10
+ movdqa xmm9,xmm5
+ psrlq xmm5,1
+ movdqa xmm10,xmm15
+ psrlq xmm15,1
+ pxor xmm5,xmm3
+ pxor xmm15,xmm0
+ pand xmm5,xmm7
+ pand xmm15,xmm7
+ pxor xmm3,xmm5
+ psllq xmm5,1
+ pxor xmm0,xmm15
+ psllq xmm15,1
+ pxor xmm5,xmm9
+ pxor xmm15,xmm10
+ movdqa xmm7,XMMWORD[32+r11]
+ movdqa xmm9,xmm6
+ psrlq xmm6,2
+ movdqa xmm10,xmm1
+ psrlq xmm1,2
+ pxor xmm6,xmm4
+ pxor xmm1,xmm2
+ pand xmm6,xmm8
+ pand xmm1,xmm8
+ pxor xmm4,xmm6
+ psllq xmm6,2
+ pxor xmm2,xmm1
+ psllq xmm1,2
+ pxor xmm6,xmm9
+ pxor xmm1,xmm10
+ movdqa xmm9,xmm0
+ psrlq xmm0,2
+ movdqa xmm10,xmm15
+ psrlq xmm15,2
+ pxor xmm0,xmm3
+ pxor xmm15,xmm5
+ pand xmm0,xmm8
+ pand xmm15,xmm8
+ pxor xmm3,xmm0
+ psllq xmm0,2
+ pxor xmm5,xmm15
+ psllq xmm15,2
+ pxor xmm0,xmm9
+ pxor xmm15,xmm10
+ movdqa xmm9,xmm3
+ psrlq xmm3,4
+ movdqa xmm10,xmm5
+ psrlq xmm5,4
+ pxor xmm3,xmm4
+ pxor xmm5,xmm2
+ pand xmm3,xmm7
+ pand xmm5,xmm7
+ pxor xmm4,xmm3
+ psllq xmm3,4
+ pxor xmm2,xmm5
+ psllq xmm5,4
+ pxor xmm3,xmm9
+ pxor xmm5,xmm10
+ movdqa xmm9,xmm0
+ psrlq xmm0,4
+ movdqa xmm10,xmm15
+ psrlq xmm15,4
+ pxor xmm0,xmm6
+ pxor xmm15,xmm1
+ pand xmm0,xmm7
+ pand xmm15,xmm7
+ pxor xmm6,xmm0
+ psllq xmm0,4
+ pxor xmm1,xmm15
+ psllq xmm15,4
+ pxor xmm0,xmm9
+ pxor xmm15,xmm10
+ movdqa xmm7,XMMWORD[rax]
+ pxor xmm5,xmm7
+ pxor xmm3,xmm7
+ pxor xmm1,xmm7
+ pxor xmm6,xmm7
+ pxor xmm2,xmm7
+ pxor xmm4,xmm7
+ pxor xmm15,xmm7
+ pxor xmm0,xmm7
+ DB 0F3h,0C3h ;repret
+
+
+
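+; _bsaes_key_convert: converts a standard AES key schedule (at rcx) into the
+; bit-sliced representation expected by _bsaes_encrypt8/_bsaes_decrypt8,
+; writing 128 bytes per round key to the buffer at rax; r10d is the round
+; count.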
+ALIGN 16
+_bsaes_key_convert:
+
+ lea r11,[$L$masks]
+ movdqu xmm7,XMMWORD[rcx]
+ lea rcx,[16+rcx]
+ movdqa xmm0,XMMWORD[r11]
+ movdqa xmm1,XMMWORD[16+r11]
+ movdqa xmm2,XMMWORD[32+r11]
+ movdqa xmm3,XMMWORD[48+r11]
+ movdqa xmm4,XMMWORD[64+r11]
+ pcmpeqd xmm5,xmm5
+
+ movdqu xmm6,XMMWORD[rcx]
+ movdqa XMMWORD[rax],xmm7
+ lea rax,[16+rax]
+ dec r10d
+ jmp NEAR $L$key_loop
+ALIGN 16
+$L$key_loop:
+DB 102,15,56,0,244
+
+ movdqa xmm8,xmm0
+ movdqa xmm9,xmm1
+
+ pand xmm8,xmm6
+ pand xmm9,xmm6
+ movdqa xmm10,xmm2
+ pcmpeqb xmm8,xmm0
+ psllq xmm0,4
+ movdqa xmm11,xmm3
+ pcmpeqb xmm9,xmm1
+ psllq xmm1,4
+
+ pand xmm10,xmm6
+ pand xmm11,xmm6
+ movdqa xmm12,xmm0
+ pcmpeqb xmm10,xmm2
+ psllq xmm2,4
+ movdqa xmm13,xmm1
+ pcmpeqb xmm11,xmm3
+ psllq xmm3,4
+
+ movdqa xmm14,xmm2
+ movdqa xmm15,xmm3
+ pxor xmm8,xmm5
+ pxor xmm9,xmm5
+
+ pand xmm12,xmm6
+ pand xmm13,xmm6
+ movdqa XMMWORD[rax],xmm8
+ pcmpeqb xmm12,xmm0
+ psrlq xmm0,4
+ movdqa XMMWORD[16+rax],xmm9
+ pcmpeqb xmm13,xmm1
+ psrlq xmm1,4
+ lea rcx,[16+rcx]
+
+ pand xmm14,xmm6
+ pand xmm15,xmm6
+ movdqa XMMWORD[32+rax],xmm10
+ pcmpeqb xmm14,xmm2
+ psrlq xmm2,4
+ movdqa XMMWORD[48+rax],xmm11
+ pcmpeqb xmm15,xmm3
+ psrlq xmm3,4
+ movdqu xmm6,XMMWORD[rcx]
+
+ pxor xmm13,xmm5
+ pxor xmm14,xmm5
+ movdqa XMMWORD[64+rax],xmm12
+ movdqa XMMWORD[80+rax],xmm13
+ movdqa XMMWORD[96+rax],xmm14
+ movdqa XMMWORD[112+rax],xmm15
+ lea rax,[128+rax]
+ dec r10d
+ jnz NEAR $L$key_loop
+
+ movdqa xmm7,XMMWORD[80+r11]
+
+ DB 0F3h,0C3h ;repret
+
+
+EXTERN asm_AES_cbc_encrypt
+global ossl_bsaes_cbc_encrypt
+
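+; ossl_bsaes_cbc_encrypt: CBC entry point.  Encryption, and any request
+; shorter than 128 bytes, is forwarded to asm_AES_cbc_encrypt; decryption of
+; 128 bytes or more is done here 8 blocks at a time with _bsaes_decrypt8,
+; keeping the chaining value at [32+rbp] and handling the leftover 1..7
+; blocks in the $L$cbc_dec_one..$L$cbc_dec_six tails.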
+ALIGN 16
+ossl_bsaes_cbc_encrypt:
+
+DB 243,15,30,250 ;endbr64
+ mov r11d,DWORD[48+rsp]
+ cmp r11d,0
+ jne NEAR asm_AES_cbc_encrypt
+ cmp r8,128
+ jb NEAR asm_AES_cbc_encrypt
+
+ mov rax,rsp
+$L$cbc_dec_prologue:
+ push rbp
+
+ push rbx
+
+ push r12
+
+ push r13
+
+ push r14
+
+ push r15
+
+ lea rsp,[((-72))+rsp]
+
+ mov r10,QWORD[160+rsp]
+ lea rsp,[((-160))+rsp]
+ movaps XMMWORD[64+rsp],xmm6
+ movaps XMMWORD[80+rsp],xmm7
+ movaps XMMWORD[96+rsp],xmm8
+ movaps XMMWORD[112+rsp],xmm9
+ movaps XMMWORD[128+rsp],xmm10
+ movaps XMMWORD[144+rsp],xmm11
+ movaps XMMWORD[160+rsp],xmm12
+ movaps XMMWORD[176+rsp],xmm13
+ movaps XMMWORD[192+rsp],xmm14
+ movaps XMMWORD[208+rsp],xmm15
+$L$cbc_dec_body:
+ mov rbp,rsp
+
+ mov eax,DWORD[240+r9]
+ mov r12,rcx
+ mov r13,rdx
+ mov r14,r8
+ mov r15,r9
+ mov rbx,r10
+ shr r14,4
+
+ mov edx,eax
+ shl rax,7
+ sub rax,96
+ sub rsp,rax
+
+ mov rax,rsp
+ mov rcx,r15
+ mov r10d,edx
+ call _bsaes_key_convert
+ pxor xmm7,XMMWORD[rsp]
+ movdqa XMMWORD[rax],xmm6
+ movdqa XMMWORD[rsp],xmm7
+
+ movdqu xmm14,XMMWORD[rbx]
+ sub r14,8
+$L$cbc_dec_loop:
+ movdqu xmm15,XMMWORD[r12]
+ movdqu xmm0,XMMWORD[16+r12]
+ movdqu xmm1,XMMWORD[32+r12]
+ movdqu xmm2,XMMWORD[48+r12]
+ movdqu xmm3,XMMWORD[64+r12]
+ movdqu xmm4,XMMWORD[80+r12]
+ mov rax,rsp
+ movdqu xmm5,XMMWORD[96+r12]
+ mov r10d,edx
+ movdqu xmm6,XMMWORD[112+r12]
+ movdqa XMMWORD[32+rbp],xmm14
+
+ call _bsaes_decrypt8
+
+ pxor xmm15,XMMWORD[32+rbp]
+ movdqu xmm7,XMMWORD[r12]
+ movdqu xmm8,XMMWORD[16+r12]
+ pxor xmm0,xmm7
+ movdqu xmm9,XMMWORD[32+r12]
+ pxor xmm5,xmm8
+ movdqu xmm10,XMMWORD[48+r12]
+ pxor xmm3,xmm9
+ movdqu xmm11,XMMWORD[64+r12]
+ pxor xmm1,xmm10
+ movdqu xmm12,XMMWORD[80+r12]
+ pxor xmm6,xmm11
+ movdqu xmm13,XMMWORD[96+r12]
+ pxor xmm2,xmm12
+ movdqu xmm14,XMMWORD[112+r12]
+ pxor xmm4,xmm13
+ movdqu XMMWORD[r13],xmm15
+ lea r12,[128+r12]
+ movdqu XMMWORD[16+r13],xmm0
+ movdqu XMMWORD[32+r13],xmm5
+ movdqu XMMWORD[48+r13],xmm3
+ movdqu XMMWORD[64+r13],xmm1
+ movdqu XMMWORD[80+r13],xmm6
+ movdqu XMMWORD[96+r13],xmm2
+ movdqu XMMWORD[112+r13],xmm4
+ lea r13,[128+r13]
+ sub r14,8
+ jnc NEAR $L$cbc_dec_loop
+
+ add r14,8
+ jz NEAR $L$cbc_dec_done
+
+ movdqu xmm15,XMMWORD[r12]
+ mov rax,rsp
+ mov r10d,edx
+ cmp r14,2
+ jb NEAR $L$cbc_dec_one
+ movdqu xmm0,XMMWORD[16+r12]
+ je NEAR $L$cbc_dec_two
+ movdqu xmm1,XMMWORD[32+r12]
+ cmp r14,4
+ jb NEAR $L$cbc_dec_three
+ movdqu xmm2,XMMWORD[48+r12]
+ je NEAR $L$cbc_dec_four
+ movdqu xmm3,XMMWORD[64+r12]
+ cmp r14,6
+ jb NEAR $L$cbc_dec_five
+ movdqu xmm4,XMMWORD[80+r12]
+ je NEAR $L$cbc_dec_six
+ movdqu xmm5,XMMWORD[96+r12]
+ movdqa XMMWORD[32+rbp],xmm14
+ call _bsaes_decrypt8
+ pxor xmm15,XMMWORD[32+rbp]
+ movdqu xmm7,XMMWORD[r12]
+ movdqu xmm8,XMMWORD[16+r12]
+ pxor xmm0,xmm7
+ movdqu xmm9,XMMWORD[32+r12]
+ pxor xmm5,xmm8
+ movdqu xmm10,XMMWORD[48+r12]
+ pxor xmm3,xmm9
+ movdqu xmm11,XMMWORD[64+r12]
+ pxor xmm1,xmm10
+ movdqu xmm12,XMMWORD[80+r12]
+ pxor xmm6,xmm11
+ movdqu xmm14,XMMWORD[96+r12]
+ pxor xmm2,xmm12
+ movdqu XMMWORD[r13],xmm15
+ movdqu XMMWORD[16+r13],xmm0
+ movdqu XMMWORD[32+r13],xmm5
+ movdqu XMMWORD[48+r13],xmm3
+ movdqu XMMWORD[64+r13],xmm1
+ movdqu XMMWORD[80+r13],xmm6
+ movdqu XMMWORD[96+r13],xmm2
+ jmp NEAR $L$cbc_dec_done
+ALIGN 16
+$L$cbc_dec_six:
+ movdqa XMMWORD[32+rbp],xmm14
+ call _bsaes_decrypt8
+ pxor xmm15,XMMWORD[32+rbp]
+ movdqu xmm7,XMMWORD[r12]
+ movdqu xmm8,XMMWORD[16+r12]
+ pxor xmm0,xmm7
+ movdqu xmm9,XMMWORD[32+r12]
+ pxor xmm5,xmm8
+ movdqu xmm10,XMMWORD[48+r12]
+ pxor xmm3,xmm9
+ movdqu xmm11,XMMWORD[64+r12]
+ pxor xmm1,xmm10
+ movdqu xmm14,XMMWORD[80+r12]
+ pxor xmm6,xmm11
+ movdqu XMMWORD[r13],xmm15
+ movdqu XMMWORD[16+r13],xmm0
+ movdqu XMMWORD[32+r13],xmm5
+ movdqu XMMWORD[48+r13],xmm3
+ movdqu XMMWORD[64+r13],xmm1
+ movdqu XMMWORD[80+r13],xmm6
+ jmp NEAR $L$cbc_dec_done
+ALIGN 16
+$L$cbc_dec_five:
+ movdqa XMMWORD[32+rbp],xmm14
+ call _bsaes_decrypt8
+ pxor xmm15,XMMWORD[32+rbp]
+ movdqu xmm7,XMMWORD[r12]
+ movdqu xmm8,XMMWORD[16+r12]
+ pxor xmm0,xmm7
+ movdqu xmm9,XMMWORD[32+r12]
+ pxor xmm5,xmm8
+ movdqu xmm10,XMMWORD[48+r12]
+ pxor xmm3,xmm9
+ movdqu xmm14,XMMWORD[64+r12]
+ pxor xmm1,xmm10
+ movdqu XMMWORD[r13],xmm15
+ movdqu XMMWORD[16+r13],xmm0
+ movdqu XMMWORD[32+r13],xmm5
+ movdqu XMMWORD[48+r13],xmm3
+ movdqu XMMWORD[64+r13],xmm1
+ jmp NEAR $L$cbc_dec_done
+ALIGN 16
+$L$cbc_dec_four:
+ movdqa XMMWORD[32+rbp],xmm14
+ call _bsaes_decrypt8
+ pxor xmm15,XMMWORD[32+rbp]
+ movdqu xmm7,XMMWORD[r12]
+ movdqu xmm8,XMMWORD[16+r12]
+ pxor xmm0,xmm7
+ movdqu xmm9,XMMWORD[32+r12]
+ pxor xmm5,xmm8
+ movdqu xmm14,XMMWORD[48+r12]
+ pxor xmm3,xmm9
+ movdqu XMMWORD[r13],xmm15
+ movdqu XMMWORD[16+r13],xmm0
+ movdqu XMMWORD[32+r13],xmm5
+ movdqu XMMWORD[48+r13],xmm3
+ jmp NEAR $L$cbc_dec_done
+ALIGN 16
+$L$cbc_dec_three:
+ movdqa XMMWORD[32+rbp],xmm14
+ call _bsaes_decrypt8
+ pxor xmm15,XMMWORD[32+rbp]
+ movdqu xmm7,XMMWORD[r12]
+ movdqu xmm8,XMMWORD[16+r12]
+ pxor xmm0,xmm7
+ movdqu xmm14,XMMWORD[32+r12]
+ pxor xmm5,xmm8
+ movdqu XMMWORD[r13],xmm15
+ movdqu XMMWORD[16+r13],xmm0
+ movdqu XMMWORD[32+r13],xmm5
+ jmp NEAR $L$cbc_dec_done
+ALIGN 16
+$L$cbc_dec_two:
+ movdqa XMMWORD[32+rbp],xmm14
+ call _bsaes_decrypt8
+ pxor xmm15,XMMWORD[32+rbp]
+ movdqu xmm7,XMMWORD[r12]
+ movdqu xmm14,XMMWORD[16+r12]
+ pxor xmm0,xmm7
+ movdqu XMMWORD[r13],xmm15
+ movdqu XMMWORD[16+r13],xmm0
+ jmp NEAR $L$cbc_dec_done
+ALIGN 16
+$L$cbc_dec_one:
+ lea rcx,[r12]
+ lea rdx,[32+rbp]
+ lea r8,[r15]
+ call asm_AES_decrypt
+ pxor xmm14,XMMWORD[32+rbp]
+ movdqu XMMWORD[r13],xmm14
+ movdqa xmm14,xmm15
+
+$L$cbc_dec_done:
+ movdqu XMMWORD[rbx],xmm14
+ lea rax,[rsp]
+ pxor xmm0,xmm0
+$L$cbc_dec_bzero:
+ movdqa XMMWORD[rax],xmm0
+ movdqa XMMWORD[16+rax],xmm0
+ lea rax,[32+rax]
+ cmp rbp,rax
+ ja NEAR $L$cbc_dec_bzero
+
+ lea rax,[120+rbp]
+
+ movaps xmm6,XMMWORD[64+rbp]
+ movaps xmm7,XMMWORD[80+rbp]
+ movaps xmm8,XMMWORD[96+rbp]
+ movaps xmm9,XMMWORD[112+rbp]
+ movaps xmm10,XMMWORD[128+rbp]
+ movaps xmm11,XMMWORD[144+rbp]
+ movaps xmm12,XMMWORD[160+rbp]
+ movaps xmm13,XMMWORD[176+rbp]
+ movaps xmm14,XMMWORD[192+rbp]
+ movaps xmm15,XMMWORD[208+rbp]
+ lea rax,[160+rax]
+$L$cbc_dec_tail:
+ mov r15,QWORD[((-48))+rax]
+
+ mov r14,QWORD[((-40))+rax]
+
+ mov r13,QWORD[((-32))+rax]
+
+ mov r12,QWORD[((-24))+rax]
+
+ mov rbx,QWORD[((-16))+rax]
+
+ mov rbp,QWORD[((-8))+rax]
+
+ lea rsp,[rax]
+
+$L$cbc_dec_epilogue:
+ DB 0F3h,0C3h ;repret
+
+
+
+global ossl_bsaes_ctr32_encrypt_blocks
+
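+; ossl_bsaes_ctr32_encrypt_blocks: CTR mode with a 32-bit big-endian counter.
+; Eight counter blocks are generated per iteration with the $L$ADD* constants
+; and run through the bit-sliced encryptor; requests of fewer than 8 blocks
+; fall back to one-block asm_AES_encrypt calls in $L$ctr_enc_short.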
+ALIGN 16
+ossl_bsaes_ctr32_encrypt_blocks:
+
+DB 243,15,30,250 ;endbr64
+ mov rax,rsp
+$L$ctr_enc_prologue:
+ push rbp
+
+ push rbx
+
+ push r12
+
+ push r13
+
+ push r14
+
+ push r15
+
+ lea rsp,[((-72))+rsp]
+
+ mov r10,QWORD[160+rsp]
+ lea rsp,[((-160))+rsp]
+ movaps XMMWORD[64+rsp],xmm6
+ movaps XMMWORD[80+rsp],xmm7
+ movaps XMMWORD[96+rsp],xmm8
+ movaps XMMWORD[112+rsp],xmm9
+ movaps XMMWORD[128+rsp],xmm10
+ movaps XMMWORD[144+rsp],xmm11
+ movaps XMMWORD[160+rsp],xmm12
+ movaps XMMWORD[176+rsp],xmm13
+ movaps XMMWORD[192+rsp],xmm14
+ movaps XMMWORD[208+rsp],xmm15
+$L$ctr_enc_body:
+ mov rbp,rsp
+
+ movdqu xmm0,XMMWORD[r10]
+ mov eax,DWORD[240+r9]
+ mov r12,rcx
+ mov r13,rdx
+ mov r14,r8
+ mov r15,r9
+ movdqa XMMWORD[32+rbp],xmm0
+ cmp r8,8
+ jb NEAR $L$ctr_enc_short
+
+ mov ebx,eax
+ shl rax,7
+ sub rax,96
+ sub rsp,rax
+
+ mov rax,rsp
+ mov rcx,r15
+ mov r10d,ebx
+ call _bsaes_key_convert
+ pxor xmm7,xmm6
+ movdqa XMMWORD[rax],xmm7
+
+ movdqa xmm8,XMMWORD[rsp]
+ lea r11,[$L$ADD1]
+ movdqa xmm15,XMMWORD[32+rbp]
+ movdqa xmm7,XMMWORD[((-32))+r11]
+DB 102,68,15,56,0,199
+DB 102,68,15,56,0,255
+ movdqa XMMWORD[rsp],xmm8
+ jmp NEAR $L$ctr_enc_loop
+ALIGN 16
+$L$ctr_enc_loop:
+ movdqa XMMWORD[32+rbp],xmm15
+ movdqa xmm0,xmm15
+ movdqa xmm1,xmm15
+ paddd xmm0,XMMWORD[r11]
+ movdqa xmm2,xmm15
+ paddd xmm1,XMMWORD[16+r11]
+ movdqa xmm3,xmm15
+ paddd xmm2,XMMWORD[32+r11]
+ movdqa xmm4,xmm15
+ paddd xmm3,XMMWORD[48+r11]
+ movdqa xmm5,xmm15
+ paddd xmm4,XMMWORD[64+r11]
+ movdqa xmm6,xmm15
+ paddd xmm5,XMMWORD[80+r11]
+ paddd xmm6,XMMWORD[96+r11]
+
+
+
+ movdqa xmm8,XMMWORD[rsp]
+ lea rax,[16+rsp]
+ movdqa xmm7,XMMWORD[((-16))+r11]
+ pxor xmm15,xmm8
+ pxor xmm0,xmm8
+ pxor xmm1,xmm8
+ pxor xmm2,xmm8
+DB 102,68,15,56,0,255
+DB 102,15,56,0,199
+ pxor xmm3,xmm8
+ pxor xmm4,xmm8
+DB 102,15,56,0,207
+DB 102,15,56,0,215
+ pxor xmm5,xmm8
+ pxor xmm6,xmm8
+DB 102,15,56,0,223
+DB 102,15,56,0,231
+DB 102,15,56,0,239
+DB 102,15,56,0,247
+ lea r11,[$L$BS0]
+ mov r10d,ebx
+
+ call _bsaes_encrypt8_bitslice
+
+ sub r14,8
+ jc NEAR $L$ctr_enc_loop_done
+
+ movdqu xmm7,XMMWORD[r12]
+ movdqu xmm8,XMMWORD[16+r12]
+ movdqu xmm9,XMMWORD[32+r12]
+ movdqu xmm10,XMMWORD[48+r12]
+ movdqu xmm11,XMMWORD[64+r12]
+ movdqu xmm12,XMMWORD[80+r12]
+ movdqu xmm13,XMMWORD[96+r12]
+ movdqu xmm14,XMMWORD[112+r12]
+ lea r12,[128+r12]
+ pxor xmm7,xmm15
+ movdqa xmm15,XMMWORD[32+rbp]
+ pxor xmm0,xmm8
+ movdqu XMMWORD[r13],xmm7
+ pxor xmm3,xmm9
+ movdqu XMMWORD[16+r13],xmm0
+ pxor xmm5,xmm10
+ movdqu XMMWORD[32+r13],xmm3
+ pxor xmm2,xmm11
+ movdqu XMMWORD[48+r13],xmm5
+ pxor xmm6,xmm12
+ movdqu XMMWORD[64+r13],xmm2
+ pxor xmm1,xmm13
+ movdqu XMMWORD[80+r13],xmm6
+ pxor xmm4,xmm14
+ movdqu XMMWORD[96+r13],xmm1
+ lea r11,[$L$ADD1]
+ movdqu XMMWORD[112+r13],xmm4
+ lea r13,[128+r13]
+ paddd xmm15,XMMWORD[112+r11]
+ jnz NEAR $L$ctr_enc_loop
+
+ jmp NEAR $L$ctr_enc_done
+ALIGN 16
+$L$ctr_enc_loop_done:
+ add r14,8
+ movdqu xmm7,XMMWORD[r12]
+ pxor xmm15,xmm7
+ movdqu XMMWORD[r13],xmm15
+ cmp r14,2
+ jb NEAR $L$ctr_enc_done
+ movdqu xmm8,XMMWORD[16+r12]
+ pxor xmm0,xmm8
+ movdqu XMMWORD[16+r13],xmm0
+ je NEAR $L$ctr_enc_done
+ movdqu xmm9,XMMWORD[32+r12]
+ pxor xmm3,xmm9
+ movdqu XMMWORD[32+r13],xmm3
+ cmp r14,4
+ jb NEAR $L$ctr_enc_done
+ movdqu xmm10,XMMWORD[48+r12]
+ pxor xmm5,xmm10
+ movdqu XMMWORD[48+r13],xmm5
+ je NEAR $L$ctr_enc_done
+ movdqu xmm11,XMMWORD[64+r12]
+ pxor xmm2,xmm11
+ movdqu XMMWORD[64+r13],xmm2
+ cmp r14,6
+ jb NEAR $L$ctr_enc_done
+ movdqu xmm12,XMMWORD[80+r12]
+ pxor xmm6,xmm12
+ movdqu XMMWORD[80+r13],xmm6
+ je NEAR $L$ctr_enc_done
+ movdqu xmm13,XMMWORD[96+r12]
+ pxor xmm1,xmm13
+ movdqu XMMWORD[96+r13],xmm1
+ jmp NEAR $L$ctr_enc_done
+
+ALIGN 16
+$L$ctr_enc_short:
+ lea rcx,[32+rbp]
+ lea rdx,[48+rbp]
+ lea r8,[r15]
+ call asm_AES_encrypt
+ movdqu xmm0,XMMWORD[r12]
+ lea r12,[16+r12]
+ mov eax,DWORD[44+rbp]
+ bswap eax
+ pxor xmm0,XMMWORD[48+rbp]
+ inc eax
+ movdqu XMMWORD[r13],xmm0
+ bswap eax
+ lea r13,[16+r13]
+ mov DWORD[44+rsp],eax
+ dec r14
+ jnz NEAR $L$ctr_enc_short
+
+$L$ctr_enc_done:
+ lea rax,[rsp]
+ pxor xmm0,xmm0
+$L$ctr_enc_bzero:
+ movdqa XMMWORD[rax],xmm0
+ movdqa XMMWORD[16+rax],xmm0
+ lea rax,[32+rax]
+ cmp rbp,rax
+ ja NEAR $L$ctr_enc_bzero
+
+ lea rax,[120+rbp]
+
+ movaps xmm6,XMMWORD[64+rbp]
+ movaps xmm7,XMMWORD[80+rbp]
+ movaps xmm8,XMMWORD[96+rbp]
+ movaps xmm9,XMMWORD[112+rbp]
+ movaps xmm10,XMMWORD[128+rbp]
+ movaps xmm11,XMMWORD[144+rbp]
+ movaps xmm12,XMMWORD[160+rbp]
+ movaps xmm13,XMMWORD[176+rbp]
+ movaps xmm14,XMMWORD[192+rbp]
+ movaps xmm15,XMMWORD[208+rbp]
+ lea rax,[160+rax]
+$L$ctr_enc_tail:
+ mov r15,QWORD[((-48))+rax]
+
+ mov r14,QWORD[((-40))+rax]
+
+ mov r13,QWORD[((-32))+rax]
+
+ mov r12,QWORD[((-24))+rax]
+
+ mov rbx,QWORD[((-16))+rax]
+
+ mov rbp,QWORD[((-8))+rax]
+
+ lea rsp,[rax]
+
+$L$ctr_enc_epilogue:
+ DB 0F3h,0C3h ;repret
+
+
+global ossl_bsaes_xts_encrypt
+
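+; ossl_bsaes_xts_encrypt: XTS encryption.  The initial tweak is produced by
+; encrypting the supplied IV with the second key via asm_AES_encrypt;
+; successive tweaks are derived with the $L$xts_magic (0x87) doubling, data is
+; processed 8 blocks per _bsaes_encrypt8 call, and a trailing partial block is
+; handled by ciphertext stealing in $L$xts_enc_steal.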
+ALIGN 16
+ossl_bsaes_xts_encrypt:
+
+ mov rax,rsp
+$L$xts_enc_prologue:
+ push rbp
+
+ push rbx
+
+ push r12
+
+ push r13
+
+ push r14
+
+ push r15
+
+ lea rsp,[((-72))+rsp]
+
+ mov r10,QWORD[160+rsp]
+ mov r11,QWORD[168+rsp]
+ lea rsp,[((-160))+rsp]
+ movaps XMMWORD[64+rsp],xmm6
+ movaps XMMWORD[80+rsp],xmm7
+ movaps XMMWORD[96+rsp],xmm8
+ movaps XMMWORD[112+rsp],xmm9
+ movaps XMMWORD[128+rsp],xmm10
+ movaps XMMWORD[144+rsp],xmm11
+ movaps XMMWORD[160+rsp],xmm12
+ movaps XMMWORD[176+rsp],xmm13
+ movaps XMMWORD[192+rsp],xmm14
+ movaps XMMWORD[208+rsp],xmm15
+$L$xts_enc_body:
+ mov rbp,rsp
+
+ mov r12,rcx
+ mov r13,rdx
+ mov r14,r8
+ mov r15,r9
+
+ lea rcx,[r11]
+ lea rdx,[32+rbp]
+ lea r8,[r10]
+ call asm_AES_encrypt
+
+ mov eax,DWORD[240+r15]
+ mov rbx,r14
+
+ mov edx,eax
+ shl rax,7
+ sub rax,96
+ sub rsp,rax
+
+ mov rax,rsp
+ mov rcx,r15
+ mov r10d,edx
+ call _bsaes_key_convert
+ pxor xmm7,xmm6
+ movdqa XMMWORD[rax],xmm7
+
+ and r14,-16
+ sub rsp,0x80
+ movdqa xmm6,XMMWORD[32+rbp]
+
+ pxor xmm14,xmm14
+ movdqa xmm12,XMMWORD[$L$xts_magic]
+ pcmpgtd xmm14,xmm6
+
+ sub r14,0x80
+ jc NEAR $L$xts_enc_short
+ jmp NEAR $L$xts_enc_loop
+
+ALIGN 16
+$L$xts_enc_loop:
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm15,xmm6
+ movdqa XMMWORD[rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm0,xmm6
+ movdqa XMMWORD[16+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm7,XMMWORD[r12]
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm1,xmm6
+ movdqa XMMWORD[32+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm8,XMMWORD[16+r12]
+ pxor xmm15,xmm7
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm2,xmm6
+ movdqa XMMWORD[48+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm9,XMMWORD[32+r12]
+ pxor xmm0,xmm8
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm3,xmm6
+ movdqa XMMWORD[64+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm10,XMMWORD[48+r12]
+ pxor xmm1,xmm9
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm4,xmm6
+ movdqa XMMWORD[80+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm11,XMMWORD[64+r12]
+ pxor xmm2,xmm10
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm5,xmm6
+ movdqa XMMWORD[96+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm12,XMMWORD[80+r12]
+ pxor xmm3,xmm11
+ movdqu xmm13,XMMWORD[96+r12]
+ pxor xmm4,xmm12
+ movdqu xmm14,XMMWORD[112+r12]
+ lea r12,[128+r12]
+ movdqa XMMWORD[112+rsp],xmm6
+ pxor xmm5,xmm13
+ lea rax,[128+rsp]
+ pxor xmm6,xmm14
+ mov r10d,edx
+
+ call _bsaes_encrypt8
+
+ pxor xmm15,XMMWORD[rsp]
+ pxor xmm0,XMMWORD[16+rsp]
+ movdqu XMMWORD[r13],xmm15
+ pxor xmm3,XMMWORD[32+rsp]
+ movdqu XMMWORD[16+r13],xmm0
+ pxor xmm5,XMMWORD[48+rsp]
+ movdqu XMMWORD[32+r13],xmm3
+ pxor xmm2,XMMWORD[64+rsp]
+ movdqu XMMWORD[48+r13],xmm5
+ pxor xmm6,XMMWORD[80+rsp]
+ movdqu XMMWORD[64+r13],xmm2
+ pxor xmm1,XMMWORD[96+rsp]
+ movdqu XMMWORD[80+r13],xmm6
+ pxor xmm4,XMMWORD[112+rsp]
+ movdqu XMMWORD[96+r13],xmm1
+ movdqu XMMWORD[112+r13],xmm4
+ lea r13,[128+r13]
+
+ movdqa xmm6,XMMWORD[112+rsp]
+ pxor xmm14,xmm14
+ movdqa xmm12,XMMWORD[$L$xts_magic]
+ pcmpgtd xmm14,xmm6
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+
+ sub r14,0x80
+ jnc NEAR $L$xts_enc_loop
+
+$L$xts_enc_short:
+ add r14,0x80
+ jz NEAR $L$xts_enc_done
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm15,xmm6
+ movdqa XMMWORD[rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm0,xmm6
+ movdqa XMMWORD[16+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm7,XMMWORD[r12]
+ cmp r14,16
+ je NEAR $L$xts_enc_1
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm1,xmm6
+ movdqa XMMWORD[32+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm8,XMMWORD[16+r12]
+ cmp r14,32
+ je NEAR $L$xts_enc_2
+ pxor xmm15,xmm7
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm2,xmm6
+ movdqa XMMWORD[48+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm9,XMMWORD[32+r12]
+ cmp r14,48
+ je NEAR $L$xts_enc_3
+ pxor xmm0,xmm8
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm3,xmm6
+ movdqa XMMWORD[64+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm10,XMMWORD[48+r12]
+ cmp r14,64
+ je NEAR $L$xts_enc_4
+ pxor xmm1,xmm9
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm4,xmm6
+ movdqa XMMWORD[80+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm11,XMMWORD[64+r12]
+ cmp r14,80
+ je NEAR $L$xts_enc_5
+ pxor xmm2,xmm10
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm5,xmm6
+ movdqa XMMWORD[96+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm12,XMMWORD[80+r12]
+ cmp r14,96
+ je NEAR $L$xts_enc_6
+ pxor xmm3,xmm11
+ movdqu xmm13,XMMWORD[96+r12]
+ pxor xmm4,xmm12
+ movdqa XMMWORD[112+rsp],xmm6
+ lea r12,[112+r12]
+ pxor xmm5,xmm13
+ lea rax,[128+rsp]
+ mov r10d,edx
+
+ call _bsaes_encrypt8
+
+ pxor xmm15,XMMWORD[rsp]
+ pxor xmm0,XMMWORD[16+rsp]
+ movdqu XMMWORD[r13],xmm15
+ pxor xmm3,XMMWORD[32+rsp]
+ movdqu XMMWORD[16+r13],xmm0
+ pxor xmm5,XMMWORD[48+rsp]
+ movdqu XMMWORD[32+r13],xmm3
+ pxor xmm2,XMMWORD[64+rsp]
+ movdqu XMMWORD[48+r13],xmm5
+ pxor xmm6,XMMWORD[80+rsp]
+ movdqu XMMWORD[64+r13],xmm2
+ pxor xmm1,XMMWORD[96+rsp]
+ movdqu XMMWORD[80+r13],xmm6
+ movdqu XMMWORD[96+r13],xmm1
+ lea r13,[112+r13]
+
+ movdqa xmm6,XMMWORD[112+rsp]
+ jmp NEAR $L$xts_enc_done
+ALIGN 16
+$L$xts_enc_6:
+ pxor xmm3,xmm11
+ lea r12,[96+r12]
+ pxor xmm4,xmm12
+ lea rax,[128+rsp]
+ mov r10d,edx
+
+ call _bsaes_encrypt8
+
+ pxor xmm15,XMMWORD[rsp]
+ pxor xmm0,XMMWORD[16+rsp]
+ movdqu XMMWORD[r13],xmm15
+ pxor xmm3,XMMWORD[32+rsp]
+ movdqu XMMWORD[16+r13],xmm0
+ pxor xmm5,XMMWORD[48+rsp]
+ movdqu XMMWORD[32+r13],xmm3
+ pxor xmm2,XMMWORD[64+rsp]
+ movdqu XMMWORD[48+r13],xmm5
+ pxor xmm6,XMMWORD[80+rsp]
+ movdqu XMMWORD[64+r13],xmm2
+ movdqu XMMWORD[80+r13],xmm6
+ lea r13,[96+r13]
+
+ movdqa xmm6,XMMWORD[96+rsp]
+ jmp NEAR $L$xts_enc_done
+ALIGN 16
+$L$xts_enc_5:
+ pxor xmm2,xmm10
+ lea r12,[80+r12]
+ pxor xmm3,xmm11
+ lea rax,[128+rsp]
+ mov r10d,edx
+
+ call _bsaes_encrypt8
+
+ pxor xmm15,XMMWORD[rsp]
+ pxor xmm0,XMMWORD[16+rsp]
+ movdqu XMMWORD[r13],xmm15
+ pxor xmm3,XMMWORD[32+rsp]
+ movdqu XMMWORD[16+r13],xmm0
+ pxor xmm5,XMMWORD[48+rsp]
+ movdqu XMMWORD[32+r13],xmm3
+ pxor xmm2,XMMWORD[64+rsp]
+ movdqu XMMWORD[48+r13],xmm5
+ movdqu XMMWORD[64+r13],xmm2
+ lea r13,[80+r13]
+
+ movdqa xmm6,XMMWORD[80+rsp]
+ jmp NEAR $L$xts_enc_done
+ALIGN 16
+$L$xts_enc_4:
+ pxor xmm1,xmm9
+ lea r12,[64+r12]
+ pxor xmm2,xmm10
+ lea rax,[128+rsp]
+ mov r10d,edx
+
+ call _bsaes_encrypt8
+
+ pxor xmm15,XMMWORD[rsp]
+ pxor xmm0,XMMWORD[16+rsp]
+ movdqu XMMWORD[r13],xmm15
+ pxor xmm3,XMMWORD[32+rsp]
+ movdqu XMMWORD[16+r13],xmm0
+ pxor xmm5,XMMWORD[48+rsp]
+ movdqu XMMWORD[32+r13],xmm3
+ movdqu XMMWORD[48+r13],xmm5
+ lea r13,[64+r13]
+
+ movdqa xmm6,XMMWORD[64+rsp]
+ jmp NEAR $L$xts_enc_done
+ALIGN 16
+$L$xts_enc_3:
+ pxor xmm0,xmm8
+ lea r12,[48+r12]
+ pxor xmm1,xmm9
+ lea rax,[128+rsp]
+ mov r10d,edx
+
+ call _bsaes_encrypt8
+
+ pxor xmm15,XMMWORD[rsp]
+ pxor xmm0,XMMWORD[16+rsp]
+ movdqu XMMWORD[r13],xmm15
+ pxor xmm3,XMMWORD[32+rsp]
+ movdqu XMMWORD[16+r13],xmm0
+ movdqu XMMWORD[32+r13],xmm3
+ lea r13,[48+r13]
+
+ movdqa xmm6,XMMWORD[48+rsp]
+ jmp NEAR $L$xts_enc_done
+ALIGN 16
+$L$xts_enc_2:
+ pxor xmm15,xmm7
+ lea r12,[32+r12]
+ pxor xmm0,xmm8
+ lea rax,[128+rsp]
+ mov r10d,edx
+
+ call _bsaes_encrypt8
+
+ pxor xmm15,XMMWORD[rsp]
+ pxor xmm0,XMMWORD[16+rsp]
+ movdqu XMMWORD[r13],xmm15
+ movdqu XMMWORD[16+r13],xmm0
+ lea r13,[32+r13]
+
+ movdqa xmm6,XMMWORD[32+rsp]
+ jmp NEAR $L$xts_enc_done
+ALIGN 16
+$L$xts_enc_1:
+ pxor xmm7,xmm15
+ lea r12,[16+r12]
+ movdqa XMMWORD[32+rbp],xmm7
+ lea rcx,[32+rbp]
+ lea rdx,[32+rbp]
+ lea r8,[r15]
+ call asm_AES_encrypt
+ pxor xmm15,XMMWORD[32+rbp]
+
+
+
+
+
+ movdqu XMMWORD[r13],xmm15
+ lea r13,[16+r13]
+
+ movdqa xmm6,XMMWORD[16+rsp]
+
+$L$xts_enc_done:
+ and ebx,15
+ jz NEAR $L$xts_enc_ret
+ mov rdx,r13
+
+$L$xts_enc_steal:
+ movzx eax,BYTE[r12]
+ movzx ecx,BYTE[((-16))+rdx]
+ lea r12,[1+r12]
+ mov BYTE[((-16))+rdx],al
+ mov BYTE[rdx],cl
+ lea rdx,[1+rdx]
+ sub ebx,1
+ jnz NEAR $L$xts_enc_steal
+
+ movdqu xmm15,XMMWORD[((-16))+r13]
+ lea rcx,[32+rbp]
+ pxor xmm15,xmm6
+ lea rdx,[32+rbp]
+ movdqa XMMWORD[32+rbp],xmm15
+ lea r8,[r15]
+ call asm_AES_encrypt
+ pxor xmm6,XMMWORD[32+rbp]
+ movdqu XMMWORD[(-16)+r13],xmm6
+
+$L$xts_enc_ret:
+ lea rax,[rsp]
+ pxor xmm0,xmm0
+$L$xts_enc_bzero:
+ movdqa XMMWORD[rax],xmm0
+ movdqa XMMWORD[16+rax],xmm0
+ lea rax,[32+rax]
+ cmp rbp,rax
+ ja NEAR $L$xts_enc_bzero
+
+ lea rax,[120+rbp]
+
+ movaps xmm6,XMMWORD[64+rbp]
+ movaps xmm7,XMMWORD[80+rbp]
+ movaps xmm8,XMMWORD[96+rbp]
+ movaps xmm9,XMMWORD[112+rbp]
+ movaps xmm10,XMMWORD[128+rbp]
+ movaps xmm11,XMMWORD[144+rbp]
+ movaps xmm12,XMMWORD[160+rbp]
+ movaps xmm13,XMMWORD[176+rbp]
+ movaps xmm14,XMMWORD[192+rbp]
+ movaps xmm15,XMMWORD[208+rbp]
+ lea rax,[160+rax]
+$L$xts_enc_tail:
+ mov r15,QWORD[((-48))+rax]
+
+ mov r14,QWORD[((-40))+rax]
+
+ mov r13,QWORD[((-32))+rax]
+
+ mov r12,QWORD[((-24))+rax]
+
+ mov rbx,QWORD[((-16))+rax]
+
+ mov rbp,QWORD[((-8))+rax]
+
+ lea rsp,[rax]
+
+$L$xts_enc_epilogue:
+ DB 0F3h,0C3h ;repret
+
+
+
+global ossl_bsaes_xts_decrypt
+
+ALIGN 16
+ossl_bsaes_xts_decrypt:
+
+ mov rax,rsp
+$L$xts_dec_prologue:
+ push rbp
+
+ push rbx
+
+ push r12
+
+ push r13
+
+ push r14
+
+ push r15
+
+ lea rsp,[((-72))+rsp]
+
+ mov r10,QWORD[160+rsp]
+ mov r11,QWORD[168+rsp]
+ lea rsp,[((-160))+rsp]
+ movaps XMMWORD[64+rsp],xmm6
+ movaps XMMWORD[80+rsp],xmm7
+ movaps XMMWORD[96+rsp],xmm8
+ movaps XMMWORD[112+rsp],xmm9
+ movaps XMMWORD[128+rsp],xmm10
+ movaps XMMWORD[144+rsp],xmm11
+ movaps XMMWORD[160+rsp],xmm12
+ movaps XMMWORD[176+rsp],xmm13
+ movaps XMMWORD[192+rsp],xmm14
+ movaps XMMWORD[208+rsp],xmm15
+$L$xts_dec_body:
+ mov rbp,rsp
+ mov r12,rcx
+ mov r13,rdx
+ mov r14,r8
+ mov r15,r9
+
+ lea rcx,[r11]
+ lea rdx,[32+rbp]
+ lea r8,[r10]
+ call asm_AES_encrypt
+
+ mov eax,DWORD[240+r15]
+ mov rbx,r14
+
+ mov edx,eax
+ shl rax,7
+ sub rax,96
+ sub rsp,rax
+
+ mov rax,rsp
+ mov rcx,r15
+ mov r10d,edx
+ call _bsaes_key_convert
+ pxor xmm7,XMMWORD[rsp]
+ movdqa XMMWORD[rax],xmm6
+ movdqa XMMWORD[rsp],xmm7
+
+ xor eax,eax
+ and r14,-16
+ test ebx,15
+ setnz al
+ shl rax,4
+ sub r14,rax
+
+ sub rsp,0x80
+ movdqa xmm6,XMMWORD[32+rbp]
+
+ pxor xmm14,xmm14
+ movdqa xmm12,XMMWORD[$L$xts_magic]
+ pcmpgtd xmm14,xmm6
+
+ sub r14,0x80
+ jc NEAR $L$xts_dec_short
+ jmp NEAR $L$xts_dec_loop
+
+ALIGN 16
+$L$xts_dec_loop:
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm15,xmm6
+ movdqa XMMWORD[rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm0,xmm6
+ movdqa XMMWORD[16+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm7,XMMWORD[r12]
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm1,xmm6
+ movdqa XMMWORD[32+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm8,XMMWORD[16+r12]
+ pxor xmm15,xmm7
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm2,xmm6
+ movdqa XMMWORD[48+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm9,XMMWORD[32+r12]
+ pxor xmm0,xmm8
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm3,xmm6
+ movdqa XMMWORD[64+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm10,XMMWORD[48+r12]
+ pxor xmm1,xmm9
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm4,xmm6
+ movdqa XMMWORD[80+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm11,XMMWORD[64+r12]
+ pxor xmm2,xmm10
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm5,xmm6
+ movdqa XMMWORD[96+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm12,XMMWORD[80+r12]
+ pxor xmm3,xmm11
+ movdqu xmm13,XMMWORD[96+r12]
+ pxor xmm4,xmm12
+ movdqu xmm14,XMMWORD[112+r12]
+ lea r12,[128+r12]
+ movdqa XMMWORD[112+rsp],xmm6
+ pxor xmm5,xmm13
+ lea rax,[128+rsp]
+ pxor xmm6,xmm14
+ mov r10d,edx
+
+ call _bsaes_decrypt8
+
+ pxor xmm15,XMMWORD[rsp]
+ pxor xmm0,XMMWORD[16+rsp]
+ movdqu XMMWORD[r13],xmm15
+ pxor xmm5,XMMWORD[32+rsp]
+ movdqu XMMWORD[16+r13],xmm0
+ pxor xmm3,XMMWORD[48+rsp]
+ movdqu XMMWORD[32+r13],xmm5
+ pxor xmm1,XMMWORD[64+rsp]
+ movdqu XMMWORD[48+r13],xmm3
+ pxor xmm6,XMMWORD[80+rsp]
+ movdqu XMMWORD[64+r13],xmm1
+ pxor xmm2,XMMWORD[96+rsp]
+ movdqu XMMWORD[80+r13],xmm6
+ pxor xmm4,XMMWORD[112+rsp]
+ movdqu XMMWORD[96+r13],xmm2
+ movdqu XMMWORD[112+r13],xmm4
+ lea r13,[128+r13]
+
+ movdqa xmm6,XMMWORD[112+rsp]
+ pxor xmm14,xmm14
+ movdqa xmm12,XMMWORD[$L$xts_magic]
+ pcmpgtd xmm14,xmm6
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+
+ sub r14,0x80
+ jnc NEAR $L$xts_dec_loop
+
+$L$xts_dec_short:
+ add r14,0x80
+ jz NEAR $L$xts_dec_done
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm15,xmm6
+ movdqa XMMWORD[rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm0,xmm6
+ movdqa XMMWORD[16+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm7,XMMWORD[r12]
+ cmp r14,16
+ je NEAR $L$xts_dec_1
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm1,xmm6
+ movdqa XMMWORD[32+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm8,XMMWORD[16+r12]
+ cmp r14,32
+ je NEAR $L$xts_dec_2
+ pxor xmm15,xmm7
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm2,xmm6
+ movdqa XMMWORD[48+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm9,XMMWORD[32+r12]
+ cmp r14,48
+ je NEAR $L$xts_dec_3
+ pxor xmm0,xmm8
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm3,xmm6
+ movdqa XMMWORD[64+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm10,XMMWORD[48+r12]
+ cmp r14,64
+ je NEAR $L$xts_dec_4
+ pxor xmm1,xmm9
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm4,xmm6
+ movdqa XMMWORD[80+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm11,XMMWORD[64+r12]
+ cmp r14,80
+ je NEAR $L$xts_dec_5
+ pxor xmm2,xmm10
+ pshufd xmm13,xmm14,0x13
+ pxor xmm14,xmm14
+ movdqa xmm5,xmm6
+ movdqa XMMWORD[96+rsp],xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ pcmpgtd xmm14,xmm6
+ pxor xmm6,xmm13
+ movdqu xmm12,XMMWORD[80+r12]
+ cmp r14,96
+ je NEAR $L$xts_dec_6
+ pxor xmm3,xmm11
+ movdqu xmm13,XMMWORD[96+r12]
+ pxor xmm4,xmm12
+ movdqa XMMWORD[112+rsp],xmm6
+ lea r12,[112+r12]
+ pxor xmm5,xmm13
+ lea rax,[128+rsp]
+ mov r10d,edx
+
+ call _bsaes_decrypt8
+
+ pxor xmm15,XMMWORD[rsp]
+ pxor xmm0,XMMWORD[16+rsp]
+ movdqu XMMWORD[r13],xmm15
+ pxor xmm5,XMMWORD[32+rsp]
+ movdqu XMMWORD[16+r13],xmm0
+ pxor xmm3,XMMWORD[48+rsp]
+ movdqu XMMWORD[32+r13],xmm5
+ pxor xmm1,XMMWORD[64+rsp]
+ movdqu XMMWORD[48+r13],xmm3
+ pxor xmm6,XMMWORD[80+rsp]
+ movdqu XMMWORD[64+r13],xmm1
+ pxor xmm2,XMMWORD[96+rsp]
+ movdqu XMMWORD[80+r13],xmm6
+ movdqu XMMWORD[96+r13],xmm2
+ lea r13,[112+r13]
+
+ movdqa xmm6,XMMWORD[112+rsp]
+ jmp NEAR $L$xts_dec_done
+ALIGN 16
+$L$xts_dec_6:
+ pxor xmm3,xmm11
+ lea r12,[96+r12]
+ pxor xmm4,xmm12
+ lea rax,[128+rsp]
+ mov r10d,edx
+
+ call _bsaes_decrypt8
+
+ pxor xmm15,XMMWORD[rsp]
+ pxor xmm0,XMMWORD[16+rsp]
+ movdqu XMMWORD[r13],xmm15
+ pxor xmm5,XMMWORD[32+rsp]
+ movdqu XMMWORD[16+r13],xmm0
+ pxor xmm3,XMMWORD[48+rsp]
+ movdqu XMMWORD[32+r13],xmm5
+ pxor xmm1,XMMWORD[64+rsp]
+ movdqu XMMWORD[48+r13],xmm3
+ pxor xmm6,XMMWORD[80+rsp]
+ movdqu XMMWORD[64+r13],xmm1
+ movdqu XMMWORD[80+r13],xmm6
+ lea r13,[96+r13]
+
+ movdqa xmm6,XMMWORD[96+rsp]
+ jmp NEAR $L$xts_dec_done
+ALIGN 16
+$L$xts_dec_5:
+ pxor xmm2,xmm10
+ lea r12,[80+r12]
+ pxor xmm3,xmm11
+ lea rax,[128+rsp]
+ mov r10d,edx
+
+ call _bsaes_decrypt8
+
+ pxor xmm15,XMMWORD[rsp]
+ pxor xmm0,XMMWORD[16+rsp]
+ movdqu XMMWORD[r13],xmm15
+ pxor xmm5,XMMWORD[32+rsp]
+ movdqu XMMWORD[16+r13],xmm0
+ pxor xmm3,XMMWORD[48+rsp]
+ movdqu XMMWORD[32+r13],xmm5
+ pxor xmm1,XMMWORD[64+rsp]
+ movdqu XMMWORD[48+r13],xmm3
+ movdqu XMMWORD[64+r13],xmm1
+ lea r13,[80+r13]
+
+ movdqa xmm6,XMMWORD[80+rsp]
+ jmp NEAR $L$xts_dec_done
+ALIGN 16
+$L$xts_dec_4:
+ pxor xmm1,xmm9
+ lea r12,[64+r12]
+ pxor xmm2,xmm10
+ lea rax,[128+rsp]
+ mov r10d,edx
+
+ call _bsaes_decrypt8
+
+ pxor xmm15,XMMWORD[rsp]
+ pxor xmm0,XMMWORD[16+rsp]
+ movdqu XMMWORD[r13],xmm15
+ pxor xmm5,XMMWORD[32+rsp]
+ movdqu XMMWORD[16+r13],xmm0
+ pxor xmm3,XMMWORD[48+rsp]
+ movdqu XMMWORD[32+r13],xmm5
+ movdqu XMMWORD[48+r13],xmm3
+ lea r13,[64+r13]
+
+ movdqa xmm6,XMMWORD[64+rsp]
+ jmp NEAR $L$xts_dec_done
+ALIGN 16
+$L$xts_dec_3:
+ pxor xmm0,xmm8
+ lea r12,[48+r12]
+ pxor xmm1,xmm9
+ lea rax,[128+rsp]
+ mov r10d,edx
+
+ call _bsaes_decrypt8
+
+ pxor xmm15,XMMWORD[rsp]
+ pxor xmm0,XMMWORD[16+rsp]
+ movdqu XMMWORD[r13],xmm15
+ pxor xmm5,XMMWORD[32+rsp]
+ movdqu XMMWORD[16+r13],xmm0
+ movdqu XMMWORD[32+r13],xmm5
+ lea r13,[48+r13]
+
+ movdqa xmm6,XMMWORD[48+rsp]
+ jmp NEAR $L$xts_dec_done
+ALIGN 16
+$L$xts_dec_2:
+ pxor xmm15,xmm7
+ lea r12,[32+r12]
+ pxor xmm0,xmm8
+ lea rax,[128+rsp]
+ mov r10d,edx
+
+ call _bsaes_decrypt8
+
+ pxor xmm15,XMMWORD[rsp]
+ pxor xmm0,XMMWORD[16+rsp]
+ movdqu XMMWORD[r13],xmm15
+ movdqu XMMWORD[16+r13],xmm0
+ lea r13,[32+r13]
+
+ movdqa xmm6,XMMWORD[32+rsp]
+ jmp NEAR $L$xts_dec_done
+ALIGN 16
+$L$xts_dec_1:
+ pxor xmm7,xmm15
+ lea r12,[16+r12]
+ movdqa XMMWORD[32+rbp],xmm7
+ lea rcx,[32+rbp]
+ lea rdx,[32+rbp]
+ lea r8,[r15]
+ call asm_AES_decrypt
+ pxor xmm15,XMMWORD[32+rbp]
+
+
+
+
+
+ movdqu XMMWORD[r13],xmm15
+ lea r13,[16+r13]
+
+ movdqa xmm6,XMMWORD[16+rsp]
+
+$L$xts_dec_done:
+ and ebx,15
+ jz NEAR $L$xts_dec_ret
+
+ pxor xmm14,xmm14
+ movdqa xmm12,XMMWORD[$L$xts_magic]
+ pcmpgtd xmm14,xmm6
+ pshufd xmm13,xmm14,0x13
+ movdqa xmm5,xmm6
+ paddq xmm6,xmm6
+ pand xmm13,xmm12
+ movdqu xmm15,XMMWORD[r12]
+ pxor xmm6,xmm13
+
+ lea rcx,[32+rbp]
+ pxor xmm15,xmm6
+ lea rdx,[32+rbp]
+ movdqa XMMWORD[32+rbp],xmm15
+ lea r8,[r15]
+ call asm_AES_decrypt
+ pxor xmm6,XMMWORD[32+rbp]
+ mov rdx,r13
+ movdqu XMMWORD[r13],xmm6
+
+$L$xts_dec_steal:
+ movzx eax,BYTE[16+r12]
+ movzx ecx,BYTE[rdx]
+ lea r12,[1+r12]
+ mov BYTE[rdx],al
+ mov BYTE[16+rdx],cl
+ lea rdx,[1+rdx]
+ sub ebx,1
+ jnz NEAR $L$xts_dec_steal
+
+ movdqu xmm15,XMMWORD[r13]
+ lea rcx,[32+rbp]
+ pxor xmm15,xmm5
+ lea rdx,[32+rbp]
+ movdqa XMMWORD[32+rbp],xmm15
+ lea r8,[r15]
+ call asm_AES_decrypt
+ pxor xmm5,XMMWORD[32+rbp]
+ movdqu XMMWORD[r13],xmm5
+
+$L$xts_dec_ret:
+ lea rax,[rsp]
+ pxor xmm0,xmm0
+$L$xts_dec_bzero:
+ movdqa XMMWORD[rax],xmm0
+ movdqa XMMWORD[16+rax],xmm0
+ lea rax,[32+rax]
+ cmp rbp,rax
+ ja NEAR $L$xts_dec_bzero
+
+ lea rax,[120+rbp]
+
+ movaps xmm6,XMMWORD[64+rbp]
+ movaps xmm7,XMMWORD[80+rbp]
+ movaps xmm8,XMMWORD[96+rbp]
+ movaps xmm9,XMMWORD[112+rbp]
+ movaps xmm10,XMMWORD[128+rbp]
+ movaps xmm11,XMMWORD[144+rbp]
+ movaps xmm12,XMMWORD[160+rbp]
+ movaps xmm13,XMMWORD[176+rbp]
+ movaps xmm14,XMMWORD[192+rbp]
+ movaps xmm15,XMMWORD[208+rbp]
+ lea rax,[160+rax]
+$L$xts_dec_tail:
+ mov r15,QWORD[((-48))+rax]
+
+ mov r14,QWORD[((-40))+rax]
+
+ mov r13,QWORD[((-32))+rax]
+
+ mov r12,QWORD[((-24))+rax]
+
+ mov rbx,QWORD[((-16))+rax]
+
+ mov rbp,QWORD[((-8))+rax]
+
+ lea rsp,[rax]
+
+$L$xts_dec_epilogue:
+ DB 0F3h,0C3h ;repret
+
+
+
+ALIGN 64
+_bsaes_const:
+$L$M0ISR:
+ DQ 0x0a0e0206070b0f03,0x0004080c0d010509
+$L$ISRM0:
+ DQ 0x01040b0e0205080f,0x0306090c00070a0d
+$L$ISR:
+ DQ 0x0504070602010003,0x0f0e0d0c080b0a09
+$L$BS0:
+ DQ 0x5555555555555555,0x5555555555555555
+$L$BS1:
+ DQ 0x3333333333333333,0x3333333333333333
+$L$BS2:
+ DQ 0x0f0f0f0f0f0f0f0f,0x0f0f0f0f0f0f0f0f
+$L$SR:
+ DQ 0x0504070600030201,0x0f0e0d0c0a09080b
+$L$SRM0:
+ DQ 0x0304090e00050a0f,0x01060b0c0207080d
+$L$M0SR:
+ DQ 0x0a0e02060f03070b,0x0004080c05090d01
+$L$SWPUP:
+ DQ 0x0706050403020100,0x0c0d0e0f0b0a0908
+$L$SWPUPM0SR:
+ DQ 0x0a0d02060c03070b,0x0004080f05090e01
+$L$ADD1:
+ DQ 0x0000000000000000,0x0000000100000000
+$L$ADD2:
+ DQ 0x0000000000000000,0x0000000200000000
+$L$ADD3:
+ DQ 0x0000000000000000,0x0000000300000000
+$L$ADD4:
+ DQ 0x0000000000000000,0x0000000400000000
+$L$ADD5:
+ DQ 0x0000000000000000,0x0000000500000000
+$L$ADD6:
+ DQ 0x0000000000000000,0x0000000600000000
+$L$ADD7:
+ DQ 0x0000000000000000,0x0000000700000000
+$L$ADD8:
+ DQ 0x0000000000000000,0x0000000800000000
+$L$xts_magic:
+ DD 0x87,0,1,0
+$L$masks:
+ DQ 0x0101010101010101,0x0101010101010101
+ DQ 0x0202020202020202,0x0202020202020202
+ DQ 0x0404040404040404,0x0404040404040404
+ DQ 0x0808080808080808,0x0808080808080808
+$L$M0:
+ DQ 0x02060a0e03070b0f,0x0004080c0105090d
+$L$63:
+ DQ 0x6363636363636363,0x6363636363636363
+DB 66,105,116,45,115,108,105,99,101,100,32,65,69,83,32,102
+DB 111,114,32,120,56,54,95,54,52,47,83,83,83,69,51,44
+DB 32,69,109,105,108,105,97,32,75,195,164,115,112,101,114,44
+DB 32,80,101,116,101,114,32,83,99,104,119,97,98,101,44,32
+DB 65,110,100,121,32,80,111,108,121,97,107,111,118,0
+ALIGN 64
+
+EXTERN __imp_RtlVirtualUnwind
+
+ALIGN 16
+se_handler:
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD[120+r8]
+ mov rbx,QWORD[248+r8]
+
+ mov rsi,QWORD[8+r9]
+ mov r11,QWORD[56+r9]
+
+ mov r10d,DWORD[r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jbe NEAR $L$in_prologue
+
+ mov r10d,DWORD[4+r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jae NEAR $L$in_prologue
+
+ mov r10d,DWORD[8+r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jae NEAR $L$in_tail
+
+ mov rax,QWORD[160+r8]
+
+ lea rsi,[64+rax]
+ lea rdi,[512+r8]
+ mov ecx,20
+ DD 0xa548f3fc
+ lea rax,[((160+120))+rax]
+
+$L$in_tail:
+ mov rbp,QWORD[((-48))+rax]
+ mov rbx,QWORD[((-40))+rax]
+ mov r12,QWORD[((-32))+rax]
+ mov r13,QWORD[((-24))+rax]
+ mov r14,QWORD[((-16))+rax]
+ mov r15,QWORD[((-8))+rax]
+ mov QWORD[144+r8],rbx
+ mov QWORD[160+r8],rbp
+ mov QWORD[216+r8],r12
+ mov QWORD[224+r8],r13
+ mov QWORD[232+r8],r14
+ mov QWORD[240+r8],r15
+
+$L$in_prologue:
+ mov QWORD[152+r8],rax
+
+ mov rdi,QWORD[40+r9]
+ mov rsi,r8
+ mov ecx,154
+ DD 0xa548f3fc
+
+ mov rsi,r9
+ xor rcx,rcx
+ mov rdx,QWORD[8+rsi]
+ mov r8,QWORD[rsi]
+ mov r9,QWORD[16+rsi]
+ mov r10,QWORD[40+rsi]
+ lea r11,[56+rsi]
+ lea r12,[24+rsi]
+ mov QWORD[32+rsp],r10
+ mov QWORD[40+rsp],r11
+ mov QWORD[48+rsp],r12
+ mov QWORD[56+rsp],rcx
+ call QWORD[__imp_RtlVirtualUnwind]
+
+ mov eax,1
+ add rsp,64
+ popfq
+ pop r15
+ pop r14
+ pop r13
+ pop r12
+ pop rbp
+ pop rbx
+ pop rdi
+ pop rsi
+ DB 0F3h,0C3h ;repret
+
+
+section .pdata rdata align=4
+ALIGN 4
+ DD $L$cbc_dec_prologue wrt ..imagebase
+ DD $L$cbc_dec_epilogue wrt ..imagebase
+ DD $L$cbc_dec_info wrt ..imagebase
+
+ DD $L$ctr_enc_prologue wrt ..imagebase
+ DD $L$ctr_enc_epilogue wrt ..imagebase
+ DD $L$ctr_enc_info wrt ..imagebase
+
+ DD $L$xts_enc_prologue wrt ..imagebase
+ DD $L$xts_enc_epilogue wrt ..imagebase
+ DD $L$xts_enc_info wrt ..imagebase
+
+ DD $L$xts_dec_prologue wrt ..imagebase
+ DD $L$xts_dec_epilogue wrt ..imagebase
+ DD $L$xts_dec_info wrt ..imagebase
+
+section .xdata rdata align=8
+ALIGN 8
+$L$cbc_dec_info:
+DB 9,0,0,0
+ DD se_handler wrt ..imagebase
+ DD $L$cbc_dec_body wrt ..imagebase,$L$cbc_dec_epilogue wrt ..imagebase
+ DD $L$cbc_dec_tail wrt ..imagebase
+ DD 0
+$L$ctr_enc_info:
+DB 9,0,0,0
+ DD se_handler wrt ..imagebase
+ DD $L$ctr_enc_body wrt ..imagebase,$L$ctr_enc_epilogue wrt ..imagebase
+ DD $L$ctr_enc_tail wrt ..imagebase
+ DD 0
+$L$xts_enc_info:
+DB 9,0,0,0
+ DD se_handler wrt ..imagebase
+ DD $L$xts_enc_body wrt ..imagebase,$L$xts_enc_epilogue wrt ..imagebase
+ DD $L$xts_enc_tail wrt ..imagebase
+ DD 0
+$L$xts_dec_info:
+DB 9,0,0,0
+ DD se_handler wrt ..imagebase
+ DD $L$xts_dec_body wrt ..imagebase,$L$xts_dec_epilogue wrt ..imagebase
+ DD $L$xts_dec_tail wrt ..imagebase
+ DD 0
diff --git a/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/vpaes-x86_64.nasm b/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/vpaes-x86_64.nasm
new file mode 100644
index 0000000000..74f87a0f87
--- /dev/null
+++ b/CryptoPkg/Library/OpensslLib/OpensslGen/X64-MSFT/crypto/aes/vpaes-x86_64.nasm
@@ -0,0 +1,1168 @@
+default rel
+%define XMMWORD
+%define YMMWORD
+%define ZMMWORD
+section .text code align=64
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ALIGN 16
+_vpaes_encrypt_core:
+
+ mov r9,rdx
+ mov r11,16
+ mov eax,DWORD[240+rdx]
+ movdqa xmm1,xmm9
+ movdqa xmm2,XMMWORD[$L$k_ipt]
+ pandn xmm1,xmm0
+ movdqu xmm5,XMMWORD[r9]
+ psrld xmm1,4
+ pand xmm0,xmm9
+DB 102,15,56,0,208
+ movdqa xmm0,XMMWORD[(($L$k_ipt+16))]
+DB 102,15,56,0,193
+ pxor xmm2,xmm5
+ add r9,16
+ pxor xmm0,xmm2
+ lea r10,[$L$k_mc_backward]
+ jmp NEAR $L$enc_entry
+
+ALIGN 16
+$L$enc_loop:
+
+ movdqa xmm4,xmm13
+ movdqa xmm0,xmm12
+DB 102,15,56,0,226
+DB 102,15,56,0,195
+ pxor xmm4,xmm5
+ movdqa xmm5,xmm15
+ pxor xmm0,xmm4
+ movdqa xmm1,XMMWORD[((-64))+r10*1+r11]
+DB 102,15,56,0,234
+ movdqa xmm4,XMMWORD[r10*1+r11]
+ movdqa xmm2,xmm14
+DB 102,15,56,0,211
+ movdqa xmm3,xmm0
+ pxor xmm2,xmm5
+DB 102,15,56,0,193
+ add r9,16
+ pxor xmm0,xmm2
+DB 102,15,56,0,220
+ add r11,16
+ pxor xmm3,xmm0
+DB 102,15,56,0,193
+ and r11,0x30
+ sub rax,1
+ pxor xmm0,xmm3
+
+$L$enc_entry:
+
+ movdqa xmm1,xmm9
+ movdqa xmm5,xmm11
+ pandn xmm1,xmm0
+ psrld xmm1,4
+ pand xmm0,xmm9
+DB 102,15,56,0,232
+ movdqa xmm3,xmm10
+ pxor xmm0,xmm1
+DB 102,15,56,0,217
+ movdqa xmm4,xmm10
+ pxor xmm3,xmm5
+DB 102,15,56,0,224
+ movdqa xmm2,xmm10
+ pxor xmm4,xmm5
+DB 102,15,56,0,211
+ movdqa xmm3,xmm10
+ pxor xmm2,xmm0
+DB 102,15,56,0,220
+ movdqu xmm5,XMMWORD[r9]
+ pxor xmm3,xmm1
+ jnz NEAR $L$enc_loop
+
+
+ movdqa xmm4,XMMWORD[((-96))+r10]
+ movdqa xmm0,XMMWORD[((-80))+r10]
+DB 102,15,56,0,226
+ pxor xmm4,xmm5
+DB 102,15,56,0,195
+ movdqa xmm1,XMMWORD[64+r10*1+r11]
+ pxor xmm0,xmm4
+DB 102,15,56,0,193
+ DB 0F3h,0C3h ;repret
+
+
+
+
+
+
+
+
+
+ALIGN 16
+_vpaes_decrypt_core:
+
+ mov r9,rdx
+ mov eax,DWORD[240+rdx]
+ movdqa xmm1,xmm9
+ movdqa xmm2,XMMWORD[$L$k_dipt]
+ pandn xmm1,xmm0
+ mov r11,rax
+ psrld xmm1,4
+ movdqu xmm5,XMMWORD[r9]
+ shl r11,4
+ pand xmm0,xmm9
+DB 102,15,56,0,208
+ movdqa xmm0,XMMWORD[(($L$k_dipt+16))]
+ xor r11,0x30
+ lea r10,[$L$k_dsbd]
+DB 102,15,56,0,193
+ and r11,0x30
+ pxor xmm2,xmm5
+ movdqa xmm5,XMMWORD[(($L$k_mc_forward+48))]
+ pxor xmm0,xmm2
+ add r9,16
+ add r11,r10
+ jmp NEAR $L$dec_entry
+
+ALIGN 16
+$L$dec_loop:
+
+
+
+ movdqa xmm4,XMMWORD[((-32))+r10]
+ movdqa xmm1,XMMWORD[((-16))+r10]
+DB 102,15,56,0,226
+DB 102,15,56,0,203
+ pxor xmm0,xmm4
+ movdqa xmm4,XMMWORD[r10]
+ pxor xmm0,xmm1
+ movdqa xmm1,XMMWORD[16+r10]
+
+DB 102,15,56,0,226
+DB 102,15,56,0,197
+DB 102,15,56,0,203
+ pxor xmm0,xmm4
+ movdqa xmm4,XMMWORD[32+r10]
+ pxor xmm0,xmm1
+ movdqa xmm1,XMMWORD[48+r10]
+
+DB 102,15,56,0,226
+DB 102,15,56,0,197
+DB 102,15,56,0,203
+ pxor xmm0,xmm4
+ movdqa xmm4,XMMWORD[64+r10]
+ pxor xmm0,xmm1
+ movdqa xmm1,XMMWORD[80+r10]
+
+DB 102,15,56,0,226
+DB 102,15,56,0,197
+DB 102,15,56,0,203
+ pxor xmm0,xmm4
+ add r9,16
+DB 102,15,58,15,237,12
+ pxor xmm0,xmm1
+ sub rax,1
+
+$L$dec_entry:
+
+ movdqa xmm1,xmm9
+ pandn xmm1,xmm0
+ movdqa xmm2,xmm11
+ psrld xmm1,4
+ pand xmm0,xmm9
+DB 102,15,56,0,208
+ movdqa xmm3,xmm10
+ pxor xmm0,xmm1
+DB 102,15,56,0,217
+ movdqa xmm4,xmm10
+ pxor xmm3,xmm2
+DB 102,15,56,0,224
+ pxor xmm4,xmm2
+ movdqa xmm2,xmm10
+DB 102,15,56,0,211
+ movdqa xmm3,xmm10
+ pxor xmm2,xmm0
+DB 102,15,56,0,220
+ movdqu xmm0,XMMWORD[r9]
+ pxor xmm3,xmm1
+ jnz NEAR $L$dec_loop
+
+
+ movdqa xmm4,XMMWORD[96+r10]
+DB 102,15,56,0,226
+ pxor xmm4,xmm0
+ movdqa xmm0,XMMWORD[112+r10]
+ movdqa xmm2,XMMWORD[((-352))+r11]
+DB 102,15,56,0,195
+ pxor xmm0,xmm4
+DB 102,15,56,0,194
+ DB 0F3h,0C3h ;repret
+
+
+
+
+
+
+
+
+
+ALIGN 16
+_vpaes_schedule_core:
+
+
+
+
+
+
+ call _vpaes_preheat
+ movdqa xmm8,XMMWORD[$L$k_rcon]
+ movdqu xmm0,XMMWORD[rdi]
+
+
+ movdqa xmm3,xmm0
+ lea r11,[$L$k_ipt]
+ call _vpaes_schedule_transform
+ movdqa xmm7,xmm0
+
+ lea r10,[$L$k_sr]
+ test rcx,rcx
+ jnz NEAR $L$schedule_am_decrypting
+
+
+ movdqu XMMWORD[rdx],xmm0
+ jmp NEAR $L$schedule_go
+
+$L$schedule_am_decrypting:
+
+ movdqa xmm1,XMMWORD[r10*1+r8]
+DB 102,15,56,0,217
+ movdqu XMMWORD[rdx],xmm3
+ xor r8,0x30
+
+$L$schedule_go:
+ cmp esi,192
+ ja NEAR $L$schedule_256
+ je NEAR $L$schedule_192
+
+
+
+
+
+
+
+
+
+
+$L$schedule_128:
+ mov esi,10
+
+$L$oop_schedule_128:
+ call _vpaes_schedule_round
+ dec rsi
+ jz NEAR $L$schedule_mangle_last
+ call _vpaes_schedule_mangle
+ jmp NEAR $L$oop_schedule_128
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ALIGN 16
+$L$schedule_192:
+ movdqu xmm0,XMMWORD[8+rdi]
+ call _vpaes_schedule_transform
+ movdqa xmm6,xmm0
+ pxor xmm4,xmm4
+ movhlps xmm6,xmm4
+ mov esi,4
+
+$L$oop_schedule_192:
+ call _vpaes_schedule_round
+DB 102,15,58,15,198,8
+ call _vpaes_schedule_mangle
+ call _vpaes_schedule_192_smear
+ call _vpaes_schedule_mangle
+ call _vpaes_schedule_round
+ dec rsi
+ jz NEAR $L$schedule_mangle_last
+ call _vpaes_schedule_mangle
+ call _vpaes_schedule_192_smear
+ jmp NEAR $L$oop_schedule_192
+
+
+
+
+
+
+
+
+
+
+
+ALIGN 16
+$L$schedule_256:
+ movdqu xmm0,XMMWORD[16+rdi]
+ call _vpaes_schedule_transform
+ mov esi,7
+
+$L$oop_schedule_256:
+ call _vpaes_schedule_mangle
+ movdqa xmm6,xmm0
+
+
+ call _vpaes_schedule_round
+ dec rsi
+ jz NEAR $L$schedule_mangle_last
+ call _vpaes_schedule_mangle
+
+
+ pshufd xmm0,xmm0,0xFF
+ movdqa xmm5,xmm7
+ movdqa xmm7,xmm6
+ call _vpaes_schedule_low_round
+ movdqa xmm7,xmm5
+
+ jmp NEAR $L$oop_schedule_256
+
+
+
+
+
+
+
+
+
+
+
+
+ALIGN 16
+$L$schedule_mangle_last:
+
+ lea r11,[$L$k_deskew]
+ test rcx,rcx
+ jnz NEAR $L$schedule_mangle_last_dec
+
+
+ movdqa xmm1,XMMWORD[r10*1+r8]
+DB 102,15,56,0,193
+ lea r11,[$L$k_opt]
+ add rdx,32
+
+$L$schedule_mangle_last_dec:
+ add rdx,-16
+ pxor xmm0,XMMWORD[$L$k_s63]
+ call _vpaes_schedule_transform
+ movdqu XMMWORD[rdx],xmm0
+
+
+ pxor xmm0,xmm0
+ pxor xmm1,xmm1
+ pxor xmm2,xmm2
+ pxor xmm3,xmm3
+ pxor xmm4,xmm4
+ pxor xmm5,xmm5
+ pxor xmm6,xmm6
+ pxor xmm7,xmm7
+ DB 0F3h,0C3h ;repret
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ALIGN 16
+_vpaes_schedule_192_smear:
+
+ pshufd xmm1,xmm6,0x80
+ pshufd xmm0,xmm7,0xFE
+ pxor xmm6,xmm1
+ pxor xmm1,xmm1
+ pxor xmm6,xmm0
+ movdqa xmm0,xmm6
+ movhlps xmm6,xmm1
+ DB 0F3h,0C3h ;repret
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ALIGN 16
+_vpaes_schedule_round:
+
+
+ pxor xmm1,xmm1
+DB 102,65,15,58,15,200,15
+DB 102,69,15,58,15,192,15
+ pxor xmm7,xmm1
+
+
+ pshufd xmm0,xmm0,0xFF
+DB 102,15,58,15,192,1
+
+
+
+
+_vpaes_schedule_low_round:
+
+ movdqa xmm1,xmm7
+ pslldq xmm7,4
+ pxor xmm7,xmm1
+ movdqa xmm1,xmm7
+ pslldq xmm7,8
+ pxor xmm7,xmm1
+ pxor xmm7,XMMWORD[$L$k_s63]
+
+
+ movdqa xmm1,xmm9
+ pandn xmm1,xmm0
+ psrld xmm1,4
+ pand xmm0,xmm9
+ movdqa xmm2,xmm11
+DB 102,15,56,0,208
+ pxor xmm0,xmm1
+ movdqa xmm3,xmm10
+DB 102,15,56,0,217
+ pxor xmm3,xmm2
+ movdqa xmm4,xmm10
+DB 102,15,56,0,224
+ pxor xmm4,xmm2
+ movdqa xmm2,xmm10
+DB 102,15,56,0,211
+ pxor xmm2,xmm0
+ movdqa xmm3,xmm10
+DB 102,15,56,0,220
+ pxor xmm3,xmm1
+ movdqa xmm4,xmm13
+DB 102,15,56,0,226
+ movdqa xmm0,xmm12
+DB 102,15,56,0,195
+ pxor xmm0,xmm4
+
+
+ pxor xmm0,xmm7
+ movdqa xmm7,xmm0
+ DB 0F3h,0C3h ;repret
+
+
+
+
+
+
+
+
+
+
+
+
+
+ALIGN 16
+_vpaes_schedule_transform:
+
+ movdqa xmm1,xmm9
+ pandn xmm1,xmm0
+ psrld xmm1,4
+ pand xmm0,xmm9
+ movdqa xmm2,XMMWORD[r11]
+DB 102,15,56,0,208
+ movdqa xmm0,XMMWORD[16+r11]
+DB 102,15,56,0,193
+ pxor xmm0,xmm2
+ DB 0F3h,0C3h ;repret
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ALIGN 16
+_vpaes_schedule_mangle:
+
+ movdqa xmm4,xmm0
+ movdqa xmm5,XMMWORD[$L$k_mc_forward]
+ test rcx,rcx
+ jnz NEAR $L$schedule_mangle_dec
+
+
+ add rdx,16
+ pxor xmm4,XMMWORD[$L$k_s63]
+DB 102,15,56,0,229
+ movdqa xmm3,xmm4
+DB 102,15,56,0,229
+ pxor xmm3,xmm4
+DB 102,15,56,0,229
+ pxor xmm3,xmm4
+
+ jmp NEAR $L$schedule_mangle_both
+ALIGN 16
+$L$schedule_mangle_dec:
+
+ lea r11,[$L$k_dksd]
+ movdqa xmm1,xmm9
+ pandn xmm1,xmm4
+ psrld xmm1,4
+ pand xmm4,xmm9
+
+ movdqa xmm2,XMMWORD[r11]
+DB 102,15,56,0,212
+ movdqa xmm3,XMMWORD[16+r11]
+DB 102,15,56,0,217
+ pxor xmm3,xmm2
+DB 102,15,56,0,221
+
+ movdqa xmm2,XMMWORD[32+r11]
+DB 102,15,56,0,212
+ pxor xmm2,xmm3
+ movdqa xmm3,XMMWORD[48+r11]
+DB 102,15,56,0,217
+ pxor xmm3,xmm2
+DB 102,15,56,0,221
+
+ movdqa xmm2,XMMWORD[64+r11]
+DB 102,15,56,0,212
+ pxor xmm2,xmm3
+ movdqa xmm3,XMMWORD[80+r11]
+DB 102,15,56,0,217
+ pxor xmm3,xmm2
+DB 102,15,56,0,221
+
+ movdqa xmm2,XMMWORD[96+r11]
+DB 102,15,56,0,212
+ pxor xmm2,xmm3
+ movdqa xmm3,XMMWORD[112+r11]
+DB 102,15,56,0,217
+ pxor xmm3,xmm2
+
+ add rdx,-16
+
+$L$schedule_mangle_both:
+ movdqa xmm1,XMMWORD[r10*1+r8]
+DB 102,15,56,0,217
+ add r8,-16
+ and r8,0x30
+ movdqu XMMWORD[rdx],xmm3
+ DB 0F3h,0C3h ;repret
+
+
+
+
+
+
+global vpaes_set_encrypt_key
+
+ALIGN 16
+vpaes_set_encrypt_key:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_vpaes_set_encrypt_key:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+
+DB 243,15,30,250
+ lea rsp,[((-184))+rsp]
+ movaps XMMWORD[16+rsp],xmm6
+ movaps XMMWORD[32+rsp],xmm7
+ movaps XMMWORD[48+rsp],xmm8
+ movaps XMMWORD[64+rsp],xmm9
+ movaps XMMWORD[80+rsp],xmm10
+ movaps XMMWORD[96+rsp],xmm11
+ movaps XMMWORD[112+rsp],xmm12
+ movaps XMMWORD[128+rsp],xmm13
+ movaps XMMWORD[144+rsp],xmm14
+ movaps XMMWORD[160+rsp],xmm15
+$L$enc_key_body:
+ mov eax,esi
+ shr eax,5
+ add eax,5
+ mov DWORD[240+rdx],eax
+
+ mov ecx,0
+ mov r8d,0x30
+ call _vpaes_schedule_core
+ movaps xmm6,XMMWORD[16+rsp]
+ movaps xmm7,XMMWORD[32+rsp]
+ movaps xmm8,XMMWORD[48+rsp]
+ movaps xmm9,XMMWORD[64+rsp]
+ movaps xmm10,XMMWORD[80+rsp]
+ movaps xmm11,XMMWORD[96+rsp]
+ movaps xmm12,XMMWORD[112+rsp]
+ movaps xmm13,XMMWORD[128+rsp]
+ movaps xmm14,XMMWORD[144+rsp]
+ movaps xmm15,XMMWORD[160+rsp]
+ lea rsp,[184+rsp]
+$L$enc_key_epilogue:
+ xor eax,eax
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_vpaes_set_encrypt_key:
+
+global vpaes_set_decrypt_key
+
+ALIGN 16
+vpaes_set_decrypt_key:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_vpaes_set_decrypt_key:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+
+DB 243,15,30,250
+ lea rsp,[((-184))+rsp]
+ movaps XMMWORD[16+rsp],xmm6
+ movaps XMMWORD[32+rsp],xmm7
+ movaps XMMWORD[48+rsp],xmm8
+ movaps XMMWORD[64+rsp],xmm9
+ movaps XMMWORD[80+rsp],xmm10
+ movaps XMMWORD[96+rsp],xmm11
+ movaps XMMWORD[112+rsp],xmm12
+ movaps XMMWORD[128+rsp],xmm13
+ movaps XMMWORD[144+rsp],xmm14
+ movaps XMMWORD[160+rsp],xmm15
+$L$dec_key_body:
+ mov eax,esi
+ shr eax,5
+ add eax,5
+ mov DWORD[240+rdx],eax
+ shl eax,4
+ lea rdx,[16+rax*1+rdx]
+
+ mov ecx,1
+ mov r8d,esi
+ shr r8d,1
+ and r8d,32
+ xor r8d,32
+ call _vpaes_schedule_core
+ movaps xmm6,XMMWORD[16+rsp]
+ movaps xmm7,XMMWORD[32+rsp]
+ movaps xmm8,XMMWORD[48+rsp]
+ movaps xmm9,XMMWORD[64+rsp]
+ movaps xmm10,XMMWORD[80+rsp]
+ movaps xmm11,XMMWORD[96+rsp]
+ movaps xmm12,XMMWORD[112+rsp]
+ movaps xmm13,XMMWORD[128+rsp]
+ movaps xmm14,XMMWORD[144+rsp]
+ movaps xmm15,XMMWORD[160+rsp]
+ lea rsp,[184+rsp]
+$L$dec_key_epilogue:
+ xor eax,eax
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_vpaes_set_decrypt_key:
+
+global vpaes_encrypt
+
+ALIGN 16
+vpaes_encrypt:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_vpaes_encrypt:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+
+DB 243,15,30,250
+ lea rsp,[((-184))+rsp]
+ movaps XMMWORD[16+rsp],xmm6
+ movaps XMMWORD[32+rsp],xmm7
+ movaps XMMWORD[48+rsp],xmm8
+ movaps XMMWORD[64+rsp],xmm9
+ movaps XMMWORD[80+rsp],xmm10
+ movaps XMMWORD[96+rsp],xmm11
+ movaps XMMWORD[112+rsp],xmm12
+ movaps XMMWORD[128+rsp],xmm13
+ movaps XMMWORD[144+rsp],xmm14
+ movaps XMMWORD[160+rsp],xmm15
+$L$enc_body:
+ movdqu xmm0,XMMWORD[rdi]
+ call _vpaes_preheat
+ call _vpaes_encrypt_core
+ movdqu XMMWORD[rsi],xmm0
+ movaps xmm6,XMMWORD[16+rsp]
+ movaps xmm7,XMMWORD[32+rsp]
+ movaps xmm8,XMMWORD[48+rsp]
+ movaps xmm9,XMMWORD[64+rsp]
+ movaps xmm10,XMMWORD[80+rsp]
+ movaps xmm11,XMMWORD[96+rsp]
+ movaps xmm12,XMMWORD[112+rsp]
+ movaps xmm13,XMMWORD[128+rsp]
+ movaps xmm14,XMMWORD[144+rsp]
+ movaps xmm15,XMMWORD[160+rsp]
+ lea rsp,[184+rsp]
+$L$enc_epilogue:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_vpaes_encrypt:
+
+global vpaes_decrypt
+
+ALIGN 16
+vpaes_decrypt:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_vpaes_decrypt:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+
+DB 243,15,30,250
+ lea rsp,[((-184))+rsp]
+ movaps XMMWORD[16+rsp],xmm6
+ movaps XMMWORD[32+rsp],xmm7
+ movaps XMMWORD[48+rsp],xmm8
+ movaps XMMWORD[64+rsp],xmm9
+ movaps XMMWORD[80+rsp],xmm10
+ movaps XMMWORD[96+rsp],xmm11
+ movaps XMMWORD[112+rsp],xmm12
+ movaps XMMWORD[128+rsp],xmm13
+ movaps XMMWORD[144+rsp],xmm14
+ movaps XMMWORD[160+rsp],xmm15
+$L$dec_body:
+ movdqu xmm0,XMMWORD[rdi]
+ call _vpaes_preheat
+ call _vpaes_decrypt_core
+ movdqu XMMWORD[rsi],xmm0
+ movaps xmm6,XMMWORD[16+rsp]
+ movaps xmm7,XMMWORD[32+rsp]
+ movaps xmm8,XMMWORD[48+rsp]
+ movaps xmm9,XMMWORD[64+rsp]
+ movaps xmm10,XMMWORD[80+rsp]
+ movaps xmm11,XMMWORD[96+rsp]
+ movaps xmm12,XMMWORD[112+rsp]
+ movaps xmm13,XMMWORD[128+rsp]
+ movaps xmm14,XMMWORD[144+rsp]
+ movaps xmm15,XMMWORD[160+rsp]
+ lea rsp,[184+rsp]
+$L$dec_epilogue:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_vpaes_decrypt:
+global vpaes_cbc_encrypt
+
+ALIGN 16
+vpaes_cbc_encrypt:
+ mov QWORD[8+rsp],rdi ;WIN64 prologue
+ mov QWORD[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_vpaes_cbc_encrypt:
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD[40+rsp]
+ mov r9,QWORD[48+rsp]
+
+
+
+DB 243,15,30,250
+ xchg rdx,rcx
+ sub rcx,16
+ jc NEAR $L$cbc_abort
+ lea rsp,[((-184))+rsp]
+ movaps XMMWORD[16+rsp],xmm6
+ movaps XMMWORD[32+rsp],xmm7
+ movaps XMMWORD[48+rsp],xmm8
+ movaps XMMWORD[64+rsp],xmm9
+ movaps XMMWORD[80+rsp],xmm10
+ movaps XMMWORD[96+rsp],xmm11
+ movaps XMMWORD[112+rsp],xmm12
+ movaps XMMWORD[128+rsp],xmm13
+ movaps XMMWORD[144+rsp],xmm14
+ movaps XMMWORD[160+rsp],xmm15
+$L$cbc_body:
+ movdqu xmm6,XMMWORD[r8]
+ sub rsi,rdi
+ call _vpaes_preheat
+ cmp r9d,0
+ je NEAR $L$cbc_dec_loop
+ jmp NEAR $L$cbc_enc_loop
+ALIGN 16
+$L$cbc_enc_loop:
+ movdqu xmm0,XMMWORD[rdi]
+ pxor xmm0,xmm6
+ call _vpaes_encrypt_core
+ movdqa xmm6,xmm0
+ movdqu XMMWORD[rdi*1+rsi],xmm0
+ lea rdi,[16+rdi]
+ sub rcx,16
+ jnc NEAR $L$cbc_enc_loop
+ jmp NEAR $L$cbc_done
+ALIGN 16
+$L$cbc_dec_loop:
+ movdqu xmm0,XMMWORD[rdi]
+ movdqa xmm7,xmm0
+ call _vpaes_decrypt_core
+ pxor xmm0,xmm6
+ movdqa xmm6,xmm7
+ movdqu XMMWORD[rdi*1+rsi],xmm0
+ lea rdi,[16+rdi]
+ sub rcx,16
+ jnc NEAR $L$cbc_dec_loop
+$L$cbc_done:
+ movdqu XMMWORD[r8],xmm6
+ movaps xmm6,XMMWORD[16+rsp]
+ movaps xmm7,XMMWORD[32+rsp]
+ movaps xmm8,XMMWORD[48+rsp]
+ movaps xmm9,XMMWORD[64+rsp]
+ movaps xmm10,XMMWORD[80+rsp]
+ movaps xmm11,XMMWORD[96+rsp]
+ movaps xmm12,XMMWORD[112+rsp]
+ movaps xmm13,XMMWORD[128+rsp]
+ movaps xmm14,XMMWORD[144+rsp]
+ movaps xmm15,XMMWORD[160+rsp]
+ lea rsp,[184+rsp]
+$L$cbc_epilogue:
+$L$cbc_abort:
+ mov rdi,QWORD[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD[16+rsp]
+ DB 0F3h,0C3h ;repret
+
+$L$SEH_end_vpaes_cbc_encrypt:
+
+
+
+
+
+
+
+ALIGN 16
+_vpaes_preheat:
+
+ lea r10,[$L$k_s0F]
+ movdqa xmm10,XMMWORD[((-32))+r10]
+ movdqa xmm11,XMMWORD[((-16))+r10]
+ movdqa xmm9,XMMWORD[r10]
+ movdqa xmm13,XMMWORD[48+r10]
+ movdqa xmm12,XMMWORD[64+r10]
+ movdqa xmm15,XMMWORD[80+r10]
+ movdqa xmm14,XMMWORD[96+r10]
+ DB 0F3h,0C3h ;repret
+
+
+
+
+
+
+
+
+ALIGN 64
+_vpaes_consts:
+$L$k_inv:
+ DQ 0x0E05060F0D080180,0x040703090A0B0C02
+ DQ 0x01040A060F0B0780,0x030D0E0C02050809
+
+$L$k_s0F:
+ DQ 0x0F0F0F0F0F0F0F0F,0x0F0F0F0F0F0F0F0F
+
+$L$k_ipt:
+ DQ 0xC2B2E8985A2A7000,0xCABAE09052227808
+ DQ 0x4C01307D317C4D00,0xCD80B1FCB0FDCC81
+
+$L$k_sb1:
+ DQ 0xB19BE18FCB503E00,0xA5DF7A6E142AF544
+ DQ 0x3618D415FAE22300,0x3BF7CCC10D2ED9EF
+$L$k_sb2:
+ DQ 0xE27A93C60B712400,0x5EB7E955BC982FCD
+ DQ 0x69EB88400AE12900,0xC2A163C8AB82234A
+$L$k_sbo:
+ DQ 0xD0D26D176FBDC700,0x15AABF7AC502A878
+ DQ 0xCFE474A55FBB6A00,0x8E1E90D1412B35FA
+
+$L$k_mc_forward:
+ DQ 0x0407060500030201,0x0C0F0E0D080B0A09
+ DQ 0x080B0A0904070605,0x000302010C0F0E0D
+ DQ 0x0C0F0E0D080B0A09,0x0407060500030201
+ DQ 0x000302010C0F0E0D,0x080B0A0904070605
+
+$L$k_mc_backward:
+ DQ 0x0605040702010003,0x0E0D0C0F0A09080B
+ DQ 0x020100030E0D0C0F,0x0A09080B06050407
+ DQ 0x0E0D0C0F0A09080B,0x0605040702010003
+ DQ 0x0A09080B06050407,0x020100030E0D0C0F
+
+$L$k_sr:
+ DQ 0x0706050403020100,0x0F0E0D0C0B0A0908
+ DQ 0x030E09040F0A0500,0x0B06010C07020D08
+ DQ 0x0F060D040B020900,0x070E050C030A0108
+ DQ 0x0B0E0104070A0D00,0x0306090C0F020508
+
+$L$k_rcon:
+ DQ 0x1F8391B9AF9DEEB6,0x702A98084D7C7D81
+
+$L$k_s63:
+ DQ 0x5B5B5B5B5B5B5B5B,0x5B5B5B5B5B5B5B5B
+
+$L$k_opt:
+ DQ 0xFF9F4929D6B66000,0xF7974121DEBE6808
+ DQ 0x01EDBD5150BCEC00,0xE10D5DB1B05C0CE0
+
+$L$k_deskew:
+ DQ 0x07E4A34047A4E300,0x1DFEB95A5DBEF91A
+ DQ 0x5F36B5DC83EA6900,0x2841C2ABF49D1E77
+
+
+
+
+
+$L$k_dksd:
+ DQ 0xFEB91A5DA3E44700,0x0740E3A45A1DBEF9
+ DQ 0x41C277F4B5368300,0x5FDC69EAAB289D1E
+$L$k_dksb:
+ DQ 0x9A4FCA1F8550D500,0x03D653861CC94C99
+ DQ 0x115BEDA7B6FC4A00,0xD993256F7E3482C8
+$L$k_dkse:
+ DQ 0xD5031CCA1FC9D600,0x53859A4C994F5086
+ DQ 0xA23196054FDC7BE8,0xCD5EF96A20B31487
+$L$k_dks9:
+ DQ 0xB6116FC87ED9A700,0x4AED933482255BFC
+ DQ 0x4576516227143300,0x8BB89FACE9DAFDCE
+
+
+
+
+
+$L$k_dipt:
+ DQ 0x0F505B040B545F00,0x154A411E114E451A
+ DQ 0x86E383E660056500,0x12771772F491F194
+
+$L$k_dsb9:
+ DQ 0x851C03539A86D600,0xCAD51F504F994CC9
+ DQ 0xC03B1789ECD74900,0x725E2C9EB2FBA565
+$L$k_dsbd:
+ DQ 0x7D57CCDFE6B1A200,0xF56E9B13882A4439
+ DQ 0x3CE2FAF724C6CB00,0x2931180D15DEEFD3
+$L$k_dsbb:
+ DQ 0xD022649296B44200,0x602646F6B0F2D404
+ DQ 0xC19498A6CD596700,0xF3FF0C3E3255AA6B
+$L$k_dsbe:
+ DQ 0x46F2929626D4D000,0x2242600464B4F6B0
+ DQ 0x0C55A6CDFFAAC100,0x9467F36B98593E32
+$L$k_dsbo:
+ DQ 0x1387EA537EF94000,0xC7AA6DB9D4943E2D
+ DQ 0x12D7560F93441D00,0xCA4B8159D8C58E9C
+DB 86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105
+DB 111,110,32,65,69,83,32,102,111,114,32,120,56,54,95,54
+DB 52,47,83,83,83,69,51,44,32,77,105,107,101,32,72,97
+DB 109,98,117,114,103,32,40,83,116,97,110,102,111,114,100,32
+DB 85,110,105,118,101,114,115,105,116,121,41,0
+ALIGN 64
+
+EXTERN __imp_RtlVirtualUnwind
+
+ALIGN 16
+se_handler:
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD[120+r8]
+ mov rbx,QWORD[248+r8]
+
+ mov rsi,QWORD[8+r9]
+ mov r11,QWORD[56+r9]
+
+ mov r10d,DWORD[r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jb NEAR $L$in_prologue
+
+ mov rax,QWORD[152+r8]
+
+ mov r10d,DWORD[4+r11]
+ lea r10,[r10*1+rsi]
+ cmp rbx,r10
+ jae NEAR $L$in_prologue
+
+ lea rsi,[16+rax]
+ lea rdi,[512+r8]
+ mov ecx,20
+ DD 0xa548f3fc
+ lea rax,[184+rax]
+
+$L$in_prologue:
+ mov rdi,QWORD[8+rax]
+ mov rsi,QWORD[16+rax]
+ mov QWORD[152+r8],rax
+ mov QWORD[168+r8],rsi
+ mov QWORD[176+r8],rdi
+
+ mov rdi,QWORD[40+r9]
+ mov rsi,r8
+ mov ecx,154
+ DD 0xa548f3fc
+
+ mov rsi,r9
+ xor rcx,rcx
+ mov rdx,QWORD[8+rsi]
+ mov r8,QWORD[rsi]
+ mov r9,QWORD[16+rsi]
+ mov r10,QWORD[40+rsi]
+ lea r11,[56+rsi]
+ lea r12,[24+rsi]
+ mov QWORD[32+rsp],r10
+ mov QWORD[40+rsp],r11
+ mov QWORD[48+rsp],r12
+ mov QWORD[56+rsp],rcx
+ call QWORD[__imp_RtlVirtualUnwind]
+
+ mov eax,1
+ add rsp,64
+ popfq
+ pop r15
+ pop r14
+ pop r13
+ pop r12
+ pop rbp
+ pop rbx
+ pop rdi
+ pop rsi
+ DB 0F3h,0C3h ;repret
+
+
+section .pdata rdata align=4
+ALIGN 4
+ DD $L$SEH_begin_vpaes_set_encrypt_key wrt ..imagebase
+ DD $L$SEH_end_vpaes_set_encrypt_key wrt ..imagebase
+ DD $L$SEH_info_vpaes_set_encrypt_key wrt ..imagebase
+
+ DD $L$SEH_begin_vpaes_set_decrypt_key wrt ..imagebase
+ DD $L$SEH_end_vpaes_set_decrypt_key wrt ..imagebase
+ DD $L$SEH_info_vpaes_set_decrypt_key wrt ..imagebase
+
+ DD $L$SEH_begin_vpaes_encrypt wrt ..imagebase
+ DD $L$SEH_end_vpaes_encrypt wrt ..imagebase
+ DD $L$SEH_info_vpaes_encrypt wrt ..imagebase
+
+ DD $L$SEH_begin_vpaes_decrypt wrt ..imagebase
+ DD $L$SEH_end_vpaes_decrypt wrt ..imagebase
+ DD $L$SEH_info_vpaes_decrypt wrt ..imagebase
+
+ DD $L$SEH_begin_vpaes_cbc_encrypt wrt ..imagebase
+ DD $L$SEH_end_vpaes_cbc_encrypt wrt ..imagebase
+ DD $L$SEH_info_vpaes_cbc_encrypt wrt ..imagebase
+
+section .xdata rdata align=8
+ALIGN 8
+$L$SEH_info_vpaes_set_encrypt_key:
+DB 9,0,0,0
+ DD se_handler wrt ..imagebase
+ DD $L$enc_key_body wrt ..imagebase,$L$enc_key_epilogue wrt ..imagebase
+$L$SEH_info_vpaes_set_decrypt_key:
+DB 9,0,0,0
+ DD se_handler wrt ..imagebase
+ DD $L$dec_key_body wrt ..imagebase,$L$dec_key_epilogue wrt ..imagebase
+$L$SEH_info_vpaes_encrypt:
+DB 9,0,0,0
+ DD se_handler wrt ..imagebase
+ DD $L$enc_body wrt ..imagebase,$L$enc_epilogue wrt ..imagebase
+$L$SEH_info_vpaes_decrypt:
+DB 9,0,0,0
+ DD se_handler wrt ..imagebase
+ DD $L$dec_body wrt ..imagebase,$L$dec_epilogue wrt ..imagebase
+$L$SEH_info_vpaes_cbc_encrypt:
+DB 9,0,0,0
+ DD se_handler wrt ..imagebase
+ DD $L$cbc_body wrt ..imagebase,$L$cbc_epilogue wrt ..imagebase