Revert "Roll src/third_party/boringssl/src cb3f04f58..98b4cdba1"

This reverts commit f7a4166c3ee0359f1a7ce96563dd8384376fb484.

Reason for revert: This change results in consistent test failures on Windows and Mac (https://crbug.com/1042657).

Original change's description:
> Roll src/third_party/boringssl/src cb3f04f58..98b4cdba1
> 
> https://boringssl.googlesource.com/boringssl/+log/cb3f04f584a53844546aaa32e949534c453185aa..98b4cdba1e4ede26d845bcae8185ddb5b2feea93
> 
> The following commits have update notes:
>   0df6edff4 Split BN_prime_checks into two constants for generation and validation.
> 
> Bug: none
> Change-Id: Ia16ddb85756277144fc0ea410fb2b31840bf9332
> Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1998162
> Commit-Queue: David Benjamin <davidben@chromium.org>
> Auto-Submit: David Benjamin <davidben@chromium.org>
> Reviewed-by: Steven Valdez <svaldez@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#732261}

TBR=davidben@chromium.org,svaldez@chromium.org

Change-Id: I79ac0bbab74553526588bf2c8d5690f25bac723c
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: 1042657
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2003851
Reviewed-by: Jan Wilken Dörrie <jdoerrie@chromium.org>
Commit-Queue: Jan Wilken Dörrie <jdoerrie@chromium.org>
Cr-Original-Commit-Position: refs/heads/master@{#732365}
Cr-Mirrored-From: https://chromium.googlesource.com/chromium/src
Cr-Mirrored-Commit: a28d963e68e70628e20408e9e1ac478a1ff01931
diff --git a/BUILD.generated.gni b/BUILD.generated.gni
index e399985..f9da444 100644
--- a/BUILD.generated.gni
+++ b/BUILD.generated.gni
@@ -417,6 +417,7 @@
 
 crypto_sources_ios_arm = [
   "ios-arm/crypto/chacha/chacha-armv4.S",
+  "ios-arm/crypto/fipsmodule/aes-armv4.S",
   "ios-arm/crypto/fipsmodule/aesv8-armx32.S",
   "ios-arm/crypto/fipsmodule/armv4-mont.S",
   "ios-arm/crypto/fipsmodule/bsaes-armv7.S",
@@ -444,6 +445,7 @@
 
 crypto_sources_linux_arm = [
   "linux-arm/crypto/chacha/chacha-armv4.S",
+  "linux-arm/crypto/fipsmodule/aes-armv4.S",
   "linux-arm/crypto/fipsmodule/aesv8-armx32.S",
   "linux-arm/crypto/fipsmodule/armv4-mont.S",
   "linux-arm/crypto/fipsmodule/bsaes-armv7.S",
@@ -466,6 +468,7 @@
 
 crypto_sources_linux_x86 = [
   "linux-x86/crypto/chacha/chacha-x86.S",
+  "linux-x86/crypto/fipsmodule/aes-586.S",
   "linux-x86/crypto/fipsmodule/aesni-x86.S",
   "linux-x86/crypto/fipsmodule/bn-586.S",
   "linux-x86/crypto/fipsmodule/co-586.S",
@@ -484,6 +487,7 @@
   "linux-x86_64/crypto/chacha/chacha-x86_64.S",
   "linux-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64.S",
   "linux-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64.S",
+  "linux-x86_64/crypto/fipsmodule/aes-x86_64.S",
   "linux-x86_64/crypto/fipsmodule/aesni-gcm-x86_64.S",
   "linux-x86_64/crypto/fipsmodule/aesni-x86_64.S",
   "linux-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64.S",
@@ -505,6 +509,7 @@
 
 crypto_sources_mac_x86 = [
   "mac-x86/crypto/chacha/chacha-x86.S",
+  "mac-x86/crypto/fipsmodule/aes-586.S",
   "mac-x86/crypto/fipsmodule/aesni-x86.S",
   "mac-x86/crypto/fipsmodule/bn-586.S",
   "mac-x86/crypto/fipsmodule/co-586.S",
@@ -523,6 +528,7 @@
   "mac-x86_64/crypto/chacha/chacha-x86_64.S",
   "mac-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64.S",
   "mac-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64.S",
+  "mac-x86_64/crypto/fipsmodule/aes-x86_64.S",
   "mac-x86_64/crypto/fipsmodule/aesni-gcm-x86_64.S",
   "mac-x86_64/crypto/fipsmodule/aesni-x86_64.S",
   "mac-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64.S",
@@ -543,6 +549,7 @@
 
 crypto_sources_win_x86 = [
   "win-x86/crypto/chacha/chacha-x86.asm",
+  "win-x86/crypto/fipsmodule/aes-586.asm",
   "win-x86/crypto/fipsmodule/aesni-x86.asm",
   "win-x86/crypto/fipsmodule/bn-586.asm",
   "win-x86/crypto/fipsmodule/co-586.asm",
@@ -561,6 +568,7 @@
   "win-x86_64/crypto/chacha/chacha-x86_64.asm",
   "win-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64.asm",
   "win-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64.asm",
+  "win-x86_64/crypto/fipsmodule/aes-x86_64.asm",
   "win-x86_64/crypto/fipsmodule/aesni-gcm-x86_64.asm",
   "win-x86_64/crypto/fipsmodule/aesni-x86_64.asm",
   "win-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64.asm",
diff --git a/BUILD.generated_tests.gni b/BUILD.generated_tests.gni
index 8670dfd..3a0ea64 100644
--- a/BUILD.generated_tests.gni
+++ b/BUILD.generated_tests.gni
@@ -18,7 +18,6 @@
   "src/ssl/test/fuzzer.h",
   "src/ssl/test/fuzzer_tags.h",
   "src/ssl/test/handshake_util.h",
-  "src/ssl/test/mock_quic_transport.h",
   "src/ssl/test/packeted_bio.h",
   "src/ssl/test/settings_writer.h",
   "src/ssl/test/test_config.h",
@@ -166,39 +165,8 @@
   "src/third_party/wycheproof_testvectors/ecdsa_secp384r1_sha512_test.txt",
   "src/third_party/wycheproof_testvectors/ecdsa_secp521r1_sha512_test.txt",
   "src/third_party/wycheproof_testvectors/eddsa_test.txt",
-  "src/third_party/wycheproof_testvectors/hkdf_sha1_test.txt",
-  "src/third_party/wycheproof_testvectors/hkdf_sha256_test.txt",
-  "src/third_party/wycheproof_testvectors/hkdf_sha384_test.txt",
-  "src/third_party/wycheproof_testvectors/hkdf_sha512_test.txt",
-  "src/third_party/wycheproof_testvectors/hmac_sha1_test.txt",
-  "src/third_party/wycheproof_testvectors/hmac_sha224_test.txt",
-  "src/third_party/wycheproof_testvectors/hmac_sha256_test.txt",
-  "src/third_party/wycheproof_testvectors/hmac_sha384_test.txt",
-  "src/third_party/wycheproof_testvectors/hmac_sha512_test.txt",
   "src/third_party/wycheproof_testvectors/kw_test.txt",
   "src/third_party/wycheproof_testvectors/kwp_test.txt",
-  "src/third_party/wycheproof_testvectors/primality_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_oaep_2048_sha1_mgf1sha1_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_oaep_2048_sha224_mgf1sha1_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_oaep_2048_sha224_mgf1sha224_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_oaep_2048_sha256_mgf1sha1_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_oaep_2048_sha256_mgf1sha256_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_oaep_2048_sha384_mgf1sha1_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_oaep_2048_sha384_mgf1sha384_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_oaep_2048_sha512_mgf1sha1_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_oaep_2048_sha512_mgf1sha512_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_oaep_3072_sha256_mgf1sha1_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_oaep_3072_sha256_mgf1sha256_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_oaep_3072_sha512_mgf1sha1_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_oaep_3072_sha512_mgf1sha512_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_oaep_4096_sha256_mgf1sha1_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_oaep_4096_sha256_mgf1sha256_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_oaep_4096_sha512_mgf1sha1_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_oaep_4096_sha512_mgf1sha512_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_oaep_misc_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_pkcs1_2048_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_pkcs1_3072_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_pkcs1_4096_test.txt",
   "src/third_party/wycheproof_testvectors/rsa_pss_2048_sha1_mgf1_20_test.txt",
   "src/third_party/wycheproof_testvectors/rsa_pss_2048_sha256_mgf1_0_test.txt",
   "src/third_party/wycheproof_testvectors/rsa_pss_2048_sha256_mgf1_32_test.txt",
@@ -206,19 +174,8 @@
   "src/third_party/wycheproof_testvectors/rsa_pss_4096_sha256_mgf1_32_test.txt",
   "src/third_party/wycheproof_testvectors/rsa_pss_4096_sha512_mgf1_32_test.txt",
   "src/third_party/wycheproof_testvectors/rsa_pss_misc_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_sig_gen_misc_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_signature_2048_sha224_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_signature_2048_sha256_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_signature_2048_sha384_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_signature_2048_sha512_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_signature_3072_sha256_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_signature_3072_sha384_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_signature_3072_sha512_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_signature_4096_sha384_test.txt",
-  "src/third_party/wycheproof_testvectors/rsa_signature_4096_sha512_test.txt",
   "src/third_party/wycheproof_testvectors/rsa_signature_test.txt",
   "src/third_party/wycheproof_testvectors/x25519_test.txt",
-  "src/third_party/wycheproof_testvectors/xchacha20_poly1305_test.txt",
 ]
 
 ssl_test_sources = [
diff --git a/ios-arm/crypto/fipsmodule/aes-armv4.S b/ios-arm/crypto/fipsmodule/aes-armv4.S
new file mode 100644
index 0000000..63e2ec7
--- /dev/null
+++ b/ios-arm/crypto/fipsmodule/aes-armv4.S
@@ -0,0 +1,1233 @@
+// This file is generated from a similarly-named Perl script in the BoringSSL
+// source tree. Do not edit by hand.
+
+#if !defined(__has_feature)
+#define __has_feature(x) 0
+#endif
+#if __has_feature(memory_sanitizer) && !defined(OPENSSL_NO_ASM)
+#define OPENSSL_NO_ASM
+#endif
+
+#if !defined(OPENSSL_NO_ASM)
+#if defined(BORINGSSL_PREFIX)
+#include <boringssl_prefix_symbols_asm.h>
+#endif
+@ Copyright 2007-2016 The OpenSSL Project Authors. All Rights Reserved.
+@
+@ Licensed under the OpenSSL license (the "License").  You may not use
+@ this file except in compliance with the License.  You can obtain a copy
+@ in the file LICENSE in the source distribution or at
+@ https://www.openssl.org/source/license.html
+
+
+@ ====================================================================
+@ Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
+@ project. The module is, however, dual licensed under OpenSSL and
+@ CRYPTOGAMS licenses depending on where you obtain it. For further
+@ details see http://www.openssl.org/~appro/cryptogams/.
+@ ====================================================================
+
+@ AES for ARMv4
+
+@ January 2007.
+@
+@ The code uses a single 1KB S-box and is more than twice as fast as code
+@ generated by gcc-3.4.1. This is thanks to a unique feature of the ARMv4
+@ ISA, which can merge a logical or arithmetic operation with a shift or
+@ rotate in one instruction and emit the combined result every cycle. The
+@ module is endian-neutral. Performance is ~42 cycles per byte with a
+@ 128-bit key [on a single-issue XScale PXA250 core].
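+@
+@ A minimal sketch of that merged-rotate trick as it appears in the round
+@ loop below: the Te1/Te2/Te3 lookups are synthesized from the single
+@ table by folding a rotate of the loaded word into the combining
+@ instruction, e.g.
+@
+@	ldr	r7,[r10,r7,lsl#2]	@ t = Te[i]
+@	eor	r0,r0,r7,ror#8		@ s0 ^= ROTATE(t,8), i.e. a Te1 lookup
+@
+@ so no rotated copies of the table are needed.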
+
+@ May 2007.
+@
+@ AES_set_[en|de]crypt_key is added.
+
+@ July 2010.
+@
+@ Rescheduling for dual-issue pipeline resulted in 12% improvement on
+@ Cortex A8 core and ~25 cycles per byte processed with 128-bit key.
+
+@ February 2011.
+@
+@ Profiler-assisted and platform-specific optimization resulted in 16%
+@ improvement on Cortex A8 core and ~21.5 cycles per byte.
+
+#ifndef __KERNEL__
+# include <openssl/arm_arch.h>
+#else
+# define __ARM_ARCH__ __LINUX_ARM_ARCH__
+#endif
+
+@ Silence ARMv8 deprecated IT instruction warnings. This file is used by both
+@ ARMv7 and ARMv8 processors and does not use ARMv8 instructions. (ARMv8 AES
+@ instructions are in aesv8-armx.pl.)
+
+
+.text
+#if defined(__thumb2__) && !defined(__APPLE__)
+.syntax	unified
+.thumb
+#else
+.code	32
+#undef __thumb2__
+#endif
+
+
+.align	5
+AES_Te:
+.word	0xc66363a5, 0xf87c7c84, 0xee777799, 0xf67b7b8d
+.word	0xfff2f20d, 0xd66b6bbd, 0xde6f6fb1, 0x91c5c554
+.word	0x60303050, 0x02010103, 0xce6767a9, 0x562b2b7d
+.word	0xe7fefe19, 0xb5d7d762, 0x4dababe6, 0xec76769a
+.word	0x8fcaca45, 0x1f82829d, 0x89c9c940, 0xfa7d7d87
+.word	0xeffafa15, 0xb25959eb, 0x8e4747c9, 0xfbf0f00b
+.word	0x41adadec, 0xb3d4d467, 0x5fa2a2fd, 0x45afafea
+.word	0x239c9cbf, 0x53a4a4f7, 0xe4727296, 0x9bc0c05b
+.word	0x75b7b7c2, 0xe1fdfd1c, 0x3d9393ae, 0x4c26266a
+.word	0x6c36365a, 0x7e3f3f41, 0xf5f7f702, 0x83cccc4f
+.word	0x6834345c, 0x51a5a5f4, 0xd1e5e534, 0xf9f1f108
+.word	0xe2717193, 0xabd8d873, 0x62313153, 0x2a15153f
+.word	0x0804040c, 0x95c7c752, 0x46232365, 0x9dc3c35e
+.word	0x30181828, 0x379696a1, 0x0a05050f, 0x2f9a9ab5
+.word	0x0e070709, 0x24121236, 0x1b80809b, 0xdfe2e23d
+.word	0xcdebeb26, 0x4e272769, 0x7fb2b2cd, 0xea75759f
+.word	0x1209091b, 0x1d83839e, 0x582c2c74, 0x341a1a2e
+.word	0x361b1b2d, 0xdc6e6eb2, 0xb45a5aee, 0x5ba0a0fb
+.word	0xa45252f6, 0x763b3b4d, 0xb7d6d661, 0x7db3b3ce
+.word	0x5229297b, 0xdde3e33e, 0x5e2f2f71, 0x13848497
+.word	0xa65353f5, 0xb9d1d168, 0x00000000, 0xc1eded2c
+.word	0x40202060, 0xe3fcfc1f, 0x79b1b1c8, 0xb65b5bed
+.word	0xd46a6abe, 0x8dcbcb46, 0x67bebed9, 0x7239394b
+.word	0x944a4ade, 0x984c4cd4, 0xb05858e8, 0x85cfcf4a
+.word	0xbbd0d06b, 0xc5efef2a, 0x4faaaae5, 0xedfbfb16
+.word	0x864343c5, 0x9a4d4dd7, 0x66333355, 0x11858594
+.word	0x8a4545cf, 0xe9f9f910, 0x04020206, 0xfe7f7f81
+.word	0xa05050f0, 0x783c3c44, 0x259f9fba, 0x4ba8a8e3
+.word	0xa25151f3, 0x5da3a3fe, 0x804040c0, 0x058f8f8a
+.word	0x3f9292ad, 0x219d9dbc, 0x70383848, 0xf1f5f504
+.word	0x63bcbcdf, 0x77b6b6c1, 0xafdada75, 0x42212163
+.word	0x20101030, 0xe5ffff1a, 0xfdf3f30e, 0xbfd2d26d
+.word	0x81cdcd4c, 0x180c0c14, 0x26131335, 0xc3ecec2f
+.word	0xbe5f5fe1, 0x359797a2, 0x884444cc, 0x2e171739
+.word	0x93c4c457, 0x55a7a7f2, 0xfc7e7e82, 0x7a3d3d47
+.word	0xc86464ac, 0xba5d5de7, 0x3219192b, 0xe6737395
+.word	0xc06060a0, 0x19818198, 0x9e4f4fd1, 0xa3dcdc7f
+.word	0x44222266, 0x542a2a7e, 0x3b9090ab, 0x0b888883
+.word	0x8c4646ca, 0xc7eeee29, 0x6bb8b8d3, 0x2814143c
+.word	0xa7dede79, 0xbc5e5ee2, 0x160b0b1d, 0xaddbdb76
+.word	0xdbe0e03b, 0x64323256, 0x743a3a4e, 0x140a0a1e
+.word	0x924949db, 0x0c06060a, 0x4824246c, 0xb85c5ce4
+.word	0x9fc2c25d, 0xbdd3d36e, 0x43acacef, 0xc46262a6
+.word	0x399191a8, 0x319595a4, 0xd3e4e437, 0xf279798b
+.word	0xd5e7e732, 0x8bc8c843, 0x6e373759, 0xda6d6db7
+.word	0x018d8d8c, 0xb1d5d564, 0x9c4e4ed2, 0x49a9a9e0
+.word	0xd86c6cb4, 0xac5656fa, 0xf3f4f407, 0xcfeaea25
+.word	0xca6565af, 0xf47a7a8e, 0x47aeaee9, 0x10080818
+.word	0x6fbabad5, 0xf0787888, 0x4a25256f, 0x5c2e2e72
+.word	0x381c1c24, 0x57a6a6f1, 0x73b4b4c7, 0x97c6c651
+.word	0xcbe8e823, 0xa1dddd7c, 0xe874749c, 0x3e1f1f21
+.word	0x964b4bdd, 0x61bdbddc, 0x0d8b8b86, 0x0f8a8a85
+.word	0xe0707090, 0x7c3e3e42, 0x71b5b5c4, 0xcc6666aa
+.word	0x904848d8, 0x06030305, 0xf7f6f601, 0x1c0e0e12
+.word	0xc26161a3, 0x6a35355f, 0xae5757f9, 0x69b9b9d0
+.word	0x17868691, 0x99c1c158, 0x3a1d1d27, 0x279e9eb9
+.word	0xd9e1e138, 0xebf8f813, 0x2b9898b3, 0x22111133
+.word	0xd26969bb, 0xa9d9d970, 0x078e8e89, 0x339494a7
+.word	0x2d9b9bb6, 0x3c1e1e22, 0x15878792, 0xc9e9e920
+.word	0x87cece49, 0xaa5555ff, 0x50282878, 0xa5dfdf7a
+.word	0x038c8c8f, 0x59a1a1f8, 0x09898980, 0x1a0d0d17
+.word	0x65bfbfda, 0xd7e6e631, 0x844242c6, 0xd06868b8
+.word	0x824141c3, 0x299999b0, 0x5a2d2d77, 0x1e0f0f11
+.word	0x7bb0b0cb, 0xa85454fc, 0x6dbbbbd6, 0x2c16163a
+@ Te4[256]
+.byte	0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5
+.byte	0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76
+.byte	0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0
+.byte	0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0
+.byte	0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc
+.byte	0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15
+.byte	0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a
+.byte	0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75
+.byte	0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0
+.byte	0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84
+.byte	0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b
+.byte	0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf
+.byte	0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85
+.byte	0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8
+.byte	0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5
+.byte	0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2
+.byte	0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17
+.byte	0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73
+.byte	0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88
+.byte	0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb
+.byte	0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c
+.byte	0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79
+.byte	0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9
+.byte	0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08
+.byte	0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6
+.byte	0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a
+.byte	0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e
+.byte	0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e
+.byte	0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94
+.byte	0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf
+.byte	0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68
+.byte	0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16
+@ rcon[]
+.word	0x01000000, 0x02000000, 0x04000000, 0x08000000
+.word	0x10000000, 0x20000000, 0x40000000, 0x80000000
+.word	0x1B000000, 0x36000000, 0, 0, 0, 0, 0, 0
+
+
+@ void aes_nohw_encrypt(const unsigned char *in, unsigned char *out,
+@ 		                  const AES_KEY *key) {
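+@
+@ Arguments arrive per the AAPCS as r0 = in, r1 = out, r2 = key; the out
+@ pointer is saved by the stmdb below and popped back (into r12) once
+@ _armv4_AES_encrypt has returned the state in r0-r3.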
+.globl	_aes_nohw_encrypt
+.private_extern	_aes_nohw_encrypt
+#ifdef __thumb2__
+.thumb_func	_aes_nohw_encrypt
+#endif
+.align	5
+_aes_nohw_encrypt:
+#ifndef	__thumb2__
+	sub	r3,pc,#8		@ _aes_nohw_encrypt
+#else
+	adr	r3,.
+#endif
+	stmdb	sp!,{r1,r4-r12,lr}
+#if defined(__thumb2__) || defined(__APPLE__)
+	adr	r10,AES_Te
+#else
+	sub	r10,r3,#_aes_nohw_encrypt-AES_Te	@ Te
+#endif
+	mov	r12,r0		@ inp
+	mov	r11,r2
+#if __ARM_ARCH__<7
+	ldrb	r0,[r12,#3]	@ load input data in endian-neutral
+	ldrb	r4,[r12,#2]	@ manner...
+	ldrb	r5,[r12,#1]
+	ldrb	r6,[r12,#0]
+	orr	r0,r0,r4,lsl#8
+	ldrb	r1,[r12,#7]
+	orr	r0,r0,r5,lsl#16
+	ldrb	r4,[r12,#6]
+	orr	r0,r0,r6,lsl#24
+	ldrb	r5,[r12,#5]
+	ldrb	r6,[r12,#4]
+	orr	r1,r1,r4,lsl#8
+	ldrb	r2,[r12,#11]
+	orr	r1,r1,r5,lsl#16
+	ldrb	r4,[r12,#10]
+	orr	r1,r1,r6,lsl#24
+	ldrb	r5,[r12,#9]
+	ldrb	r6,[r12,#8]
+	orr	r2,r2,r4,lsl#8
+	ldrb	r3,[r12,#15]
+	orr	r2,r2,r5,lsl#16
+	ldrb	r4,[r12,#14]
+	orr	r2,r2,r6,lsl#24
+	ldrb	r5,[r12,#13]
+	ldrb	r6,[r12,#12]
+	orr	r3,r3,r4,lsl#8
+	orr	r3,r3,r5,lsl#16
+	orr	r3,r3,r6,lsl#24
+#else
+	ldr	r0,[r12,#0]
+	ldr	r1,[r12,#4]
+	ldr	r2,[r12,#8]
+	ldr	r3,[r12,#12]
+#ifdef __ARMEL__
+	rev	r0,r0
+	rev	r1,r1
+	rev	r2,r2
+	rev	r3,r3
+#endif
+#endif
+	bl	_armv4_AES_encrypt
+
+	ldr	r12,[sp],#4		@ pop out
+#if __ARM_ARCH__>=7
+#ifdef __ARMEL__
+	rev	r0,r0
+	rev	r1,r1
+	rev	r2,r2
+	rev	r3,r3
+#endif
+	str	r0,[r12,#0]
+	str	r1,[r12,#4]
+	str	r2,[r12,#8]
+	str	r3,[r12,#12]
+#else
+	mov	r4,r0,lsr#24		@ write output in endian-neutral
+	mov	r5,r0,lsr#16		@ manner...
+	mov	r6,r0,lsr#8
+	strb	r4,[r12,#0]
+	strb	r5,[r12,#1]
+	mov	r4,r1,lsr#24
+	strb	r6,[r12,#2]
+	mov	r5,r1,lsr#16
+	strb	r0,[r12,#3]
+	mov	r6,r1,lsr#8
+	strb	r4,[r12,#4]
+	strb	r5,[r12,#5]
+	mov	r4,r2,lsr#24
+	strb	r6,[r12,#6]
+	mov	r5,r2,lsr#16
+	strb	r1,[r12,#7]
+	mov	r6,r2,lsr#8
+	strb	r4,[r12,#8]
+	strb	r5,[r12,#9]
+	mov	r4,r3,lsr#24
+	strb	r6,[r12,#10]
+	mov	r5,r3,lsr#16
+	strb	r2,[r12,#11]
+	mov	r6,r3,lsr#8
+	strb	r4,[r12,#12]
+	strb	r5,[r12,#13]
+	strb	r6,[r12,#14]
+	strb	r3,[r12,#15]
+#endif
+#if __ARM_ARCH__>=5
+	ldmia	sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,pc}
+#else
+	ldmia	sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr}
+	tst	lr,#1
+	moveq	pc,lr			@ be binary compatible with V4, yet
+.word	0xe12fff1e			@ interoperable with Thumb ISA:-)
+#endif
+
+
+#ifdef __thumb2__
+.thumb_func	_armv4_AES_encrypt
+#endif
+.align	2
+_armv4_AES_encrypt:
+	str	lr,[sp,#-4]!		@ push lr
+	ldmia	r11!,{r4,r5,r6,r7}
+	eor	r0,r0,r4
+	ldr	r12,[r11,#240-16]
+	eor	r1,r1,r5
+	eor	r2,r2,r6
+	eor	r3,r3,r7
+	sub	r12,r12,#1
+	mov	lr,#255
+
+	and	r7,lr,r0
+	and	r8,lr,r0,lsr#8
+	and	r9,lr,r0,lsr#16
+	mov	r0,r0,lsr#24
+Lenc_loop:
+	ldr	r4,[r10,r7,lsl#2]	@ Te3[s0>>0]
+	and	r7,lr,r1,lsr#16	@ i0
+	ldr	r5,[r10,r8,lsl#2]	@ Te2[s0>>8]
+	and	r8,lr,r1
+	ldr	r6,[r10,r9,lsl#2]	@ Te1[s0>>16]
+	and	r9,lr,r1,lsr#8
+	ldr	r0,[r10,r0,lsl#2]	@ Te0[s0>>24]
+	mov	r1,r1,lsr#24
+
+	ldr	r7,[r10,r7,lsl#2]	@ Te1[s1>>16]
+	ldr	r8,[r10,r8,lsl#2]	@ Te3[s1>>0]
+	ldr	r9,[r10,r9,lsl#2]	@ Te2[s1>>8]
+	eor	r0,r0,r7,ror#8
+	ldr	r1,[r10,r1,lsl#2]	@ Te0[s1>>24]
+	and	r7,lr,r2,lsr#8	@ i0
+	eor	r5,r5,r8,ror#8
+	and	r8,lr,r2,lsr#16	@ i1
+	eor	r6,r6,r9,ror#8
+	and	r9,lr,r2
+	ldr	r7,[r10,r7,lsl#2]	@ Te2[s2>>8]
+	eor	r1,r1,r4,ror#24
+	ldr	r8,[r10,r8,lsl#2]	@ Te1[s2>>16]
+	mov	r2,r2,lsr#24
+
+	ldr	r9,[r10,r9,lsl#2]	@ Te3[s2>>0]
+	eor	r0,r0,r7,ror#16
+	ldr	r2,[r10,r2,lsl#2]	@ Te0[s2>>24]
+	and	r7,lr,r3		@ i0
+	eor	r1,r1,r8,ror#8
+	and	r8,lr,r3,lsr#8	@ i1
+	eor	r6,r6,r9,ror#16
+	and	r9,lr,r3,lsr#16	@ i2
+	ldr	r7,[r10,r7,lsl#2]	@ Te3[s3>>0]
+	eor	r2,r2,r5,ror#16
+	ldr	r8,[r10,r8,lsl#2]	@ Te2[s3>>8]
+	mov	r3,r3,lsr#24
+
+	ldr	r9,[r10,r9,lsl#2]	@ Te1[s3>>16]
+	eor	r0,r0,r7,ror#24
+	ldr	r7,[r11],#16
+	eor	r1,r1,r8,ror#16
+	ldr	r3,[r10,r3,lsl#2]	@ Te0[s3>>24]
+	eor	r2,r2,r9,ror#8
+	ldr	r4,[r11,#-12]
+	eor	r3,r3,r6,ror#8
+
+	ldr	r5,[r11,#-8]
+	eor	r0,r0,r7
+	ldr	r6,[r11,#-4]
+	and	r7,lr,r0
+	eor	r1,r1,r4
+	and	r8,lr,r0,lsr#8
+	eor	r2,r2,r5
+	and	r9,lr,r0,lsr#16
+	eor	r3,r3,r6
+	mov	r0,r0,lsr#24
+
+	subs	r12,r12,#1
+	bne	Lenc_loop
+
+	add	r10,r10,#2
+
+	ldrb	r4,[r10,r7,lsl#2]	@ Te4[s0>>0]
+	and	r7,lr,r1,lsr#16	@ i0
+	ldrb	r5,[r10,r8,lsl#2]	@ Te4[s0>>8]
+	and	r8,lr,r1
+	ldrb	r6,[r10,r9,lsl#2]	@ Te4[s0>>16]
+	and	r9,lr,r1,lsr#8
+	ldrb	r0,[r10,r0,lsl#2]	@ Te4[s0>>24]
+	mov	r1,r1,lsr#24
+
+	ldrb	r7,[r10,r7,lsl#2]	@ Te4[s1>>16]
+	ldrb	r8,[r10,r8,lsl#2]	@ Te4[s1>>0]
+	ldrb	r9,[r10,r9,lsl#2]	@ Te4[s1>>8]
+	eor	r0,r7,r0,lsl#8
+	ldrb	r1,[r10,r1,lsl#2]	@ Te4[s1>>24]
+	and	r7,lr,r2,lsr#8	@ i0
+	eor	r5,r8,r5,lsl#8
+	and	r8,lr,r2,lsr#16	@ i1
+	eor	r6,r9,r6,lsl#8
+	and	r9,lr,r2
+	ldrb	r7,[r10,r7,lsl#2]	@ Te4[s2>>8]
+	eor	r1,r4,r1,lsl#24
+	ldrb	r8,[r10,r8,lsl#2]	@ Te4[s2>>16]
+	mov	r2,r2,lsr#24
+
+	ldrb	r9,[r10,r9,lsl#2]	@ Te4[s2>>0]
+	eor	r0,r7,r0,lsl#8
+	ldrb	r2,[r10,r2,lsl#2]	@ Te4[s2>>24]
+	and	r7,lr,r3		@ i0
+	eor	r1,r1,r8,lsl#16
+	and	r8,lr,r3,lsr#8	@ i1
+	eor	r6,r9,r6,lsl#8
+	and	r9,lr,r3,lsr#16	@ i2
+	ldrb	r7,[r10,r7,lsl#2]	@ Te4[s3>>0]
+	eor	r2,r5,r2,lsl#24
+	ldrb	r8,[r10,r8,lsl#2]	@ Te4[s3>>8]
+	mov	r3,r3,lsr#24
+
+	ldrb	r9,[r10,r9,lsl#2]	@ Te4[s3>>16]
+	eor	r0,r7,r0,lsl#8
+	ldr	r7,[r11,#0]
+	ldrb	r3,[r10,r3,lsl#2]	@ Te4[s3>>24]
+	eor	r1,r1,r8,lsl#8
+	ldr	r4,[r11,#4]
+	eor	r2,r2,r9,lsl#16
+	ldr	r5,[r11,#8]
+	eor	r3,r6,r3,lsl#24
+	ldr	r6,[r11,#12]
+
+	eor	r0,r0,r7
+	eor	r1,r1,r4
+	eor	r2,r2,r5
+	eor	r3,r3,r6
+
+	sub	r10,r10,#2
+	ldr	pc,[sp],#4		@ pop and return
+
+
+.globl	_aes_nohw_set_encrypt_key
+.private_extern	_aes_nohw_set_encrypt_key
+#ifdef __thumb2__
+.thumb_func	_aes_nohw_set_encrypt_key
+#endif
+.align	5
+_aes_nohw_set_encrypt_key:
+_armv4_AES_set_encrypt_key:
+#ifndef	__thumb2__
+	sub	r3,pc,#8		@ _aes_nohw_set_encrypt_key
+#else
+	adr	r3,.
+#endif
+	teq	r0,#0
+#ifdef	__thumb2__
+	itt	eq			@ Thumb2 thing, sanity check in ARM
+#endif
+	moveq	r0,#-1
+	beq	Labrt
+	teq	r2,#0
+#ifdef	__thumb2__
+	itt	eq			@ Thumb2 thing, sanity check in ARM
+#endif
+	moveq	r0,#-1
+	beq	Labrt
+
+	teq	r1,#128
+	beq	Lok
+	teq	r1,#192
+	beq	Lok
+	teq	r1,#256
+#ifdef	__thumb2__
+	itt	ne			@ Thumb2 thing, sanity check in ARM
+#endif
+	movne	r0,#-1
+	bne	Labrt
+
+Lok:	stmdb	sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr}
+	mov	r12,r0		@ inp
+	mov	lr,r1			@ bits
+	mov	r11,r2			@ key
+
+#if defined(__thumb2__) || defined(__APPLE__)
+	adr	r10,AES_Te+1024				@ Te4
+#else
+	sub	r10,r3,#_armv4_AES_set_encrypt_key-AES_Te-1024	@ Te4
+#endif
+
+#if __ARM_ARCH__<7
+	ldrb	r0,[r12,#3]	@ load input data in endian-neutral
+	ldrb	r4,[r12,#2]	@ manner...
+	ldrb	r5,[r12,#1]
+	ldrb	r6,[r12,#0]
+	orr	r0,r0,r4,lsl#8
+	ldrb	r1,[r12,#7]
+	orr	r0,r0,r5,lsl#16
+	ldrb	r4,[r12,#6]
+	orr	r0,r0,r6,lsl#24
+	ldrb	r5,[r12,#5]
+	ldrb	r6,[r12,#4]
+	orr	r1,r1,r4,lsl#8
+	ldrb	r2,[r12,#11]
+	orr	r1,r1,r5,lsl#16
+	ldrb	r4,[r12,#10]
+	orr	r1,r1,r6,lsl#24
+	ldrb	r5,[r12,#9]
+	ldrb	r6,[r12,#8]
+	orr	r2,r2,r4,lsl#8
+	ldrb	r3,[r12,#15]
+	orr	r2,r2,r5,lsl#16
+	ldrb	r4,[r12,#14]
+	orr	r2,r2,r6,lsl#24
+	ldrb	r5,[r12,#13]
+	ldrb	r6,[r12,#12]
+	orr	r3,r3,r4,lsl#8
+	str	r0,[r11],#16
+	orr	r3,r3,r5,lsl#16
+	str	r1,[r11,#-12]
+	orr	r3,r3,r6,lsl#24
+	str	r2,[r11,#-8]
+	str	r3,[r11,#-4]
+#else
+	ldr	r0,[r12,#0]
+	ldr	r1,[r12,#4]
+	ldr	r2,[r12,#8]
+	ldr	r3,[r12,#12]
+#ifdef __ARMEL__
+	rev	r0,r0
+	rev	r1,r1
+	rev	r2,r2
+	rev	r3,r3
+#endif
+	str	r0,[r11],#16
+	str	r1,[r11,#-12]
+	str	r2,[r11,#-8]
+	str	r3,[r11,#-4]
+#endif
+
+	teq	lr,#128
+	bne	Lnot128
+	mov	r12,#10
+	str	r12,[r11,#240-16]
+	add	r6,r10,#256			@ rcon
+	mov	lr,#255
+
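+@ Each pass below is one FIPS-197 expansion step,
+@	rk[4] = rk[0] ^ SubWord(RotWord(rk[3])) ^ rcon[i],
+@ with the four bytes of r3 run through Te4 and reassembled rotated one
+@ byte to the left before the eor with the rcon word.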
+L128_loop:
+	and	r5,lr,r3,lsr#24
+	and	r7,lr,r3,lsr#16
+	ldrb	r5,[r10,r5]
+	and	r8,lr,r3,lsr#8
+	ldrb	r7,[r10,r7]
+	and	r9,lr,r3
+	ldrb	r8,[r10,r8]
+	orr	r5,r5,r7,lsl#24
+	ldrb	r9,[r10,r9]
+	orr	r5,r5,r8,lsl#16
+	ldr	r4,[r6],#4			@ rcon[i++]
+	orr	r5,r5,r9,lsl#8
+	eor	r5,r5,r4
+	eor	r0,r0,r5			@ rk[4]=rk[0]^...
+	eor	r1,r1,r0			@ rk[5]=rk[1]^rk[4]
+	str	r0,[r11],#16
+	eor	r2,r2,r1			@ rk[6]=rk[2]^rk[5]
+	str	r1,[r11,#-12]
+	eor	r3,r3,r2			@ rk[7]=rk[3]^rk[6]
+	str	r2,[r11,#-8]
+	subs	r12,r12,#1
+	str	r3,[r11,#-4]
+	bne	L128_loop
+	sub	r2,r11,#176
+	b	Ldone
+
+Lnot128:
+#if __ARM_ARCH__<7
+	ldrb	r8,[r12,#19]
+	ldrb	r4,[r12,#18]
+	ldrb	r5,[r12,#17]
+	ldrb	r6,[r12,#16]
+	orr	r8,r8,r4,lsl#8
+	ldrb	r9,[r12,#23]
+	orr	r8,r8,r5,lsl#16
+	ldrb	r4,[r12,#22]
+	orr	r8,r8,r6,lsl#24
+	ldrb	r5,[r12,#21]
+	ldrb	r6,[r12,#20]
+	orr	r9,r9,r4,lsl#8
+	orr	r9,r9,r5,lsl#16
+	str	r8,[r11],#8
+	orr	r9,r9,r6,lsl#24
+	str	r9,[r11,#-4]
+#else
+	ldr	r8,[r12,#16]
+	ldr	r9,[r12,#20]
+#ifdef __ARMEL__
+	rev	r8,r8
+	rev	r9,r9
+#endif
+	str	r8,[r11],#8
+	str	r9,[r11,#-4]
+#endif
+
+	teq	lr,#192
+	bne	Lnot192
+	mov	r12,#12
+	str	r12,[r11,#240-24]
+	add	r6,r10,#256			@ rcon
+	mov	lr,#255
+	mov	r12,#8
+
+L192_loop:
+	and	r5,lr,r9,lsr#24
+	and	r7,lr,r9,lsr#16
+	ldrb	r5,[r10,r5]
+	and	r8,lr,r9,lsr#8
+	ldrb	r7,[r10,r7]
+	and	r9,lr,r9
+	ldrb	r8,[r10,r8]
+	orr	r5,r5,r7,lsl#24
+	ldrb	r9,[r10,r9]
+	orr	r5,r5,r8,lsl#16
+	ldr	r4,[r6],#4			@ rcon[i++]
+	orr	r5,r5,r9,lsl#8
+	eor	r9,r5,r4
+	eor	r0,r0,r9			@ rk[6]=rk[0]^...
+	eor	r1,r1,r0			@ rk[7]=rk[1]^rk[6]
+	str	r0,[r11],#24
+	eor	r2,r2,r1			@ rk[8]=rk[2]^rk[7]
+	str	r1,[r11,#-20]
+	eor	r3,r3,r2			@ rk[9]=rk[3]^rk[8]
+	str	r2,[r11,#-16]
+	subs	r12,r12,#1
+	str	r3,[r11,#-12]
+#ifdef	__thumb2__
+	itt	eq				@ Thumb2 thing, sanity check in ARM
+#endif
+	subeq	r2,r11,#216
+	beq	Ldone
+
+	ldr	r7,[r11,#-32]
+	ldr	r8,[r11,#-28]
+	eor	r7,r7,r3			@ rk[10]=rk[4]^rk[9]
+	eor	r9,r8,r7			@ rk[11]=rk[5]^rk[10]
+	str	r7,[r11,#-8]
+	str	r9,[r11,#-4]
+	b	L192_loop
+
+Lnot192:
+#if __ARM_ARCH__<7
+	ldrb	r8,[r12,#27]
+	ldrb	r4,[r12,#26]
+	ldrb	r5,[r12,#25]
+	ldrb	r6,[r12,#24]
+	orr	r8,r8,r4,lsl#8
+	ldrb	r9,[r12,#31]
+	orr	r8,r8,r5,lsl#16
+	ldrb	r4,[r12,#30]
+	orr	r8,r8,r6,lsl#24
+	ldrb	r5,[r12,#29]
+	ldrb	r6,[r12,#28]
+	orr	r9,r9,r4,lsl#8
+	orr	r9,r9,r5,lsl#16
+	str	r8,[r11],#8
+	orr	r9,r9,r6,lsl#24
+	str	r9,[r11,#-4]
+#else
+	ldr	r8,[r12,#24]
+	ldr	r9,[r12,#28]
+#ifdef __ARMEL__
+	rev	r8,r8
+	rev	r9,r9
+#endif
+	str	r8,[r11],#8
+	str	r9,[r11,#-4]
+#endif
+
+	mov	r12,#14
+	str	r12,[r11,#240-32]
+	add	r6,r10,#256			@ rcon
+	mov	lr,#255
+	mov	r12,#7
+
+L256_loop:
+	and	r5,lr,r9,lsr#24
+	and	r7,lr,r9,lsr#16
+	ldrb	r5,[r10,r5]
+	and	r8,lr,r9,lsr#8
+	ldrb	r7,[r10,r7]
+	and	r9,lr,r9
+	ldrb	r8,[r10,r8]
+	orr	r5,r5,r7,lsl#24
+	ldrb	r9,[r10,r9]
+	orr	r5,r5,r8,lsl#16
+	ldr	r4,[r6],#4			@ rcon[i++]
+	orr	r5,r5,r9,lsl#8
+	eor	r9,r5,r4
+	eor	r0,r0,r9			@ rk[8]=rk[0]^...
+	eor	r1,r1,r0			@ rk[9]=rk[1]^rk[8]
+	str	r0,[r11],#32
+	eor	r2,r2,r1			@ rk[10]=rk[2]^rk[9]
+	str	r1,[r11,#-28]
+	eor	r3,r3,r2			@ rk[11]=rk[3]^rk[10]
+	str	r2,[r11,#-24]
+	subs	r12,r12,#1
+	str	r3,[r11,#-20]
+#ifdef	__thumb2__
+	itt	eq				@ Thumb2 thing, sanity check in ARM
+#endif
+	subeq	r2,r11,#256
+	beq	Ldone
+
+	and	r5,lr,r3
+	and	r7,lr,r3,lsr#8
+	ldrb	r5,[r10,r5]
+	and	r8,lr,r3,lsr#16
+	ldrb	r7,[r10,r7]
+	and	r9,lr,r3,lsr#24
+	ldrb	r8,[r10,r8]
+	orr	r5,r5,r7,lsl#8
+	ldrb	r9,[r10,r9]
+	orr	r5,r5,r8,lsl#16
+	ldr	r4,[r11,#-48]
+	orr	r5,r5,r9,lsl#24
+
+	ldr	r7,[r11,#-44]
+	ldr	r8,[r11,#-40]
+	eor	r4,r4,r5			@ rk[12]=rk[4]^...
+	ldr	r9,[r11,#-36]
+	eor	r7,r7,r4			@ rk[13]=rk[5]^rk[12]
+	str	r4,[r11,#-16]
+	eor	r8,r8,r7			@ rk[14]=rk[6]^rk[13]
+	str	r7,[r11,#-12]
+	eor	r9,r9,r8			@ rk[15]=rk[7]^rk[14]
+	str	r8,[r11,#-8]
+	str	r9,[r11,#-4]
+	b	L256_loop
+
+.align	2
+Ldone:	mov	r0,#0
+	ldmia	sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr}
+Labrt:
+#if __ARM_ARCH__>=5
+	bx	lr				@ .word	0xe12fff1e
+#else
+	tst	lr,#1
+	moveq	pc,lr			@ be binary compatible with V4, yet
+.word	0xe12fff1e			@ interoperable with Thumb ISA:-)
+#endif
+
+
+.globl	_aes_nohw_set_decrypt_key
+.private_extern	_aes_nohw_set_decrypt_key
+#ifdef __thumb2__
+.thumb_func	_aes_nohw_set_decrypt_key
+#endif
+.align	5
+_aes_nohw_set_decrypt_key:
+	str	lr,[sp,#-4]!            @ push lr
+	bl	_armv4_AES_set_encrypt_key
+	teq	r0,#0
+	ldr	lr,[sp],#4              @ pop lr
+	bne	Labrt
+
+	mov	r0,r2			@ _aes_nohw_set_encrypt_key preserves r2,
+	mov	r1,r2			@ which is AES_KEY *key
+	b	_armv4_AES_set_enc2dec_key
+
+
+@ void AES_set_enc2dec_key(const AES_KEY *inp,AES_KEY *out)
+.globl	_AES_set_enc2dec_key
+.private_extern	_AES_set_enc2dec_key
+#ifdef __thumb2__
+.thumb_func	_AES_set_enc2dec_key
+#endif
+.align	5
+_AES_set_enc2dec_key:
+_armv4_AES_set_enc2dec_key:
+	stmdb	sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr}
+
+	ldr	r12,[r0,#240]
+	mov	r7,r0			@ input
+	add	r8,r0,r12,lsl#4
+	mov	r11,r1			@ output
+	add	r10,r1,r12,lsl#4
+	str	r12,[r1,#240]
+
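+@ Linv walks the encryption schedule from both ends at once, copying the
+@ round keys end-for-end so the output holds them in reverse round order;
+@ Lmix below then applies InvMixColumn to every round key except the
+@ first and last, producing the decryption schedule.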
+Linv:	ldr	r0,[r7],#16
+	ldr	r1,[r7,#-12]
+	ldr	r2,[r7,#-8]
+	ldr	r3,[r7,#-4]
+	ldr	r4,[r8],#-16
+	ldr	r5,[r8,#16+4]
+	ldr	r6,[r8,#16+8]
+	ldr	r9,[r8,#16+12]
+	str	r0,[r10],#-16
+	str	r1,[r10,#16+4]
+	str	r2,[r10,#16+8]
+	str	r3,[r10,#16+12]
+	str	r4,[r11],#16
+	str	r5,[r11,#-12]
+	str	r6,[r11,#-8]
+	str	r9,[r11,#-4]
+	teq	r7,r8
+	bne	Linv
+
+	ldr	r0,[r7]
+	ldr	r1,[r7,#4]
+	ldr	r2,[r7,#8]
+	ldr	r3,[r7,#12]
+	str	r0,[r11]
+	str	r1,[r11,#4]
+	str	r2,[r11,#8]
+	str	r3,[r11,#12]
+	sub	r11,r11,r12,lsl#3
+	ldr	r0,[r11,#16]!		@ prefetch tp1
+	mov	r7,#0x80
+	mov	r8,#0x1b
+	orr	r7,r7,#0x8000
+	orr	r8,r8,#0x1b00
+	orr	r7,r7,r7,lsl#16
+	orr	r8,r8,r8,lsl#16
+	sub	r12,r12,#1
+	mvn	r9,r7
+	mov	r12,r12,lsl#2	@ (rounds-1)*4
+
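+@ Lmix applies InvMixColumn to one round-key word per pass. Each of the
+@ three five-instruction blocks below is a packed GF(2^8) doubling (xtime)
+@ of all four bytes at once, with r7=0x80808080, r8=0x1b1b1b1b and
+@ r9=0x7f7f7f7f:
+@	x2 = ((x & 0x7f7f7f7f) << 1) ^ (0x1b in each byte whose top bit was set)
+@ yielding tp2, tp4 and tp8; the eor/ror tail then combines them into
+@	tpe ^ ROTATE(tpb,8) ^ ROTATE(tpd,16) ^ ROTATE(tp9,24)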
+Lmix:	and	r4,r0,r7
+	and	r1,r0,r9
+	sub	r4,r4,r4,lsr#7
+	and	r4,r4,r8
+	eor	r1,r4,r1,lsl#1	@ tp2
+
+	and	r4,r1,r7
+	and	r2,r1,r9
+	sub	r4,r4,r4,lsr#7
+	and	r4,r4,r8
+	eor	r2,r4,r2,lsl#1	@ tp4
+
+	and	r4,r2,r7
+	and	r3,r2,r9
+	sub	r4,r4,r4,lsr#7
+	and	r4,r4,r8
+	eor	r3,r4,r3,lsl#1	@ tp8
+
+	eor	r4,r1,r2
+	eor	r5,r0,r3		@ tp9
+	eor	r4,r4,r3		@ tpe
+	eor	r4,r4,r1,ror#24
+	eor	r4,r4,r5,ror#24	@ ^= ROTATE(tpb=tp9^tp2,8)
+	eor	r4,r4,r2,ror#16
+	eor	r4,r4,r5,ror#16	@ ^= ROTATE(tpd=tp9^tp4,16)
+	eor	r4,r4,r5,ror#8	@ ^= ROTATE(tp9,24)
+
+	ldr	r0,[r11,#4]		@ prefetch tp1
+	str	r4,[r11],#4
+	subs	r12,r12,#1
+	bne	Lmix
+
+	mov	r0,#0
+#if __ARM_ARCH__>=5
+	ldmia	sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,pc}
+#else
+	ldmia	sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr}
+	tst	lr,#1
+	moveq	pc,lr			@ be binary compatible with V4, yet
+.word	0xe12fff1e			@ interoperable with Thumb ISA:-)
+#endif
+
+
+
+.align	5
+AES_Td:
+.word	0x51f4a750, 0x7e416553, 0x1a17a4c3, 0x3a275e96
+.word	0x3bab6bcb, 0x1f9d45f1, 0xacfa58ab, 0x4be30393
+.word	0x2030fa55, 0xad766df6, 0x88cc7691, 0xf5024c25
+.word	0x4fe5d7fc, 0xc52acbd7, 0x26354480, 0xb562a38f
+.word	0xdeb15a49, 0x25ba1b67, 0x45ea0e98, 0x5dfec0e1
+.word	0xc32f7502, 0x814cf012, 0x8d4697a3, 0x6bd3f9c6
+.word	0x038f5fe7, 0x15929c95, 0xbf6d7aeb, 0x955259da
+.word	0xd4be832d, 0x587421d3, 0x49e06929, 0x8ec9c844
+.word	0x75c2896a, 0xf48e7978, 0x99583e6b, 0x27b971dd
+.word	0xbee14fb6, 0xf088ad17, 0xc920ac66, 0x7dce3ab4
+.word	0x63df4a18, 0xe51a3182, 0x97513360, 0x62537f45
+.word	0xb16477e0, 0xbb6bae84, 0xfe81a01c, 0xf9082b94
+.word	0x70486858, 0x8f45fd19, 0x94de6c87, 0x527bf8b7
+.word	0xab73d323, 0x724b02e2, 0xe31f8f57, 0x6655ab2a
+.word	0xb2eb2807, 0x2fb5c203, 0x86c57b9a, 0xd33708a5
+.word	0x302887f2, 0x23bfa5b2, 0x02036aba, 0xed16825c
+.word	0x8acf1c2b, 0xa779b492, 0xf307f2f0, 0x4e69e2a1
+.word	0x65daf4cd, 0x0605bed5, 0xd134621f, 0xc4a6fe8a
+.word	0x342e539d, 0xa2f355a0, 0x058ae132, 0xa4f6eb75
+.word	0x0b83ec39, 0x4060efaa, 0x5e719f06, 0xbd6e1051
+.word	0x3e218af9, 0x96dd063d, 0xdd3e05ae, 0x4de6bd46
+.word	0x91548db5, 0x71c45d05, 0x0406d46f, 0x605015ff
+.word	0x1998fb24, 0xd6bde997, 0x894043cc, 0x67d99e77
+.word	0xb0e842bd, 0x07898b88, 0xe7195b38, 0x79c8eedb
+.word	0xa17c0a47, 0x7c420fe9, 0xf8841ec9, 0x00000000
+.word	0x09808683, 0x322bed48, 0x1e1170ac, 0x6c5a724e
+.word	0xfd0efffb, 0x0f853856, 0x3daed51e, 0x362d3927
+.word	0x0a0fd964, 0x685ca621, 0x9b5b54d1, 0x24362e3a
+.word	0x0c0a67b1, 0x9357e70f, 0xb4ee96d2, 0x1b9b919e
+.word	0x80c0c54f, 0x61dc20a2, 0x5a774b69, 0x1c121a16
+.word	0xe293ba0a, 0xc0a02ae5, 0x3c22e043, 0x121b171d
+.word	0x0e090d0b, 0xf28bc7ad, 0x2db6a8b9, 0x141ea9c8
+.word	0x57f11985, 0xaf75074c, 0xee99ddbb, 0xa37f60fd
+.word	0xf701269f, 0x5c72f5bc, 0x44663bc5, 0x5bfb7e34
+.word	0x8b432976, 0xcb23c6dc, 0xb6edfc68, 0xb8e4f163
+.word	0xd731dcca, 0x42638510, 0x13972240, 0x84c61120
+.word	0x854a247d, 0xd2bb3df8, 0xaef93211, 0xc729a16d
+.word	0x1d9e2f4b, 0xdcb230f3, 0x0d8652ec, 0x77c1e3d0
+.word	0x2bb3166c, 0xa970b999, 0x119448fa, 0x47e96422
+.word	0xa8fc8cc4, 0xa0f03f1a, 0x567d2cd8, 0x223390ef
+.word	0x87494ec7, 0xd938d1c1, 0x8ccaa2fe, 0x98d40b36
+.word	0xa6f581cf, 0xa57ade28, 0xdab78e26, 0x3fadbfa4
+.word	0x2c3a9de4, 0x5078920d, 0x6a5fcc9b, 0x547e4662
+.word	0xf68d13c2, 0x90d8b8e8, 0x2e39f75e, 0x82c3aff5
+.word	0x9f5d80be, 0x69d0937c, 0x6fd52da9, 0xcf2512b3
+.word	0xc8ac993b, 0x10187da7, 0xe89c636e, 0xdb3bbb7b
+.word	0xcd267809, 0x6e5918f4, 0xec9ab701, 0x834f9aa8
+.word	0xe6956e65, 0xaaffe67e, 0x21bccf08, 0xef15e8e6
+.word	0xbae79bd9, 0x4a6f36ce, 0xea9f09d4, 0x29b07cd6
+.word	0x31a4b2af, 0x2a3f2331, 0xc6a59430, 0x35a266c0
+.word	0x744ebc37, 0xfc82caa6, 0xe090d0b0, 0x33a7d815
+.word	0xf104984a, 0x41ecdaf7, 0x7fcd500e, 0x1791f62f
+.word	0x764dd68d, 0x43efb04d, 0xccaa4d54, 0xe49604df
+.word	0x9ed1b5e3, 0x4c6a881b, 0xc12c1fb8, 0x4665517f
+.word	0x9d5eea04, 0x018c355d, 0xfa877473, 0xfb0b412e
+.word	0xb3671d5a, 0x92dbd252, 0xe9105633, 0x6dd64713
+.word	0x9ad7618c, 0x37a10c7a, 0x59f8148e, 0xeb133c89
+.word	0xcea927ee, 0xb761c935, 0xe11ce5ed, 0x7a47b13c
+.word	0x9cd2df59, 0x55f2733f, 0x1814ce79, 0x73c737bf
+.word	0x53f7cdea, 0x5ffdaa5b, 0xdf3d6f14, 0x7844db86
+.word	0xcaaff381, 0xb968c43e, 0x3824342c, 0xc2a3405f
+.word	0x161dc372, 0xbce2250c, 0x283c498b, 0xff0d9541
+.word	0x39a80171, 0x080cb3de, 0xd8b4e49c, 0x6456c190
+.word	0x7bcb8461, 0xd532b670, 0x486c5c74, 0xd0b85742
+@ Td4[256]
+.byte	0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38
+.byte	0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb
+.byte	0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87
+.byte	0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb
+.byte	0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d
+.byte	0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e
+.byte	0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2
+.byte	0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25
+.byte	0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16
+.byte	0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92
+.byte	0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda
+.byte	0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84
+.byte	0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a
+.byte	0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06
+.byte	0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02
+.byte	0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b
+.byte	0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea
+.byte	0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73
+.byte	0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85
+.byte	0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e
+.byte	0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89
+.byte	0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b
+.byte	0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20
+.byte	0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4
+.byte	0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31
+.byte	0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f
+.byte	0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d
+.byte	0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef
+.byte	0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0
+.byte	0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61
+.byte	0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26
+.byte	0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d
+
+
+@ void aes_nohw_decrypt(const unsigned char *in, unsigned char *out,
+@ 		                  const AES_KEY *key) {
+.globl	_aes_nohw_decrypt
+.private_extern	_aes_nohw_decrypt
+#ifdef __thumb2__
+.thumb_func	_aes_nohw_decrypt
+#endif
+.align	5
+_aes_nohw_decrypt:
+#ifndef	__thumb2__
+	sub	r3,pc,#8		@ _aes_nohw_decrypt
+#else
+	adr	r3,.
+#endif
+	stmdb	sp!,{r1,r4-r12,lr}
+#if defined(__thumb2__) || defined(__APPLE__)
+	adr	r10,AES_Td
+#else
+	sub	r10,r3,#_aes_nohw_decrypt-AES_Td	@ Td
+#endif
+	mov	r12,r0		@ inp
+	mov	r11,r2
+#if __ARM_ARCH__<7
+	ldrb	r0,[r12,#3]	@ load input data in endian-neutral
+	ldrb	r4,[r12,#2]	@ manner...
+	ldrb	r5,[r12,#1]
+	ldrb	r6,[r12,#0]
+	orr	r0,r0,r4,lsl#8
+	ldrb	r1,[r12,#7]
+	orr	r0,r0,r5,lsl#16
+	ldrb	r4,[r12,#6]
+	orr	r0,r0,r6,lsl#24
+	ldrb	r5,[r12,#5]
+	ldrb	r6,[r12,#4]
+	orr	r1,r1,r4,lsl#8
+	ldrb	r2,[r12,#11]
+	orr	r1,r1,r5,lsl#16
+	ldrb	r4,[r12,#10]
+	orr	r1,r1,r6,lsl#24
+	ldrb	r5,[r12,#9]
+	ldrb	r6,[r12,#8]
+	orr	r2,r2,r4,lsl#8
+	ldrb	r3,[r12,#15]
+	orr	r2,r2,r5,lsl#16
+	ldrb	r4,[r12,#14]
+	orr	r2,r2,r6,lsl#24
+	ldrb	r5,[r12,#13]
+	ldrb	r6,[r12,#12]
+	orr	r3,r3,r4,lsl#8
+	orr	r3,r3,r5,lsl#16
+	orr	r3,r3,r6,lsl#24
+#else
+	ldr	r0,[r12,#0]
+	ldr	r1,[r12,#4]
+	ldr	r2,[r12,#8]
+	ldr	r3,[r12,#12]
+#ifdef __ARMEL__
+	rev	r0,r0
+	rev	r1,r1
+	rev	r2,r2
+	rev	r3,r3
+#endif
+#endif
+	bl	_armv4_AES_decrypt
+
+	ldr	r12,[sp],#4		@ pop out
+#if __ARM_ARCH__>=7
+#ifdef __ARMEL__
+	rev	r0,r0
+	rev	r1,r1
+	rev	r2,r2
+	rev	r3,r3
+#endif
+	str	r0,[r12,#0]
+	str	r1,[r12,#4]
+	str	r2,[r12,#8]
+	str	r3,[r12,#12]
+#else
+	mov	r4,r0,lsr#24		@ write output in endian-neutral
+	mov	r5,r0,lsr#16		@ manner...
+	mov	r6,r0,lsr#8
+	strb	r4,[r12,#0]
+	strb	r5,[r12,#1]
+	mov	r4,r1,lsr#24
+	strb	r6,[r12,#2]
+	mov	r5,r1,lsr#16
+	strb	r0,[r12,#3]
+	mov	r6,r1,lsr#8
+	strb	r4,[r12,#4]
+	strb	r5,[r12,#5]
+	mov	r4,r2,lsr#24
+	strb	r6,[r12,#6]
+	mov	r5,r2,lsr#16
+	strb	r1,[r12,#7]
+	mov	r6,r2,lsr#8
+	strb	r4,[r12,#8]
+	strb	r5,[r12,#9]
+	mov	r4,r3,lsr#24
+	strb	r6,[r12,#10]
+	mov	r5,r3,lsr#16
+	strb	r2,[r12,#11]
+	mov	r6,r3,lsr#8
+	strb	r4,[r12,#12]
+	strb	r5,[r12,#13]
+	strb	r6,[r12,#14]
+	strb	r3,[r12,#15]
+#endif
+#if __ARM_ARCH__>=5
+	ldmia	sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,pc}
+#else
+	ldmia	sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr}
+	tst	lr,#1
+	moveq	pc,lr			@ be binary compatible with V4, yet
+.word	0xe12fff1e			@ interoperable with Thumb ISA:-)
+#endif
+
+
+#ifdef __thumb2__
+.thumb_func	_armv4_AES_decrypt
+#endif
+.align	2
+_armv4_AES_decrypt:
+	str	lr,[sp,#-4]!		@ push lr
+	ldmia	r11!,{r4,r5,r6,r7}
+	eor	r0,r0,r4
+	ldr	r12,[r11,#240-16]
+	eor	r1,r1,r5
+	eor	r2,r2,r6
+	eor	r3,r3,r7
+	sub	r12,r12,#1
+	mov	lr,#255
+
+	and	r7,lr,r0,lsr#16
+	and	r8,lr,r0,lsr#8
+	and	r9,lr,r0
+	mov	r0,r0,lsr#24
+Ldec_loop:
+	ldr	r4,[r10,r7,lsl#2]	@ Td1[s0>>16]
+	and	r7,lr,r1		@ i0
+	ldr	r5,[r10,r8,lsl#2]	@ Td2[s0>>8]
+	and	r8,lr,r1,lsr#16
+	ldr	r6,[r10,r9,lsl#2]	@ Td3[s0>>0]
+	and	r9,lr,r1,lsr#8
+	ldr	r0,[r10,r0,lsl#2]	@ Td0[s0>>24]
+	mov	r1,r1,lsr#24
+
+	ldr	r7,[r10,r7,lsl#2]	@ Td3[s1>>0]
+	ldr	r8,[r10,r8,lsl#2]	@ Td1[s1>>16]
+	ldr	r9,[r10,r9,lsl#2]	@ Td2[s1>>8]
+	eor	r0,r0,r7,ror#24
+	ldr	r1,[r10,r1,lsl#2]	@ Td0[s1>>24]
+	and	r7,lr,r2,lsr#8	@ i0
+	eor	r5,r8,r5,ror#8
+	and	r8,lr,r2		@ i1
+	eor	r6,r9,r6,ror#8
+	and	r9,lr,r2,lsr#16
+	ldr	r7,[r10,r7,lsl#2]	@ Td2[s2>>8]
+	eor	r1,r1,r4,ror#8
+	ldr	r8,[r10,r8,lsl#2]	@ Td3[s2>>0]
+	mov	r2,r2,lsr#24
+
+	ldr	r9,[r10,r9,lsl#2]	@ Td1[s2>>16]
+	eor	r0,r0,r7,ror#16
+	ldr	r2,[r10,r2,lsl#2]	@ Td0[s2>>24]
+	and	r7,lr,r3,lsr#16	@ i0
+	eor	r1,r1,r8,ror#24
+	and	r8,lr,r3,lsr#8	@ i1
+	eor	r6,r9,r6,ror#8
+	and	r9,lr,r3		@ i2
+	ldr	r7,[r10,r7,lsl#2]	@ Td1[s3>>16]
+	eor	r2,r2,r5,ror#8
+	ldr	r8,[r10,r8,lsl#2]	@ Td2[s3>>8]
+	mov	r3,r3,lsr#24
+
+	ldr	r9,[r10,r9,lsl#2]	@ Td3[s3>>0]
+	eor	r0,r0,r7,ror#8
+	ldr	r7,[r11],#16
+	eor	r1,r1,r8,ror#16
+	ldr	r3,[r10,r3,lsl#2]	@ Td0[s3>>24]
+	eor	r2,r2,r9,ror#24
+
+	ldr	r4,[r11,#-12]
+	eor	r0,r0,r7
+	ldr	r5,[r11,#-8]
+	eor	r3,r3,r6,ror#8
+	ldr	r6,[r11,#-4]
+	and	r7,lr,r0,lsr#16
+	eor	r1,r1,r4
+	and	r8,lr,r0,lsr#8
+	eor	r2,r2,r5
+	and	r9,lr,r0
+	eor	r3,r3,r6
+	mov	r0,r0,lsr#24
+
+	subs	r12,r12,#1
+	bne	Ldec_loop
+
+	add	r10,r10,#1024
+
+	ldr	r5,[r10,#0]		@ prefetch Td4
+	ldr	r6,[r10,#32]
+	ldr	r4,[r10,#64]
+	ldr	r5,[r10,#96]
+	ldr	r6,[r10,#128]
+	ldr	r4,[r10,#160]
+	ldr	r5,[r10,#192]
+	ldr	r6,[r10,#224]
+
+	ldrb	r0,[r10,r0]		@ Td4[s0>>24]
+	ldrb	r4,[r10,r7]		@ Td4[s0>>16]
+	and	r7,lr,r1		@ i0
+	ldrb	r5,[r10,r8]		@ Td4[s0>>8]
+	and	r8,lr,r1,lsr#16
+	ldrb	r6,[r10,r9]		@ Td4[s0>>0]
+	and	r9,lr,r1,lsr#8
+
+	add	r1,r10,r1,lsr#24
+	ldrb	r7,[r10,r7]		@ Td4[s1>>0]
+	ldrb	r1,[r1]		@ Td4[s1>>24]
+	ldrb	r8,[r10,r8]		@ Td4[s1>>16]
+	eor	r0,r7,r0,lsl#24
+	ldrb	r9,[r10,r9]		@ Td4[s1>>8]
+	eor	r1,r4,r1,lsl#8
+	and	r7,lr,r2,lsr#8	@ i0
+	eor	r5,r5,r8,lsl#8
+	and	r8,lr,r2		@ i1
+	ldrb	r7,[r10,r7]		@ Td4[s2>>8]
+	eor	r6,r6,r9,lsl#8
+	ldrb	r8,[r10,r8]		@ Td4[s2>>0]
+	and	r9,lr,r2,lsr#16
+
+	add	r2,r10,r2,lsr#24
+	ldrb	r2,[r2]		@ Td4[s2>>24]
+	eor	r0,r0,r7,lsl#8
+	ldrb	r9,[r10,r9]		@ Td4[s2>>16]
+	eor	r1,r8,r1,lsl#16
+	and	r7,lr,r3,lsr#16	@ i0
+	eor	r2,r5,r2,lsl#16
+	and	r8,lr,r3,lsr#8	@ i1
+	ldrb	r7,[r10,r7]		@ Td4[s3>>16]
+	eor	r6,r6,r9,lsl#16
+	ldrb	r8,[r10,r8]		@ Td4[s3>>8]
+	and	r9,lr,r3		@ i2
+
+	add	r3,r10,r3,lsr#24
+	ldrb	r9,[r10,r9]		@ Td4[s3>>0]
+	ldrb	r3,[r3]		@ Td4[s3>>24]
+	eor	r0,r0,r7,lsl#16
+	ldr	r7,[r11,#0]
+	eor	r1,r1,r8,lsl#8
+	ldr	r4,[r11,#4]
+	eor	r2,r9,r2,lsl#8
+	ldr	r5,[r11,#8]
+	eor	r3,r6,r3,lsl#24
+	ldr	r6,[r11,#12]
+
+	eor	r0,r0,r7
+	eor	r1,r1,r4
+	eor	r2,r2,r5
+	eor	r3,r3,r6
+
+	sub	r10,r10,#1024
+	ldr	pc,[sp],#4		@ pop and return
+
+.byte	65,69,83,32,102,111,114,32,65,82,77,118,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.align	2
+.align	2
+#endif  // !OPENSSL_NO_ASM
diff --git a/linux-arm/crypto/fipsmodule/aes-armv4.S b/linux-arm/crypto/fipsmodule/aes-armv4.S
new file mode 100644
index 0000000..cfe2a36
--- /dev/null
+++ b/linux-arm/crypto/fipsmodule/aes-armv4.S
@@ -0,0 +1,1222 @@
+// This file is generated from a similarly-named Perl script in the BoringSSL
+// source tree. Do not edit by hand.
+
+#if !defined(__has_feature)
+#define __has_feature(x) 0
+#endif
+#if __has_feature(memory_sanitizer) && !defined(OPENSSL_NO_ASM)
+#define OPENSSL_NO_ASM
+#endif
+
+#if !defined(OPENSSL_NO_ASM)
+#if defined(__arm__)
+#if defined(BORINGSSL_PREFIX)
+#include <boringssl_prefix_symbols_asm.h>
+#endif
+@ Copyright 2007-2016 The OpenSSL Project Authors. All Rights Reserved.
+@
+@ Licensed under the OpenSSL license (the "License").  You may not use
+@ this file except in compliance with the License.  You can obtain a copy
+@ in the file LICENSE in the source distribution or at
+@ https://www.openssl.org/source/license.html
+
+
+@ ====================================================================
+@ Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
+@ project. The module is, however, dual licensed under OpenSSL and
+@ CRYPTOGAMS licenses depending on where you obtain it. For further
+@ details see http://www.openssl.org/~appro/cryptogams/.
+@ ====================================================================
+
+@ AES for ARMv4
+
+@ January 2007.
+@
+@ The code uses a single 1KB S-box and is more than twice as fast as code
+@ generated by gcc-3.4.1. This is thanks to a unique feature of the ARMv4
+@ ISA, which can merge a logical or arithmetic operation with a shift or
+@ rotate in one instruction and emit the combined result every cycle. The
+@ module is endian-neutral. Performance is ~42 cycles per byte with a
+@ 128-bit key [on a single-issue XScale PXA250 core].
+
+@ May 2007.
+@
+@ AES_set_[en|de]crypt_key is added.
+
+@ July 2010.
+@
+@ Rescheduling for dual-issue pipeline resulted in 12% improvement on
+@ Cortex A8 core and ~25 cycles per byte processed with 128-bit key.
+
+@ February 2011.
+@
+@ Profiler-assisted and platform-specific optimization resulted in 16%
+@ improvement on Cortex A8 core and ~21.5 cycles per byte.
+
+#ifndef __KERNEL__
+# include <openssl/arm_arch.h>
+#else
+# define __ARM_ARCH__ __LINUX_ARM_ARCH__
+#endif
+
+@ Silence ARMv8 deprecated IT instruction warnings. This file is used by both
+@ ARMv7 and ARMv8 processors and does not use ARMv8 instructions. (ARMv8 AES
+@ instructions are in aesv8-armx.pl.)
+.arch	armv7-a
+
+.text
+#if defined(__thumb2__) && !defined(__APPLE__)
+.syntax	unified
+.thumb
+#else
+.code	32
+#undef __thumb2__
+#endif
+
+.type	AES_Te,%object
+.align	5
+AES_Te:
+.word	0xc66363a5, 0xf87c7c84, 0xee777799, 0xf67b7b8d
+.word	0xfff2f20d, 0xd66b6bbd, 0xde6f6fb1, 0x91c5c554
+.word	0x60303050, 0x02010103, 0xce6767a9, 0x562b2b7d
+.word	0xe7fefe19, 0xb5d7d762, 0x4dababe6, 0xec76769a
+.word	0x8fcaca45, 0x1f82829d, 0x89c9c940, 0xfa7d7d87
+.word	0xeffafa15, 0xb25959eb, 0x8e4747c9, 0xfbf0f00b
+.word	0x41adadec, 0xb3d4d467, 0x5fa2a2fd, 0x45afafea
+.word	0x239c9cbf, 0x53a4a4f7, 0xe4727296, 0x9bc0c05b
+.word	0x75b7b7c2, 0xe1fdfd1c, 0x3d9393ae, 0x4c26266a
+.word	0x6c36365a, 0x7e3f3f41, 0xf5f7f702, 0x83cccc4f
+.word	0x6834345c, 0x51a5a5f4, 0xd1e5e534, 0xf9f1f108
+.word	0xe2717193, 0xabd8d873, 0x62313153, 0x2a15153f
+.word	0x0804040c, 0x95c7c752, 0x46232365, 0x9dc3c35e
+.word	0x30181828, 0x379696a1, 0x0a05050f, 0x2f9a9ab5
+.word	0x0e070709, 0x24121236, 0x1b80809b, 0xdfe2e23d
+.word	0xcdebeb26, 0x4e272769, 0x7fb2b2cd, 0xea75759f
+.word	0x1209091b, 0x1d83839e, 0x582c2c74, 0x341a1a2e
+.word	0x361b1b2d, 0xdc6e6eb2, 0xb45a5aee, 0x5ba0a0fb
+.word	0xa45252f6, 0x763b3b4d, 0xb7d6d661, 0x7db3b3ce
+.word	0x5229297b, 0xdde3e33e, 0x5e2f2f71, 0x13848497
+.word	0xa65353f5, 0xb9d1d168, 0x00000000, 0xc1eded2c
+.word	0x40202060, 0xe3fcfc1f, 0x79b1b1c8, 0xb65b5bed
+.word	0xd46a6abe, 0x8dcbcb46, 0x67bebed9, 0x7239394b
+.word	0x944a4ade, 0x984c4cd4, 0xb05858e8, 0x85cfcf4a
+.word	0xbbd0d06b, 0xc5efef2a, 0x4faaaae5, 0xedfbfb16
+.word	0x864343c5, 0x9a4d4dd7, 0x66333355, 0x11858594
+.word	0x8a4545cf, 0xe9f9f910, 0x04020206, 0xfe7f7f81
+.word	0xa05050f0, 0x783c3c44, 0x259f9fba, 0x4ba8a8e3
+.word	0xa25151f3, 0x5da3a3fe, 0x804040c0, 0x058f8f8a
+.word	0x3f9292ad, 0x219d9dbc, 0x70383848, 0xf1f5f504
+.word	0x63bcbcdf, 0x77b6b6c1, 0xafdada75, 0x42212163
+.word	0x20101030, 0xe5ffff1a, 0xfdf3f30e, 0xbfd2d26d
+.word	0x81cdcd4c, 0x180c0c14, 0x26131335, 0xc3ecec2f
+.word	0xbe5f5fe1, 0x359797a2, 0x884444cc, 0x2e171739
+.word	0x93c4c457, 0x55a7a7f2, 0xfc7e7e82, 0x7a3d3d47
+.word	0xc86464ac, 0xba5d5de7, 0x3219192b, 0xe6737395
+.word	0xc06060a0, 0x19818198, 0x9e4f4fd1, 0xa3dcdc7f
+.word	0x44222266, 0x542a2a7e, 0x3b9090ab, 0x0b888883
+.word	0x8c4646ca, 0xc7eeee29, 0x6bb8b8d3, 0x2814143c
+.word	0xa7dede79, 0xbc5e5ee2, 0x160b0b1d, 0xaddbdb76
+.word	0xdbe0e03b, 0x64323256, 0x743a3a4e, 0x140a0a1e
+.word	0x924949db, 0x0c06060a, 0x4824246c, 0xb85c5ce4
+.word	0x9fc2c25d, 0xbdd3d36e, 0x43acacef, 0xc46262a6
+.word	0x399191a8, 0x319595a4, 0xd3e4e437, 0xf279798b
+.word	0xd5e7e732, 0x8bc8c843, 0x6e373759, 0xda6d6db7
+.word	0x018d8d8c, 0xb1d5d564, 0x9c4e4ed2, 0x49a9a9e0
+.word	0xd86c6cb4, 0xac5656fa, 0xf3f4f407, 0xcfeaea25
+.word	0xca6565af, 0xf47a7a8e, 0x47aeaee9, 0x10080818
+.word	0x6fbabad5, 0xf0787888, 0x4a25256f, 0x5c2e2e72
+.word	0x381c1c24, 0x57a6a6f1, 0x73b4b4c7, 0x97c6c651
+.word	0xcbe8e823, 0xa1dddd7c, 0xe874749c, 0x3e1f1f21
+.word	0x964b4bdd, 0x61bdbddc, 0x0d8b8b86, 0x0f8a8a85
+.word	0xe0707090, 0x7c3e3e42, 0x71b5b5c4, 0xcc6666aa
+.word	0x904848d8, 0x06030305, 0xf7f6f601, 0x1c0e0e12
+.word	0xc26161a3, 0x6a35355f, 0xae5757f9, 0x69b9b9d0
+.word	0x17868691, 0x99c1c158, 0x3a1d1d27, 0x279e9eb9
+.word	0xd9e1e138, 0xebf8f813, 0x2b9898b3, 0x22111133
+.word	0xd26969bb, 0xa9d9d970, 0x078e8e89, 0x339494a7
+.word	0x2d9b9bb6, 0x3c1e1e22, 0x15878792, 0xc9e9e920
+.word	0x87cece49, 0xaa5555ff, 0x50282878, 0xa5dfdf7a
+.word	0x038c8c8f, 0x59a1a1f8, 0x09898980, 0x1a0d0d17
+.word	0x65bfbfda, 0xd7e6e631, 0x844242c6, 0xd06868b8
+.word	0x824141c3, 0x299999b0, 0x5a2d2d77, 0x1e0f0f11
+.word	0x7bb0b0cb, 0xa85454fc, 0x6dbbbbd6, 0x2c16163a
+@ Te4[256]
+.byte	0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5
+.byte	0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76
+.byte	0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0
+.byte	0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0
+.byte	0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc
+.byte	0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15
+.byte	0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a
+.byte	0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75
+.byte	0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0
+.byte	0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84
+.byte	0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b
+.byte	0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf
+.byte	0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85
+.byte	0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8
+.byte	0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5
+.byte	0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2
+.byte	0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17
+.byte	0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73
+.byte	0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88
+.byte	0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb
+.byte	0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c
+.byte	0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79
+.byte	0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9
+.byte	0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08
+.byte	0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6
+.byte	0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a
+.byte	0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e
+.byte	0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e
+.byte	0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94
+.byte	0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf
+.byte	0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68
+.byte	0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16
+@ rcon[]
+.word	0x01000000, 0x02000000, 0x04000000, 0x08000000
+.word	0x10000000, 0x20000000, 0x40000000, 0x80000000
+.word	0x1B000000, 0x36000000, 0, 0, 0, 0, 0, 0
+.size	AES_Te,.-AES_Te
+
+@ void aes_nohw_encrypt(const unsigned char *in, unsigned char *out,
+@ 		                  const AES_KEY *key) {
+.globl	aes_nohw_encrypt
+.hidden	aes_nohw_encrypt
+.type	aes_nohw_encrypt,%function
+.align	5
+aes_nohw_encrypt:
+#ifndef	__thumb2__
+	sub	r3,pc,#8		@ aes_nohw_encrypt
+#else
+	adr	r3,.
+#endif
+	stmdb	sp!,{r1,r4-r12,lr}
+#if defined(__thumb2__) || defined(__APPLE__)
+	adr	r10,AES_Te
+#else
+	sub	r10,r3,#aes_nohw_encrypt-AES_Te	@ Te
+#endif
+	mov	r12,r0		@ inp
+	mov	r11,r2
+#if __ARM_ARCH__<7
+	ldrb	r0,[r12,#3]	@ load input data in endian-neutral
+	ldrb	r4,[r12,#2]	@ manner...
+	ldrb	r5,[r12,#1]
+	ldrb	r6,[r12,#0]
+	orr	r0,r0,r4,lsl#8
+	ldrb	r1,[r12,#7]
+	orr	r0,r0,r5,lsl#16
+	ldrb	r4,[r12,#6]
+	orr	r0,r0,r6,lsl#24
+	ldrb	r5,[r12,#5]
+	ldrb	r6,[r12,#4]
+	orr	r1,r1,r4,lsl#8
+	ldrb	r2,[r12,#11]
+	orr	r1,r1,r5,lsl#16
+	ldrb	r4,[r12,#10]
+	orr	r1,r1,r6,lsl#24
+	ldrb	r5,[r12,#9]
+	ldrb	r6,[r12,#8]
+	orr	r2,r2,r4,lsl#8
+	ldrb	r3,[r12,#15]
+	orr	r2,r2,r5,lsl#16
+	ldrb	r4,[r12,#14]
+	orr	r2,r2,r6,lsl#24
+	ldrb	r5,[r12,#13]
+	ldrb	r6,[r12,#12]
+	orr	r3,r3,r4,lsl#8
+	orr	r3,r3,r5,lsl#16
+	orr	r3,r3,r6,lsl#24
+#else
+	ldr	r0,[r12,#0]
+	ldr	r1,[r12,#4]
+	ldr	r2,[r12,#8]
+	ldr	r3,[r12,#12]
+#ifdef __ARMEL__
+	rev	r0,r0
+	rev	r1,r1
+	rev	r2,r2
+	rev	r3,r3
+#endif
+#endif
+	bl	_armv4_AES_encrypt
+
+	ldr	r12,[sp],#4		@ pop out
+#if __ARM_ARCH__>=7
+#ifdef __ARMEL__
+	rev	r0,r0
+	rev	r1,r1
+	rev	r2,r2
+	rev	r3,r3
+#endif
+	str	r0,[r12,#0]
+	str	r1,[r12,#4]
+	str	r2,[r12,#8]
+	str	r3,[r12,#12]
+#else
+	mov	r4,r0,lsr#24		@ write output in endian-neutral
+	mov	r5,r0,lsr#16		@ manner...
+	mov	r6,r0,lsr#8
+	strb	r4,[r12,#0]
+	strb	r5,[r12,#1]
+	mov	r4,r1,lsr#24
+	strb	r6,[r12,#2]
+	mov	r5,r1,lsr#16
+	strb	r0,[r12,#3]
+	mov	r6,r1,lsr#8
+	strb	r4,[r12,#4]
+	strb	r5,[r12,#5]
+	mov	r4,r2,lsr#24
+	strb	r6,[r12,#6]
+	mov	r5,r2,lsr#16
+	strb	r1,[r12,#7]
+	mov	r6,r2,lsr#8
+	strb	r4,[r12,#8]
+	strb	r5,[r12,#9]
+	mov	r4,r3,lsr#24
+	strb	r6,[r12,#10]
+	mov	r5,r3,lsr#16
+	strb	r2,[r12,#11]
+	mov	r6,r3,lsr#8
+	strb	r4,[r12,#12]
+	strb	r5,[r12,#13]
+	strb	r6,[r12,#14]
+	strb	r3,[r12,#15]
+#endif
+#if __ARM_ARCH__>=5
+	ldmia	sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,pc}
+#else
+	ldmia	sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr}
+	tst	lr,#1
+	moveq	pc,lr			@ be binary compatible with V4, yet
+.word	0xe12fff1e			@ interoperable with Thumb ISA:-)
+#endif
+.size	aes_nohw_encrypt,.-aes_nohw_encrypt
+
+.type	_armv4_AES_encrypt,%function
+.align	2
+_armv4_AES_encrypt:
+	str	lr,[sp,#-4]!		@ push lr
+	ldmia	r11!,{r4,r5,r6,r7}
+	eor	r0,r0,r4
+	ldr	r12,[r11,#240-16]
+	eor	r1,r1,r5
+	eor	r2,r2,r6
+	eor	r3,r3,r7
+	sub	r12,r12,#1
+	mov	lr,#255
+
+	and	r7,lr,r0
+	and	r8,lr,r0,lsr#8
+	and	r9,lr,r0,lsr#16
+	mov	r0,r0,lsr#24
+.Lenc_loop:
+	ldr	r4,[r10,r7,lsl#2]	@ Te3[s0>>0]
+	and	r7,lr,r1,lsr#16	@ i0
+	ldr	r5,[r10,r8,lsl#2]	@ Te2[s0>>8]
+	and	r8,lr,r1
+	ldr	r6,[r10,r9,lsl#2]	@ Te1[s0>>16]
+	and	r9,lr,r1,lsr#8
+	ldr	r0,[r10,r0,lsl#2]	@ Te0[s0>>24]
+	mov	r1,r1,lsr#24
+
+	ldr	r7,[r10,r7,lsl#2]	@ Te1[s1>>16]
+	ldr	r8,[r10,r8,lsl#2]	@ Te3[s1>>0]
+	ldr	r9,[r10,r9,lsl#2]	@ Te2[s1>>8]
+	eor	r0,r0,r7,ror#8
+	ldr	r1,[r10,r1,lsl#2]	@ Te0[s1>>24]
+	and	r7,lr,r2,lsr#8	@ i0
+	eor	r5,r5,r8,ror#8
+	and	r8,lr,r2,lsr#16	@ i1
+	eor	r6,r6,r9,ror#8
+	and	r9,lr,r2
+	ldr	r7,[r10,r7,lsl#2]	@ Te2[s2>>8]
+	eor	r1,r1,r4,ror#24
+	ldr	r8,[r10,r8,lsl#2]	@ Te1[s2>>16]
+	mov	r2,r2,lsr#24
+
+	ldr	r9,[r10,r9,lsl#2]	@ Te3[s2>>0]
+	eor	r0,r0,r7,ror#16
+	ldr	r2,[r10,r2,lsl#2]	@ Te0[s2>>24]
+	and	r7,lr,r3		@ i0
+	eor	r1,r1,r8,ror#8
+	and	r8,lr,r3,lsr#8	@ i1
+	eor	r6,r6,r9,ror#16
+	and	r9,lr,r3,lsr#16	@ i2
+	ldr	r7,[r10,r7,lsl#2]	@ Te3[s3>>0]
+	eor	r2,r2,r5,ror#16
+	ldr	r8,[r10,r8,lsl#2]	@ Te2[s3>>8]
+	mov	r3,r3,lsr#24
+
+	ldr	r9,[r10,r9,lsl#2]	@ Te1[s3>>16]
+	eor	r0,r0,r7,ror#24
+	ldr	r7,[r11],#16
+	eor	r1,r1,r8,ror#16
+	ldr	r3,[r10,r3,lsl#2]	@ Te0[s3>>24]
+	eor	r2,r2,r9,ror#8
+	ldr	r4,[r11,#-12]
+	eor	r3,r3,r6,ror#8
+
+	ldr	r5,[r11,#-8]
+	eor	r0,r0,r7
+	ldr	r6,[r11,#-4]
+	and	r7,lr,r0
+	eor	r1,r1,r4
+	and	r8,lr,r0,lsr#8
+	eor	r2,r2,r5
+	and	r9,lr,r0,lsr#16
+	eor	r3,r3,r6
+	mov	r0,r0,lsr#24
+
+	subs	r12,r12,#1
+	bne	.Lenc_loop
+
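+@ The add below nudges the table pointer so the final round's byte loads,
+@ ldrb at r10 + 2 + 4*i, hit the middle bytes of each Te entry: both
+@ middle bytes of Te0[x] equal S[x] on either endianness, so the raw
+@ S-box value comes straight out of the 32-bit table and this path needs
+@ no separate Te4.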
+	add	r10,r10,#2
+
+	ldrb	r4,[r10,r7,lsl#2]	@ Te4[s0>>0]
+	and	r7,lr,r1,lsr#16	@ i0
+	ldrb	r5,[r10,r8,lsl#2]	@ Te4[s0>>8]
+	and	r8,lr,r1
+	ldrb	r6,[r10,r9,lsl#2]	@ Te4[s0>>16]
+	and	r9,lr,r1,lsr#8
+	ldrb	r0,[r10,r0,lsl#2]	@ Te4[s0>>24]
+	mov	r1,r1,lsr#24
+
+	ldrb	r7,[r10,r7,lsl#2]	@ Te4[s1>>16]
+	ldrb	r8,[r10,r8,lsl#2]	@ Te4[s1>>0]
+	ldrb	r9,[r10,r9,lsl#2]	@ Te4[s1>>8]
+	eor	r0,r7,r0,lsl#8
+	ldrb	r1,[r10,r1,lsl#2]	@ Te4[s1>>24]
+	and	r7,lr,r2,lsr#8	@ i0
+	eor	r5,r8,r5,lsl#8
+	and	r8,lr,r2,lsr#16	@ i1
+	eor	r6,r9,r6,lsl#8
+	and	r9,lr,r2
+	ldrb	r7,[r10,r7,lsl#2]	@ Te4[s2>>8]
+	eor	r1,r4,r1,lsl#24
+	ldrb	r8,[r10,r8,lsl#2]	@ Te4[s2>>16]
+	mov	r2,r2,lsr#24
+
+	ldrb	r9,[r10,r9,lsl#2]	@ Te4[s2>>0]
+	eor	r0,r7,r0,lsl#8
+	ldrb	r2,[r10,r2,lsl#2]	@ Te4[s2>>24]
+	and	r7,lr,r3		@ i0
+	eor	r1,r1,r8,lsl#16
+	and	r8,lr,r3,lsr#8	@ i1
+	eor	r6,r9,r6,lsl#8
+	and	r9,lr,r3,lsr#16	@ i2
+	ldrb	r7,[r10,r7,lsl#2]	@ Te4[s3>>0]
+	eor	r2,r5,r2,lsl#24
+	ldrb	r8,[r10,r8,lsl#2]	@ Te4[s3>>8]
+	mov	r3,r3,lsr#24
+
+	ldrb	r9,[r10,r9,lsl#2]	@ Te4[s3>>16]
+	eor	r0,r7,r0,lsl#8
+	ldr	r7,[r11,#0]
+	ldrb	r3,[r10,r3,lsl#2]	@ Te4[s3>>24]
+	eor	r1,r1,r8,lsl#8
+	ldr	r4,[r11,#4]
+	eor	r2,r2,r9,lsl#16
+	ldr	r5,[r11,#8]
+	eor	r3,r6,r3,lsl#24
+	ldr	r6,[r11,#12]
+
+	eor	r0,r0,r7
+	eor	r1,r1,r4
+	eor	r2,r2,r5
+	eor	r3,r3,r6
+
+	sub	r10,r10,#2
+	ldr	pc,[sp],#4		@ pop and return
+.size	_armv4_AES_encrypt,.-_armv4_AES_encrypt
+
+.globl	aes_nohw_set_encrypt_key
+.hidden	aes_nohw_set_encrypt_key
+.type	aes_nohw_set_encrypt_key,%function
+.align	5
+aes_nohw_set_encrypt_key:
+_armv4_AES_set_encrypt_key:
+#ifndef	__thumb2__
+	sub	r3,pc,#8		@ aes_nohw_set_encrypt_key
+#else
+	adr	r3,.
+#endif
+	teq	r0,#0
+#ifdef	__thumb2__
+	itt	eq			@ Thumb2 thing, sanity check in ARM
+#endif
+	moveq	r0,#-1
+	beq	.Labrt
+	teq	r2,#0
+#ifdef	__thumb2__
+	itt	eq			@ Thumb2 thing, sanity check in ARM
+#endif
+	moveq	r0,#-1
+	beq	.Labrt
+
+	teq	r1,#128
+	beq	.Lok
+	teq	r1,#192
+	beq	.Lok
+	teq	r1,#256
+#ifdef	__thumb2__
+	itt	ne			@ Thumb2 thing, sanity check in ARM
+#endif
+	movne	r0,#-1
+	bne	.Labrt
+
+.Lok:	stmdb	sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr}
+	mov	r12,r0		@ inp
+	mov	lr,r1			@ bits
+	mov	r11,r2			@ key
+
+#if defined(__thumb2__) || defined(__APPLE__)
+	adr	r10,AES_Te+1024				@ Te4
+#else
+	sub	r10,r3,#_armv4_AES_set_encrypt_key-AES_Te-1024	@ Te4
+#endif
+
+#if __ARM_ARCH__<7
+	ldrb	r0,[r12,#3]	@ load input data in endian-neutral
+	ldrb	r4,[r12,#2]	@ manner...
+	ldrb	r5,[r12,#1]
+	ldrb	r6,[r12,#0]
+	orr	r0,r0,r4,lsl#8
+	ldrb	r1,[r12,#7]
+	orr	r0,r0,r5,lsl#16
+	ldrb	r4,[r12,#6]
+	orr	r0,r0,r6,lsl#24
+	ldrb	r5,[r12,#5]
+	ldrb	r6,[r12,#4]
+	orr	r1,r1,r4,lsl#8
+	ldrb	r2,[r12,#11]
+	orr	r1,r1,r5,lsl#16
+	ldrb	r4,[r12,#10]
+	orr	r1,r1,r6,lsl#24
+	ldrb	r5,[r12,#9]
+	ldrb	r6,[r12,#8]
+	orr	r2,r2,r4,lsl#8
+	ldrb	r3,[r12,#15]
+	orr	r2,r2,r5,lsl#16
+	ldrb	r4,[r12,#14]
+	orr	r2,r2,r6,lsl#24
+	ldrb	r5,[r12,#13]
+	ldrb	r6,[r12,#12]
+	orr	r3,r3,r4,lsl#8
+	str	r0,[r11],#16
+	orr	r3,r3,r5,lsl#16
+	str	r1,[r11,#-12]
+	orr	r3,r3,r6,lsl#24
+	str	r2,[r11,#-8]
+	str	r3,[r11,#-4]
+#else
+	ldr	r0,[r12,#0]
+	ldr	r1,[r12,#4]
+	ldr	r2,[r12,#8]
+	ldr	r3,[r12,#12]
+#ifdef __ARMEL__
+	rev	r0,r0
+	rev	r1,r1
+	rev	r2,r2
+	rev	r3,r3
+#endif
+	str	r0,[r11],#16
+	str	r1,[r11,#-12]
+	str	r2,[r11,#-8]
+	str	r3,[r11,#-4]
+#endif
+
+	teq	lr,#128
+	bne	.Lnot128
+	mov	r12,#10
+	str	r12,[r11,#240-16]
+	add	r6,r10,#256			@ rcon
+	mov	lr,#255
+
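+@ (hedged) Each pass appears to implement the AES-128 schedule step
+@   rk[4] = rk[0] ^ SubWord(RotWord(rk[3])) ^ rcon[i]
+@ with the byte S-box (Te4) at r10 and r6 walking the rcon words.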
+.L128_loop:
+	and	r5,lr,r3,lsr#24
+	and	r7,lr,r3,lsr#16
+	ldrb	r5,[r10,r5]
+	and	r8,lr,r3,lsr#8
+	ldrb	r7,[r10,r7]
+	and	r9,lr,r3
+	ldrb	r8,[r10,r8]
+	orr	r5,r5,r7,lsl#24
+	ldrb	r9,[r10,r9]
+	orr	r5,r5,r8,lsl#16
+	ldr	r4,[r6],#4			@ rcon[i++]
+	orr	r5,r5,r9,lsl#8
+	eor	r5,r5,r4
+	eor	r0,r0,r5			@ rk[4]=rk[0]^...
+	eor	r1,r1,r0			@ rk[5]=rk[1]^rk[4]
+	str	r0,[r11],#16
+	eor	r2,r2,r1			@ rk[6]=rk[2]^rk[5]
+	str	r1,[r11,#-12]
+	eor	r3,r3,r2			@ rk[7]=rk[3]^rk[6]
+	str	r2,[r11,#-8]
+	subs	r12,r12,#1
+	str	r3,[r11,#-4]
+	bne	.L128_loop
+	sub	r2,r11,#176
+	b	.Ldone
+
+.Lnot128:
+#if __ARM_ARCH__<7
+	ldrb	r8,[r12,#19]
+	ldrb	r4,[r12,#18]
+	ldrb	r5,[r12,#17]
+	ldrb	r6,[r12,#16]
+	orr	r8,r8,r4,lsl#8
+	ldrb	r9,[r12,#23]
+	orr	r8,r8,r5,lsl#16
+	ldrb	r4,[r12,#22]
+	orr	r8,r8,r6,lsl#24
+	ldrb	r5,[r12,#21]
+	ldrb	r6,[r12,#20]
+	orr	r9,r9,r4,lsl#8
+	orr	r9,r9,r5,lsl#16
+	str	r8,[r11],#8
+	orr	r9,r9,r6,lsl#24
+	str	r9,[r11,#-4]
+#else
+	ldr	r8,[r12,#16]
+	ldr	r9,[r12,#20]
+#ifdef __ARMEL__
+	rev	r8,r8
+	rev	r9,r9
+#endif
+	str	r8,[r11],#8
+	str	r9,[r11,#-4]
+#endif
+
+	teq	lr,#192
+	bne	.Lnot192
+	mov	r12,#12
+	str	r12,[r11,#240-24]
+	add	r6,r10,#256			@ rcon
+	mov	lr,#255
+	mov	r12,#8
+
+.L192_loop:
+	and	r5,lr,r9,lsr#24
+	and	r7,lr,r9,lsr#16
+	ldrb	r5,[r10,r5]
+	and	r8,lr,r9,lsr#8
+	ldrb	r7,[r10,r7]
+	and	r9,lr,r9
+	ldrb	r8,[r10,r8]
+	orr	r5,r5,r7,lsl#24
+	ldrb	r9,[r10,r9]
+	orr	r5,r5,r8,lsl#16
+	ldr	r4,[r6],#4			@ rcon[i++]
+	orr	r5,r5,r9,lsl#8
+	eor	r9,r5,r4
+	eor	r0,r0,r9			@ rk[6]=rk[0]^...
+	eor	r1,r1,r0			@ rk[7]=rk[1]^rk[6]
+	str	r0,[r11],#24
+	eor	r2,r2,r1			@ rk[8]=rk[2]^rk[7]
+	str	r1,[r11,#-20]
+	eor	r3,r3,r2			@ rk[9]=rk[3]^rk[8]
+	str	r2,[r11,#-16]
+	subs	r12,r12,#1
+	str	r3,[r11,#-12]
+#ifdef	__thumb2__
+	itt	eq				@ Thumb2 thing, sanity check in ARM
+#endif
+	subeq	r2,r11,#216
+	beq	.Ldone
+
+	ldr	r7,[r11,#-32]
+	ldr	r8,[r11,#-28]
+	eor	r7,r7,r3			@ rk[10]=rk[4]^rk[9]
+	eor	r9,r8,r7			@ rk[11]=rk[5]^rk[10]
+	str	r7,[r11,#-8]
+	str	r9,[r11,#-4]
+	b	.L192_loop
+
+.Lnot192:
+#if __ARM_ARCH__<7
+	ldrb	r8,[r12,#27]
+	ldrb	r4,[r12,#26]
+	ldrb	r5,[r12,#25]
+	ldrb	r6,[r12,#24]
+	orr	r8,r8,r4,lsl#8
+	ldrb	r9,[r12,#31]
+	orr	r8,r8,r5,lsl#16
+	ldrb	r4,[r12,#30]
+	orr	r8,r8,r6,lsl#24
+	ldrb	r5,[r12,#29]
+	ldrb	r6,[r12,#28]
+	orr	r9,r9,r4,lsl#8
+	orr	r9,r9,r5,lsl#16
+	str	r8,[r11],#8
+	orr	r9,r9,r6,lsl#24
+	str	r9,[r11,#-4]
+#else
+	ldr	r8,[r12,#24]
+	ldr	r9,[r12,#28]
+#ifdef __ARMEL__
+	rev	r8,r8
+	rev	r9,r9
+#endif
+	str	r8,[r11],#8
+	str	r9,[r11,#-4]
+#endif
+
+	mov	r12,#14
+	str	r12,[r11,#240-32]
+	add	r6,r10,#256			@ rcon
+	mov	lr,#255
+	mov	r12,#7
+
+.L256_loop:
+	and	r5,lr,r9,lsr#24
+	and	r7,lr,r9,lsr#16
+	ldrb	r5,[r10,r5]
+	and	r8,lr,r9,lsr#8
+	ldrb	r7,[r10,r7]
+	and	r9,lr,r9
+	ldrb	r8,[r10,r8]
+	orr	r5,r5,r7,lsl#24
+	ldrb	r9,[r10,r9]
+	orr	r5,r5,r8,lsl#16
+	ldr	r4,[r6],#4			@ rcon[i++]
+	orr	r5,r5,r9,lsl#8
+	eor	r9,r5,r4
+	eor	r0,r0,r9			@ rk[8]=rk[0]^...
+	eor	r1,r1,r0			@ rk[9]=rk[1]^rk[8]
+	str	r0,[r11],#32
+	eor	r2,r2,r1			@ rk[10]=rk[2]^rk[9]
+	str	r1,[r11,#-28]
+	eor	r3,r3,r2			@ rk[11]=rk[3]^rk[10]
+	str	r2,[r11,#-24]
+	subs	r12,r12,#1
+	str	r3,[r11,#-20]
+#ifdef	__thumb2__
+	itt	eq				@ Thumb2 thing, sanity check in ARM
+#endif
+	subeq	r2,r11,#256
+	beq	.Ldone
+
+	and	r5,lr,r3
+	and	r7,lr,r3,lsr#8
+	ldrb	r5,[r10,r5]
+	and	r8,lr,r3,lsr#16
+	ldrb	r7,[r10,r7]
+	and	r9,lr,r3,lsr#24
+	ldrb	r8,[r10,r8]
+	orr	r5,r5,r7,lsl#8
+	ldrb	r9,[r10,r9]
+	orr	r5,r5,r8,lsl#16
+	ldr	r4,[r11,#-48]
+	orr	r5,r5,r9,lsl#24
+
+	ldr	r7,[r11,#-44]
+	ldr	r8,[r11,#-40]
+	eor	r4,r4,r5			@ rk[12]=rk[4]^...
+	ldr	r9,[r11,#-36]
+	eor	r7,r7,r4			@ rk[13]=rk[5]^rk[12]
+	str	r4,[r11,#-16]
+	eor	r8,r8,r7			@ rk[14]=rk[6]^rk[13]
+	str	r7,[r11,#-12]
+	eor	r9,r9,r8			@ rk[15]=rk[7]^rk[14]
+	str	r8,[r11,#-8]
+	str	r9,[r11,#-4]
+	b	.L256_loop
+
+.align	2
+.Ldone:	mov	r0,#0
+	ldmia	sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr}
+.Labrt:
+#if __ARM_ARCH__>=5
+	bx	lr				@ .word	0xe12fff1e
+#else
+	tst	lr,#1
+	moveq	pc,lr			@ be binary compatible with V4, yet
+.word	0xe12fff1e			@ interoperable with Thumb ISA:-)
+#endif
+.size	aes_nohw_set_encrypt_key,.-aes_nohw_set_encrypt_key
+
+.globl	aes_nohw_set_decrypt_key
+.hidden	aes_nohw_set_decrypt_key
+.type	aes_nohw_set_decrypt_key,%function
+.align	5
+aes_nohw_set_decrypt_key:
+	str	lr,[sp,#-4]!            @ push lr
+	bl	_armv4_AES_set_encrypt_key
+	teq	r0,#0
+	ldr	lr,[sp],#4              @ pop lr
+	bne	.Labrt
+
+	mov	r0,r2			@ aes_nohw_set_encrypt_key preserves r2,
+	mov	r1,r2			@ which is AES_KEY *key
+	b	_armv4_AES_set_enc2dec_key
+.size	aes_nohw_set_decrypt_key,.-aes_nohw_set_decrypt_key
+
+@ void AES_set_enc2dec_key(const AES_KEY *inp,AES_KEY *out)
+.globl	AES_set_enc2dec_key
+.hidden	AES_set_enc2dec_key
+.type	AES_set_enc2dec_key,%function
+.align	5
+AES_set_enc2dec_key:
+_armv4_AES_set_enc2dec_key:
+	stmdb	sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr}
+
+	ldr	r12,[r0,#240]
+	mov	r7,r0			@ input
+	add	r8,r0,r12,lsl#4
+	mov	r11,r1			@ output
+	add	r10,r1,r12,lsl#4
+	str	r12,[r1,#240]
+
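+@ (hedged) .Linv copies the schedule with the round order reversed:
+@ r7 walks forward and r8 backward through the input while r11 walks
+@ forward and r10 backward through the output, until the pointers meet.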
+.Linv:	ldr	r0,[r7],#16
+	ldr	r1,[r7,#-12]
+	ldr	r2,[r7,#-8]
+	ldr	r3,[r7,#-4]
+	ldr	r4,[r8],#-16
+	ldr	r5,[r8,#16+4]
+	ldr	r6,[r8,#16+8]
+	ldr	r9,[r8,#16+12]
+	str	r0,[r10],#-16
+	str	r1,[r10,#16+4]
+	str	r2,[r10,#16+8]
+	str	r3,[r10,#16+12]
+	str	r4,[r11],#16
+	str	r5,[r11,#-12]
+	str	r6,[r11,#-8]
+	str	r9,[r11,#-4]
+	teq	r7,r8
+	bne	.Linv
+
+	ldr	r0,[r7]
+	ldr	r1,[r7,#4]
+	ldr	r2,[r7,#8]
+	ldr	r3,[r7,#12]
+	str	r0,[r11]
+	str	r1,[r11,#4]
+	str	r2,[r11,#8]
+	str	r3,[r11,#12]
+	sub	r11,r11,r12,lsl#3
+	ldr	r0,[r11,#16]!		@ prefetch tp1
+	mov	r7,#0x80
+	mov	r8,#0x1b
+	orr	r7,r7,#0x8000
+	orr	r8,r8,#0x1b00
+	orr	r7,r7,r7,lsl#16
+	orr	r8,r8,r8,lsl#16
+	sub	r12,r12,#1
+	mvn	r9,r7
+	mov	r12,r12,lsl#2	@ (rounds-1)*4
+
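+@ (hedged) .Lmix applies InvMixColumns to one round-key word per pass:
+@ tp2/tp4/tp8 are successive xtime doublings done on all four bytes at
+@ once (r7=0x80808080 extracts the carry bits, r8=0x1b1b1b1b folds the
+@ AES polynomial back in), and the ror-xor ladder combines them into
+@ the 9x/11x/13x/14x column mix.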
+.Lmix:	and	r4,r0,r7
+	and	r1,r0,r9
+	sub	r4,r4,r4,lsr#7
+	and	r4,r4,r8
+	eor	r1,r4,r1,lsl#1	@ tp2
+
+	and	r4,r1,r7
+	and	r2,r1,r9
+	sub	r4,r4,r4,lsr#7
+	and	r4,r4,r8
+	eor	r2,r4,r2,lsl#1	@ tp4
+
+	and	r4,r2,r7
+	and	r3,r2,r9
+	sub	r4,r4,r4,lsr#7
+	and	r4,r4,r8
+	eor	r3,r4,r3,lsl#1	@ tp8
+
+	eor	r4,r1,r2
+	eor	r5,r0,r3		@ tp9
+	eor	r4,r4,r3		@ tpe
+	eor	r4,r4,r1,ror#24
+	eor	r4,r4,r5,ror#24	@ ^= ROTATE(tpb=tp9^tp2,8)
+	eor	r4,r4,r2,ror#16
+	eor	r4,r4,r5,ror#16	@ ^= ROTATE(tpd=tp9^tp4,16)
+	eor	r4,r4,r5,ror#8	@ ^= ROTATE(tp9,24)
+
+	ldr	r0,[r11,#4]		@ prefetch tp1
+	str	r4,[r11],#4
+	subs	r12,r12,#1
+	bne	.Lmix
+
+	mov	r0,#0
+#if __ARM_ARCH__>=5
+	ldmia	sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,pc}
+#else
+	ldmia	sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr}
+	tst	lr,#1
+	moveq	pc,lr			@ be binary compatible with V4, yet
+.word	0xe12fff1e			@ interoperable with Thumb ISA:-)
+#endif
+.size	AES_set_enc2dec_key,.-AES_set_enc2dec_key
+
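+@ (hedged) AES_Td looks like the usual combined decryption table:
+@ Td[x] = InvMixColumns of a column built from InvSBox[x], read in
+@ rotated views via ror in _armv4_AES_decrypt below, with the raw
+@ inverse S-box bytes (Td4) appended at byte offset 1024.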
+.type	AES_Td,%object
+.align	5
+AES_Td:
+.word	0x51f4a750, 0x7e416553, 0x1a17a4c3, 0x3a275e96
+.word	0x3bab6bcb, 0x1f9d45f1, 0xacfa58ab, 0x4be30393
+.word	0x2030fa55, 0xad766df6, 0x88cc7691, 0xf5024c25
+.word	0x4fe5d7fc, 0xc52acbd7, 0x26354480, 0xb562a38f
+.word	0xdeb15a49, 0x25ba1b67, 0x45ea0e98, 0x5dfec0e1
+.word	0xc32f7502, 0x814cf012, 0x8d4697a3, 0x6bd3f9c6
+.word	0x038f5fe7, 0x15929c95, 0xbf6d7aeb, 0x955259da
+.word	0xd4be832d, 0x587421d3, 0x49e06929, 0x8ec9c844
+.word	0x75c2896a, 0xf48e7978, 0x99583e6b, 0x27b971dd
+.word	0xbee14fb6, 0xf088ad17, 0xc920ac66, 0x7dce3ab4
+.word	0x63df4a18, 0xe51a3182, 0x97513360, 0x62537f45
+.word	0xb16477e0, 0xbb6bae84, 0xfe81a01c, 0xf9082b94
+.word	0x70486858, 0x8f45fd19, 0x94de6c87, 0x527bf8b7
+.word	0xab73d323, 0x724b02e2, 0xe31f8f57, 0x6655ab2a
+.word	0xb2eb2807, 0x2fb5c203, 0x86c57b9a, 0xd33708a5
+.word	0x302887f2, 0x23bfa5b2, 0x02036aba, 0xed16825c
+.word	0x8acf1c2b, 0xa779b492, 0xf307f2f0, 0x4e69e2a1
+.word	0x65daf4cd, 0x0605bed5, 0xd134621f, 0xc4a6fe8a
+.word	0x342e539d, 0xa2f355a0, 0x058ae132, 0xa4f6eb75
+.word	0x0b83ec39, 0x4060efaa, 0x5e719f06, 0xbd6e1051
+.word	0x3e218af9, 0x96dd063d, 0xdd3e05ae, 0x4de6bd46
+.word	0x91548db5, 0x71c45d05, 0x0406d46f, 0x605015ff
+.word	0x1998fb24, 0xd6bde997, 0x894043cc, 0x67d99e77
+.word	0xb0e842bd, 0x07898b88, 0xe7195b38, 0x79c8eedb
+.word	0xa17c0a47, 0x7c420fe9, 0xf8841ec9, 0x00000000
+.word	0x09808683, 0x322bed48, 0x1e1170ac, 0x6c5a724e
+.word	0xfd0efffb, 0x0f853856, 0x3daed51e, 0x362d3927
+.word	0x0a0fd964, 0x685ca621, 0x9b5b54d1, 0x24362e3a
+.word	0x0c0a67b1, 0x9357e70f, 0xb4ee96d2, 0x1b9b919e
+.word	0x80c0c54f, 0x61dc20a2, 0x5a774b69, 0x1c121a16
+.word	0xe293ba0a, 0xc0a02ae5, 0x3c22e043, 0x121b171d
+.word	0x0e090d0b, 0xf28bc7ad, 0x2db6a8b9, 0x141ea9c8
+.word	0x57f11985, 0xaf75074c, 0xee99ddbb, 0xa37f60fd
+.word	0xf701269f, 0x5c72f5bc, 0x44663bc5, 0x5bfb7e34
+.word	0x8b432976, 0xcb23c6dc, 0xb6edfc68, 0xb8e4f163
+.word	0xd731dcca, 0x42638510, 0x13972240, 0x84c61120
+.word	0x854a247d, 0xd2bb3df8, 0xaef93211, 0xc729a16d
+.word	0x1d9e2f4b, 0xdcb230f3, 0x0d8652ec, 0x77c1e3d0
+.word	0x2bb3166c, 0xa970b999, 0x119448fa, 0x47e96422
+.word	0xa8fc8cc4, 0xa0f03f1a, 0x567d2cd8, 0x223390ef
+.word	0x87494ec7, 0xd938d1c1, 0x8ccaa2fe, 0x98d40b36
+.word	0xa6f581cf, 0xa57ade28, 0xdab78e26, 0x3fadbfa4
+.word	0x2c3a9de4, 0x5078920d, 0x6a5fcc9b, 0x547e4662
+.word	0xf68d13c2, 0x90d8b8e8, 0x2e39f75e, 0x82c3aff5
+.word	0x9f5d80be, 0x69d0937c, 0x6fd52da9, 0xcf2512b3
+.word	0xc8ac993b, 0x10187da7, 0xe89c636e, 0xdb3bbb7b
+.word	0xcd267809, 0x6e5918f4, 0xec9ab701, 0x834f9aa8
+.word	0xe6956e65, 0xaaffe67e, 0x21bccf08, 0xef15e8e6
+.word	0xbae79bd9, 0x4a6f36ce, 0xea9f09d4, 0x29b07cd6
+.word	0x31a4b2af, 0x2a3f2331, 0xc6a59430, 0x35a266c0
+.word	0x744ebc37, 0xfc82caa6, 0xe090d0b0, 0x33a7d815
+.word	0xf104984a, 0x41ecdaf7, 0x7fcd500e, 0x1791f62f
+.word	0x764dd68d, 0x43efb04d, 0xccaa4d54, 0xe49604df
+.word	0x9ed1b5e3, 0x4c6a881b, 0xc12c1fb8, 0x4665517f
+.word	0x9d5eea04, 0x018c355d, 0xfa877473, 0xfb0b412e
+.word	0xb3671d5a, 0x92dbd252, 0xe9105633, 0x6dd64713
+.word	0x9ad7618c, 0x37a10c7a, 0x59f8148e, 0xeb133c89
+.word	0xcea927ee, 0xb761c935, 0xe11ce5ed, 0x7a47b13c
+.word	0x9cd2df59, 0x55f2733f, 0x1814ce79, 0x73c737bf
+.word	0x53f7cdea, 0x5ffdaa5b, 0xdf3d6f14, 0x7844db86
+.word	0xcaaff381, 0xb968c43e, 0x3824342c, 0xc2a3405f
+.word	0x161dc372, 0xbce2250c, 0x283c498b, 0xff0d9541
+.word	0x39a80171, 0x080cb3de, 0xd8b4e49c, 0x6456c190
+.word	0x7bcb8461, 0xd532b670, 0x486c5c74, 0xd0b85742
+@ Td4[256]
+.byte	0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38
+.byte	0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb
+.byte	0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87
+.byte	0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb
+.byte	0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d
+.byte	0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e
+.byte	0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2
+.byte	0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25
+.byte	0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16
+.byte	0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92
+.byte	0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda
+.byte	0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84
+.byte	0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a
+.byte	0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06
+.byte	0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02
+.byte	0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b
+.byte	0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea
+.byte	0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73
+.byte	0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85
+.byte	0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e
+.byte	0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89
+.byte	0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b
+.byte	0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20
+.byte	0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4
+.byte	0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31
+.byte	0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f
+.byte	0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d
+.byte	0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef
+.byte	0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0
+.byte	0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61
+.byte	0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26
+.byte	0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d
+.size	AES_Td,.-AES_Td
+
+@ void aes_nohw_decrypt(const unsigned char *in, unsigned char *out,
+@ 		                  const AES_KEY *key)
+.globl	aes_nohw_decrypt
+.hidden	aes_nohw_decrypt
+.type	aes_nohw_decrypt,%function
+.align	5
+aes_nohw_decrypt:
+#ifndef	__thumb2__
+	sub	r3,pc,#8		@ aes_nohw_decrypt
+#else
+	adr	r3,.
+#endif
+	stmdb	sp!,{r1,r4-r12,lr}
+#if defined(__thumb2__) || defined(__APPLE__)
+	adr	r10,AES_Td
+#else
+	sub	r10,r3,#aes_nohw_decrypt-AES_Td	@ Td
+#endif
+	mov	r12,r0		@ inp
+	mov	r11,r2
+#if __ARM_ARCH__<7
+	ldrb	r0,[r12,#3]	@ load input data in endian-neutral
+	ldrb	r4,[r12,#2]	@ manner...
+	ldrb	r5,[r12,#1]
+	ldrb	r6,[r12,#0]
+	orr	r0,r0,r4,lsl#8
+	ldrb	r1,[r12,#7]
+	orr	r0,r0,r5,lsl#16
+	ldrb	r4,[r12,#6]
+	orr	r0,r0,r6,lsl#24
+	ldrb	r5,[r12,#5]
+	ldrb	r6,[r12,#4]
+	orr	r1,r1,r4,lsl#8
+	ldrb	r2,[r12,#11]
+	orr	r1,r1,r5,lsl#16
+	ldrb	r4,[r12,#10]
+	orr	r1,r1,r6,lsl#24
+	ldrb	r5,[r12,#9]
+	ldrb	r6,[r12,#8]
+	orr	r2,r2,r4,lsl#8
+	ldrb	r3,[r12,#15]
+	orr	r2,r2,r5,lsl#16
+	ldrb	r4,[r12,#14]
+	orr	r2,r2,r6,lsl#24
+	ldrb	r5,[r12,#13]
+	ldrb	r6,[r12,#12]
+	orr	r3,r3,r4,lsl#8
+	orr	r3,r3,r5,lsl#16
+	orr	r3,r3,r6,lsl#24
+#else
+	ldr	r0,[r12,#0]
+	ldr	r1,[r12,#4]
+	ldr	r2,[r12,#8]
+	ldr	r3,[r12,#12]
+#ifdef __ARMEL__
+	rev	r0,r0
+	rev	r1,r1
+	rev	r2,r2
+	rev	r3,r3
+#endif
+#endif
+	bl	_armv4_AES_decrypt
+
+	ldr	r12,[sp],#4		@ pop out
+#if __ARM_ARCH__>=7
+#ifdef __ARMEL__
+	rev	r0,r0
+	rev	r1,r1
+	rev	r2,r2
+	rev	r3,r3
+#endif
+	str	r0,[r12,#0]
+	str	r1,[r12,#4]
+	str	r2,[r12,#8]
+	str	r3,[r12,#12]
+#else
+	mov	r4,r0,lsr#24		@ write output in endian-neutral
+	mov	r5,r0,lsr#16		@ manner...
+	mov	r6,r0,lsr#8
+	strb	r4,[r12,#0]
+	strb	r5,[r12,#1]
+	mov	r4,r1,lsr#24
+	strb	r6,[r12,#2]
+	mov	r5,r1,lsr#16
+	strb	r0,[r12,#3]
+	mov	r6,r1,lsr#8
+	strb	r4,[r12,#4]
+	strb	r5,[r12,#5]
+	mov	r4,r2,lsr#24
+	strb	r6,[r12,#6]
+	mov	r5,r2,lsr#16
+	strb	r1,[r12,#7]
+	mov	r6,r2,lsr#8
+	strb	r4,[r12,#8]
+	strb	r5,[r12,#9]
+	mov	r4,r3,lsr#24
+	strb	r6,[r12,#10]
+	mov	r5,r3,lsr#16
+	strb	r2,[r12,#11]
+	mov	r6,r3,lsr#8
+	strb	r4,[r12,#12]
+	strb	r5,[r12,#13]
+	strb	r6,[r12,#14]
+	strb	r3,[r12,#15]
+#endif
+#if __ARM_ARCH__>=5
+	ldmia	sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,pc}
+#else
+	ldmia	sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr}
+	tst	lr,#1
+	moveq	pc,lr			@ be binary compatible with V4, yet
+.word	0xe12fff1e			@ interoperable with Thumb ISA:-)
+#endif
+.size	aes_nohw_decrypt,.-aes_nohw_decrypt
+
+.type	_armv4_AES_decrypt,%function
+.align	2
+_armv4_AES_decrypt:
+	str	lr,[sp,#-4]!		@ push lr
+	ldmia	r11!,{r4,r5,r6,r7}
+	eor	r0,r0,r4
+	ldr	r12,[r11,#240-16]
+	eor	r1,r1,r5
+	eor	r2,r2,r6
+	eor	r3,r3,r7
+	sub	r12,r12,#1
+	mov	lr,#255
+
+	and	r7,lr,r0,lsr#16
+	and	r8,lr,r0,lsr#8
+	and	r9,lr,r0
+	mov	r0,r0,lsr#24
+.Ldec_loop:
+	ldr	r4,[r10,r7,lsl#2]	@ Td1[s0>>16]
+	and	r7,lr,r1		@ i0
+	ldr	r5,[r10,r8,lsl#2]	@ Td2[s0>>8]
+	and	r8,lr,r1,lsr#16
+	ldr	r6,[r10,r9,lsl#2]	@ Td3[s0>>0]
+	and	r9,lr,r1,lsr#8
+	ldr	r0,[r10,r0,lsl#2]	@ Td0[s0>>24]
+	mov	r1,r1,lsr#24
+
+	ldr	r7,[r10,r7,lsl#2]	@ Td3[s1>>0]
+	ldr	r8,[r10,r8,lsl#2]	@ Td1[s1>>16]
+	ldr	r9,[r10,r9,lsl#2]	@ Td2[s1>>8]
+	eor	r0,r0,r7,ror#24
+	ldr	r1,[r10,r1,lsl#2]	@ Td0[s1>>24]
+	and	r7,lr,r2,lsr#8	@ i0
+	eor	r5,r8,r5,ror#8
+	and	r8,lr,r2		@ i1
+	eor	r6,r9,r6,ror#8
+	and	r9,lr,r2,lsr#16
+	ldr	r7,[r10,r7,lsl#2]	@ Td2[s2>>8]
+	eor	r1,r1,r4,ror#8
+	ldr	r8,[r10,r8,lsl#2]	@ Td3[s2>>0]
+	mov	r2,r2,lsr#24
+
+	ldr	r9,[r10,r9,lsl#2]	@ Td1[s2>>16]
+	eor	r0,r0,r7,ror#16
+	ldr	r2,[r10,r2,lsl#2]	@ Td0[s2>>24]
+	and	r7,lr,r3,lsr#16	@ i0
+	eor	r1,r1,r8,ror#24
+	and	r8,lr,r3,lsr#8	@ i1
+	eor	r6,r9,r6,ror#8
+	and	r9,lr,r3		@ i2
+	ldr	r7,[r10,r7,lsl#2]	@ Td1[s3>>16]
+	eor	r2,r2,r5,ror#8
+	ldr	r8,[r10,r8,lsl#2]	@ Td2[s3>>8]
+	mov	r3,r3,lsr#24
+
+	ldr	r9,[r10,r9,lsl#2]	@ Td3[s3>>0]
+	eor	r0,r0,r7,ror#8
+	ldr	r7,[r11],#16
+	eor	r1,r1,r8,ror#16
+	ldr	r3,[r10,r3,lsl#2]	@ Td0[s3>>24]
+	eor	r2,r2,r9,ror#24
+
+	ldr	r4,[r11,#-12]
+	eor	r0,r0,r7
+	ldr	r5,[r11,#-8]
+	eor	r3,r3,r6,ror#8
+	ldr	r6,[r11,#-4]
+	and	r7,lr,r0,lsr#16
+	eor	r1,r1,r4
+	and	r8,lr,r0,lsr#8
+	eor	r2,r2,r5
+	and	r9,lr,r0
+	eor	r3,r3,r6
+	mov	r0,r0,lsr#24
+
+	subs	r12,r12,#1
+	bne	.Ldec_loop
+
+	add	r10,r10,#1024
+
+	ldr	r5,[r10,#0]		@ prefetch Td4
+	ldr	r6,[r10,#32]
+	ldr	r4,[r10,#64]
+	ldr	r5,[r10,#96]
+	ldr	r6,[r10,#128]
+	ldr	r4,[r10,#160]
+	ldr	r5,[r10,#192]
+	ldr	r6,[r10,#224]
+
+	ldrb	r0,[r10,r0]		@ Td4[s0>>24]
+	ldrb	r4,[r10,r7]		@ Td4[s0>>16]
+	and	r7,lr,r1		@ i0
+	ldrb	r5,[r10,r8]		@ Td4[s0>>8]
+	and	r8,lr,r1,lsr#16
+	ldrb	r6,[r10,r9]		@ Td4[s0>>0]
+	and	r9,lr,r1,lsr#8
+
+	add	r1,r10,r1,lsr#24
+	ldrb	r7,[r10,r7]		@ Td4[s1>>0]
+	ldrb	r1,[r1]		@ Td4[s1>>24]
+	ldrb	r8,[r10,r8]		@ Td4[s1>>16]
+	eor	r0,r7,r0,lsl#24
+	ldrb	r9,[r10,r9]		@ Td4[s1>>8]
+	eor	r1,r4,r1,lsl#8
+	and	r7,lr,r2,lsr#8	@ i0
+	eor	r5,r5,r8,lsl#8
+	and	r8,lr,r2		@ i1
+	ldrb	r7,[r10,r7]		@ Td4[s2>>8]
+	eor	r6,r6,r9,lsl#8
+	ldrb	r8,[r10,r8]		@ Td4[s2>>0]
+	and	r9,lr,r2,lsr#16
+
+	add	r2,r10,r2,lsr#24
+	ldrb	r2,[r2]		@ Td4[s2>>24]
+	eor	r0,r0,r7,lsl#8
+	ldrb	r9,[r10,r9]		@ Td4[s2>>16]
+	eor	r1,r8,r1,lsl#16
+	and	r7,lr,r3,lsr#16	@ i0
+	eor	r2,r5,r2,lsl#16
+	and	r8,lr,r3,lsr#8	@ i1
+	ldrb	r7,[r10,r7]		@ Td4[s3>>16]
+	eor	r6,r6,r9,lsl#16
+	ldrb	r8,[r10,r8]		@ Td4[s3>>8]
+	and	r9,lr,r3		@ i2
+
+	add	r3,r10,r3,lsr#24
+	ldrb	r9,[r10,r9]		@ Td4[s3>>0]
+	ldrb	r3,[r3]		@ Td4[s3>>24]
+	eor	r0,r0,r7,lsl#16
+	ldr	r7,[r11,#0]
+	eor	r1,r1,r8,lsl#8
+	ldr	r4,[r11,#4]
+	eor	r2,r9,r2,lsl#8
+	ldr	r5,[r11,#8]
+	eor	r3,r6,r3,lsl#24
+	ldr	r6,[r11,#12]
+
+	eor	r0,r0,r7
+	eor	r1,r1,r4
+	eor	r2,r2,r5
+	eor	r3,r3,r6
+
+	sub	r10,r10,#1024
+	ldr	pc,[sp],#4		@ pop and return
+.size	_armv4_AES_decrypt,.-_armv4_AES_decrypt
+.byte	65,69,83,32,102,111,114,32,65,82,77,118,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
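+@ (the .byte string above decodes to "AES for ARMv4, CRYPTOGAMS by
+@ <appro@openssl.org>")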
+.align	2
+.align	2
+#endif
+#endif  // !OPENSSL_NO_ASM
+.section	.note.GNU-stack,"",%progbits
diff --git a/linux-x86/crypto/fipsmodule/aes-586.S b/linux-x86/crypto/fipsmodule/aes-586.S
new file mode 100644
index 0000000..e43cfea
--- /dev/null
+++ b/linux-x86/crypto/fipsmodule/aes-586.S
@@ -0,0 +1,3263 @@
+# This file is generated from a similarly-named Perl script in the BoringSSL
+# source tree. Do not edit by hand.
+
+#if defined(__i386__)
+#if defined(BORINGSSL_PREFIX)
+#include <boringssl_prefix_symbols_asm.h>
+#endif
+.text
+.hidden	_x86_AES_encrypt_compact
+.type	_x86_AES_encrypt_compact,@function
+.align	16
+_x86_AES_encrypt_compact:
+	movl	%edi,20(%esp)
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
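+# (hedged) The eight loads below discard their results; they appear to
+# exist only to pull the 256-byte compact S-box around %ebp into cache
+# before the first data-dependent lookup, as a timing countermeasure.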
+	movl	-128(%ebp),%edi
+	movl	-96(%ebp),%esi
+	movl	-64(%ebp),%edi
+	movl	-32(%ebp),%esi
+	movl	(%ebp),%edi
+	movl	32(%ebp),%esi
+	movl	64(%ebp),%edi
+	movl	96(%ebp),%esi
+.align	16
+.L000loop:
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+
+	movl	%ebx,%esi
+	andl	$255,%esi
+	shrl	$16,%ebx
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%ch,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+
+	movl	%ecx,%esi
+	andl	$255,%esi
+	shrl	$24,%ecx
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edx
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+
+	andl	$255,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	movzbl	%ah,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$8,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	movzbl	-128(%ebp,%ecx,1),%ecx
+	shll	$24,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+
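+# (hedged) On-the-fly MixColumns: 0x80808080 (2155905152) extracts each
+# byte's carry bit, 0xfefefefe (4278124286) masks the doubled bytes, and
+# 0x1b1b1b1b (454761243) folds the AES polynomial back in -- xtime on
+# four bytes in parallel, after which the rol/ror-xor chain mixes 2x and
+# 3x with rotated copies of x.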
+	movl	$2155905152,%ebp
+	andl	%ecx,%ebp
+	leal	(%ecx,%ecx,1),%edi
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	andl	$4278124286,%edi
+	subl	%ebp,%esi
+	movl	%ecx,%ebp
+	andl	$454761243,%esi
+	rorl	$16,%ebp
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	xorl	%esi,%ecx
+	rorl	$24,%edi
+	xorl	%ebp,%esi
+	roll	$24,%ecx
+	xorl	%edi,%esi
+	movl	$2155905152,%ebp
+	xorl	%esi,%ecx
+	andl	%edx,%ebp
+	leal	(%edx,%edx,1),%edi
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	andl	$4278124286,%edi
+	subl	%ebp,%esi
+	movl	%edx,%ebp
+	andl	$454761243,%esi
+	rorl	$16,%ebp
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	xorl	%esi,%edx
+	rorl	$24,%edi
+	xorl	%ebp,%esi
+	roll	$24,%edx
+	xorl	%edi,%esi
+	movl	$2155905152,%ebp
+	xorl	%esi,%edx
+	andl	%eax,%ebp
+	leal	(%eax,%eax,1),%edi
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	andl	$4278124286,%edi
+	subl	%ebp,%esi
+	movl	%eax,%ebp
+	andl	$454761243,%esi
+	rorl	$16,%ebp
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	xorl	%esi,%eax
+	rorl	$24,%edi
+	xorl	%ebp,%esi
+	roll	$24,%eax
+	xorl	%edi,%esi
+	movl	$2155905152,%ebp
+	xorl	%esi,%eax
+	andl	%ebx,%ebp
+	leal	(%ebx,%ebx,1),%edi
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	andl	$4278124286,%edi
+	subl	%ebp,%esi
+	movl	%ebx,%ebp
+	andl	$454761243,%esi
+	rorl	$16,%ebp
+	xorl	%edi,%esi
+	movl	%ebx,%edi
+	xorl	%esi,%ebx
+	rorl	$24,%edi
+	xorl	%ebp,%esi
+	roll	$24,%ebx
+	xorl	%edi,%esi
+	xorl	%esi,%ebx
+	movl	20(%esp),%edi
+	movl	28(%esp),%ebp
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	cmpl	24(%esp),%edi
+	movl	%edi,20(%esp)
+	jb	.L000loop
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+
+	movl	%ebx,%esi
+	andl	$255,%esi
+	shrl	$16,%ebx
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%ch,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+
+	movl	%ecx,%esi
+	andl	$255,%esi
+	shrl	$24,%ecx
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edx
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	movzbl	%ah,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$8,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	movzbl	-128(%ebp,%ecx,1),%ecx
+	shll	$24,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+
+	xorl	16(%edi),%eax
+	xorl	20(%edi),%ebx
+	xorl	24(%edi),%ecx
+	xorl	28(%edi),%edx
+	ret
+.size	_x86_AES_encrypt_compact,.-_x86_AES_encrypt_compact
+.hidden	_sse_AES_encrypt_compact
+.type	_sse_AES_encrypt_compact,@function
+.align	16
+_sse_AES_encrypt_compact:
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+	movl	$454761243,%eax
+	movl	%eax,8(%esp)
+	movl	%eax,12(%esp)
+	movl	-128(%ebp),%eax
+	movl	-96(%ebp),%ebx
+	movl	-64(%ebp),%ecx
+	movl	-32(%ebp),%edx
+	movl	(%ebp),%eax
+	movl	32(%ebp),%ebx
+	movl	64(%ebp),%ecx
+	movl	96(%ebp),%edx
+.align	16
+.L001loop:
+	pshufw	$8,%mm0,%mm1
+	pshufw	$13,%mm4,%mm5
+	movd	%mm1,%eax
+	movd	%mm5,%ebx
+	movl	%edi,20(%esp)
+	movzbl	%al,%esi
+	movzbl	%ah,%edx
+	pshufw	$13,%mm0,%mm2
+	movzbl	-128(%ebp,%esi,1),%ecx
+	movzbl	%bl,%edi
+	movzbl	-128(%ebp,%edx,1),%edx
+	shrl	$16,%eax
+	shll	$8,%edx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bh,%edi
+	shll	$16,%esi
+	pshufw	$8,%mm4,%mm6
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%ah,%edi
+	shll	$24,%esi
+	shrl	$16,%ebx
+	orl	%esi,%edx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bh,%edi
+	shll	$8,%esi
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%al,%edi
+	shll	$24,%esi
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bl,%edi
+	movd	%mm2,%eax
+	movd	%ecx,%mm0
+	movzbl	-128(%ebp,%edi,1),%ecx
+	movzbl	%ah,%edi
+	shll	$16,%ecx
+	movd	%mm6,%ebx
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bh,%edi
+	shll	$24,%esi
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bl,%edi
+	shll	$8,%esi
+	shrl	$16,%ebx
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%al,%edi
+	shrl	$16,%eax
+	movd	%ecx,%mm1
+	movzbl	-128(%ebp,%edi,1),%ecx
+	movzbl	%ah,%edi
+	shll	$16,%ecx
+	andl	$255,%eax
+	orl	%esi,%ecx
+	punpckldq	%mm1,%mm0
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bh,%edi
+	shll	$24,%esi
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%eax,1),%eax
+	orl	%esi,%ecx
+	shll	$16,%eax
+	movzbl	-128(%ebp,%edi,1),%esi
+	orl	%eax,%edx
+	shll	$8,%esi
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	orl	%esi,%ecx
+	orl	%ebx,%edx
+	movl	20(%esp),%edi
+	movd	%ecx,%mm4
+	movd	%edx,%mm5
+	punpckldq	%mm5,%mm4
+	addl	$16,%edi
+	cmpl	24(%esp),%edi
+	ja	.L002out
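+# (hedged) MMX MixColumns: pcmpgtb against zero flags each byte whose
+# high bit is set, pand with the replicated 0x1b builds the polynomial
+# correction, and paddb doubles -- xtime on eight bytes per register.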
+	movq	8(%esp),%mm2
+	pxor	%mm3,%mm3
+	pxor	%mm7,%mm7
+	movq	%mm0,%mm1
+	movq	%mm4,%mm5
+	pcmpgtb	%mm0,%mm3
+	pcmpgtb	%mm4,%mm7
+	pand	%mm2,%mm3
+	pand	%mm2,%mm7
+	pshufw	$177,%mm0,%mm2
+	pshufw	$177,%mm4,%mm6
+	paddb	%mm0,%mm0
+	paddb	%mm4,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pshufw	$177,%mm2,%mm3
+	pshufw	$177,%mm6,%mm7
+	pxor	%mm0,%mm1
+	pxor	%mm4,%mm5
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	movq	%mm3,%mm2
+	movq	%mm7,%mm6
+	pslld	$8,%mm3
+	pslld	$8,%mm7
+	psrld	$24,%mm2
+	psrld	$24,%mm6
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	movq	%mm1,%mm3
+	movq	%mm5,%mm7
+	movq	(%edi),%mm2
+	movq	8(%edi),%mm6
+	psrld	$8,%mm1
+	psrld	$8,%mm5
+	movl	-128(%ebp),%eax
+	pslld	$24,%mm3
+	pslld	$24,%mm7
+	movl	-64(%ebp),%ebx
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	movl	(%ebp),%ecx
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	movl	64(%ebp),%edx
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	jmp	.L001loop
+.align	16
+.L002out:
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	ret
+.size	_sse_AES_encrypt_compact,.-_sse_AES_encrypt_compact
+.hidden	_x86_AES_encrypt
+.type	_x86_AES_encrypt,@function
+.align	16
+_x86_AES_encrypt:
+	movl	%edi,20(%esp)
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+.align	16
+.L003loop:
+	movl	%eax,%esi
+	andl	$255,%esi
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%bh,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+	movl	%esi,4(%esp)
+
+	movl	%ebx,%esi
+	andl	$255,%esi
+	shrl	$16,%ebx
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%ch,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%eax,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+	movl	%esi,8(%esp)
+
+	movl	%ecx,%esi
+	andl	$255,%esi
+	shrl	$24,%ecx
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%dh,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edx
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movzbl	%bh,%edi
+	xorl	1(%ebp,%edi,8),%esi
+
+	movl	20(%esp),%edi
+	movl	(%ebp,%edx,8),%edx
+	movzbl	%ah,%eax
+	xorl	3(%ebp,%eax,8),%edx
+	movl	4(%esp),%eax
+	andl	$255,%ebx
+	xorl	2(%ebp,%ebx,8),%edx
+	movl	8(%esp),%ebx
+	xorl	1(%ebp,%ecx,8),%edx
+	movl	%esi,%ecx
+
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	cmpl	24(%esp),%edi
+	movl	%edi,20(%esp)
+	jb	.L003loop
+	movl	%eax,%esi
+	andl	$255,%esi
+	movl	2(%ebp,%esi,8),%esi
+	andl	$255,%esi
+	movzbl	%bh,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$65280,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$16711680,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movl	2(%ebp,%edi,8),%edi
+	andl	$4278190080,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	shrl	$16,%ebx
+	movl	2(%ebp,%esi,8),%esi
+	andl	$255,%esi
+	movzbl	%ch,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$65280,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$16711680,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$24,%edi
+	movl	2(%ebp,%edi,8),%edi
+	andl	$4278190080,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	shrl	$24,%ecx
+	movl	2(%ebp,%esi,8),%esi
+	andl	$255,%esi
+	movzbl	%dh,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$65280,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edx
+	andl	$255,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$16711680,%edi
+	xorl	%edi,%esi
+	movzbl	%bh,%edi
+	movl	2(%ebp,%edi,8),%edi
+	andl	$4278190080,%edi
+	xorl	%edi,%esi
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movl	2(%ebp,%edx,8),%edx
+	andl	$255,%edx
+	movzbl	%ah,%eax
+	movl	(%ebp,%eax,8),%eax
+	andl	$65280,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	andl	$255,%ebx
+	movl	(%ebp,%ebx,8),%ebx
+	andl	$16711680,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	movl	2(%ebp,%ecx,8),%ecx
+	andl	$4278190080,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	ret
+.align	64
+.LAES_Te:
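+# (hedged) Each entry is stored twice, so with the 8-byte stride the
+# byte offsets 1..3 used above (e.g. "xorl 3(%ebp,%edi,8)") read
+# rotated views of the same word -- four T-tables folded into one.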
+.long	2774754246,2774754246
+.long	2222750968,2222750968
+.long	2574743534,2574743534
+.long	2373680118,2373680118
+.long	234025727,234025727
+.long	3177933782,3177933782
+.long	2976870366,2976870366
+.long	1422247313,1422247313
+.long	1345335392,1345335392
+.long	50397442,50397442
+.long	2842126286,2842126286
+.long	2099981142,2099981142
+.long	436141799,436141799
+.long	1658312629,1658312629
+.long	3870010189,3870010189
+.long	2591454956,2591454956
+.long	1170918031,1170918031
+.long	2642575903,2642575903
+.long	1086966153,1086966153
+.long	2273148410,2273148410
+.long	368769775,368769775
+.long	3948501426,3948501426
+.long	3376891790,3376891790
+.long	200339707,200339707
+.long	3970805057,3970805057
+.long	1742001331,1742001331
+.long	4255294047,4255294047
+.long	3937382213,3937382213
+.long	3214711843,3214711843
+.long	4154762323,4154762323
+.long	2524082916,2524082916
+.long	1539358875,1539358875
+.long	3266819957,3266819957
+.long	486407649,486407649
+.long	2928907069,2928907069
+.long	1780885068,1780885068
+.long	1513502316,1513502316
+.long	1094664062,1094664062
+.long	49805301,49805301
+.long	1338821763,1338821763
+.long	1546925160,1546925160
+.long	4104496465,4104496465
+.long	887481809,887481809
+.long	150073849,150073849
+.long	2473685474,2473685474
+.long	1943591083,1943591083
+.long	1395732834,1395732834
+.long	1058346282,1058346282
+.long	201589768,201589768
+.long	1388824469,1388824469
+.long	1696801606,1696801606
+.long	1589887901,1589887901
+.long	672667696,672667696
+.long	2711000631,2711000631
+.long	251987210,251987210
+.long	3046808111,3046808111
+.long	151455502,151455502
+.long	907153956,907153956
+.long	2608889883,2608889883
+.long	1038279391,1038279391
+.long	652995533,652995533
+.long	1764173646,1764173646
+.long	3451040383,3451040383
+.long	2675275242,2675275242
+.long	453576978,453576978
+.long	2659418909,2659418909
+.long	1949051992,1949051992
+.long	773462580,773462580
+.long	756751158,756751158
+.long	2993581788,2993581788
+.long	3998898868,3998898868
+.long	4221608027,4221608027
+.long	4132590244,4132590244
+.long	1295727478,1295727478
+.long	1641469623,1641469623
+.long	3467883389,3467883389
+.long	2066295122,2066295122
+.long	1055122397,1055122397
+.long	1898917726,1898917726
+.long	2542044179,2542044179
+.long	4115878822,4115878822
+.long	1758581177,1758581177
+.long	0,0
+.long	753790401,753790401
+.long	1612718144,1612718144
+.long	536673507,536673507
+.long	3367088505,3367088505
+.long	3982187446,3982187446
+.long	3194645204,3194645204
+.long	1187761037,1187761037
+.long	3653156455,3653156455
+.long	1262041458,1262041458
+.long	3729410708,3729410708
+.long	3561770136,3561770136
+.long	3898103984,3898103984
+.long	1255133061,1255133061
+.long	1808847035,1808847035
+.long	720367557,720367557
+.long	3853167183,3853167183
+.long	385612781,385612781
+.long	3309519750,3309519750
+.long	3612167578,3612167578
+.long	1429418854,1429418854
+.long	2491778321,2491778321
+.long	3477423498,3477423498
+.long	284817897,284817897
+.long	100794884,100794884
+.long	2172616702,2172616702
+.long	4031795360,4031795360
+.long	1144798328,1144798328
+.long	3131023141,3131023141
+.long	3819481163,3819481163
+.long	4082192802,4082192802
+.long	4272137053,4272137053
+.long	3225436288,3225436288
+.long	2324664069,2324664069
+.long	2912064063,2912064063
+.long	3164445985,3164445985
+.long	1211644016,1211644016
+.long	83228145,83228145
+.long	3753688163,3753688163
+.long	3249976951,3249976951
+.long	1977277103,1977277103
+.long	1663115586,1663115586
+.long	806359072,806359072
+.long	452984805,452984805
+.long	250868733,250868733
+.long	1842533055,1842533055
+.long	1288555905,1288555905
+.long	336333848,336333848
+.long	890442534,890442534
+.long	804056259,804056259
+.long	3781124030,3781124030
+.long	2727843637,2727843637
+.long	3427026056,3427026056
+.long	957814574,957814574
+.long	1472513171,1472513171
+.long	4071073621,4071073621
+.long	2189328124,2189328124
+.long	1195195770,1195195770
+.long	2892260552,2892260552
+.long	3881655738,3881655738
+.long	723065138,723065138
+.long	2507371494,2507371494
+.long	2690670784,2690670784
+.long	2558624025,2558624025
+.long	3511635870,3511635870
+.long	2145180835,2145180835
+.long	1713513028,1713513028
+.long	2116692564,2116692564
+.long	2878378043,2878378043
+.long	2206763019,2206763019
+.long	3393603212,3393603212
+.long	703524551,703524551
+.long	3552098411,3552098411
+.long	1007948840,1007948840
+.long	2044649127,2044649127
+.long	3797835452,3797835452
+.long	487262998,487262998
+.long	1994120109,1994120109
+.long	1004593371,1004593371
+.long	1446130276,1446130276
+.long	1312438900,1312438900
+.long	503974420,503974420
+.long	3679013266,3679013266
+.long	168166924,168166924
+.long	1814307912,1814307912
+.long	3831258296,3831258296
+.long	1573044895,1573044895
+.long	1859376061,1859376061
+.long	4021070915,4021070915
+.long	2791465668,2791465668
+.long	2828112185,2828112185
+.long	2761266481,2761266481
+.long	937747667,937747667
+.long	2339994098,2339994098
+.long	854058965,854058965
+.long	1137232011,1137232011
+.long	1496790894,1496790894
+.long	3077402074,3077402074
+.long	2358086913,2358086913
+.long	1691735473,1691735473
+.long	3528347292,3528347292
+.long	3769215305,3769215305
+.long	3027004632,3027004632
+.long	4199962284,4199962284
+.long	133494003,133494003
+.long	636152527,636152527
+.long	2942657994,2942657994
+.long	2390391540,2390391540
+.long	3920539207,3920539207
+.long	403179536,403179536
+.long	3585784431,3585784431
+.long	2289596656,2289596656
+.long	1864705354,1864705354
+.long	1915629148,1915629148
+.long	605822008,605822008
+.long	4054230615,4054230615
+.long	3350508659,3350508659
+.long	1371981463,1371981463
+.long	602466507,602466507
+.long	2094914977,2094914977
+.long	2624877800,2624877800
+.long	555687742,555687742
+.long	3712699286,3712699286
+.long	3703422305,3703422305
+.long	2257292045,2257292045
+.long	2240449039,2240449039
+.long	2423288032,2423288032
+.long	1111375484,1111375484
+.long	3300242801,3300242801
+.long	2858837708,2858837708
+.long	3628615824,3628615824
+.long	84083462,84083462
+.long	32962295,32962295
+.long	302911004,302911004
+.long	2741068226,2741068226
+.long	1597322602,1597322602
+.long	4183250862,4183250862
+.long	3501832553,3501832553
+.long	2441512471,2441512471
+.long	1489093017,1489093017
+.long	656219450,656219450
+.long	3114180135,3114180135
+.long	954327513,954327513
+.long	335083755,335083755
+.long	3013122091,3013122091
+.long	856756514,856756514
+.long	3144247762,3144247762
+.long	1893325225,1893325225
+.long	2307821063,2307821063
+.long	2811532339,2811532339
+.long	3063651117,3063651117
+.long	572399164,572399164
+.long	2458355477,2458355477
+.long	552200649,552200649
+.long	1238290055,1238290055
+.long	4283782570,4283782570
+.long	2015897680,2015897680
+.long	2061492133,2061492133
+.long	2408352771,2408352771
+.long	4171342169,4171342169
+.long	2156497161,2156497161
+.long	386731290,386731290
+.long	3669999461,3669999461
+.long	837215959,837215959
+.long	3326231172,3326231172
+.long	3093850320,3093850320
+.long	3275833730,3275833730
+.long	2962856233,2962856233
+.long	1999449434,1999449434
+.long	286199582,286199582
+.long	3417354363,3417354363
+.long	4233385128,4233385128
+.long	3602627437,3602627437
+.long	974525996,974525996
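+# (hedged) Four identical 256-byte S-box copies follow; aes_nohw_encrypt
+# below picks one (the "andl $768" selection) so that the copy in use
+# and the caller's stack fall into distinct cache lines.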
+.byte	99,124,119,123,242,107,111,197
+.byte	48,1,103,43,254,215,171,118
+.byte	202,130,201,125,250,89,71,240
+.byte	173,212,162,175,156,164,114,192
+.byte	183,253,147,38,54,63,247,204
+.byte	52,165,229,241,113,216,49,21
+.byte	4,199,35,195,24,150,5,154
+.byte	7,18,128,226,235,39,178,117
+.byte	9,131,44,26,27,110,90,160
+.byte	82,59,214,179,41,227,47,132
+.byte	83,209,0,237,32,252,177,91
+.byte	106,203,190,57,74,76,88,207
+.byte	208,239,170,251,67,77,51,133
+.byte	69,249,2,127,80,60,159,168
+.byte	81,163,64,143,146,157,56,245
+.byte	188,182,218,33,16,255,243,210
+.byte	205,12,19,236,95,151,68,23
+.byte	196,167,126,61,100,93,25,115
+.byte	96,129,79,220,34,42,144,136
+.byte	70,238,184,20,222,94,11,219
+.byte	224,50,58,10,73,6,36,92
+.byte	194,211,172,98,145,149,228,121
+.byte	231,200,55,109,141,213,78,169
+.byte	108,86,244,234,101,122,174,8
+.byte	186,120,37,46,28,166,180,198
+.byte	232,221,116,31,75,189,139,138
+.byte	112,62,181,102,72,3,246,14
+.byte	97,53,87,185,134,193,29,158
+.byte	225,248,152,17,105,217,142,148
+.byte	155,30,135,233,206,85,40,223
+.byte	140,161,137,13,191,230,66,104
+.byte	65,153,45,15,176,84,187,22
+.byte	99,124,119,123,242,107,111,197
+.byte	48,1,103,43,254,215,171,118
+.byte	202,130,201,125,250,89,71,240
+.byte	173,212,162,175,156,164,114,192
+.byte	183,253,147,38,54,63,247,204
+.byte	52,165,229,241,113,216,49,21
+.byte	4,199,35,195,24,150,5,154
+.byte	7,18,128,226,235,39,178,117
+.byte	9,131,44,26,27,110,90,160
+.byte	82,59,214,179,41,227,47,132
+.byte	83,209,0,237,32,252,177,91
+.byte	106,203,190,57,74,76,88,207
+.byte	208,239,170,251,67,77,51,133
+.byte	69,249,2,127,80,60,159,168
+.byte	81,163,64,143,146,157,56,245
+.byte	188,182,218,33,16,255,243,210
+.byte	205,12,19,236,95,151,68,23
+.byte	196,167,126,61,100,93,25,115
+.byte	96,129,79,220,34,42,144,136
+.byte	70,238,184,20,222,94,11,219
+.byte	224,50,58,10,73,6,36,92
+.byte	194,211,172,98,145,149,228,121
+.byte	231,200,55,109,141,213,78,169
+.byte	108,86,244,234,101,122,174,8
+.byte	186,120,37,46,28,166,180,198
+.byte	232,221,116,31,75,189,139,138
+.byte	112,62,181,102,72,3,246,14
+.byte	97,53,87,185,134,193,29,158
+.byte	225,248,152,17,105,217,142,148
+.byte	155,30,135,233,206,85,40,223
+.byte	140,161,137,13,191,230,66,104
+.byte	65,153,45,15,176,84,187,22
+.byte	99,124,119,123,242,107,111,197
+.byte	48,1,103,43,254,215,171,118
+.byte	202,130,201,125,250,89,71,240
+.byte	173,212,162,175,156,164,114,192
+.byte	183,253,147,38,54,63,247,204
+.byte	52,165,229,241,113,216,49,21
+.byte	4,199,35,195,24,150,5,154
+.byte	7,18,128,226,235,39,178,117
+.byte	9,131,44,26,27,110,90,160
+.byte	82,59,214,179,41,227,47,132
+.byte	83,209,0,237,32,252,177,91
+.byte	106,203,190,57,74,76,88,207
+.byte	208,239,170,251,67,77,51,133
+.byte	69,249,2,127,80,60,159,168
+.byte	81,163,64,143,146,157,56,245
+.byte	188,182,218,33,16,255,243,210
+.byte	205,12,19,236,95,151,68,23
+.byte	196,167,126,61,100,93,25,115
+.byte	96,129,79,220,34,42,144,136
+.byte	70,238,184,20,222,94,11,219
+.byte	224,50,58,10,73,6,36,92
+.byte	194,211,172,98,145,149,228,121
+.byte	231,200,55,109,141,213,78,169
+.byte	108,86,244,234,101,122,174,8
+.byte	186,120,37,46,28,166,180,198
+.byte	232,221,116,31,75,189,139,138
+.byte	112,62,181,102,72,3,246,14
+.byte	97,53,87,185,134,193,29,158
+.byte	225,248,152,17,105,217,142,148
+.byte	155,30,135,233,206,85,40,223
+.byte	140,161,137,13,191,230,66,104
+.byte	65,153,45,15,176,84,187,22
+.byte	99,124,119,123,242,107,111,197
+.byte	48,1,103,43,254,215,171,118
+.byte	202,130,201,125,250,89,71,240
+.byte	173,212,162,175,156,164,114,192
+.byte	183,253,147,38,54,63,247,204
+.byte	52,165,229,241,113,216,49,21
+.byte	4,199,35,195,24,150,5,154
+.byte	7,18,128,226,235,39,178,117
+.byte	9,131,44,26,27,110,90,160
+.byte	82,59,214,179,41,227,47,132
+.byte	83,209,0,237,32,252,177,91
+.byte	106,203,190,57,74,76,88,207
+.byte	208,239,170,251,67,77,51,133
+.byte	69,249,2,127,80,60,159,168
+.byte	81,163,64,143,146,157,56,245
+.byte	188,182,218,33,16,255,243,210
+.byte	205,12,19,236,95,151,68,23
+.byte	196,167,126,61,100,93,25,115
+.byte	96,129,79,220,34,42,144,136
+.byte	70,238,184,20,222,94,11,219
+.byte	224,50,58,10,73,6,36,92
+.byte	194,211,172,98,145,149,228,121
+.byte	231,200,55,109,141,213,78,169
+.byte	108,86,244,234,101,122,174,8
+.byte	186,120,37,46,28,166,180,198
+.byte	232,221,116,31,75,189,139,138
+.byte	112,62,181,102,72,3,246,14
+.byte	97,53,87,185,134,193,29,158
+.byte	225,248,152,17,105,217,142,148
+.byte	155,30,135,233,206,85,40,223
+.byte	140,161,137,13,191,230,66,104
+.byte	65,153,45,15,176,84,187,22
+.long	1,2,4,8
+.long	16,32,64,128
+.long	27,54,0,0
+.long	0,0,0,0
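+# (hedged) The four .long rows above hold what look like the
+# key-schedule round constants rcon = 1,2,4,...,27,54, zero padded.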
+.size	_x86_AES_encrypt,.-_x86_AES_encrypt
+.globl	aes_nohw_encrypt
+.hidden	aes_nohw_encrypt
+.type	aes_nohw_encrypt,@function
+.align	16
+aes_nohw_encrypt:
+.L_aes_nohw_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	28(%esp),%edi
+	movl	%esp,%eax
+	subl	$36,%esp
+	andl	$-64,%esp
+	leal	-127(%edi),%ebx
+	subl	%esp,%ebx
+	negl	%ebx
+	andl	$960,%ebx
+	subl	%ebx,%esp
+	addl	$4,%esp
+	movl	%eax,28(%esp)
+	call	.L004pic_point
+.L004pic_point:
+	popl	%ebp
+	leal	OPENSSL_ia32cap_P-.L004pic_point(%ebp),%eax
+	leal	.LAES_Te-.L004pic_point(%ebp),%ebp
+	leal	764(%esp),%ebx
+	subl	%ebp,%ebx
+	andl	$768,%ebx
+	leal	2176(%ebp,%ebx,1),%ebp
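+# (hedged, low confidence) The pointer arithmetic above aligns the
+# stack to 64 bytes and then offsets both the frame and the table
+# pointer so the selected S-box copy and the local stack map to
+# different cache lines.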
+	btl	$25,(%eax)
+	jnc	.L005x86
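+# (bit 25 of OPENSSL_ia32cap_P word 0 is CPUID's SSE flag; the pshufw
+# path below needs it, otherwise we take the integer .L005x86 path)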
+	movq	(%esi),%mm0
+	movq	8(%esi),%mm4
+	call	_sse_AES_encrypt_compact
+	movl	28(%esp),%esp
+	movl	24(%esp),%esi
+	movq	%mm0,(%esi)
+	movq	%mm4,8(%esi)
+	emms
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	16
+.L005x86:
+	movl	%ebp,24(%esp)
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	call	_x86_AES_encrypt_compact
+	movl	28(%esp),%esp
+	movl	24(%esp),%esi
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	aes_nohw_encrypt,.-.L_aes_nohw_encrypt_begin
+.hidden	_x86_AES_decrypt_compact
+.type	_x86_AES_decrypt_compact,@function
+.align	16
+_x86_AES_decrypt_compact:
+	movl	%edi,20(%esp)
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+	movl	-128(%ebp),%edi
+	movl	-96(%ebp),%esi
+	movl	-64(%ebp),%edi
+	movl	-32(%ebp),%esi
+	movl	(%ebp),%edi
+	movl	32(%ebp),%esi
+	movl	64(%ebp),%edi
+	movl	96(%ebp),%esi
+.align	16
+.L006loop:
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ebx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%ah,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	andl	$255,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	movzbl	%ch,%ecx
+	movzbl	-128(%ebp,%ecx,1),%ecx
+	shll	$8,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+	shrl	$16,%ebx
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	shrl	$24,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$24,%eax
+	xorl	%eax,%edx
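+# (hedged) On-the-fly InvMixColumns: three chained xtime steps with the
+# same 0x80808080/0xfefefefe/0x1b1b1b1b masks build 2x, 4x and 8x, and
+# the rotate-xor ladder below combines them into 9x, 11x, 13x and 14x.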
+	movl	$2155905152,%edi
+	andl	%ecx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%ecx,%ecx,1),%eax
+	subl	%edi,%esi
+	andl	$4278124286,%eax
+	andl	$454761243,%esi
+	xorl	%esi,%eax
+	movl	$2155905152,%edi
+	andl	%eax,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%eax,%eax,1),%ebx
+	subl	%edi,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	xorl	%ecx,%eax
+	xorl	%esi,%ebx
+	movl	$2155905152,%edi
+	andl	%ebx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%ebx,%ebx,1),%ebp
+	subl	%edi,%esi
+	andl	$4278124286,%ebp
+	andl	$454761243,%esi
+	xorl	%ecx,%ebx
+	roll	$8,%ecx
+	xorl	%esi,%ebp
+	xorl	%eax,%ecx
+	xorl	%ebp,%eax
+	xorl	%ebx,%ecx
+	xorl	%ebp,%ebx
+	roll	$24,%eax
+	xorl	%ebp,%ecx
+	roll	$16,%ebx
+	xorl	%eax,%ecx
+	roll	$8,%ebp
+	xorl	%ebx,%ecx
+	movl	4(%esp),%eax
+	xorl	%ebp,%ecx
+	movl	%ecx,12(%esp)
+	movl	$2155905152,%edi
+	andl	%edx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%edx,%edx,1),%ebx
+	subl	%edi,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	xorl	%esi,%ebx
+	movl	$2155905152,%edi
+	andl	%ebx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%ebx,%ebx,1),%ecx
+	subl	%edi,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%edx,%ebx
+	xorl	%esi,%ecx
+	movl	$2155905152,%edi
+	andl	%ecx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%ecx,%ecx,1),%ebp
+	subl	%edi,%esi
+	andl	$4278124286,%ebp
+	andl	$454761243,%esi
+	xorl	%edx,%ecx
+	roll	$8,%edx
+	xorl	%esi,%ebp
+	xorl	%ebx,%edx
+	xorl	%ebp,%ebx
+	xorl	%ecx,%edx
+	xorl	%ebp,%ecx
+	roll	$24,%ebx
+	xorl	%ebp,%edx
+	roll	$16,%ecx
+	xorl	%ebx,%edx
+	roll	$8,%ebp
+	xorl	%ecx,%edx
+	movl	8(%esp),%ebx
+	xorl	%ebp,%edx
+	movl	%edx,16(%esp)
+	movl	$2155905152,%edi
+	andl	%eax,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%eax,%eax,1),%ecx
+	subl	%edi,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%esi,%ecx
+	movl	$2155905152,%edi
+	andl	%ecx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%ecx,%ecx,1),%edx
+	subl	%edi,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	xorl	%eax,%ecx
+	xorl	%esi,%edx
+	movl	$2155905152,%edi
+	andl	%edx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%edx,%edx,1),%ebp
+	subl	%edi,%esi
+	andl	$4278124286,%ebp
+	andl	$454761243,%esi
+	xorl	%eax,%edx
+	roll	$8,%eax
+	xorl	%esi,%ebp
+	xorl	%ecx,%eax
+	xorl	%ebp,%ecx
+	xorl	%edx,%eax
+	xorl	%ebp,%edx
+	roll	$24,%ecx
+	xorl	%ebp,%eax
+	roll	$16,%edx
+	xorl	%ecx,%eax
+	roll	$8,%ebp
+	xorl	%edx,%eax
+	xorl	%ebp,%eax
+	movl	$2155905152,%edi
+	andl	%ebx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%ebx,%ebx,1),%ecx
+	subl	%edi,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%esi,%ecx
+	movl	$2155905152,%edi
+	andl	%ecx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%ecx,%ecx,1),%edx
+	subl	%edi,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	xorl	%ebx,%ecx
+	xorl	%esi,%edx
+	movl	$2155905152,%edi
+	andl	%edx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%edx,%edx,1),%ebp
+	subl	%edi,%esi
+	andl	$4278124286,%ebp
+	andl	$454761243,%esi
+	xorl	%ebx,%edx
+	roll	$8,%ebx
+	xorl	%esi,%ebp
+	xorl	%ecx,%ebx
+	xorl	%ebp,%ecx
+	xorl	%edx,%ebx
+	xorl	%ebp,%edx
+	roll	$24,%ecx
+	xorl	%ebp,%ebx
+	roll	$16,%edx
+	xorl	%ecx,%ebx
+	roll	$8,%ebp
+	xorl	%edx,%ebx
+	movl	12(%esp),%ecx
+	xorl	%ebp,%ebx
+	movl	16(%esp),%edx
+	movl	20(%esp),%edi
+	movl	28(%esp),%ebp
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	cmpl	24(%esp),%edi
+	movl	%edi,20(%esp)
+	jb	.L006loop
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ebx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%ah,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	movzbl	%ch,%ecx
+	movzbl	-128(%ebp,%ecx,1),%ecx
+	shll	$8,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+	shrl	$16,%ebx
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	shrl	$24,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$24,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	xorl	16(%edi),%eax
+	xorl	20(%edi),%ebx
+	xorl	24(%edi),%ecx
+	xorl	28(%edi),%edx
+	ret
+.size	_x86_AES_decrypt_compact,.-_x86_AES_decrypt_compact
+.hidden	_sse_AES_decrypt_compact
+.type	_sse_AES_decrypt_compact,@function
+.align	16
+_sse_AES_decrypt_compact:
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+	movl	$454761243,%eax
+	movl	%eax,8(%esp)
+	movl	%eax,12(%esp)
+	movl	-128(%ebp),%eax
+	movl	-96(%ebp),%ebx
+	movl	-64(%ebp),%ecx
+	movl	-32(%ebp),%edx
+	movl	(%ebp),%eax
+	movl	32(%ebp),%ebx
+	movl	64(%ebp),%ecx
+	movl	96(%ebp),%edx
+.align	16
+.L007loop:
+	pshufw	$12,%mm0,%mm1
+	pshufw	$9,%mm4,%mm5
+	movd	%mm1,%eax
+	movd	%mm5,%ebx
+	movl	%edi,20(%esp)
+	movzbl	%al,%esi
+	movzbl	%ah,%edx
+	pshufw	$6,%mm0,%mm2
+	movzbl	-128(%ebp,%esi,1),%ecx
+	movzbl	%bl,%edi
+	movzbl	-128(%ebp,%edx,1),%edx
+	shrl	$16,%eax
+	shll	$8,%edx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bh,%edi
+	shll	$16,%esi
+	pshufw	$3,%mm4,%mm6
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%ah,%edi
+	shll	$24,%esi
+	shrl	$16,%ebx
+	orl	%esi,%edx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bh,%edi
+	shll	$24,%esi
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%al,%edi
+	shll	$8,%esi
+	movd	%mm2,%eax
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bl,%edi
+	shll	$16,%esi
+	movd	%mm6,%ebx
+	movd	%ecx,%mm0
+	movzbl	-128(%ebp,%edi,1),%ecx
+	movzbl	%al,%edi
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bl,%edi
+	orl	%esi,%edx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%ah,%edi
+	shll	$16,%esi
+	shrl	$16,%eax
+	orl	%esi,%edx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bh,%edi
+	shrl	$16,%ebx
+	shll	$8,%esi
+	movd	%edx,%mm1
+	movzbl	-128(%ebp,%edi,1),%edx
+	movzbl	%bh,%edi
+	shll	$24,%edx
+	andl	$255,%ebx
+	orl	%esi,%edx
+	punpckldq	%mm1,%mm0
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%al,%edi
+	shll	$8,%esi
+	movzbl	%ah,%eax
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	orl	%ebx,%edx
+	shll	$16,%esi
+	movzbl	-128(%ebp,%eax,1),%eax
+	orl	%esi,%edx
+	shll	$24,%eax
+	orl	%eax,%ecx
+	movl	20(%esp),%edi
+	movd	%edx,%mm4
+	movd	%ecx,%mm5
+	punpckldq	%mm5,%mm4
+	addl	$16,%edi
+	cmpl	24(%esp),%edi
+	ja	.L008out
+	movq	%mm0,%mm3
+	movq	%mm4,%mm7
+	pshufw	$228,%mm0,%mm2
+	pshufw	$228,%mm4,%mm6
+	movq	%mm0,%mm1
+	movq	%mm4,%mm5
+	pshufw	$177,%mm0,%mm0
+	pshufw	$177,%mm4,%mm4
+	pslld	$8,%mm2
+	pslld	$8,%mm6
+	psrld	$8,%mm3
+	psrld	$8,%mm7
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pslld	$16,%mm2
+	pslld	$16,%mm6
+	psrld	$16,%mm3
+	psrld	$16,%mm7
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	movq	8(%esp),%mm3
+	pxor	%mm2,%mm2
+	pxor	%mm6,%mm6
+	pcmpgtb	%mm1,%mm2
+	pcmpgtb	%mm5,%mm6
+	pand	%mm3,%mm2
+	pand	%mm3,%mm6
+	paddb	%mm1,%mm1
+	paddb	%mm5,%mm5
+	pxor	%mm2,%mm1
+	pxor	%mm6,%mm5
+	movq	%mm1,%mm3
+	movq	%mm5,%mm7
+	movq	%mm1,%mm2
+	movq	%mm5,%mm6
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	pslld	$24,%mm3
+	pslld	$24,%mm7
+	psrld	$8,%mm2
+	psrld	$8,%mm6
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	movq	8(%esp),%mm2
+	pxor	%mm3,%mm3
+	pxor	%mm7,%mm7
+	pcmpgtb	%mm1,%mm3
+	pcmpgtb	%mm5,%mm7
+	pand	%mm2,%mm3
+	pand	%mm2,%mm7
+	paddb	%mm1,%mm1
+	paddb	%mm5,%mm5
+	pxor	%mm3,%mm1
+	pxor	%mm7,%mm5
+	pshufw	$177,%mm1,%mm3
+	pshufw	$177,%mm5,%mm7
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pxor	%mm3,%mm3
+	pxor	%mm7,%mm7
+	pcmpgtb	%mm1,%mm3
+	pcmpgtb	%mm5,%mm7
+	pand	%mm2,%mm3
+	pand	%mm2,%mm7
+	paddb	%mm1,%mm1
+	paddb	%mm5,%mm5
+	pxor	%mm3,%mm1
+	pxor	%mm7,%mm5
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	movq	%mm1,%mm3
+	movq	%mm5,%mm7
+	pshufw	$177,%mm1,%mm2
+	pshufw	$177,%mm5,%mm6
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	pslld	$8,%mm1
+	pslld	$8,%mm5
+	psrld	$8,%mm3
+	psrld	$8,%mm7
+	movq	(%edi),%mm2
+	movq	8(%edi),%mm6
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	movl	-128(%ebp),%eax
+	pslld	$16,%mm1
+	pslld	$16,%mm5
+	movl	-64(%ebp),%ebx
+	psrld	$16,%mm3
+	psrld	$16,%mm7
+	movl	(%ebp),%ecx
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	movl	64(%ebp),%edx
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	jmp	.L007loop
+.align	16
+.L008out:
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	ret
+.size	_sse_AES_decrypt_compact,.-_sse_AES_decrypt_compact
+.hidden	_x86_AES_decrypt
+.type	_x86_AES_decrypt,@function
+.align	16
+_x86_AES_decrypt:
+	movl	%edi,20(%esp)
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+.align	16
+.L009loop:
+	movl	%eax,%esi
+	andl	$255,%esi
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%dh,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%ebx,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+	movl	%esi,4(%esp)
+
+	movl	%ebx,%esi
+	andl	$255,%esi
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%ah,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%ecx,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+	movl	%esi,8(%esp)
+
+	movl	%ecx,%esi
+	andl	$255,%esi
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%bh,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movl	(%ebp,%edx,8),%edx
+	movzbl	%ch,%ecx
+	xorl	3(%ebp,%ecx,8),%edx
+	movl	%esi,%ecx
+	shrl	$16,%ebx
+	andl	$255,%ebx
+	xorl	2(%ebp,%ebx,8),%edx
+	movl	8(%esp),%ebx
+	shrl	$24,%eax
+	xorl	1(%ebp,%eax,8),%edx
+	movl	4(%esp),%eax
+
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	cmpl	24(%esp),%edi
+	movl	%edi,20(%esp)
+	jb	.L009loop
+	leal	2176(%ebp),%ebp
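+# (hedged) The eight dead loads below appear to warm the 256-byte Td4
+# table at .LAES_Td+2048 in cache before the byte-indexed final-round
+# lookups.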
+	movl	-128(%ebp),%edi
+	movl	-96(%ebp),%esi
+	movl	-64(%ebp),%edi
+	movl	-32(%ebp),%esi
+	movl	(%ebp),%edi
+	movl	32(%ebp),%esi
+	movl	64(%ebp),%edi
+	movl	96(%ebp),%esi
+	leal	-128(%ebp),%ebp
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ebx,%edi
+	shrl	$24,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	movzbl	(%ebp,%esi,1),%esi
+	movzbl	%ah,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$24,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	movzbl	(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movzbl	(%ebp,%edx,1),%edx
+	movzbl	%ch,%ecx
+	movzbl	(%ebp,%ecx,1),%ecx
+	shll	$8,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+	shrl	$16,%ebx
+	andl	$255,%ebx
+	movzbl	(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	shrl	$24,%eax
+	movzbl	(%ebp,%eax,1),%eax
+	shll	$24,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	leal	-2048(%ebp),%ebp
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	ret
+.align	64
+.LAES_Td:
+.long	1353184337,1353184337
+.long	1399144830,1399144830
+.long	3282310938,3282310938
+.long	2522752826,2522752826
+.long	3412831035,3412831035
+.long	4047871263,4047871263
+.long	2874735276,2874735276
+.long	2466505547,2466505547
+.long	1442459680,1442459680
+.long	4134368941,4134368941
+.long	2440481928,2440481928
+.long	625738485,625738485
+.long	4242007375,4242007375
+.long	3620416197,3620416197
+.long	2151953702,2151953702
+.long	2409849525,2409849525
+.long	1230680542,1230680542
+.long	1729870373,1729870373
+.long	2551114309,2551114309
+.long	3787521629,3787521629
+.long	41234371,41234371
+.long	317738113,317738113
+.long	2744600205,2744600205
+.long	3338261355,3338261355
+.long	3881799427,3881799427
+.long	2510066197,2510066197
+.long	3950669247,3950669247
+.long	3663286933,3663286933
+.long	763608788,763608788
+.long	3542185048,3542185048
+.long	694804553,694804553
+.long	1154009486,1154009486
+.long	1787413109,1787413109
+.long	2021232372,2021232372
+.long	1799248025,1799248025
+.long	3715217703,3715217703
+.long	3058688446,3058688446
+.long	397248752,397248752
+.long	1722556617,1722556617
+.long	3023752829,3023752829
+.long	407560035,407560035
+.long	2184256229,2184256229
+.long	1613975959,1613975959
+.long	1165972322,1165972322
+.long	3765920945,3765920945
+.long	2226023355,2226023355
+.long	480281086,480281086
+.long	2485848313,2485848313
+.long	1483229296,1483229296
+.long	436028815,436028815
+.long	2272059028,2272059028
+.long	3086515026,3086515026
+.long	601060267,601060267
+.long	3791801202,3791801202
+.long	1468997603,1468997603
+.long	715871590,715871590
+.long	120122290,120122290
+.long	63092015,63092015
+.long	2591802758,2591802758
+.long	2768779219,2768779219
+.long	4068943920,4068943920
+.long	2997206819,2997206819
+.long	3127509762,3127509762
+.long	1552029421,1552029421
+.long	723308426,723308426
+.long	2461301159,2461301159
+.long	4042393587,4042393587
+.long	2715969870,2715969870
+.long	3455375973,3455375973
+.long	3586000134,3586000134
+.long	526529745,526529745
+.long	2331944644,2331944644
+.long	2639474228,2639474228
+.long	2689987490,2689987490
+.long	853641733,853641733
+.long	1978398372,1978398372
+.long	971801355,971801355
+.long	2867814464,2867814464
+.long	111112542,111112542
+.long	1360031421,1360031421
+.long	4186579262,4186579262
+.long	1023860118,1023860118
+.long	2919579357,2919579357
+.long	1186850381,1186850381
+.long	3045938321,3045938321
+.long	90031217,90031217
+.long	1876166148,1876166148
+.long	4279586912,4279586912
+.long	620468249,620468249
+.long	2548678102,2548678102
+.long	3426959497,3426959497
+.long	2006899047,2006899047
+.long	3175278768,3175278768
+.long	2290845959,2290845959
+.long	945494503,945494503
+.long	3689859193,3689859193
+.long	1191869601,1191869601
+.long	3910091388,3910091388
+.long	3374220536,3374220536
+.long	0,0
+.long	2206629897,2206629897
+.long	1223502642,1223502642
+.long	2893025566,2893025566
+.long	1316117100,1316117100
+.long	4227796733,4227796733
+.long	1446544655,1446544655
+.long	517320253,517320253
+.long	658058550,658058550
+.long	1691946762,1691946762
+.long	564550760,564550760
+.long	3511966619,3511966619
+.long	976107044,976107044
+.long	2976320012,2976320012
+.long	266819475,266819475
+.long	3533106868,3533106868
+.long	2660342555,2660342555
+.long	1338359936,1338359936
+.long	2720062561,2720062561
+.long	1766553434,1766553434
+.long	370807324,370807324
+.long	179999714,179999714
+.long	3844776128,3844776128
+.long	1138762300,1138762300
+.long	488053522,488053522
+.long	185403662,185403662
+.long	2915535858,2915535858
+.long	3114841645,3114841645
+.long	3366526484,3366526484
+.long	2233069911,2233069911
+.long	1275557295,1275557295
+.long	3151862254,3151862254
+.long	4250959779,4250959779
+.long	2670068215,2670068215
+.long	3170202204,3170202204
+.long	3309004356,3309004356
+.long	880737115,880737115
+.long	1982415755,1982415755
+.long	3703972811,3703972811
+.long	1761406390,1761406390
+.long	1676797112,1676797112
+.long	3403428311,3403428311
+.long	277177154,277177154
+.long	1076008723,1076008723
+.long	538035844,538035844
+.long	2099530373,2099530373
+.long	4164795346,4164795346
+.long	288553390,288553390
+.long	1839278535,1839278535
+.long	1261411869,1261411869
+.long	4080055004,4080055004
+.long	3964831245,3964831245
+.long	3504587127,3504587127
+.long	1813426987,1813426987
+.long	2579067049,2579067049
+.long	4199060497,4199060497
+.long	577038663,577038663
+.long	3297574056,3297574056
+.long	440397984,440397984
+.long	3626794326,3626794326
+.long	4019204898,4019204898
+.long	3343796615,3343796615
+.long	3251714265,3251714265
+.long	4272081548,4272081548
+.long	906744984,906744984
+.long	3481400742,3481400742
+.long	685669029,685669029
+.long	646887386,646887386
+.long	2764025151,2764025151
+.long	3835509292,3835509292
+.long	227702864,227702864
+.long	2613862250,2613862250
+.long	1648787028,1648787028
+.long	3256061430,3256061430
+.long	3904428176,3904428176
+.long	1593260334,1593260334
+.long	4121936770,4121936770
+.long	3196083615,3196083615
+.long	2090061929,2090061929
+.long	2838353263,2838353263
+.long	3004310991,3004310991
+.long	999926984,999926984
+.long	2809993232,2809993232
+.long	1852021992,1852021992
+.long	2075868123,2075868123
+.long	158869197,158869197
+.long	4095236462,4095236462
+.long	28809964,28809964
+.long	2828685187,2828685187
+.long	1701746150,1701746150
+.long	2129067946,2129067946
+.long	147831841,147831841
+.long	3873969647,3873969647
+.long	3650873274,3650873274
+.long	3459673930,3459673930
+.long	3557400554,3557400554
+.long	3598495785,3598495785
+.long	2947720241,2947720241
+.long	824393514,824393514
+.long	815048134,815048134
+.long	3227951669,3227951669
+.long	935087732,935087732
+.long	2798289660,2798289660
+.long	2966458592,2966458592
+.long	366520115,366520115
+.long	1251476721,1251476721
+.long	4158319681,4158319681
+.long	240176511,240176511
+.long	804688151,804688151
+.long	2379631990,2379631990
+.long	1303441219,1303441219
+.long	1414376140,1414376140
+.long	3741619940,3741619940
+.long	3820343710,3820343710
+.long	461924940,461924940
+.long	3089050817,3089050817
+.long	2136040774,2136040774
+.long	82468509,82468509
+.long	1563790337,1563790337
+.long	1937016826,1937016826
+.long	776014843,776014843
+.long	1511876531,1511876531
+.long	1389550482,1389550482
+.long	861278441,861278441
+.long	323475053,323475053
+.long	2355222426,2355222426
+.long	2047648055,2047648055
+.long	2383738969,2383738969
+.long	2302415851,2302415851
+.long	3995576782,3995576782
+.long	902390199,902390199
+.long	3991215329,3991215329
+.long	1018251130,1018251130
+.long	1507840668,1507840668
+.long	1064563285,1064563285
+.long	2043548696,2043548696
+.long	3208103795,3208103795
+.long	3939366739,3939366739
+.long	1537932639,1537932639
+.long	342834655,342834655
+.long	2262516856,2262516856
+.long	2180231114,2180231114
+.long	1053059257,1053059257
+.long	741614648,741614648
+.long	1598071746,1598071746
+.long	1925389590,1925389590
+.long	203809468,203809468
+.long	2336832552,2336832552
+.long	1100287487,1100287487
+.long	1895934009,1895934009
+.long	3736275976,3736275976
+.long	2632234200,2632234200
+.long	2428589668,2428589668
+.long	1636092795,1636092795
+.long	1890988757,1890988757
+.long	1952214088,1952214088
+.long	1113045200,1113045200
+.byte	82,9,106,213,48,54,165,56
+.byte	191,64,163,158,129,243,215,251
+.byte	124,227,57,130,155,47,255,135
+.byte	52,142,67,68,196,222,233,203
+.byte	84,123,148,50,166,194,35,61
+.byte	238,76,149,11,66,250,195,78
+.byte	8,46,161,102,40,217,36,178
+.byte	118,91,162,73,109,139,209,37
+.byte	114,248,246,100,134,104,152,22
+.byte	212,164,92,204,93,101,182,146
+.byte	108,112,72,80,253,237,185,218
+.byte	94,21,70,87,167,141,157,132
+.byte	144,216,171,0,140,188,211,10
+.byte	247,228,88,5,184,179,69,6
+.byte	208,44,30,143,202,63,15,2
+.byte	193,175,189,3,1,19,138,107
+.byte	58,145,17,65,79,103,220,234
+.byte	151,242,207,206,240,180,230,115
+.byte	150,172,116,34,231,173,53,133
+.byte	226,249,55,232,28,117,223,110
+.byte	71,241,26,113,29,41,197,137
+.byte	111,183,98,14,170,24,190,27
+.byte	252,86,62,75,198,210,121,32
+.byte	154,219,192,254,120,205,90,244
+.byte	31,221,168,51,136,7,199,49
+.byte	177,18,16,89,39,128,236,95
+.byte	96,81,127,169,25,181,74,13
+.byte	45,229,122,159,147,201,156,239
+.byte	160,224,59,77,174,42,245,176
+.byte	200,235,187,60,131,83,153,97
+.byte	23,43,4,126,186,119,214,38
+.byte	225,105,20,99,85,33,12,125
+.byte	82,9,106,213,48,54,165,56
+.byte	191,64,163,158,129,243,215,251
+.byte	124,227,57,130,155,47,255,135
+.byte	52,142,67,68,196,222,233,203
+.byte	84,123,148,50,166,194,35,61
+.byte	238,76,149,11,66,250,195,78
+.byte	8,46,161,102,40,217,36,178
+.byte	118,91,162,73,109,139,209,37
+.byte	114,248,246,100,134,104,152,22
+.byte	212,164,92,204,93,101,182,146
+.byte	108,112,72,80,253,237,185,218
+.byte	94,21,70,87,167,141,157,132
+.byte	144,216,171,0,140,188,211,10
+.byte	247,228,88,5,184,179,69,6
+.byte	208,44,30,143,202,63,15,2
+.byte	193,175,189,3,1,19,138,107
+.byte	58,145,17,65,79,103,220,234
+.byte	151,242,207,206,240,180,230,115
+.byte	150,172,116,34,231,173,53,133
+.byte	226,249,55,232,28,117,223,110
+.byte	71,241,26,113,29,41,197,137
+.byte	111,183,98,14,170,24,190,27
+.byte	252,86,62,75,198,210,121,32
+.byte	154,219,192,254,120,205,90,244
+.byte	31,221,168,51,136,7,199,49
+.byte	177,18,16,89,39,128,236,95
+.byte	96,81,127,169,25,181,74,13
+.byte	45,229,122,159,147,201,156,239
+.byte	160,224,59,77,174,42,245,176
+.byte	200,235,187,60,131,83,153,97
+.byte	23,43,4,126,186,119,214,38
+.byte	225,105,20,99,85,33,12,125
+.byte	82,9,106,213,48,54,165,56
+.byte	191,64,163,158,129,243,215,251
+.byte	124,227,57,130,155,47,255,135
+.byte	52,142,67,68,196,222,233,203
+.byte	84,123,148,50,166,194,35,61
+.byte	238,76,149,11,66,250,195,78
+.byte	8,46,161,102,40,217,36,178
+.byte	118,91,162,73,109,139,209,37
+.byte	114,248,246,100,134,104,152,22
+.byte	212,164,92,204,93,101,182,146
+.byte	108,112,72,80,253,237,185,218
+.byte	94,21,70,87,167,141,157,132
+.byte	144,216,171,0,140,188,211,10
+.byte	247,228,88,5,184,179,69,6
+.byte	208,44,30,143,202,63,15,2
+.byte	193,175,189,3,1,19,138,107
+.byte	58,145,17,65,79,103,220,234
+.byte	151,242,207,206,240,180,230,115
+.byte	150,172,116,34,231,173,53,133
+.byte	226,249,55,232,28,117,223,110
+.byte	71,241,26,113,29,41,197,137
+.byte	111,183,98,14,170,24,190,27
+.byte	252,86,62,75,198,210,121,32
+.byte	154,219,192,254,120,205,90,244
+.byte	31,221,168,51,136,7,199,49
+.byte	177,18,16,89,39,128,236,95
+.byte	96,81,127,169,25,181,74,13
+.byte	45,229,122,159,147,201,156,239
+.byte	160,224,59,77,174,42,245,176
+.byte	200,235,187,60,131,83,153,97
+.byte	23,43,4,126,186,119,214,38
+.byte	225,105,20,99,85,33,12,125
+.byte	82,9,106,213,48,54,165,56
+.byte	191,64,163,158,129,243,215,251
+.byte	124,227,57,130,155,47,255,135
+.byte	52,142,67,68,196,222,233,203
+.byte	84,123,148,50,166,194,35,61
+.byte	238,76,149,11,66,250,195,78
+.byte	8,46,161,102,40,217,36,178
+.byte	118,91,162,73,109,139,209,37
+.byte	114,248,246,100,134,104,152,22
+.byte	212,164,92,204,93,101,182,146
+.byte	108,112,72,80,253,237,185,218
+.byte	94,21,70,87,167,141,157,132
+.byte	144,216,171,0,140,188,211,10
+.byte	247,228,88,5,184,179,69,6
+.byte	208,44,30,143,202,63,15,2
+.byte	193,175,189,3,1,19,138,107
+.byte	58,145,17,65,79,103,220,234
+.byte	151,242,207,206,240,180,230,115
+.byte	150,172,116,34,231,173,53,133
+.byte	226,249,55,232,28,117,223,110
+.byte	71,241,26,113,29,41,197,137
+.byte	111,183,98,14,170,24,190,27
+.byte	252,86,62,75,198,210,121,32
+.byte	154,219,192,254,120,205,90,244
+.byte	31,221,168,51,136,7,199,49
+.byte	177,18,16,89,39,128,236,95
+.byte	96,81,127,169,25,181,74,13
+.byte	45,229,122,159,147,201,156,239
+.byte	160,224,59,77,174,42,245,176
+.byte	200,235,187,60,131,83,153,97
+.byte	23,43,4,126,186,119,214,38
+.byte	225,105,20,99,85,33,12,125
+.size	_x86_AES_decrypt,.-_x86_AES_decrypt
+.globl	aes_nohw_decrypt
+.hidden	aes_nohw_decrypt
+.type	aes_nohw_decrypt,@function
+.align	16
+aes_nohw_decrypt:
+.L_aes_nohw_decrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	28(%esp),%edi
+	movl	%esp,%eax
+	subl	$36,%esp
+	andl	$-64,%esp
+	leal	-127(%edi),%ebx
+	subl	%esp,%ebx
+	negl	%ebx
+	andl	$960,%ebx
+	subl	%ebx,%esp
+	addl	$4,%esp
+	movl	%eax,28(%esp)
+	call	.L010pic_point
+.L010pic_point:
+	popl	%ebp
+	leal	OPENSSL_ia32cap_P-.L010pic_point(%ebp),%eax
+	leal	.LAES_Td-.L010pic_point(%ebp),%ebp
+	leal	764(%esp),%ebx
+	subl	%ebp,%ebx
+	andl	$768,%ebx
+	leal	2176(%ebp,%ebx,1),%ebp
+	btl	$25,(%eax)
+	jnc	.L011x86
+	movq	(%esi),%mm0
+	movq	8(%esi),%mm4
+	call	_sse_AES_decrypt_compact
+	movl	28(%esp),%esp
+	movl	24(%esp),%esi
+	movq	%mm0,(%esi)
+	movq	%mm4,8(%esi)
+	emms
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	16
+.L011x86:
+	movl	%ebp,24(%esp)
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	call	_x86_AES_decrypt_compact
+	movl	28(%esp),%esp
+	movl	24(%esp),%esi
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	aes_nohw_decrypt,.-.L_aes_nohw_decrypt_begin
+.globl	aes_nohw_cbc_encrypt
+.hidden	aes_nohw_cbc_encrypt
+.type	aes_nohw_cbc_encrypt,@function
+.align	16
+aes_nohw_cbc_encrypt:
+.L_aes_nohw_cbc_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%ecx
+	cmpl	$0,%ecx
+	je	.L012drop_out
+	call	.L013pic_point
+.L013pic_point:
+	popl	%ebp
+	leal	OPENSSL_ia32cap_P-.L013pic_point(%ebp),%eax
+	cmpl	$0,40(%esp)
+	leal	.LAES_Te-.L013pic_point(%ebp),%ebp
+	jne	.L014picked_te
+	leal	.LAES_Td-.LAES_Te(%ebp),%ebp
+.L014picked_te:
+	pushfl
+	cld
+	cmpl	$512,%ecx
+	jb	.L015slow_way
+	testl	$15,%ecx
+	jnz	.L015slow_way
+	btl	$28,(%eax)
+	jc	.L015slow_way
+	leal	-324(%esp),%esi
+	andl	$-64,%esi
+	movl	%ebp,%eax
+	leal	2304(%ebp),%ebx
+	movl	%esi,%edx
+	andl	$4095,%eax
+	andl	$4095,%ebx
+	andl	$4095,%edx
+	cmpl	%ebx,%edx
+	jb	.L016tbl_break_out
+	subl	%ebx,%edx
+	subl	%edx,%esi
+	jmp	.L017tbl_ok
+.align	4
+.L016tbl_break_out:
+	subl	%eax,%edx
+	andl	$4095,%edx
+	addl	$384,%edx
+	subl	%edx,%esi
+.align	4
+.L017tbl_ok:
+	leal	24(%esp),%edx
+	xchgl	%esi,%esp
+	addl	$4,%esp
+	movl	%ebp,24(%esp)
+	movl	%esi,28(%esp)
+	movl	(%edx),%eax
+	movl	4(%edx),%ebx
+	movl	12(%edx),%edi
+	movl	16(%edx),%esi
+	movl	20(%edx),%edx
+	movl	%eax,32(%esp)
+	movl	%ebx,36(%esp)
+	movl	%ecx,40(%esp)
+	movl	%edi,44(%esp)
+	movl	%esi,48(%esp)
+	movl	$0,316(%esp)
+	movl	%edi,%ebx
+	movl	$61,%ecx
+	subl	%ebp,%ebx
+	movl	%edi,%esi
+	andl	$4095,%ebx
+	leal	76(%esp),%edi
+	cmpl	$2304,%ebx
+	jb	.L018do_copy
+	cmpl	$3852,%ebx
+	jb	.L019skip_copy
+.align	4
+.L018do_copy:
+	movl	%edi,44(%esp)
+.long	2784229001
+.L019skip_copy:
+	movl	$16,%edi
+.align	4
+.L020prefetch_tbl:
+	movl	(%ebp),%eax
+	movl	32(%ebp),%ebx
+	movl	64(%ebp),%ecx
+	movl	96(%ebp),%esi
+	leal	128(%ebp),%ebp
+	subl	$1,%edi
+	jnz	.L020prefetch_tbl
+	subl	$2048,%ebp
+	movl	32(%esp),%esi
+	movl	48(%esp),%edi
+	cmpl	$0,%edx
+	je	.L021fast_decrypt
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+.align	16
+.L022fast_enc_loop:
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	xorl	(%esi),%eax
+	xorl	4(%esi),%ebx
+	xorl	8(%esi),%ecx
+	xorl	12(%esi),%edx
+	movl	44(%esp),%edi
+	call	_x86_AES_encrypt
+	movl	32(%esp),%esi
+	movl	36(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	leal	16(%esi),%esi
+	movl	40(%esp),%ecx
+	movl	%esi,32(%esp)
+	leal	16(%edi),%edx
+	movl	%edx,36(%esp)
+	subl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jnz	.L022fast_enc_loop
+	movl	48(%esp),%esi
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	cmpl	$0,316(%esp)
+	movl	44(%esp),%edi
+	je	.L023skip_ezero
+	movl	$60,%ecx
+	xorl	%eax,%eax
+.align	4
+.long	2884892297
+.L023skip_ezero:
+	movl	28(%esp),%esp
+	popfl
+.L012drop_out:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L021fast_decrypt:
+	cmpl	36(%esp),%esi
+	je	.L024fast_dec_in_place
+	movl	%edi,52(%esp)
+.align	4
+.align	16
+.L025fast_dec_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	44(%esp),%edi
+	call	_x86_AES_decrypt
+	movl	52(%esp),%edi
+	movl	40(%esp),%esi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	36(%esp),%edi
+	movl	32(%esp),%esi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	40(%esp),%ecx
+	movl	%esi,52(%esp)
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	leal	16(%edi),%edi
+	movl	%edi,36(%esp)
+	subl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jnz	.L025fast_dec_loop
+	movl	52(%esp),%edi
+	movl	48(%esp),%esi
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	jmp	.L026fast_dec_out
+.align	16
+.L024fast_dec_in_place:
+.L027fast_dec_in_place_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	leal	60(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	44(%esp),%edi
+	call	_x86_AES_decrypt
+	movl	48(%esp),%edi
+	movl	36(%esp),%esi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	leal	16(%esi),%esi
+	movl	%esi,36(%esp)
+	leal	60(%esp),%esi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	32(%esp),%esi
+	movl	40(%esp),%ecx
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	subl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jnz	.L027fast_dec_in_place_loop
+.align	4
+.L026fast_dec_out:
+	cmpl	$0,316(%esp)
+	movl	44(%esp),%edi
+	je	.L028skip_dzero
+	movl	$60,%ecx
+	xorl	%eax,%eax
+.align	4
+.long	2884892297
+.L028skip_dzero:
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L015slow_way:
+	movl	(%eax),%eax
+	movl	36(%esp),%edi
+	leal	-80(%esp),%esi
+	andl	$-64,%esi
+	leal	-143(%edi),%ebx
+	subl	%esi,%ebx
+	negl	%ebx
+	andl	$960,%ebx
+	subl	%ebx,%esi
+	leal	768(%esi),%ebx
+	subl	%ebp,%ebx
+	andl	$768,%ebx
+	leal	2176(%ebp,%ebx,1),%ebp
+	leal	24(%esp),%edx
+	xchgl	%esi,%esp
+	addl	$4,%esp
+	movl	%ebp,24(%esp)
+	movl	%esi,28(%esp)
+	movl	%eax,52(%esp)
+	movl	(%edx),%eax
+	movl	4(%edx),%ebx
+	movl	16(%edx),%esi
+	movl	20(%edx),%edx
+	movl	%eax,32(%esp)
+	movl	%ebx,36(%esp)
+	movl	%ecx,40(%esp)
+	movl	%edi,44(%esp)
+	movl	%esi,48(%esp)
+	movl	%esi,%edi
+	movl	%eax,%esi
+	cmpl	$0,%edx
+	je	.L029slow_decrypt
+	cmpl	$16,%ecx
+	movl	%ebx,%edx
+	jb	.L030slow_enc_tail
+	btl	$25,52(%esp)
+	jnc	.L031slow_enc_x86
+	movq	(%edi),%mm0
+	movq	8(%edi),%mm4
+.align	16
+.L032slow_enc_loop_sse:
+	pxor	(%esi),%mm0
+	pxor	8(%esi),%mm4
+	movl	44(%esp),%edi
+	call	_sse_AES_encrypt_compact
+	movl	32(%esp),%esi
+	movl	36(%esp),%edi
+	movl	40(%esp),%ecx
+	movq	%mm0,(%edi)
+	movq	%mm4,8(%edi)
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	leal	16(%edi),%edx
+	movl	%edx,36(%esp)
+	subl	$16,%ecx
+	cmpl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jae	.L032slow_enc_loop_sse
+	testl	$15,%ecx
+	jnz	.L030slow_enc_tail
+	movl	48(%esp),%esi
+	movq	%mm0,(%esi)
+	movq	%mm4,8(%esi)
+	emms
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L031slow_enc_x86:
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+.align	4
+.L033slow_enc_loop_x86:
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	xorl	(%esi),%eax
+	xorl	4(%esi),%ebx
+	xorl	8(%esi),%ecx
+	xorl	12(%esi),%edx
+	movl	44(%esp),%edi
+	call	_x86_AES_encrypt_compact
+	movl	32(%esp),%esi
+	movl	36(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	40(%esp),%ecx
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	leal	16(%edi),%edx
+	movl	%edx,36(%esp)
+	subl	$16,%ecx
+	cmpl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jae	.L033slow_enc_loop_x86
+	testl	$15,%ecx
+	jnz	.L030slow_enc_tail
+	movl	48(%esp),%esi
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L030slow_enc_tail:
+	emms
+	movl	%edx,%edi
+	movl	$16,%ebx
+	subl	%ecx,%ebx
+	cmpl	%esi,%edi
+	je	.L034enc_in_place
+.align	4
+.long	2767451785
+	jmp	.L035enc_skip_in_place
+.L034enc_in_place:
+	leal	(%edi,%ecx,1),%edi
+.L035enc_skip_in_place:
+	movl	%ebx,%ecx
+	xorl	%eax,%eax
+.align	4
+.long	2868115081
+	movl	48(%esp),%edi
+	movl	%edx,%esi
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	$16,40(%esp)
+	jmp	.L033slow_enc_loop_x86
+.align	16
+.L029slow_decrypt:
+	btl	$25,52(%esp)
+	jnc	.L036slow_dec_loop_x86
+.align	4
+.L037slow_dec_loop_sse:
+	movq	(%esi),%mm0
+	movq	8(%esi),%mm4
+	movl	44(%esp),%edi
+	call	_sse_AES_decrypt_compact
+	movl	32(%esp),%esi
+	leal	60(%esp),%eax
+	movl	36(%esp),%ebx
+	movl	40(%esp),%ecx
+	movl	48(%esp),%edi
+	movq	(%esi),%mm1
+	movq	8(%esi),%mm5
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	movq	%mm1,(%edi)
+	movq	%mm5,8(%edi)
+	subl	$16,%ecx
+	jc	.L038slow_dec_partial_sse
+	movq	%mm0,(%ebx)
+	movq	%mm4,8(%ebx)
+	leal	16(%ebx),%ebx
+	movl	%ebx,36(%esp)
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	movl	%ecx,40(%esp)
+	jnz	.L037slow_dec_loop_sse
+	emms
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L038slow_dec_partial_sse:
+	movq	%mm0,(%eax)
+	movq	%mm4,8(%eax)
+	emms
+	addl	$16,%ecx
+	movl	%ebx,%edi
+	movl	%eax,%esi
+.align	4
+.long	2767451785
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L036slow_dec_loop_x86:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	leal	60(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	44(%esp),%edi
+	call	_x86_AES_decrypt_compact
+	movl	48(%esp),%edi
+	movl	40(%esp),%esi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	subl	$16,%esi
+	jc	.L039slow_dec_partial_x86
+	movl	%esi,40(%esp)
+	movl	36(%esp),%esi
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	leal	16(%esi),%esi
+	movl	%esi,36(%esp)
+	leal	60(%esp),%esi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	32(%esp),%esi
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	jnz	.L036slow_dec_loop_x86
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L039slow_dec_partial_x86:
+	leal	60(%esp),%esi
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	movl	32(%esp),%esi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	40(%esp),%ecx
+	movl	36(%esp),%edi
+	leal	60(%esp),%esi
+.align	4
+.long	2767451785
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	aes_nohw_cbc_encrypt,.-.L_aes_nohw_cbc_encrypt_begin
+.hidden	_x86_AES_set_encrypt_key
+.type	_x86_AES_set_encrypt_key,@function
+.align	16
+_x86_AES_set_encrypt_key:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	24(%esp),%esi
+	movl	32(%esp),%edi
+	testl	$-1,%esi
+	jz	.L040badpointer
+	testl	$-1,%edi
+	jz	.L040badpointer
+	call	.L041pic_point
+.L041pic_point:
+	popl	%ebp
+	leal	.LAES_Te-.L041pic_point(%ebp),%ebp
+	leal	2176(%ebp),%ebp
+	movl	-128(%ebp),%eax
+	movl	-96(%ebp),%ebx
+	movl	-64(%ebp),%ecx
+	movl	-32(%ebp),%edx
+	movl	(%ebp),%eax
+	movl	32(%ebp),%ebx
+	movl	64(%ebp),%ecx
+	movl	96(%ebp),%edx
+	movl	28(%esp),%ecx
+	cmpl	$128,%ecx
+	je	.L04210rounds
+	cmpl	$192,%ecx
+	je	.L04312rounds
+	cmpl	$256,%ecx
+	je	.L04414rounds
+	movl	$-2,%eax
+	jmp	.L045exit
+.L04210rounds:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	xorl	%ecx,%ecx
+	jmp	.L04610shortcut
+.align	4
+.L04710loop:
+	movl	(%edi),%eax
+	movl	12(%edi),%edx
+.L04610shortcut:
+	movzbl	%dl,%esi
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shrl	$16,%edx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$8,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+	xorl	896(%ebp,%ecx,4),%eax
+	movl	%eax,16(%edi)
+	xorl	4(%edi),%eax
+	movl	%eax,20(%edi)
+	xorl	8(%edi),%eax
+	movl	%eax,24(%edi)
+	xorl	12(%edi),%eax
+	movl	%eax,28(%edi)
+	incl	%ecx
+	addl	$16,%edi
+	cmpl	$10,%ecx
+	jl	.L04710loop
+	movl	$10,80(%edi)
+	xorl	%eax,%eax
+	jmp	.L045exit
+.L04312rounds:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	16(%esi),%ecx
+	movl	20(%esi),%edx
+	movl	%ecx,16(%edi)
+	movl	%edx,20(%edi)
+	xorl	%ecx,%ecx
+	jmp	.L04812shortcut
+.align	4
+.L04912loop:
+	movl	(%edi),%eax
+	movl	20(%edi),%edx
+.L04812shortcut:
+	movzbl	%dl,%esi
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shrl	$16,%edx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$8,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+	xorl	896(%ebp,%ecx,4),%eax
+	movl	%eax,24(%edi)
+	xorl	4(%edi),%eax
+	movl	%eax,28(%edi)
+	xorl	8(%edi),%eax
+	movl	%eax,32(%edi)
+	xorl	12(%edi),%eax
+	movl	%eax,36(%edi)
+	cmpl	$7,%ecx
+	je	.L05012break
+	incl	%ecx
+	xorl	16(%edi),%eax
+	movl	%eax,40(%edi)
+	xorl	20(%edi),%eax
+	movl	%eax,44(%edi)
+	addl	$24,%edi
+	jmp	.L04912loop
+.L05012break:
+	movl	$12,72(%edi)
+	xorl	%eax,%eax
+	jmp	.L045exit
+.L04414rounds:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	16(%esi),%eax
+	movl	20(%esi),%ebx
+	movl	24(%esi),%ecx
+	movl	28(%esi),%edx
+	movl	%eax,16(%edi)
+	movl	%ebx,20(%edi)
+	movl	%ecx,24(%edi)
+	movl	%edx,28(%edi)
+	xorl	%ecx,%ecx
+	jmp	.L05114shortcut
+.align	4
+.L05214loop:
+	movl	28(%edi),%edx
+.L05114shortcut:
+	movl	(%edi),%eax
+	movzbl	%dl,%esi
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shrl	$16,%edx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$8,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+	xorl	896(%ebp,%ecx,4),%eax
+	movl	%eax,32(%edi)
+	xorl	4(%edi),%eax
+	movl	%eax,36(%edi)
+	xorl	8(%edi),%eax
+	movl	%eax,40(%edi)
+	xorl	12(%edi),%eax
+	movl	%eax,44(%edi)
+	cmpl	$6,%ecx
+	je	.L05314break
+	incl	%ecx
+	movl	%eax,%edx
+	movl	16(%edi),%eax
+	movzbl	%dl,%esi
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shrl	$16,%edx
+	shll	$8,%ebx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+	movl	%eax,48(%edi)
+	xorl	20(%edi),%eax
+	movl	%eax,52(%edi)
+	xorl	24(%edi),%eax
+	movl	%eax,56(%edi)
+	xorl	28(%edi),%eax
+	movl	%eax,60(%edi)
+	addl	$32,%edi
+	jmp	.L05214loop
+.L05314break:
+	movl	$14,48(%edi)
+	xorl	%eax,%eax
+	jmp	.L045exit
+.L040badpointer:
+	movl	$-1,%eax
+.L045exit:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	_x86_AES_set_encrypt_key,.-_x86_AES_set_encrypt_key
+.globl	aes_nohw_set_encrypt_key
+.hidden	aes_nohw_set_encrypt_key
+.type	aes_nohw_set_encrypt_key,@function
+.align	16
+aes_nohw_set_encrypt_key:
+.L_aes_nohw_set_encrypt_key_begin:
+	call	_x86_AES_set_encrypt_key
+	ret
+.size	aes_nohw_set_encrypt_key,.-.L_aes_nohw_set_encrypt_key_begin
+.globl	aes_nohw_set_decrypt_key
+.hidden	aes_nohw_set_decrypt_key
+.type	aes_nohw_set_decrypt_key,@function
+.align	16
+aes_nohw_set_decrypt_key:
+.L_aes_nohw_set_decrypt_key_begin:
+	call	_x86_AES_set_encrypt_key
+	cmpl	$0,%eax
+	je	.L054proceed
+	ret
+.L054proceed:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%esi
+	movl	240(%esi),%ecx
+	leal	(,%ecx,4),%ecx
+	leal	(%esi,%ecx,4),%edi
+.align	4
+.L055invert:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	(%edi),%ecx
+	movl	4(%edi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,(%esi)
+	movl	%edx,4(%esi)
+	movl	8(%esi),%eax
+	movl	12(%esi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,8(%edi)
+	movl	%ebx,12(%edi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	addl	$16,%esi
+	subl	$16,%edi
+	cmpl	%edi,%esi
+	jne	.L055invert
+	movl	28(%esp),%edi
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,28(%esp)
+	movl	16(%edi),%eax
+.align	4
+.L056permute:
+	addl	$16,%edi
+	movl	$2155905152,%ebp
+	andl	%eax,%ebp
+	leal	(%eax,%eax,1),%ebx
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	subl	%ebp,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	xorl	%esi,%ebx
+	movl	$2155905152,%ebp
+	andl	%ebx,%ebp
+	leal	(%ebx,%ebx,1),%ecx
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	subl	%ebp,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%eax,%ebx
+	xorl	%esi,%ecx
+	movl	$2155905152,%ebp
+	andl	%ecx,%ebp
+	leal	(%ecx,%ecx,1),%edx
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	xorl	%eax,%ecx
+	subl	%ebp,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	roll	$8,%eax
+	xorl	%esi,%edx
+	movl	4(%edi),%ebp
+	xorl	%ebx,%eax
+	xorl	%edx,%ebx
+	xorl	%ecx,%eax
+	roll	$24,%ebx
+	xorl	%edx,%ecx
+	xorl	%edx,%eax
+	roll	$16,%ecx
+	xorl	%ebx,%eax
+	roll	$8,%edx
+	xorl	%ecx,%eax
+	movl	%ebp,%ebx
+	xorl	%edx,%eax
+	movl	%eax,(%edi)
+	movl	$2155905152,%ebp
+	andl	%ebx,%ebp
+	leal	(%ebx,%ebx,1),%ecx
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	subl	%ebp,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%esi,%ecx
+	movl	$2155905152,%ebp
+	andl	%ecx,%ebp
+	leal	(%ecx,%ecx,1),%edx
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	subl	%ebp,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	xorl	%ebx,%ecx
+	xorl	%esi,%edx
+	movl	$2155905152,%ebp
+	andl	%edx,%ebp
+	leal	(%edx,%edx,1),%eax
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	xorl	%ebx,%edx
+	subl	%ebp,%esi
+	andl	$4278124286,%eax
+	andl	$454761243,%esi
+	roll	$8,%ebx
+	xorl	%esi,%eax
+	movl	8(%edi),%ebp
+	xorl	%ecx,%ebx
+	xorl	%eax,%ecx
+	xorl	%edx,%ebx
+	roll	$24,%ecx
+	xorl	%eax,%edx
+	xorl	%eax,%ebx
+	roll	$16,%edx
+	xorl	%ecx,%ebx
+	roll	$8,%eax
+	xorl	%edx,%ebx
+	movl	%ebp,%ecx
+	xorl	%eax,%ebx
+	movl	%ebx,4(%edi)
+	movl	$2155905152,%ebp
+	andl	%ecx,%ebp
+	leal	(%ecx,%ecx,1),%edx
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	subl	%ebp,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	xorl	%esi,%edx
+	movl	$2155905152,%ebp
+	andl	%edx,%ebp
+	leal	(%edx,%edx,1),%eax
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	subl	%ebp,%esi
+	andl	$4278124286,%eax
+	andl	$454761243,%esi
+	xorl	%ecx,%edx
+	xorl	%esi,%eax
+	movl	$2155905152,%ebp
+	andl	%eax,%ebp
+	leal	(%eax,%eax,1),%ebx
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	xorl	%ecx,%eax
+	subl	%ebp,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	roll	$8,%ecx
+	xorl	%esi,%ebx
+	movl	12(%edi),%ebp
+	xorl	%edx,%ecx
+	xorl	%ebx,%edx
+	xorl	%eax,%ecx
+	roll	$24,%edx
+	xorl	%ebx,%eax
+	xorl	%ebx,%ecx
+	roll	$16,%eax
+	xorl	%edx,%ecx
+	roll	$8,%ebx
+	xorl	%eax,%ecx
+	movl	%ebp,%edx
+	xorl	%ebx,%ecx
+	movl	%ecx,8(%edi)
+	movl	$2155905152,%ebp
+	andl	%edx,%ebp
+	leal	(%edx,%edx,1),%eax
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	subl	%ebp,%esi
+	andl	$4278124286,%eax
+	andl	$454761243,%esi
+	xorl	%esi,%eax
+	movl	$2155905152,%ebp
+	andl	%eax,%ebp
+	leal	(%eax,%eax,1),%ebx
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	subl	%ebp,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	xorl	%edx,%eax
+	xorl	%esi,%ebx
+	movl	$2155905152,%ebp
+	andl	%ebx,%ebp
+	leal	(%ebx,%ebx,1),%ecx
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	xorl	%edx,%ebx
+	subl	%ebp,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	roll	$8,%edx
+	xorl	%esi,%ecx
+	movl	16(%edi),%ebp
+	xorl	%eax,%edx
+	xorl	%ecx,%eax
+	xorl	%ebx,%edx
+	roll	$24,%eax
+	xorl	%ecx,%ebx
+	xorl	%ecx,%edx
+	roll	$16,%ebx
+	xorl	%eax,%edx
+	roll	$8,%ecx
+	xorl	%ebx,%edx
+	movl	%ebp,%eax
+	xorl	%ecx,%edx
+	movl	%edx,12(%edi)
+	cmpl	28(%esp),%edi
+	jb	.L056permute
+	xorl	%eax,%eax
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	aes_nohw_set_decrypt_key,.-.L_aes_nohw_set_decrypt_key_begin
+.byte	65,69,83,32,102,111,114,32,120,56,54,44,32,67,82,89
+.byte	80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
+.byte	111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+#endif
+.section	.note.GNU-stack,"",@progbits
diff --git a/linux-x86_64/crypto/fipsmodule/aes-x86_64.S b/linux-x86_64/crypto/fipsmodule/aes-x86_64.S
new file mode 100644
index 0000000..47a69ec
--- /dev/null
+++ b/linux-x86_64/crypto/fipsmodule/aes-x86_64.S
@@ -0,0 +1,2665 @@
+# This file is generated from a similarly-named Perl script in the BoringSSL
+# source tree. Do not edit by hand.
+
+#if defined(__has_feature)
+#if __has_feature(memory_sanitizer) && !defined(OPENSSL_NO_ASM)
+#define OPENSSL_NO_ASM
+#endif
+#endif
+
+#if defined(__x86_64__) && !defined(OPENSSL_NO_ASM)
+#if defined(BORINGSSL_PREFIX)
+#include <boringssl_prefix_symbols_asm.h>
+#endif
+.text	
+.type	_x86_64_AES_encrypt,@function
+.align	16
+_x86_64_AES_encrypt:
+	xorl	0(%r15),%eax
+	xorl	4(%r15),%ebx
+	xorl	8(%r15),%ecx
+	xorl	12(%r15),%edx
+
+	movl	240(%r15),%r13d
+	subl	$1,%r13d
+	jmp	.Lenc_loop
+.align	16
+.Lenc_loop:
+
+	movzbl	%al,%esi
+	movzbl	%bl,%edi
+	movzbl	%cl,%ebp
+	movl	0(%r14,%rsi,8),%r10d
+	movl	0(%r14,%rdi,8),%r11d
+	movl	0(%r14,%rbp,8),%r12d
+
+	movzbl	%bh,%esi
+	movzbl	%ch,%edi
+	movzbl	%dl,%ebp
+	xorl	3(%r14,%rsi,8),%r10d
+	xorl	3(%r14,%rdi,8),%r11d
+	movl	0(%r14,%rbp,8),%r8d
+
+	movzbl	%dh,%esi
+	shrl	$16,%ecx
+	movzbl	%ah,%ebp
+	xorl	3(%r14,%rsi,8),%r12d
+	shrl	$16,%edx
+	xorl	3(%r14,%rbp,8),%r8d
+
+	shrl	$16,%ebx
+	leaq	16(%r15),%r15
+	shrl	$16,%eax
+
+	movzbl	%cl,%esi
+	movzbl	%dl,%edi
+	movzbl	%al,%ebp
+	xorl	2(%r14,%rsi,8),%r10d
+	xorl	2(%r14,%rdi,8),%r11d
+	xorl	2(%r14,%rbp,8),%r12d
+
+	movzbl	%dh,%esi
+	movzbl	%ah,%edi
+	movzbl	%bl,%ebp
+	xorl	1(%r14,%rsi,8),%r10d
+	xorl	1(%r14,%rdi,8),%r11d
+	xorl	2(%r14,%rbp,8),%r8d
+
+	movl	12(%r15),%edx
+	movzbl	%bh,%edi
+	movzbl	%ch,%ebp
+	movl	0(%r15),%eax
+	xorl	1(%r14,%rdi,8),%r12d
+	xorl	1(%r14,%rbp,8),%r8d
+
+	movl	4(%r15),%ebx
+	movl	8(%r15),%ecx
+	xorl	%r10d,%eax
+	xorl	%r11d,%ebx
+	xorl	%r12d,%ecx
+	xorl	%r8d,%edx
+	subl	$1,%r13d
+	jnz	.Lenc_loop
+	movzbl	%al,%esi
+	movzbl	%bl,%edi
+	movzbl	%cl,%ebp
+	movzbl	2(%r14,%rsi,8),%r10d
+	movzbl	2(%r14,%rdi,8),%r11d
+	movzbl	2(%r14,%rbp,8),%r12d
+
+	movzbl	%dl,%esi
+	movzbl	%bh,%edi
+	movzbl	%ch,%ebp
+	movzbl	2(%r14,%rsi,8),%r8d
+	movl	0(%r14,%rdi,8),%edi
+	movl	0(%r14,%rbp,8),%ebp
+
+	andl	$0x0000ff00,%edi
+	andl	$0x0000ff00,%ebp
+
+	xorl	%edi,%r10d
+	xorl	%ebp,%r11d
+	shrl	$16,%ecx
+
+	movzbl	%dh,%esi
+	movzbl	%ah,%edi
+	shrl	$16,%edx
+	movl	0(%r14,%rsi,8),%esi
+	movl	0(%r14,%rdi,8),%edi
+
+	andl	$0x0000ff00,%esi
+	andl	$0x0000ff00,%edi
+	shrl	$16,%ebx
+	xorl	%esi,%r12d
+	xorl	%edi,%r8d
+	shrl	$16,%eax
+
+	movzbl	%cl,%esi
+	movzbl	%dl,%edi
+	movzbl	%al,%ebp
+	movl	0(%r14,%rsi,8),%esi
+	movl	0(%r14,%rdi,8),%edi
+	movl	0(%r14,%rbp,8),%ebp
+
+	andl	$0x00ff0000,%esi
+	andl	$0x00ff0000,%edi
+	andl	$0x00ff0000,%ebp
+
+	xorl	%esi,%r10d
+	xorl	%edi,%r11d
+	xorl	%ebp,%r12d
+
+	movzbl	%bl,%esi
+	movzbl	%dh,%edi
+	movzbl	%ah,%ebp
+	movl	0(%r14,%rsi,8),%esi
+	movl	2(%r14,%rdi,8),%edi
+	movl	2(%r14,%rbp,8),%ebp
+
+	andl	$0x00ff0000,%esi
+	andl	$0xff000000,%edi
+	andl	$0xff000000,%ebp
+
+	xorl	%esi,%r8d
+	xorl	%edi,%r10d
+	xorl	%ebp,%r11d
+
+	movzbl	%bh,%esi
+	movzbl	%ch,%edi
+	movl	16+12(%r15),%edx
+	movl	2(%r14,%rsi,8),%esi
+	movl	2(%r14,%rdi,8),%edi
+	movl	16+0(%r15),%eax
+
+	andl	$0xff000000,%esi
+	andl	$0xff000000,%edi
+
+	xorl	%esi,%r12d
+	xorl	%edi,%r8d
+
+	movl	16+4(%r15),%ebx
+	movl	16+8(%r15),%ecx
+	xorl	%r10d,%eax
+	xorl	%r11d,%ebx
+	xorl	%r12d,%ecx
+	xorl	%r8d,%edx
+.byte	0xf3,0xc3
+.size	_x86_64_AES_encrypt,.-_x86_64_AES_encrypt
+.type	_x86_64_AES_encrypt_compact,@function
+.align	16
+_x86_64_AES_encrypt_compact:
+.cfi_startproc	
+	leaq	128(%r14),%r8
+	movl	0-128(%r8),%edi
+	movl	32-128(%r8),%ebp
+	movl	64-128(%r8),%r10d
+	movl	96-128(%r8),%r11d
+	movl	128-128(%r8),%edi
+	movl	160-128(%r8),%ebp
+	movl	192-128(%r8),%r10d
+	movl	224-128(%r8),%r11d
+	jmp	.Lenc_loop_compact
+.align	16
+.Lenc_loop_compact:
+	xorl	0(%r15),%eax
+	xorl	4(%r15),%ebx
+	xorl	8(%r15),%ecx
+	xorl	12(%r15),%edx
+	leaq	16(%r15),%r15
+	movzbl	%al,%r10d
+	movzbl	%bl,%r11d
+	movzbl	%cl,%r12d
+	movzbl	%dl,%r8d
+	movzbl	%bh,%esi
+	movzbl	%ch,%edi
+	shrl	$16,%ecx
+	movzbl	%dh,%ebp
+	movzbl	(%r14,%r10,1),%r10d
+	movzbl	(%r14,%r11,1),%r11d
+	movzbl	(%r14,%r12,1),%r12d
+	movzbl	(%r14,%r8,1),%r8d
+
+	movzbl	(%r14,%rsi,1),%r9d
+	movzbl	%ah,%esi
+	movzbl	(%r14,%rdi,1),%r13d
+	movzbl	%cl,%edi
+	movzbl	(%r14,%rbp,1),%ebp
+	movzbl	(%r14,%rsi,1),%esi
+
+	shll	$8,%r9d
+	shrl	$16,%edx
+	shll	$8,%r13d
+	xorl	%r9d,%r10d
+	shrl	$16,%eax
+	movzbl	%dl,%r9d
+	shrl	$16,%ebx
+	xorl	%r13d,%r11d
+	shll	$8,%ebp
+	movzbl	%al,%r13d
+	movzbl	(%r14,%rdi,1),%edi
+	xorl	%ebp,%r12d
+
+	shll	$8,%esi
+	movzbl	%bl,%ebp
+	shll	$16,%edi
+	xorl	%esi,%r8d
+	movzbl	(%r14,%r9,1),%r9d
+	movzbl	%dh,%esi
+	movzbl	(%r14,%r13,1),%r13d
+	xorl	%edi,%r10d
+
+	shrl	$8,%ecx
+	movzbl	%ah,%edi
+	shll	$16,%r9d
+	shrl	$8,%ebx
+	shll	$16,%r13d
+	xorl	%r9d,%r11d
+	movzbl	(%r14,%rbp,1),%ebp
+	movzbl	(%r14,%rsi,1),%esi
+	movzbl	(%r14,%rdi,1),%edi
+	movzbl	(%r14,%rcx,1),%edx
+	movzbl	(%r14,%rbx,1),%ecx
+
+	shll	$16,%ebp
+	xorl	%r13d,%r12d
+	shll	$24,%esi
+	xorl	%ebp,%r8d
+	shll	$24,%edi
+	xorl	%esi,%r10d
+	shll	$24,%edx
+	xorl	%edi,%r11d
+	shll	$24,%ecx
+	movl	%r10d,%eax
+	movl	%r11d,%ebx
+	xorl	%r12d,%ecx
+	xorl	%r8d,%edx
+	cmpq	16(%rsp),%r15
+	je	.Lenc_compact_done
+	movl	$0x80808080,%r10d
+	movl	$0x80808080,%r11d
+	andl	%eax,%r10d
+	andl	%ebx,%r11d
+	movl	%r10d,%esi
+	movl	%r11d,%edi
+	shrl	$7,%r10d
+	leal	(%rax,%rax,1),%r8d
+	shrl	$7,%r11d
+	leal	(%rbx,%rbx,1),%r9d
+	subl	%r10d,%esi
+	subl	%r11d,%edi
+	andl	$0xfefefefe,%r8d
+	andl	$0xfefefefe,%r9d
+	andl	$0x1b1b1b1b,%esi
+	andl	$0x1b1b1b1b,%edi
+	movl	%eax,%r10d
+	movl	%ebx,%r11d
+	xorl	%esi,%r8d
+	xorl	%edi,%r9d
+
+	xorl	%r8d,%eax
+	xorl	%r9d,%ebx
+	movl	$0x80808080,%r12d
+	roll	$24,%eax
+	movl	$0x80808080,%ebp
+	roll	$24,%ebx
+	andl	%ecx,%r12d
+	andl	%edx,%ebp
+	xorl	%r8d,%eax
+	xorl	%r9d,%ebx
+	movl	%r12d,%esi
+	rorl	$16,%r10d
+	movl	%ebp,%edi
+	rorl	$16,%r11d
+	leal	(%rcx,%rcx,1),%r8d
+	shrl	$7,%r12d
+	xorl	%r10d,%eax
+	shrl	$7,%ebp
+	xorl	%r11d,%ebx
+	rorl	$8,%r10d
+	leal	(%rdx,%rdx,1),%r9d
+	rorl	$8,%r11d
+	subl	%r12d,%esi
+	subl	%ebp,%edi
+	xorl	%r10d,%eax
+	xorl	%r11d,%ebx
+
+	andl	$0xfefefefe,%r8d
+	andl	$0xfefefefe,%r9d
+	andl	$0x1b1b1b1b,%esi
+	andl	$0x1b1b1b1b,%edi
+	movl	%ecx,%r12d
+	movl	%edx,%ebp
+	xorl	%esi,%r8d
+	xorl	%edi,%r9d
+
+	rorl	$16,%r12d
+	xorl	%r8d,%ecx
+	rorl	$16,%ebp
+	xorl	%r9d,%edx
+	roll	$24,%ecx
+	movl	0(%r14),%esi
+	roll	$24,%edx
+	xorl	%r8d,%ecx
+	movl	64(%r14),%edi
+	xorl	%r9d,%edx
+	movl	128(%r14),%r8d
+	xorl	%r12d,%ecx
+	rorl	$8,%r12d
+	xorl	%ebp,%edx
+	rorl	$8,%ebp
+	xorl	%r12d,%ecx
+	movl	192(%r14),%r9d
+	xorl	%ebp,%edx
+	jmp	.Lenc_loop_compact
+.align	16
+.Lenc_compact_done:
+	xorl	0(%r15),%eax
+	xorl	4(%r15),%ebx
+	xorl	8(%r15),%ecx
+	xorl	12(%r15),%edx
+.byte	0xf3,0xc3
+.cfi_endproc	
+.size	_x86_64_AES_encrypt_compact,.-_x86_64_AES_encrypt_compact
+.align	16
+.globl	aes_nohw_encrypt
+.hidden aes_nohw_encrypt
+.type	aes_nohw_encrypt,@function
+.hidden	aes_nohw_encrypt
+aes_nohw_encrypt:
+.cfi_startproc	
+	movq	%rsp,%rax
+.cfi_def_cfa_register	%rax
+	pushq	%rbx
+.cfi_offset	%rbx,-16
+	pushq	%rbp
+.cfi_offset	%rbp,-24
+	pushq	%r12
+.cfi_offset	%r12,-32
+	pushq	%r13
+.cfi_offset	%r13,-40
+	pushq	%r14
+.cfi_offset	%r14,-48
+	pushq	%r15
+.cfi_offset	%r15,-56
+
+
+	leaq	-63(%rdx),%rcx
+	andq	$-64,%rsp
+	subq	%rsp,%rcx
+	negq	%rcx
+	andq	$0x3c0,%rcx
+	subq	%rcx,%rsp
+	subq	$32,%rsp
+
+	movq	%rsi,16(%rsp)
+	movq	%rax,24(%rsp)
+.cfi_escape	0x0f,0x05,0x77,0x18,0x06,0x23,0x08
+.Lenc_prologue:
+
+	movq	%rdx,%r15
+	movl	240(%r15),%r13d
+
+	movl	0(%rdi),%eax
+	movl	4(%rdi),%ebx
+	movl	8(%rdi),%ecx
+	movl	12(%rdi),%edx
+
+	shll	$4,%r13d
+	leaq	(%r15,%r13,1),%rbp
+	movq	%r15,(%rsp)
+	movq	%rbp,8(%rsp)
+
+
+	leaq	.LAES_Te+2048(%rip),%r14
+	leaq	768(%rsp),%rbp
+	subq	%r14,%rbp
+	andq	$0x300,%rbp
+	leaq	(%r14,%rbp,1),%r14
+
+	call	_x86_64_AES_encrypt_compact
+
+	movq	16(%rsp),%r9
+	movq	24(%rsp),%rsi
+.cfi_def_cfa	%rsi,8
+	movl	%eax,0(%r9)
+	movl	%ebx,4(%r9)
+	movl	%ecx,8(%r9)
+	movl	%edx,12(%r9)
+
+	movq	-48(%rsi),%r15
+.cfi_restore	%r15
+	movq	-40(%rsi),%r14
+.cfi_restore	%r14
+	movq	-32(%rsi),%r13
+.cfi_restore	%r13
+	movq	-24(%rsi),%r12
+.cfi_restore	%r12
+	movq	-16(%rsi),%rbp
+.cfi_restore	%rbp
+	movq	-8(%rsi),%rbx
+.cfi_restore	%rbx
+	leaq	(%rsi),%rsp
+.cfi_def_cfa_register	%rsp
+.Lenc_epilogue:
+	.byte	0xf3,0xc3
+.cfi_endproc	
+.size	aes_nohw_encrypt,.-aes_nohw_encrypt
+.type	_x86_64_AES_decrypt,@function
+.align	16
+_x86_64_AES_decrypt:
+	xorl	0(%r15),%eax
+	xorl	4(%r15),%ebx
+	xorl	8(%r15),%ecx
+	xorl	12(%r15),%edx
+
+	movl	240(%r15),%r13d
+	subl	$1,%r13d
+	jmp	.Ldec_loop
+.align	16
+.Ldec_loop:
+
+	movzbl	%al,%esi
+	movzbl	%bl,%edi
+	movzbl	%cl,%ebp
+	movl	0(%r14,%rsi,8),%r10d
+	movl	0(%r14,%rdi,8),%r11d
+	movl	0(%r14,%rbp,8),%r12d
+
+	movzbl	%dh,%esi
+	movzbl	%ah,%edi
+	movzbl	%dl,%ebp
+	xorl	3(%r14,%rsi,8),%r10d
+	xorl	3(%r14,%rdi,8),%r11d
+	movl	0(%r14,%rbp,8),%r8d
+
+	movzbl	%bh,%esi
+	shrl	$16,%eax
+	movzbl	%ch,%ebp
+	xorl	3(%r14,%rsi,8),%r12d
+	shrl	$16,%edx
+	xorl	3(%r14,%rbp,8),%r8d
+
+	shrl	$16,%ebx
+	leaq	16(%r15),%r15
+	shrl	$16,%ecx
+
+	movzbl	%cl,%esi
+	movzbl	%dl,%edi
+	movzbl	%al,%ebp
+	xorl	2(%r14,%rsi,8),%r10d
+	xorl	2(%r14,%rdi,8),%r11d
+	xorl	2(%r14,%rbp,8),%r12d
+
+	movzbl	%bh,%esi
+	movzbl	%ch,%edi
+	movzbl	%bl,%ebp
+	xorl	1(%r14,%rsi,8),%r10d
+	xorl	1(%r14,%rdi,8),%r11d
+	xorl	2(%r14,%rbp,8),%r8d
+
+	movzbl	%dh,%esi
+	movl	12(%r15),%edx
+	movzbl	%ah,%ebp
+	xorl	1(%r14,%rsi,8),%r12d
+	movl	0(%r15),%eax
+	xorl	1(%r14,%rbp,8),%r8d
+
+	xorl	%r10d,%eax
+	movl	4(%r15),%ebx
+	movl	8(%r15),%ecx
+	xorl	%r12d,%ecx
+	xorl	%r11d,%ebx
+	xorl	%r8d,%edx
+	subl	$1,%r13d
+	jnz	.Ldec_loop
+	leaq	2048(%r14),%r14
+	movzbl	%al,%esi
+	movzbl	%bl,%edi
+	movzbl	%cl,%ebp
+	movzbl	(%r14,%rsi,1),%r10d
+	movzbl	(%r14,%rdi,1),%r11d
+	movzbl	(%r14,%rbp,1),%r12d
+
+	movzbl	%dl,%esi
+	movzbl	%dh,%edi
+	movzbl	%ah,%ebp
+	movzbl	(%r14,%rsi,1),%r8d
+	movzbl	(%r14,%rdi,1),%edi
+	movzbl	(%r14,%rbp,1),%ebp
+
+	shll	$8,%edi
+	shll	$8,%ebp
+
+	xorl	%edi,%r10d
+	xorl	%ebp,%r11d
+	shrl	$16,%edx
+
+	movzbl	%bh,%esi
+	movzbl	%ch,%edi
+	shrl	$16,%eax
+	movzbl	(%r14,%rsi,1),%esi
+	movzbl	(%r14,%rdi,1),%edi
+
+	shll	$8,%esi
+	shll	$8,%edi
+	shrl	$16,%ebx
+	xorl	%esi,%r12d
+	xorl	%edi,%r8d
+	shrl	$16,%ecx
+
+	movzbl	%cl,%esi
+	movzbl	%dl,%edi
+	movzbl	%al,%ebp
+	movzbl	(%r14,%rsi,1),%esi
+	movzbl	(%r14,%rdi,1),%edi
+	movzbl	(%r14,%rbp,1),%ebp
+
+	shll	$16,%esi
+	shll	$16,%edi
+	shll	$16,%ebp
+
+	xorl	%esi,%r10d
+	xorl	%edi,%r11d
+	xorl	%ebp,%r12d
+
+	movzbl	%bl,%esi
+	movzbl	%bh,%edi
+	movzbl	%ch,%ebp
+	movzbl	(%r14,%rsi,1),%esi
+	movzbl	(%r14,%rdi,1),%edi
+	movzbl	(%r14,%rbp,1),%ebp
+
+	shll	$16,%esi
+	shll	$24,%edi
+	shll	$24,%ebp
+
+	xorl	%esi,%r8d
+	xorl	%edi,%r10d
+	xorl	%ebp,%r11d
+
+	movzbl	%dh,%esi
+	movzbl	%ah,%edi
+	movl	16+12(%r15),%edx
+	movzbl	(%r14,%rsi,1),%esi
+	movzbl	(%r14,%rdi,1),%edi
+	movl	16+0(%r15),%eax
+
+	shll	$24,%esi
+	shll	$24,%edi
+
+	xorl	%esi,%r12d
+	xorl	%edi,%r8d
+
+	movl	16+4(%r15),%ebx
+	movl	16+8(%r15),%ecx
+	leaq	-2048(%r14),%r14
+	xorl	%r10d,%eax
+	xorl	%r11d,%ebx
+	xorl	%r12d,%ecx
+	xorl	%r8d,%edx
+.byte	0xf3,0xc3
+.size	_x86_64_AES_decrypt,.-_x86_64_AES_decrypt
+.type	_x86_64_AES_decrypt_compact,@function
+.align	16
+_x86_64_AES_decrypt_compact:
+.cfi_startproc	
+	leaq	128(%r14),%r8
+	movl	0-128(%r8),%edi
+	movl	32-128(%r8),%ebp
+	movl	64-128(%r8),%r10d
+	movl	96-128(%r8),%r11d
+	movl	128-128(%r8),%edi
+	movl	160-128(%r8),%ebp
+	movl	192-128(%r8),%r10d
+	movl	224-128(%r8),%r11d
+	jmp	.Ldec_loop_compact
+
+.align	16
+.Ldec_loop_compact:
+	xorl	0(%r15),%eax
+	xorl	4(%r15),%ebx
+	xorl	8(%r15),%ecx
+	xorl	12(%r15),%edx
+	leaq	16(%r15),%r15
+	movzbl	%al,%r10d
+	movzbl	%bl,%r11d
+	movzbl	%cl,%r12d
+	movzbl	%dl,%r8d
+	movzbl	%dh,%esi
+	movzbl	%ah,%edi
+	shrl	$16,%edx
+	movzbl	%bh,%ebp
+	movzbl	(%r14,%r10,1),%r10d
+	movzbl	(%r14,%r11,1),%r11d
+	movzbl	(%r14,%r12,1),%r12d
+	movzbl	(%r14,%r8,1),%r8d
+
+	movzbl	(%r14,%rsi,1),%r9d
+	movzbl	%ch,%esi
+	movzbl	(%r14,%rdi,1),%r13d
+	movzbl	(%r14,%rbp,1),%ebp
+	movzbl	(%r14,%rsi,1),%esi
+
+	shrl	$16,%ecx
+	shll	$8,%r13d
+	shll	$8,%r9d
+	movzbl	%cl,%edi
+	shrl	$16,%eax
+	xorl	%r9d,%r10d
+	shrl	$16,%ebx
+	movzbl	%dl,%r9d
+
+	shll	$8,%ebp
+	xorl	%r13d,%r11d
+	shll	$8,%esi
+	movzbl	%al,%r13d
+	movzbl	(%r14,%rdi,1),%edi
+	xorl	%ebp,%r12d
+	movzbl	%bl,%ebp
+
+	shll	$16,%edi
+	xorl	%esi,%r8d
+	movzbl	(%r14,%r9,1),%r9d
+	movzbl	%bh,%esi
+	movzbl	(%r14,%rbp,1),%ebp
+	xorl	%edi,%r10d
+	movzbl	(%r14,%r13,1),%r13d
+	movzbl	%ch,%edi
+
+	shll	$16,%ebp
+	shll	$16,%r9d
+	shll	$16,%r13d
+	xorl	%ebp,%r8d
+	movzbl	%dh,%ebp
+	xorl	%r9d,%r11d
+	shrl	$8,%eax
+	xorl	%r13d,%r12d
+
+	movzbl	(%r14,%rsi,1),%esi
+	movzbl	(%r14,%rdi,1),%ebx
+	movzbl	(%r14,%rbp,1),%ecx
+	movzbl	(%r14,%rax,1),%edx
+
+	movl	%r10d,%eax
+	shll	$24,%esi
+	shll	$24,%ebx
+	shll	$24,%ecx
+	xorl	%esi,%eax
+	shll	$24,%edx
+	xorl	%r11d,%ebx
+	xorl	%r12d,%ecx
+	xorl	%r8d,%edx
+	cmpq	16(%rsp),%r15
+	je	.Ldec_compact_done
+
+	movq	256+0(%r14),%rsi
+	shlq	$32,%rbx
+	shlq	$32,%rdx
+	movq	256+8(%r14),%rdi
+	orq	%rbx,%rax
+	orq	%rdx,%rcx
+	movq	256+16(%r14),%rbp
+	movq	%rsi,%r9
+	movq	%rsi,%r12
+	andq	%rax,%r9
+	andq	%rcx,%r12
+	movq	%r9,%rbx
+	movq	%r12,%rdx
+	shrq	$7,%r9
+	leaq	(%rax,%rax,1),%r8
+	shrq	$7,%r12
+	leaq	(%rcx,%rcx,1),%r11
+	subq	%r9,%rbx
+	subq	%r12,%rdx
+	andq	%rdi,%r8
+	andq	%rdi,%r11
+	andq	%rbp,%rbx
+	andq	%rbp,%rdx
+	xorq	%rbx,%r8
+	xorq	%rdx,%r11
+	movq	%rsi,%r10
+	movq	%rsi,%r13
+
+	andq	%r8,%r10
+	andq	%r11,%r13
+	movq	%r10,%rbx
+	movq	%r13,%rdx
+	shrq	$7,%r10
+	leaq	(%r8,%r8,1),%r9
+	shrq	$7,%r13
+	leaq	(%r11,%r11,1),%r12
+	subq	%r10,%rbx
+	subq	%r13,%rdx
+	andq	%rdi,%r9
+	andq	%rdi,%r12
+	andq	%rbp,%rbx
+	andq	%rbp,%rdx
+	xorq	%rbx,%r9
+	xorq	%rdx,%r12
+	movq	%rsi,%r10
+	movq	%rsi,%r13
+
+	andq	%r9,%r10
+	andq	%r12,%r13
+	movq	%r10,%rbx
+	movq	%r13,%rdx
+	shrq	$7,%r10
+	xorq	%rax,%r8
+	shrq	$7,%r13
+	xorq	%rcx,%r11
+	subq	%r10,%rbx
+	subq	%r13,%rdx
+	leaq	(%r9,%r9,1),%r10
+	leaq	(%r12,%r12,1),%r13
+	xorq	%rax,%r9
+	xorq	%rcx,%r12
+	andq	%rdi,%r10
+	andq	%rdi,%r13
+	andq	%rbp,%rbx
+	andq	%rbp,%rdx
+	xorq	%rbx,%r10
+	xorq	%rdx,%r13
+
+	xorq	%r10,%rax
+	xorq	%r13,%rcx
+	xorq	%r10,%r8
+	xorq	%r13,%r11
+	movq	%rax,%rbx
+	movq	%rcx,%rdx
+	xorq	%r10,%r9
+	shrq	$32,%rbx
+	xorq	%r13,%r12
+	shrq	$32,%rdx
+	xorq	%r8,%r10
+	roll	$8,%eax
+	xorq	%r11,%r13
+	roll	$8,%ecx
+	xorq	%r9,%r10
+	roll	$8,%ebx
+	xorq	%r12,%r13
+
+	roll	$8,%edx
+	xorl	%r10d,%eax
+	shrq	$32,%r10
+	xorl	%r13d,%ecx
+	shrq	$32,%r13
+	xorl	%r10d,%ebx
+	xorl	%r13d,%edx
+
+	movq	%r8,%r10
+	roll	$24,%r8d
+	movq	%r11,%r13
+	roll	$24,%r11d
+	shrq	$32,%r10
+	xorl	%r8d,%eax
+	shrq	$32,%r13
+	xorl	%r11d,%ecx
+	roll	$24,%r10d
+	movq	%r9,%r8
+	roll	$24,%r13d
+	movq	%r12,%r11
+	shrq	$32,%r8
+	xorl	%r10d,%ebx
+	shrq	$32,%r11
+	xorl	%r13d,%edx
+
+	movq	0(%r14),%rsi
+	roll	$16,%r9d
+	movq	64(%r14),%rdi
+	roll	$16,%r12d
+	movq	128(%r14),%rbp
+	roll	$16,%r8d
+	movq	192(%r14),%r10
+	xorl	%r9d,%eax
+	roll	$16,%r11d
+	xorl	%r12d,%ecx
+	movq	256(%r14),%r13
+	xorl	%r8d,%ebx
+	xorl	%r11d,%edx
+	jmp	.Ldec_loop_compact
+.align	16
+.Ldec_compact_done:
+	xorl	0(%r15),%eax
+	xorl	4(%r15),%ebx
+	xorl	8(%r15),%ecx
+	xorl	12(%r15),%edx
+.byte	0xf3,0xc3
+.cfi_endproc	
+.size	_x86_64_AES_decrypt_compact,.-_x86_64_AES_decrypt_compact
+.align	16
+.globl	aes_nohw_decrypt
+.hidden aes_nohw_decrypt
+.type	aes_nohw_decrypt,@function
+.hidden	aes_nohw_decrypt
+aes_nohw_decrypt:
+.cfi_startproc	
+	movq	%rsp,%rax
+.cfi_def_cfa_register	%rax
+	pushq	%rbx
+.cfi_offset	%rbx,-16
+	pushq	%rbp
+.cfi_offset	%rbp,-24
+	pushq	%r12
+.cfi_offset	%r12,-32
+	pushq	%r13
+.cfi_offset	%r13,-40
+	pushq	%r14
+.cfi_offset	%r14,-48
+	pushq	%r15
+.cfi_offset	%r15,-56
+
+
+	leaq	-63(%rdx),%rcx
+	andq	$-64,%rsp
+	subq	%rsp,%rcx
+	negq	%rcx
+	andq	$0x3c0,%rcx
+	subq	%rcx,%rsp
+	subq	$32,%rsp
+
+	movq	%rsi,16(%rsp)
+	movq	%rax,24(%rsp)
+.cfi_escape	0x0f,0x05,0x77,0x18,0x06,0x23,0x08
+.Ldec_prologue:
+
+	movq	%rdx,%r15
+	movl	240(%r15),%r13d
+
+	movl	0(%rdi),%eax
+	movl	4(%rdi),%ebx
+	movl	8(%rdi),%ecx
+	movl	12(%rdi),%edx
+
+	shll	$4,%r13d
+	leaq	(%r15,%r13,1),%rbp
+	movq	%r15,(%rsp)
+	movq	%rbp,8(%rsp)
+
+
+	leaq	.LAES_Td+2048(%rip),%r14
+	leaq	768(%rsp),%rbp
+	subq	%r14,%rbp
+	andq	$0x300,%rbp
+	leaq	(%r14,%rbp,1),%r14
+	shrq	$3,%rbp
+	addq	%rbp,%r14
+
+	call	_x86_64_AES_decrypt_compact
+
+	movq	16(%rsp),%r9
+	movq	24(%rsp),%rsi
+.cfi_def_cfa	%rsi,8
+	movl	%eax,0(%r9)
+	movl	%ebx,4(%r9)
+	movl	%ecx,8(%r9)
+	movl	%edx,12(%r9)
+
+	movq	-48(%rsi),%r15
+.cfi_restore	%r15
+	movq	-40(%rsi),%r14
+.cfi_restore	%r14
+	movq	-32(%rsi),%r13
+.cfi_restore	%r13
+	movq	-24(%rsi),%r12
+.cfi_restore	%r12
+	movq	-16(%rsi),%rbp
+.cfi_restore	%rbp
+	movq	-8(%rsi),%rbx
+.cfi_restore	%rbx
+	leaq	(%rsi),%rsp
+.cfi_def_cfa_register	%rsp
+.Ldec_epilogue:
+	.byte	0xf3,0xc3
+.cfi_endproc	
+.size	aes_nohw_decrypt,.-aes_nohw_decrypt
+.align	16
+.globl	aes_nohw_set_encrypt_key
+.hidden aes_nohw_set_encrypt_key
+.type	aes_nohw_set_encrypt_key,@function
+aes_nohw_set_encrypt_key:
+.cfi_startproc	
+	pushq	%rbx
+.cfi_adjust_cfa_offset	8
+.cfi_offset	%rbx,-16
+	pushq	%rbp
+.cfi_adjust_cfa_offset	8
+.cfi_offset	%rbp,-24
+	pushq	%r12
+.cfi_adjust_cfa_offset	8
+.cfi_offset	%r12,-32
+	pushq	%r13
+.cfi_adjust_cfa_offset	8
+.cfi_offset	%r13,-40
+	pushq	%r14
+.cfi_adjust_cfa_offset	8
+.cfi_offset	%r14,-48
+	pushq	%r15
+.cfi_adjust_cfa_offset	8
+.cfi_offset	%r15,-56
+	subq	$8,%rsp
+.cfi_adjust_cfa_offset	8
+.Lenc_key_prologue:
+
+	call	_x86_64_AES_set_encrypt_key
+
+	movq	40(%rsp),%rbp
+.cfi_restore	%rbp
+	movq	48(%rsp),%rbx
+.cfi_restore	%rbx
+	addq	$56,%rsp
+.cfi_adjust_cfa_offset	-56
+.Lenc_key_epilogue:
+	.byte	0xf3,0xc3
+.cfi_endproc	
+.size	aes_nohw_set_encrypt_key,.-aes_nohw_set_encrypt_key
+
+.type	_x86_64_AES_set_encrypt_key,@function
+.align	16
+_x86_64_AES_set_encrypt_key:
+.cfi_startproc	
+	movl	%esi,%ecx
+	movq	%rdi,%rsi
+	movq	%rdx,%rdi
+
+	testq	$-1,%rsi
+	jz	.Lbadpointer
+	testq	$-1,%rdi
+	jz	.Lbadpointer
+
+	leaq	.LAES_Te(%rip),%rbp
+	leaq	2048+128(%rbp),%rbp
+
+
+	movl	0-128(%rbp),%eax
+	movl	32-128(%rbp),%ebx
+	movl	64-128(%rbp),%r8d
+	movl	96-128(%rbp),%edx
+	movl	128-128(%rbp),%eax
+	movl	160-128(%rbp),%ebx
+	movl	192-128(%rbp),%r8d
+	movl	224-128(%rbp),%edx
+
+	cmpl	$128,%ecx
+	je	.L10rounds
+	cmpl	$192,%ecx
+	je	.L12rounds
+	cmpl	$256,%ecx
+	je	.L14rounds
+	movq	$-2,%rax
+	jmp	.Lexit
+
+.L10rounds:
+	movq	0(%rsi),%rax
+	movq	8(%rsi),%rdx
+	movq	%rax,0(%rdi)
+	movq	%rdx,8(%rdi)
+
+	shrq	$32,%rdx
+	xorl	%ecx,%ecx
+	jmp	.L10shortcut
+.align	4
+.L10loop:
+	movl	0(%rdi),%eax
+	movl	12(%rdi),%edx
+.L10shortcut:
+	movzbl	%dl,%esi
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	shrl	$16,%edx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$8,%ebx
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+
+	xorl	1024-128(%rbp,%rcx,4),%eax
+	movl	%eax,16(%rdi)
+	xorl	4(%rdi),%eax
+	movl	%eax,20(%rdi)
+	xorl	8(%rdi),%eax
+	movl	%eax,24(%rdi)
+	xorl	12(%rdi),%eax
+	movl	%eax,28(%rdi)
+	addl	$1,%ecx
+	leaq	16(%rdi),%rdi
+	cmpl	$10,%ecx
+	jl	.L10loop
+
+	movl	$10,80(%rdi)
+	xorq	%rax,%rax
+	jmp	.Lexit
+
+.L12rounds:
+	movq	0(%rsi),%rax
+	movq	8(%rsi),%rbx
+	movq	16(%rsi),%rdx
+	movq	%rax,0(%rdi)
+	movq	%rbx,8(%rdi)
+	movq	%rdx,16(%rdi)
+
+	shrq	$32,%rdx
+	xorl	%ecx,%ecx
+	jmp	.L12shortcut
+.align	4
+.L12loop:
+	movl	0(%rdi),%eax
+	movl	20(%rdi),%edx
+.L12shortcut:
+	movzbl	%dl,%esi
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	shrl	$16,%edx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$8,%ebx
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+
+	xorl	1024-128(%rbp,%rcx,4),%eax
+	movl	%eax,24(%rdi)
+	xorl	4(%rdi),%eax
+	movl	%eax,28(%rdi)
+	xorl	8(%rdi),%eax
+	movl	%eax,32(%rdi)
+	xorl	12(%rdi),%eax
+	movl	%eax,36(%rdi)
+
+	cmpl	$7,%ecx
+	je	.L12break
+	addl	$1,%ecx
+
+	xorl	16(%rdi),%eax
+	movl	%eax,40(%rdi)
+	xorl	20(%rdi),%eax
+	movl	%eax,44(%rdi)
+
+	leaq	24(%rdi),%rdi
+	jmp	.L12loop
+.L12break:
+	movl	$12,72(%rdi)
+	xorq	%rax,%rax
+	jmp	.Lexit
+
+.L14rounds:
+	movq	0(%rsi),%rax
+	movq	8(%rsi),%rbx
+	movq	16(%rsi),%rcx
+	movq	24(%rsi),%rdx
+	movq	%rax,0(%rdi)
+	movq	%rbx,8(%rdi)
+	movq	%rcx,16(%rdi)
+	movq	%rdx,24(%rdi)
+
+	shrq	$32,%rdx
+	xorl	%ecx,%ecx
+	jmp	.L14shortcut
+.align	4
+.L14loop:
+	movl	0(%rdi),%eax
+	movl	28(%rdi),%edx
+.L14shortcut:
+	movzbl	%dl,%esi
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	shrl	$16,%edx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$8,%ebx
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+
+	xorl	1024-128(%rbp,%rcx,4),%eax
+	movl	%eax,32(%rdi)
+	xorl	4(%rdi),%eax
+	movl	%eax,36(%rdi)
+	xorl	8(%rdi),%eax
+	movl	%eax,40(%rdi)
+	xorl	12(%rdi),%eax
+	movl	%eax,44(%rdi)
+
+	cmpl	$6,%ecx
+	je	.L14break
+	addl	$1,%ecx
+
+	movl	%eax,%edx
+	movl	16(%rdi),%eax
+	movzbl	%dl,%esi
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	movzbl	%dh,%esi
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	shrl	$16,%edx
+	shll	$8,%ebx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+
+	movl	%eax,48(%rdi)
+	xorl	20(%rdi),%eax
+	movl	%eax,52(%rdi)
+	xorl	24(%rdi),%eax
+	movl	%eax,56(%rdi)
+	xorl	28(%rdi),%eax
+	movl	%eax,60(%rdi)
+
+	leaq	32(%rdi),%rdi
+	jmp	.L14loop
+.L14break:
+	movl	$14,48(%rdi)
+	xorq	%rax,%rax
+	jmp	.Lexit
+
+.Lbadpointer:
+	movq	$-1,%rax
+.Lexit:
+.byte	0xf3,0xc3
+.cfi_endproc	
+.size	_x86_64_AES_set_encrypt_key,.-_x86_64_AES_set_encrypt_key
+.align	16
+.globl	aes_nohw_set_decrypt_key
+.hidden aes_nohw_set_decrypt_key
+.type	aes_nohw_set_decrypt_key,@function
+aes_nohw_set_decrypt_key:
+.cfi_startproc	
+	pushq	%rbx
+.cfi_adjust_cfa_offset	8
+.cfi_offset	%rbx,-16
+	pushq	%rbp
+.cfi_adjust_cfa_offset	8
+.cfi_offset	%rbp,-24
+	pushq	%r12
+.cfi_adjust_cfa_offset	8
+.cfi_offset	%r12,-32
+	pushq	%r13
+.cfi_adjust_cfa_offset	8
+.cfi_offset	%r13,-40
+	pushq	%r14
+.cfi_adjust_cfa_offset	8
+.cfi_offset	%r14,-48
+	pushq	%r15
+.cfi_adjust_cfa_offset	8
+.cfi_offset	%r15,-56
+	pushq	%rdx
+.cfi_adjust_cfa_offset	8
+.Ldec_key_prologue:
+
+	call	_x86_64_AES_set_encrypt_key
+	movq	(%rsp),%r8
+	cmpl	$0,%eax
+	jne	.Labort
+
+	movl	240(%r8),%r14d
+	xorq	%rdi,%rdi
+	leaq	(%rdi,%r14,4),%rcx
+	movq	%r8,%rsi
+	leaq	(%r8,%rcx,4),%rdi
+.align	4
+.Linvert:
+	movq	0(%rsi),%rax
+	movq	8(%rsi),%rbx
+	movq	0(%rdi),%rcx
+	movq	8(%rdi),%rdx
+	movq	%rax,0(%rdi)
+	movq	%rbx,8(%rdi)
+	movq	%rcx,0(%rsi)
+	movq	%rdx,8(%rsi)
+	leaq	16(%rsi),%rsi
+	leaq	-16(%rdi),%rdi
+	cmpq	%rsi,%rdi
+	jne	.Linvert
+
+	leaq	.LAES_Te+2048+1024(%rip),%rax
+
+	movq	40(%rax),%rsi
+	movq	48(%rax),%rdi
+	movq	56(%rax),%rbp
+
+	movq	%r8,%r15
+	subl	$1,%r14d
+.align	4
+.Lpermute:
+	leaq	16(%r15),%r15
+	movq	0(%r15),%rax
+	movq	8(%r15),%rcx
+	movq	%rsi,%r9
+	movq	%rsi,%r12
+	andq	%rax,%r9
+	andq	%rcx,%r12
+	movq	%r9,%rbx
+	movq	%r12,%rdx
+	shrq	$7,%r9
+	leaq	(%rax,%rax,1),%r8
+	shrq	$7,%r12
+	leaq	(%rcx,%rcx,1),%r11
+	subq	%r9,%rbx
+	subq	%r12,%rdx
+	andq	%rdi,%r8
+	andq	%rdi,%r11
+	andq	%rbp,%rbx
+	andq	%rbp,%rdx
+	xorq	%rbx,%r8
+	xorq	%rdx,%r11
+	movq	%rsi,%r10
+	movq	%rsi,%r13
+
+	andq	%r8,%r10
+	andq	%r11,%r13
+	movq	%r10,%rbx
+	movq	%r13,%rdx
+	shrq	$7,%r10
+	leaq	(%r8,%r8,1),%r9
+	shrq	$7,%r13
+	leaq	(%r11,%r11,1),%r12
+	subq	%r10,%rbx
+	subq	%r13,%rdx
+	andq	%rdi,%r9
+	andq	%rdi,%r12
+	andq	%rbp,%rbx
+	andq	%rbp,%rdx
+	xorq	%rbx,%r9
+	xorq	%rdx,%r12
+	movq	%rsi,%r10
+	movq	%rsi,%r13
+
+	andq	%r9,%r10
+	andq	%r12,%r13
+	movq	%r10,%rbx
+	movq	%r13,%rdx
+	shrq	$7,%r10
+	xorq	%rax,%r8
+	shrq	$7,%r13
+	xorq	%rcx,%r11
+	subq	%r10,%rbx
+	subq	%r13,%rdx
+	leaq	(%r9,%r9,1),%r10
+	leaq	(%r12,%r12,1),%r13
+	xorq	%rax,%r9
+	xorq	%rcx,%r12
+	andq	%rdi,%r10
+	andq	%rdi,%r13
+	andq	%rbp,%rbx
+	andq	%rbp,%rdx
+	xorq	%rbx,%r10
+	xorq	%rdx,%r13
+
+	xorq	%r10,%rax
+	xorq	%r13,%rcx
+	xorq	%r10,%r8
+	xorq	%r13,%r11
+	movq	%rax,%rbx
+	movq	%rcx,%rdx
+	xorq	%r10,%r9
+	shrq	$32,%rbx
+	xorq	%r13,%r12
+	shrq	$32,%rdx
+	xorq	%r8,%r10
+	roll	$8,%eax
+	xorq	%r11,%r13
+	roll	$8,%ecx
+	xorq	%r9,%r10
+	roll	$8,%ebx
+	xorq	%r12,%r13
+
+	roll	$8,%edx
+	xorl	%r10d,%eax
+	shrq	$32,%r10
+	xorl	%r13d,%ecx
+	shrq	$32,%r13
+	xorl	%r10d,%ebx
+	xorl	%r13d,%edx
+
+	movq	%r8,%r10
+	roll	$24,%r8d
+	movq	%r11,%r13
+	roll	$24,%r11d
+	shrq	$32,%r10
+	xorl	%r8d,%eax
+	shrq	$32,%r13
+	xorl	%r11d,%ecx
+	roll	$24,%r10d
+	movq	%r9,%r8
+	roll	$24,%r13d
+	movq	%r12,%r11
+	shrq	$32,%r8
+	xorl	%r10d,%ebx
+	shrq	$32,%r11
+	xorl	%r13d,%edx
+
+
+	roll	$16,%r9d
+
+	roll	$16,%r12d
+
+	roll	$16,%r8d
+
+	xorl	%r9d,%eax
+	roll	$16,%r11d
+	xorl	%r12d,%ecx
+
+	xorl	%r8d,%ebx
+	xorl	%r11d,%edx
+	movl	%eax,0(%r15)
+	movl	%ebx,4(%r15)
+	movl	%ecx,8(%r15)
+	movl	%edx,12(%r15)
+	subl	$1,%r14d
+	jnz	.Lpermute
+
+	xorq	%rax,%rax
+.Labort:
+	movq	8(%rsp),%r15
+.cfi_restore	%r15
+	movq	16(%rsp),%r14
+.cfi_restore	%r14
+	movq	24(%rsp),%r13
+.cfi_restore	%r13
+	movq	32(%rsp),%r12
+.cfi_restore	%r12
+	movq	40(%rsp),%rbp
+.cfi_restore	%rbp
+	movq	48(%rsp),%rbx
+.cfi_restore	%rbx
+	addq	$56,%rsp
+.cfi_adjust_cfa_offset	-56
+.Ldec_key_epilogue:
+	.byte	0xf3,0xc3
+.cfi_endproc	
+.size	aes_nohw_set_decrypt_key,.-aes_nohw_set_decrypt_key
+.align	16
+.globl	aes_nohw_cbc_encrypt
+.hidden aes_nohw_cbc_encrypt
+.type	aes_nohw_cbc_encrypt,@function
+.extern	OPENSSL_ia32cap_P
+.hidden OPENSSL_ia32cap_P
+.hidden	aes_nohw_cbc_encrypt
+aes_nohw_cbc_encrypt:
+.cfi_startproc	
+	cmpq	$0,%rdx
+	je	.Lcbc_epilogue
+	pushfq
+
+
+.cfi_adjust_cfa_offset	8
+	pushq	%rbx
+.cfi_adjust_cfa_offset	8
+.cfi_offset	%rbx,-24
+	pushq	%rbp
+.cfi_adjust_cfa_offset	8
+.cfi_offset	%rbp,-32
+	pushq	%r12
+.cfi_adjust_cfa_offset	8
+.cfi_offset	%r12,-40
+	pushq	%r13
+.cfi_adjust_cfa_offset	8
+.cfi_offset	%r13,-48
+	pushq	%r14
+.cfi_adjust_cfa_offset	8
+.cfi_offset	%r14,-56
+	pushq	%r15
+.cfi_adjust_cfa_offset	8
+.cfi_offset	%r15,-64
+.Lcbc_prologue:
+
+	cld
+	movl	%r9d,%r9d
+
+	leaq	.LAES_Te(%rip),%r14
+	leaq	.LAES_Td(%rip),%r10
+	cmpq	$0,%r9
+	cmoveq	%r10,%r14
+
+.cfi_remember_state	
+	leaq	OPENSSL_ia32cap_P(%rip),%r10
+	movl	(%r10),%r10d
+	cmpq	$512,%rdx
+	jb	.Lcbc_slow_prologue
+	testq	$15,%rdx
+	jnz	.Lcbc_slow_prologue
+	btl	$28,%r10d
+	jc	.Lcbc_slow_prologue
+
+
+	leaq	-88-248(%rsp),%r15
+	andq	$-64,%r15
+
+
+	movq	%r14,%r10
+	leaq	2304(%r14),%r11
+	movq	%r15,%r12
+	andq	$0xFFF,%r10
+	andq	$0xFFF,%r11
+	andq	$0xFFF,%r12
+
+	cmpq	%r11,%r12
+	jb	.Lcbc_te_break_out
+	subq	%r11,%r12
+	subq	%r12,%r15
+	jmp	.Lcbc_te_ok
+.Lcbc_te_break_out:
+	subq	%r10,%r12
+	andq	$0xFFF,%r12
+	addq	$320,%r12
+	subq	%r12,%r15
+.align	4
+.Lcbc_te_ok:
+
+	xchgq	%rsp,%r15
+.cfi_def_cfa_register	%r15
+
+	movq	%r15,16(%rsp)
+.cfi_escape	0x0f,0x05,0x77,0x10,0x06,0x23,0x40
+.Lcbc_fast_body:
+	movq	%rdi,24(%rsp)
+	movq	%rsi,32(%rsp)
+	movq	%rdx,40(%rsp)
+	movq	%rcx,48(%rsp)
+	movq	%r8,56(%rsp)
+	movl	$0,80+240(%rsp)
+	movq	%r8,%rbp
+	movq	%r9,%rbx
+	movq	%rsi,%r9
+	movq	%rdi,%r8
+	movq	%rcx,%r15
+
+	movl	240(%r15),%eax
+
+	movq	%r15,%r10
+	subq	%r14,%r10
+	andq	$0xfff,%r10
+	cmpq	$2304,%r10
+	jb	.Lcbc_do_ecopy
+	cmpq	$4096-248,%r10
+	jb	.Lcbc_skip_ecopy
+.align	4
+.Lcbc_do_ecopy:
+	movq	%r15,%rsi
+	leaq	80(%rsp),%rdi
+	leaq	80(%rsp),%r15
+	movl	$30,%ecx
+.long	0x90A548F3
+	movl	%eax,(%rdi)
+.Lcbc_skip_ecopy:
+	movq	%r15,0(%rsp)
+
+	movl	$18,%ecx
+.align	4
+.Lcbc_prefetch_te:
+	movq	0(%r14),%r10
+	movq	32(%r14),%r11
+	movq	64(%r14),%r12
+	movq	96(%r14),%r13
+	leaq	128(%r14),%r14
+	subl	$1,%ecx
+	jnz	.Lcbc_prefetch_te
+	leaq	-2304(%r14),%r14
+
+	cmpq	$0,%rbx
+	je	.LFAST_DECRYPT
+
+
+	movl	0(%rbp),%eax
+	movl	4(%rbp),%ebx
+	movl	8(%rbp),%ecx
+	movl	12(%rbp),%edx
+
+.align	4
+.Lcbc_fast_enc_loop:
+	xorl	0(%r8),%eax
+	xorl	4(%r8),%ebx
+	xorl	8(%r8),%ecx
+	xorl	12(%r8),%edx
+	movq	0(%rsp),%r15
+	movq	%r8,24(%rsp)
+
+	call	_x86_64_AES_encrypt
+
+	movq	24(%rsp),%r8
+	movq	40(%rsp),%r10
+	movl	%eax,0(%r9)
+	movl	%ebx,4(%r9)
+	movl	%ecx,8(%r9)
+	movl	%edx,12(%r9)
+
+	leaq	16(%r8),%r8
+	leaq	16(%r9),%r9
+	subq	$16,%r10
+	testq	$-16,%r10
+	movq	%r10,40(%rsp)
+	jnz	.Lcbc_fast_enc_loop
+	movq	56(%rsp),%rbp
+	movl	%eax,0(%rbp)
+	movl	%ebx,4(%rbp)
+	movl	%ecx,8(%rbp)
+	movl	%edx,12(%rbp)
+
+	jmp	.Lcbc_fast_cleanup
+
+
+.align	16
+.LFAST_DECRYPT:
+	cmpq	%r8,%r9
+	je	.Lcbc_fast_dec_in_place
+
+	movq	%rbp,64(%rsp)
+.align	4
+.Lcbc_fast_dec_loop:
+	movl	0(%r8),%eax
+	movl	4(%r8),%ebx
+	movl	8(%r8),%ecx
+	movl	12(%r8),%edx
+	movq	0(%rsp),%r15
+	movq	%r8,24(%rsp)
+
+	call	_x86_64_AES_decrypt
+
+	movq	64(%rsp),%rbp
+	movq	24(%rsp),%r8
+	movq	40(%rsp),%r10
+	xorl	0(%rbp),%eax
+	xorl	4(%rbp),%ebx
+	xorl	8(%rbp),%ecx
+	xorl	12(%rbp),%edx
+	movq	%r8,%rbp
+
+	subq	$16,%r10
+	movq	%r10,40(%rsp)
+	movq	%rbp,64(%rsp)
+
+	movl	%eax,0(%r9)
+	movl	%ebx,4(%r9)
+	movl	%ecx,8(%r9)
+	movl	%edx,12(%r9)
+
+	leaq	16(%r8),%r8
+	leaq	16(%r9),%r9
+	jnz	.Lcbc_fast_dec_loop
+	movq	56(%rsp),%r12
+	movq	0(%rbp),%r10
+	movq	8(%rbp),%r11
+	movq	%r10,0(%r12)
+	movq	%r11,8(%r12)
+	jmp	.Lcbc_fast_cleanup
+
+.align	16
+.Lcbc_fast_dec_in_place:
+	movq	0(%rbp),%r10
+	movq	8(%rbp),%r11
+	movq	%r10,0+64(%rsp)
+	movq	%r11,8+64(%rsp)
+.align	4
+.Lcbc_fast_dec_in_place_loop:
+	movl	0(%r8),%eax
+	movl	4(%r8),%ebx
+	movl	8(%r8),%ecx
+	movl	12(%r8),%edx
+	movq	0(%rsp),%r15
+	movq	%r8,24(%rsp)
+
+	call	_x86_64_AES_decrypt
+
+	movq	24(%rsp),%r8
+	movq	40(%rsp),%r10
+	xorl	0+64(%rsp),%eax
+	xorl	4+64(%rsp),%ebx
+	xorl	8+64(%rsp),%ecx
+	xorl	12+64(%rsp),%edx
+
+	movq	0(%r8),%r11
+	movq	8(%r8),%r12
+	subq	$16,%r10
+	jz	.Lcbc_fast_dec_in_place_done
+
+	movq	%r11,0+64(%rsp)
+	movq	%r12,8+64(%rsp)
+
+	movl	%eax,0(%r9)
+	movl	%ebx,4(%r9)
+	movl	%ecx,8(%r9)
+	movl	%edx,12(%r9)
+
+	leaq	16(%r8),%r8
+	leaq	16(%r9),%r9
+	movq	%r10,40(%rsp)
+	jmp	.Lcbc_fast_dec_in_place_loop
+.Lcbc_fast_dec_in_place_done:
+	movq	56(%rsp),%rdi
+	movq	%r11,0(%rdi)
+	movq	%r12,8(%rdi)
+
+	movl	%eax,0(%r9)
+	movl	%ebx,4(%r9)
+	movl	%ecx,8(%r9)
+	movl	%edx,12(%r9)
+
+.align	4
+.Lcbc_fast_cleanup:
+	cmpl	$0,80+240(%rsp)
+	leaq	80(%rsp),%rdi
+	je	.Lcbc_exit
+	movl	$30,%ecx
+	xorq	%rax,%rax
+.long	0x90AB48F3
+
+	jmp	.Lcbc_exit
+
+
+.align	16
+.Lcbc_slow_prologue:
+.cfi_restore_state	
+
+	leaq	-88(%rsp),%rbp
+	andq	$-64,%rbp
+
+	leaq	-88-63(%rcx),%r10
+	subq	%rbp,%r10
+	negq	%r10
+	andq	$0x3c0,%r10
+	subq	%r10,%rbp
+
+	xchgq	%rsp,%rbp
+.cfi_def_cfa_register	%rbp
+
+	movq	%rbp,16(%rsp)
+.cfi_escape	0x0f,0x05,0x77,0x10,0x06,0x23,0x40
+.Lcbc_slow_body:
+
+
+
+
+	movq	%r8,56(%rsp)
+	movq	%r8,%rbp
+	movq	%r9,%rbx
+	movq	%rsi,%r9
+	movq	%rdi,%r8
+	movq	%rcx,%r15
+	movq	%rdx,%r10
+
+	movl	240(%r15),%eax
+	movq	%r15,0(%rsp)
+	shll	$4,%eax
+	leaq	(%r15,%rax,1),%rax
+	movq	%rax,8(%rsp)
+
+
+	leaq	2048(%r14),%r14
+	leaq	768-8(%rsp),%rax
+	subq	%r14,%rax
+	andq	$0x300,%rax
+	leaq	(%r14,%rax,1),%r14
+
+	cmpq	$0,%rbx
+	je	.LSLOW_DECRYPT
+
+
+	testq	$-16,%r10
+	movl	0(%rbp),%eax
+	movl	4(%rbp),%ebx
+	movl	8(%rbp),%ecx
+	movl	12(%rbp),%edx
+	jz	.Lcbc_slow_enc_tail
+
+.align	4
+.Lcbc_slow_enc_loop:
+	xorl	0(%r8),%eax
+	xorl	4(%r8),%ebx
+	xorl	8(%r8),%ecx
+	xorl	12(%r8),%edx
+	movq	0(%rsp),%r15
+	movq	%r8,24(%rsp)
+	movq	%r9,32(%rsp)
+	movq	%r10,40(%rsp)
+
+	call	_x86_64_AES_encrypt_compact
+
+	movq	24(%rsp),%r8
+	movq	32(%rsp),%r9
+	movq	40(%rsp),%r10
+	movl	%eax,0(%r9)
+	movl	%ebx,4(%r9)
+	movl	%ecx,8(%r9)
+	movl	%edx,12(%r9)
+
+	leaq	16(%r8),%r8
+	leaq	16(%r9),%r9
+	subq	$16,%r10
+	testq	$-16,%r10
+	jnz	.Lcbc_slow_enc_loop
+	testq	$15,%r10
+	jnz	.Lcbc_slow_enc_tail
+	movq	56(%rsp),%rbp
+	movl	%eax,0(%rbp)
+	movl	%ebx,4(%rbp)
+	movl	%ecx,8(%rbp)
+	movl	%edx,12(%rbp)
+
+	jmp	.Lcbc_exit
+
+.align	4
+.Lcbc_slow_enc_tail:
+	movq	%rax,%r11
+	movq	%rcx,%r12
+	movq	%r10,%rcx
+	movq	%r8,%rsi
+	movq	%r9,%rdi
+.long	0x9066A4F3
+	movq	$16,%rcx
+	subq	%r10,%rcx
+	xorq	%rax,%rax
+.long	0x9066AAF3
+	movq	%r9,%r8
+	movq	$16,%r10
+	movq	%r11,%rax
+	movq	%r12,%rcx
+	jmp	.Lcbc_slow_enc_loop
+
+.align	16
+.LSLOW_DECRYPT:
+	shrq	$3,%rax
+	addq	%rax,%r14
+
+	movq	0(%rbp),%r11
+	movq	8(%rbp),%r12
+	movq	%r11,0+64(%rsp)
+	movq	%r12,8+64(%rsp)
+
+.align	4
+.Lcbc_slow_dec_loop:
+	movl	0(%r8),%eax
+	movl	4(%r8),%ebx
+	movl	8(%r8),%ecx
+	movl	12(%r8),%edx
+	movq	0(%rsp),%r15
+	movq	%r8,24(%rsp)
+	movq	%r9,32(%rsp)
+	movq	%r10,40(%rsp)
+
+	call	_x86_64_AES_decrypt_compact
+
+	movq	24(%rsp),%r8
+	movq	32(%rsp),%r9
+	movq	40(%rsp),%r10
+	xorl	0+64(%rsp),%eax
+	xorl	4+64(%rsp),%ebx
+	xorl	8+64(%rsp),%ecx
+	xorl	12+64(%rsp),%edx
+
+	movq	0(%r8),%r11
+	movq	8(%r8),%r12
+	subq	$16,%r10
+	jc	.Lcbc_slow_dec_partial
+	jz	.Lcbc_slow_dec_done
+
+	movq	%r11,0+64(%rsp)
+	movq	%r12,8+64(%rsp)
+
+	movl	%eax,0(%r9)
+	movl	%ebx,4(%r9)
+	movl	%ecx,8(%r9)
+	movl	%edx,12(%r9)
+
+	leaq	16(%r8),%r8
+	leaq	16(%r9),%r9
+	jmp	.Lcbc_slow_dec_loop
+.Lcbc_slow_dec_done:
+	movq	56(%rsp),%rdi
+	movq	%r11,0(%rdi)
+	movq	%r12,8(%rdi)
+
+	movl	%eax,0(%r9)
+	movl	%ebx,4(%r9)
+	movl	%ecx,8(%r9)
+	movl	%edx,12(%r9)
+
+	jmp	.Lcbc_exit
+
+.align	4
+.Lcbc_slow_dec_partial:
+	movq	56(%rsp),%rdi
+	movq	%r11,0(%rdi)
+	movq	%r12,8(%rdi)
+
+	movl	%eax,0+64(%rsp)
+	movl	%ebx,4+64(%rsp)
+	movl	%ecx,8+64(%rsp)
+	movl	%edx,12+64(%rsp)
+
+	movq	%r9,%rdi
+	leaq	64(%rsp),%rsi
+	leaq	16(%r10),%rcx
+.long	0x9066A4F3
+	jmp	.Lcbc_exit
+
+.align	16
+.Lcbc_exit:
+	movq	16(%rsp),%rsi
+.cfi_def_cfa	%rsi,64
+	movq	(%rsi),%r15
+.cfi_restore	%r15
+	movq	8(%rsi),%r14
+.cfi_restore	%r14
+	movq	16(%rsi),%r13
+.cfi_restore	%r13
+	movq	24(%rsi),%r12
+.cfi_restore	%r12
+	movq	32(%rsi),%rbp
+.cfi_restore	%rbp
+	movq	40(%rsi),%rbx
+.cfi_restore	%rbx
+	leaq	48(%rsi),%rsp
+.cfi_def_cfa	%rsp,16
+.Lcbc_popfq:
+	popfq
+
+
+.cfi_adjust_cfa_offset	-8
+.Lcbc_epilogue:
+	.byte	0xf3,0xc3
+.cfi_endproc	
+.size	aes_nohw_cbc_encrypt,.-aes_nohw_cbc_encrypt
+.align	64
+.LAES_Te:
+.long	0xa56363c6,0xa56363c6
+.long	0x847c7cf8,0x847c7cf8
+.long	0x997777ee,0x997777ee
+.long	0x8d7b7bf6,0x8d7b7bf6
+.long	0x0df2f2ff,0x0df2f2ff
+.long	0xbd6b6bd6,0xbd6b6bd6
+.long	0xb16f6fde,0xb16f6fde
+.long	0x54c5c591,0x54c5c591
+.long	0x50303060,0x50303060
+.long	0x03010102,0x03010102
+.long	0xa96767ce,0xa96767ce
+.long	0x7d2b2b56,0x7d2b2b56
+.long	0x19fefee7,0x19fefee7
+.long	0x62d7d7b5,0x62d7d7b5
+.long	0xe6abab4d,0xe6abab4d
+.long	0x9a7676ec,0x9a7676ec
+.long	0x45caca8f,0x45caca8f
+.long	0x9d82821f,0x9d82821f
+.long	0x40c9c989,0x40c9c989
+.long	0x877d7dfa,0x877d7dfa
+.long	0x15fafaef,0x15fafaef
+.long	0xeb5959b2,0xeb5959b2
+.long	0xc947478e,0xc947478e
+.long	0x0bf0f0fb,0x0bf0f0fb
+.long	0xecadad41,0xecadad41
+.long	0x67d4d4b3,0x67d4d4b3
+.long	0xfda2a25f,0xfda2a25f
+.long	0xeaafaf45,0xeaafaf45
+.long	0xbf9c9c23,0xbf9c9c23
+.long	0xf7a4a453,0xf7a4a453
+.long	0x967272e4,0x967272e4
+.long	0x5bc0c09b,0x5bc0c09b
+.long	0xc2b7b775,0xc2b7b775
+.long	0x1cfdfde1,0x1cfdfde1
+.long	0xae93933d,0xae93933d
+.long	0x6a26264c,0x6a26264c
+.long	0x5a36366c,0x5a36366c
+.long	0x413f3f7e,0x413f3f7e
+.long	0x02f7f7f5,0x02f7f7f5
+.long	0x4fcccc83,0x4fcccc83
+.long	0x5c343468,0x5c343468
+.long	0xf4a5a551,0xf4a5a551
+.long	0x34e5e5d1,0x34e5e5d1
+.long	0x08f1f1f9,0x08f1f1f9
+.long	0x937171e2,0x937171e2
+.long	0x73d8d8ab,0x73d8d8ab
+.long	0x53313162,0x53313162
+.long	0x3f15152a,0x3f15152a
+.long	0x0c040408,0x0c040408
+.long	0x52c7c795,0x52c7c795
+.long	0x65232346,0x65232346
+.long	0x5ec3c39d,0x5ec3c39d
+.long	0x28181830,0x28181830
+.long	0xa1969637,0xa1969637
+.long	0x0f05050a,0x0f05050a
+.long	0xb59a9a2f,0xb59a9a2f
+.long	0x0907070e,0x0907070e
+.long	0x36121224,0x36121224
+.long	0x9b80801b,0x9b80801b
+.long	0x3de2e2df,0x3de2e2df
+.long	0x26ebebcd,0x26ebebcd
+.long	0x6927274e,0x6927274e
+.long	0xcdb2b27f,0xcdb2b27f
+.long	0x9f7575ea,0x9f7575ea
+.long	0x1b090912,0x1b090912
+.long	0x9e83831d,0x9e83831d
+.long	0x742c2c58,0x742c2c58
+.long	0x2e1a1a34,0x2e1a1a34
+.long	0x2d1b1b36,0x2d1b1b36
+.long	0xb26e6edc,0xb26e6edc
+.long	0xee5a5ab4,0xee5a5ab4
+.long	0xfba0a05b,0xfba0a05b
+.long	0xf65252a4,0xf65252a4
+.long	0x4d3b3b76,0x4d3b3b76
+.long	0x61d6d6b7,0x61d6d6b7
+.long	0xceb3b37d,0xceb3b37d
+.long	0x7b292952,0x7b292952
+.long	0x3ee3e3dd,0x3ee3e3dd
+.long	0x712f2f5e,0x712f2f5e
+.long	0x97848413,0x97848413
+.long	0xf55353a6,0xf55353a6
+.long	0x68d1d1b9,0x68d1d1b9
+.long	0x00000000,0x00000000
+.long	0x2cededc1,0x2cededc1
+.long	0x60202040,0x60202040
+.long	0x1ffcfce3,0x1ffcfce3
+.long	0xc8b1b179,0xc8b1b179
+.long	0xed5b5bb6,0xed5b5bb6
+.long	0xbe6a6ad4,0xbe6a6ad4
+.long	0x46cbcb8d,0x46cbcb8d
+.long	0xd9bebe67,0xd9bebe67
+.long	0x4b393972,0x4b393972
+.long	0xde4a4a94,0xde4a4a94
+.long	0xd44c4c98,0xd44c4c98
+.long	0xe85858b0,0xe85858b0
+.long	0x4acfcf85,0x4acfcf85
+.long	0x6bd0d0bb,0x6bd0d0bb
+.long	0x2aefefc5,0x2aefefc5
+.long	0xe5aaaa4f,0xe5aaaa4f
+.long	0x16fbfbed,0x16fbfbed
+.long	0xc5434386,0xc5434386
+.long	0xd74d4d9a,0xd74d4d9a
+.long	0x55333366,0x55333366
+.long	0x94858511,0x94858511
+.long	0xcf45458a,0xcf45458a
+.long	0x10f9f9e9,0x10f9f9e9
+.long	0x06020204,0x06020204
+.long	0x817f7ffe,0x817f7ffe
+.long	0xf05050a0,0xf05050a0
+.long	0x443c3c78,0x443c3c78
+.long	0xba9f9f25,0xba9f9f25
+.long	0xe3a8a84b,0xe3a8a84b
+.long	0xf35151a2,0xf35151a2
+.long	0xfea3a35d,0xfea3a35d
+.long	0xc0404080,0xc0404080
+.long	0x8a8f8f05,0x8a8f8f05
+.long	0xad92923f,0xad92923f
+.long	0xbc9d9d21,0xbc9d9d21
+.long	0x48383870,0x48383870
+.long	0x04f5f5f1,0x04f5f5f1
+.long	0xdfbcbc63,0xdfbcbc63
+.long	0xc1b6b677,0xc1b6b677
+.long	0x75dadaaf,0x75dadaaf
+.long	0x63212142,0x63212142
+.long	0x30101020,0x30101020
+.long	0x1affffe5,0x1affffe5
+.long	0x0ef3f3fd,0x0ef3f3fd
+.long	0x6dd2d2bf,0x6dd2d2bf
+.long	0x4ccdcd81,0x4ccdcd81
+.long	0x140c0c18,0x140c0c18
+.long	0x35131326,0x35131326
+.long	0x2fececc3,0x2fececc3
+.long	0xe15f5fbe,0xe15f5fbe
+.long	0xa2979735,0xa2979735
+.long	0xcc444488,0xcc444488
+.long	0x3917172e,0x3917172e
+.long	0x57c4c493,0x57c4c493
+.long	0xf2a7a755,0xf2a7a755
+.long	0x827e7efc,0x827e7efc
+.long	0x473d3d7a,0x473d3d7a
+.long	0xac6464c8,0xac6464c8
+.long	0xe75d5dba,0xe75d5dba
+.long	0x2b191932,0x2b191932
+.long	0x957373e6,0x957373e6
+.long	0xa06060c0,0xa06060c0
+.long	0x98818119,0x98818119
+.long	0xd14f4f9e,0xd14f4f9e
+.long	0x7fdcdca3,0x7fdcdca3
+.long	0x66222244,0x66222244
+.long	0x7e2a2a54,0x7e2a2a54
+.long	0xab90903b,0xab90903b
+.long	0x8388880b,0x8388880b
+.long	0xca46468c,0xca46468c
+.long	0x29eeeec7,0x29eeeec7
+.long	0xd3b8b86b,0xd3b8b86b
+.long	0x3c141428,0x3c141428
+.long	0x79dedea7,0x79dedea7
+.long	0xe25e5ebc,0xe25e5ebc
+.long	0x1d0b0b16,0x1d0b0b16
+.long	0x76dbdbad,0x76dbdbad
+.long	0x3be0e0db,0x3be0e0db
+.long	0x56323264,0x56323264
+.long	0x4e3a3a74,0x4e3a3a74
+.long	0x1e0a0a14,0x1e0a0a14
+.long	0xdb494992,0xdb494992
+.long	0x0a06060c,0x0a06060c
+.long	0x6c242448,0x6c242448
+.long	0xe45c5cb8,0xe45c5cb8
+.long	0x5dc2c29f,0x5dc2c29f
+.long	0x6ed3d3bd,0x6ed3d3bd
+.long	0xefacac43,0xefacac43
+.long	0xa66262c4,0xa66262c4
+.long	0xa8919139,0xa8919139
+.long	0xa4959531,0xa4959531
+.long	0x37e4e4d3,0x37e4e4d3
+.long	0x8b7979f2,0x8b7979f2
+.long	0x32e7e7d5,0x32e7e7d5
+.long	0x43c8c88b,0x43c8c88b
+.long	0x5937376e,0x5937376e
+.long	0xb76d6dda,0xb76d6dda
+.long	0x8c8d8d01,0x8c8d8d01
+.long	0x64d5d5b1,0x64d5d5b1
+.long	0xd24e4e9c,0xd24e4e9c
+.long	0xe0a9a949,0xe0a9a949
+.long	0xb46c6cd8,0xb46c6cd8
+.long	0xfa5656ac,0xfa5656ac
+.long	0x07f4f4f3,0x07f4f4f3
+.long	0x25eaeacf,0x25eaeacf
+.long	0xaf6565ca,0xaf6565ca
+.long	0x8e7a7af4,0x8e7a7af4
+.long	0xe9aeae47,0xe9aeae47
+.long	0x18080810,0x18080810
+.long	0xd5baba6f,0xd5baba6f
+.long	0x887878f0,0x887878f0
+.long	0x6f25254a,0x6f25254a
+.long	0x722e2e5c,0x722e2e5c
+.long	0x241c1c38,0x241c1c38
+.long	0xf1a6a657,0xf1a6a657
+.long	0xc7b4b473,0xc7b4b473
+.long	0x51c6c697,0x51c6c697
+.long	0x23e8e8cb,0x23e8e8cb
+.long	0x7cdddda1,0x7cdddda1
+.long	0x9c7474e8,0x9c7474e8
+.long	0x211f1f3e,0x211f1f3e
+.long	0xdd4b4b96,0xdd4b4b96
+.long	0xdcbdbd61,0xdcbdbd61
+.long	0x868b8b0d,0x868b8b0d
+.long	0x858a8a0f,0x858a8a0f
+.long	0x907070e0,0x907070e0
+.long	0x423e3e7c,0x423e3e7c
+.long	0xc4b5b571,0xc4b5b571
+.long	0xaa6666cc,0xaa6666cc
+.long	0xd8484890,0xd8484890
+.long	0x05030306,0x05030306
+.long	0x01f6f6f7,0x01f6f6f7
+.long	0x120e0e1c,0x120e0e1c
+.long	0xa36161c2,0xa36161c2
+.long	0x5f35356a,0x5f35356a
+.long	0xf95757ae,0xf95757ae
+.long	0xd0b9b969,0xd0b9b969
+.long	0x91868617,0x91868617
+.long	0x58c1c199,0x58c1c199
+.long	0x271d1d3a,0x271d1d3a
+.long	0xb99e9e27,0xb99e9e27
+.long	0x38e1e1d9,0x38e1e1d9
+.long	0x13f8f8eb,0x13f8f8eb
+.long	0xb398982b,0xb398982b
+.long	0x33111122,0x33111122
+.long	0xbb6969d2,0xbb6969d2
+.long	0x70d9d9a9,0x70d9d9a9
+.long	0x898e8e07,0x898e8e07
+.long	0xa7949433,0xa7949433
+.long	0xb69b9b2d,0xb69b9b2d
+.long	0x221e1e3c,0x221e1e3c
+.long	0x92878715,0x92878715
+.long	0x20e9e9c9,0x20e9e9c9
+.long	0x49cece87,0x49cece87
+.long	0xff5555aa,0xff5555aa
+.long	0x78282850,0x78282850
+.long	0x7adfdfa5,0x7adfdfa5
+.long	0x8f8c8c03,0x8f8c8c03
+.long	0xf8a1a159,0xf8a1a159
+.long	0x80898909,0x80898909
+.long	0x170d0d1a,0x170d0d1a
+.long	0xdabfbf65,0xdabfbf65
+.long	0x31e6e6d7,0x31e6e6d7
+.long	0xc6424284,0xc6424284
+.long	0xb86868d0,0xb86868d0
+.long	0xc3414182,0xc3414182
+.long	0xb0999929,0xb0999929
+.long	0x772d2d5a,0x772d2d5a
+.long	0x110f0f1e,0x110f0f1e
+.long	0xcbb0b07b,0xcbb0b07b
+.long	0xfc5454a8,0xfc5454a8
+.long	0xd6bbbb6d,0xd6bbbb6d
+.long	0x3a16162c,0x3a16162c
+.byte	0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte	0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte	0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte	0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte	0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte	0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte	0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte	0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte	0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte	0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte	0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte	0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte	0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte	0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte	0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte	0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte	0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte	0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte	0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte	0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte	0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte	0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte	0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte	0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte	0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte	0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte	0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte	0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte	0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte	0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte	0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte	0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.byte	0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte	0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte	0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte	0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte	0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte	0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte	0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte	0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte	0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte	0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte	0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte	0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte	0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte	0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte	0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte	0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte	0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte	0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte	0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte	0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte	0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte	0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte	0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte	0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte	0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte	0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte	0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte	0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte	0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte	0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte	0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte	0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.byte	0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte	0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte	0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte	0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte	0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte	0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte	0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte	0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte	0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte	0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte	0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte	0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte	0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte	0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte	0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte	0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte	0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte	0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte	0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte	0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte	0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte	0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte	0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte	0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte	0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte	0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte	0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte	0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte	0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte	0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte	0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte	0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.byte	0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte	0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte	0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte	0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte	0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte	0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte	0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte	0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte	0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte	0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte	0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte	0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte	0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte	0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte	0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte	0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte	0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte	0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte	0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte	0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte	0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte	0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte	0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte	0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte	0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte	0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte	0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte	0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte	0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte	0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte	0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte	0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.long	0x00000001, 0x00000002, 0x00000004, 0x00000008
+.long	0x00000010, 0x00000020, 0x00000040, 0x00000080
+.long	0x0000001b, 0x00000036, 0x80808080, 0x80808080
+.long	0xfefefefe, 0xfefefefe, 0x1b1b1b1b, 0x1b1b1b1b
+.align	64
+.LAES_Td:
+.long	0x50a7f451,0x50a7f451
+.long	0x5365417e,0x5365417e
+.long	0xc3a4171a,0xc3a4171a
+.long	0x965e273a,0x965e273a
+.long	0xcb6bab3b,0xcb6bab3b
+.long	0xf1459d1f,0xf1459d1f
+.long	0xab58faac,0xab58faac
+.long	0x9303e34b,0x9303e34b
+.long	0x55fa3020,0x55fa3020
+.long	0xf66d76ad,0xf66d76ad
+.long	0x9176cc88,0x9176cc88
+.long	0x254c02f5,0x254c02f5
+.long	0xfcd7e54f,0xfcd7e54f
+.long	0xd7cb2ac5,0xd7cb2ac5
+.long	0x80443526,0x80443526
+.long	0x8fa362b5,0x8fa362b5
+.long	0x495ab1de,0x495ab1de
+.long	0x671bba25,0x671bba25
+.long	0x980eea45,0x980eea45
+.long	0xe1c0fe5d,0xe1c0fe5d
+.long	0x02752fc3,0x02752fc3
+.long	0x12f04c81,0x12f04c81
+.long	0xa397468d,0xa397468d
+.long	0xc6f9d36b,0xc6f9d36b
+.long	0xe75f8f03,0xe75f8f03
+.long	0x959c9215,0x959c9215
+.long	0xeb7a6dbf,0xeb7a6dbf
+.long	0xda595295,0xda595295
+.long	0x2d83bed4,0x2d83bed4
+.long	0xd3217458,0xd3217458
+.long	0x2969e049,0x2969e049
+.long	0x44c8c98e,0x44c8c98e
+.long	0x6a89c275,0x6a89c275
+.long	0x78798ef4,0x78798ef4
+.long	0x6b3e5899,0x6b3e5899
+.long	0xdd71b927,0xdd71b927
+.long	0xb64fe1be,0xb64fe1be
+.long	0x17ad88f0,0x17ad88f0
+.long	0x66ac20c9,0x66ac20c9
+.long	0xb43ace7d,0xb43ace7d
+.long	0x184adf63,0x184adf63
+.long	0x82311ae5,0x82311ae5
+.long	0x60335197,0x60335197
+.long	0x457f5362,0x457f5362
+.long	0xe07764b1,0xe07764b1
+.long	0x84ae6bbb,0x84ae6bbb
+.long	0x1ca081fe,0x1ca081fe
+.long	0x942b08f9,0x942b08f9
+.long	0x58684870,0x58684870
+.long	0x19fd458f,0x19fd458f
+.long	0x876cde94,0x876cde94
+.long	0xb7f87b52,0xb7f87b52
+.long	0x23d373ab,0x23d373ab
+.long	0xe2024b72,0xe2024b72
+.long	0x578f1fe3,0x578f1fe3
+.long	0x2aab5566,0x2aab5566
+.long	0x0728ebb2,0x0728ebb2
+.long	0x03c2b52f,0x03c2b52f
+.long	0x9a7bc586,0x9a7bc586
+.long	0xa50837d3,0xa50837d3
+.long	0xf2872830,0xf2872830
+.long	0xb2a5bf23,0xb2a5bf23
+.long	0xba6a0302,0xba6a0302
+.long	0x5c8216ed,0x5c8216ed
+.long	0x2b1ccf8a,0x2b1ccf8a
+.long	0x92b479a7,0x92b479a7
+.long	0xf0f207f3,0xf0f207f3
+.long	0xa1e2694e,0xa1e2694e
+.long	0xcdf4da65,0xcdf4da65
+.long	0xd5be0506,0xd5be0506
+.long	0x1f6234d1,0x1f6234d1
+.long	0x8afea6c4,0x8afea6c4
+.long	0x9d532e34,0x9d532e34
+.long	0xa055f3a2,0xa055f3a2
+.long	0x32e18a05,0x32e18a05
+.long	0x75ebf6a4,0x75ebf6a4
+.long	0x39ec830b,0x39ec830b
+.long	0xaaef6040,0xaaef6040
+.long	0x069f715e,0x069f715e
+.long	0x51106ebd,0x51106ebd
+.long	0xf98a213e,0xf98a213e
+.long	0x3d06dd96,0x3d06dd96
+.long	0xae053edd,0xae053edd
+.long	0x46bde64d,0x46bde64d
+.long	0xb58d5491,0xb58d5491
+.long	0x055dc471,0x055dc471
+.long	0x6fd40604,0x6fd40604
+.long	0xff155060,0xff155060
+.long	0x24fb9819,0x24fb9819
+.long	0x97e9bdd6,0x97e9bdd6
+.long	0xcc434089,0xcc434089
+.long	0x779ed967,0x779ed967
+.long	0xbd42e8b0,0xbd42e8b0
+.long	0x888b8907,0x888b8907
+.long	0x385b19e7,0x385b19e7
+.long	0xdbeec879,0xdbeec879
+.long	0x470a7ca1,0x470a7ca1
+.long	0xe90f427c,0xe90f427c
+.long	0xc91e84f8,0xc91e84f8
+.long	0x00000000,0x00000000
+.long	0x83868009,0x83868009
+.long	0x48ed2b32,0x48ed2b32
+.long	0xac70111e,0xac70111e
+.long	0x4e725a6c,0x4e725a6c
+.long	0xfbff0efd,0xfbff0efd
+.long	0x5638850f,0x5638850f
+.long	0x1ed5ae3d,0x1ed5ae3d
+.long	0x27392d36,0x27392d36
+.long	0x64d90f0a,0x64d90f0a
+.long	0x21a65c68,0x21a65c68
+.long	0xd1545b9b,0xd1545b9b
+.long	0x3a2e3624,0x3a2e3624
+.long	0xb1670a0c,0xb1670a0c
+.long	0x0fe75793,0x0fe75793
+.long	0xd296eeb4,0xd296eeb4
+.long	0x9e919b1b,0x9e919b1b
+.long	0x4fc5c080,0x4fc5c080
+.long	0xa220dc61,0xa220dc61
+.long	0x694b775a,0x694b775a
+.long	0x161a121c,0x161a121c
+.long	0x0aba93e2,0x0aba93e2
+.long	0xe52aa0c0,0xe52aa0c0
+.long	0x43e0223c,0x43e0223c
+.long	0x1d171b12,0x1d171b12
+.long	0x0b0d090e,0x0b0d090e
+.long	0xadc78bf2,0xadc78bf2
+.long	0xb9a8b62d,0xb9a8b62d
+.long	0xc8a91e14,0xc8a91e14
+.long	0x8519f157,0x8519f157
+.long	0x4c0775af,0x4c0775af
+.long	0xbbdd99ee,0xbbdd99ee
+.long	0xfd607fa3,0xfd607fa3
+.long	0x9f2601f7,0x9f2601f7
+.long	0xbcf5725c,0xbcf5725c
+.long	0xc53b6644,0xc53b6644
+.long	0x347efb5b,0x347efb5b
+.long	0x7629438b,0x7629438b
+.long	0xdcc623cb,0xdcc623cb
+.long	0x68fcedb6,0x68fcedb6
+.long	0x63f1e4b8,0x63f1e4b8
+.long	0xcadc31d7,0xcadc31d7
+.long	0x10856342,0x10856342
+.long	0x40229713,0x40229713
+.long	0x2011c684,0x2011c684
+.long	0x7d244a85,0x7d244a85
+.long	0xf83dbbd2,0xf83dbbd2
+.long	0x1132f9ae,0x1132f9ae
+.long	0x6da129c7,0x6da129c7
+.long	0x4b2f9e1d,0x4b2f9e1d
+.long	0xf330b2dc,0xf330b2dc
+.long	0xec52860d,0xec52860d
+.long	0xd0e3c177,0xd0e3c177
+.long	0x6c16b32b,0x6c16b32b
+.long	0x99b970a9,0x99b970a9
+.long	0xfa489411,0xfa489411
+.long	0x2264e947,0x2264e947
+.long	0xc48cfca8,0xc48cfca8
+.long	0x1a3ff0a0,0x1a3ff0a0
+.long	0xd82c7d56,0xd82c7d56
+.long	0xef903322,0xef903322
+.long	0xc74e4987,0xc74e4987
+.long	0xc1d138d9,0xc1d138d9
+.long	0xfea2ca8c,0xfea2ca8c
+.long	0x360bd498,0x360bd498
+.long	0xcf81f5a6,0xcf81f5a6
+.long	0x28de7aa5,0x28de7aa5
+.long	0x268eb7da,0x268eb7da
+.long	0xa4bfad3f,0xa4bfad3f
+.long	0xe49d3a2c,0xe49d3a2c
+.long	0x0d927850,0x0d927850
+.long	0x9bcc5f6a,0x9bcc5f6a
+.long	0x62467e54,0x62467e54
+.long	0xc2138df6,0xc2138df6
+.long	0xe8b8d890,0xe8b8d890
+.long	0x5ef7392e,0x5ef7392e
+.long	0xf5afc382,0xf5afc382
+.long	0xbe805d9f,0xbe805d9f
+.long	0x7c93d069,0x7c93d069
+.long	0xa92dd56f,0xa92dd56f
+.long	0xb31225cf,0xb31225cf
+.long	0x3b99acc8,0x3b99acc8
+.long	0xa77d1810,0xa77d1810
+.long	0x6e639ce8,0x6e639ce8
+.long	0x7bbb3bdb,0x7bbb3bdb
+.long	0x097826cd,0x097826cd
+.long	0xf418596e,0xf418596e
+.long	0x01b79aec,0x01b79aec
+.long	0xa89a4f83,0xa89a4f83
+.long	0x656e95e6,0x656e95e6
+.long	0x7ee6ffaa,0x7ee6ffaa
+.long	0x08cfbc21,0x08cfbc21
+.long	0xe6e815ef,0xe6e815ef
+.long	0xd99be7ba,0xd99be7ba
+.long	0xce366f4a,0xce366f4a
+.long	0xd4099fea,0xd4099fea
+.long	0xd67cb029,0xd67cb029
+.long	0xafb2a431,0xafb2a431
+.long	0x31233f2a,0x31233f2a
+.long	0x3094a5c6,0x3094a5c6
+.long	0xc066a235,0xc066a235
+.long	0x37bc4e74,0x37bc4e74
+.long	0xa6ca82fc,0xa6ca82fc
+.long	0xb0d090e0,0xb0d090e0
+.long	0x15d8a733,0x15d8a733
+.long	0x4a9804f1,0x4a9804f1
+.long	0xf7daec41,0xf7daec41
+.long	0x0e50cd7f,0x0e50cd7f
+.long	0x2ff69117,0x2ff69117
+.long	0x8dd64d76,0x8dd64d76
+.long	0x4db0ef43,0x4db0ef43
+.long	0x544daacc,0x544daacc
+.long	0xdf0496e4,0xdf0496e4
+.long	0xe3b5d19e,0xe3b5d19e
+.long	0x1b886a4c,0x1b886a4c
+.long	0xb81f2cc1,0xb81f2cc1
+.long	0x7f516546,0x7f516546
+.long	0x04ea5e9d,0x04ea5e9d
+.long	0x5d358c01,0x5d358c01
+.long	0x737487fa,0x737487fa
+.long	0x2e410bfb,0x2e410bfb
+.long	0x5a1d67b3,0x5a1d67b3
+.long	0x52d2db92,0x52d2db92
+.long	0x335610e9,0x335610e9
+.long	0x1347d66d,0x1347d66d
+.long	0x8c61d79a,0x8c61d79a
+.long	0x7a0ca137,0x7a0ca137
+.long	0x8e14f859,0x8e14f859
+.long	0x893c13eb,0x893c13eb
+.long	0xee27a9ce,0xee27a9ce
+.long	0x35c961b7,0x35c961b7
+.long	0xede51ce1,0xede51ce1
+.long	0x3cb1477a,0x3cb1477a
+.long	0x59dfd29c,0x59dfd29c
+.long	0x3f73f255,0x3f73f255
+.long	0x79ce1418,0x79ce1418
+.long	0xbf37c773,0xbf37c773
+.long	0xeacdf753,0xeacdf753
+.long	0x5baafd5f,0x5baafd5f
+.long	0x146f3ddf,0x146f3ddf
+.long	0x86db4478,0x86db4478
+.long	0x81f3afca,0x81f3afca
+.long	0x3ec468b9,0x3ec468b9
+.long	0x2c342438,0x2c342438
+.long	0x5f40a3c2,0x5f40a3c2
+.long	0x72c31d16,0x72c31d16
+.long	0x0c25e2bc,0x0c25e2bc
+.long	0x8b493c28,0x8b493c28
+.long	0x41950dff,0x41950dff
+.long	0x7101a839,0x7101a839
+.long	0xdeb30c08,0xdeb30c08
+.long	0x9ce4b4d8,0x9ce4b4d8
+.long	0x90c15664,0x90c15664
+.long	0x6184cb7b,0x6184cb7b
+.long	0x70b632d5,0x70b632d5
+.long	0x745c6c48,0x745c6c48
+.long	0x4257b8d0,0x4257b8d0
+.byte	0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte	0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte	0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte	0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte	0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte	0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte	0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte	0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte	0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte	0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte	0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte	0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte	0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte	0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte	0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte	0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte	0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte	0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte	0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte	0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte	0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte	0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte	0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte	0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte	0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte	0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte	0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte	0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte	0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte	0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte	0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte	0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long	0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long	0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte	0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte	0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte	0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte	0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte	0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte	0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte	0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte	0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte	0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte	0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte	0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte	0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte	0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte	0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte	0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte	0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte	0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte	0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte	0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte	0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte	0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte	0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte	0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte	0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte	0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte	0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte	0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte	0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte	0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte	0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte	0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte	0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long	0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long	0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte	0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte	0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte	0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte	0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte	0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte	0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte	0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte	0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte	0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte	0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte	0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte	0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte	0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte	0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte	0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte	0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte	0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte	0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte	0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte	0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte	0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte	0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte	0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte	0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte	0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte	0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte	0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte	0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte	0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte	0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte	0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte	0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long	0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long	0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte	0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte	0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte	0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte	0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte	0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte	0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte	0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte	0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte	0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte	0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte	0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte	0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte	0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte	0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte	0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte	0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte	0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte	0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte	0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte	0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte	0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte	0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte	0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte	0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte	0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte	0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte	0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte	0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte	0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte	0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte	0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte	0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long	0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long	0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte	65,69,83,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.align	64
+#endif
+.section	.note.GNU-stack,"",@progbits
diff --git a/mac-x86/crypto/fipsmodule/aes-586.S b/mac-x86/crypto/fipsmodule/aes-586.S
new file mode 100644
index 0000000..3634f64
--- /dev/null
+++ b/mac-x86/crypto/fipsmodule/aes-586.S
@@ -0,0 +1,3226 @@
+# This file is generated from a similarly-named Perl script in the BoringSSL
+# source tree. Do not edit by hand.
+
+#if defined(__i386__)
+#if defined(BORINGSSL_PREFIX)
+#include <boringssl_prefix_symbols_asm.h>
+#endif
+.text
+.private_extern	__x86_AES_encrypt_compact
+.align	4
+__x86_AES_encrypt_compact:
+	movl	%edi,20(%esp)
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+	movl	-128(%ebp),%edi
+	movl	-96(%ebp),%esi
+	movl	-64(%ebp),%edi
+	movl	-32(%ebp),%esi
+	movl	(%ebp),%edi
+	movl	32(%ebp),%esi
+	movl	64(%ebp),%edi
+	movl	96(%ebp),%esi
+.align	4,0x90
+L000loop:
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	shrl	$16,%ebx
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%ch,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	shrl	$24,%ecx
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edx
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	andl	$255,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	movzbl	%ah,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$8,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	movzbl	-128(%ebp,%ecx,1),%ecx
+	shll	$24,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+	movl	$2155905152,%ebp
+	andl	%ecx,%ebp
+	leal	(%ecx,%ecx,1),%edi
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	andl	$4278124286,%edi
+	subl	%ebp,%esi
+	movl	%ecx,%ebp
+	andl	$454761243,%esi
+	rorl	$16,%ebp
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	xorl	%esi,%ecx
+	rorl	$24,%edi
+	xorl	%ebp,%esi
+	roll	$24,%ecx
+	xorl	%edi,%esi
+	movl	$2155905152,%ebp
+	xorl	%esi,%ecx
+	andl	%edx,%ebp
+	leal	(%edx,%edx,1),%edi
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	andl	$4278124286,%edi
+	subl	%ebp,%esi
+	movl	%edx,%ebp
+	andl	$454761243,%esi
+	rorl	$16,%ebp
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	xorl	%esi,%edx
+	rorl	$24,%edi
+	xorl	%ebp,%esi
+	roll	$24,%edx
+	xorl	%edi,%esi
+	movl	$2155905152,%ebp
+	xorl	%esi,%edx
+	andl	%eax,%ebp
+	leal	(%eax,%eax,1),%edi
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	andl	$4278124286,%edi
+	subl	%ebp,%esi
+	movl	%eax,%ebp
+	andl	$454761243,%esi
+	rorl	$16,%ebp
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	xorl	%esi,%eax
+	rorl	$24,%edi
+	xorl	%ebp,%esi
+	roll	$24,%eax
+	xorl	%edi,%esi
+	movl	$2155905152,%ebp
+	xorl	%esi,%eax
+	andl	%ebx,%ebp
+	leal	(%ebx,%ebx,1),%edi
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	andl	$4278124286,%edi
+	subl	%ebp,%esi
+	movl	%ebx,%ebp
+	andl	$454761243,%esi
+	rorl	$16,%ebp
+	xorl	%edi,%esi
+	movl	%ebx,%edi
+	xorl	%esi,%ebx
+	rorl	$24,%edi
+	xorl	%ebp,%esi
+	roll	$24,%ebx
+	xorl	%edi,%esi
+	xorl	%esi,%ebx
+	movl	20(%esp),%edi
+	movl	28(%esp),%ebp
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	cmpl	24(%esp),%edi
+	movl	%edi,20(%esp)
+	jb	L000loop
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	shrl	$16,%ebx
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%ch,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	shrl	$24,%ecx
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edx
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	movzbl	%ah,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$8,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	movzbl	-128(%ebp,%ecx,1),%ecx
+	shll	$24,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+	xorl	16(%edi),%eax
+	xorl	20(%edi),%ebx
+	xorl	24(%edi),%ecx
+	xorl	28(%edi),%edx
+	ret
+.private_extern	__sse_AES_encrypt_compact
+.align	4
+__sse_AES_encrypt_compact:
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+	movl	$454761243,%eax
+	movl	%eax,8(%esp)
+	movl	%eax,12(%esp)
+	movl	-128(%ebp),%eax
+	movl	-96(%ebp),%ebx
+	movl	-64(%ebp),%ecx
+	movl	-32(%ebp),%edx
+	movl	(%ebp),%eax
+	movl	32(%ebp),%ebx
+	movl	64(%ebp),%ecx
+	movl	96(%ebp),%edx
+.align	4,0x90
+L001loop:
+	pshufw	$8,%mm0,%mm1
+	pshufw	$13,%mm4,%mm5
+	movd	%mm1,%eax
+	movd	%mm5,%ebx
+	movl	%edi,20(%esp)
+	movzbl	%al,%esi
+	movzbl	%ah,%edx
+	pshufw	$13,%mm0,%mm2
+	movzbl	-128(%ebp,%esi,1),%ecx
+	movzbl	%bl,%edi
+	movzbl	-128(%ebp,%edx,1),%edx
+	shrl	$16,%eax
+	shll	$8,%edx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bh,%edi
+	shll	$16,%esi
+	pshufw	$8,%mm4,%mm6
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%ah,%edi
+	shll	$24,%esi
+	shrl	$16,%ebx
+	orl	%esi,%edx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bh,%edi
+	shll	$8,%esi
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%al,%edi
+	shll	$24,%esi
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bl,%edi
+	movd	%mm2,%eax
+	movd	%ecx,%mm0
+	movzbl	-128(%ebp,%edi,1),%ecx
+	movzbl	%ah,%edi
+	shll	$16,%ecx
+	movd	%mm6,%ebx
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bh,%edi
+	shll	$24,%esi
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bl,%edi
+	shll	$8,%esi
+	shrl	$16,%ebx
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%al,%edi
+	shrl	$16,%eax
+	movd	%ecx,%mm1
+	movzbl	-128(%ebp,%edi,1),%ecx
+	movzbl	%ah,%edi
+	shll	$16,%ecx
+	andl	$255,%eax
+	orl	%esi,%ecx
+	punpckldq	%mm1,%mm0
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bh,%edi
+	shll	$24,%esi
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%eax,1),%eax
+	orl	%esi,%ecx
+	shll	$16,%eax
+	movzbl	-128(%ebp,%edi,1),%esi
+	orl	%eax,%edx
+	shll	$8,%esi
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	orl	%esi,%ecx
+	orl	%ebx,%edx
+	movl	20(%esp),%edi
+	movd	%ecx,%mm4
+	movd	%edx,%mm5
+	punpckldq	%mm5,%mm4
+	addl	$16,%edi
+	cmpl	24(%esp),%edi
+	ja	L002out
+	movq	8(%esp),%mm2
+	pxor	%mm3,%mm3
+	pxor	%mm7,%mm7
+	movq	%mm0,%mm1
+	movq	%mm4,%mm5
+	pcmpgtb	%mm0,%mm3
+	pcmpgtb	%mm4,%mm7
+	pand	%mm2,%mm3
+	pand	%mm2,%mm7
+	pshufw	$177,%mm0,%mm2
+	pshufw	$177,%mm4,%mm6
+	paddb	%mm0,%mm0
+	paddb	%mm4,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pshufw	$177,%mm2,%mm3
+	pshufw	$177,%mm6,%mm7
+	pxor	%mm0,%mm1
+	pxor	%mm4,%mm5
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	movq	%mm3,%mm2
+	movq	%mm7,%mm6
+	pslld	$8,%mm3
+	pslld	$8,%mm7
+	psrld	$24,%mm2
+	psrld	$24,%mm6
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	movq	%mm1,%mm3
+	movq	%mm5,%mm7
+	movq	(%edi),%mm2
+	movq	8(%edi),%mm6
+	psrld	$8,%mm1
+	psrld	$8,%mm5
+	movl	-128(%ebp),%eax
+	pslld	$24,%mm3
+	pslld	$24,%mm7
+	movl	-64(%ebp),%ebx
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	movl	(%ebp),%ecx
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	movl	64(%ebp),%edx
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	jmp	L001loop
+.align	4,0x90
+L002out:
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	ret
+.private_extern	__x86_AES_encrypt
+.align	4
+__x86_AES_encrypt:
+	movl	%edi,20(%esp)
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+.align	4,0x90
+L003loop:
+	movl	%eax,%esi
+	andl	$255,%esi
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%bh,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	shrl	$16,%ebx
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%ch,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%eax,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	shrl	$24,%ecx
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%dh,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edx
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movzbl	%bh,%edi
+	xorl	1(%ebp,%edi,8),%esi
+	movl	20(%esp),%edi
+	movl	(%ebp,%edx,8),%edx
+	movzbl	%ah,%eax
+	xorl	3(%ebp,%eax,8),%edx
+	movl	4(%esp),%eax
+	andl	$255,%ebx
+	xorl	2(%ebp,%ebx,8),%edx
+	movl	8(%esp),%ebx
+	xorl	1(%ebp,%ecx,8),%edx
+	movl	%esi,%ecx
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	cmpl	24(%esp),%edi
+	movl	%edi,20(%esp)
+	jb	L003loop
+	movl	%eax,%esi
+	andl	$255,%esi
+	movl	2(%ebp,%esi,8),%esi
+	andl	$255,%esi
+	movzbl	%bh,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$65280,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$16711680,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movl	2(%ebp,%edi,8),%edi
+	andl	$4278190080,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	shrl	$16,%ebx
+	movl	2(%ebp,%esi,8),%esi
+	andl	$255,%esi
+	movzbl	%ch,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$65280,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$16711680,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$24,%edi
+	movl	2(%ebp,%edi,8),%edi
+	andl	$4278190080,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	shrl	$24,%ecx
+	movl	2(%ebp,%esi,8),%esi
+	andl	$255,%esi
+	movzbl	%dh,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$65280,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edx
+	andl	$255,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$16711680,%edi
+	xorl	%edi,%esi
+	movzbl	%bh,%edi
+	movl	2(%ebp,%edi,8),%edi
+	andl	$4278190080,%edi
+	xorl	%edi,%esi
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movl	2(%ebp,%edx,8),%edx
+	andl	$255,%edx
+	movzbl	%ah,%eax
+	movl	(%ebp,%eax,8),%eax
+	andl	$65280,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	andl	$255,%ebx
+	movl	(%ebp,%ebx,8),%ebx
+	andl	$16711680,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	movl	2(%ebp,%ecx,8),%ecx
+	andl	$4278190080,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	ret
+.align	6,0x90
+LAES_Te:
+.long	2774754246,2774754246
+.long	2222750968,2222750968
+.long	2574743534,2574743534
+.long	2373680118,2373680118
+.long	234025727,234025727
+.long	3177933782,3177933782
+.long	2976870366,2976870366
+.long	1422247313,1422247313
+.long	1345335392,1345335392
+.long	50397442,50397442
+.long	2842126286,2842126286
+.long	2099981142,2099981142
+.long	436141799,436141799
+.long	1658312629,1658312629
+.long	3870010189,3870010189
+.long	2591454956,2591454956
+.long	1170918031,1170918031
+.long	2642575903,2642575903
+.long	1086966153,1086966153
+.long	2273148410,2273148410
+.long	368769775,368769775
+.long	3948501426,3948501426
+.long	3376891790,3376891790
+.long	200339707,200339707
+.long	3970805057,3970805057
+.long	1742001331,1742001331
+.long	4255294047,4255294047
+.long	3937382213,3937382213
+.long	3214711843,3214711843
+.long	4154762323,4154762323
+.long	2524082916,2524082916
+.long	1539358875,1539358875
+.long	3266819957,3266819957
+.long	486407649,486407649
+.long	2928907069,2928907069
+.long	1780885068,1780885068
+.long	1513502316,1513502316
+.long	1094664062,1094664062
+.long	49805301,49805301
+.long	1338821763,1338821763
+.long	1546925160,1546925160
+.long	4104496465,4104496465
+.long	887481809,887481809
+.long	150073849,150073849
+.long	2473685474,2473685474
+.long	1943591083,1943591083
+.long	1395732834,1395732834
+.long	1058346282,1058346282
+.long	201589768,201589768
+.long	1388824469,1388824469
+.long	1696801606,1696801606
+.long	1589887901,1589887901
+.long	672667696,672667696
+.long	2711000631,2711000631
+.long	251987210,251987210
+.long	3046808111,3046808111
+.long	151455502,151455502
+.long	907153956,907153956
+.long	2608889883,2608889883
+.long	1038279391,1038279391
+.long	652995533,652995533
+.long	1764173646,1764173646
+.long	3451040383,3451040383
+.long	2675275242,2675275242
+.long	453576978,453576978
+.long	2659418909,2659418909
+.long	1949051992,1949051992
+.long	773462580,773462580
+.long	756751158,756751158
+.long	2993581788,2993581788
+.long	3998898868,3998898868
+.long	4221608027,4221608027
+.long	4132590244,4132590244
+.long	1295727478,1295727478
+.long	1641469623,1641469623
+.long	3467883389,3467883389
+.long	2066295122,2066295122
+.long	1055122397,1055122397
+.long	1898917726,1898917726
+.long	2542044179,2542044179
+.long	4115878822,4115878822
+.long	1758581177,1758581177
+.long	0,0
+.long	753790401,753790401
+.long	1612718144,1612718144
+.long	536673507,536673507
+.long	3367088505,3367088505
+.long	3982187446,3982187446
+.long	3194645204,3194645204
+.long	1187761037,1187761037
+.long	3653156455,3653156455
+.long	1262041458,1262041458
+.long	3729410708,3729410708
+.long	3561770136,3561770136
+.long	3898103984,3898103984
+.long	1255133061,1255133061
+.long	1808847035,1808847035
+.long	720367557,720367557
+.long	3853167183,3853167183
+.long	385612781,385612781
+.long	3309519750,3309519750
+.long	3612167578,3612167578
+.long	1429418854,1429418854
+.long	2491778321,2491778321
+.long	3477423498,3477423498
+.long	284817897,284817897
+.long	100794884,100794884
+.long	2172616702,2172616702
+.long	4031795360,4031795360
+.long	1144798328,1144798328
+.long	3131023141,3131023141
+.long	3819481163,3819481163
+.long	4082192802,4082192802
+.long	4272137053,4272137053
+.long	3225436288,3225436288
+.long	2324664069,2324664069
+.long	2912064063,2912064063
+.long	3164445985,3164445985
+.long	1211644016,1211644016
+.long	83228145,83228145
+.long	3753688163,3753688163
+.long	3249976951,3249976951
+.long	1977277103,1977277103
+.long	1663115586,1663115586
+.long	806359072,806359072
+.long	452984805,452984805
+.long	250868733,250868733
+.long	1842533055,1842533055
+.long	1288555905,1288555905
+.long	336333848,336333848
+.long	890442534,890442534
+.long	804056259,804056259
+.long	3781124030,3781124030
+.long	2727843637,2727843637
+.long	3427026056,3427026056
+.long	957814574,957814574
+.long	1472513171,1472513171
+.long	4071073621,4071073621
+.long	2189328124,2189328124
+.long	1195195770,1195195770
+.long	2892260552,2892260552
+.long	3881655738,3881655738
+.long	723065138,723065138
+.long	2507371494,2507371494
+.long	2690670784,2690670784
+.long	2558624025,2558624025
+.long	3511635870,3511635870
+.long	2145180835,2145180835
+.long	1713513028,1713513028
+.long	2116692564,2116692564
+.long	2878378043,2878378043
+.long	2206763019,2206763019
+.long	3393603212,3393603212
+.long	703524551,703524551
+.long	3552098411,3552098411
+.long	1007948840,1007948840
+.long	2044649127,2044649127
+.long	3797835452,3797835452
+.long	487262998,487262998
+.long	1994120109,1994120109
+.long	1004593371,1004593371
+.long	1446130276,1446130276
+.long	1312438900,1312438900
+.long	503974420,503974420
+.long	3679013266,3679013266
+.long	168166924,168166924
+.long	1814307912,1814307912
+.long	3831258296,3831258296
+.long	1573044895,1573044895
+.long	1859376061,1859376061
+.long	4021070915,4021070915
+.long	2791465668,2791465668
+.long	2828112185,2828112185
+.long	2761266481,2761266481
+.long	937747667,937747667
+.long	2339994098,2339994098
+.long	854058965,854058965
+.long	1137232011,1137232011
+.long	1496790894,1496790894
+.long	3077402074,3077402074
+.long	2358086913,2358086913
+.long	1691735473,1691735473
+.long	3528347292,3528347292
+.long	3769215305,3769215305
+.long	3027004632,3027004632
+.long	4199962284,4199962284
+.long	133494003,133494003
+.long	636152527,636152527
+.long	2942657994,2942657994
+.long	2390391540,2390391540
+.long	3920539207,3920539207
+.long	403179536,403179536
+.long	3585784431,3585784431
+.long	2289596656,2289596656
+.long	1864705354,1864705354
+.long	1915629148,1915629148
+.long	605822008,605822008
+.long	4054230615,4054230615
+.long	3350508659,3350508659
+.long	1371981463,1371981463
+.long	602466507,602466507
+.long	2094914977,2094914977
+.long	2624877800,2624877800
+.long	555687742,555687742
+.long	3712699286,3712699286
+.long	3703422305,3703422305
+.long	2257292045,2257292045
+.long	2240449039,2240449039
+.long	2423288032,2423288032
+.long	1111375484,1111375484
+.long	3300242801,3300242801
+.long	2858837708,2858837708
+.long	3628615824,3628615824
+.long	84083462,84083462
+.long	32962295,32962295
+.long	302911004,302911004
+.long	2741068226,2741068226
+.long	1597322602,1597322602
+.long	4183250862,4183250862
+.long	3501832553,3501832553
+.long	2441512471,2441512471
+.long	1489093017,1489093017
+.long	656219450,656219450
+.long	3114180135,3114180135
+.long	954327513,954327513
+.long	335083755,335083755
+.long	3013122091,3013122091
+.long	856756514,856756514
+.long	3144247762,3144247762
+.long	1893325225,1893325225
+.long	2307821063,2307821063
+.long	2811532339,2811532339
+.long	3063651117,3063651117
+.long	572399164,572399164
+.long	2458355477,2458355477
+.long	552200649,552200649
+.long	1238290055,1238290055
+.long	4283782570,4283782570
+.long	2015897680,2015897680
+.long	2061492133,2061492133
+.long	2408352771,2408352771
+.long	4171342169,4171342169
+.long	2156497161,2156497161
+.long	386731290,386731290
+.long	3669999461,3669999461
+.long	837215959,837215959
+.long	3326231172,3326231172
+.long	3093850320,3093850320
+.long	3275833730,3275833730
+.long	2962856233,2962856233
+.long	1999449434,1999449434
+.long	286199582,286199582
+.long	3417354363,3417354363
+.long	4233385128,4233385128
+.long	3602627437,3602627437
+.long	974525996,974525996
+.byte	99,124,119,123,242,107,111,197
+.byte	48,1,103,43,254,215,171,118
+.byte	202,130,201,125,250,89,71,240
+.byte	173,212,162,175,156,164,114,192
+.byte	183,253,147,38,54,63,247,204
+.byte	52,165,229,241,113,216,49,21
+.byte	4,199,35,195,24,150,5,154
+.byte	7,18,128,226,235,39,178,117
+.byte	9,131,44,26,27,110,90,160
+.byte	82,59,214,179,41,227,47,132
+.byte	83,209,0,237,32,252,177,91
+.byte	106,203,190,57,74,76,88,207
+.byte	208,239,170,251,67,77,51,133
+.byte	69,249,2,127,80,60,159,168
+.byte	81,163,64,143,146,157,56,245
+.byte	188,182,218,33,16,255,243,210
+.byte	205,12,19,236,95,151,68,23
+.byte	196,167,126,61,100,93,25,115
+.byte	96,129,79,220,34,42,144,136
+.byte	70,238,184,20,222,94,11,219
+.byte	224,50,58,10,73,6,36,92
+.byte	194,211,172,98,145,149,228,121
+.byte	231,200,55,109,141,213,78,169
+.byte	108,86,244,234,101,122,174,8
+.byte	186,120,37,46,28,166,180,198
+.byte	232,221,116,31,75,189,139,138
+.byte	112,62,181,102,72,3,246,14
+.byte	97,53,87,185,134,193,29,158
+.byte	225,248,152,17,105,217,142,148
+.byte	155,30,135,233,206,85,40,223
+.byte	140,161,137,13,191,230,66,104
+.byte	65,153,45,15,176,84,187,22
+.byte	99,124,119,123,242,107,111,197
+.byte	48,1,103,43,254,215,171,118
+.byte	202,130,201,125,250,89,71,240
+.byte	173,212,162,175,156,164,114,192
+.byte	183,253,147,38,54,63,247,204
+.byte	52,165,229,241,113,216,49,21
+.byte	4,199,35,195,24,150,5,154
+.byte	7,18,128,226,235,39,178,117
+.byte	9,131,44,26,27,110,90,160
+.byte	82,59,214,179,41,227,47,132
+.byte	83,209,0,237,32,252,177,91
+.byte	106,203,190,57,74,76,88,207
+.byte	208,239,170,251,67,77,51,133
+.byte	69,249,2,127,80,60,159,168
+.byte	81,163,64,143,146,157,56,245
+.byte	188,182,218,33,16,255,243,210
+.byte	205,12,19,236,95,151,68,23
+.byte	196,167,126,61,100,93,25,115
+.byte	96,129,79,220,34,42,144,136
+.byte	70,238,184,20,222,94,11,219
+.byte	224,50,58,10,73,6,36,92
+.byte	194,211,172,98,145,149,228,121
+.byte	231,200,55,109,141,213,78,169
+.byte	108,86,244,234,101,122,174,8
+.byte	186,120,37,46,28,166,180,198
+.byte	232,221,116,31,75,189,139,138
+.byte	112,62,181,102,72,3,246,14
+.byte	97,53,87,185,134,193,29,158
+.byte	225,248,152,17,105,217,142,148
+.byte	155,30,135,233,206,85,40,223
+.byte	140,161,137,13,191,230,66,104
+.byte	65,153,45,15,176,84,187,22
+.byte	99,124,119,123,242,107,111,197
+.byte	48,1,103,43,254,215,171,118
+.byte	202,130,201,125,250,89,71,240
+.byte	173,212,162,175,156,164,114,192
+.byte	183,253,147,38,54,63,247,204
+.byte	52,165,229,241,113,216,49,21
+.byte	4,199,35,195,24,150,5,154
+.byte	7,18,128,226,235,39,178,117
+.byte	9,131,44,26,27,110,90,160
+.byte	82,59,214,179,41,227,47,132
+.byte	83,209,0,237,32,252,177,91
+.byte	106,203,190,57,74,76,88,207
+.byte	208,239,170,251,67,77,51,133
+.byte	69,249,2,127,80,60,159,168
+.byte	81,163,64,143,146,157,56,245
+.byte	188,182,218,33,16,255,243,210
+.byte	205,12,19,236,95,151,68,23
+.byte	196,167,126,61,100,93,25,115
+.byte	96,129,79,220,34,42,144,136
+.byte	70,238,184,20,222,94,11,219
+.byte	224,50,58,10,73,6,36,92
+.byte	194,211,172,98,145,149,228,121
+.byte	231,200,55,109,141,213,78,169
+.byte	108,86,244,234,101,122,174,8
+.byte	186,120,37,46,28,166,180,198
+.byte	232,221,116,31,75,189,139,138
+.byte	112,62,181,102,72,3,246,14
+.byte	97,53,87,185,134,193,29,158
+.byte	225,248,152,17,105,217,142,148
+.byte	155,30,135,233,206,85,40,223
+.byte	140,161,137,13,191,230,66,104
+.byte	65,153,45,15,176,84,187,22
+.byte	99,124,119,123,242,107,111,197
+.byte	48,1,103,43,254,215,171,118
+.byte	202,130,201,125,250,89,71,240
+.byte	173,212,162,175,156,164,114,192
+.byte	183,253,147,38,54,63,247,204
+.byte	52,165,229,241,113,216,49,21
+.byte	4,199,35,195,24,150,5,154
+.byte	7,18,128,226,235,39,178,117
+.byte	9,131,44,26,27,110,90,160
+.byte	82,59,214,179,41,227,47,132
+.byte	83,209,0,237,32,252,177,91
+.byte	106,203,190,57,74,76,88,207
+.byte	208,239,170,251,67,77,51,133
+.byte	69,249,2,127,80,60,159,168
+.byte	81,163,64,143,146,157,56,245
+.byte	188,182,218,33,16,255,243,210
+.byte	205,12,19,236,95,151,68,23
+.byte	196,167,126,61,100,93,25,115
+.byte	96,129,79,220,34,42,144,136
+.byte	70,238,184,20,222,94,11,219
+.byte	224,50,58,10,73,6,36,92
+.byte	194,211,172,98,145,149,228,121
+.byte	231,200,55,109,141,213,78,169
+.byte	108,86,244,234,101,122,174,8
+.byte	186,120,37,46,28,166,180,198
+.byte	232,221,116,31,75,189,139,138
+.byte	112,62,181,102,72,3,246,14
+.byte	97,53,87,185,134,193,29,158
+.byte	225,248,152,17,105,217,142,148
+.byte	155,30,135,233,206,85,40,223
+.byte	140,161,137,13,191,230,66,104
+.byte	65,153,45,15,176,84,187,22
+.long	1,2,4,8
+.long	16,32,64,128
+.long	27,54,0,0
+.long	0,0,0,0
+.globl	_aes_nohw_encrypt
+.private_extern	_aes_nohw_encrypt
+.align	4
+_aes_nohw_encrypt:
+L_aes_nohw_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	28(%esp),%edi
+	movl	%esp,%eax
+	subl	$36,%esp
+	andl	$-64,%esp
+	leal	-127(%edi),%ebx
+	subl	%esp,%ebx
+	negl	%ebx
+	andl	$960,%ebx
+	subl	%ebx,%esp
+	addl	$4,%esp
+	movl	%eax,28(%esp)
+	call	L004pic_point
+L004pic_point:
+	popl	%ebp
+	movl	L_OPENSSL_ia32cap_P$non_lazy_ptr-L004pic_point(%ebp),%eax
+	leal	LAES_Te-L004pic_point(%ebp),%ebp
+	leal	764(%esp),%ebx
+	subl	%ebp,%ebx
+	andl	$768,%ebx
+	leal	2176(%ebp,%ebx,1),%ebp
+	btl	$25,(%eax)
+	jnc	L005x86
+	movq	(%esi),%mm0
+	movq	8(%esi),%mm4
+	call	__sse_AES_encrypt_compact
+	movl	28(%esp),%esp
+	movl	24(%esp),%esi
+	movq	%mm0,(%esi)
+	movq	%mm4,8(%esi)
+	emms
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	4,0x90
+L005x86:
+	movl	%ebp,24(%esp)
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	call	__x86_AES_encrypt_compact
+	movl	28(%esp),%esp
+	movl	24(%esp),%esi
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.private_extern	__x86_AES_decrypt_compact
+.align	4
+__x86_AES_decrypt_compact:
+	movl	%edi,20(%esp)
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+	movl	-128(%ebp),%edi
+	movl	-96(%ebp),%esi
+	movl	-64(%ebp),%edi
+	movl	-32(%ebp),%esi
+	movl	(%ebp),%edi
+	movl	32(%ebp),%esi
+	movl	64(%ebp),%edi
+	movl	96(%ebp),%esi
+.align	4,0x90
+L006loop:
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ebx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%ah,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	andl	$255,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	movzbl	%ch,%ecx
+	movzbl	-128(%ebp,%ecx,1),%ecx
+	shll	$8,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+	shrl	$16,%ebx
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	shrl	$24,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$24,%eax
+	xorl	%eax,%edx
+	movl	$2155905152,%edi
+	andl	%ecx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%ecx,%ecx,1),%eax
+	subl	%edi,%esi
+	andl	$4278124286,%eax
+	andl	$454761243,%esi
+	xorl	%esi,%eax
+	movl	$2155905152,%edi
+	andl	%eax,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%eax,%eax,1),%ebx
+	subl	%edi,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	xorl	%ecx,%eax
+	xorl	%esi,%ebx
+	movl	$2155905152,%edi
+	andl	%ebx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%ebx,%ebx,1),%ebp
+	subl	%edi,%esi
+	andl	$4278124286,%ebp
+	andl	$454761243,%esi
+	xorl	%ecx,%ebx
+	roll	$8,%ecx
+	xorl	%esi,%ebp
+	xorl	%eax,%ecx
+	xorl	%ebp,%eax
+	xorl	%ebx,%ecx
+	xorl	%ebp,%ebx
+	roll	$24,%eax
+	xorl	%ebp,%ecx
+	roll	$16,%ebx
+	xorl	%eax,%ecx
+	roll	$8,%ebp
+	xorl	%ebx,%ecx
+	movl	4(%esp),%eax
+	xorl	%ebp,%ecx
+	movl	%ecx,12(%esp)
+	movl	$2155905152,%edi
+	andl	%edx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%edx,%edx,1),%ebx
+	subl	%edi,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	xorl	%esi,%ebx
+	movl	$2155905152,%edi
+	andl	%ebx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%ebx,%ebx,1),%ecx
+	subl	%edi,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%edx,%ebx
+	xorl	%esi,%ecx
+	movl	$2155905152,%edi
+	andl	%ecx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%ecx,%ecx,1),%ebp
+	subl	%edi,%esi
+	andl	$4278124286,%ebp
+	andl	$454761243,%esi
+	xorl	%edx,%ecx
+	roll	$8,%edx
+	xorl	%esi,%ebp
+	xorl	%ebx,%edx
+	xorl	%ebp,%ebx
+	xorl	%ecx,%edx
+	xorl	%ebp,%ecx
+	roll	$24,%ebx
+	xorl	%ebp,%edx
+	roll	$16,%ecx
+	xorl	%ebx,%edx
+	roll	$8,%ebp
+	xorl	%ecx,%edx
+	movl	8(%esp),%ebx
+	xorl	%ebp,%edx
+	movl	%edx,16(%esp)
+	movl	$2155905152,%edi
+	andl	%eax,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%eax,%eax,1),%ecx
+	subl	%edi,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%esi,%ecx
+	movl	$2155905152,%edi
+	andl	%ecx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%ecx,%ecx,1),%edx
+	subl	%edi,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	xorl	%eax,%ecx
+	xorl	%esi,%edx
+	movl	$2155905152,%edi
+	andl	%edx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%edx,%edx,1),%ebp
+	subl	%edi,%esi
+	andl	$4278124286,%ebp
+	andl	$454761243,%esi
+	xorl	%eax,%edx
+	roll	$8,%eax
+	xorl	%esi,%ebp
+	xorl	%ecx,%eax
+	xorl	%ebp,%ecx
+	xorl	%edx,%eax
+	xorl	%ebp,%edx
+	roll	$24,%ecx
+	xorl	%ebp,%eax
+	roll	$16,%edx
+	xorl	%ecx,%eax
+	roll	$8,%ebp
+	xorl	%edx,%eax
+	xorl	%ebp,%eax
+	movl	$2155905152,%edi
+	andl	%ebx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%ebx,%ebx,1),%ecx
+	subl	%edi,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%esi,%ecx
+	movl	$2155905152,%edi
+	andl	%ecx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%ecx,%ecx,1),%edx
+	subl	%edi,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	xorl	%ebx,%ecx
+	xorl	%esi,%edx
+	movl	$2155905152,%edi
+	andl	%edx,%edi
+	movl	%edi,%esi
+	shrl	$7,%edi
+	leal	(%edx,%edx,1),%ebp
+	subl	%edi,%esi
+	andl	$4278124286,%ebp
+	andl	$454761243,%esi
+	xorl	%ebx,%edx
+	roll	$8,%ebx
+	xorl	%esi,%ebp
+	xorl	%ecx,%ebx
+	xorl	%ebp,%ecx
+	xorl	%edx,%ebx
+	xorl	%ebp,%edx
+	roll	$24,%ecx
+	xorl	%ebp,%ebx
+	roll	$16,%edx
+	xorl	%ecx,%ebx
+	roll	$8,%ebp
+	xorl	%edx,%ebx
+	movl	12(%esp),%ecx
+	xorl	%ebp,%ebx
+	movl	16(%esp),%edx
+	movl	20(%esp),%edi
+	movl	28(%esp),%ebp
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	cmpl	24(%esp),%edi
+	movl	%edi,20(%esp)
+	jb	L006loop
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ebx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%ah,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	movzbl	%ch,%ecx
+	movzbl	-128(%ebp,%ecx,1),%ecx
+	shll	$8,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+	shrl	$16,%ebx
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	shrl	$24,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$24,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	xorl	16(%edi),%eax
+	xorl	20(%edi),%ebx
+	xorl	24(%edi),%ecx
+	xorl	28(%edi),%edx
+	ret
+.private_extern	__sse_AES_decrypt_compact
+.align	4
+__sse_AES_decrypt_compact:
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+	movl	$454761243,%eax
+	movl	%eax,8(%esp)
+	movl	%eax,12(%esp)
+	movl	-128(%ebp),%eax
+	movl	-96(%ebp),%ebx
+	movl	-64(%ebp),%ecx
+	movl	-32(%ebp),%edx
+	movl	(%ebp),%eax
+	movl	32(%ebp),%ebx
+	movl	64(%ebp),%ecx
+	movl	96(%ebp),%edx
+.align	4,0x90
+L007loop:
+	pshufw	$12,%mm0,%mm1
+	pshufw	$9,%mm4,%mm5
+	movd	%mm1,%eax
+	movd	%mm5,%ebx
+	movl	%edi,20(%esp)
+	movzbl	%al,%esi
+	movzbl	%ah,%edx
+	pshufw	$6,%mm0,%mm2
+	movzbl	-128(%ebp,%esi,1),%ecx
+	movzbl	%bl,%edi
+	movzbl	-128(%ebp,%edx,1),%edx
+	shrl	$16,%eax
+	shll	$8,%edx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bh,%edi
+	shll	$16,%esi
+	pshufw	$3,%mm4,%mm6
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%ah,%edi
+	shll	$24,%esi
+	shrl	$16,%ebx
+	orl	%esi,%edx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bh,%edi
+	shll	$24,%esi
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%al,%edi
+	shll	$8,%esi
+	movd	%mm2,%eax
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bl,%edi
+	shll	$16,%esi
+	movd	%mm6,%ebx
+	movd	%ecx,%mm0
+	movzbl	-128(%ebp,%edi,1),%ecx
+	movzbl	%al,%edi
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bl,%edi
+	orl	%esi,%edx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%ah,%edi
+	shll	$16,%esi
+	shrl	$16,%eax
+	orl	%esi,%edx
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%bh,%edi
+	shrl	$16,%ebx
+	shll	$8,%esi
+	movd	%edx,%mm1
+	movzbl	-128(%ebp,%edi,1),%edx
+	movzbl	%bh,%edi
+	shll	$24,%edx
+	andl	$255,%ebx
+	orl	%esi,%edx
+	punpckldq	%mm1,%mm0
+	movzbl	-128(%ebp,%edi,1),%esi
+	movzbl	%al,%edi
+	shll	$8,%esi
+	movzbl	%ah,%eax
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	orl	%esi,%ecx
+	movzbl	-128(%ebp,%edi,1),%esi
+	orl	%ebx,%edx
+	shll	$16,%esi
+	movzbl	-128(%ebp,%eax,1),%eax
+	orl	%esi,%edx
+	shll	$24,%eax
+	orl	%eax,%ecx
+	movl	20(%esp),%edi
+	movd	%edx,%mm4
+	movd	%ecx,%mm5
+	punpckldq	%mm5,%mm4
+	addl	$16,%edi
+	cmpl	24(%esp),%edi
+	ja	L008out
+	movq	%mm0,%mm3
+	movq	%mm4,%mm7
+	pshufw	$228,%mm0,%mm2
+	pshufw	$228,%mm4,%mm6
+	movq	%mm0,%mm1
+	movq	%mm4,%mm5
+	pshufw	$177,%mm0,%mm0
+	pshufw	$177,%mm4,%mm4
+	pslld	$8,%mm2
+	pslld	$8,%mm6
+	psrld	$8,%mm3
+	psrld	$8,%mm7
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pslld	$16,%mm2
+	pslld	$16,%mm6
+	psrld	$16,%mm3
+	psrld	$16,%mm7
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	movq	8(%esp),%mm3
+	pxor	%mm2,%mm2
+	pxor	%mm6,%mm6
+	pcmpgtb	%mm1,%mm2
+	pcmpgtb	%mm5,%mm6
+	pand	%mm3,%mm2
+	pand	%mm3,%mm6
+	paddb	%mm1,%mm1
+	paddb	%mm5,%mm5
+	pxor	%mm2,%mm1
+	pxor	%mm6,%mm5
+	movq	%mm1,%mm3
+	movq	%mm5,%mm7
+	movq	%mm1,%mm2
+	movq	%mm5,%mm6
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	pslld	$24,%mm3
+	pslld	$24,%mm7
+	psrld	$8,%mm2
+	psrld	$8,%mm6
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	movq	8(%esp),%mm2
+	pxor	%mm3,%mm3
+	pxor	%mm7,%mm7
+	pcmpgtb	%mm1,%mm3
+	pcmpgtb	%mm5,%mm7
+	pand	%mm2,%mm3
+	pand	%mm2,%mm7
+	paddb	%mm1,%mm1
+	paddb	%mm5,%mm5
+	pxor	%mm3,%mm1
+	pxor	%mm7,%mm5
+	pshufw	$177,%mm1,%mm3
+	pshufw	$177,%mm5,%mm7
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pxor	%mm3,%mm3
+	pxor	%mm7,%mm7
+	pcmpgtb	%mm1,%mm3
+	pcmpgtb	%mm5,%mm7
+	pand	%mm2,%mm3
+	pand	%mm2,%mm7
+	paddb	%mm1,%mm1
+	paddb	%mm5,%mm5
+	pxor	%mm3,%mm1
+	pxor	%mm7,%mm5
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	movq	%mm1,%mm3
+	movq	%mm5,%mm7
+	pshufw	$177,%mm1,%mm2
+	pshufw	$177,%mm5,%mm6
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	pslld	$8,%mm1
+	pslld	$8,%mm5
+	psrld	$8,%mm3
+	psrld	$8,%mm7
+	movq	(%edi),%mm2
+	movq	8(%edi),%mm6
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	movl	-128(%ebp),%eax
+	pslld	$16,%mm1
+	pslld	$16,%mm5
+	movl	-64(%ebp),%ebx
+	psrld	$16,%mm3
+	psrld	$16,%mm7
+	movl	(%ebp),%ecx
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	movl	64(%ebp),%edx
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	jmp	L007loop
+.align	4,0x90
+L008out:
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	ret
+.private_extern	__x86_AES_decrypt
+.align	4
+__x86_AES_decrypt:
+	movl	%edi,20(%esp)
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+.align	4,0x90
+L009loop:
+	movl	%eax,%esi
+	andl	$255,%esi
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%dh,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%ebx,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%ah,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%ecx,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%bh,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movl	(%ebp,%edx,8),%edx
+	movzbl	%ch,%ecx
+	xorl	3(%ebp,%ecx,8),%edx
+	movl	%esi,%ecx
+	shrl	$16,%ebx
+	andl	$255,%ebx
+	xorl	2(%ebp,%ebx,8),%edx
+	movl	8(%esp),%ebx
+	shrl	$24,%eax
+	xorl	1(%ebp,%eax,8),%edx
+	movl	4(%esp),%eax
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	cmpl	24(%esp),%edi
+	movl	%edi,20(%esp)
+	jb	L009loop
+	leal	2176(%ebp),%ebp
+	movl	-128(%ebp),%edi
+	movl	-96(%ebp),%esi
+	movl	-64(%ebp),%edi
+	movl	-32(%ebp),%esi
+	movl	(%ebp),%edi
+	movl	32(%ebp),%esi
+	movl	64(%ebp),%edi
+	movl	96(%ebp),%esi
+	leal	-128(%ebp),%ebp
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ebx,%edi
+	shrl	$24,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	movzbl	(%ebp,%esi,1),%esi
+	movzbl	%ah,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$24,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	movzbl	(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movzbl	(%ebp,%edx,1),%edx
+	movzbl	%ch,%ecx
+	movzbl	(%ebp,%ecx,1),%ecx
+	shll	$8,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+	shrl	$16,%ebx
+	andl	$255,%ebx
+	movzbl	(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	shrl	$24,%eax
+	movzbl	(%ebp,%eax,1),%eax
+	shll	$24,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	leal	-2048(%ebp),%ebp
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	ret
+.align	6,0x90
+LAES_Td:
+.long	1353184337,1353184337
+.long	1399144830,1399144830
+.long	3282310938,3282310938
+.long	2522752826,2522752826
+.long	3412831035,3412831035
+.long	4047871263,4047871263
+.long	2874735276,2874735276
+.long	2466505547,2466505547
+.long	1442459680,1442459680
+.long	4134368941,4134368941
+.long	2440481928,2440481928
+.long	625738485,625738485
+.long	4242007375,4242007375
+.long	3620416197,3620416197
+.long	2151953702,2151953702
+.long	2409849525,2409849525
+.long	1230680542,1230680542
+.long	1729870373,1729870373
+.long	2551114309,2551114309
+.long	3787521629,3787521629
+.long	41234371,41234371
+.long	317738113,317738113
+.long	2744600205,2744600205
+.long	3338261355,3338261355
+.long	3881799427,3881799427
+.long	2510066197,2510066197
+.long	3950669247,3950669247
+.long	3663286933,3663286933
+.long	763608788,763608788
+.long	3542185048,3542185048
+.long	694804553,694804553
+.long	1154009486,1154009486
+.long	1787413109,1787413109
+.long	2021232372,2021232372
+.long	1799248025,1799248025
+.long	3715217703,3715217703
+.long	3058688446,3058688446
+.long	397248752,397248752
+.long	1722556617,1722556617
+.long	3023752829,3023752829
+.long	407560035,407560035
+.long	2184256229,2184256229
+.long	1613975959,1613975959
+.long	1165972322,1165972322
+.long	3765920945,3765920945
+.long	2226023355,2226023355
+.long	480281086,480281086
+.long	2485848313,2485848313
+.long	1483229296,1483229296
+.long	436028815,436028815
+.long	2272059028,2272059028
+.long	3086515026,3086515026
+.long	601060267,601060267
+.long	3791801202,3791801202
+.long	1468997603,1468997603
+.long	715871590,715871590
+.long	120122290,120122290
+.long	63092015,63092015
+.long	2591802758,2591802758
+.long	2768779219,2768779219
+.long	4068943920,4068943920
+.long	2997206819,2997206819
+.long	3127509762,3127509762
+.long	1552029421,1552029421
+.long	723308426,723308426
+.long	2461301159,2461301159
+.long	4042393587,4042393587
+.long	2715969870,2715969870
+.long	3455375973,3455375973
+.long	3586000134,3586000134
+.long	526529745,526529745
+.long	2331944644,2331944644
+.long	2639474228,2639474228
+.long	2689987490,2689987490
+.long	853641733,853641733
+.long	1978398372,1978398372
+.long	971801355,971801355
+.long	2867814464,2867814464
+.long	111112542,111112542
+.long	1360031421,1360031421
+.long	4186579262,4186579262
+.long	1023860118,1023860118
+.long	2919579357,2919579357
+.long	1186850381,1186850381
+.long	3045938321,3045938321
+.long	90031217,90031217
+.long	1876166148,1876166148
+.long	4279586912,4279586912
+.long	620468249,620468249
+.long	2548678102,2548678102
+.long	3426959497,3426959497
+.long	2006899047,2006899047
+.long	3175278768,3175278768
+.long	2290845959,2290845959
+.long	945494503,945494503
+.long	3689859193,3689859193
+.long	1191869601,1191869601
+.long	3910091388,3910091388
+.long	3374220536,3374220536
+.long	0,0
+.long	2206629897,2206629897
+.long	1223502642,1223502642
+.long	2893025566,2893025566
+.long	1316117100,1316117100
+.long	4227796733,4227796733
+.long	1446544655,1446544655
+.long	517320253,517320253
+.long	658058550,658058550
+.long	1691946762,1691946762
+.long	564550760,564550760
+.long	3511966619,3511966619
+.long	976107044,976107044
+.long	2976320012,2976320012
+.long	266819475,266819475
+.long	3533106868,3533106868
+.long	2660342555,2660342555
+.long	1338359936,1338359936
+.long	2720062561,2720062561
+.long	1766553434,1766553434
+.long	370807324,370807324
+.long	179999714,179999714
+.long	3844776128,3844776128
+.long	1138762300,1138762300
+.long	488053522,488053522
+.long	185403662,185403662
+.long	2915535858,2915535858
+.long	3114841645,3114841645
+.long	3366526484,3366526484
+.long	2233069911,2233069911
+.long	1275557295,1275557295
+.long	3151862254,3151862254
+.long	4250959779,4250959779
+.long	2670068215,2670068215
+.long	3170202204,3170202204
+.long	3309004356,3309004356
+.long	880737115,880737115
+.long	1982415755,1982415755
+.long	3703972811,3703972811
+.long	1761406390,1761406390
+.long	1676797112,1676797112
+.long	3403428311,3403428311
+.long	277177154,277177154
+.long	1076008723,1076008723
+.long	538035844,538035844
+.long	2099530373,2099530373
+.long	4164795346,4164795346
+.long	288553390,288553390
+.long	1839278535,1839278535
+.long	1261411869,1261411869
+.long	4080055004,4080055004
+.long	3964831245,3964831245
+.long	3504587127,3504587127
+.long	1813426987,1813426987
+.long	2579067049,2579067049
+.long	4199060497,4199060497
+.long	577038663,577038663
+.long	3297574056,3297574056
+.long	440397984,440397984
+.long	3626794326,3626794326
+.long	4019204898,4019204898
+.long	3343796615,3343796615
+.long	3251714265,3251714265
+.long	4272081548,4272081548
+.long	906744984,906744984
+.long	3481400742,3481400742
+.long	685669029,685669029
+.long	646887386,646887386
+.long	2764025151,2764025151
+.long	3835509292,3835509292
+.long	227702864,227702864
+.long	2613862250,2613862250
+.long	1648787028,1648787028
+.long	3256061430,3256061430
+.long	3904428176,3904428176
+.long	1593260334,1593260334
+.long	4121936770,4121936770
+.long	3196083615,3196083615
+.long	2090061929,2090061929
+.long	2838353263,2838353263
+.long	3004310991,3004310991
+.long	999926984,999926984
+.long	2809993232,2809993232
+.long	1852021992,1852021992
+.long	2075868123,2075868123
+.long	158869197,158869197
+.long	4095236462,4095236462
+.long	28809964,28809964
+.long	2828685187,2828685187
+.long	1701746150,1701746150
+.long	2129067946,2129067946
+.long	147831841,147831841
+.long	3873969647,3873969647
+.long	3650873274,3650873274
+.long	3459673930,3459673930
+.long	3557400554,3557400554
+.long	3598495785,3598495785
+.long	2947720241,2947720241
+.long	824393514,824393514
+.long	815048134,815048134
+.long	3227951669,3227951669
+.long	935087732,935087732
+.long	2798289660,2798289660
+.long	2966458592,2966458592
+.long	366520115,366520115
+.long	1251476721,1251476721
+.long	4158319681,4158319681
+.long	240176511,240176511
+.long	804688151,804688151
+.long	2379631990,2379631990
+.long	1303441219,1303441219
+.long	1414376140,1414376140
+.long	3741619940,3741619940
+.long	3820343710,3820343710
+.long	461924940,461924940
+.long	3089050817,3089050817
+.long	2136040774,2136040774
+.long	82468509,82468509
+.long	1563790337,1563790337
+.long	1937016826,1937016826
+.long	776014843,776014843
+.long	1511876531,1511876531
+.long	1389550482,1389550482
+.long	861278441,861278441
+.long	323475053,323475053
+.long	2355222426,2355222426
+.long	2047648055,2047648055
+.long	2383738969,2383738969
+.long	2302415851,2302415851
+.long	3995576782,3995576782
+.long	902390199,902390199
+.long	3991215329,3991215329
+.long	1018251130,1018251130
+.long	1507840668,1507840668
+.long	1064563285,1064563285
+.long	2043548696,2043548696
+.long	3208103795,3208103795
+.long	3939366739,3939366739
+.long	1537932639,1537932639
+.long	342834655,342834655
+.long	2262516856,2262516856
+.long	2180231114,2180231114
+.long	1053059257,1053059257
+.long	741614648,741614648
+.long	1598071746,1598071746
+.long	1925389590,1925389590
+.long	203809468,203809468
+.long	2336832552,2336832552
+.long	1100287487,1100287487
+.long	1895934009,1895934009
+.long	3736275976,3736275976
+.long	2632234200,2632234200
+.long	2428589668,2428589668
+.long	1636092795,1636092795
+.long	1890988757,1890988757
+.long	1952214088,1952214088
+.long	1113045200,1113045200
+.byte	82,9,106,213,48,54,165,56
+.byte	191,64,163,158,129,243,215,251
+.byte	124,227,57,130,155,47,255,135
+.byte	52,142,67,68,196,222,233,203
+.byte	84,123,148,50,166,194,35,61
+.byte	238,76,149,11,66,250,195,78
+.byte	8,46,161,102,40,217,36,178
+.byte	118,91,162,73,109,139,209,37
+.byte	114,248,246,100,134,104,152,22
+.byte	212,164,92,204,93,101,182,146
+.byte	108,112,72,80,253,237,185,218
+.byte	94,21,70,87,167,141,157,132
+.byte	144,216,171,0,140,188,211,10
+.byte	247,228,88,5,184,179,69,6
+.byte	208,44,30,143,202,63,15,2
+.byte	193,175,189,3,1,19,138,107
+.byte	58,145,17,65,79,103,220,234
+.byte	151,242,207,206,240,180,230,115
+.byte	150,172,116,34,231,173,53,133
+.byte	226,249,55,232,28,117,223,110
+.byte	71,241,26,113,29,41,197,137
+.byte	111,183,98,14,170,24,190,27
+.byte	252,86,62,75,198,210,121,32
+.byte	154,219,192,254,120,205,90,244
+.byte	31,221,168,51,136,7,199,49
+.byte	177,18,16,89,39,128,236,95
+.byte	96,81,127,169,25,181,74,13
+.byte	45,229,122,159,147,201,156,239
+.byte	160,224,59,77,174,42,245,176
+.byte	200,235,187,60,131,83,153,97
+.byte	23,43,4,126,186,119,214,38
+.byte	225,105,20,99,85,33,12,125
+.byte	82,9,106,213,48,54,165,56
+.byte	191,64,163,158,129,243,215,251
+.byte	124,227,57,130,155,47,255,135
+.byte	52,142,67,68,196,222,233,203
+.byte	84,123,148,50,166,194,35,61
+.byte	238,76,149,11,66,250,195,78
+.byte	8,46,161,102,40,217,36,178
+.byte	118,91,162,73,109,139,209,37
+.byte	114,248,246,100,134,104,152,22
+.byte	212,164,92,204,93,101,182,146
+.byte	108,112,72,80,253,237,185,218
+.byte	94,21,70,87,167,141,157,132
+.byte	144,216,171,0,140,188,211,10
+.byte	247,228,88,5,184,179,69,6
+.byte	208,44,30,143,202,63,15,2
+.byte	193,175,189,3,1,19,138,107
+.byte	58,145,17,65,79,103,220,234
+.byte	151,242,207,206,240,180,230,115
+.byte	150,172,116,34,231,173,53,133
+.byte	226,249,55,232,28,117,223,110
+.byte	71,241,26,113,29,41,197,137
+.byte	111,183,98,14,170,24,190,27
+.byte	252,86,62,75,198,210,121,32
+.byte	154,219,192,254,120,205,90,244
+.byte	31,221,168,51,136,7,199,49
+.byte	177,18,16,89,39,128,236,95
+.byte	96,81,127,169,25,181,74,13
+.byte	45,229,122,159,147,201,156,239
+.byte	160,224,59,77,174,42,245,176
+.byte	200,235,187,60,131,83,153,97
+.byte	23,43,4,126,186,119,214,38
+.byte	225,105,20,99,85,33,12,125
+.byte	82,9,106,213,48,54,165,56
+.byte	191,64,163,158,129,243,215,251
+.byte	124,227,57,130,155,47,255,135
+.byte	52,142,67,68,196,222,233,203
+.byte	84,123,148,50,166,194,35,61
+.byte	238,76,149,11,66,250,195,78
+.byte	8,46,161,102,40,217,36,178
+.byte	118,91,162,73,109,139,209,37
+.byte	114,248,246,100,134,104,152,22
+.byte	212,164,92,204,93,101,182,146
+.byte	108,112,72,80,253,237,185,218
+.byte	94,21,70,87,167,141,157,132
+.byte	144,216,171,0,140,188,211,10
+.byte	247,228,88,5,184,179,69,6
+.byte	208,44,30,143,202,63,15,2
+.byte	193,175,189,3,1,19,138,107
+.byte	58,145,17,65,79,103,220,234
+.byte	151,242,207,206,240,180,230,115
+.byte	150,172,116,34,231,173,53,133
+.byte	226,249,55,232,28,117,223,110
+.byte	71,241,26,113,29,41,197,137
+.byte	111,183,98,14,170,24,190,27
+.byte	252,86,62,75,198,210,121,32
+.byte	154,219,192,254,120,205,90,244
+.byte	31,221,168,51,136,7,199,49
+.byte	177,18,16,89,39,128,236,95
+.byte	96,81,127,169,25,181,74,13
+.byte	45,229,122,159,147,201,156,239
+.byte	160,224,59,77,174,42,245,176
+.byte	200,235,187,60,131,83,153,97
+.byte	23,43,4,126,186,119,214,38
+.byte	225,105,20,99,85,33,12,125
+.byte	82,9,106,213,48,54,165,56
+.byte	191,64,163,158,129,243,215,251
+.byte	124,227,57,130,155,47,255,135
+.byte	52,142,67,68,196,222,233,203
+.byte	84,123,148,50,166,194,35,61
+.byte	238,76,149,11,66,250,195,78
+.byte	8,46,161,102,40,217,36,178
+.byte	118,91,162,73,109,139,209,37
+.byte	114,248,246,100,134,104,152,22
+.byte	212,164,92,204,93,101,182,146
+.byte	108,112,72,80,253,237,185,218
+.byte	94,21,70,87,167,141,157,132
+.byte	144,216,171,0,140,188,211,10
+.byte	247,228,88,5,184,179,69,6
+.byte	208,44,30,143,202,63,15,2
+.byte	193,175,189,3,1,19,138,107
+.byte	58,145,17,65,79,103,220,234
+.byte	151,242,207,206,240,180,230,115
+.byte	150,172,116,34,231,173,53,133
+.byte	226,249,55,232,28,117,223,110
+.byte	71,241,26,113,29,41,197,137
+.byte	111,183,98,14,170,24,190,27
+.byte	252,86,62,75,198,210,121,32
+.byte	154,219,192,254,120,205,90,244
+.byte	31,221,168,51,136,7,199,49
+.byte	177,18,16,89,39,128,236,95
+.byte	96,81,127,169,25,181,74,13
+.byte	45,229,122,159,147,201,156,239
+.byte	160,224,59,77,174,42,245,176
+.byte	200,235,187,60,131,83,153,97
+.byte	23,43,4,126,186,119,214,38
+.byte	225,105,20,99,85,33,12,125
+.globl	_aes_nohw_decrypt
+.private_extern	_aes_nohw_decrypt
+.align	4
+_aes_nohw_decrypt:
+L_aes_nohw_decrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	28(%esp),%edi
+	movl	%esp,%eax
+	subl	$36,%esp
+	andl	$-64,%esp
+	leal	-127(%edi),%ebx
+	subl	%esp,%ebx
+	negl	%ebx
+	andl	$960,%ebx
+	subl	%ebx,%esp
+	addl	$4,%esp
+	movl	%eax,28(%esp)
+	call	L010pic_point
+L010pic_point:
+	popl	%ebp
+	movl	L_OPENSSL_ia32cap_P$non_lazy_ptr-L010pic_point(%ebp),%eax
+	leal	LAES_Td-L010pic_point(%ebp),%ebp
+	leal	764(%esp),%ebx
+	subl	%ebp,%ebx
+	andl	$768,%ebx
+	leal	2176(%ebp,%ebx,1),%ebp
+	btl	$25,(%eax)
+	jnc	L011x86
+	movq	(%esi),%mm0
+	movq	8(%esi),%mm4
+	call	__sse_AES_decrypt_compact
+	movl	28(%esp),%esp
+	movl	24(%esp),%esi
+	movq	%mm0,(%esi)
+	movq	%mm4,8(%esi)
+	emms
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	4,0x90
+L011x86:
+	movl	%ebp,24(%esp)
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	call	__x86_AES_decrypt_compact
+	movl	28(%esp),%esp
+	movl	24(%esp),%esi
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.globl	_aes_nohw_cbc_encrypt
+.private_extern	_aes_nohw_cbc_encrypt
+.align	4
+_aes_nohw_cbc_encrypt:
+L_aes_nohw_cbc_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%ecx
+	cmpl	$0,%ecx
+	je	L012drop_out
+	call	L013pic_point
+L013pic_point:
+	popl	%ebp
+	movl	L_OPENSSL_ia32cap_P$non_lazy_ptr-L013pic_point(%ebp),%eax
+	cmpl	$0,40(%esp)
+	leal	LAES_Te-L013pic_point(%ebp),%ebp
+	jne	L014picked_te
+	leal	LAES_Td-LAES_Te(%ebp),%ebp
+L014picked_te:
+	pushfl
+	cld
+	cmpl	$512,%ecx
+	jb	L015slow_way
+	testl	$15,%ecx
+	jnz	L015slow_way
+	btl	$28,(%eax)
+	jc	L015slow_way
+	leal	-324(%esp),%esi
+	andl	$-64,%esi
+	movl	%ebp,%eax
+	leal	2304(%ebp),%ebx
+	movl	%esi,%edx
+	andl	$4095,%eax
+	andl	$4095,%ebx
+	andl	$4095,%edx
+	cmpl	%ebx,%edx
+	jb	L016tbl_break_out
+	subl	%ebx,%edx
+	subl	%edx,%esi
+	jmp	L017tbl_ok
+.align	2,0x90
+L016tbl_break_out:
+	subl	%eax,%edx
+	andl	$4095,%edx
+	addl	$384,%edx
+	subl	%edx,%esi
+.align	2,0x90
+L017tbl_ok:
+	leal	24(%esp),%edx
+	xchgl	%esi,%esp
+	addl	$4,%esp
+	movl	%ebp,24(%esp)
+	movl	%esi,28(%esp)
+	movl	(%edx),%eax
+	movl	4(%edx),%ebx
+	movl	12(%edx),%edi
+	movl	16(%edx),%esi
+	movl	20(%edx),%edx
+	movl	%eax,32(%esp)
+	movl	%ebx,36(%esp)
+	movl	%ecx,40(%esp)
+	movl	%edi,44(%esp)
+	movl	%esi,48(%esp)
+	movl	$0,316(%esp)
+	movl	%edi,%ebx
+	movl	$61,%ecx
+	subl	%ebp,%ebx
+	movl	%edi,%esi
+	andl	$4095,%ebx
+	leal	76(%esp),%edi
+	cmpl	$2304,%ebx
+	jb	L018do_copy
+	cmpl	$3852,%ebx
+	jb	L019skip_copy
+.align	2,0x90
+L018do_copy:
+	movl	%edi,44(%esp)
+.long	2784229001
+L019skip_copy:
+	movl	$16,%edi
+.align	2,0x90
+L020prefetch_tbl:
+	movl	(%ebp),%eax
+	movl	32(%ebp),%ebx
+	movl	64(%ebp),%ecx
+	movl	96(%ebp),%esi
+	leal	128(%ebp),%ebp
+	subl	$1,%edi
+	jnz	L020prefetch_tbl
+	subl	$2048,%ebp
+	movl	32(%esp),%esi
+	movl	48(%esp),%edi
+	cmpl	$0,%edx
+	je	L021fast_decrypt
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+.align	4,0x90
+L022fast_enc_loop:
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	xorl	(%esi),%eax
+	xorl	4(%esi),%ebx
+	xorl	8(%esi),%ecx
+	xorl	12(%esi),%edx
+	movl	44(%esp),%edi
+	call	__x86_AES_encrypt
+	movl	32(%esp),%esi
+	movl	36(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	leal	16(%esi),%esi
+	movl	40(%esp),%ecx
+	movl	%esi,32(%esp)
+	leal	16(%edi),%edx
+	movl	%edx,36(%esp)
+	subl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jnz	L022fast_enc_loop
+	movl	48(%esp),%esi
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	cmpl	$0,316(%esp)
+	movl	44(%esp),%edi
+	je	L023skip_ezero
+	movl	$60,%ecx
+	xorl	%eax,%eax
+.align	2,0x90
+.long	2884892297
+L023skip_ezero:
+	movl	28(%esp),%esp
+	popfl
+L012drop_out:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	4,0x90
+L021fast_decrypt:
+	cmpl	36(%esp),%esi
+	je	L024fast_dec_in_place
+	movl	%edi,52(%esp)
+.align	2,0x90
+.align	4,0x90
+L025fast_dec_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	44(%esp),%edi
+	call	__x86_AES_decrypt
+	movl	52(%esp),%edi
+	movl	40(%esp),%esi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	36(%esp),%edi
+	movl	32(%esp),%esi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	40(%esp),%ecx
+	movl	%esi,52(%esp)
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	leal	16(%edi),%edi
+	movl	%edi,36(%esp)
+	subl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jnz	L025fast_dec_loop
+	movl	52(%esp),%edi
+	movl	48(%esp),%esi
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	jmp	L026fast_dec_out
+.align	4,0x90
+L024fast_dec_in_place:
+L027fast_dec_in_place_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	leal	60(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	44(%esp),%edi
+	call	__x86_AES_decrypt
+	movl	48(%esp),%edi
+	movl	36(%esp),%esi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	leal	16(%esi),%esi
+	movl	%esi,36(%esp)
+	leal	60(%esp),%esi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	32(%esp),%esi
+	movl	40(%esp),%ecx
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	subl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jnz	L027fast_dec_in_place_loop
+.align	2,0x90
+L026fast_dec_out:
+	cmpl	$0,316(%esp)
+	movl	44(%esp),%edi
+	je	L028skip_dzero
+	movl	$60,%ecx
+	xorl	%eax,%eax
+.align	2,0x90
+.long	2884892297
+L028skip_dzero:
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	4,0x90
+L015slow_way:
+	movl	(%eax),%eax
+	movl	36(%esp),%edi
+	leal	-80(%esp),%esi
+	andl	$-64,%esi
+	leal	-143(%edi),%ebx
+	subl	%esi,%ebx
+	negl	%ebx
+	andl	$960,%ebx
+	subl	%ebx,%esi
+	leal	768(%esi),%ebx
+	subl	%ebp,%ebx
+	andl	$768,%ebx
+	leal	2176(%ebp,%ebx,1),%ebp
+	leal	24(%esp),%edx
+	xchgl	%esi,%esp
+	addl	$4,%esp
+	movl	%ebp,24(%esp)
+	movl	%esi,28(%esp)
+	movl	%eax,52(%esp)
+	movl	(%edx),%eax
+	movl	4(%edx),%ebx
+	movl	16(%edx),%esi
+	movl	20(%edx),%edx
+	movl	%eax,32(%esp)
+	movl	%ebx,36(%esp)
+	movl	%ecx,40(%esp)
+	movl	%edi,44(%esp)
+	movl	%esi,48(%esp)
+	movl	%esi,%edi
+	movl	%eax,%esi
+	cmpl	$0,%edx
+	je	L029slow_decrypt
+	cmpl	$16,%ecx
+	movl	%ebx,%edx
+	jb	L030slow_enc_tail
+	btl	$25,52(%esp)
+	jnc	L031slow_enc_x86
+	movq	(%edi),%mm0
+	movq	8(%edi),%mm4
+.align	4,0x90
+L032slow_enc_loop_sse:
+	pxor	(%esi),%mm0
+	pxor	8(%esi),%mm4
+	movl	44(%esp),%edi
+	call	__sse_AES_encrypt_compact
+	movl	32(%esp),%esi
+	movl	36(%esp),%edi
+	movl	40(%esp),%ecx
+	movq	%mm0,(%edi)
+	movq	%mm4,8(%edi)
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	leal	16(%edi),%edx
+	movl	%edx,36(%esp)
+	subl	$16,%ecx
+	cmpl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jae	L032slow_enc_loop_sse
+	testl	$15,%ecx
+	jnz	L030slow_enc_tail
+	movl	48(%esp),%esi
+	movq	%mm0,(%esi)
+	movq	%mm4,8(%esi)
+	emms
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	4,0x90
+L031slow_enc_x86:
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+.align	2,0x90
+L033slow_enc_loop_x86:
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	xorl	(%esi),%eax
+	xorl	4(%esi),%ebx
+	xorl	8(%esi),%ecx
+	xorl	12(%esi),%edx
+	movl	44(%esp),%edi
+	call	__x86_AES_encrypt_compact
+	movl	32(%esp),%esi
+	movl	36(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	40(%esp),%ecx
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	leal	16(%edi),%edx
+	movl	%edx,36(%esp)
+	subl	$16,%ecx
+	cmpl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jae	L033slow_enc_loop_x86
+	testl	$15,%ecx
+	jnz	L030slow_enc_tail
+	movl	48(%esp),%esi
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	4,0x90
+L030slow_enc_tail:
+	emms
+	movl	%edx,%edi
+	movl	$16,%ebx
+	subl	%ecx,%ebx
+	cmpl	%esi,%edi
+	je	L034enc_in_place
+.align	2,0x90
+.long	2767451785
+	jmp	L035enc_skip_in_place
+L034enc_in_place:
+	leal	(%edi,%ecx,1),%edi
+L035enc_skip_in_place:
+	movl	%ebx,%ecx
+	xorl	%eax,%eax
+.align	2,0x90
+.long	2868115081
+	movl	48(%esp),%edi
+	movl	%edx,%esi
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	$16,40(%esp)
+	jmp	L033slow_enc_loop_x86
+.align	4,0x90
+L029slow_decrypt:
+	btl	$25,52(%esp)
+	jnc	L036slow_dec_loop_x86
+.align	2,0x90
+L037slow_dec_loop_sse:
+	movq	(%esi),%mm0
+	movq	8(%esi),%mm4
+	movl	44(%esp),%edi
+	call	__sse_AES_decrypt_compact
+	movl	32(%esp),%esi
+	leal	60(%esp),%eax
+	movl	36(%esp),%ebx
+	movl	40(%esp),%ecx
+	movl	48(%esp),%edi
+	movq	(%esi),%mm1
+	movq	8(%esi),%mm5
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	movq	%mm1,(%edi)
+	movq	%mm5,8(%edi)
+	subl	$16,%ecx
+	jc	L038slow_dec_partial_sse
+	movq	%mm0,(%ebx)
+	movq	%mm4,8(%ebx)
+	leal	16(%ebx),%ebx
+	movl	%ebx,36(%esp)
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	movl	%ecx,40(%esp)
+	jnz	L037slow_dec_loop_sse
+	emms
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	4,0x90
+L038slow_dec_partial_sse:
+	movq	%mm0,(%eax)
+	movq	%mm4,8(%eax)
+	emms
+	addl	$16,%ecx
+	movl	%ebx,%edi
+	movl	%eax,%esi
+.align	2,0x90
+.long	2767451785
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	4,0x90
+L036slow_dec_loop_x86:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	leal	60(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	44(%esp),%edi
+	call	__x86_AES_decrypt_compact
+	movl	48(%esp),%edi
+	movl	40(%esp),%esi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	subl	$16,%esi
+	jc	L039slow_dec_partial_x86
+	movl	%esi,40(%esp)
+	movl	36(%esp),%esi
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	leal	16(%esi),%esi
+	movl	%esi,36(%esp)
+	leal	60(%esp),%esi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	32(%esp),%esi
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	jnz	L036slow_dec_loop_x86
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	4,0x90
+L039slow_dec_partial_x86:
+	leal	60(%esp),%esi
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	movl	32(%esp),%esi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	40(%esp),%ecx
+	movl	36(%esp),%edi
+	leal	60(%esp),%esi
+.align	2,0x90
+.long	2767451785
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.private_extern	__x86_AES_set_encrypt_key
+.align	4
+__x86_AES_set_encrypt_key:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	24(%esp),%esi
+	movl	32(%esp),%edi
+	testl	$-1,%esi
+	jz	L040badpointer
+	testl	$-1,%edi
+	jz	L040badpointer
+	call	L041pic_point
+L041pic_point:
+	popl	%ebp
+	leal	LAES_Te-L041pic_point(%ebp),%ebp
+	leal	2176(%ebp),%ebp
+	movl	-128(%ebp),%eax
+	movl	-96(%ebp),%ebx
+	movl	-64(%ebp),%ecx
+	movl	-32(%ebp),%edx
+	movl	(%ebp),%eax
+	movl	32(%ebp),%ebx
+	movl	64(%ebp),%ecx
+	movl	96(%ebp),%edx
+	movl	28(%esp),%ecx
+	cmpl	$128,%ecx
+	je	L04210rounds
+	cmpl	$192,%ecx
+	je	L04312rounds
+	cmpl	$256,%ecx
+	je	L04414rounds
+	movl	$-2,%eax
+	jmp	L045exit
+L04210rounds:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	xorl	%ecx,%ecx
+	jmp	L04610shortcut
+.align	2,0x90
+L04710loop:
+	movl	(%edi),%eax
+	movl	12(%edi),%edx
+L04610shortcut:
+	movzbl	%dl,%esi
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shrl	$16,%edx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$8,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+	xorl	896(%ebp,%ecx,4),%eax
+	movl	%eax,16(%edi)
+	xorl	4(%edi),%eax
+	movl	%eax,20(%edi)
+	xorl	8(%edi),%eax
+	movl	%eax,24(%edi)
+	xorl	12(%edi),%eax
+	movl	%eax,28(%edi)
+	incl	%ecx
+	addl	$16,%edi
+	cmpl	$10,%ecx
+	jl	L04710loop
+	movl	$10,80(%edi)
+	xorl	%eax,%eax
+	jmp	L045exit
+L04312rounds:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	16(%esi),%ecx
+	movl	20(%esi),%edx
+	movl	%ecx,16(%edi)
+	movl	%edx,20(%edi)
+	xorl	%ecx,%ecx
+	jmp	L04812shortcut
+.align	2,0x90
+L04912loop:
+	movl	(%edi),%eax
+	movl	20(%edi),%edx
+L04812shortcut:
+	movzbl	%dl,%esi
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shrl	$16,%edx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$8,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+	xorl	896(%ebp,%ecx,4),%eax
+	movl	%eax,24(%edi)
+	xorl	4(%edi),%eax
+	movl	%eax,28(%edi)
+	xorl	8(%edi),%eax
+	movl	%eax,32(%edi)
+	xorl	12(%edi),%eax
+	movl	%eax,36(%edi)
+	cmpl	$7,%ecx
+	je	L05012break
+	incl	%ecx
+	xorl	16(%edi),%eax
+	movl	%eax,40(%edi)
+	xorl	20(%edi),%eax
+	movl	%eax,44(%edi)
+	addl	$24,%edi
+	jmp	L04912loop
+L05012break:
+	movl	$12,72(%edi)
+	xorl	%eax,%eax
+	jmp	L045exit
+L04414rounds:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	16(%esi),%eax
+	movl	20(%esi),%ebx
+	movl	24(%esi),%ecx
+	movl	28(%esi),%edx
+	movl	%eax,16(%edi)
+	movl	%ebx,20(%edi)
+	movl	%ecx,24(%edi)
+	movl	%edx,28(%edi)
+	xorl	%ecx,%ecx
+	jmp	L05114shortcut
+.align	2,0x90
+L05214loop:
+	movl	28(%edi),%edx
+L05114shortcut:
+	movl	(%edi),%eax
+	movzbl	%dl,%esi
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shrl	$16,%edx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$8,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+	xorl	896(%ebp,%ecx,4),%eax
+	movl	%eax,32(%edi)
+	xorl	4(%edi),%eax
+	movl	%eax,36(%edi)
+	xorl	8(%edi),%eax
+	movl	%eax,40(%edi)
+	xorl	12(%edi),%eax
+	movl	%eax,44(%edi)
+	cmpl	$6,%ecx
+	je	L05314break
+	incl	%ecx
+	movl	%eax,%edx
+	movl	16(%edi),%eax
+	movzbl	%dl,%esi
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shrl	$16,%edx
+	shll	$8,%ebx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+	movl	%eax,48(%edi)
+	xorl	20(%edi),%eax
+	movl	%eax,52(%edi)
+	xorl	24(%edi),%eax
+	movl	%eax,56(%edi)
+	xorl	28(%edi),%eax
+	movl	%eax,60(%edi)
+	addl	$32,%edi
+	jmp	L05214loop
+L05314break:
+	movl	$14,48(%edi)
+	xorl	%eax,%eax
+	jmp	L045exit
+L040badpointer:
+	movl	$-1,%eax
+L045exit:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.globl	_aes_nohw_set_encrypt_key
+.private_extern	_aes_nohw_set_encrypt_key
+.align	4
+_aes_nohw_set_encrypt_key:
+L_aes_nohw_set_encrypt_key_begin:
+	call	__x86_AES_set_encrypt_key
+	ret
+.globl	_aes_nohw_set_decrypt_key
+.private_extern	_aes_nohw_set_decrypt_key
+.align	4
+_aes_nohw_set_decrypt_key:
+L_aes_nohw_set_decrypt_key_begin:
+	call	__x86_AES_set_encrypt_key
+	cmpl	$0,%eax
+	je	L054proceed
+	ret
+L054proceed:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%esi
+	movl	240(%esi),%ecx
+	leal	(,%ecx,4),%ecx
+	leal	(%esi,%ecx,4),%edi
+.align	2,0x90
+L055invert:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	(%edi),%ecx
+	movl	4(%edi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,(%esi)
+	movl	%edx,4(%esi)
+	movl	8(%esi),%eax
+	movl	12(%esi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,8(%edi)
+	movl	%ebx,12(%edi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	addl	$16,%esi
+	subl	$16,%edi
+	cmpl	%edi,%esi
+	jne	L055invert
+	movl	28(%esp),%edi
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,28(%esp)
+	movl	16(%edi),%eax
+.align	2,0x90
+L056permute:
+	addl	$16,%edi
+	movl	$2155905152,%ebp
+	andl	%eax,%ebp
+	leal	(%eax,%eax,1),%ebx
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	subl	%ebp,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	xorl	%esi,%ebx
+	movl	$2155905152,%ebp
+	andl	%ebx,%ebp
+	leal	(%ebx,%ebx,1),%ecx
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	subl	%ebp,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%eax,%ebx
+	xorl	%esi,%ecx
+	movl	$2155905152,%ebp
+	andl	%ecx,%ebp
+	leal	(%ecx,%ecx,1),%edx
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	xorl	%eax,%ecx
+	subl	%ebp,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	roll	$8,%eax
+	xorl	%esi,%edx
+	movl	4(%edi),%ebp
+	xorl	%ebx,%eax
+	xorl	%edx,%ebx
+	xorl	%ecx,%eax
+	roll	$24,%ebx
+	xorl	%edx,%ecx
+	xorl	%edx,%eax
+	roll	$16,%ecx
+	xorl	%ebx,%eax
+	roll	$8,%edx
+	xorl	%ecx,%eax
+	movl	%ebp,%ebx
+	xorl	%edx,%eax
+	movl	%eax,(%edi)
+	movl	$2155905152,%ebp
+	andl	%ebx,%ebp
+	leal	(%ebx,%ebx,1),%ecx
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	subl	%ebp,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%esi,%ecx
+	movl	$2155905152,%ebp
+	andl	%ecx,%ebp
+	leal	(%ecx,%ecx,1),%edx
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	subl	%ebp,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	xorl	%ebx,%ecx
+	xorl	%esi,%edx
+	movl	$2155905152,%ebp
+	andl	%edx,%ebp
+	leal	(%edx,%edx,1),%eax
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	xorl	%ebx,%edx
+	subl	%ebp,%esi
+	andl	$4278124286,%eax
+	andl	$454761243,%esi
+	roll	$8,%ebx
+	xorl	%esi,%eax
+	movl	8(%edi),%ebp
+	xorl	%ecx,%ebx
+	xorl	%eax,%ecx
+	xorl	%edx,%ebx
+	roll	$24,%ecx
+	xorl	%eax,%edx
+	xorl	%eax,%ebx
+	roll	$16,%edx
+	xorl	%ecx,%ebx
+	roll	$8,%eax
+	xorl	%edx,%ebx
+	movl	%ebp,%ecx
+	xorl	%eax,%ebx
+	movl	%ebx,4(%edi)
+	movl	$2155905152,%ebp
+	andl	%ecx,%ebp
+	leal	(%ecx,%ecx,1),%edx
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	subl	%ebp,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	xorl	%esi,%edx
+	movl	$2155905152,%ebp
+	andl	%edx,%ebp
+	leal	(%edx,%edx,1),%eax
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	subl	%ebp,%esi
+	andl	$4278124286,%eax
+	andl	$454761243,%esi
+	xorl	%ecx,%edx
+	xorl	%esi,%eax
+	movl	$2155905152,%ebp
+	andl	%eax,%ebp
+	leal	(%eax,%eax,1),%ebx
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	xorl	%ecx,%eax
+	subl	%ebp,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	roll	$8,%ecx
+	xorl	%esi,%ebx
+	movl	12(%edi),%ebp
+	xorl	%edx,%ecx
+	xorl	%ebx,%edx
+	xorl	%eax,%ecx
+	roll	$24,%edx
+	xorl	%ebx,%eax
+	xorl	%ebx,%ecx
+	roll	$16,%eax
+	xorl	%edx,%ecx
+	roll	$8,%ebx
+	xorl	%eax,%ecx
+	movl	%ebp,%edx
+	xorl	%ebx,%ecx
+	movl	%ecx,8(%edi)
+	movl	$2155905152,%ebp
+	andl	%edx,%ebp
+	leal	(%edx,%edx,1),%eax
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	subl	%ebp,%esi
+	andl	$4278124286,%eax
+	andl	$454761243,%esi
+	xorl	%esi,%eax
+	movl	$2155905152,%ebp
+	andl	%eax,%ebp
+	leal	(%eax,%eax,1),%ebx
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	subl	%ebp,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	xorl	%edx,%eax
+	xorl	%esi,%ebx
+	movl	$2155905152,%ebp
+	andl	%ebx,%ebp
+	leal	(%ebx,%ebx,1),%ecx
+	movl	%ebp,%esi
+	shrl	$7,%ebp
+	xorl	%edx,%ebx
+	subl	%ebp,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	roll	$8,%edx
+	xorl	%esi,%ecx
+	movl	16(%edi),%ebp
+	xorl	%eax,%edx
+	xorl	%ecx,%eax
+	xorl	%ebx,%edx
+	roll	$24,%eax
+	xorl	%ecx,%ebx
+	xorl	%ecx,%edx
+	roll	$16,%ebx
+	xorl	%eax,%edx
+	roll	$8,%ecx
+	xorl	%ebx,%edx
+	movl	%ebp,%eax
+	xorl	%ecx,%edx
+	movl	%edx,12(%edi)
+	cmpl	28(%esp),%edi
+	jb	L056permute
+	xorl	%eax,%eax
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.byte	65,69,83,32,102,111,114,32,120,56,54,44,32,67,82,89
+.byte	80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
+.byte	111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.section __IMPORT,__pointers,non_lazy_symbol_pointers
+L_OPENSSL_ia32cap_P$non_lazy_ptr:
+.indirect_symbol	_OPENSSL_ia32cap_P
+.long	0
+#endif
diff --git a/mac-x86_64/crypto/fipsmodule/aes-x86_64.S b/mac-x86_64/crypto/fipsmodule/aes-x86_64.S
new file mode 100644
index 0000000..8875d0a
--- /dev/null
+++ b/mac-x86_64/crypto/fipsmodule/aes-x86_64.S
@@ -0,0 +1,2645 @@
+# This file is generated from a similarly-named Perl script in the BoringSSL
+# source tree. Do not edit by hand.
+
+#if defined(__has_feature)
+#if __has_feature(memory_sanitizer) && !defined(OPENSSL_NO_ASM)
+#define OPENSSL_NO_ASM
+#endif
+#endif
+
+#if defined(__x86_64__) && !defined(OPENSSL_NO_ASM)
+#if defined(BORINGSSL_PREFIX)
+#include <boringssl_prefix_symbols_asm.h>
+#endif
+.text	
+
+.p2align	4
+_x86_64_AES_encrypt:
+	xorl	0(%r15),%eax
+	xorl	4(%r15),%ebx
+	xorl	8(%r15),%ecx
+	xorl	12(%r15),%edx
+
+	movl	240(%r15),%r13d
+	subl	$1,%r13d
+	jmp	L$enc_loop
+.p2align	4
+L$enc_loop:
+
+	movzbl	%al,%esi
+	movzbl	%bl,%edi
+	movzbl	%cl,%ebp
+	movl	0(%r14,%rsi,8),%r10d
+	movl	0(%r14,%rdi,8),%r11d
+	movl	0(%r14,%rbp,8),%r12d
+
+	movzbl	%bh,%esi
+	movzbl	%ch,%edi
+	movzbl	%dl,%ebp
+	xorl	3(%r14,%rsi,8),%r10d
+	xorl	3(%r14,%rdi,8),%r11d
+	movl	0(%r14,%rbp,8),%r8d
+
+	movzbl	%dh,%esi
+	shrl	$16,%ecx
+	movzbl	%ah,%ebp
+	xorl	3(%r14,%rsi,8),%r12d
+	shrl	$16,%edx
+	xorl	3(%r14,%rbp,8),%r8d
+
+	shrl	$16,%ebx
+	leaq	16(%r15),%r15
+	shrl	$16,%eax
+
+	movzbl	%cl,%esi
+	movzbl	%dl,%edi
+	movzbl	%al,%ebp
+	xorl	2(%r14,%rsi,8),%r10d
+	xorl	2(%r14,%rdi,8),%r11d
+	xorl	2(%r14,%rbp,8),%r12d
+
+	movzbl	%dh,%esi
+	movzbl	%ah,%edi
+	movzbl	%bl,%ebp
+	xorl	1(%r14,%rsi,8),%r10d
+	xorl	1(%r14,%rdi,8),%r11d
+	xorl	2(%r14,%rbp,8),%r8d
+
+	movl	12(%r15),%edx
+	movzbl	%bh,%edi
+	movzbl	%ch,%ebp
+	movl	0(%r15),%eax
+	xorl	1(%r14,%rdi,8),%r12d
+	xorl	1(%r14,%rbp,8),%r8d
+
+	movl	4(%r15),%ebx
+	movl	8(%r15),%ecx
+	xorl	%r10d,%eax
+	xorl	%r11d,%ebx
+	xorl	%r12d,%ecx
+	xorl	%r8d,%edx
+	subl	$1,%r13d
+	jnz	L$enc_loop
+	movzbl	%al,%esi
+	movzbl	%bl,%edi
+	movzbl	%cl,%ebp
+	movzbl	2(%r14,%rsi,8),%r10d
+	movzbl	2(%r14,%rdi,8),%r11d
+	movzbl	2(%r14,%rbp,8),%r12d
+
+	movzbl	%dl,%esi
+	movzbl	%bh,%edi
+	movzbl	%ch,%ebp
+	movzbl	2(%r14,%rsi,8),%r8d
+	movl	0(%r14,%rdi,8),%edi
+	movl	0(%r14,%rbp,8),%ebp
+
+	andl	$0x0000ff00,%edi
+	andl	$0x0000ff00,%ebp
+
+	xorl	%edi,%r10d
+	xorl	%ebp,%r11d
+	shrl	$16,%ecx
+
+	movzbl	%dh,%esi
+	movzbl	%ah,%edi
+	shrl	$16,%edx
+	movl	0(%r14,%rsi,8),%esi
+	movl	0(%r14,%rdi,8),%edi
+
+	andl	$0x0000ff00,%esi
+	andl	$0x0000ff00,%edi
+	shrl	$16,%ebx
+	xorl	%esi,%r12d
+	xorl	%edi,%r8d
+	shrl	$16,%eax
+
+	movzbl	%cl,%esi
+	movzbl	%dl,%edi
+	movzbl	%al,%ebp
+	movl	0(%r14,%rsi,8),%esi
+	movl	0(%r14,%rdi,8),%edi
+	movl	0(%r14,%rbp,8),%ebp
+
+	andl	$0x00ff0000,%esi
+	andl	$0x00ff0000,%edi
+	andl	$0x00ff0000,%ebp
+
+	xorl	%esi,%r10d
+	xorl	%edi,%r11d
+	xorl	%ebp,%r12d
+
+	movzbl	%bl,%esi
+	movzbl	%dh,%edi
+	movzbl	%ah,%ebp
+	movl	0(%r14,%rsi,8),%esi
+	movl	2(%r14,%rdi,8),%edi
+	movl	2(%r14,%rbp,8),%ebp
+
+	andl	$0x00ff0000,%esi
+	andl	$0xff000000,%edi
+	andl	$0xff000000,%ebp
+
+	xorl	%esi,%r8d
+	xorl	%edi,%r10d
+	xorl	%ebp,%r11d
+
+	movzbl	%bh,%esi
+	movzbl	%ch,%edi
+	movl	16+12(%r15),%edx
+	movl	2(%r14,%rsi,8),%esi
+	movl	2(%r14,%rdi,8),%edi
+	movl	16+0(%r15),%eax
+
+	andl	$0xff000000,%esi
+	andl	$0xff000000,%edi
+
+	xorl	%esi,%r12d
+	xorl	%edi,%r8d
+
+	movl	16+4(%r15),%ebx
+	movl	16+8(%r15),%ecx
+	xorl	%r10d,%eax
+	xorl	%r11d,%ebx
+	xorl	%r12d,%ecx
+	xorl	%r8d,%edx
+.byte	0xf3,0xc3
+
+
+.p2align	4
+_x86_64_AES_encrypt_compact:
+
+	leaq	128(%r14),%r8
+	movl	0-128(%r8),%edi
+	movl	32-128(%r8),%ebp
+	movl	64-128(%r8),%r10d
+	movl	96-128(%r8),%r11d
+	movl	128-128(%r8),%edi
+	movl	160-128(%r8),%ebp
+	movl	192-128(%r8),%r10d
+	movl	224-128(%r8),%r11d
+	jmp	L$enc_loop_compact
+.p2align	4
+L$enc_loop_compact:
+	xorl	0(%r15),%eax
+	xorl	4(%r15),%ebx
+	xorl	8(%r15),%ecx
+	xorl	12(%r15),%edx
+	leaq	16(%r15),%r15
+	movzbl	%al,%r10d
+	movzbl	%bl,%r11d
+	movzbl	%cl,%r12d
+	movzbl	%dl,%r8d
+	movzbl	%bh,%esi
+	movzbl	%ch,%edi
+	shrl	$16,%ecx
+	movzbl	%dh,%ebp
+	movzbl	(%r14,%r10,1),%r10d
+	movzbl	(%r14,%r11,1),%r11d
+	movzbl	(%r14,%r12,1),%r12d
+	movzbl	(%r14,%r8,1),%r8d
+
+	movzbl	(%r14,%rsi,1),%r9d
+	movzbl	%ah,%esi
+	movzbl	(%r14,%rdi,1),%r13d
+	movzbl	%cl,%edi
+	movzbl	(%r14,%rbp,1),%ebp
+	movzbl	(%r14,%rsi,1),%esi
+
+	shll	$8,%r9d
+	shrl	$16,%edx
+	shll	$8,%r13d
+	xorl	%r9d,%r10d
+	shrl	$16,%eax
+	movzbl	%dl,%r9d
+	shrl	$16,%ebx
+	xorl	%r13d,%r11d
+	shll	$8,%ebp
+	movzbl	%al,%r13d
+	movzbl	(%r14,%rdi,1),%edi
+	xorl	%ebp,%r12d
+
+	shll	$8,%esi
+	movzbl	%bl,%ebp
+	shll	$16,%edi
+	xorl	%esi,%r8d
+	movzbl	(%r14,%r9,1),%r9d
+	movzbl	%dh,%esi
+	movzbl	(%r14,%r13,1),%r13d
+	xorl	%edi,%r10d
+
+	shrl	$8,%ecx
+	movzbl	%ah,%edi
+	shll	$16,%r9d
+	shrl	$8,%ebx
+	shll	$16,%r13d
+	xorl	%r9d,%r11d
+	movzbl	(%r14,%rbp,1),%ebp
+	movzbl	(%r14,%rsi,1),%esi
+	movzbl	(%r14,%rdi,1),%edi
+	movzbl	(%r14,%rcx,1),%edx
+	movzbl	(%r14,%rbx,1),%ecx
+
+	shll	$16,%ebp
+	xorl	%r13d,%r12d
+	shll	$24,%esi
+	xorl	%ebp,%r8d
+	shll	$24,%edi
+	xorl	%esi,%r10d
+	shll	$24,%edx
+	xorl	%edi,%r11d
+	shll	$24,%ecx
+	movl	%r10d,%eax
+	movl	%r11d,%ebx
+	xorl	%r12d,%ecx
+	xorl	%r8d,%edx
+	cmpq	16(%rsp),%r15
+	je	L$enc_compact_done
+	movl	$0x80808080,%r10d
+	movl	$0x80808080,%r11d
+	andl	%eax,%r10d
+	andl	%ebx,%r11d
+	movl	%r10d,%esi
+	movl	%r11d,%edi
+	shrl	$7,%r10d
+	leal	(%rax,%rax,1),%r8d
+	shrl	$7,%r11d
+	leal	(%rbx,%rbx,1),%r9d
+	subl	%r10d,%esi
+	subl	%r11d,%edi
+	andl	$0xfefefefe,%r8d
+	andl	$0xfefefefe,%r9d
+	andl	$0x1b1b1b1b,%esi
+	andl	$0x1b1b1b1b,%edi
+	movl	%eax,%r10d
+	movl	%ebx,%r11d
+	xorl	%esi,%r8d
+	xorl	%edi,%r9d
+
+	xorl	%r8d,%eax
+	xorl	%r9d,%ebx
+	movl	$0x80808080,%r12d
+	roll	$24,%eax
+	movl	$0x80808080,%ebp
+	roll	$24,%ebx
+	andl	%ecx,%r12d
+	andl	%edx,%ebp
+	xorl	%r8d,%eax
+	xorl	%r9d,%ebx
+	movl	%r12d,%esi
+	rorl	$16,%r10d
+	movl	%ebp,%edi
+	rorl	$16,%r11d
+	leal	(%rcx,%rcx,1),%r8d
+	shrl	$7,%r12d
+	xorl	%r10d,%eax
+	shrl	$7,%ebp
+	xorl	%r11d,%ebx
+	rorl	$8,%r10d
+	leal	(%rdx,%rdx,1),%r9d
+	rorl	$8,%r11d
+	subl	%r12d,%esi
+	subl	%ebp,%edi
+	xorl	%r10d,%eax
+	xorl	%r11d,%ebx
+
+	andl	$0xfefefefe,%r8d
+	andl	$0xfefefefe,%r9d
+	andl	$0x1b1b1b1b,%esi
+	andl	$0x1b1b1b1b,%edi
+	movl	%ecx,%r12d
+	movl	%edx,%ebp
+	xorl	%esi,%r8d
+	xorl	%edi,%r9d
+
+	rorl	$16,%r12d
+	xorl	%r8d,%ecx
+	rorl	$16,%ebp
+	xorl	%r9d,%edx
+	roll	$24,%ecx
+	movl	0(%r14),%esi
+	roll	$24,%edx
+	xorl	%r8d,%ecx
+	movl	64(%r14),%edi
+	xorl	%r9d,%edx
+	movl	128(%r14),%r8d
+	xorl	%r12d,%ecx
+	rorl	$8,%r12d
+	xorl	%ebp,%edx
+	rorl	$8,%ebp
+	xorl	%r12d,%ecx
+	movl	192(%r14),%r9d
+	xorl	%ebp,%edx
+	jmp	L$enc_loop_compact
+.p2align	4
+L$enc_compact_done:
+	xorl	0(%r15),%eax
+	xorl	4(%r15),%ebx
+	xorl	8(%r15),%ecx
+	xorl	12(%r15),%edx
+.byte	0xf3,0xc3
+
+
+.p2align	4
+.globl	_aes_nohw_encrypt
+.private_extern _aes_nohw_encrypt
+
+.private_extern	_aes_nohw_encrypt
+_aes_nohw_encrypt:
+
+	movq	%rsp,%rax
+
+	pushq	%rbx
+
+	pushq	%rbp
+
+	pushq	%r12
+
+	pushq	%r13
+
+	pushq	%r14
+
+	pushq	%r15
+
+
+
+	leaq	-63(%rdx),%rcx
+	andq	$-64,%rsp
+	subq	%rsp,%rcx
+	negq	%rcx
+	andq	$0x3c0,%rcx
+	subq	%rcx,%rsp
+	subq	$32,%rsp
+
+	movq	%rsi,16(%rsp)
+	movq	%rax,24(%rsp)
+
+L$enc_prologue:
+
+	movq	%rdx,%r15
+	movl	240(%r15),%r13d
+
+	movl	0(%rdi),%eax
+	movl	4(%rdi),%ebx
+	movl	8(%rdi),%ecx
+	movl	12(%rdi),%edx
+
+	shll	$4,%r13d
+	leaq	(%r15,%r13,1),%rbp
+	movq	%r15,(%rsp)
+	movq	%rbp,8(%rsp)
+
+
+	leaq	L$AES_Te+2048(%rip),%r14
+	leaq	768(%rsp),%rbp
+	subq	%r14,%rbp
+	andq	$0x300,%rbp
+	leaq	(%r14,%rbp,1),%r14
+
+	call	_x86_64_AES_encrypt_compact
+
+	movq	16(%rsp),%r9
+	movq	24(%rsp),%rsi
+
+	movl	%eax,0(%r9)
+	movl	%ebx,4(%r9)
+	movl	%ecx,8(%r9)
+	movl	%edx,12(%r9)
+
+	movq	-48(%rsi),%r15
+
+	movq	-40(%rsi),%r14
+
+	movq	-32(%rsi),%r13
+
+	movq	-24(%rsi),%r12
+
+	movq	-16(%rsi),%rbp
+
+	movq	-8(%rsi),%rbx
+
+	leaq	(%rsi),%rsp
+
+L$enc_epilogue:
+	.byte	0xf3,0xc3
+
+
+
+.p2align	4
+_x86_64_AES_decrypt:
+	xorl	0(%r15),%eax
+	xorl	4(%r15),%ebx
+	xorl	8(%r15),%ecx
+	xorl	12(%r15),%edx
+
+	movl	240(%r15),%r13d
+	subl	$1,%r13d
+	jmp	L$dec_loop
+.p2align	4
+L$dec_loop:
+
+	movzbl	%al,%esi
+	movzbl	%bl,%edi
+	movzbl	%cl,%ebp
+	movl	0(%r14,%rsi,8),%r10d
+	movl	0(%r14,%rdi,8),%r11d
+	movl	0(%r14,%rbp,8),%r12d
+
+	movzbl	%dh,%esi
+	movzbl	%ah,%edi
+	movzbl	%dl,%ebp
+	xorl	3(%r14,%rsi,8),%r10d
+	xorl	3(%r14,%rdi,8),%r11d
+	movl	0(%r14,%rbp,8),%r8d
+
+	movzbl	%bh,%esi
+	shrl	$16,%eax
+	movzbl	%ch,%ebp
+	xorl	3(%r14,%rsi,8),%r12d
+	shrl	$16,%edx
+	xorl	3(%r14,%rbp,8),%r8d
+
+	shrl	$16,%ebx
+	leaq	16(%r15),%r15
+	shrl	$16,%ecx
+
+	movzbl	%cl,%esi
+	movzbl	%dl,%edi
+	movzbl	%al,%ebp
+	xorl	2(%r14,%rsi,8),%r10d
+	xorl	2(%r14,%rdi,8),%r11d
+	xorl	2(%r14,%rbp,8),%r12d
+
+	movzbl	%bh,%esi
+	movzbl	%ch,%edi
+	movzbl	%bl,%ebp
+	xorl	1(%r14,%rsi,8),%r10d
+	xorl	1(%r14,%rdi,8),%r11d
+	xorl	2(%r14,%rbp,8),%r8d
+
+	movzbl	%dh,%esi
+	movl	12(%r15),%edx
+	movzbl	%ah,%ebp
+	xorl	1(%r14,%rsi,8),%r12d
+	movl	0(%r15),%eax
+	xorl	1(%r14,%rbp,8),%r8d
+
+	xorl	%r10d,%eax
+	movl	4(%r15),%ebx
+	movl	8(%r15),%ecx
+	xorl	%r12d,%ecx
+	xorl	%r11d,%ebx
+	xorl	%r8d,%edx
+	subl	$1,%r13d
+	jnz	L$dec_loop
+	leaq	2048(%r14),%r14
+	movzbl	%al,%esi
+	movzbl	%bl,%edi
+	movzbl	%cl,%ebp
+	movzbl	(%r14,%rsi,1),%r10d
+	movzbl	(%r14,%rdi,1),%r11d
+	movzbl	(%r14,%rbp,1),%r12d
+
+	movzbl	%dl,%esi
+	movzbl	%dh,%edi
+	movzbl	%ah,%ebp
+	movzbl	(%r14,%rsi,1),%r8d
+	movzbl	(%r14,%rdi,1),%edi
+	movzbl	(%r14,%rbp,1),%ebp
+
+	shll	$8,%edi
+	shll	$8,%ebp
+
+	xorl	%edi,%r10d
+	xorl	%ebp,%r11d
+	shrl	$16,%edx
+
+	movzbl	%bh,%esi
+	movzbl	%ch,%edi
+	shrl	$16,%eax
+	movzbl	(%r14,%rsi,1),%esi
+	movzbl	(%r14,%rdi,1),%edi
+
+	shll	$8,%esi
+	shll	$8,%edi
+	shrl	$16,%ebx
+	xorl	%esi,%r12d
+	xorl	%edi,%r8d
+	shrl	$16,%ecx
+
+	movzbl	%cl,%esi
+	movzbl	%dl,%edi
+	movzbl	%al,%ebp
+	movzbl	(%r14,%rsi,1),%esi
+	movzbl	(%r14,%rdi,1),%edi
+	movzbl	(%r14,%rbp,1),%ebp
+
+	shll	$16,%esi
+	shll	$16,%edi
+	shll	$16,%ebp
+
+	xorl	%esi,%r10d
+	xorl	%edi,%r11d
+	xorl	%ebp,%r12d
+
+	movzbl	%bl,%esi
+	movzbl	%bh,%edi
+	movzbl	%ch,%ebp
+	movzbl	(%r14,%rsi,1),%esi
+	movzbl	(%r14,%rdi,1),%edi
+	movzbl	(%r14,%rbp,1),%ebp
+
+	shll	$16,%esi
+	shll	$24,%edi
+	shll	$24,%ebp
+
+	xorl	%esi,%r8d
+	xorl	%edi,%r10d
+	xorl	%ebp,%r11d
+
+	movzbl	%dh,%esi
+	movzbl	%ah,%edi
+	movl	16+12(%r15),%edx
+	movzbl	(%r14,%rsi,1),%esi
+	movzbl	(%r14,%rdi,1),%edi
+	movl	16+0(%r15),%eax
+
+	shll	$24,%esi
+	shll	$24,%edi
+
+	xorl	%esi,%r12d
+	xorl	%edi,%r8d
+
+	movl	16+4(%r15),%ebx
+	movl	16+8(%r15),%ecx
+	leaq	-2048(%r14),%r14
+	xorl	%r10d,%eax
+	xorl	%r11d,%ebx
+	xorl	%r12d,%ecx
+	xorl	%r8d,%edx
+.byte	0xf3,0xc3
+
+
+.p2align	4
+_x86_64_AES_decrypt_compact:
+
+	leaq	128(%r14),%r8
+	movl	0-128(%r8),%edi
+	movl	32-128(%r8),%ebp
+	movl	64-128(%r8),%r10d
+	movl	96-128(%r8),%r11d
+	movl	128-128(%r8),%edi
+	movl	160-128(%r8),%ebp
+	movl	192-128(%r8),%r10d
+	movl	224-128(%r8),%r11d
+	jmp	L$dec_loop_compact
+
+.p2align	4
+L$dec_loop_compact:
+	xorl	0(%r15),%eax
+	xorl	4(%r15),%ebx
+	xorl	8(%r15),%ecx
+	xorl	12(%r15),%edx
+	leaq	16(%r15),%r15
+	movzbl	%al,%r10d
+	movzbl	%bl,%r11d
+	movzbl	%cl,%r12d
+	movzbl	%dl,%r8d
+	movzbl	%dh,%esi
+	movzbl	%ah,%edi
+	shrl	$16,%edx
+	movzbl	%bh,%ebp
+	movzbl	(%r14,%r10,1),%r10d
+	movzbl	(%r14,%r11,1),%r11d
+	movzbl	(%r14,%r12,1),%r12d
+	movzbl	(%r14,%r8,1),%r8d
+
+	movzbl	(%r14,%rsi,1),%r9d
+	movzbl	%ch,%esi
+	movzbl	(%r14,%rdi,1),%r13d
+	movzbl	(%r14,%rbp,1),%ebp
+	movzbl	(%r14,%rsi,1),%esi
+
+	shrl	$16,%ecx
+	shll	$8,%r13d
+	shll	$8,%r9d
+	movzbl	%cl,%edi
+	shrl	$16,%eax
+	xorl	%r9d,%r10d
+	shrl	$16,%ebx
+	movzbl	%dl,%r9d
+
+	shll	$8,%ebp
+	xorl	%r13d,%r11d
+	shll	$8,%esi
+	movzbl	%al,%r13d
+	movzbl	(%r14,%rdi,1),%edi
+	xorl	%ebp,%r12d
+	movzbl	%bl,%ebp
+
+	shll	$16,%edi
+	xorl	%esi,%r8d
+	movzbl	(%r14,%r9,1),%r9d
+	movzbl	%bh,%esi
+	movzbl	(%r14,%rbp,1),%ebp
+	xorl	%edi,%r10d
+	movzbl	(%r14,%r13,1),%r13d
+	movzbl	%ch,%edi
+
+	shll	$16,%ebp
+	shll	$16,%r9d
+	shll	$16,%r13d
+	xorl	%ebp,%r8d
+	movzbl	%dh,%ebp
+	xorl	%r9d,%r11d
+	shrl	$8,%eax
+	xorl	%r13d,%r12d
+
+	movzbl	(%r14,%rsi,1),%esi
+	movzbl	(%r14,%rdi,1),%ebx
+	movzbl	(%r14,%rbp,1),%ecx
+	movzbl	(%r14,%rax,1),%edx
+
+	movl	%r10d,%eax
+	shll	$24,%esi
+	shll	$24,%ebx
+	shll	$24,%ecx
+	xorl	%esi,%eax
+	shll	$24,%edx
+	xorl	%r11d,%ebx
+	xorl	%r12d,%ecx
+	xorl	%r8d,%edx
+	cmpq	16(%rsp),%r15
+	je	L$dec_compact_done
+
+	movq	256+0(%r14),%rsi
+	shlq	$32,%rbx
+	shlq	$32,%rdx
+	movq	256+8(%r14),%rdi
+	orq	%rbx,%rax
+	orq	%rdx,%rcx
+	movq	256+16(%r14),%rbp
+	movq	%rsi,%r9
+	movq	%rsi,%r12
+	andq	%rax,%r9
+	andq	%rcx,%r12
+	movq	%r9,%rbx
+	movq	%r12,%rdx
+	shrq	$7,%r9
+	leaq	(%rax,%rax,1),%r8
+	shrq	$7,%r12
+	leaq	(%rcx,%rcx,1),%r11
+	subq	%r9,%rbx
+	subq	%r12,%rdx
+	andq	%rdi,%r8
+	andq	%rdi,%r11
+	andq	%rbp,%rbx
+	andq	%rbp,%rdx
+	xorq	%rbx,%r8
+	xorq	%rdx,%r11
+	movq	%rsi,%r10
+	movq	%rsi,%r13
+
+	andq	%r8,%r10
+	andq	%r11,%r13
+	movq	%r10,%rbx
+	movq	%r13,%rdx
+	shrq	$7,%r10
+	leaq	(%r8,%r8,1),%r9
+	shrq	$7,%r13
+	leaq	(%r11,%r11,1),%r12
+	subq	%r10,%rbx
+	subq	%r13,%rdx
+	andq	%rdi,%r9
+	andq	%rdi,%r12
+	andq	%rbp,%rbx
+	andq	%rbp,%rdx
+	xorq	%rbx,%r9
+	xorq	%rdx,%r12
+	movq	%rsi,%r10
+	movq	%rsi,%r13
+
+	andq	%r9,%r10
+	andq	%r12,%r13
+	movq	%r10,%rbx
+	movq	%r13,%rdx
+	shrq	$7,%r10
+	xorq	%rax,%r8
+	shrq	$7,%r13
+	xorq	%rcx,%r11
+	subq	%r10,%rbx
+	subq	%r13,%rdx
+	leaq	(%r9,%r9,1),%r10
+	leaq	(%r12,%r12,1),%r13
+	xorq	%rax,%r9
+	xorq	%rcx,%r12
+	andq	%rdi,%r10
+	andq	%rdi,%r13
+	andq	%rbp,%rbx
+	andq	%rbp,%rdx
+	xorq	%rbx,%r10
+	xorq	%rdx,%r13
+
+	xorq	%r10,%rax
+	xorq	%r13,%rcx
+	xorq	%r10,%r8
+	xorq	%r13,%r11
+	movq	%rax,%rbx
+	movq	%rcx,%rdx
+	xorq	%r10,%r9
+	shrq	$32,%rbx
+	xorq	%r13,%r12
+	shrq	$32,%rdx
+	xorq	%r8,%r10
+	roll	$8,%eax
+	xorq	%r11,%r13
+	roll	$8,%ecx
+	xorq	%r9,%r10
+	roll	$8,%ebx
+	xorq	%r12,%r13
+
+	roll	$8,%edx
+	xorl	%r10d,%eax
+	shrq	$32,%r10
+	xorl	%r13d,%ecx
+	shrq	$32,%r13
+	xorl	%r10d,%ebx
+	xorl	%r13d,%edx
+
+	movq	%r8,%r10
+	roll	$24,%r8d
+	movq	%r11,%r13
+	roll	$24,%r11d
+	shrq	$32,%r10
+	xorl	%r8d,%eax
+	shrq	$32,%r13
+	xorl	%r11d,%ecx
+	roll	$24,%r10d
+	movq	%r9,%r8
+	roll	$24,%r13d
+	movq	%r12,%r11
+	shrq	$32,%r8
+	xorl	%r10d,%ebx
+	shrq	$32,%r11
+	xorl	%r13d,%edx
+
+	movq	0(%r14),%rsi
+	roll	$16,%r9d
+	movq	64(%r14),%rdi
+	roll	$16,%r12d
+	movq	128(%r14),%rbp
+	roll	$16,%r8d
+	movq	192(%r14),%r10
+	xorl	%r9d,%eax
+	roll	$16,%r11d
+	xorl	%r12d,%ecx
+	movq	256(%r14),%r13
+	xorl	%r8d,%ebx
+	xorl	%r11d,%edx
+	jmp	L$dec_loop_compact
+.p2align	4
+L$dec_compact_done:
+	xorl	0(%r15),%eax
+	xorl	4(%r15),%ebx
+	xorl	8(%r15),%ecx
+	xorl	12(%r15),%edx
+.byte	0xf3,0xc3
+
+
+.p2align	4
+.globl	_aes_nohw_decrypt
+.private_extern _aes_nohw_decrypt
+
+.private_extern	_aes_nohw_decrypt
+_aes_nohw_decrypt:
+
+	movq	%rsp,%rax
+
+	pushq	%rbx
+
+	pushq	%rbp
+
+	pushq	%r12
+
+	pushq	%r13
+
+	pushq	%r14
+
+	pushq	%r15
+
+
+
+	leaq	-63(%rdx),%rcx
+	andq	$-64,%rsp
+	subq	%rsp,%rcx
+	negq	%rcx
+	andq	$0x3c0,%rcx
+	subq	%rcx,%rsp
+	subq	$32,%rsp
+
+	movq	%rsi,16(%rsp)
+	movq	%rax,24(%rsp)
+
+L$dec_prologue:
+
+	movq	%rdx,%r15
+	movl	240(%r15),%r13d
+
+	movl	0(%rdi),%eax
+	movl	4(%rdi),%ebx
+	movl	8(%rdi),%ecx
+	movl	12(%rdi),%edx
+
+	shll	$4,%r13d
+	leaq	(%r15,%r13,1),%rbp
+	movq	%r15,(%rsp)
+	movq	%rbp,8(%rsp)
+
+
+	leaq	L$AES_Td+2048(%rip),%r14
+	leaq	768(%rsp),%rbp
+	subq	%r14,%rbp
+	andq	$0x300,%rbp
+	leaq	(%r14,%rbp,1),%r14
+	shrq	$3,%rbp
+	addq	%rbp,%r14
+
+	call	_x86_64_AES_decrypt_compact
+
+	movq	16(%rsp),%r9
+	movq	24(%rsp),%rsi
+
+	movl	%eax,0(%r9)
+	movl	%ebx,4(%r9)
+	movl	%ecx,8(%r9)
+	movl	%edx,12(%r9)
+
+	movq	-48(%rsi),%r15
+
+	movq	-40(%rsi),%r14
+
+	movq	-32(%rsi),%r13
+
+	movq	-24(%rsi),%r12
+
+	movq	-16(%rsi),%rbp
+
+	movq	-8(%rsi),%rbx
+
+	leaq	(%rsi),%rsp
+
+L$dec_epilogue:
+	.byte	0xf3,0xc3
+
+
+.p2align	4
+.globl	_aes_nohw_set_encrypt_key
+.private_extern _aes_nohw_set_encrypt_key
+
+_aes_nohw_set_encrypt_key:
+
+	pushq	%rbx
+
+	pushq	%rbp
+
+	pushq	%r12
+
+	pushq	%r13
+
+	pushq	%r14
+
+	pushq	%r15
+
+	subq	$8,%rsp
+
+L$enc_key_prologue:
+
+	call	_x86_64_AES_set_encrypt_key
+
+	movq	40(%rsp),%rbp
+
+	movq	48(%rsp),%rbx
+
+	addq	$56,%rsp
+
+L$enc_key_epilogue:
+	.byte	0xf3,0xc3
+
+
+
+
+.p2align	4
+_x86_64_AES_set_encrypt_key:
+
+	movl	%esi,%ecx
+	movq	%rdi,%rsi
+	movq	%rdx,%rdi
+
+	testq	$-1,%rsi
+	jz	L$badpointer
+	testq	$-1,%rdi
+	jz	L$badpointer
+
+	leaq	L$AES_Te(%rip),%rbp
+	leaq	2048+128(%rbp),%rbp
+
+
+	movl	0-128(%rbp),%eax
+	movl	32-128(%rbp),%ebx
+	movl	64-128(%rbp),%r8d
+	movl	96-128(%rbp),%edx
+	movl	128-128(%rbp),%eax
+	movl	160-128(%rbp),%ebx
+	movl	192-128(%rbp),%r8d
+	movl	224-128(%rbp),%edx
+
+	cmpl	$128,%ecx
+	je	L$10rounds
+	cmpl	$192,%ecx
+	je	L$12rounds
+	cmpl	$256,%ecx
+	je	L$14rounds
+	movq	$-2,%rax
+	jmp	L$exit
+
+L$10rounds:
+	movq	0(%rsi),%rax
+	movq	8(%rsi),%rdx
+	movq	%rax,0(%rdi)
+	movq	%rdx,8(%rdi)
+
+	shrq	$32,%rdx
+	xorl	%ecx,%ecx
+	jmp	L$10shortcut
+.p2align	2
+L$10loop:
+	movl	0(%rdi),%eax
+	movl	12(%rdi),%edx
+L$10shortcut:
+	movzbl	%dl,%esi
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	shrl	$16,%edx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$8,%ebx
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+
+	xorl	1024-128(%rbp,%rcx,4),%eax
+	movl	%eax,16(%rdi)
+	xorl	4(%rdi),%eax
+	movl	%eax,20(%rdi)
+	xorl	8(%rdi),%eax
+	movl	%eax,24(%rdi)
+	xorl	12(%rdi),%eax
+	movl	%eax,28(%rdi)
+	addl	$1,%ecx
+	leaq	16(%rdi),%rdi
+	cmpl	$10,%ecx
+	jl	L$10loop
+
+	movl	$10,80(%rdi)
+	xorq	%rax,%rax
+	jmp	L$exit
+
+L$12rounds:
+	movq	0(%rsi),%rax
+	movq	8(%rsi),%rbx
+	movq	16(%rsi),%rdx
+	movq	%rax,0(%rdi)
+	movq	%rbx,8(%rdi)
+	movq	%rdx,16(%rdi)
+
+	shrq	$32,%rdx
+	xorl	%ecx,%ecx
+	jmp	L$12shortcut
+.p2align	2
+L$12loop:
+	movl	0(%rdi),%eax
+	movl	20(%rdi),%edx
+L$12shortcut:
+	movzbl	%dl,%esi
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	shrl	$16,%edx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$8,%ebx
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+
+	xorl	1024-128(%rbp,%rcx,4),%eax
+	movl	%eax,24(%rdi)
+	xorl	4(%rdi),%eax
+	movl	%eax,28(%rdi)
+	xorl	8(%rdi),%eax
+	movl	%eax,32(%rdi)
+	xorl	12(%rdi),%eax
+	movl	%eax,36(%rdi)
+
+	cmpl	$7,%ecx
+	je	L$12break
+	addl	$1,%ecx
+
+	xorl	16(%rdi),%eax
+	movl	%eax,40(%rdi)
+	xorl	20(%rdi),%eax
+	movl	%eax,44(%rdi)
+
+	leaq	24(%rdi),%rdi
+	jmp	L$12loop
+L$12break:
+	movl	$12,72(%rdi)
+	xorq	%rax,%rax
+	jmp	L$exit
+
+L$14rounds:
+	movq	0(%rsi),%rax
+	movq	8(%rsi),%rbx
+	movq	16(%rsi),%rcx
+	movq	24(%rsi),%rdx
+	movq	%rax,0(%rdi)
+	movq	%rbx,8(%rdi)
+	movq	%rcx,16(%rdi)
+	movq	%rdx,24(%rdi)
+
+	shrq	$32,%rdx
+	xorl	%ecx,%ecx
+	jmp	L$14shortcut
+.p2align	2
+L$14loop:
+	movl	0(%rdi),%eax
+	movl	28(%rdi),%edx
+L$14shortcut:
+	movzbl	%dl,%esi
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	shrl	$16,%edx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$8,%ebx
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+
+	xorl	1024-128(%rbp,%rcx,4),%eax
+	movl	%eax,32(%rdi)
+	xorl	4(%rdi),%eax
+	movl	%eax,36(%rdi)
+	xorl	8(%rdi),%eax
+	movl	%eax,40(%rdi)
+	xorl	12(%rdi),%eax
+	movl	%eax,44(%rdi)
+
+	cmpl	$6,%ecx
+	je	L$14break
+	addl	$1,%ecx
+
+	movl	%eax,%edx
+	movl	16(%rdi),%eax
+	movzbl	%dl,%esi
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	movzbl	%dh,%esi
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	shrl	$16,%edx
+	shll	$8,%ebx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+
+	movzbl	-128(%rbp,%rsi,1),%ebx
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+
+	movl	%eax,48(%rdi)
+	xorl	20(%rdi),%eax
+	movl	%eax,52(%rdi)
+	xorl	24(%rdi),%eax
+	movl	%eax,56(%rdi)
+	xorl	28(%rdi),%eax
+	movl	%eax,60(%rdi)
+
+	leaq	32(%rdi),%rdi
+	jmp	L$14loop
+L$14break:
+	movl	$14,48(%rdi)
+	xorq	%rax,%rax
+	jmp	L$exit
+
+L$badpointer:
+	movq	$-1,%rax
+L$exit:
+.byte	0xf3,0xc3
+
+
+.p2align	4
+.globl	_aes_nohw_set_decrypt_key
+.private_extern _aes_nohw_set_decrypt_key
+
+_aes_nohw_set_decrypt_key:
+
+	pushq	%rbx
+
+	pushq	%rbp
+
+	pushq	%r12
+
+	pushq	%r13
+
+	pushq	%r14
+
+	pushq	%r15
+
+	pushq	%rdx
+
+L$dec_key_prologue:
+
+	call	_x86_64_AES_set_encrypt_key
+	movq	(%rsp),%r8
+	cmpl	$0,%eax
+	jne	L$abort
+
+	movl	240(%r8),%r14d
+	xorq	%rdi,%rdi
+	leaq	(%rdi,%r14,4),%rcx
+	movq	%r8,%rsi
+	leaq	(%r8,%rcx,4),%rdi
+.p2align	2
+L$invert:
+	movq	0(%rsi),%rax
+	movq	8(%rsi),%rbx
+	movq	0(%rdi),%rcx
+	movq	8(%rdi),%rdx
+	movq	%rax,0(%rdi)
+	movq	%rbx,8(%rdi)
+	movq	%rcx,0(%rsi)
+	movq	%rdx,8(%rsi)
+	leaq	16(%rsi),%rsi
+	leaq	-16(%rdi),%rdi
+	cmpq	%rsi,%rdi
+	jne	L$invert
+
+	leaq	L$AES_Te+2048+1024(%rip),%rax
+
+	movq	40(%rax),%rsi
+	movq	48(%rax),%rdi
+	movq	56(%rax),%rbp
+
+	movq	%r8,%r15
+	subl	$1,%r14d
+.p2align	2
+L$permute:
+	leaq	16(%r15),%r15
+	movq	0(%r15),%rax
+	movq	8(%r15),%rcx
+	movq	%rsi,%r9
+	movq	%rsi,%r12
+	andq	%rax,%r9
+	andq	%rcx,%r12
+	movq	%r9,%rbx
+	movq	%r12,%rdx
+	shrq	$7,%r9
+	leaq	(%rax,%rax,1),%r8
+	shrq	$7,%r12
+	leaq	(%rcx,%rcx,1),%r11
+	subq	%r9,%rbx
+	subq	%r12,%rdx
+	andq	%rdi,%r8
+	andq	%rdi,%r11
+	andq	%rbp,%rbx
+	andq	%rbp,%rdx
+	xorq	%rbx,%r8
+	xorq	%rdx,%r11
+	movq	%rsi,%r10
+	movq	%rsi,%r13
+
+	andq	%r8,%r10
+	andq	%r11,%r13
+	movq	%r10,%rbx
+	movq	%r13,%rdx
+	shrq	$7,%r10
+	leaq	(%r8,%r8,1),%r9
+	shrq	$7,%r13
+	leaq	(%r11,%r11,1),%r12
+	subq	%r10,%rbx
+	subq	%r13,%rdx
+	andq	%rdi,%r9
+	andq	%rdi,%r12
+	andq	%rbp,%rbx
+	andq	%rbp,%rdx
+	xorq	%rbx,%r9
+	xorq	%rdx,%r12
+	movq	%rsi,%r10
+	movq	%rsi,%r13
+
+	andq	%r9,%r10
+	andq	%r12,%r13
+	movq	%r10,%rbx
+	movq	%r13,%rdx
+	shrq	$7,%r10
+	xorq	%rax,%r8
+	shrq	$7,%r13
+	xorq	%rcx,%r11
+	subq	%r10,%rbx
+	subq	%r13,%rdx
+	leaq	(%r9,%r9,1),%r10
+	leaq	(%r12,%r12,1),%r13
+	xorq	%rax,%r9
+	xorq	%rcx,%r12
+	andq	%rdi,%r10
+	andq	%rdi,%r13
+	andq	%rbp,%rbx
+	andq	%rbp,%rdx
+	xorq	%rbx,%r10
+	xorq	%rdx,%r13
+
+	xorq	%r10,%rax
+	xorq	%r13,%rcx
+	xorq	%r10,%r8
+	xorq	%r13,%r11
+	movq	%rax,%rbx
+	movq	%rcx,%rdx
+	xorq	%r10,%r9
+	shrq	$32,%rbx
+	xorq	%r13,%r12
+	shrq	$32,%rdx
+	xorq	%r8,%r10
+	roll	$8,%eax
+	xorq	%r11,%r13
+	roll	$8,%ecx
+	xorq	%r9,%r10
+	roll	$8,%ebx
+	xorq	%r12,%r13
+
+	roll	$8,%edx
+	xorl	%r10d,%eax
+	shrq	$32,%r10
+	xorl	%r13d,%ecx
+	shrq	$32,%r13
+	xorl	%r10d,%ebx
+	xorl	%r13d,%edx
+
+	movq	%r8,%r10
+	roll	$24,%r8d
+	movq	%r11,%r13
+	roll	$24,%r11d
+	shrq	$32,%r10
+	xorl	%r8d,%eax
+	shrq	$32,%r13
+	xorl	%r11d,%ecx
+	roll	$24,%r10d
+	movq	%r9,%r8
+	roll	$24,%r13d
+	movq	%r12,%r11
+	shrq	$32,%r8
+	xorl	%r10d,%ebx
+	shrq	$32,%r11
+	xorl	%r13d,%edx
+
+
+	roll	$16,%r9d
+
+	roll	$16,%r12d
+
+	roll	$16,%r8d
+
+	xorl	%r9d,%eax
+	roll	$16,%r11d
+	xorl	%r12d,%ecx
+
+	xorl	%r8d,%ebx
+	xorl	%r11d,%edx
+	movl	%eax,0(%r15)
+	movl	%ebx,4(%r15)
+	movl	%ecx,8(%r15)
+	movl	%edx,12(%r15)
+	subl	$1,%r14d
+	jnz	L$permute
+
+	xorq	%rax,%rax
+L$abort:
+	movq	8(%rsp),%r15
+
+	movq	16(%rsp),%r14
+
+	movq	24(%rsp),%r13
+
+	movq	32(%rsp),%r12
+
+	movq	40(%rsp),%rbp
+
+	movq	48(%rsp),%rbx
+
+	addq	$56,%rsp
+
+L$dec_key_epilogue:
+	.byte	0xf3,0xc3
+
+
+.p2align	4
+.globl	_aes_nohw_cbc_encrypt
+.private_extern _aes_nohw_cbc_encrypt
+
+
+.private_extern	_aes_nohw_cbc_encrypt
+_aes_nohw_cbc_encrypt:
+
+	cmpq	$0,%rdx
+	je	L$cbc_epilogue
+	pushfq
+
+
+
+	pushq	%rbx
+
+	pushq	%rbp
+
+	pushq	%r12
+
+	pushq	%r13
+
+	pushq	%r14
+
+	pushq	%r15
+
+L$cbc_prologue:
+
+	cld
+	movl	%r9d,%r9d
+
+	leaq	L$AES_Te(%rip),%r14
+	leaq	L$AES_Td(%rip),%r10
+	cmpq	$0,%r9
+	cmoveq	%r10,%r14
+
+
+	leaq	_OPENSSL_ia32cap_P(%rip),%r10
+	movl	(%r10),%r10d
+	cmpq	$512,%rdx
+	jb	L$cbc_slow_prologue
+	testq	$15,%rdx
+	jnz	L$cbc_slow_prologue
+	btl	$28,%r10d
+	jc	L$cbc_slow_prologue
+
+
+	leaq	-88-248(%rsp),%r15
+	andq	$-64,%r15
+
+
+	movq	%r14,%r10
+	leaq	2304(%r14),%r11
+	movq	%r15,%r12
+	andq	$0xFFF,%r10
+	andq	$0xFFF,%r11
+	andq	$0xFFF,%r12
+
+	cmpq	%r11,%r12
+	jb	L$cbc_te_break_out
+	subq	%r11,%r12
+	subq	%r12,%r15
+	jmp	L$cbc_te_ok
+L$cbc_te_break_out:
+	subq	%r10,%r12
+	andq	$0xFFF,%r12
+	addq	$320,%r12
+	subq	%r12,%r15
+.p2align	2
+L$cbc_te_ok:
+
+	xchgq	%rsp,%r15
+
+
+	movq	%r15,16(%rsp)
+
+L$cbc_fast_body:
+	movq	%rdi,24(%rsp)
+	movq	%rsi,32(%rsp)
+	movq	%rdx,40(%rsp)
+	movq	%rcx,48(%rsp)
+	movq	%r8,56(%rsp)
+	movl	$0,80+240(%rsp)
+	movq	%r8,%rbp
+	movq	%r9,%rbx
+	movq	%rsi,%r9
+	movq	%rdi,%r8
+	movq	%rcx,%r15
+
+	movl	240(%r15),%eax
+
+	movq	%r15,%r10
+	subq	%r14,%r10
+	andq	$0xfff,%r10
+	cmpq	$2304,%r10
+	jb	L$cbc_do_ecopy
+	cmpq	$4096-248,%r10
+	jb	L$cbc_skip_ecopy
+.p2align	2
+L$cbc_do_ecopy:
+	movq	%r15,%rsi
+	leaq	80(%rsp),%rdi
+	leaq	80(%rsp),%r15
+	movl	$30,%ecx
+.long	0x90A548F3
+	movl	%eax,(%rdi)
+L$cbc_skip_ecopy:
+	movq	%r15,0(%rsp)
+
+	movl	$18,%ecx
+.p2align	2
+L$cbc_prefetch_te:
+	movq	0(%r14),%r10
+	movq	32(%r14),%r11
+	movq	64(%r14),%r12
+	movq	96(%r14),%r13
+	leaq	128(%r14),%r14
+	subl	$1,%ecx
+	jnz	L$cbc_prefetch_te
+	leaq	-2304(%r14),%r14
+
+	cmpq	$0,%rbx
+	je	L$FAST_DECRYPT
+
+
+	movl	0(%rbp),%eax
+	movl	4(%rbp),%ebx
+	movl	8(%rbp),%ecx
+	movl	12(%rbp),%edx
+
+.p2align	2
+L$cbc_fast_enc_loop:
+	xorl	0(%r8),%eax
+	xorl	4(%r8),%ebx
+	xorl	8(%r8),%ecx
+	xorl	12(%r8),%edx
+	movq	0(%rsp),%r15
+	movq	%r8,24(%rsp)
+
+	call	_x86_64_AES_encrypt
+
+	movq	24(%rsp),%r8
+	movq	40(%rsp),%r10
+	movl	%eax,0(%r9)
+	movl	%ebx,4(%r9)
+	movl	%ecx,8(%r9)
+	movl	%edx,12(%r9)
+
+	leaq	16(%r8),%r8
+	leaq	16(%r9),%r9
+	subq	$16,%r10
+	testq	$-16,%r10
+	movq	%r10,40(%rsp)
+	jnz	L$cbc_fast_enc_loop
+	movq	56(%rsp),%rbp
+	movl	%eax,0(%rbp)
+	movl	%ebx,4(%rbp)
+	movl	%ecx,8(%rbp)
+	movl	%edx,12(%rbp)
+
+	jmp	L$cbc_fast_cleanup
+
+
+.p2align	4
+L$FAST_DECRYPT:
+	cmpq	%r8,%r9
+	je	L$cbc_fast_dec_in_place
+
+	movq	%rbp,64(%rsp)
+.p2align	2
+L$cbc_fast_dec_loop:
+	movl	0(%r8),%eax
+	movl	4(%r8),%ebx
+	movl	8(%r8),%ecx
+	movl	12(%r8),%edx
+	movq	0(%rsp),%r15
+	movq	%r8,24(%rsp)
+
+	call	_x86_64_AES_decrypt
+
+	movq	64(%rsp),%rbp
+	movq	24(%rsp),%r8
+	movq	40(%rsp),%r10
+	xorl	0(%rbp),%eax
+	xorl	4(%rbp),%ebx
+	xorl	8(%rbp),%ecx
+	xorl	12(%rbp),%edx
+	movq	%r8,%rbp
+
+	subq	$16,%r10
+	movq	%r10,40(%rsp)
+	movq	%rbp,64(%rsp)
+
+	movl	%eax,0(%r9)
+	movl	%ebx,4(%r9)
+	movl	%ecx,8(%r9)
+	movl	%edx,12(%r9)
+
+	leaq	16(%r8),%r8
+	leaq	16(%r9),%r9
+	jnz	L$cbc_fast_dec_loop
+	movq	56(%rsp),%r12
+	movq	0(%rbp),%r10
+	movq	8(%rbp),%r11
+	movq	%r10,0(%r12)
+	movq	%r11,8(%r12)
+	jmp	L$cbc_fast_cleanup
+
+.p2align	4
+L$cbc_fast_dec_in_place:
+	movq	0(%rbp),%r10
+	movq	8(%rbp),%r11
+	movq	%r10,0+64(%rsp)
+	movq	%r11,8+64(%rsp)
+.p2align	2
+L$cbc_fast_dec_in_place_loop:
+	movl	0(%r8),%eax
+	movl	4(%r8),%ebx
+	movl	8(%r8),%ecx
+	movl	12(%r8),%edx
+	movq	0(%rsp),%r15
+	movq	%r8,24(%rsp)
+
+	call	_x86_64_AES_decrypt
+
+	movq	24(%rsp),%r8
+	movq	40(%rsp),%r10
+	xorl	0+64(%rsp),%eax
+	xorl	4+64(%rsp),%ebx
+	xorl	8+64(%rsp),%ecx
+	xorl	12+64(%rsp),%edx
+
+	movq	0(%r8),%r11
+	movq	8(%r8),%r12
+	subq	$16,%r10
+	jz	L$cbc_fast_dec_in_place_done
+
+	movq	%r11,0+64(%rsp)
+	movq	%r12,8+64(%rsp)
+
+	movl	%eax,0(%r9)
+	movl	%ebx,4(%r9)
+	movl	%ecx,8(%r9)
+	movl	%edx,12(%r9)
+
+	leaq	16(%r8),%r8
+	leaq	16(%r9),%r9
+	movq	%r10,40(%rsp)
+	jmp	L$cbc_fast_dec_in_place_loop
+L$cbc_fast_dec_in_place_done:
+	movq	56(%rsp),%rdi
+	movq	%r11,0(%rdi)
+	movq	%r12,8(%rdi)
+
+	movl	%eax,0(%r9)
+	movl	%ebx,4(%r9)
+	movl	%ecx,8(%r9)
+	movl	%edx,12(%r9)
+
+.p2align	2
+L$cbc_fast_cleanup:
+	cmpl	$0,80+240(%rsp)
+	leaq	80(%rsp),%rdi
+	je	L$cbc_exit
+	movl	$30,%ecx
+	xorq	%rax,%rax
+.long	0x90AB48F3
+
+	jmp	L$cbc_exit
+
+
+.p2align	4
+L$cbc_slow_prologue:
+
+
+	leaq	-88(%rsp),%rbp
+	andq	$-64,%rbp
+
+	leaq	-88-63(%rcx),%r10
+	subq	%rbp,%r10
+	negq	%r10
+	andq	$0x3c0,%r10
+	subq	%r10,%rbp
+
+	xchgq	%rsp,%rbp
+
+
+	movq	%rbp,16(%rsp)
+
+L$cbc_slow_body:
+
+
+
+
+	movq	%r8,56(%rsp)
+	movq	%r8,%rbp
+	movq	%r9,%rbx
+	movq	%rsi,%r9
+	movq	%rdi,%r8
+	movq	%rcx,%r15
+	movq	%rdx,%r10
+
+	movl	240(%r15),%eax
+	movq	%r15,0(%rsp)
+	shll	$4,%eax
+	leaq	(%r15,%rax,1),%rax
+	movq	%rax,8(%rsp)
+
+
+	leaq	2048(%r14),%r14
+	leaq	768-8(%rsp),%rax
+	subq	%r14,%rax
+	andq	$0x300,%rax
+	leaq	(%r14,%rax,1),%r14
+
+	cmpq	$0,%rbx
+	je	L$SLOW_DECRYPT
+
+
+	testq	$-16,%r10
+	movl	0(%rbp),%eax
+	movl	4(%rbp),%ebx
+	movl	8(%rbp),%ecx
+	movl	12(%rbp),%edx
+	jz	L$cbc_slow_enc_tail
+
+.p2align	2
+L$cbc_slow_enc_loop:
+	xorl	0(%r8),%eax
+	xorl	4(%r8),%ebx
+	xorl	8(%r8),%ecx
+	xorl	12(%r8),%edx
+	movq	0(%rsp),%r15
+	movq	%r8,24(%rsp)
+	movq	%r9,32(%rsp)
+	movq	%r10,40(%rsp)
+
+	call	_x86_64_AES_encrypt_compact
+
+	movq	24(%rsp),%r8
+	movq	32(%rsp),%r9
+	movq	40(%rsp),%r10
+	movl	%eax,0(%r9)
+	movl	%ebx,4(%r9)
+	movl	%ecx,8(%r9)
+	movl	%edx,12(%r9)
+
+	leaq	16(%r8),%r8
+	leaq	16(%r9),%r9
+	subq	$16,%r10
+	testq	$-16,%r10
+	jnz	L$cbc_slow_enc_loop
+	testq	$15,%r10
+	jnz	L$cbc_slow_enc_tail
+	movq	56(%rsp),%rbp
+	movl	%eax,0(%rbp)
+	movl	%ebx,4(%rbp)
+	movl	%ecx,8(%rbp)
+	movl	%edx,12(%rbp)
+
+	jmp	L$cbc_exit
+
+.p2align	2
+L$cbc_slow_enc_tail:
+	movq	%rax,%r11
+	movq	%rcx,%r12
+	movq	%r10,%rcx
+	movq	%r8,%rsi
+	movq	%r9,%rdi
+.long	0x9066A4F3
+	movq	$16,%rcx
+	subq	%r10,%rcx
+	xorq	%rax,%rax
+.long	0x9066AAF3
+	movq	%r9,%r8
+	movq	$16,%r10
+	movq	%r11,%rax
+	movq	%r12,%rcx
+	jmp	L$cbc_slow_enc_loop
+
+.p2align	4
+L$SLOW_DECRYPT:
+	shrq	$3,%rax
+	addq	%rax,%r14
+
+	movq	0(%rbp),%r11
+	movq	8(%rbp),%r12
+	movq	%r11,0+64(%rsp)
+	movq	%r12,8+64(%rsp)
+
+.p2align	2
+L$cbc_slow_dec_loop:
+	movl	0(%r8),%eax
+	movl	4(%r8),%ebx
+	movl	8(%r8),%ecx
+	movl	12(%r8),%edx
+	movq	0(%rsp),%r15
+	movq	%r8,24(%rsp)
+	movq	%r9,32(%rsp)
+	movq	%r10,40(%rsp)
+
+	call	_x86_64_AES_decrypt_compact
+
+	movq	24(%rsp),%r8
+	movq	32(%rsp),%r9
+	movq	40(%rsp),%r10
+	xorl	0+64(%rsp),%eax
+	xorl	4+64(%rsp),%ebx
+	xorl	8+64(%rsp),%ecx
+	xorl	12+64(%rsp),%edx
+
+	movq	0(%r8),%r11
+	movq	8(%r8),%r12
+	subq	$16,%r10
+	jc	L$cbc_slow_dec_partial
+	jz	L$cbc_slow_dec_done
+
+	movq	%r11,0+64(%rsp)
+	movq	%r12,8+64(%rsp)
+
+	movl	%eax,0(%r9)
+	movl	%ebx,4(%r9)
+	movl	%ecx,8(%r9)
+	movl	%edx,12(%r9)
+
+	leaq	16(%r8),%r8
+	leaq	16(%r9),%r9
+	jmp	L$cbc_slow_dec_loop
+L$cbc_slow_dec_done:
+	movq	56(%rsp),%rdi
+	movq	%r11,0(%rdi)
+	movq	%r12,8(%rdi)
+
+	movl	%eax,0(%r9)
+	movl	%ebx,4(%r9)
+	movl	%ecx,8(%r9)
+	movl	%edx,12(%r9)
+
+	jmp	L$cbc_exit
+
+.p2align	2
+L$cbc_slow_dec_partial:
+	movq	56(%rsp),%rdi
+	movq	%r11,0(%rdi)
+	movq	%r12,8(%rdi)
+
+	movl	%eax,0+64(%rsp)
+	movl	%ebx,4+64(%rsp)
+	movl	%ecx,8+64(%rsp)
+	movl	%edx,12+64(%rsp)
+
+	movq	%r9,%rdi
+	leaq	64(%rsp),%rsi
+	leaq	16(%r10),%rcx
+.long	0x9066A4F3
+	jmp	L$cbc_exit
+
+.p2align	4
+L$cbc_exit:
+	movq	16(%rsp),%rsi
+
+	movq	(%rsi),%r15
+
+	movq	8(%rsi),%r14
+
+	movq	16(%rsi),%r13
+
+	movq	24(%rsi),%r12
+
+	movq	32(%rsi),%rbp
+
+	movq	40(%rsi),%rbx
+
+	leaq	48(%rsi),%rsp
+
+L$cbc_popfq:
+	popfq
+
+
+
+L$cbc_epilogue:
+	.byte	0xf3,0xc3
+
+
+.p2align	6
+L$AES_Te:
+.long	0xa56363c6,0xa56363c6
+.long	0x847c7cf8,0x847c7cf8
+.long	0x997777ee,0x997777ee
+.long	0x8d7b7bf6,0x8d7b7bf6
+.long	0x0df2f2ff,0x0df2f2ff
+.long	0xbd6b6bd6,0xbd6b6bd6
+.long	0xb16f6fde,0xb16f6fde
+.long	0x54c5c591,0x54c5c591
+.long	0x50303060,0x50303060
+.long	0x03010102,0x03010102
+.long	0xa96767ce,0xa96767ce
+.long	0x7d2b2b56,0x7d2b2b56
+.long	0x19fefee7,0x19fefee7
+.long	0x62d7d7b5,0x62d7d7b5
+.long	0xe6abab4d,0xe6abab4d
+.long	0x9a7676ec,0x9a7676ec
+.long	0x45caca8f,0x45caca8f
+.long	0x9d82821f,0x9d82821f
+.long	0x40c9c989,0x40c9c989
+.long	0x877d7dfa,0x877d7dfa
+.long	0x15fafaef,0x15fafaef
+.long	0xeb5959b2,0xeb5959b2
+.long	0xc947478e,0xc947478e
+.long	0x0bf0f0fb,0x0bf0f0fb
+.long	0xecadad41,0xecadad41
+.long	0x67d4d4b3,0x67d4d4b3
+.long	0xfda2a25f,0xfda2a25f
+.long	0xeaafaf45,0xeaafaf45
+.long	0xbf9c9c23,0xbf9c9c23
+.long	0xf7a4a453,0xf7a4a453
+.long	0x967272e4,0x967272e4
+.long	0x5bc0c09b,0x5bc0c09b
+.long	0xc2b7b775,0xc2b7b775
+.long	0x1cfdfde1,0x1cfdfde1
+.long	0xae93933d,0xae93933d
+.long	0x6a26264c,0x6a26264c
+.long	0x5a36366c,0x5a36366c
+.long	0x413f3f7e,0x413f3f7e
+.long	0x02f7f7f5,0x02f7f7f5
+.long	0x4fcccc83,0x4fcccc83
+.long	0x5c343468,0x5c343468
+.long	0xf4a5a551,0xf4a5a551
+.long	0x34e5e5d1,0x34e5e5d1
+.long	0x08f1f1f9,0x08f1f1f9
+.long	0x937171e2,0x937171e2
+.long	0x73d8d8ab,0x73d8d8ab
+.long	0x53313162,0x53313162
+.long	0x3f15152a,0x3f15152a
+.long	0x0c040408,0x0c040408
+.long	0x52c7c795,0x52c7c795
+.long	0x65232346,0x65232346
+.long	0x5ec3c39d,0x5ec3c39d
+.long	0x28181830,0x28181830
+.long	0xa1969637,0xa1969637
+.long	0x0f05050a,0x0f05050a
+.long	0xb59a9a2f,0xb59a9a2f
+.long	0x0907070e,0x0907070e
+.long	0x36121224,0x36121224
+.long	0x9b80801b,0x9b80801b
+.long	0x3de2e2df,0x3de2e2df
+.long	0x26ebebcd,0x26ebebcd
+.long	0x6927274e,0x6927274e
+.long	0xcdb2b27f,0xcdb2b27f
+.long	0x9f7575ea,0x9f7575ea
+.long	0x1b090912,0x1b090912
+.long	0x9e83831d,0x9e83831d
+.long	0x742c2c58,0x742c2c58
+.long	0x2e1a1a34,0x2e1a1a34
+.long	0x2d1b1b36,0x2d1b1b36
+.long	0xb26e6edc,0xb26e6edc
+.long	0xee5a5ab4,0xee5a5ab4
+.long	0xfba0a05b,0xfba0a05b
+.long	0xf65252a4,0xf65252a4
+.long	0x4d3b3b76,0x4d3b3b76
+.long	0x61d6d6b7,0x61d6d6b7
+.long	0xceb3b37d,0xceb3b37d
+.long	0x7b292952,0x7b292952
+.long	0x3ee3e3dd,0x3ee3e3dd
+.long	0x712f2f5e,0x712f2f5e
+.long	0x97848413,0x97848413
+.long	0xf55353a6,0xf55353a6
+.long	0x68d1d1b9,0x68d1d1b9
+.long	0x00000000,0x00000000
+.long	0x2cededc1,0x2cededc1
+.long	0x60202040,0x60202040
+.long	0x1ffcfce3,0x1ffcfce3
+.long	0xc8b1b179,0xc8b1b179
+.long	0xed5b5bb6,0xed5b5bb6
+.long	0xbe6a6ad4,0xbe6a6ad4
+.long	0x46cbcb8d,0x46cbcb8d
+.long	0xd9bebe67,0xd9bebe67
+.long	0x4b393972,0x4b393972
+.long	0xde4a4a94,0xde4a4a94
+.long	0xd44c4c98,0xd44c4c98
+.long	0xe85858b0,0xe85858b0
+.long	0x4acfcf85,0x4acfcf85
+.long	0x6bd0d0bb,0x6bd0d0bb
+.long	0x2aefefc5,0x2aefefc5
+.long	0xe5aaaa4f,0xe5aaaa4f
+.long	0x16fbfbed,0x16fbfbed
+.long	0xc5434386,0xc5434386
+.long	0xd74d4d9a,0xd74d4d9a
+.long	0x55333366,0x55333366
+.long	0x94858511,0x94858511
+.long	0xcf45458a,0xcf45458a
+.long	0x10f9f9e9,0x10f9f9e9
+.long	0x06020204,0x06020204
+.long	0x817f7ffe,0x817f7ffe
+.long	0xf05050a0,0xf05050a0
+.long	0x443c3c78,0x443c3c78
+.long	0xba9f9f25,0xba9f9f25
+.long	0xe3a8a84b,0xe3a8a84b
+.long	0xf35151a2,0xf35151a2
+.long	0xfea3a35d,0xfea3a35d
+.long	0xc0404080,0xc0404080
+.long	0x8a8f8f05,0x8a8f8f05
+.long	0xad92923f,0xad92923f
+.long	0xbc9d9d21,0xbc9d9d21
+.long	0x48383870,0x48383870
+.long	0x04f5f5f1,0x04f5f5f1
+.long	0xdfbcbc63,0xdfbcbc63
+.long	0xc1b6b677,0xc1b6b677
+.long	0x75dadaaf,0x75dadaaf
+.long	0x63212142,0x63212142
+.long	0x30101020,0x30101020
+.long	0x1affffe5,0x1affffe5
+.long	0x0ef3f3fd,0x0ef3f3fd
+.long	0x6dd2d2bf,0x6dd2d2bf
+.long	0x4ccdcd81,0x4ccdcd81
+.long	0x140c0c18,0x140c0c18
+.long	0x35131326,0x35131326
+.long	0x2fececc3,0x2fececc3
+.long	0xe15f5fbe,0xe15f5fbe
+.long	0xa2979735,0xa2979735
+.long	0xcc444488,0xcc444488
+.long	0x3917172e,0x3917172e
+.long	0x57c4c493,0x57c4c493
+.long	0xf2a7a755,0xf2a7a755
+.long	0x827e7efc,0x827e7efc
+.long	0x473d3d7a,0x473d3d7a
+.long	0xac6464c8,0xac6464c8
+.long	0xe75d5dba,0xe75d5dba
+.long	0x2b191932,0x2b191932
+.long	0x957373e6,0x957373e6
+.long	0xa06060c0,0xa06060c0
+.long	0x98818119,0x98818119
+.long	0xd14f4f9e,0xd14f4f9e
+.long	0x7fdcdca3,0x7fdcdca3
+.long	0x66222244,0x66222244
+.long	0x7e2a2a54,0x7e2a2a54
+.long	0xab90903b,0xab90903b
+.long	0x8388880b,0x8388880b
+.long	0xca46468c,0xca46468c
+.long	0x29eeeec7,0x29eeeec7
+.long	0xd3b8b86b,0xd3b8b86b
+.long	0x3c141428,0x3c141428
+.long	0x79dedea7,0x79dedea7
+.long	0xe25e5ebc,0xe25e5ebc
+.long	0x1d0b0b16,0x1d0b0b16
+.long	0x76dbdbad,0x76dbdbad
+.long	0x3be0e0db,0x3be0e0db
+.long	0x56323264,0x56323264
+.long	0x4e3a3a74,0x4e3a3a74
+.long	0x1e0a0a14,0x1e0a0a14
+.long	0xdb494992,0xdb494992
+.long	0x0a06060c,0x0a06060c
+.long	0x6c242448,0x6c242448
+.long	0xe45c5cb8,0xe45c5cb8
+.long	0x5dc2c29f,0x5dc2c29f
+.long	0x6ed3d3bd,0x6ed3d3bd
+.long	0xefacac43,0xefacac43
+.long	0xa66262c4,0xa66262c4
+.long	0xa8919139,0xa8919139
+.long	0xa4959531,0xa4959531
+.long	0x37e4e4d3,0x37e4e4d3
+.long	0x8b7979f2,0x8b7979f2
+.long	0x32e7e7d5,0x32e7e7d5
+.long	0x43c8c88b,0x43c8c88b
+.long	0x5937376e,0x5937376e
+.long	0xb76d6dda,0xb76d6dda
+.long	0x8c8d8d01,0x8c8d8d01
+.long	0x64d5d5b1,0x64d5d5b1
+.long	0xd24e4e9c,0xd24e4e9c
+.long	0xe0a9a949,0xe0a9a949
+.long	0xb46c6cd8,0xb46c6cd8
+.long	0xfa5656ac,0xfa5656ac
+.long	0x07f4f4f3,0x07f4f4f3
+.long	0x25eaeacf,0x25eaeacf
+.long	0xaf6565ca,0xaf6565ca
+.long	0x8e7a7af4,0x8e7a7af4
+.long	0xe9aeae47,0xe9aeae47
+.long	0x18080810,0x18080810
+.long	0xd5baba6f,0xd5baba6f
+.long	0x887878f0,0x887878f0
+.long	0x6f25254a,0x6f25254a
+.long	0x722e2e5c,0x722e2e5c
+.long	0x241c1c38,0x241c1c38
+.long	0xf1a6a657,0xf1a6a657
+.long	0xc7b4b473,0xc7b4b473
+.long	0x51c6c697,0x51c6c697
+.long	0x23e8e8cb,0x23e8e8cb
+.long	0x7cdddda1,0x7cdddda1
+.long	0x9c7474e8,0x9c7474e8
+.long	0x211f1f3e,0x211f1f3e
+.long	0xdd4b4b96,0xdd4b4b96
+.long	0xdcbdbd61,0xdcbdbd61
+.long	0x868b8b0d,0x868b8b0d
+.long	0x858a8a0f,0x858a8a0f
+.long	0x907070e0,0x907070e0
+.long	0x423e3e7c,0x423e3e7c
+.long	0xc4b5b571,0xc4b5b571
+.long	0xaa6666cc,0xaa6666cc
+.long	0xd8484890,0xd8484890
+.long	0x05030306,0x05030306
+.long	0x01f6f6f7,0x01f6f6f7
+.long	0x120e0e1c,0x120e0e1c
+.long	0xa36161c2,0xa36161c2
+.long	0x5f35356a,0x5f35356a
+.long	0xf95757ae,0xf95757ae
+.long	0xd0b9b969,0xd0b9b969
+.long	0x91868617,0x91868617
+.long	0x58c1c199,0x58c1c199
+.long	0x271d1d3a,0x271d1d3a
+.long	0xb99e9e27,0xb99e9e27
+.long	0x38e1e1d9,0x38e1e1d9
+.long	0x13f8f8eb,0x13f8f8eb
+.long	0xb398982b,0xb398982b
+.long	0x33111122,0x33111122
+.long	0xbb6969d2,0xbb6969d2
+.long	0x70d9d9a9,0x70d9d9a9
+.long	0x898e8e07,0x898e8e07
+.long	0xa7949433,0xa7949433
+.long	0xb69b9b2d,0xb69b9b2d
+.long	0x221e1e3c,0x221e1e3c
+.long	0x92878715,0x92878715
+.long	0x20e9e9c9,0x20e9e9c9
+.long	0x49cece87,0x49cece87
+.long	0xff5555aa,0xff5555aa
+.long	0x78282850,0x78282850
+.long	0x7adfdfa5,0x7adfdfa5
+.long	0x8f8c8c03,0x8f8c8c03
+.long	0xf8a1a159,0xf8a1a159
+.long	0x80898909,0x80898909
+.long	0x170d0d1a,0x170d0d1a
+.long	0xdabfbf65,0xdabfbf65
+.long	0x31e6e6d7,0x31e6e6d7
+.long	0xc6424284,0xc6424284
+.long	0xb86868d0,0xb86868d0
+.long	0xc3414182,0xc3414182
+.long	0xb0999929,0xb0999929
+.long	0x772d2d5a,0x772d2d5a
+.long	0x110f0f1e,0x110f0f1e
+.long	0xcbb0b07b,0xcbb0b07b
+.long	0xfc5454a8,0xfc5454a8
+.long	0xd6bbbb6d,0xd6bbbb6d
+.long	0x3a16162c,0x3a16162c
+.byte	0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte	0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte	0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte	0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte	0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte	0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte	0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte	0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte	0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte	0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte	0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte	0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte	0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte	0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte	0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte	0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte	0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte	0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte	0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte	0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte	0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte	0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte	0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte	0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte	0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte	0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte	0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte	0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte	0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte	0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte	0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte	0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.byte	0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte	0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte	0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte	0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte	0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte	0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte	0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte	0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte	0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte	0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte	0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte	0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte	0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte	0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte	0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte	0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte	0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte	0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte	0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte	0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte	0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte	0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte	0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte	0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte	0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte	0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte	0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte	0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte	0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte	0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte	0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte	0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.byte	0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte	0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte	0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte	0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte	0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte	0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte	0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte	0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte	0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte	0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte	0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte	0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte	0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte	0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte	0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte	0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte	0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte	0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte	0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte	0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte	0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte	0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte	0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte	0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte	0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte	0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte	0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte	0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte	0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte	0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte	0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte	0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.byte	0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte	0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte	0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte	0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte	0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte	0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte	0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte	0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte	0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte	0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte	0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte	0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte	0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte	0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte	0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte	0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte	0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte	0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte	0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte	0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte	0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte	0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte	0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte	0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte	0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte	0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte	0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte	0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte	0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte	0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte	0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte	0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.long	0x00000001, 0x00000002, 0x00000004, 0x00000008
+.long	0x00000010, 0x00000020, 0x00000040, 0x00000080
+.long	0x0000001b, 0x00000036, 0x80808080, 0x80808080
+.long	0xfefefefe, 0xfefefefe, 0x1b1b1b1b, 0x1b1b1b1b
+.p2align	6
+L$AES_Td:
+.long	0x50a7f451,0x50a7f451
+.long	0x5365417e,0x5365417e
+.long	0xc3a4171a,0xc3a4171a
+.long	0x965e273a,0x965e273a
+.long	0xcb6bab3b,0xcb6bab3b
+.long	0xf1459d1f,0xf1459d1f
+.long	0xab58faac,0xab58faac
+.long	0x9303e34b,0x9303e34b
+.long	0x55fa3020,0x55fa3020
+.long	0xf66d76ad,0xf66d76ad
+.long	0x9176cc88,0x9176cc88
+.long	0x254c02f5,0x254c02f5
+.long	0xfcd7e54f,0xfcd7e54f
+.long	0xd7cb2ac5,0xd7cb2ac5
+.long	0x80443526,0x80443526
+.long	0x8fa362b5,0x8fa362b5
+.long	0x495ab1de,0x495ab1de
+.long	0x671bba25,0x671bba25
+.long	0x980eea45,0x980eea45
+.long	0xe1c0fe5d,0xe1c0fe5d
+.long	0x02752fc3,0x02752fc3
+.long	0x12f04c81,0x12f04c81
+.long	0xa397468d,0xa397468d
+.long	0xc6f9d36b,0xc6f9d36b
+.long	0xe75f8f03,0xe75f8f03
+.long	0x959c9215,0x959c9215
+.long	0xeb7a6dbf,0xeb7a6dbf
+.long	0xda595295,0xda595295
+.long	0x2d83bed4,0x2d83bed4
+.long	0xd3217458,0xd3217458
+.long	0x2969e049,0x2969e049
+.long	0x44c8c98e,0x44c8c98e
+.long	0x6a89c275,0x6a89c275
+.long	0x78798ef4,0x78798ef4
+.long	0x6b3e5899,0x6b3e5899
+.long	0xdd71b927,0xdd71b927
+.long	0xb64fe1be,0xb64fe1be
+.long	0x17ad88f0,0x17ad88f0
+.long	0x66ac20c9,0x66ac20c9
+.long	0xb43ace7d,0xb43ace7d
+.long	0x184adf63,0x184adf63
+.long	0x82311ae5,0x82311ae5
+.long	0x60335197,0x60335197
+.long	0x457f5362,0x457f5362
+.long	0xe07764b1,0xe07764b1
+.long	0x84ae6bbb,0x84ae6bbb
+.long	0x1ca081fe,0x1ca081fe
+.long	0x942b08f9,0x942b08f9
+.long	0x58684870,0x58684870
+.long	0x19fd458f,0x19fd458f
+.long	0x876cde94,0x876cde94
+.long	0xb7f87b52,0xb7f87b52
+.long	0x23d373ab,0x23d373ab
+.long	0xe2024b72,0xe2024b72
+.long	0x578f1fe3,0x578f1fe3
+.long	0x2aab5566,0x2aab5566
+.long	0x0728ebb2,0x0728ebb2
+.long	0x03c2b52f,0x03c2b52f
+.long	0x9a7bc586,0x9a7bc586
+.long	0xa50837d3,0xa50837d3
+.long	0xf2872830,0xf2872830
+.long	0xb2a5bf23,0xb2a5bf23
+.long	0xba6a0302,0xba6a0302
+.long	0x5c8216ed,0x5c8216ed
+.long	0x2b1ccf8a,0x2b1ccf8a
+.long	0x92b479a7,0x92b479a7
+.long	0xf0f207f3,0xf0f207f3
+.long	0xa1e2694e,0xa1e2694e
+.long	0xcdf4da65,0xcdf4da65
+.long	0xd5be0506,0xd5be0506
+.long	0x1f6234d1,0x1f6234d1
+.long	0x8afea6c4,0x8afea6c4
+.long	0x9d532e34,0x9d532e34
+.long	0xa055f3a2,0xa055f3a2
+.long	0x32e18a05,0x32e18a05
+.long	0x75ebf6a4,0x75ebf6a4
+.long	0x39ec830b,0x39ec830b
+.long	0xaaef6040,0xaaef6040
+.long	0x069f715e,0x069f715e
+.long	0x51106ebd,0x51106ebd
+.long	0xf98a213e,0xf98a213e
+.long	0x3d06dd96,0x3d06dd96
+.long	0xae053edd,0xae053edd
+.long	0x46bde64d,0x46bde64d
+.long	0xb58d5491,0xb58d5491
+.long	0x055dc471,0x055dc471
+.long	0x6fd40604,0x6fd40604
+.long	0xff155060,0xff155060
+.long	0x24fb9819,0x24fb9819
+.long	0x97e9bdd6,0x97e9bdd6
+.long	0xcc434089,0xcc434089
+.long	0x779ed967,0x779ed967
+.long	0xbd42e8b0,0xbd42e8b0
+.long	0x888b8907,0x888b8907
+.long	0x385b19e7,0x385b19e7
+.long	0xdbeec879,0xdbeec879
+.long	0x470a7ca1,0x470a7ca1
+.long	0xe90f427c,0xe90f427c
+.long	0xc91e84f8,0xc91e84f8
+.long	0x00000000,0x00000000
+.long	0x83868009,0x83868009
+.long	0x48ed2b32,0x48ed2b32
+.long	0xac70111e,0xac70111e
+.long	0x4e725a6c,0x4e725a6c
+.long	0xfbff0efd,0xfbff0efd
+.long	0x5638850f,0x5638850f
+.long	0x1ed5ae3d,0x1ed5ae3d
+.long	0x27392d36,0x27392d36
+.long	0x64d90f0a,0x64d90f0a
+.long	0x21a65c68,0x21a65c68
+.long	0xd1545b9b,0xd1545b9b
+.long	0x3a2e3624,0x3a2e3624
+.long	0xb1670a0c,0xb1670a0c
+.long	0x0fe75793,0x0fe75793
+.long	0xd296eeb4,0xd296eeb4
+.long	0x9e919b1b,0x9e919b1b
+.long	0x4fc5c080,0x4fc5c080
+.long	0xa220dc61,0xa220dc61
+.long	0x694b775a,0x694b775a
+.long	0x161a121c,0x161a121c
+.long	0x0aba93e2,0x0aba93e2
+.long	0xe52aa0c0,0xe52aa0c0
+.long	0x43e0223c,0x43e0223c
+.long	0x1d171b12,0x1d171b12
+.long	0x0b0d090e,0x0b0d090e
+.long	0xadc78bf2,0xadc78bf2
+.long	0xb9a8b62d,0xb9a8b62d
+.long	0xc8a91e14,0xc8a91e14
+.long	0x8519f157,0x8519f157
+.long	0x4c0775af,0x4c0775af
+.long	0xbbdd99ee,0xbbdd99ee
+.long	0xfd607fa3,0xfd607fa3
+.long	0x9f2601f7,0x9f2601f7
+.long	0xbcf5725c,0xbcf5725c
+.long	0xc53b6644,0xc53b6644
+.long	0x347efb5b,0x347efb5b
+.long	0x7629438b,0x7629438b
+.long	0xdcc623cb,0xdcc623cb
+.long	0x68fcedb6,0x68fcedb6
+.long	0x63f1e4b8,0x63f1e4b8
+.long	0xcadc31d7,0xcadc31d7
+.long	0x10856342,0x10856342
+.long	0x40229713,0x40229713
+.long	0x2011c684,0x2011c684
+.long	0x7d244a85,0x7d244a85
+.long	0xf83dbbd2,0xf83dbbd2
+.long	0x1132f9ae,0x1132f9ae
+.long	0x6da129c7,0x6da129c7
+.long	0x4b2f9e1d,0x4b2f9e1d
+.long	0xf330b2dc,0xf330b2dc
+.long	0xec52860d,0xec52860d
+.long	0xd0e3c177,0xd0e3c177
+.long	0x6c16b32b,0x6c16b32b
+.long	0x99b970a9,0x99b970a9
+.long	0xfa489411,0xfa489411
+.long	0x2264e947,0x2264e947
+.long	0xc48cfca8,0xc48cfca8
+.long	0x1a3ff0a0,0x1a3ff0a0
+.long	0xd82c7d56,0xd82c7d56
+.long	0xef903322,0xef903322
+.long	0xc74e4987,0xc74e4987
+.long	0xc1d138d9,0xc1d138d9
+.long	0xfea2ca8c,0xfea2ca8c
+.long	0x360bd498,0x360bd498
+.long	0xcf81f5a6,0xcf81f5a6
+.long	0x28de7aa5,0x28de7aa5
+.long	0x268eb7da,0x268eb7da
+.long	0xa4bfad3f,0xa4bfad3f
+.long	0xe49d3a2c,0xe49d3a2c
+.long	0x0d927850,0x0d927850
+.long	0x9bcc5f6a,0x9bcc5f6a
+.long	0x62467e54,0x62467e54
+.long	0xc2138df6,0xc2138df6
+.long	0xe8b8d890,0xe8b8d890
+.long	0x5ef7392e,0x5ef7392e
+.long	0xf5afc382,0xf5afc382
+.long	0xbe805d9f,0xbe805d9f
+.long	0x7c93d069,0x7c93d069
+.long	0xa92dd56f,0xa92dd56f
+.long	0xb31225cf,0xb31225cf
+.long	0x3b99acc8,0x3b99acc8
+.long	0xa77d1810,0xa77d1810
+.long	0x6e639ce8,0x6e639ce8
+.long	0x7bbb3bdb,0x7bbb3bdb
+.long	0x097826cd,0x097826cd
+.long	0xf418596e,0xf418596e
+.long	0x01b79aec,0x01b79aec
+.long	0xa89a4f83,0xa89a4f83
+.long	0x656e95e6,0x656e95e6
+.long	0x7ee6ffaa,0x7ee6ffaa
+.long	0x08cfbc21,0x08cfbc21
+.long	0xe6e815ef,0xe6e815ef
+.long	0xd99be7ba,0xd99be7ba
+.long	0xce366f4a,0xce366f4a
+.long	0xd4099fea,0xd4099fea
+.long	0xd67cb029,0xd67cb029
+.long	0xafb2a431,0xafb2a431
+.long	0x31233f2a,0x31233f2a
+.long	0x3094a5c6,0x3094a5c6
+.long	0xc066a235,0xc066a235
+.long	0x37bc4e74,0x37bc4e74
+.long	0xa6ca82fc,0xa6ca82fc
+.long	0xb0d090e0,0xb0d090e0
+.long	0x15d8a733,0x15d8a733
+.long	0x4a9804f1,0x4a9804f1
+.long	0xf7daec41,0xf7daec41
+.long	0x0e50cd7f,0x0e50cd7f
+.long	0x2ff69117,0x2ff69117
+.long	0x8dd64d76,0x8dd64d76
+.long	0x4db0ef43,0x4db0ef43
+.long	0x544daacc,0x544daacc
+.long	0xdf0496e4,0xdf0496e4
+.long	0xe3b5d19e,0xe3b5d19e
+.long	0x1b886a4c,0x1b886a4c
+.long	0xb81f2cc1,0xb81f2cc1
+.long	0x7f516546,0x7f516546
+.long	0x04ea5e9d,0x04ea5e9d
+.long	0x5d358c01,0x5d358c01
+.long	0x737487fa,0x737487fa
+.long	0x2e410bfb,0x2e410bfb
+.long	0x5a1d67b3,0x5a1d67b3
+.long	0x52d2db92,0x52d2db92
+.long	0x335610e9,0x335610e9
+.long	0x1347d66d,0x1347d66d
+.long	0x8c61d79a,0x8c61d79a
+.long	0x7a0ca137,0x7a0ca137
+.long	0x8e14f859,0x8e14f859
+.long	0x893c13eb,0x893c13eb
+.long	0xee27a9ce,0xee27a9ce
+.long	0x35c961b7,0x35c961b7
+.long	0xede51ce1,0xede51ce1
+.long	0x3cb1477a,0x3cb1477a
+.long	0x59dfd29c,0x59dfd29c
+.long	0x3f73f255,0x3f73f255
+.long	0x79ce1418,0x79ce1418
+.long	0xbf37c773,0xbf37c773
+.long	0xeacdf753,0xeacdf753
+.long	0x5baafd5f,0x5baafd5f
+.long	0x146f3ddf,0x146f3ddf
+.long	0x86db4478,0x86db4478
+.long	0x81f3afca,0x81f3afca
+.long	0x3ec468b9,0x3ec468b9
+.long	0x2c342438,0x2c342438
+.long	0x5f40a3c2,0x5f40a3c2
+.long	0x72c31d16,0x72c31d16
+.long	0x0c25e2bc,0x0c25e2bc
+.long	0x8b493c28,0x8b493c28
+.long	0x41950dff,0x41950dff
+.long	0x7101a839,0x7101a839
+.long	0xdeb30c08,0xdeb30c08
+.long	0x9ce4b4d8,0x9ce4b4d8
+.long	0x90c15664,0x90c15664
+.long	0x6184cb7b,0x6184cb7b
+.long	0x70b632d5,0x70b632d5
+.long	0x745c6c48,0x745c6c48
+.long	0x4257b8d0,0x4257b8d0
+.byte	0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte	0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte	0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte	0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte	0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte	0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte	0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte	0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte	0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte	0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte	0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte	0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte	0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte	0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte	0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte	0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte	0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte	0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte	0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte	0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte	0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte	0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte	0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte	0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte	0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte	0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte	0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte	0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte	0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte	0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte	0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte	0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long	0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long	0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte	0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte	0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte	0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte	0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte	0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte	0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte	0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte	0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte	0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte	0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte	0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte	0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte	0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte	0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte	0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte	0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte	0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte	0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte	0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte	0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte	0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte	0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte	0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte	0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte	0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte	0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte	0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte	0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte	0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte	0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte	0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte	0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long	0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long	0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte	0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte	0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte	0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte	0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte	0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte	0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte	0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte	0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte	0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte	0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte	0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte	0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte	0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte	0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte	0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte	0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte	0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte	0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte	0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte	0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte	0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte	0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte	0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte	0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte	0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte	0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte	0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte	0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte	0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte	0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte	0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte	0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long	0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long	0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte	0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte	0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte	0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte	0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte	0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte	0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte	0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte	0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte	0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte	0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte	0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte	0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte	0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte	0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte	0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte	0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte	0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte	0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte	0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte	0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte	0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte	0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte	0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte	0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte	0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte	0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte	0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte	0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte	0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte	0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte	0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte	0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long	0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long	0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte	65,69,83,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.p2align	6
+#endif
diff --git a/win-x86/crypto/fipsmodule/aes-586.asm b/win-x86/crypto/fipsmodule/aes-586.asm
new file mode 100644
index 0000000..c3a47d8
--- /dev/null
+++ b/win-x86/crypto/fipsmodule/aes-586.asm
@@ -0,0 +1,3225 @@
+; This file is generated from a similarly-named Perl script in the BoringSSL
+; source tree. Do not edit by hand.
+
+%ifdef BORINGSSL_PREFIX
+%include "boringssl_prefix_symbols_nasm.inc"
+%endif
+%ifidn __OUTPUT_FORMAT__,obj
+section	code	use32 class=code align=64
+%elifidn __OUTPUT_FORMAT__,win32
+%ifdef __YASM_VERSION_ID__
+%if __YASM_VERSION_ID__ < 01010000h
+%error yasm version 1.1.0 or later needed.
+%endif
+; Yasm automatically includes .00 and complains about redefining it.
+; https://www.tortall.net/projects/yasm/manual/html/objfmt-win32-safeseh.html
+%else
+$@feat.00 equ 1
+%endif
+section	.text	code align=64
+%else
+section	.text	code
+%endif
+align	16
+__x86_AES_encrypt_compact:
+	mov	DWORD [20+esp],edi
+	xor	eax,DWORD [edi]
+	xor	ebx,DWORD [4+edi]
+	xor	ecx,DWORD [8+edi]
+	xor	edx,DWORD [12+edi]
+	mov	esi,DWORD [240+edi]
+	lea	esi,[esi*1+esi-2]
+	lea	esi,[esi*8+edi]
+	mov	DWORD [24+esp],esi
+	mov	edi,DWORD [ebp-128]
+	mov	esi,DWORD [ebp-96]
+	mov	edi,DWORD [ebp-64]
+	mov	esi,DWORD [ebp-32]
+	mov	edi,DWORD [ebp]
+	mov	esi,DWORD [32+ebp]
+	mov	edi,DWORD [64+ebp]
+	mov	esi,DWORD [96+ebp]
+align	16
+L$000loop:
+	mov	esi,eax
+	and	esi,255
+	movzx	esi,BYTE [esi*1+ebp-128]
+	movzx	edi,bh
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,8
+	xor	esi,edi
+	mov	edi,ecx
+	shr	edi,16
+	and	edi,255
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,16
+	xor	esi,edi
+	mov	edi,edx
+	shr	edi,24
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,24
+	xor	esi,edi
+	mov	DWORD [4+esp],esi
+	mov	esi,ebx
+	and	esi,255
+	shr	ebx,16
+	movzx	esi,BYTE [esi*1+ebp-128]
+	movzx	edi,ch
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,8
+	xor	esi,edi
+	mov	edi,edx
+	shr	edi,16
+	and	edi,255
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,16
+	xor	esi,edi
+	mov	edi,eax
+	shr	edi,24
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,24
+	xor	esi,edi
+	mov	DWORD [8+esp],esi
+	mov	esi,ecx
+	and	esi,255
+	shr	ecx,24
+	movzx	esi,BYTE [esi*1+ebp-128]
+	movzx	edi,dh
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,8
+	xor	esi,edi
+	mov	edi,eax
+	shr	edi,16
+	and	edx,255
+	and	edi,255
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,16
+	xor	esi,edi
+	movzx	edi,bh
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,24
+	xor	esi,edi
+	and	edx,255
+	movzx	edx,BYTE [edx*1+ebp-128]
+	movzx	eax,ah
+	movzx	eax,BYTE [eax*1+ebp-128]
+	shl	eax,8
+	xor	edx,eax
+	mov	eax,DWORD [4+esp]
+	and	ebx,255
+	movzx	ebx,BYTE [ebx*1+ebp-128]
+	shl	ebx,16
+	xor	edx,ebx
+	mov	ebx,DWORD [8+esp]
+	movzx	ecx,BYTE [ecx*1+ebp-128]
+	shl	ecx,24
+	xor	edx,ecx
+	mov	ecx,esi
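+; MixColumns via bit-twiddling: 2155905152 is 0x80808080 (the high bit of
+; each byte), 4278124286 is 0xfefefefe and 454761243 is 0x1b1b1b1b, so
+; each block below performs four byte-wise xtime operations (multiply by
+; 2 in GF(2^8)) in parallel on one 32-bit word, then folds in rotations.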
+	mov	ebp,2155905152
+	and	ebp,ecx
+	lea	edi,[ecx*1+ecx]
+	mov	esi,ebp
+	shr	ebp,7
+	and	edi,4278124286
+	sub	esi,ebp
+	mov	ebp,ecx
+	and	esi,454761243
+	ror	ebp,16
+	xor	esi,edi
+	mov	edi,ecx
+	xor	ecx,esi
+	ror	edi,24
+	xor	esi,ebp
+	rol	ecx,24
+	xor	esi,edi
+	mov	ebp,2155905152
+	xor	ecx,esi
+	and	ebp,edx
+	lea	edi,[edx*1+edx]
+	mov	esi,ebp
+	shr	ebp,7
+	and	edi,4278124286
+	sub	esi,ebp
+	mov	ebp,edx
+	and	esi,454761243
+	ror	ebp,16
+	xor	esi,edi
+	mov	edi,edx
+	xor	edx,esi
+	ror	edi,24
+	xor	esi,ebp
+	rol	edx,24
+	xor	esi,edi
+	mov	ebp,2155905152
+	xor	edx,esi
+	and	ebp,eax
+	lea	edi,[eax*1+eax]
+	mov	esi,ebp
+	shr	ebp,7
+	and	edi,4278124286
+	sub	esi,ebp
+	mov	ebp,eax
+	and	esi,454761243
+	ror	ebp,16
+	xor	esi,edi
+	mov	edi,eax
+	xor	eax,esi
+	ror	edi,24
+	xor	esi,ebp
+	rol	eax,24
+	xor	esi,edi
+	mov	ebp,2155905152
+	xor	eax,esi
+	and	ebp,ebx
+	lea	edi,[ebx*1+ebx]
+	mov	esi,ebp
+	shr	ebp,7
+	and	edi,4278124286
+	sub	esi,ebp
+	mov	ebp,ebx
+	and	esi,454761243
+	ror	ebp,16
+	xor	esi,edi
+	mov	edi,ebx
+	xor	ebx,esi
+	ror	edi,24
+	xor	esi,ebp
+	rol	ebx,24
+	xor	esi,edi
+	xor	ebx,esi
+	mov	edi,DWORD [20+esp]
+	mov	ebp,DWORD [28+esp]
+	add	edi,16
+	xor	eax,DWORD [edi]
+	xor	ebx,DWORD [4+edi]
+	xor	ecx,DWORD [8+edi]
+	xor	edx,DWORD [12+edi]
+	cmp	edi,DWORD [24+esp]
+	mov	DWORD [20+esp],edi
+	jb	NEAR L$000loop
+	mov	esi,eax
+	and	esi,255
+	movzx	esi,BYTE [esi*1+ebp-128]
+	movzx	edi,bh
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,8
+	xor	esi,edi
+	mov	edi,ecx
+	shr	edi,16
+	and	edi,255
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,16
+	xor	esi,edi
+	mov	edi,edx
+	shr	edi,24
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,24
+	xor	esi,edi
+	mov	DWORD [4+esp],esi
+	mov	esi,ebx
+	and	esi,255
+	shr	ebx,16
+	movzx	esi,BYTE [esi*1+ebp-128]
+	movzx	edi,ch
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,8
+	xor	esi,edi
+	mov	edi,edx
+	shr	edi,16
+	and	edi,255
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,16
+	xor	esi,edi
+	mov	edi,eax
+	shr	edi,24
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,24
+	xor	esi,edi
+	mov	DWORD [8+esp],esi
+	mov	esi,ecx
+	and	esi,255
+	shr	ecx,24
+	movzx	esi,BYTE [esi*1+ebp-128]
+	movzx	edi,dh
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,8
+	xor	esi,edi
+	mov	edi,eax
+	shr	edi,16
+	and	edx,255
+	and	edi,255
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,16
+	xor	esi,edi
+	movzx	edi,bh
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,24
+	xor	esi,edi
+	mov	edi,DWORD [20+esp]
+	and	edx,255
+	movzx	edx,BYTE [edx*1+ebp-128]
+	movzx	eax,ah
+	movzx	eax,BYTE [eax*1+ebp-128]
+	shl	eax,8
+	xor	edx,eax
+	mov	eax,DWORD [4+esp]
+	and	ebx,255
+	movzx	ebx,BYTE [ebx*1+ebp-128]
+	shl	ebx,16
+	xor	edx,ebx
+	mov	ebx,DWORD [8+esp]
+	movzx	ecx,BYTE [ecx*1+ebp-128]
+	shl	ecx,24
+	xor	edx,ecx
+	mov	ecx,esi
+	xor	eax,DWORD [16+edi]
+	xor	ebx,DWORD [20+edi]
+	xor	ecx,DWORD [24+edi]
+	xor	edx,DWORD [28+edi]
+	ret
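+; MMX/SSE flavour of the compact routine: the state lives in mm0/mm4,
+; S-box bytes are still fetched through the integer registers, and the
+; xtime step uses packed-byte ops (pcmpgtb/paddb) with pshufw rotations.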
+align	16
+__sse_AES_encrypt_compact:
+	pxor	mm0,[edi]
+	pxor	mm4,[8+edi]
+	mov	esi,DWORD [240+edi]
+	lea	esi,[esi*1+esi-2]
+	lea	esi,[esi*8+edi]
+	mov	DWORD [24+esp],esi
+	mov	eax,454761243
+	mov	DWORD [8+esp],eax
+	mov	DWORD [12+esp],eax
+	mov	eax,DWORD [ebp-128]
+	mov	ebx,DWORD [ebp-96]
+	mov	ecx,DWORD [ebp-64]
+	mov	edx,DWORD [ebp-32]
+	mov	eax,DWORD [ebp]
+	mov	ebx,DWORD [32+ebp]
+	mov	ecx,DWORD [64+ebp]
+	mov	edx,DWORD [96+ebp]
+align	16
+L$001loop:
+	pshufw	mm1,mm0,8
+	pshufw	mm5,mm4,13
+	movd	eax,mm1
+	movd	ebx,mm5
+	mov	DWORD [20+esp],edi
+	movzx	esi,al
+	movzx	edx,ah
+	pshufw	mm2,mm0,13
+	movzx	ecx,BYTE [esi*1+ebp-128]
+	movzx	edi,bl
+	movzx	edx,BYTE [edx*1+ebp-128]
+	shr	eax,16
+	shl	edx,8
+	movzx	esi,BYTE [edi*1+ebp-128]
+	movzx	edi,bh
+	shl	esi,16
+	pshufw	mm6,mm4,8
+	or	ecx,esi
+	movzx	esi,BYTE [edi*1+ebp-128]
+	movzx	edi,ah
+	shl	esi,24
+	shr	ebx,16
+	or	edx,esi
+	movzx	esi,BYTE [edi*1+ebp-128]
+	movzx	edi,bh
+	shl	esi,8
+	or	ecx,esi
+	movzx	esi,BYTE [edi*1+ebp-128]
+	movzx	edi,al
+	shl	esi,24
+	or	ecx,esi
+	movzx	esi,BYTE [edi*1+ebp-128]
+	movzx	edi,bl
+	movd	eax,mm2
+	movd	mm0,ecx
+	movzx	ecx,BYTE [edi*1+ebp-128]
+	movzx	edi,ah
+	shl	ecx,16
+	movd	ebx,mm6
+	or	ecx,esi
+	movzx	esi,BYTE [edi*1+ebp-128]
+	movzx	edi,bh
+	shl	esi,24
+	or	ecx,esi
+	movzx	esi,BYTE [edi*1+ebp-128]
+	movzx	edi,bl
+	shl	esi,8
+	shr	ebx,16
+	or	ecx,esi
+	movzx	esi,BYTE [edi*1+ebp-128]
+	movzx	edi,al
+	shr	eax,16
+	movd	mm1,ecx
+	movzx	ecx,BYTE [edi*1+ebp-128]
+	movzx	edi,ah
+	shl	ecx,16
+	and	eax,255
+	or	ecx,esi
+	punpckldq	mm0,mm1
+	movzx	esi,BYTE [edi*1+ebp-128]
+	movzx	edi,bh
+	shl	esi,24
+	and	ebx,255
+	movzx	eax,BYTE [eax*1+ebp-128]
+	or	ecx,esi
+	shl	eax,16
+	movzx	esi,BYTE [edi*1+ebp-128]
+	or	edx,eax
+	shl	esi,8
+	movzx	ebx,BYTE [ebx*1+ebp-128]
+	or	ecx,esi
+	or	edx,ebx
+	mov	edi,DWORD [20+esp]
+	movd	mm4,ecx
+	movd	mm5,edx
+	punpckldq	mm4,mm5
+	add	edi,16
+	cmp	edi,DWORD [24+esp]
+	ja	NEAR L$002out
+	movq	mm2,[8+esp]
+	pxor	mm3,mm3
+	pxor	mm7,mm7
+	movq	mm1,mm0
+	movq	mm5,mm4
+	pcmpgtb	mm3,mm0
+	pcmpgtb	mm7,mm4
+	pand	mm3,mm2
+	pand	mm7,mm2
+	pshufw	mm2,mm0,177
+	pshufw	mm6,mm4,177
+	paddb	mm0,mm0
+	paddb	mm4,mm4
+	pxor	mm0,mm3
+	pxor	mm4,mm7
+	pshufw	mm3,mm2,177
+	pshufw	mm7,mm6,177
+	pxor	mm1,mm0
+	pxor	mm5,mm4
+	pxor	mm0,mm2
+	pxor	mm4,mm6
+	movq	mm2,mm3
+	movq	mm6,mm7
+	pslld	mm3,8
+	pslld	mm7,8
+	psrld	mm2,24
+	psrld	mm6,24
+	pxor	mm0,mm3
+	pxor	mm4,mm7
+	pxor	mm0,mm2
+	pxor	mm4,mm6
+	movq	mm3,mm1
+	movq	mm7,mm5
+	movq	mm2,[edi]
+	movq	mm6,[8+edi]
+	psrld	mm1,8
+	psrld	mm5,8
+	mov	eax,DWORD [ebp-128]
+	pslld	mm3,24
+	pslld	mm7,24
+	mov	ebx,DWORD [ebp-64]
+	pxor	mm0,mm1
+	pxor	mm4,mm5
+	mov	ecx,DWORD [ebp]
+	pxor	mm0,mm3
+	pxor	mm4,mm7
+	mov	edx,DWORD [64+ebp]
+	pxor	mm0,mm2
+	pxor	mm4,mm6
+	jmp	NEAR L$001loop
+align	16
+L$002out:
+	pxor	mm0,[edi]
+	pxor	mm4,[8+edi]
+	ret
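+; Table-driven encryption for the fast CBC path: each L$AES_Te entry is
+; stored as two identical dwords, so byte offsets 0-3 into an entry
+; yield the four byte rotations of the same word.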
+align	16
+__x86_AES_encrypt:
+	mov	DWORD [20+esp],edi
+	xor	eax,DWORD [edi]
+	xor	ebx,DWORD [4+edi]
+	xor	ecx,DWORD [8+edi]
+	xor	edx,DWORD [12+edi]
+	mov	esi,DWORD [240+edi]
+	lea	esi,[esi*1+esi-2]
+	lea	esi,[esi*8+edi]
+	mov	DWORD [24+esp],esi
+align	16
+L$003loop:
+	mov	esi,eax
+	and	esi,255
+	mov	esi,DWORD [esi*8+ebp]
+	movzx	edi,bh
+	xor	esi,DWORD [3+edi*8+ebp]
+	mov	edi,ecx
+	shr	edi,16
+	and	edi,255
+	xor	esi,DWORD [2+edi*8+ebp]
+	mov	edi,edx
+	shr	edi,24
+	xor	esi,DWORD [1+edi*8+ebp]
+	mov	DWORD [4+esp],esi
+	mov	esi,ebx
+	and	esi,255
+	shr	ebx,16
+	mov	esi,DWORD [esi*8+ebp]
+	movzx	edi,ch
+	xor	esi,DWORD [3+edi*8+ebp]
+	mov	edi,edx
+	shr	edi,16
+	and	edi,255
+	xor	esi,DWORD [2+edi*8+ebp]
+	mov	edi,eax
+	shr	edi,24
+	xor	esi,DWORD [1+edi*8+ebp]
+	mov	DWORD [8+esp],esi
+	mov	esi,ecx
+	and	esi,255
+	shr	ecx,24
+	mov	esi,DWORD [esi*8+ebp]
+	movzx	edi,dh
+	xor	esi,DWORD [3+edi*8+ebp]
+	mov	edi,eax
+	shr	edi,16
+	and	edx,255
+	and	edi,255
+	xor	esi,DWORD [2+edi*8+ebp]
+	movzx	edi,bh
+	xor	esi,DWORD [1+edi*8+ebp]
+	mov	edi,DWORD [20+esp]
+	mov	edx,DWORD [edx*8+ebp]
+	movzx	eax,ah
+	xor	edx,DWORD [3+eax*8+ebp]
+	mov	eax,DWORD [4+esp]
+	and	ebx,255
+	xor	edx,DWORD [2+ebx*8+ebp]
+	mov	ebx,DWORD [8+esp]
+	xor	edx,DWORD [1+ecx*8+ebp]
+	mov	ecx,esi
+	add	edi,16
+	xor	eax,DWORD [edi]
+	xor	ebx,DWORD [4+edi]
+	xor	ecx,DWORD [8+edi]
+	xor	edx,DWORD [12+edi]
+	cmp	edi,DWORD [24+esp]
+	mov	DWORD [20+esp],edi
+	jb	NEAR L$003loop
+	mov	esi,eax
+	and	esi,255
+	mov	esi,DWORD [2+esi*8+ebp]
+	and	esi,255
+	movzx	edi,bh
+	mov	edi,DWORD [edi*8+ebp]
+	and	edi,65280
+	xor	esi,edi
+	mov	edi,ecx
+	shr	edi,16
+	and	edi,255
+	mov	edi,DWORD [edi*8+ebp]
+	and	edi,16711680
+	xor	esi,edi
+	mov	edi,edx
+	shr	edi,24
+	mov	edi,DWORD [2+edi*8+ebp]
+	and	edi,4278190080
+	xor	esi,edi
+	mov	DWORD [4+esp],esi
+	mov	esi,ebx
+	and	esi,255
+	shr	ebx,16
+	mov	esi,DWORD [2+esi*8+ebp]
+	and	esi,255
+	movzx	edi,ch
+	mov	edi,DWORD [edi*8+ebp]
+	and	edi,65280
+	xor	esi,edi
+	mov	edi,edx
+	shr	edi,16
+	and	edi,255
+	mov	edi,DWORD [edi*8+ebp]
+	and	edi,16711680
+	xor	esi,edi
+	mov	edi,eax
+	shr	edi,24
+	mov	edi,DWORD [2+edi*8+ebp]
+	and	edi,4278190080
+	xor	esi,edi
+	mov	DWORD [8+esp],esi
+	mov	esi,ecx
+	and	esi,255
+	shr	ecx,24
+	mov	esi,DWORD [2+esi*8+ebp]
+	and	esi,255
+	movzx	edi,dh
+	mov	edi,DWORD [edi*8+ebp]
+	and	edi,65280
+	xor	esi,edi
+	mov	edi,eax
+	shr	edi,16
+	and	edx,255
+	and	edi,255
+	mov	edi,DWORD [edi*8+ebp]
+	and	edi,16711680
+	xor	esi,edi
+	movzx	edi,bh
+	mov	edi,DWORD [2+edi*8+ebp]
+	and	edi,4278190080
+	xor	esi,edi
+	mov	edi,DWORD [20+esp]
+	and	edx,255
+	mov	edx,DWORD [2+edx*8+ebp]
+	and	edx,255
+	movzx	eax,ah
+	mov	eax,DWORD [eax*8+ebp]
+	and	eax,65280
+	xor	edx,eax
+	mov	eax,DWORD [4+esp]
+	and	ebx,255
+	mov	ebx,DWORD [ebx*8+ebp]
+	and	ebx,16711680
+	xor	edx,ebx
+	mov	ebx,DWORD [8+esp]
+	mov	ecx,DWORD [2+ecx*8+ebp]
+	and	ecx,4278190080
+	xor	edx,ecx
+	mov	ecx,esi
+	add	edi,16
+	xor	eax,DWORD [edi]
+	xor	ebx,DWORD [4+edi]
+	xor	ecx,DWORD [8+edi]
+	xor	edx,DWORD [12+edi]
+	ret
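+; L$AES_Te: 256 doubled T-table entries (2KB), then four 256-byte copies
+; of the forward S-box, then the key-schedule round constants.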
+align	64
+L$AES_Te:
+dd	2774754246,2774754246
+dd	2222750968,2222750968
+dd	2574743534,2574743534
+dd	2373680118,2373680118
+dd	234025727,234025727
+dd	3177933782,3177933782
+dd	2976870366,2976870366
+dd	1422247313,1422247313
+dd	1345335392,1345335392
+dd	50397442,50397442
+dd	2842126286,2842126286
+dd	2099981142,2099981142
+dd	436141799,436141799
+dd	1658312629,1658312629
+dd	3870010189,3870010189
+dd	2591454956,2591454956
+dd	1170918031,1170918031
+dd	2642575903,2642575903
+dd	1086966153,1086966153
+dd	2273148410,2273148410
+dd	368769775,368769775
+dd	3948501426,3948501426
+dd	3376891790,3376891790
+dd	200339707,200339707
+dd	3970805057,3970805057
+dd	1742001331,1742001331
+dd	4255294047,4255294047
+dd	3937382213,3937382213
+dd	3214711843,3214711843
+dd	4154762323,4154762323
+dd	2524082916,2524082916
+dd	1539358875,1539358875
+dd	3266819957,3266819957
+dd	486407649,486407649
+dd	2928907069,2928907069
+dd	1780885068,1780885068
+dd	1513502316,1513502316
+dd	1094664062,1094664062
+dd	49805301,49805301
+dd	1338821763,1338821763
+dd	1546925160,1546925160
+dd	4104496465,4104496465
+dd	887481809,887481809
+dd	150073849,150073849
+dd	2473685474,2473685474
+dd	1943591083,1943591083
+dd	1395732834,1395732834
+dd	1058346282,1058346282
+dd	201589768,201589768
+dd	1388824469,1388824469
+dd	1696801606,1696801606
+dd	1589887901,1589887901
+dd	672667696,672667696
+dd	2711000631,2711000631
+dd	251987210,251987210
+dd	3046808111,3046808111
+dd	151455502,151455502
+dd	907153956,907153956
+dd	2608889883,2608889883
+dd	1038279391,1038279391
+dd	652995533,652995533
+dd	1764173646,1764173646
+dd	3451040383,3451040383
+dd	2675275242,2675275242
+dd	453576978,453576978
+dd	2659418909,2659418909
+dd	1949051992,1949051992
+dd	773462580,773462580
+dd	756751158,756751158
+dd	2993581788,2993581788
+dd	3998898868,3998898868
+dd	4221608027,4221608027
+dd	4132590244,4132590244
+dd	1295727478,1295727478
+dd	1641469623,1641469623
+dd	3467883389,3467883389
+dd	2066295122,2066295122
+dd	1055122397,1055122397
+dd	1898917726,1898917726
+dd	2542044179,2542044179
+dd	4115878822,4115878822
+dd	1758581177,1758581177
+dd	0,0
+dd	753790401,753790401
+dd	1612718144,1612718144
+dd	536673507,536673507
+dd	3367088505,3367088505
+dd	3982187446,3982187446
+dd	3194645204,3194645204
+dd	1187761037,1187761037
+dd	3653156455,3653156455
+dd	1262041458,1262041458
+dd	3729410708,3729410708
+dd	3561770136,3561770136
+dd	3898103984,3898103984
+dd	1255133061,1255133061
+dd	1808847035,1808847035
+dd	720367557,720367557
+dd	3853167183,3853167183
+dd	385612781,385612781
+dd	3309519750,3309519750
+dd	3612167578,3612167578
+dd	1429418854,1429418854
+dd	2491778321,2491778321
+dd	3477423498,3477423498
+dd	284817897,284817897
+dd	100794884,100794884
+dd	2172616702,2172616702
+dd	4031795360,4031795360
+dd	1144798328,1144798328
+dd	3131023141,3131023141
+dd	3819481163,3819481163
+dd	4082192802,4082192802
+dd	4272137053,4272137053
+dd	3225436288,3225436288
+dd	2324664069,2324664069
+dd	2912064063,2912064063
+dd	3164445985,3164445985
+dd	1211644016,1211644016
+dd	83228145,83228145
+dd	3753688163,3753688163
+dd	3249976951,3249976951
+dd	1977277103,1977277103
+dd	1663115586,1663115586
+dd	806359072,806359072
+dd	452984805,452984805
+dd	250868733,250868733
+dd	1842533055,1842533055
+dd	1288555905,1288555905
+dd	336333848,336333848
+dd	890442534,890442534
+dd	804056259,804056259
+dd	3781124030,3781124030
+dd	2727843637,2727843637
+dd	3427026056,3427026056
+dd	957814574,957814574
+dd	1472513171,1472513171
+dd	4071073621,4071073621
+dd	2189328124,2189328124
+dd	1195195770,1195195770
+dd	2892260552,2892260552
+dd	3881655738,3881655738
+dd	723065138,723065138
+dd	2507371494,2507371494
+dd	2690670784,2690670784
+dd	2558624025,2558624025
+dd	3511635870,3511635870
+dd	2145180835,2145180835
+dd	1713513028,1713513028
+dd	2116692564,2116692564
+dd	2878378043,2878378043
+dd	2206763019,2206763019
+dd	3393603212,3393603212
+dd	703524551,703524551
+dd	3552098411,3552098411
+dd	1007948840,1007948840
+dd	2044649127,2044649127
+dd	3797835452,3797835452
+dd	487262998,487262998
+dd	1994120109,1994120109
+dd	1004593371,1004593371
+dd	1446130276,1446130276
+dd	1312438900,1312438900
+dd	503974420,503974420
+dd	3679013266,3679013266
+dd	168166924,168166924
+dd	1814307912,1814307912
+dd	3831258296,3831258296
+dd	1573044895,1573044895
+dd	1859376061,1859376061
+dd	4021070915,4021070915
+dd	2791465668,2791465668
+dd	2828112185,2828112185
+dd	2761266481,2761266481
+dd	937747667,937747667
+dd	2339994098,2339994098
+dd	854058965,854058965
+dd	1137232011,1137232011
+dd	1496790894,1496790894
+dd	3077402074,3077402074
+dd	2358086913,2358086913
+dd	1691735473,1691735473
+dd	3528347292,3528347292
+dd	3769215305,3769215305
+dd	3027004632,3027004632
+dd	4199962284,4199962284
+dd	133494003,133494003
+dd	636152527,636152527
+dd	2942657994,2942657994
+dd	2390391540,2390391540
+dd	3920539207,3920539207
+dd	403179536,403179536
+dd	3585784431,3585784431
+dd	2289596656,2289596656
+dd	1864705354,1864705354
+dd	1915629148,1915629148
+dd	605822008,605822008
+dd	4054230615,4054230615
+dd	3350508659,3350508659
+dd	1371981463,1371981463
+dd	602466507,602466507
+dd	2094914977,2094914977
+dd	2624877800,2624877800
+dd	555687742,555687742
+dd	3712699286,3712699286
+dd	3703422305,3703422305
+dd	2257292045,2257292045
+dd	2240449039,2240449039
+dd	2423288032,2423288032
+dd	1111375484,1111375484
+dd	3300242801,3300242801
+dd	2858837708,2858837708
+dd	3628615824,3628615824
+dd	84083462,84083462
+dd	32962295,32962295
+dd	302911004,302911004
+dd	2741068226,2741068226
+dd	1597322602,1597322602
+dd	4183250862,4183250862
+dd	3501832553,3501832553
+dd	2441512471,2441512471
+dd	1489093017,1489093017
+dd	656219450,656219450
+dd	3114180135,3114180135
+dd	954327513,954327513
+dd	335083755,335083755
+dd	3013122091,3013122091
+dd	856756514,856756514
+dd	3144247762,3144247762
+dd	1893325225,1893325225
+dd	2307821063,2307821063
+dd	2811532339,2811532339
+dd	3063651117,3063651117
+dd	572399164,572399164
+dd	2458355477,2458355477
+dd	552200649,552200649
+dd	1238290055,1238290055
+dd	4283782570,4283782570
+dd	2015897680,2015897680
+dd	2061492133,2061492133
+dd	2408352771,2408352771
+dd	4171342169,4171342169
+dd	2156497161,2156497161
+dd	386731290,386731290
+dd	3669999461,3669999461
+dd	837215959,837215959
+dd	3326231172,3326231172
+dd	3093850320,3093850320
+dd	3275833730,3275833730
+dd	2962856233,2962856233
+dd	1999449434,1999449434
+dd	286199582,286199582
+dd	3417354363,3417354363
+dd	4233385128,4233385128
+dd	3602627437,3602627437
+dd	974525996,974525996
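+; Four identical copies of the forward S-box. The entry code below picks
+; one copy from the caller's stack alignment (and ebx,768), which looks
+; intended to vary where the table sits relative to cache lines.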
+db	99,124,119,123,242,107,111,197
+db	48,1,103,43,254,215,171,118
+db	202,130,201,125,250,89,71,240
+db	173,212,162,175,156,164,114,192
+db	183,253,147,38,54,63,247,204
+db	52,165,229,241,113,216,49,21
+db	4,199,35,195,24,150,5,154
+db	7,18,128,226,235,39,178,117
+db	9,131,44,26,27,110,90,160
+db	82,59,214,179,41,227,47,132
+db	83,209,0,237,32,252,177,91
+db	106,203,190,57,74,76,88,207
+db	208,239,170,251,67,77,51,133
+db	69,249,2,127,80,60,159,168
+db	81,163,64,143,146,157,56,245
+db	188,182,218,33,16,255,243,210
+db	205,12,19,236,95,151,68,23
+db	196,167,126,61,100,93,25,115
+db	96,129,79,220,34,42,144,136
+db	70,238,184,20,222,94,11,219
+db	224,50,58,10,73,6,36,92
+db	194,211,172,98,145,149,228,121
+db	231,200,55,109,141,213,78,169
+db	108,86,244,234,101,122,174,8
+db	186,120,37,46,28,166,180,198
+db	232,221,116,31,75,189,139,138
+db	112,62,181,102,72,3,246,14
+db	97,53,87,185,134,193,29,158
+db	225,248,152,17,105,217,142,148
+db	155,30,135,233,206,85,40,223
+db	140,161,137,13,191,230,66,104
+db	65,153,45,15,176,84,187,22
+db	99,124,119,123,242,107,111,197
+db	48,1,103,43,254,215,171,118
+db	202,130,201,125,250,89,71,240
+db	173,212,162,175,156,164,114,192
+db	183,253,147,38,54,63,247,204
+db	52,165,229,241,113,216,49,21
+db	4,199,35,195,24,150,5,154
+db	7,18,128,226,235,39,178,117
+db	9,131,44,26,27,110,90,160
+db	82,59,214,179,41,227,47,132
+db	83,209,0,237,32,252,177,91
+db	106,203,190,57,74,76,88,207
+db	208,239,170,251,67,77,51,133
+db	69,249,2,127,80,60,159,168
+db	81,163,64,143,146,157,56,245
+db	188,182,218,33,16,255,243,210
+db	205,12,19,236,95,151,68,23
+db	196,167,126,61,100,93,25,115
+db	96,129,79,220,34,42,144,136
+db	70,238,184,20,222,94,11,219
+db	224,50,58,10,73,6,36,92
+db	194,211,172,98,145,149,228,121
+db	231,200,55,109,141,213,78,169
+db	108,86,244,234,101,122,174,8
+db	186,120,37,46,28,166,180,198
+db	232,221,116,31,75,189,139,138
+db	112,62,181,102,72,3,246,14
+db	97,53,87,185,134,193,29,158
+db	225,248,152,17,105,217,142,148
+db	155,30,135,233,206,85,40,223
+db	140,161,137,13,191,230,66,104
+db	65,153,45,15,176,84,187,22
+db	99,124,119,123,242,107,111,197
+db	48,1,103,43,254,215,171,118
+db	202,130,201,125,250,89,71,240
+db	173,212,162,175,156,164,114,192
+db	183,253,147,38,54,63,247,204
+db	52,165,229,241,113,216,49,21
+db	4,199,35,195,24,150,5,154
+db	7,18,128,226,235,39,178,117
+db	9,131,44,26,27,110,90,160
+db	82,59,214,179,41,227,47,132
+db	83,209,0,237,32,252,177,91
+db	106,203,190,57,74,76,88,207
+db	208,239,170,251,67,77,51,133
+db	69,249,2,127,80,60,159,168
+db	81,163,64,143,146,157,56,245
+db	188,182,218,33,16,255,243,210
+db	205,12,19,236,95,151,68,23
+db	196,167,126,61,100,93,25,115
+db	96,129,79,220,34,42,144,136
+db	70,238,184,20,222,94,11,219
+db	224,50,58,10,73,6,36,92
+db	194,211,172,98,145,149,228,121
+db	231,200,55,109,141,213,78,169
+db	108,86,244,234,101,122,174,8
+db	186,120,37,46,28,166,180,198
+db	232,221,116,31,75,189,139,138
+db	112,62,181,102,72,3,246,14
+db	97,53,87,185,134,193,29,158
+db	225,248,152,17,105,217,142,148
+db	155,30,135,233,206,85,40,223
+db	140,161,137,13,191,230,66,104
+db	65,153,45,15,176,84,187,22
+db	99,124,119,123,242,107,111,197
+db	48,1,103,43,254,215,171,118
+db	202,130,201,125,250,89,71,240
+db	173,212,162,175,156,164,114,192
+db	183,253,147,38,54,63,247,204
+db	52,165,229,241,113,216,49,21
+db	4,199,35,195,24,150,5,154
+db	7,18,128,226,235,39,178,117
+db	9,131,44,26,27,110,90,160
+db	82,59,214,179,41,227,47,132
+db	83,209,0,237,32,252,177,91
+db	106,203,190,57,74,76,88,207
+db	208,239,170,251,67,77,51,133
+db	69,249,2,127,80,60,159,168
+db	81,163,64,143,146,157,56,245
+db	188,182,218,33,16,255,243,210
+db	205,12,19,236,95,151,68,23
+db	196,167,126,61,100,93,25,115
+db	96,129,79,220,34,42,144,136
+db	70,238,184,20,222,94,11,219
+db	224,50,58,10,73,6,36,92
+db	194,211,172,98,145,149,228,121
+db	231,200,55,109,141,213,78,169
+db	108,86,244,234,101,122,174,8
+db	186,120,37,46,28,166,180,198
+db	232,221,116,31,75,189,139,138
+db	112,62,181,102,72,3,246,14
+db	97,53,87,185,134,193,29,158
+db	225,248,152,17,105,217,142,148
+db	155,30,135,233,206,85,40,223
+db	140,161,137,13,191,230,66,104
+db	65,153,45,15,176,84,187,22
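+; rcon: 1,2,4,8,16,32,64,128,27,54 (AES key-schedule round constants).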
+dd	1,2,4,8
+dd	16,32,64,128
+dd	27,54,0,0
+dd	0,0,0,0
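+; aes_nohw_encrypt(in, out, key): carves an aligned scratch area out of
+; the stack, then tests OPENSSL_ia32cap_P bit 25 (SSE) to dispatch to
+; the MMX/SSE or plain-x86 compact routine.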
+global	_aes_nohw_encrypt
+align	16
+_aes_nohw_encrypt:
+L$_aes_nohw_encrypt_begin:
+	push	ebp
+	push	ebx
+	push	esi
+	push	edi
+	mov	esi,DWORD [20+esp]
+	mov	edi,DWORD [28+esp]
+	mov	eax,esp
+	sub	esp,36
+	and	esp,-64
+	lea	ebx,[edi-127]
+	sub	ebx,esp
+	neg	ebx
+	and	ebx,960
+	sub	esp,ebx
+	add	esp,4
+	mov	DWORD [28+esp],eax
+	call	L$004pic_point
+L$004pic_point:
+	pop	ebp
+	lea	eax,[_OPENSSL_ia32cap_P]
+	lea	ebp,[(L$AES_Te-L$004pic_point)+ebp]
+	lea	ebx,[764+esp]
+	sub	ebx,ebp
+	and	ebx,768
+	lea	ebp,[2176+ebx*1+ebp]
+	bt	DWORD [eax],25
+	jnc	NEAR L$005x86
+	movq	mm0,[esi]
+	movq	mm4,[8+esi]
+	call	__sse_AES_encrypt_compact
+	mov	esp,DWORD [28+esp]
+	mov	esi,DWORD [24+esp]
+	movq	[esi],mm0
+	movq	[8+esi],mm4
+	emms
+	pop	edi
+	pop	esi
+	pop	ebx
+	pop	ebp
+	ret
+align	16
+L$005x86:
+	mov	DWORD [24+esp],ebp
+	mov	eax,DWORD [esi]
+	mov	ebx,DWORD [4+esi]
+	mov	ecx,DWORD [8+esi]
+	mov	edx,DWORD [12+esi]
+	call	__x86_AES_encrypt_compact
+	mov	esp,DWORD [28+esp]
+	mov	esi,DWORD [24+esp]
+	mov	DWORD [esi],eax
+	mov	DWORD [4+esi],ebx
+	mov	DWORD [8+esi],ecx
+	mov	DWORD [12+esi],edx
+	pop	edi
+	pop	esi
+	pop	ebx
+	pop	ebp
+	ret
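+; Compact decryption: inverse S-box lookups plus an arithmetic
+; InvMixColumns; the chained xtime blocks below build the multiplies by
+; 9, 11, 13 and 14 that the inverse mix requires.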
+align	16
+__x86_AES_decrypt_compact:
+	mov	DWORD [20+esp],edi
+	xor	eax,DWORD [edi]
+	xor	ebx,DWORD [4+edi]
+	xor	ecx,DWORD [8+edi]
+	xor	edx,DWORD [12+edi]
+	mov	esi,DWORD [240+edi]
+	lea	esi,[esi*1+esi-2]
+	lea	esi,[esi*8+edi]
+	mov	DWORD [24+esp],esi
+	mov	edi,DWORD [ebp-128]
+	mov	esi,DWORD [ebp-96]
+	mov	edi,DWORD [ebp-64]
+	mov	esi,DWORD [ebp-32]
+	mov	edi,DWORD [ebp]
+	mov	esi,DWORD [32+ebp]
+	mov	edi,DWORD [64+ebp]
+	mov	esi,DWORD [96+ebp]
+align	16
+L$006loop:
+	mov	esi,eax
+	and	esi,255
+	movzx	esi,BYTE [esi*1+ebp-128]
+	movzx	edi,dh
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,8
+	xor	esi,edi
+	mov	edi,ecx
+	shr	edi,16
+	and	edi,255
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,16
+	xor	esi,edi
+	mov	edi,ebx
+	shr	edi,24
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,24
+	xor	esi,edi
+	mov	DWORD [4+esp],esi
+	mov	esi,ebx
+	and	esi,255
+	movzx	esi,BYTE [esi*1+ebp-128]
+	movzx	edi,ah
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,8
+	xor	esi,edi
+	mov	edi,edx
+	shr	edi,16
+	and	edi,255
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,16
+	xor	esi,edi
+	mov	edi,ecx
+	shr	edi,24
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,24
+	xor	esi,edi
+	mov	DWORD [8+esp],esi
+	mov	esi,ecx
+	and	esi,255
+	movzx	esi,BYTE [esi*1+ebp-128]
+	movzx	edi,bh
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,8
+	xor	esi,edi
+	mov	edi,eax
+	shr	edi,16
+	and	edi,255
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,16
+	xor	esi,edi
+	mov	edi,edx
+	shr	edi,24
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,24
+	xor	esi,edi
+	and	edx,255
+	movzx	edx,BYTE [edx*1+ebp-128]
+	movzx	ecx,ch
+	movzx	ecx,BYTE [ecx*1+ebp-128]
+	shl	ecx,8
+	xor	edx,ecx
+	mov	ecx,esi
+	shr	ebx,16
+	and	ebx,255
+	movzx	ebx,BYTE [ebx*1+ebp-128]
+	shl	ebx,16
+	xor	edx,ebx
+	shr	eax,24
+	movzx	eax,BYTE [eax*1+ebp-128]
+	shl	eax,24
+	xor	edx,eax
+	mov	edi,2155905152
+	and	edi,ecx
+	mov	esi,edi
+	shr	edi,7
+	lea	eax,[ecx*1+ecx]
+	sub	esi,edi
+	and	eax,4278124286
+	and	esi,454761243
+	xor	eax,esi
+	mov	edi,2155905152
+	and	edi,eax
+	mov	esi,edi
+	shr	edi,7
+	lea	ebx,[eax*1+eax]
+	sub	esi,edi
+	and	ebx,4278124286
+	and	esi,454761243
+	xor	eax,ecx
+	xor	ebx,esi
+	mov	edi,2155905152
+	and	edi,ebx
+	mov	esi,edi
+	shr	edi,7
+	lea	ebp,[ebx*1+ebx]
+	sub	esi,edi
+	and	ebp,4278124286
+	and	esi,454761243
+	xor	ebx,ecx
+	rol	ecx,8
+	xor	ebp,esi
+	xor	ecx,eax
+	xor	eax,ebp
+	xor	ecx,ebx
+	xor	ebx,ebp
+	rol	eax,24
+	xor	ecx,ebp
+	rol	ebx,16
+	xor	ecx,eax
+	rol	ebp,8
+	xor	ecx,ebx
+	mov	eax,DWORD [4+esp]
+	xor	ecx,ebp
+	mov	DWORD [12+esp],ecx
+	mov	edi,2155905152
+	and	edi,edx
+	mov	esi,edi
+	shr	edi,7
+	lea	ebx,[edx*1+edx]
+	sub	esi,edi
+	and	ebx,4278124286
+	and	esi,454761243
+	xor	ebx,esi
+	mov	edi,2155905152
+	and	edi,ebx
+	mov	esi,edi
+	shr	edi,7
+	lea	ecx,[ebx*1+ebx]
+	sub	esi,edi
+	and	ecx,4278124286
+	and	esi,454761243
+	xor	ebx,edx
+	xor	ecx,esi
+	mov	edi,2155905152
+	and	edi,ecx
+	mov	esi,edi
+	shr	edi,7
+	lea	ebp,[ecx*1+ecx]
+	sub	esi,edi
+	and	ebp,4278124286
+	and	esi,454761243
+	xor	ecx,edx
+	rol	edx,8
+	xor	ebp,esi
+	xor	edx,ebx
+	xor	ebx,ebp
+	xor	edx,ecx
+	xor	ecx,ebp
+	rol	ebx,24
+	xor	edx,ebp
+	rol	ecx,16
+	xor	edx,ebx
+	rol	ebp,8
+	xor	edx,ecx
+	mov	ebx,DWORD [8+esp]
+	xor	edx,ebp
+	mov	DWORD [16+esp],edx
+	mov	edi,2155905152
+	and	edi,eax
+	mov	esi,edi
+	shr	edi,7
+	lea	ecx,[eax*1+eax]
+	sub	esi,edi
+	and	ecx,4278124286
+	and	esi,454761243
+	xor	ecx,esi
+	mov	edi,2155905152
+	and	edi,ecx
+	mov	esi,edi
+	shr	edi,7
+	lea	edx,[ecx*1+ecx]
+	sub	esi,edi
+	and	edx,4278124286
+	and	esi,454761243
+	xor	ecx,eax
+	xor	edx,esi
+	mov	edi,2155905152
+	and	edi,edx
+	mov	esi,edi
+	shr	edi,7
+	lea	ebp,[edx*1+edx]
+	sub	esi,edi
+	and	ebp,4278124286
+	and	esi,454761243
+	xor	edx,eax
+	rol	eax,8
+	xor	ebp,esi
+	xor	eax,ecx
+	xor	ecx,ebp
+	xor	eax,edx
+	xor	edx,ebp
+	rol	ecx,24
+	xor	eax,ebp
+	rol	edx,16
+	xor	eax,ecx
+	rol	ebp,8
+	xor	eax,edx
+	xor	eax,ebp
+	mov	edi,2155905152
+	and	edi,ebx
+	mov	esi,edi
+	shr	edi,7
+	lea	ecx,[ebx*1+ebx]
+	sub	esi,edi
+	and	ecx,4278124286
+	and	esi,454761243
+	xor	ecx,esi
+	mov	edi,2155905152
+	and	edi,ecx
+	mov	esi,edi
+	shr	edi,7
+	lea	edx,[ecx*1+ecx]
+	sub	esi,edi
+	and	edx,4278124286
+	and	esi,454761243
+	xor	ecx,ebx
+	xor	edx,esi
+	mov	edi,2155905152
+	and	edi,edx
+	mov	esi,edi
+	shr	edi,7
+	lea	ebp,[edx*1+edx]
+	sub	esi,edi
+	and	ebp,4278124286
+	and	esi,454761243
+	xor	edx,ebx
+	rol	ebx,8
+	xor	ebp,esi
+	xor	ebx,ecx
+	xor	ecx,ebp
+	xor	ebx,edx
+	xor	edx,ebp
+	rol	ecx,24
+	xor	ebx,ebp
+	rol	edx,16
+	xor	ebx,ecx
+	rol	ebp,8
+	xor	ebx,edx
+	mov	ecx,DWORD [12+esp]
+	xor	ebx,ebp
+	mov	edx,DWORD [16+esp]
+	mov	edi,DWORD [20+esp]
+	mov	ebp,DWORD [28+esp]
+	add	edi,16
+	xor	eax,DWORD [edi]
+	xor	ebx,DWORD [4+edi]
+	xor	ecx,DWORD [8+edi]
+	xor	edx,DWORD [12+edi]
+	cmp	edi,DWORD [24+esp]
+	mov	DWORD [20+esp],edi
+	jb	NEAR L$006loop
+	mov	esi,eax
+	and	esi,255
+	movzx	esi,BYTE [esi*1+ebp-128]
+	movzx	edi,dh
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,8
+	xor	esi,edi
+	mov	edi,ecx
+	shr	edi,16
+	and	edi,255
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,16
+	xor	esi,edi
+	mov	edi,ebx
+	shr	edi,24
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,24
+	xor	esi,edi
+	mov	DWORD [4+esp],esi
+	mov	esi,ebx
+	and	esi,255
+	movzx	esi,BYTE [esi*1+ebp-128]
+	movzx	edi,ah
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,8
+	xor	esi,edi
+	mov	edi,edx
+	shr	edi,16
+	and	edi,255
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,16
+	xor	esi,edi
+	mov	edi,ecx
+	shr	edi,24
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,24
+	xor	esi,edi
+	mov	DWORD [8+esp],esi
+	mov	esi,ecx
+	and	esi,255
+	movzx	esi,BYTE [esi*1+ebp-128]
+	movzx	edi,bh
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,8
+	xor	esi,edi
+	mov	edi,eax
+	shr	edi,16
+	and	edi,255
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,16
+	xor	esi,edi
+	mov	edi,edx
+	shr	edi,24
+	movzx	edi,BYTE [edi*1+ebp-128]
+	shl	edi,24
+	xor	esi,edi
+	mov	edi,DWORD [20+esp]
+	and	edx,255
+	movzx	edx,BYTE [edx*1+ebp-128]
+	movzx	ecx,ch
+	movzx	ecx,BYTE [ecx*1+ebp-128]
+	shl	ecx,8
+	xor	edx,ecx
+	mov	ecx,esi
+	shr	ebx,16
+	and	ebx,255
+	movzx	ebx,BYTE [ebx*1+ebp-128]
+	shl	ebx,16
+	xor	edx,ebx
+	mov	ebx,DWORD [8+esp]
+	shr	eax,24
+	movzx	eax,BYTE [eax*1+ebp-128]
+	shl	eax,24
+	xor	edx,eax
+	mov	eax,DWORD [4+esp]
+	xor	eax,DWORD [16+edi]
+	xor	ebx,DWORD [20+edi]
+	xor	ecx,DWORD [24+edi]
+	xor	edx,DWORD [28+edi]
+	ret
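+; MMX/SSE flavour of compact decryption, mirroring
+; __sse_AES_encrypt_compact.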
+align	16
+__sse_AES_decrypt_compact:
+	pxor	mm0,[edi]
+	pxor	mm4,[8+edi]
+	mov	esi,DWORD [240+edi]
+	lea	esi,[esi*1+esi-2]
+	lea	esi,[esi*8+edi]
+	mov	DWORD [24+esp],esi
+	mov	eax,454761243
+	mov	DWORD [8+esp],eax
+	mov	DWORD [12+esp],eax
+	mov	eax,DWORD [ebp-128]
+	mov	ebx,DWORD [ebp-96]
+	mov	ecx,DWORD [ebp-64]
+	mov	edx,DWORD [ebp-32]
+	mov	eax,DWORD [ebp]
+	mov	ebx,DWORD [32+ebp]
+	mov	ecx,DWORD [64+ebp]
+	mov	edx,DWORD [96+ebp]
+align	16
+L$007loop:
+	pshufw	mm1,mm0,12
+	pshufw	mm5,mm4,9
+	movd	eax,mm1
+	movd	ebx,mm5
+	mov	DWORD [20+esp],edi
+	movzx	esi,al
+	movzx	edx,ah
+	pshufw	mm2,mm0,6
+	movzx	ecx,BYTE [esi*1+ebp-128]
+	movzx	edi,bl
+	movzx	edx,BYTE [edx*1+ebp-128]
+	shr	eax,16
+	shl	edx,8
+	movzx	esi,BYTE [edi*1+ebp-128]
+	movzx	edi,bh
+	shl	esi,16
+	pshufw	mm6,mm4,3
+	or	ecx,esi
+	movzx	esi,BYTE [edi*1+ebp-128]
+	movzx	edi,ah
+	shl	esi,24
+	shr	ebx,16
+	or	edx,esi
+	movzx	esi,BYTE [edi*1+ebp-128]
+	movzx	edi,bh
+	shl	esi,24
+	or	ecx,esi
+	movzx	esi,BYTE [edi*1+ebp-128]
+	movzx	edi,al
+	shl	esi,8
+	movd	eax,mm2
+	or	ecx,esi
+	movzx	esi,BYTE [edi*1+ebp-128]
+	movzx	edi,bl
+	shl	esi,16
+	movd	ebx,mm6
+	movd	mm0,ecx
+	movzx	ecx,BYTE [edi*1+ebp-128]
+	movzx	edi,al
+	or	ecx,esi
+	movzx	esi,BYTE [edi*1+ebp-128]
+	movzx	edi,bl
+	or	edx,esi
+	movzx	esi,BYTE [edi*1+ebp-128]
+	movzx	edi,ah
+	shl	esi,16
+	shr	eax,16
+	or	edx,esi
+	movzx	esi,BYTE [edi*1+ebp-128]
+	movzx	edi,bh
+	shr	ebx,16
+	shl	esi,8
+	movd	mm1,edx
+	movzx	edx,BYTE [edi*1+ebp-128]
+	movzx	edi,bh
+	shl	edx,24
+	and	ebx,255
+	or	edx,esi
+	punpckldq	mm0,mm1
+	movzx	esi,BYTE [edi*1+ebp-128]
+	movzx	edi,al
+	shl	esi,8
+	movzx	eax,ah
+	movzx	ebx,BYTE [ebx*1+ebp-128]
+	or	ecx,esi
+	movzx	esi,BYTE [edi*1+ebp-128]
+	or	edx,ebx
+	shl	esi,16
+	movzx	eax,BYTE [eax*1+ebp-128]
+	or	edx,esi
+	shl	eax,24
+	or	ecx,eax
+	mov	edi,DWORD [20+esp]
+	movd	mm4,edx
+	movd	mm5,ecx
+	punpckldq	mm4,mm5
+	add	edi,16
+	cmp	edi,DWORD [24+esp]
+	ja	NEAR L$008out
+	movq	mm3,mm0
+	movq	mm7,mm4
+	pshufw	mm2,mm0,228
+	pshufw	mm6,mm4,228
+	movq	mm1,mm0
+	movq	mm5,mm4
+	pshufw	mm0,mm0,177
+	pshufw	mm4,mm4,177
+	pslld	mm2,8
+	pslld	mm6,8
+	psrld	mm3,8
+	psrld	mm7,8
+	pxor	mm0,mm2
+	pxor	mm4,mm6
+	pxor	mm0,mm3
+	pxor	mm4,mm7
+	pslld	mm2,16
+	pslld	mm6,16
+	psrld	mm3,16
+	psrld	mm7,16
+	pxor	mm0,mm2
+	pxor	mm4,mm6
+	pxor	mm0,mm3
+	pxor	mm4,mm7
+	movq	mm3,[8+esp]
+	pxor	mm2,mm2
+	pxor	mm6,mm6
+	pcmpgtb	mm2,mm1
+	pcmpgtb	mm6,mm5
+	pand	mm2,mm3
+	pand	mm6,mm3
+	paddb	mm1,mm1
+	paddb	mm5,mm5
+	pxor	mm1,mm2
+	pxor	mm5,mm6
+	movq	mm3,mm1
+	movq	mm7,mm5
+	movq	mm2,mm1
+	movq	mm6,mm5
+	pxor	mm0,mm1
+	pxor	mm4,mm5
+	pslld	mm3,24
+	pslld	mm7,24
+	psrld	mm2,8
+	psrld	mm6,8
+	pxor	mm0,mm3
+	pxor	mm4,mm7
+	pxor	mm0,mm2
+	pxor	mm4,mm6
+	movq	mm2,[8+esp]
+	pxor	mm3,mm3
+	pxor	mm7,mm7
+	pcmpgtb	mm3,mm1
+	pcmpgtb	mm7,mm5
+	pand	mm3,mm2
+	pand	mm7,mm2
+	paddb	mm1,mm1
+	paddb	mm5,mm5
+	pxor	mm1,mm3
+	pxor	mm5,mm7
+	pshufw	mm3,mm1,177
+	pshufw	mm7,mm5,177
+	pxor	mm0,mm1
+	pxor	mm4,mm5
+	pxor	mm0,mm3
+	pxor	mm4,mm7
+	pxor	mm3,mm3
+	pxor	mm7,mm7
+	pcmpgtb	mm3,mm1
+	pcmpgtb	mm7,mm5
+	pand	mm3,mm2
+	pand	mm7,mm2
+	paddb	mm1,mm1
+	paddb	mm5,mm5
+	pxor	mm1,mm3
+	pxor	mm5,mm7
+	pxor	mm0,mm1
+	pxor	mm4,mm5
+	movq	mm3,mm1
+	movq	mm7,mm5
+	pshufw	mm2,mm1,177
+	pshufw	mm6,mm5,177
+	pxor	mm0,mm2
+	pxor	mm4,mm6
+	pslld	mm1,8
+	pslld	mm5,8
+	psrld	mm3,8
+	psrld	mm7,8
+	movq	mm2,[edi]
+	movq	mm6,[8+edi]
+	pxor	mm0,mm1
+	pxor	mm4,mm5
+	pxor	mm0,mm3
+	pxor	mm4,mm7
+	mov	eax,DWORD [ebp-128]
+	pslld	mm1,16
+	pslld	mm5,16
+	mov	ebx,DWORD [ebp-64]
+	psrld	mm3,16
+	psrld	mm7,16
+	mov	ecx,DWORD [ebp]
+	pxor	mm0,mm1
+	pxor	mm4,mm5
+	mov	edx,DWORD [64+ebp]
+	pxor	mm0,mm3
+	pxor	mm4,mm7
+	pxor	mm0,mm2
+	pxor	mm4,mm6
+	jmp	NEAR L$007loop
+align	16
+L$008out:
+	pxor	mm0,[edi]
+	pxor	mm4,[8+edi]
+	ret
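+; Table-driven decryption for the fast CBC path; the final round
+; switches from L$AES_Td to the inverse S-box copies stored 2048 bytes
+; past it (the lea ebp,[2176+ebp] / lea ebp,[ebp-128] dance below).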
+align	16
+__x86_AES_decrypt:
+	mov	DWORD [20+esp],edi
+	xor	eax,DWORD [edi]
+	xor	ebx,DWORD [4+edi]
+	xor	ecx,DWORD [8+edi]
+	xor	edx,DWORD [12+edi]
+	mov	esi,DWORD [240+edi]
+	lea	esi,[esi*1+esi-2]
+	lea	esi,[esi*8+edi]
+	mov	DWORD [24+esp],esi
+align	16
+L$009loop:
+	mov	esi,eax
+	and	esi,255
+	mov	esi,DWORD [esi*8+ebp]
+	movzx	edi,dh
+	xor	esi,DWORD [3+edi*8+ebp]
+	mov	edi,ecx
+	shr	edi,16
+	and	edi,255
+	xor	esi,DWORD [2+edi*8+ebp]
+	mov	edi,ebx
+	shr	edi,24
+	xor	esi,DWORD [1+edi*8+ebp]
+	mov	DWORD [4+esp],esi
+	mov	esi,ebx
+	and	esi,255
+	mov	esi,DWORD [esi*8+ebp]
+	movzx	edi,ah
+	xor	esi,DWORD [3+edi*8+ebp]
+	mov	edi,edx
+	shr	edi,16
+	and	edi,255
+	xor	esi,DWORD [2+edi*8+ebp]
+	mov	edi,ecx
+	shr	edi,24
+	xor	esi,DWORD [1+edi*8+ebp]
+	mov	DWORD [8+esp],esi
+	mov	esi,ecx
+	and	esi,255
+	mov	esi,DWORD [esi*8+ebp]
+	movzx	edi,bh
+	xor	esi,DWORD [3+edi*8+ebp]
+	mov	edi,eax
+	shr	edi,16
+	and	edi,255
+	xor	esi,DWORD [2+edi*8+ebp]
+	mov	edi,edx
+	shr	edi,24
+	xor	esi,DWORD [1+edi*8+ebp]
+	mov	edi,DWORD [20+esp]
+	and	edx,255
+	mov	edx,DWORD [edx*8+ebp]
+	movzx	ecx,ch
+	xor	edx,DWORD [3+ecx*8+ebp]
+	mov	ecx,esi
+	shr	ebx,16
+	and	ebx,255
+	xor	edx,DWORD [2+ebx*8+ebp]
+	mov	ebx,DWORD [8+esp]
+	shr	eax,24
+	xor	edx,DWORD [1+eax*8+ebp]
+	mov	eax,DWORD [4+esp]
+	add	edi,16
+	xor	eax,DWORD [edi]
+	xor	ebx,DWORD [4+edi]
+	xor	ecx,DWORD [8+edi]
+	xor	edx,DWORD [12+edi]
+	cmp	edi,DWORD [24+esp]
+	mov	DWORD [20+esp],edi
+	jb	NEAR L$009loop
+	lea	ebp,[2176+ebp]
+	mov	edi,DWORD [ebp-128]
+	mov	esi,DWORD [ebp-96]
+	mov	edi,DWORD [ebp-64]
+	mov	esi,DWORD [ebp-32]
+	mov	edi,DWORD [ebp]
+	mov	esi,DWORD [32+ebp]
+	mov	edi,DWORD [64+ebp]
+	mov	esi,DWORD [96+ebp]
+	lea	ebp,[ebp-128]
+	mov	esi,eax
+	and	esi,255
+	movzx	esi,BYTE [esi*1+ebp]
+	movzx	edi,dh
+	movzx	edi,BYTE [edi*1+ebp]
+	shl	edi,8
+	xor	esi,edi
+	mov	edi,ecx
+	shr	edi,16
+	and	edi,255
+	movzx	edi,BYTE [edi*1+ebp]
+	shl	edi,16
+	xor	esi,edi
+	mov	edi,ebx
+	shr	edi,24
+	movzx	edi,BYTE [edi*1+ebp]
+	shl	edi,24
+	xor	esi,edi
+	mov	DWORD [4+esp],esi
+	mov	esi,ebx
+	and	esi,255
+	movzx	esi,BYTE [esi*1+ebp]
+	movzx	edi,ah
+	movzx	edi,BYTE [edi*1+ebp]
+	shl	edi,8
+	xor	esi,edi
+	mov	edi,edx
+	shr	edi,16
+	and	edi,255
+	movzx	edi,BYTE [edi*1+ebp]
+	shl	edi,16
+	xor	esi,edi
+	mov	edi,ecx
+	shr	edi,24
+	movzx	edi,BYTE [edi*1+ebp]
+	shl	edi,24
+	xor	esi,edi
+	mov	DWORD [8+esp],esi
+	mov	esi,ecx
+	and	esi,255
+	movzx	esi,BYTE [esi*1+ebp]
+	movzx	edi,bh
+	movzx	edi,BYTE [edi*1+ebp]
+	shl	edi,8
+	xor	esi,edi
+	mov	edi,eax
+	shr	edi,16
+	and	edi,255
+	movzx	edi,BYTE [edi*1+ebp]
+	shl	edi,16
+	xor	esi,edi
+	mov	edi,edx
+	shr	edi,24
+	movzx	edi,BYTE [edi*1+ebp]
+	shl	edi,24
+	xor	esi,edi
+	mov	edi,DWORD [20+esp]
+	and	edx,255
+	movzx	edx,BYTE [edx*1+ebp]
+	movzx	ecx,ch
+	movzx	ecx,BYTE [ecx*1+ebp]
+	shl	ecx,8
+	xor	edx,ecx
+	mov	ecx,esi
+	shr	ebx,16
+	and	ebx,255
+	movzx	ebx,BYTE [ebx*1+ebp]
+	shl	ebx,16
+	xor	edx,ebx
+	mov	ebx,DWORD [8+esp]
+	shr	eax,24
+	movzx	eax,BYTE [eax*1+ebp]
+	shl	eax,24
+	xor	edx,eax
+	mov	eax,DWORD [4+esp]
+	lea	ebp,[ebp-2048]
+	add	edi,16
+	xor	eax,DWORD [edi]
+	xor	ebx,DWORD [4+edi]
+	xor	ecx,DWORD [8+edi]
+	xor	edx,DWORD [12+edi]
+	ret
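+; L$AES_Td: 256 doubled inverse T-table entries, then four 256-byte
+; copies of the inverse S-box.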
+align	64
+L$AES_Td:
+dd	1353184337,1353184337
+dd	1399144830,1399144830
+dd	3282310938,3282310938
+dd	2522752826,2522752826
+dd	3412831035,3412831035
+dd	4047871263,4047871263
+dd	2874735276,2874735276
+dd	2466505547,2466505547
+dd	1442459680,1442459680
+dd	4134368941,4134368941
+dd	2440481928,2440481928
+dd	625738485,625738485
+dd	4242007375,4242007375
+dd	3620416197,3620416197
+dd	2151953702,2151953702
+dd	2409849525,2409849525
+dd	1230680542,1230680542
+dd	1729870373,1729870373
+dd	2551114309,2551114309
+dd	3787521629,3787521629
+dd	41234371,41234371
+dd	317738113,317738113
+dd	2744600205,2744600205
+dd	3338261355,3338261355
+dd	3881799427,3881799427
+dd	2510066197,2510066197
+dd	3950669247,3950669247
+dd	3663286933,3663286933
+dd	763608788,763608788
+dd	3542185048,3542185048
+dd	694804553,694804553
+dd	1154009486,1154009486
+dd	1787413109,1787413109
+dd	2021232372,2021232372
+dd	1799248025,1799248025
+dd	3715217703,3715217703
+dd	3058688446,3058688446
+dd	397248752,397248752
+dd	1722556617,1722556617
+dd	3023752829,3023752829
+dd	407560035,407560035
+dd	2184256229,2184256229
+dd	1613975959,1613975959
+dd	1165972322,1165972322
+dd	3765920945,3765920945
+dd	2226023355,2226023355
+dd	480281086,480281086
+dd	2485848313,2485848313
+dd	1483229296,1483229296
+dd	436028815,436028815
+dd	2272059028,2272059028
+dd	3086515026,3086515026
+dd	601060267,601060267
+dd	3791801202,3791801202
+dd	1468997603,1468997603
+dd	715871590,715871590
+dd	120122290,120122290
+dd	63092015,63092015
+dd	2591802758,2591802758
+dd	2768779219,2768779219
+dd	4068943920,4068943920
+dd	2997206819,2997206819
+dd	3127509762,3127509762
+dd	1552029421,1552029421
+dd	723308426,723308426
+dd	2461301159,2461301159
+dd	4042393587,4042393587
+dd	2715969870,2715969870
+dd	3455375973,3455375973
+dd	3586000134,3586000134
+dd	526529745,526529745
+dd	2331944644,2331944644
+dd	2639474228,2639474228
+dd	2689987490,2689987490
+dd	853641733,853641733
+dd	1978398372,1978398372
+dd	971801355,971801355
+dd	2867814464,2867814464
+dd	111112542,111112542
+dd	1360031421,1360031421
+dd	4186579262,4186579262
+dd	1023860118,1023860118
+dd	2919579357,2919579357
+dd	1186850381,1186850381
+dd	3045938321,3045938321
+dd	90031217,90031217
+dd	1876166148,1876166148
+dd	4279586912,4279586912
+dd	620468249,620468249
+dd	2548678102,2548678102
+dd	3426959497,3426959497
+dd	2006899047,2006899047
+dd	3175278768,3175278768
+dd	2290845959,2290845959
+dd	945494503,945494503
+dd	3689859193,3689859193
+dd	1191869601,1191869601
+dd	3910091388,3910091388
+dd	3374220536,3374220536
+dd	0,0
+dd	2206629897,2206629897
+dd	1223502642,1223502642
+dd	2893025566,2893025566
+dd	1316117100,1316117100
+dd	4227796733,4227796733
+dd	1446544655,1446544655
+dd	517320253,517320253
+dd	658058550,658058550
+dd	1691946762,1691946762
+dd	564550760,564550760
+dd	3511966619,3511966619
+dd	976107044,976107044
+dd	2976320012,2976320012
+dd	266819475,266819475
+dd	3533106868,3533106868
+dd	2660342555,2660342555
+dd	1338359936,1338359936
+dd	2720062561,2720062561
+dd	1766553434,1766553434
+dd	370807324,370807324
+dd	179999714,179999714
+dd	3844776128,3844776128
+dd	1138762300,1138762300
+dd	488053522,488053522
+dd	185403662,185403662
+dd	2915535858,2915535858
+dd	3114841645,3114841645
+dd	3366526484,3366526484
+dd	2233069911,2233069911
+dd	1275557295,1275557295
+dd	3151862254,3151862254
+dd	4250959779,4250959779
+dd	2670068215,2670068215
+dd	3170202204,3170202204
+dd	3309004356,3309004356
+dd	880737115,880737115
+dd	1982415755,1982415755
+dd	3703972811,3703972811
+dd	1761406390,1761406390
+dd	1676797112,1676797112
+dd	3403428311,3403428311
+dd	277177154,277177154
+dd	1076008723,1076008723
+dd	538035844,538035844
+dd	2099530373,2099530373
+dd	4164795346,4164795346
+dd	288553390,288553390
+dd	1839278535,1839278535
+dd	1261411869,1261411869
+dd	4080055004,4080055004
+dd	3964831245,3964831245
+dd	3504587127,3504587127
+dd	1813426987,1813426987
+dd	2579067049,2579067049
+dd	4199060497,4199060497
+dd	577038663,577038663
+dd	3297574056,3297574056
+dd	440397984,440397984
+dd	3626794326,3626794326
+dd	4019204898,4019204898
+dd	3343796615,3343796615
+dd	3251714265,3251714265
+dd	4272081548,4272081548
+dd	906744984,906744984
+dd	3481400742,3481400742
+dd	685669029,685669029
+dd	646887386,646887386
+dd	2764025151,2764025151
+dd	3835509292,3835509292
+dd	227702864,227702864
+dd	2613862250,2613862250
+dd	1648787028,1648787028
+dd	3256061430,3256061430
+dd	3904428176,3904428176
+dd	1593260334,1593260334
+dd	4121936770,4121936770
+dd	3196083615,3196083615
+dd	2090061929,2090061929
+dd	2838353263,2838353263
+dd	3004310991,3004310991
+dd	999926984,999926984
+dd	2809993232,2809993232
+dd	1852021992,1852021992
+dd	2075868123,2075868123
+dd	158869197,158869197
+dd	4095236462,4095236462
+dd	28809964,28809964
+dd	2828685187,2828685187
+dd	1701746150,1701746150
+dd	2129067946,2129067946
+dd	147831841,147831841
+dd	3873969647,3873969647
+dd	3650873274,3650873274
+dd	3459673930,3459673930
+dd	3557400554,3557400554
+dd	3598495785,3598495785
+dd	2947720241,2947720241
+dd	824393514,824393514
+dd	815048134,815048134
+dd	3227951669,3227951669
+dd	935087732,935087732
+dd	2798289660,2798289660
+dd	2966458592,2966458592
+dd	366520115,366520115
+dd	1251476721,1251476721
+dd	4158319681,4158319681
+dd	240176511,240176511
+dd	804688151,804688151
+dd	2379631990,2379631990
+dd	1303441219,1303441219
+dd	1414376140,1414376140
+dd	3741619940,3741619940
+dd	3820343710,3820343710
+dd	461924940,461924940
+dd	3089050817,3089050817
+dd	2136040774,2136040774
+dd	82468509,82468509
+dd	1563790337,1563790337
+dd	1937016826,1937016826
+dd	776014843,776014843
+dd	1511876531,1511876531
+dd	1389550482,1389550482
+dd	861278441,861278441
+dd	323475053,323475053
+dd	2355222426,2355222426
+dd	2047648055,2047648055
+dd	2383738969,2383738969
+dd	2302415851,2302415851
+dd	3995576782,3995576782
+dd	902390199,902390199
+dd	3991215329,3991215329
+dd	1018251130,1018251130
+dd	1507840668,1507840668
+dd	1064563285,1064563285
+dd	2043548696,2043548696
+dd	3208103795,3208103795
+dd	3939366739,3939366739
+dd	1537932639,1537932639
+dd	342834655,342834655
+dd	2262516856,2262516856
+dd	2180231114,2180231114
+dd	1053059257,1053059257
+dd	741614648,741614648
+dd	1598071746,1598071746
+dd	1925389590,1925389590
+dd	203809468,203809468
+dd	2336832552,2336832552
+dd	1100287487,1100287487
+dd	1895934009,1895934009
+dd	3736275976,3736275976
+dd	2632234200,2632234200
+dd	2428589668,2428589668
+dd	1636092795,1636092795
+dd	1890988757,1890988757
+dd	1952214088,1952214088
+dd	1113045200,1113045200
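+; Four identical copies of the inverse S-box, selected the same way as
+; the forward copies.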
+db	82,9,106,213,48,54,165,56
+db	191,64,163,158,129,243,215,251
+db	124,227,57,130,155,47,255,135
+db	52,142,67,68,196,222,233,203
+db	84,123,148,50,166,194,35,61
+db	238,76,149,11,66,250,195,78
+db	8,46,161,102,40,217,36,178
+db	118,91,162,73,109,139,209,37
+db	114,248,246,100,134,104,152,22
+db	212,164,92,204,93,101,182,146
+db	108,112,72,80,253,237,185,218
+db	94,21,70,87,167,141,157,132
+db	144,216,171,0,140,188,211,10
+db	247,228,88,5,184,179,69,6
+db	208,44,30,143,202,63,15,2
+db	193,175,189,3,1,19,138,107
+db	58,145,17,65,79,103,220,234
+db	151,242,207,206,240,180,230,115
+db	150,172,116,34,231,173,53,133
+db	226,249,55,232,28,117,223,110
+db	71,241,26,113,29,41,197,137
+db	111,183,98,14,170,24,190,27
+db	252,86,62,75,198,210,121,32
+db	154,219,192,254,120,205,90,244
+db	31,221,168,51,136,7,199,49
+db	177,18,16,89,39,128,236,95
+db	96,81,127,169,25,181,74,13
+db	45,229,122,159,147,201,156,239
+db	160,224,59,77,174,42,245,176
+db	200,235,187,60,131,83,153,97
+db	23,43,4,126,186,119,214,38
+db	225,105,20,99,85,33,12,125
+db	82,9,106,213,48,54,165,56
+db	191,64,163,158,129,243,215,251
+db	124,227,57,130,155,47,255,135
+db	52,142,67,68,196,222,233,203
+db	84,123,148,50,166,194,35,61
+db	238,76,149,11,66,250,195,78
+db	8,46,161,102,40,217,36,178
+db	118,91,162,73,109,139,209,37
+db	114,248,246,100,134,104,152,22
+db	212,164,92,204,93,101,182,146
+db	108,112,72,80,253,237,185,218
+db	94,21,70,87,167,141,157,132
+db	144,216,171,0,140,188,211,10
+db	247,228,88,5,184,179,69,6
+db	208,44,30,143,202,63,15,2
+db	193,175,189,3,1,19,138,107
+db	58,145,17,65,79,103,220,234
+db	151,242,207,206,240,180,230,115
+db	150,172,116,34,231,173,53,133
+db	226,249,55,232,28,117,223,110
+db	71,241,26,113,29,41,197,137
+db	111,183,98,14,170,24,190,27
+db	252,86,62,75,198,210,121,32
+db	154,219,192,254,120,205,90,244
+db	31,221,168,51,136,7,199,49
+db	177,18,16,89,39,128,236,95
+db	96,81,127,169,25,181,74,13
+db	45,229,122,159,147,201,156,239
+db	160,224,59,77,174,42,245,176
+db	200,235,187,60,131,83,153,97
+db	23,43,4,126,186,119,214,38
+db	225,105,20,99,85,33,12,125
+db	82,9,106,213,48,54,165,56
+db	191,64,163,158,129,243,215,251
+db	124,227,57,130,155,47,255,135
+db	52,142,67,68,196,222,233,203
+db	84,123,148,50,166,194,35,61
+db	238,76,149,11,66,250,195,78
+db	8,46,161,102,40,217,36,178
+db	118,91,162,73,109,139,209,37
+db	114,248,246,100,134,104,152,22
+db	212,164,92,204,93,101,182,146
+db	108,112,72,80,253,237,185,218
+db	94,21,70,87,167,141,157,132
+db	144,216,171,0,140,188,211,10
+db	247,228,88,5,184,179,69,6
+db	208,44,30,143,202,63,15,2
+db	193,175,189,3,1,19,138,107
+db	58,145,17,65,79,103,220,234
+db	151,242,207,206,240,180,230,115
+db	150,172,116,34,231,173,53,133
+db	226,249,55,232,28,117,223,110
+db	71,241,26,113,29,41,197,137
+db	111,183,98,14,170,24,190,27
+db	252,86,62,75,198,210,121,32
+db	154,219,192,254,120,205,90,244
+db	31,221,168,51,136,7,199,49
+db	177,18,16,89,39,128,236,95
+db	96,81,127,169,25,181,74,13
+db	45,229,122,159,147,201,156,239
+db	160,224,59,77,174,42,245,176
+db	200,235,187,60,131,83,153,97
+db	23,43,4,126,186,119,214,38
+db	225,105,20,99,85,33,12,125
+db	82,9,106,213,48,54,165,56
+db	191,64,163,158,129,243,215,251
+db	124,227,57,130,155,47,255,135
+db	52,142,67,68,196,222,233,203
+db	84,123,148,50,166,194,35,61
+db	238,76,149,11,66,250,195,78
+db	8,46,161,102,40,217,36,178
+db	118,91,162,73,109,139,209,37
+db	114,248,246,100,134,104,152,22
+db	212,164,92,204,93,101,182,146
+db	108,112,72,80,253,237,185,218
+db	94,21,70,87,167,141,157,132
+db	144,216,171,0,140,188,211,10
+db	247,228,88,5,184,179,69,6
+db	208,44,30,143,202,63,15,2
+db	193,175,189,3,1,19,138,107
+db	58,145,17,65,79,103,220,234
+db	151,242,207,206,240,180,230,115
+db	150,172,116,34,231,173,53,133
+db	226,249,55,232,28,117,223,110
+db	71,241,26,113,29,41,197,137
+db	111,183,98,14,170,24,190,27
+db	252,86,62,75,198,210,121,32
+db	154,219,192,254,120,205,90,244
+db	31,221,168,51,136,7,199,49
+db	177,18,16,89,39,128,236,95
+db	96,81,127,169,25,181,74,13
+db	45,229,122,159,147,201,156,239
+db	160,224,59,77,174,42,245,176
+db	200,235,187,60,131,83,153,97
+db	23,43,4,126,186,119,214,38
+db	225,105,20,99,85,33,12,125
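+; aes_nohw_decrypt(in, out, key): same layout and SSE dispatch as
+; _aes_nohw_encrypt, but through the decrypt routines.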
+global	_aes_nohw_decrypt
+align	16
+_aes_nohw_decrypt:
+L$_aes_nohw_decrypt_begin:
+	push	ebp
+	push	ebx
+	push	esi
+	push	edi
+	mov	esi,DWORD [20+esp]
+	mov	edi,DWORD [28+esp]
+	mov	eax,esp
+	sub	esp,36
+	and	esp,-64
+	lea	ebx,[edi-127]
+	sub	ebx,esp
+	neg	ebx
+	and	ebx,960
+	sub	esp,ebx
+	add	esp,4
+	mov	DWORD [28+esp],eax
+	call	L$010pic_point
+L$010pic_point:
+	pop	ebp
+	lea	eax,[_OPENSSL_ia32cap_P]
+	lea	ebp,[(L$AES_Td-L$010pic_point)+ebp]
+	lea	ebx,[764+esp]
+	sub	ebx,ebp
+	and	ebx,768
+	lea	ebp,[2176+ebx*1+ebp]
+	bt	DWORD [eax],25
+	jnc	NEAR L$011x86
+	movq	mm0,[esi]
+	movq	mm4,[8+esi]
+	call	__sse_AES_decrypt_compact
+	mov	esp,DWORD [28+esp]
+	mov	esi,DWORD [24+esp]
+	movq	[esi],mm0
+	movq	[8+esi],mm4
+	emms
+	pop	edi
+	pop	esi
+	pop	ebx
+	pop	ebp
+	ret
+align	16
+L$011x86:
+	mov	DWORD [24+esp],ebp
+	mov	eax,DWORD [esi]
+	mov	ebx,DWORD [4+esi]
+	mov	ecx,DWORD [8+esi]
+	mov	edx,DWORD [12+esi]
+	call	__x86_AES_decrypt_compact
+	mov	esp,DWORD [28+esp]
+	mov	esi,DWORD [24+esp]
+	mov	DWORD [esi],eax
+	mov	DWORD [4+esi],ebx
+	mov	DWORD [8+esi],ecx
+	mov	DWORD [12+esi],edx
+	pop	edi
+	pop	esi
+	pop	ebx
+	pop	ebp
+	ret
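+; aes_nohw_cbc_encrypt(in, out, len, key, ivec, enc). Lengths of 512+
+; bytes that are a multiple of 16 take the fast T-table path (key and
+; tables are copied/prefetched onto the stack first); shorter, ragged,
+; or hyper-threaded (ia32cap bit 28) cases use the compact routines.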
+global	_aes_nohw_cbc_encrypt
+align	16
+_aes_nohw_cbc_encrypt:
+L$_aes_nohw_cbc_encrypt_begin:
+	push	ebp
+	push	ebx
+	push	esi
+	push	edi
+	mov	ecx,DWORD [28+esp]
+	cmp	ecx,0
+	je	NEAR L$012drop_out
+	call	L$013pic_point
+L$013pic_point:
+	pop	ebp
+	lea	eax,[_OPENSSL_ia32cap_P]
+	cmp	DWORD [40+esp],0
+	lea	ebp,[(L$AES_Te-L$013pic_point)+ebp]
+	jne	NEAR L$014picked_te
+	lea	ebp,[(L$AES_Td-L$AES_Te)+ebp]
+L$014picked_te:
+	pushfd
+	cld
+	cmp	ecx,512
+	jb	NEAR L$015slow_way
+	test	ecx,15
+	jnz	NEAR L$015slow_way
+	bt	DWORD [eax],28
+	jc	NEAR L$015slow_way
+	lea	esi,[esp-324]
+	and	esi,-64
+	mov	eax,ebp
+	lea	ebx,[2304+ebp]
+	mov	edx,esi
+	and	eax,4095
+	and	ebx,4095
+	and	edx,4095
+	cmp	edx,ebx
+	jb	NEAR L$016tbl_break_out
+	sub	edx,ebx
+	sub	esi,edx
+	jmp	NEAR L$017tbl_ok
+align	4
+L$016tbl_break_out:
+	sub	edx,eax
+	and	edx,4095
+	add	edx,384
+	sub	esi,edx
+align	4
+L$017tbl_ok:
+	lea	edx,[24+esp]
+	xchg	esp,esi
+	add	esp,4
+	mov	DWORD [24+esp],ebp
+	mov	DWORD [28+esp],esi
+	mov	eax,DWORD [edx]
+	mov	ebx,DWORD [4+edx]
+	mov	edi,DWORD [12+edx]
+	mov	esi,DWORD [16+edx]
+	mov	edx,DWORD [20+edx]
+	mov	DWORD [32+esp],eax
+	mov	DWORD [36+esp],ebx
+	mov	DWORD [40+esp],ecx
+	mov	DWORD [44+esp],edi
+	mov	DWORD [48+esp],esi
+	mov	DWORD [316+esp],0
+	mov	ebx,edi
+	mov	ecx,61
+	sub	ebx,ebp
+	mov	esi,edi
+	and	ebx,4095
+	lea	edi,[76+esp]
+	cmp	ebx,2304
+	jb	NEAR L$018do_copy
+	cmp	ebx,3852
+	jb	NEAR L$019skip_copy
+align	4
+L$018do_copy:
+	mov	DWORD [44+esp],edi
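+; 2784229001 is 0xA5F3F689, i.e. the bytes 89 F6 F3 A5: "mov esi,esi"
+; followed by "rep movsd", emitted as data to copy the key schedule onto
+; the stack. The rep stos* wipes later on use the same encoding trick.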
+dd	2784229001
+L$019skip_copy:
+	mov	edi,16
+align	4
+L$020prefetch_tbl:
+	mov	eax,DWORD [ebp]
+	mov	ebx,DWORD [32+ebp]
+	mov	ecx,DWORD [64+ebp]
+	mov	esi,DWORD [96+ebp]
+	lea	ebp,[128+ebp]
+	sub	edi,1
+	jnz	NEAR L$020prefetch_tbl
+	sub	ebp,2048
+	mov	esi,DWORD [32+esp]
+	mov	edi,DWORD [48+esp]
+	cmp	edx,0
+	je	NEAR L$021fast_decrypt
+	mov	eax,DWORD [edi]
+	mov	ebx,DWORD [4+edi]
+align	16
+L$022fast_enc_loop:
+	mov	ecx,DWORD [8+edi]
+	mov	edx,DWORD [12+edi]
+	xor	eax,DWORD [esi]
+	xor	ebx,DWORD [4+esi]
+	xor	ecx,DWORD [8+esi]
+	xor	edx,DWORD [12+esi]
+	mov	edi,DWORD [44+esp]
+	call	__x86_AES_encrypt
+	mov	esi,DWORD [32+esp]
+	mov	edi,DWORD [36+esp]
+	mov	DWORD [edi],eax
+	mov	DWORD [4+edi],ebx
+	mov	DWORD [8+edi],ecx
+	mov	DWORD [12+edi],edx
+	lea	esi,[16+esi]
+	mov	ecx,DWORD [40+esp]
+	mov	DWORD [32+esp],esi
+	lea	edx,[16+edi]
+	mov	DWORD [36+esp],edx
+	sub	ecx,16
+	mov	DWORD [40+esp],ecx
+	jnz	NEAR L$022fast_enc_loop
+	mov	esi,DWORD [48+esp]
+	mov	ecx,DWORD [8+edi]
+	mov	edx,DWORD [12+edi]
+	mov	DWORD [esi],eax
+	mov	DWORD [4+esi],ebx
+	mov	DWORD [8+esi],ecx
+	mov	DWORD [12+esi],edx
+	cmp	DWORD [316+esp],0
+	mov	edi,DWORD [44+esp]
+	je	NEAR L$023skip_ezero
+	mov	ecx,60
+	xor	eax,eax
+align	4
+dd	2884892297
+L$023skip_ezero:
+	mov	esp,DWORD [28+esp]
+	popfd
+L$012drop_out:
+	pop	edi
+	pop	esi
+	pop	ebx
+	pop	ebp
+	ret
+	pushfd
+align	16
+L$021fast_decrypt:
+	cmp	esi,DWORD [36+esp]
+	je	NEAR L$024fast_dec_in_place
+	mov	DWORD [52+esp],edi
+align	4
+align	16
+L$025fast_dec_loop:
+	mov	eax,DWORD [esi]
+	mov	ebx,DWORD [4+esi]
+	mov	ecx,DWORD [8+esi]
+	mov	edx,DWORD [12+esi]
+	mov	edi,DWORD [44+esp]
+	call	__x86_AES_decrypt
+	mov	edi,DWORD [52+esp]
+	mov	esi,DWORD [40+esp]
+	xor	eax,DWORD [edi]
+	xor	ebx,DWORD [4+edi]
+	xor	ecx,DWORD [8+edi]
+	xor	edx,DWORD [12+edi]
+	mov	edi,DWORD [36+esp]
+	mov	esi,DWORD [32+esp]
+	mov	DWORD [edi],eax
+	mov	DWORD [4+edi],ebx
+	mov	DWORD [8+edi],ecx
+	mov	DWORD [12+edi],edx
+	mov	ecx,DWORD [40+esp]
+	mov	DWORD [52+esp],esi
+	lea	esi,[16+esi]
+	mov	DWORD [32+esp],esi
+	lea	edi,[16+edi]
+	mov	DWORD [36+esp],edi
+	sub	ecx,16
+	mov	DWORD [40+esp],ecx
+	jnz	NEAR L$025fast_dec_loop
+	mov	edi,DWORD [52+esp]
+	mov	esi,DWORD [48+esp]
+	mov	eax,DWORD [edi]
+	mov	ebx,DWORD [4+edi]
+	mov	ecx,DWORD [8+edi]
+	mov	edx,DWORD [12+edi]
+	mov	DWORD [esi],eax
+	mov	DWORD [4+esi],ebx
+	mov	DWORD [8+esi],ecx
+	mov	DWORD [12+esi],edx
+	jmp	NEAR L$026fast_dec_out
+align	16
+L$024fast_dec_in_place:
+L$027fast_dec_in_place_loop:
+	mov	eax,DWORD [esi]
+	mov	ebx,DWORD [4+esi]
+	mov	ecx,DWORD [8+esi]
+	mov	edx,DWORD [12+esi]
+	lea	edi,[60+esp]
+	mov	DWORD [edi],eax
+	mov	DWORD [4+edi],ebx
+	mov	DWORD [8+edi],ecx
+	mov	DWORD [12+edi],edx
+	mov	edi,DWORD [44+esp]
+	call	__x86_AES_decrypt
+	mov	edi,DWORD [48+esp]
+	mov	esi,DWORD [36+esp]
+	xor	eax,DWORD [edi]
+	xor	ebx,DWORD [4+edi]
+	xor	ecx,DWORD [8+edi]
+	xor	edx,DWORD [12+edi]
+	mov	DWORD [esi],eax
+	mov	DWORD [4+esi],ebx
+	mov	DWORD [8+esi],ecx
+	mov	DWORD [12+esi],edx
+	lea	esi,[16+esi]
+	mov	DWORD [36+esp],esi
+	lea	esi,[60+esp]
+	mov	eax,DWORD [esi]
+	mov	ebx,DWORD [4+esi]
+	mov	ecx,DWORD [8+esi]
+	mov	edx,DWORD [12+esi]
+	mov	DWORD [edi],eax
+	mov	DWORD [4+edi],ebx
+	mov	DWORD [8+edi],ecx
+	mov	DWORD [12+edi],edx
+	mov	esi,DWORD [32+esp]
+	mov	ecx,DWORD [40+esp]
+	lea	esi,[16+esi]
+	mov	DWORD [32+esp],esi
+	sub	ecx,16
+	mov	DWORD [40+esp],ecx
+	jnz	NEAR L$027fast_dec_in_place_loop
+align	4
+L$026fast_dec_out:
+	cmp	DWORD [316+esp],0
+	mov	edi,DWORD [44+esp]
+	je	NEAR L$028skip_dzero
+	mov	ecx,60
+	xor	eax,eax
+align	4
+dd	2884892297
+L$028skip_dzero:
+	mov	esp,DWORD [28+esp]
+	popfd
+	pop	edi
+	pop	esi
+	pop	ebx
+	pop	ebp
+	ret
+	pushfd
+align	16
+L$015slow_way:
+	mov	eax,DWORD [eax]
+	mov	edi,DWORD [36+esp]
+	lea	esi,[esp-80]
+	and	esi,-64
+	lea	ebx,[edi-143]
+	sub	ebx,esi
+	neg	ebx
+	and	ebx,960
+	sub	esi,ebx
+	lea	ebx,[768+esi]
+	sub	ebx,ebp
+	and	ebx,768
+	lea	ebp,[2176+ebx*1+ebp]
+	lea	edx,[24+esp]
+	xchg	esp,esi
+	add	esp,4
+	mov	DWORD [24+esp],ebp
+	mov	DWORD [28+esp],esi
+	mov	DWORD [52+esp],eax
+	mov	eax,DWORD [edx]
+	mov	ebx,DWORD [4+edx]
+	mov	esi,DWORD [16+edx]
+	mov	edx,DWORD [20+edx]
+	mov	DWORD [32+esp],eax
+	mov	DWORD [36+esp],ebx
+	mov	DWORD [40+esp],ecx
+	mov	DWORD [44+esp],edi
+	mov	DWORD [48+esp],esi
+	mov	edi,esi
+	mov	esi,eax
+	cmp	edx,0
+	je	NEAR L$029slow_decrypt
+	cmp	ecx,16
+	mov	edx,ebx
+	jb	NEAR L$030slow_enc_tail
+	bt	DWORD [52+esp],25
+	jnc	NEAR L$031slow_enc_x86
+	movq	mm0,[edi]
+	movq	mm4,[8+edi]
+align	16
+L$032slow_enc_loop_sse:
+	pxor	mm0,[esi]
+	pxor	mm4,[8+esi]
+	mov	edi,DWORD [44+esp]
+	call	__sse_AES_encrypt_compact
+	mov	esi,DWORD [32+esp]
+	mov	edi,DWORD [36+esp]
+	mov	ecx,DWORD [40+esp]
+	movq	[edi],mm0
+	movq	[8+edi],mm4
+	lea	esi,[16+esi]
+	mov	DWORD [32+esp],esi
+	lea	edx,[16+edi]
+	mov	DWORD [36+esp],edx
+	sub	ecx,16
+	cmp	ecx,16
+	mov	DWORD [40+esp],ecx
+	jae	NEAR L$032slow_enc_loop_sse
+	test	ecx,15
+	jnz	NEAR L$030slow_enc_tail
+	mov	esi,DWORD [48+esp]
+	movq	[esi],mm0
+	movq	[8+esi],mm4
+	emms
+	mov	esp,DWORD [28+esp]
+	popfd
+	pop	edi
+	pop	esi
+	pop	ebx
+	pop	ebp
+	ret
+	pushfd
+align	16
+L$031slow_enc_x86:
+	mov	eax,DWORD [edi]
+	mov	ebx,DWORD [4+edi]
+align	4
+L$033slow_enc_loop_x86:
+	mov	ecx,DWORD [8+edi]
+	mov	edx,DWORD [12+edi]
+	xor	eax,DWORD [esi]
+	xor	ebx,DWORD [4+esi]
+	xor	ecx,DWORD [8+esi]
+	xor	edx,DWORD [12+esi]
+	mov	edi,DWORD [44+esp]
+	call	__x86_AES_encrypt_compact
+	mov	esi,DWORD [32+esp]
+	mov	edi,DWORD [36+esp]
+	mov	DWORD [edi],eax
+	mov	DWORD [4+edi],ebx
+	mov	DWORD [8+edi],ecx
+	mov	DWORD [12+edi],edx
+	mov	ecx,DWORD [40+esp]
+	lea	esi,[16+esi]
+	mov	DWORD [32+esp],esi
+	lea	edx,[16+edi]
+	mov	DWORD [36+esp],edx
+	sub	ecx,16
+	cmp	ecx,16
+	mov	DWORD [40+esp],ecx
+	jae	NEAR L$033slow_enc_loop_x86
+	test	ecx,15
+	jnz	NEAR L$030slow_enc_tail
+	mov	esi,DWORD [48+esp]
+	mov	ecx,DWORD [8+edi]
+	mov	edx,DWORD [12+edi]
+	mov	DWORD [esi],eax
+	mov	DWORD [4+esi],ebx
+	mov	DWORD [8+esi],ecx
+	mov	DWORD [12+esi],edx
+	mov	esp,DWORD [28+esp]
+	popfd
+	pop	edi
+	pop	esi
+	pop	ebx
+	pop	ebp
+	ret
+	pushfd
+align	16
+L$030slow_enc_tail:
+	emms
+	mov	edi,edx
+	mov	ebx,16
+	sub	ebx,ecx
+	cmp	edi,esi
+	je	NEAR L$034enc_in_place
+align	4
+dd	2767451785
+	jmp	NEAR L$035enc_skip_in_place
+L$034enc_in_place:
+	lea	edi,[ecx*1+edi]
+L$035enc_skip_in_place:
+	mov	ecx,ebx
+	xor	eax,eax
+align	4
+dd	2868115081
+	mov	edi,DWORD [48+esp]
+	mov	esi,edx
+	mov	eax,DWORD [edi]
+	mov	ebx,DWORD [4+edi]
+	mov	DWORD [40+esp],16
+	jmp	NEAR L$033slow_enc_loop_x86
+align	16
+L$029slow_decrypt:
+	bt	DWORD [52+esp],25
+	jnc	NEAR L$036slow_dec_loop_x86
+align	4
+L$037slow_dec_loop_sse:
+	movq	mm0,[esi]
+	movq	mm4,[8+esi]
+	mov	edi,DWORD [44+esp]
+	call	__sse_AES_decrypt_compact
+	mov	esi,DWORD [32+esp]
+	lea	eax,[60+esp]
+	mov	ebx,DWORD [36+esp]
+	mov	ecx,DWORD [40+esp]
+	mov	edi,DWORD [48+esp]
+	movq	mm1,[esi]
+	movq	mm5,[8+esi]
+	pxor	mm0,[edi]
+	pxor	mm4,[8+edi]
+	movq	[edi],mm1
+	movq	[8+edi],mm5
+	sub	ecx,16
+	jc	NEAR L$038slow_dec_partial_sse
+	movq	[ebx],mm0
+	movq	[8+ebx],mm4
+	lea	ebx,[16+ebx]
+	mov	DWORD [36+esp],ebx
+	lea	esi,[16+esi]
+	mov	DWORD [32+esp],esi
+	mov	DWORD [40+esp],ecx
+	jnz	NEAR L$037slow_dec_loop_sse
+	emms
+	mov	esp,DWORD [28+esp]
+	popfd
+	pop	edi
+	pop	esi
+	pop	ebx
+	pop	ebp
+	ret
+	pushfd
+align	16
+L$038slow_dec_partial_sse:
+	movq	[eax],mm0
+	movq	[8+eax],mm4
+	emms
+	add	ecx,16
+	mov	edi,ebx
+	mov	esi,eax
+align	4
+dd	2767451785
+	mov	esp,DWORD [28+esp]
+	popfd
+	pop	edi
+	pop	esi
+	pop	ebx
+	pop	ebp
+	ret
+	pushfd
+align	16
+L$036slow_dec_loop_x86:
+	mov	eax,DWORD [esi]
+	mov	ebx,DWORD [4+esi]
+	mov	ecx,DWORD [8+esi]
+	mov	edx,DWORD [12+esi]
+	lea	edi,[60+esp]
+	mov	DWORD [edi],eax
+	mov	DWORD [4+edi],ebx
+	mov	DWORD [8+edi],ecx
+	mov	DWORD [12+edi],edx
+	mov	edi,DWORD [44+esp]
+	call	__x86_AES_decrypt_compact
+	mov	edi,DWORD [48+esp]
+	mov	esi,DWORD [40+esp]
+	xor	eax,DWORD [edi]
+	xor	ebx,DWORD [4+edi]
+	xor	ecx,DWORD [8+edi]
+	xor	edx,DWORD [12+edi]
+	sub	esi,16
+	jc	NEAR L$039slow_dec_partial_x86
+	mov	DWORD [40+esp],esi
+	mov	esi,DWORD [36+esp]
+	mov	DWORD [esi],eax
+	mov	DWORD [4+esi],ebx
+	mov	DWORD [8+esi],ecx
+	mov	DWORD [12+esi],edx
+	lea	esi,[16+esi]
+	mov	DWORD [36+esp],esi
+	lea	esi,[60+esp]
+	mov	eax,DWORD [esi]
+	mov	ebx,DWORD [4+esi]
+	mov	ecx,DWORD [8+esi]
+	mov	edx,DWORD [12+esi]
+	mov	DWORD [edi],eax
+	mov	DWORD [4+edi],ebx
+	mov	DWORD [8+edi],ecx
+	mov	DWORD [12+edi],edx
+	mov	esi,DWORD [32+esp]
+	lea	esi,[16+esi]
+	mov	DWORD [32+esp],esi
+	jnz	NEAR L$036slow_dec_loop_x86
+	mov	esp,DWORD [28+esp]
+	popfd
+	pop	edi
+	pop	esi
+	pop	ebx
+	pop	ebp
+	ret
+	pushfd
+align	16
+L$039slow_dec_partial_x86:
+	lea	esi,[60+esp]
+	mov	DWORD [esi],eax
+	mov	DWORD [4+esi],ebx
+	mov	DWORD [8+esi],ecx
+	mov	DWORD [12+esi],edx
+	mov	esi,DWORD [32+esp]
+	mov	eax,DWORD [esi]
+	mov	ebx,DWORD [4+esi]
+	mov	ecx,DWORD [8+esi]
+	mov	edx,DWORD [12+esi]
+	mov	DWORD [edi],eax
+	mov	DWORD [4+edi],ebx
+	mov	DWORD [8+edi],ecx
+	mov	DWORD [12+edi],edx
+	mov	ecx,DWORD [40+esp]
+	mov	edi,DWORD [36+esp]
+	lea	esi,[60+esp]
+align	4
+dd	2767451785
+	mov	esp,DWORD [28+esp]
+	popfd
+	pop	edi
+	pop	esi
+	pop	ebx
+	pop	ebp
+	ret
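+; Key expansion: NULL-checks its arguments, then branches on the key
+; length (128/192/256 bits -> 10/12/14 rounds), building each round key
+; from S-box lookups and the rcon words stored after the S-box copies.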
+align	16
+__x86_AES_set_encrypt_key:
+	push	ebp
+	push	ebx
+	push	esi
+	push	edi
+	mov	esi,DWORD [24+esp]
+	mov	edi,DWORD [32+esp]
+	test	esi,-1
+	jz	NEAR L$040badpointer
+	test	edi,-1
+	jz	NEAR L$040badpointer
+	call	L$041pic_point
+L$041pic_point:
+	pop	ebp
+	lea	ebp,[(L$AES_Te-L$041pic_point)+ebp]
+	lea	ebp,[2176+ebp]
+	mov	eax,DWORD [ebp-128]
+	mov	ebx,DWORD [ebp-96]
+	mov	ecx,DWORD [ebp-64]
+	mov	edx,DWORD [ebp-32]
+	mov	eax,DWORD [ebp]
+	mov	ebx,DWORD [32+ebp]
+	mov	ecx,DWORD [64+ebp]
+	mov	edx,DWORD [96+ebp]
+	mov	ecx,DWORD [28+esp]
+	cmp	ecx,128
+	je	NEAR L$04210rounds
+	cmp	ecx,192
+	je	NEAR L$04312rounds
+	cmp	ecx,256
+	je	NEAR L$04414rounds
+	mov	eax,-2
+	jmp	NEAR L$045exit
+L$04210rounds:
+	mov	eax,DWORD [esi]
+	mov	ebx,DWORD [4+esi]
+	mov	ecx,DWORD [8+esi]
+	mov	edx,DWORD [12+esi]
+	mov	DWORD [edi],eax
+	mov	DWORD [4+edi],ebx
+	mov	DWORD [8+edi],ecx
+	mov	DWORD [12+edi],edx
+	xor	ecx,ecx
+	jmp	NEAR L$04610shortcut
+align	4
+L$04710loop:
+	mov	eax,DWORD [edi]
+	mov	edx,DWORD [12+edi]
+L$04610shortcut:
+	movzx	esi,dl
+	movzx	ebx,BYTE [esi*1+ebp-128]
+	movzx	esi,dh
+	shl	ebx,24
+	xor	eax,ebx
+	movzx	ebx,BYTE [esi*1+ebp-128]
+	shr	edx,16
+	movzx	esi,dl
+	xor	eax,ebx
+	movzx	ebx,BYTE [esi*1+ebp-128]
+	movzx	esi,dh
+	shl	ebx,8
+	xor	eax,ebx
+	movzx	ebx,BYTE [esi*1+ebp-128]
+	shl	ebx,16
+	xor	eax,ebx
+	xor	eax,DWORD [896+ecx*4+ebp]
+	mov	DWORD [16+edi],eax
+	xor	eax,DWORD [4+edi]
+	mov	DWORD [20+edi],eax
+	xor	eax,DWORD [8+edi]
+	mov	DWORD [24+edi],eax
+	xor	eax,DWORD [12+edi]
+	mov	DWORD [28+edi],eax
+	inc	ecx
+	add	edi,16
+	cmp	ecx,10
+	jl	NEAR L$04710loop
+	mov	DWORD [80+edi],10
+	xor	eax,eax
+	jmp	NEAR L$045exit
+L$04312rounds:
+	mov	eax,DWORD [esi]
+	mov	ebx,DWORD [4+esi]
+	mov	ecx,DWORD [8+esi]
+	mov	edx,DWORD [12+esi]
+	mov	DWORD [edi],eax
+	mov	DWORD [4+edi],ebx
+	mov	DWORD [8+edi],ecx
+	mov	DWORD [12+edi],edx
+	mov	ecx,DWORD [16+esi]
+	mov	edx,DWORD [20+esi]
+	mov	DWORD [16+edi],ecx
+	mov	DWORD [20+edi],edx
+	xor	ecx,ecx
+	jmp	NEAR L$04812shortcut
+align	4
+L$04912loop:
+	mov	eax,DWORD [edi]
+	mov	edx,DWORD [20+edi]
+L$04812shortcut:
+	movzx	esi,dl
+	movzx	ebx,BYTE [esi*1+ebp-128]
+	movzx	esi,dh
+	shl	ebx,24
+	xor	eax,ebx
+	movzx	ebx,BYTE [esi*1+ebp-128]
+	shr	edx,16
+	movzx	esi,dl
+	xor	eax,ebx
+	movzx	ebx,BYTE [esi*1+ebp-128]
+	movzx	esi,dh
+	shl	ebx,8
+	xor	eax,ebx
+	movzx	ebx,BYTE [esi*1+ebp-128]
+	shl	ebx,16
+	xor	eax,ebx
+	xor	eax,DWORD [896+ecx*4+ebp]
+	mov	DWORD [24+edi],eax
+	xor	eax,DWORD [4+edi]
+	mov	DWORD [28+edi],eax
+	xor	eax,DWORD [8+edi]
+	mov	DWORD [32+edi],eax
+	xor	eax,DWORD [12+edi]
+	mov	DWORD [36+edi],eax
+	cmp	ecx,7
+	je	NEAR L$05012break
+	inc	ecx
+	xor	eax,DWORD [16+edi]
+	mov	DWORD [40+edi],eax
+	xor	eax,DWORD [20+edi]
+	mov	DWORD [44+edi],eax
+	add	edi,24
+	jmp	NEAR L$04912loop
+L$05012break:
+	mov	DWORD [72+edi],12
+	xor	eax,eax
+	jmp	NEAR L$045exit
+L$04414rounds:
+	mov	eax,DWORD [esi]
+	mov	ebx,DWORD [4+esi]
+	mov	ecx,DWORD [8+esi]
+	mov	edx,DWORD [12+esi]
+	mov	DWORD [edi],eax
+	mov	DWORD [4+edi],ebx
+	mov	DWORD [8+edi],ecx
+	mov	DWORD [12+edi],edx
+	mov	eax,DWORD [16+esi]
+	mov	ebx,DWORD [20+esi]
+	mov	ecx,DWORD [24+esi]
+	mov	edx,DWORD [28+esi]
+	mov	DWORD [16+edi],eax
+	mov	DWORD [20+edi],ebx
+	mov	DWORD [24+edi],ecx
+	mov	DWORD [28+edi],edx
+	xor	ecx,ecx
+	jmp	NEAR L$05114shortcut
+align	4
+L$05214loop:
+	mov	edx,DWORD [28+edi]
+L$05114shortcut:
+	mov	eax,DWORD [edi]
+	movzx	esi,dl
+	movzx	ebx,BYTE [esi*1+ebp-128]
+	movzx	esi,dh
+	shl	ebx,24
+	xor	eax,ebx
+	movzx	ebx,BYTE [esi*1+ebp-128]
+	shr	edx,16
+	movzx	esi,dl
+	xor	eax,ebx
+	movzx	ebx,BYTE [esi*1+ebp-128]
+	movzx	esi,dh
+	shl	ebx,8
+	xor	eax,ebx
+	movzx	ebx,BYTE [esi*1+ebp-128]
+	shl	ebx,16
+	xor	eax,ebx
+	xor	eax,DWORD [896+ecx*4+ebp]
+	mov	DWORD [32+edi],eax
+	xor	eax,DWORD [4+edi]
+	mov	DWORD [36+edi],eax
+	xor	eax,DWORD [8+edi]
+	mov	DWORD [40+edi],eax
+	xor	eax,DWORD [12+edi]
+	mov	DWORD [44+edi],eax
+	cmp	ecx,6
+	je	NEAR L$05314break
+	inc	ecx
+	mov	edx,eax
+	mov	eax,DWORD [16+edi]
+	movzx	esi,dl
+	movzx	ebx,BYTE [esi*1+ebp-128]
+	movzx	esi,dh
+	xor	eax,ebx
+	movzx	ebx,BYTE [esi*1+ebp-128]
+	shr	edx,16
+	shl	ebx,8
+	movzx	esi,dl
+	xor	eax,ebx
+	movzx	ebx,BYTE [esi*1+ebp-128]
+	movzx	esi,dh
+	shl	ebx,16
+	xor	eax,ebx
+	movzx	ebx,BYTE [esi*1+ebp-128]
+	shl	ebx,24
+	xor	eax,ebx
+	mov	DWORD [48+edi],eax
+	xor	eax,DWORD [20+edi]
+	mov	DWORD [52+edi],eax
+	xor	eax,DWORD [24+edi]
+	mov	DWORD [56+edi],eax
+	xor	eax,DWORD [28+edi]
+	mov	DWORD [60+edi],eax
+	add	edi,32
+	jmp	NEAR L$05214loop
+L$05314break:
+	mov	DWORD [48+edi],14
+	xor	eax,eax
+	jmp	NEAR L$045exit
+L$040badpointer:
+	mov	eax,-1
+L$045exit:
+	pop	edi
+	pop	esi
+	pop	ebx
+	pop	ebp
+	ret
+global	_aes_nohw_set_encrypt_key
+align	16
+_aes_nohw_set_encrypt_key:
+L$_aes_nohw_set_encrypt_key_begin:
+	call	__x86_AES_set_encrypt_key
+	ret
+global	_aes_nohw_set_decrypt_key
+align	16
+_aes_nohw_set_decrypt_key:
+L$_aes_nohw_set_decrypt_key_begin:
+	call	__x86_AES_set_encrypt_key
+	cmp	eax,0
+	je	NEAR L$054proceed
+	ret
+L$054proceed:
+	push	ebp
+	push	ebx
+	push	esi
+	push	edi
+	mov	esi,DWORD [28+esp]
+	mov	ecx,DWORD [240+esi]
+	lea	ecx,[ecx*4]
+	lea	edi,[ecx*4+esi]
+align	4
+L$055invert:
+	mov	eax,DWORD [esi]
+	mov	ebx,DWORD [4+esi]
+	mov	ecx,DWORD [edi]
+	mov	edx,DWORD [4+edi]
+	mov	DWORD [edi],eax
+	mov	DWORD [4+edi],ebx
+	mov	DWORD [esi],ecx
+	mov	DWORD [4+esi],edx
+	mov	eax,DWORD [8+esi]
+	mov	ebx,DWORD [12+esi]
+	mov	ecx,DWORD [8+edi]
+	mov	edx,DWORD [12+edi]
+	mov	DWORD [8+edi],eax
+	mov	DWORD [12+edi],ebx
+	mov	DWORD [8+esi],ecx
+	mov	DWORD [12+esi],edx
+	add	esi,16
+	sub	edi,16
+	cmp	esi,edi
+	jne	NEAR L$055invert
+	mov	edi,DWORD [28+esp]
+	mov	esi,DWORD [240+edi]
+	lea	esi,[esi*1+esi-2]
+	lea	esi,[esi*8+edi]
+	mov	DWORD [28+esp],esi
+	mov	eax,DWORD [16+edi]
+align	4
+L$056permute:
+	add	edi,16
+	mov	ebp,2155905152
+	and	ebp,eax
+	lea	ebx,[eax*1+eax]
+	mov	esi,ebp
+	shr	ebp,7
+	sub	esi,ebp
+	and	ebx,4278124286
+	and	esi,454761243
+	xor	ebx,esi
+	mov	ebp,2155905152
+	and	ebp,ebx
+	lea	ecx,[ebx*1+ebx]
+	mov	esi,ebp
+	shr	ebp,7
+	sub	esi,ebp
+	and	ecx,4278124286
+	and	esi,454761243
+	xor	ebx,eax
+	xor	ecx,esi
+	mov	ebp,2155905152
+	and	ebp,ecx
+	lea	edx,[ecx*1+ecx]
+	mov	esi,ebp
+	shr	ebp,7
+	xor	ecx,eax
+	sub	esi,ebp
+	and	edx,4278124286
+	and	esi,454761243
+	rol	eax,8
+	xor	edx,esi
+	mov	ebp,DWORD [4+edi]
+	xor	eax,ebx
+	xor	ebx,edx
+	xor	eax,ecx
+	rol	ebx,24
+	xor	ecx,edx
+	xor	eax,edx
+	rol	ecx,16
+	xor	eax,ebx
+	rol	edx,8
+	xor	eax,ecx
+	mov	ebx,ebp
+	xor	eax,edx
+	mov	DWORD [edi],eax
+	mov	ebp,2155905152
+	and	ebp,ebx
+	lea	ecx,[ebx*1+ebx]
+	mov	esi,ebp
+	shr	ebp,7
+	sub	esi,ebp
+	and	ecx,4278124286
+	and	esi,454761243
+	xor	ecx,esi
+	mov	ebp,2155905152
+	and	ebp,ecx
+	lea	edx,[ecx*1+ecx]
+	mov	esi,ebp
+	shr	ebp,7
+	sub	esi,ebp
+	and	edx,4278124286
+	and	esi,454761243
+	xor	ecx,ebx
+	xor	edx,esi
+	mov	ebp,2155905152
+	and	ebp,edx
+	lea	eax,[edx*1+edx]
+	mov	esi,ebp
+	shr	ebp,7
+	xor	edx,ebx
+	sub	esi,ebp
+	and	eax,4278124286
+	and	esi,454761243
+	rol	ebx,8
+	xor	eax,esi
+	mov	ebp,DWORD [8+edi]
+	xor	ebx,ecx
+	xor	ecx,eax
+	xor	ebx,edx
+	rol	ecx,24
+	xor	edx,eax
+	xor	ebx,eax
+	rol	edx,16
+	xor	ebx,ecx
+	rol	eax,8
+	xor	ebx,edx
+	mov	ecx,ebp
+	xor	ebx,eax
+	mov	DWORD [4+edi],ebx
+	mov	ebp,2155905152
+	and	ebp,ecx
+	lea	edx,[ecx*1+ecx]
+	mov	esi,ebp
+	shr	ebp,7
+	sub	esi,ebp
+	and	edx,4278124286
+	and	esi,454761243
+	xor	edx,esi
+	mov	ebp,2155905152
+	and	ebp,edx
+	lea	eax,[edx*1+edx]
+	mov	esi,ebp
+	shr	ebp,7
+	sub	esi,ebp
+	and	eax,4278124286
+	and	esi,454761243
+	xor	edx,ecx
+	xor	eax,esi
+	mov	ebp,2155905152
+	and	ebp,eax
+	lea	ebx,[eax*1+eax]
+	mov	esi,ebp
+	shr	ebp,7
+	xor	eax,ecx
+	sub	esi,ebp
+	and	ebx,4278124286
+	and	esi,454761243
+	rol	ecx,8
+	xor	ebx,esi
+	mov	ebp,DWORD [12+edi]
+	xor	ecx,edx
+	xor	edx,ebx
+	xor	ecx,eax
+	rol	edx,24
+	xor	eax,ebx
+	xor	ecx,ebx
+	rol	eax,16
+	xor	ecx,edx
+	rol	ebx,8
+	xor	ecx,eax
+	mov	edx,ebp
+	xor	ecx,ebx
+	mov	DWORD [8+edi],ecx
+	mov	ebp,2155905152
+	and	ebp,edx
+	lea	eax,[edx*1+edx]
+	mov	esi,ebp
+	shr	ebp,7
+	sub	esi,ebp
+	and	eax,4278124286
+	and	esi,454761243
+	xor	eax,esi
+	mov	ebp,2155905152
+	and	ebp,eax
+	lea	ebx,[eax*1+eax]
+	mov	esi,ebp
+	shr	ebp,7
+	sub	esi,ebp
+	and	ebx,4278124286
+	and	esi,454761243
+	xor	eax,edx
+	xor	ebx,esi
+	mov	ebp,2155905152
+	and	ebp,ebx
+	lea	ecx,[ebx*1+ebx]
+	mov	esi,ebp
+	shr	ebp,7
+	xor	ebx,edx
+	sub	esi,ebp
+	and	ecx,4278124286
+	and	esi,454761243
+	rol	edx,8
+	xor	ecx,esi
+	mov	ebp,DWORD [16+edi]
+	xor	edx,eax
+	xor	eax,ecx
+	xor	edx,ebx
+	rol	eax,24
+	xor	ebx,ecx
+	xor	edx,ecx
+	rol	ebx,16
+	xor	edx,eax
+	rol	ecx,8
+	xor	edx,ebx
+	mov	eax,ebp
+	xor	edx,ecx
+	mov	DWORD [12+edi],edx
+	cmp	edi,DWORD [28+esp]
+	jb	NEAR L$056permute
+	xor	eax,eax
+	pop	edi
+	pop	esi
+	pop	ebx
+	pop	ebp
+	ret
+db	65,69,83,32,102,111,114,32,120,56,54,44,32,67,82,89
+db	80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
+db	111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+segment	.bss
+common	_OPENSSL_ia32cap_P 16
diff --git a/win-x86_64/crypto/fipsmodule/aes-x86_64.asm b/win-x86_64/crypto/fipsmodule/aes-x86_64.asm
new file mode 100644
index 0000000..329185e
--- /dev/null
+++ b/win-x86_64/crypto/fipsmodule/aes-x86_64.asm
@@ -0,0 +1,2962 @@
+; This file is generated from a similarly-named Perl script in the BoringSSL
+; source tree. Do not edit by hand.
+
+default	rel
+%define XMMWORD
+%define YMMWORD
+%define ZMMWORD
+
+%ifdef BORINGSSL_PREFIX
+%include "boringssl_prefix_symbols_nasm.inc"
+%endif
+section	.text code align=64
+
+
+ALIGN	16
+_x86_64_AES_encrypt:
+	xor	eax,DWORD[r15]
+	xor	ebx,DWORD[4+r15]
+	xor	ecx,DWORD[8+r15]
+	xor	edx,DWORD[12+r15]
+
+	mov	r13d,DWORD[240+r15]
+	sub	r13d,1
+	jmp	NEAR $L$enc_loop
+ALIGN	16
+$L$enc_loop:
+
+	movzx	esi,al
+	movzx	edi,bl
+	movzx	ebp,cl
+	mov	r10d,DWORD[rsi*8+r14]
+	mov	r11d,DWORD[rdi*8+r14]
+	mov	r12d,DWORD[rbp*8+r14]
+
+	movzx	esi,bh
+	movzx	edi,ch
+	movzx	ebp,dl
+	xor	r10d,DWORD[3+rsi*8+r14]
+	xor	r11d,DWORD[3+rdi*8+r14]
+	mov	r8d,DWORD[rbp*8+r14]
+
+	movzx	esi,dh
+	shr	ecx,16
+	movzx	ebp,ah
+	xor	r12d,DWORD[3+rsi*8+r14]
+	shr	edx,16
+	xor	r8d,DWORD[3+rbp*8+r14]
+
+	shr	ebx,16
+	lea	r15,[16+r15]
+	shr	eax,16
+
+	movzx	esi,cl
+	movzx	edi,dl
+	movzx	ebp,al
+	xor	r10d,DWORD[2+rsi*8+r14]
+	xor	r11d,DWORD[2+rdi*8+r14]
+	xor	r12d,DWORD[2+rbp*8+r14]
+
+	movzx	esi,dh
+	movzx	edi,ah
+	movzx	ebp,bl
+	xor	r10d,DWORD[1+rsi*8+r14]
+	xor	r11d,DWORD[1+rdi*8+r14]
+	xor	r8d,DWORD[2+rbp*8+r14]
+
+	mov	edx,DWORD[12+r15]
+	movzx	edi,bh
+	movzx	ebp,ch
+	mov	eax,DWORD[r15]
+	xor	r12d,DWORD[1+rdi*8+r14]
+	xor	r8d,DWORD[1+rbp*8+r14]
+
+	mov	ebx,DWORD[4+r15]
+	mov	ecx,DWORD[8+r15]
+	xor	eax,r10d
+	xor	ebx,r11d
+	xor	ecx,r12d
+	xor	edx,r8d
+	sub	r13d,1
+	jnz	NEAR $L$enc_loop
+	movzx	esi,al
+	movzx	edi,bl
+	movzx	ebp,cl
+	movzx	r10d,BYTE[2+rsi*8+r14]
+	movzx	r11d,BYTE[2+rdi*8+r14]
+	movzx	r12d,BYTE[2+rbp*8+r14]
+
+	movzx	esi,dl
+	movzx	edi,bh
+	movzx	ebp,ch
+	movzx	r8d,BYTE[2+rsi*8+r14]
+	mov	edi,DWORD[rdi*8+r14]
+	mov	ebp,DWORD[rbp*8+r14]
+
+	and	edi,0x0000ff00
+	and	ebp,0x0000ff00
+
+	xor	r10d,edi
+	xor	r11d,ebp
+	shr	ecx,16
+
+	movzx	esi,dh
+	movzx	edi,ah
+	shr	edx,16
+	mov	esi,DWORD[rsi*8+r14]
+	mov	edi,DWORD[rdi*8+r14]
+
+	and	esi,0x0000ff00
+	and	edi,0x0000ff00
+	shr	ebx,16
+	xor	r12d,esi
+	xor	r8d,edi
+	shr	eax,16
+
+	movzx	esi,cl
+	movzx	edi,dl
+	movzx	ebp,al
+	mov	esi,DWORD[rsi*8+r14]
+	mov	edi,DWORD[rdi*8+r14]
+	mov	ebp,DWORD[rbp*8+r14]
+
+	and	esi,0x00ff0000
+	and	edi,0x00ff0000
+	and	ebp,0x00ff0000
+
+	xor	r10d,esi
+	xor	r11d,edi
+	xor	r12d,ebp
+
+	movzx	esi,bl
+	movzx	edi,dh
+	movzx	ebp,ah
+	mov	esi,DWORD[rsi*8+r14]
+	mov	edi,DWORD[2+rdi*8+r14]
+	mov	ebp,DWORD[2+rbp*8+r14]
+
+	and	esi,0x00ff0000
+	and	edi,0xff000000
+	and	ebp,0xff000000
+
+	xor	r8d,esi
+	xor	r10d,edi
+	xor	r11d,ebp
+
+	movzx	esi,bh
+	movzx	edi,ch
+	mov	edx,DWORD[((16+12))+r15]
+	mov	esi,DWORD[2+rsi*8+r14]
+	mov	edi,DWORD[2+rdi*8+r14]
+	mov	eax,DWORD[((16+0))+r15]
+
+	and	esi,0xff000000
+	and	edi,0xff000000
+
+	xor	r12d,esi
+	xor	r8d,edi
+
+	mov	ebx,DWORD[((16+4))+r15]
+	mov	ecx,DWORD[((16+8))+r15]
+	xor	eax,r10d
+	xor	ebx,r11d
+	xor	ecx,r12d
+	xor	edx,r8d
+DB	0xf3,0xc3
+
+
+ALIGN	16
+_x86_64_AES_encrypt_compact:
+
+	lea	r8,[128+r14]
+	mov	edi,DWORD[((0-128))+r8]
+	mov	ebp,DWORD[((32-128))+r8]
+	mov	r10d,DWORD[((64-128))+r8]
+	mov	r11d,DWORD[((96-128))+r8]
+	mov	edi,DWORD[((128-128))+r8]
+	mov	ebp,DWORD[((160-128))+r8]
+	mov	r10d,DWORD[((192-128))+r8]
+	mov	r11d,DWORD[((224-128))+r8]
+	jmp	NEAR $L$enc_loop_compact
+ALIGN	16
+$L$enc_loop_compact:
+	xor	eax,DWORD[r15]
+	xor	ebx,DWORD[4+r15]
+	xor	ecx,DWORD[8+r15]
+	xor	edx,DWORD[12+r15]
+	lea	r15,[16+r15]
+	movzx	r10d,al
+	movzx	r11d,bl
+	movzx	r12d,cl
+	movzx	r8d,dl
+	movzx	esi,bh
+	movzx	edi,ch
+	shr	ecx,16
+	movzx	ebp,dh
+	movzx	r10d,BYTE[r10*1+r14]
+	movzx	r11d,BYTE[r11*1+r14]
+	movzx	r12d,BYTE[r12*1+r14]
+	movzx	r8d,BYTE[r8*1+r14]
+
+	movzx	r9d,BYTE[rsi*1+r14]
+	movzx	esi,ah
+	movzx	r13d,BYTE[rdi*1+r14]
+	movzx	edi,cl
+	movzx	ebp,BYTE[rbp*1+r14]
+	movzx	esi,BYTE[rsi*1+r14]
+
+	shl	r9d,8
+	shr	edx,16
+	shl	r13d,8
+	xor	r10d,r9d
+	shr	eax,16
+	movzx	r9d,dl
+	shr	ebx,16
+	xor	r11d,r13d
+	shl	ebp,8
+	movzx	r13d,al
+	movzx	edi,BYTE[rdi*1+r14]
+	xor	r12d,ebp
+
+	shl	esi,8
+	movzx	ebp,bl
+	shl	edi,16
+	xor	r8d,esi
+	movzx	r9d,BYTE[r9*1+r14]
+	movzx	esi,dh
+	movzx	r13d,BYTE[r13*1+r14]
+	xor	r10d,edi
+
+	shr	ecx,8
+	movzx	edi,ah
+	shl	r9d,16
+	shr	ebx,8
+	shl	r13d,16
+	xor	r11d,r9d
+	movzx	ebp,BYTE[rbp*1+r14]
+	movzx	esi,BYTE[rsi*1+r14]
+	movzx	edi,BYTE[rdi*1+r14]
+	movzx	edx,BYTE[rcx*1+r14]
+	movzx	ecx,BYTE[rbx*1+r14]
+
+	shl	ebp,16
+	xor	r12d,r13d
+	shl	esi,24
+	xor	r8d,ebp
+	shl	edi,24
+	xor	r10d,esi
+	shl	edx,24
+	xor	r11d,edi
+	shl	ecx,24
+	mov	eax,r10d
+	mov	ebx,r11d
+	xor	ecx,r12d
+	xor	edx,r8d
+	cmp	r15,QWORD[16+rsp]
+	je	NEAR $L$enc_compact_done
+	mov	r10d,0x80808080
+	mov	r11d,0x80808080
+	and	r10d,eax
+	and	r11d,ebx
+	mov	esi,r10d
+	mov	edi,r11d
+	shr	r10d,7
+	lea	r8d,[rax*1+rax]
+	shr	r11d,7
+	lea	r9d,[rbx*1+rbx]
+	sub	esi,r10d
+	sub	edi,r11d
+	and	r8d,0xfefefefe
+	and	r9d,0xfefefefe
+	and	esi,0x1b1b1b1b
+	and	edi,0x1b1b1b1b
+	mov	r10d,eax
+	mov	r11d,ebx
+	xor	r8d,esi
+	xor	r9d,edi
+
+	xor	eax,r8d
+	xor	ebx,r9d
+	mov	r12d,0x80808080
+	rol	eax,24
+	mov	ebp,0x80808080
+	rol	ebx,24
+	and	r12d,ecx
+	and	ebp,edx
+	xor	eax,r8d
+	xor	ebx,r9d
+	mov	esi,r12d
+	ror	r10d,16
+	mov	edi,ebp
+	ror	r11d,16
+	lea	r8d,[rcx*1+rcx]
+	shr	r12d,7
+	xor	eax,r10d
+	shr	ebp,7
+	xor	ebx,r11d
+	ror	r10d,8
+	lea	r9d,[rdx*1+rdx]
+	ror	r11d,8
+	sub	esi,r12d
+	sub	edi,ebp
+	xor	eax,r10d
+	xor	ebx,r11d
+
+	and	r8d,0xfefefefe
+	and	r9d,0xfefefefe
+	and	esi,0x1b1b1b1b
+	and	edi,0x1b1b1b1b
+	mov	r12d,ecx
+	mov	ebp,edx
+	xor	r8d,esi
+	xor	r9d,edi
+
+	ror	r12d,16
+	xor	ecx,r8d
+	ror	ebp,16
+	xor	edx,r9d
+	rol	ecx,24
+	mov	esi,DWORD[r14]
+	rol	edx,24
+	xor	ecx,r8d
+	mov	edi,DWORD[64+r14]
+	xor	edx,r9d
+	mov	r8d,DWORD[128+r14]
+	xor	ecx,r12d
+	ror	r12d,8
+	xor	edx,ebp
+	ror	ebp,8
+	xor	ecx,r12d
+	mov	r9d,DWORD[192+r14]
+	xor	edx,ebp
+	jmp	NEAR $L$enc_loop_compact
+ALIGN	16
+$L$enc_compact_done:
+	xor	eax,DWORD[r15]
+	xor	ebx,DWORD[4+r15]
+	xor	ecx,DWORD[8+r15]
+	xor	edx,DWORD[12+r15]
+DB	0xf3,0xc3
+
+
+ALIGN	16
+global	aes_nohw_encrypt
+
+
+aes_nohw_encrypt:
+	mov	QWORD[8+rsp],rdi	;WIN64 prologue
+	mov	QWORD[16+rsp],rsi
+	mov	rax,rsp
+$L$SEH_begin_aes_nohw_encrypt:
+	mov	rdi,rcx
+	mov	rsi,rdx
+	mov	rdx,r8
+
+
+
+	mov	rax,rsp
+
+	push	rbx
+
+	push	rbp
+
+	push	r12
+
+	push	r13
+
+	push	r14
+
+	push	r15
+
+
+
+	lea	rcx,[((-63))+rdx]
+	and	rsp,-64
+	sub	rcx,rsp
+	neg	rcx
+	and	rcx,0x3c0
+	sub	rsp,rcx
+	sub	rsp,32
+
+	mov	QWORD[16+rsp],rsi
+	mov	QWORD[24+rsp],rax
+
+$L$enc_prologue:
+
+	mov	r15,rdx
+	mov	r13d,DWORD[240+r15]
+
+	mov	eax,DWORD[rdi]
+	mov	ebx,DWORD[4+rdi]
+	mov	ecx,DWORD[8+rdi]
+	mov	edx,DWORD[12+rdi]
+
+	shl	r13d,4
+	lea	rbp,[r13*1+r15]
+	mov	QWORD[rsp],r15
+	mov	QWORD[8+rsp],rbp
+
+
+	lea	r14,[(($L$AES_Te+2048))]
+	lea	rbp,[768+rsp]
+	sub	rbp,r14
+	and	rbp,0x300
+	lea	r14,[rbp*1+r14]
+
+	call	_x86_64_AES_encrypt_compact
+
+	mov	r9,QWORD[16+rsp]
+	mov	rsi,QWORD[24+rsp]
+
+	mov	DWORD[r9],eax
+	mov	DWORD[4+r9],ebx
+	mov	DWORD[8+r9],ecx
+	mov	DWORD[12+r9],edx
+
+	mov	r15,QWORD[((-48))+rsi]
+
+	mov	r14,QWORD[((-40))+rsi]
+
+	mov	r13,QWORD[((-32))+rsi]
+
+	mov	r12,QWORD[((-24))+rsi]
+
+	mov	rbp,QWORD[((-16))+rsi]
+
+	mov	rbx,QWORD[((-8))+rsi]
+
+	lea	rsp,[rsi]
+
+$L$enc_epilogue:
+	mov	rdi,QWORD[8+rsp]	;WIN64 epilogue
+	mov	rsi,QWORD[16+rsp]
+	DB	0F3h,0C3h		;repret
+
+$L$SEH_end_aes_nohw_encrypt:
+
+ALIGN	16
+_x86_64_AES_decrypt:
+	xor	eax,DWORD[r15]
+	xor	ebx,DWORD[4+r15]
+	xor	ecx,DWORD[8+r15]
+	xor	edx,DWORD[12+r15]
+
+	mov	r13d,DWORD[240+r15]
+	sub	r13d,1
+	jmp	NEAR $L$dec_loop
+ALIGN	16
+$L$dec_loop:
+
+	movzx	esi,al
+	movzx	edi,bl
+	movzx	ebp,cl
+	mov	r10d,DWORD[rsi*8+r14]
+	mov	r11d,DWORD[rdi*8+r14]
+	mov	r12d,DWORD[rbp*8+r14]
+
+	movzx	esi,dh
+	movzx	edi,ah
+	movzx	ebp,dl
+	xor	r10d,DWORD[3+rsi*8+r14]
+	xor	r11d,DWORD[3+rdi*8+r14]
+	mov	r8d,DWORD[rbp*8+r14]
+
+	movzx	esi,bh
+	shr	eax,16
+	movzx	ebp,ch
+	xor	r12d,DWORD[3+rsi*8+r14]
+	shr	edx,16
+	xor	r8d,DWORD[3+rbp*8+r14]
+
+	shr	ebx,16
+	lea	r15,[16+r15]
+	shr	ecx,16
+
+	movzx	esi,cl
+	movzx	edi,dl
+	movzx	ebp,al
+	xor	r10d,DWORD[2+rsi*8+r14]
+	xor	r11d,DWORD[2+rdi*8+r14]
+	xor	r12d,DWORD[2+rbp*8+r14]
+
+	movzx	esi,bh
+	movzx	edi,ch
+	movzx	ebp,bl
+	xor	r10d,DWORD[1+rsi*8+r14]
+	xor	r11d,DWORD[1+rdi*8+r14]
+	xor	r8d,DWORD[2+rbp*8+r14]
+
+	movzx	esi,dh
+	mov	edx,DWORD[12+r15]
+	movzx	ebp,ah
+	xor	r12d,DWORD[1+rsi*8+r14]
+	mov	eax,DWORD[r15]
+	xor	r8d,DWORD[1+rbp*8+r14]
+
+	xor	eax,r10d
+	mov	ebx,DWORD[4+r15]
+	mov	ecx,DWORD[8+r15]
+	xor	ecx,r12d
+	xor	ebx,r11d
+	xor	edx,r8d
+	sub	r13d,1
+	jnz	NEAR $L$dec_loop
+	lea	r14,[2048+r14]
+	movzx	esi,al
+	movzx	edi,bl
+	movzx	ebp,cl
+	movzx	r10d,BYTE[rsi*1+r14]
+	movzx	r11d,BYTE[rdi*1+r14]
+	movzx	r12d,BYTE[rbp*1+r14]
+
+	movzx	esi,dl
+	movzx	edi,dh
+	movzx	ebp,ah
+	movzx	r8d,BYTE[rsi*1+r14]
+	movzx	edi,BYTE[rdi*1+r14]
+	movzx	ebp,BYTE[rbp*1+r14]
+
+	shl	edi,8
+	shl	ebp,8
+
+	xor	r10d,edi
+	xor	r11d,ebp
+	shr	edx,16
+
+	movzx	esi,bh
+	movzx	edi,ch
+	shr	eax,16
+	movzx	esi,BYTE[rsi*1+r14]
+	movzx	edi,BYTE[rdi*1+r14]
+
+	shl	esi,8
+	shl	edi,8
+	shr	ebx,16
+	xor	r12d,esi
+	xor	r8d,edi
+	shr	ecx,16
+
+	movzx	esi,cl
+	movzx	edi,dl
+	movzx	ebp,al
+	movzx	esi,BYTE[rsi*1+r14]
+	movzx	edi,BYTE[rdi*1+r14]
+	movzx	ebp,BYTE[rbp*1+r14]
+
+	shl	esi,16
+	shl	edi,16
+	shl	ebp,16
+
+	xor	r10d,esi
+	xor	r11d,edi
+	xor	r12d,ebp
+
+	movzx	esi,bl
+	movzx	edi,bh
+	movzx	ebp,ch
+	movzx	esi,BYTE[rsi*1+r14]
+	movzx	edi,BYTE[rdi*1+r14]
+	movzx	ebp,BYTE[rbp*1+r14]
+
+	shl	esi,16
+	shl	edi,24
+	shl	ebp,24
+
+	xor	r8d,esi
+	xor	r10d,edi
+	xor	r11d,ebp
+
+	movzx	esi,dh
+	movzx	edi,ah
+	mov	edx,DWORD[((16+12))+r15]
+	movzx	esi,BYTE[rsi*1+r14]
+	movzx	edi,BYTE[rdi*1+r14]
+	mov	eax,DWORD[((16+0))+r15]
+
+	shl	esi,24
+	shl	edi,24
+
+	xor	r12d,esi
+	xor	r8d,edi
+
+	mov	ebx,DWORD[((16+4))+r15]
+	mov	ecx,DWORD[((16+8))+r15]
+	lea	r14,[((-2048))+r14]
+	xor	eax,r10d
+	xor	ebx,r11d
+	xor	ecx,r12d
+	xor	edx,r8d
+DB	0xf3,0xc3
+
+
+ALIGN	16
+_x86_64_AES_decrypt_compact:
+
+	lea	r8,[128+r14]
+	mov	edi,DWORD[((0-128))+r8]
+	mov	ebp,DWORD[((32-128))+r8]
+	mov	r10d,DWORD[((64-128))+r8]
+	mov	r11d,DWORD[((96-128))+r8]
+	mov	edi,DWORD[((128-128))+r8]
+	mov	ebp,DWORD[((160-128))+r8]
+	mov	r10d,DWORD[((192-128))+r8]
+	mov	r11d,DWORD[((224-128))+r8]
+	jmp	NEAR $L$dec_loop_compact
+
+ALIGN	16
+$L$dec_loop_compact:
+	xor	eax,DWORD[r15]
+	xor	ebx,DWORD[4+r15]
+	xor	ecx,DWORD[8+r15]
+	xor	edx,DWORD[12+r15]
+	lea	r15,[16+r15]
+	movzx	r10d,al
+	movzx	r11d,bl
+	movzx	r12d,cl
+	movzx	r8d,dl
+	movzx	esi,dh
+	movzx	edi,ah
+	shr	edx,16
+	movzx	ebp,bh
+	movzx	r10d,BYTE[r10*1+r14]
+	movzx	r11d,BYTE[r11*1+r14]
+	movzx	r12d,BYTE[r12*1+r14]
+	movzx	r8d,BYTE[r8*1+r14]
+
+	movzx	r9d,BYTE[rsi*1+r14]
+	movzx	esi,ch
+	movzx	r13d,BYTE[rdi*1+r14]
+	movzx	ebp,BYTE[rbp*1+r14]
+	movzx	esi,BYTE[rsi*1+r14]
+
+	shr	ecx,16
+	shl	r13d,8
+	shl	r9d,8
+	movzx	edi,cl
+	shr	eax,16
+	xor	r10d,r9d
+	shr	ebx,16
+	movzx	r9d,dl
+
+	shl	ebp,8
+	xor	r11d,r13d
+	shl	esi,8
+	movzx	r13d,al
+	movzx	edi,BYTE[rdi*1+r14]
+	xor	r12d,ebp
+	movzx	ebp,bl
+
+	shl	edi,16
+	xor	r8d,esi
+	movzx	r9d,BYTE[r9*1+r14]
+	movzx	esi,bh
+	movzx	ebp,BYTE[rbp*1+r14]
+	xor	r10d,edi
+	movzx	r13d,BYTE[r13*1+r14]
+	movzx	edi,ch
+
+	shl	ebp,16
+	shl	r9d,16
+	shl	r13d,16
+	xor	r8d,ebp
+	movzx	ebp,dh
+	xor	r11d,r9d
+	shr	eax,8
+	xor	r12d,r13d
+
+	movzx	esi,BYTE[rsi*1+r14]
+	movzx	ebx,BYTE[rdi*1+r14]
+	movzx	ecx,BYTE[rbp*1+r14]
+	movzx	edx,BYTE[rax*1+r14]
+
+	mov	eax,r10d
+	shl	esi,24
+	shl	ebx,24
+	shl	ecx,24
+	xor	eax,esi
+	shl	edx,24
+	xor	ebx,r11d
+	xor	ecx,r12d
+	xor	edx,r8d
+	cmp	r15,QWORD[16+rsp]
+	je	NEAR $L$dec_compact_done
+
+	mov	rsi,QWORD[((256+0))+r14]
+	shl	rbx,32
+	shl	rdx,32
+	mov	rdi,QWORD[((256+8))+r14]
+	or	rax,rbx
+	or	rcx,rdx
+	mov	rbp,QWORD[((256+16))+r14]
+	mov	r9,rsi
+	mov	r12,rsi
+	and	r9,rax
+	and	r12,rcx
+	mov	rbx,r9
+	mov	rdx,r12
+	shr	r9,7
+	lea	r8,[rax*1+rax]
+	shr	r12,7
+	lea	r11,[rcx*1+rcx]
+	sub	rbx,r9
+	sub	rdx,r12
+	and	r8,rdi
+	and	r11,rdi
+	and	rbx,rbp
+	and	rdx,rbp
+	xor	r8,rbx
+	xor	r11,rdx
+	mov	r10,rsi
+	mov	r13,rsi
+
+	and	r10,r8
+	and	r13,r11
+	mov	rbx,r10
+	mov	rdx,r13
+	shr	r10,7
+	lea	r9,[r8*1+r8]
+	shr	r13,7
+	lea	r12,[r11*1+r11]
+	sub	rbx,r10
+	sub	rdx,r13
+	and	r9,rdi
+	and	r12,rdi
+	and	rbx,rbp
+	and	rdx,rbp
+	xor	r9,rbx
+	xor	r12,rdx
+	mov	r10,rsi
+	mov	r13,rsi
+
+	and	r10,r9
+	and	r13,r12
+	mov	rbx,r10
+	mov	rdx,r13
+	shr	r10,7
+	xor	r8,rax
+	shr	r13,7
+	xor	r11,rcx
+	sub	rbx,r10
+	sub	rdx,r13
+	lea	r10,[r9*1+r9]
+	lea	r13,[r12*1+r12]
+	xor	r9,rax
+	xor	r12,rcx
+	and	r10,rdi
+	and	r13,rdi
+	and	rbx,rbp
+	and	rdx,rbp
+	xor	r10,rbx
+	xor	r13,rdx
+
+	xor	rax,r10
+	xor	rcx,r13
+	xor	r8,r10
+	xor	r11,r13
+	mov	rbx,rax
+	mov	rdx,rcx
+	xor	r9,r10
+	shr	rbx,32
+	xor	r12,r13
+	shr	rdx,32
+	xor	r10,r8
+	rol	eax,8
+	xor	r13,r11
+	rol	ecx,8
+	xor	r10,r9
+	rol	ebx,8
+	xor	r13,r12
+
+	rol	edx,8
+	xor	eax,r10d
+	shr	r10,32
+	xor	ecx,r13d
+	shr	r13,32
+	xor	ebx,r10d
+	xor	edx,r13d
+
+	mov	r10,r8
+	rol	r8d,24
+	mov	r13,r11
+	rol	r11d,24
+	shr	r10,32
+	xor	eax,r8d
+	shr	r13,32
+	xor	ecx,r11d
+	rol	r10d,24
+	mov	r8,r9
+	rol	r13d,24
+	mov	r11,r12
+	shr	r8,32
+	xor	ebx,r10d
+	shr	r11,32
+	xor	edx,r13d
+
+	mov	rsi,QWORD[r14]
+	rol	r9d,16
+	mov	rdi,QWORD[64+r14]
+	rol	r12d,16
+	mov	rbp,QWORD[128+r14]
+	rol	r8d,16
+	mov	r10,QWORD[192+r14]
+	xor	eax,r9d
+	rol	r11d,16
+	xor	ecx,r12d
+	mov	r13,QWORD[256+r14]
+	xor	ebx,r8d
+	xor	edx,r11d
+	jmp	NEAR $L$dec_loop_compact
+ALIGN	16
+$L$dec_compact_done:
+	xor	eax,DWORD[r15]
+	xor	ebx,DWORD[4+r15]
+	xor	ecx,DWORD[8+r15]
+	xor	edx,DWORD[12+r15]
+DB	0xf3,0xc3
+
+
+ALIGN	16
+global	aes_nohw_decrypt
+
+
+aes_nohw_decrypt:
+	mov	QWORD[8+rsp],rdi	;WIN64 prologue
+	mov	QWORD[16+rsp],rsi
+	mov	rax,rsp
+$L$SEH_begin_aes_nohw_decrypt:
+	mov	rdi,rcx
+	mov	rsi,rdx
+	mov	rdx,r8
+
+
+
+	mov	rax,rsp
+
+	push	rbx
+
+	push	rbp
+
+	push	r12
+
+	push	r13
+
+	push	r14
+
+	push	r15
+
+
+
+	lea	rcx,[((-63))+rdx]
+	and	rsp,-64
+	sub	rcx,rsp
+	neg	rcx
+	and	rcx,0x3c0
+	sub	rsp,rcx
+	sub	rsp,32
+
+	mov	QWORD[16+rsp],rsi
+	mov	QWORD[24+rsp],rax
+
+$L$dec_prologue:
+
+	mov	r15,rdx
+	mov	r13d,DWORD[240+r15]
+
+	mov	eax,DWORD[rdi]
+	mov	ebx,DWORD[4+rdi]
+	mov	ecx,DWORD[8+rdi]
+	mov	edx,DWORD[12+rdi]
+
+	shl	r13d,4
+	lea	rbp,[r13*1+r15]
+	mov	QWORD[rsp],r15
+	mov	QWORD[8+rsp],rbp
+
+
+	lea	r14,[(($L$AES_Td+2048))]
+	lea	rbp,[768+rsp]
+	sub	rbp,r14
+	and	rbp,0x300
+	lea	r14,[rbp*1+r14]
+	shr	rbp,3
+	add	r14,rbp
+
+	call	_x86_64_AES_decrypt_compact
+
+	mov	r9,QWORD[16+rsp]
+	mov	rsi,QWORD[24+rsp]
+
+	mov	DWORD[r9],eax
+	mov	DWORD[4+r9],ebx
+	mov	DWORD[8+r9],ecx
+	mov	DWORD[12+r9],edx
+
+	mov	r15,QWORD[((-48))+rsi]
+
+	mov	r14,QWORD[((-40))+rsi]
+
+	mov	r13,QWORD[((-32))+rsi]
+
+	mov	r12,QWORD[((-24))+rsi]
+
+	mov	rbp,QWORD[((-16))+rsi]
+
+	mov	rbx,QWORD[((-8))+rsi]
+
+	lea	rsp,[rsi]
+
+$L$dec_epilogue:
+	mov	rdi,QWORD[8+rsp]	;WIN64 epilogue
+	mov	rsi,QWORD[16+rsp]
+	DB	0F3h,0C3h		;repret
+
+$L$SEH_end_aes_nohw_decrypt:
+ALIGN	16
+global	aes_nohw_set_encrypt_key
+
+aes_nohw_set_encrypt_key:
+	mov	QWORD[8+rsp],rdi	;WIN64 prologue
+	mov	QWORD[16+rsp],rsi
+	mov	rax,rsp
+$L$SEH_begin_aes_nohw_set_encrypt_key:
+	mov	rdi,rcx
+	mov	rsi,rdx
+	mov	rdx,r8
+
+
+
+	push	rbx
+
+	push	rbp
+
+	push	r12
+
+	push	r13
+
+	push	r14
+
+	push	r15
+
+	sub	rsp,8
+
+$L$enc_key_prologue:
+
+	call	_x86_64_AES_set_encrypt_key
+
+	mov	rbp,QWORD[40+rsp]
+
+	mov	rbx,QWORD[48+rsp]
+
+	add	rsp,56
+
+$L$enc_key_epilogue:
+	mov	rdi,QWORD[8+rsp]	;WIN64 epilogue
+	mov	rsi,QWORD[16+rsp]
+	DB	0F3h,0C3h		;repret
+
+$L$SEH_end_aes_nohw_set_encrypt_key:
+
+
+ALIGN	16
+_x86_64_AES_set_encrypt_key:
+
+	mov	ecx,esi
+	mov	rsi,rdi
+	mov	rdi,rdx
+
+	test	rsi,-1
+	jz	NEAR $L$badpointer
+	test	rdi,-1
+	jz	NEAR $L$badpointer
+
+	lea	rbp,[$L$AES_Te]
+	lea	rbp,[((2048+128))+rbp]
+
+
+	mov	eax,DWORD[((0-128))+rbp]
+	mov	ebx,DWORD[((32-128))+rbp]
+	mov	r8d,DWORD[((64-128))+rbp]
+	mov	edx,DWORD[((96-128))+rbp]
+	mov	eax,DWORD[((128-128))+rbp]
+	mov	ebx,DWORD[((160-128))+rbp]
+	mov	r8d,DWORD[((192-128))+rbp]
+	mov	edx,DWORD[((224-128))+rbp]
+
+	cmp	ecx,128
+	je	NEAR $L$10rounds
+	cmp	ecx,192
+	je	NEAR $L$12rounds
+	cmp	ecx,256
+	je	NEAR $L$14rounds
+	mov	rax,-2
+	jmp	NEAR $L$exit
+
+$L$10rounds:
+	mov	rax,QWORD[rsi]
+	mov	rdx,QWORD[8+rsi]
+	mov	QWORD[rdi],rax
+	mov	QWORD[8+rdi],rdx
+
+	shr	rdx,32
+	xor	ecx,ecx
+	jmp	NEAR $L$10shortcut
+ALIGN	4
+$L$10loop:
+	mov	eax,DWORD[rdi]
+	mov	edx,DWORD[12+rdi]
+$L$10shortcut:
+	movzx	esi,dl
+	movzx	ebx,BYTE[((-128))+rsi*1+rbp]
+	movzx	esi,dh
+	shl	ebx,24
+	xor	eax,ebx
+
+	movzx	ebx,BYTE[((-128))+rsi*1+rbp]
+	shr	edx,16
+	movzx	esi,dl
+	xor	eax,ebx
+
+	movzx	ebx,BYTE[((-128))+rsi*1+rbp]
+	movzx	esi,dh
+	shl	ebx,8
+	xor	eax,ebx
+
+	movzx	ebx,BYTE[((-128))+rsi*1+rbp]
+	shl	ebx,16
+	xor	eax,ebx
+
+	xor	eax,DWORD[((1024-128))+rcx*4+rbp]
+	mov	DWORD[16+rdi],eax
+	xor	eax,DWORD[4+rdi]
+	mov	DWORD[20+rdi],eax
+	xor	eax,DWORD[8+rdi]
+	mov	DWORD[24+rdi],eax
+	xor	eax,DWORD[12+rdi]
+	mov	DWORD[28+rdi],eax
+	add	ecx,1
+	lea	rdi,[16+rdi]
+	cmp	ecx,10
+	jl	NEAR $L$10loop
+
+	mov	DWORD[80+rdi],10
+	xor	rax,rax
+	jmp	NEAR $L$exit
+
+$L$12rounds:
+	mov	rax,QWORD[rsi]
+	mov	rbx,QWORD[8+rsi]
+	mov	rdx,QWORD[16+rsi]
+	mov	QWORD[rdi],rax
+	mov	QWORD[8+rdi],rbx
+	mov	QWORD[16+r