[Midnightbsd-cvs] src [11612] trunk/secure/lib/libcrypto/i386: fixup

laffer1 at midnightbsd.org
Sun Jul 8 12:31:11 EDT 2018


Revision: 11612
          http://svnweb.midnightbsd.org/src/?rev=11612
Author:   laffer1
Date:     2018-07-08 12:31:10 -0400 (Sun, 08 Jul 2018)
Log Message:
-----------
fixup

Added Paths:
-----------
    trunk/secure/lib/libcrypto/i386/aes-586.S
    trunk/secure/lib/libcrypto/i386/aesni-x86.S
    trunk/secure/lib/libcrypto/i386/bf-586.S
    trunk/secure/lib/libcrypto/i386/bf-686.S
    trunk/secure/lib/libcrypto/i386/bn-586.S
    trunk/secure/lib/libcrypto/i386/cast-586.S
    trunk/secure/lib/libcrypto/i386/cmll-x86.S
    trunk/secure/lib/libcrypto/i386/co-586.S
    trunk/secure/lib/libcrypto/i386/crypt586.S
    trunk/secure/lib/libcrypto/i386/des-586.S
    trunk/secure/lib/libcrypto/i386/ghash-x86.S
    trunk/secure/lib/libcrypto/i386/md5-586.S
    trunk/secure/lib/libcrypto/i386/rc4-586.S
    trunk/secure/lib/libcrypto/i386/rc5-586.S
    trunk/secure/lib/libcrypto/i386/rmd-586.S
    trunk/secure/lib/libcrypto/i386/sha1-586.S
    trunk/secure/lib/libcrypto/i386/sha256-586.S
    trunk/secure/lib/libcrypto/i386/sha512-586.S
    trunk/secure/lib/libcrypto/i386/vpaes-x86.S
    trunk/secure/lib/libcrypto/i386/wp-mmx.S
    trunk/secure/lib/libcrypto/i386/x86-gf2m.S
    trunk/secure/lib/libcrypto/i386/x86-mont.S
    trunk/secure/lib/libcrypto/i386/x86cpuid.S

Removed Paths:
-------------
    trunk/secure/lib/libcrypto/i386/aes-586.s
    trunk/secure/lib/libcrypto/i386/aesni-x86.s
    trunk/secure/lib/libcrypto/i386/bf-586.s
    trunk/secure/lib/libcrypto/i386/bf-686.s
    trunk/secure/lib/libcrypto/i386/bn-586.s
    trunk/secure/lib/libcrypto/i386/cast-586.s
    trunk/secure/lib/libcrypto/i386/cmll-x86.s
    trunk/secure/lib/libcrypto/i386/co-586.s
    trunk/secure/lib/libcrypto/i386/crypt586.s
    trunk/secure/lib/libcrypto/i386/des-586.s
    trunk/secure/lib/libcrypto/i386/ghash-x86.s
    trunk/secure/lib/libcrypto/i386/md5-586.s
    trunk/secure/lib/libcrypto/i386/rc4-586.s
    trunk/secure/lib/libcrypto/i386/rc5-586.s
    trunk/secure/lib/libcrypto/i386/rmd-586.s
    trunk/secure/lib/libcrypto/i386/sha1-586.s
    trunk/secure/lib/libcrypto/i386/sha256-586.s
    trunk/secure/lib/libcrypto/i386/sha512-586.s
    trunk/secure/lib/libcrypto/i386/vpaes-x86.s
    trunk/secure/lib/libcrypto/i386/wp-mmx.s
    trunk/secure/lib/libcrypto/i386/x86-gf2m.s
    trunk/secure/lib/libcrypto/i386/x86-mont.s
    trunk/secure/lib/libcrypto/i386/x86cpuid.s
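
Note: each lowercase .s file removed above is replaced by an uppercase .S file of the same name. With the GNU toolchain the .S suffix means the source is run through the C preprocessor before assembly, which is what allows the regenerated files to carry guards such as the #ifdef PIC at the top of aes-586.S below.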

Added: trunk/secure/lib/libcrypto/i386/aes-586.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/aes-586.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/aes-586.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,6481 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/aes-586.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from aes-586.pl.
+#ifdef PIC
+.file	"aes-586.S"
+.text
+.type	_x86_AES_encrypt_compact,@function
+.align	16
+_x86_AES_encrypt_compact:
+	movl	%edi,20(%esp)
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+	movl	-128(%ebp),%edi
+	movl	-96(%ebp),%esi
+	movl	-64(%ebp),%edi
+	movl	-32(%ebp),%esi
+	movl	(%ebp),%edi
+	movl	32(%ebp),%esi
+	movl	64(%ebp),%edi
+	movl	96(%ebp),%esi
+.align	16
+.L000loop:
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+
+	movl	%ebx,%esi
+	andl	$255,%esi
+	shrl	$16,%ebx
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%ch,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+
+	movl	%ecx,%esi
+	andl	$255,%esi
+	shrl	$24,%ecx
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edx
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+
+	andl	$255,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	movzbl	%ah,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$8,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	movzbl	-128(%ebp,%ecx,1),%ecx
+	shll	$24,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+
+	movl	%ecx,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%ecx,%ecx,1),%edi
+	subl	%ebp,%esi
+	andl	$4278124286,%edi
+	andl	$454761243,%esi
+	movl	%ecx,%ebp
+	xorl	%edi,%esi
+	xorl	%esi,%ecx
+	roll	$24,%ecx
+	xorl	%esi,%ecx
+	rorl	$16,%ebp
+	xorl	%ebp,%ecx
+	rorl	$8,%ebp
+	xorl	%ebp,%ecx
+	movl	%edx,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%edx,%edx,1),%edi
+	subl	%ebp,%esi
+	andl	$4278124286,%edi
+	andl	$454761243,%esi
+	movl	%edx,%ebp
+	xorl	%edi,%esi
+	xorl	%esi,%edx
+	roll	$24,%edx
+	xorl	%esi,%edx
+	rorl	$16,%ebp
+	xorl	%ebp,%edx
+	rorl	$8,%ebp
+	xorl	%ebp,%edx
+	movl	%eax,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%eax,%eax,1),%edi
+	subl	%ebp,%esi
+	andl	$4278124286,%edi
+	andl	$454761243,%esi
+	movl	%eax,%ebp
+	xorl	%edi,%esi
+	xorl	%esi,%eax
+	roll	$24,%eax
+	xorl	%esi,%eax
+	rorl	$16,%ebp
+	xorl	%ebp,%eax
+	rorl	$8,%ebp
+	xorl	%ebp,%eax
+	movl	%ebx,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%ebx,%ebx,1),%edi
+	subl	%ebp,%esi
+	andl	$4278124286,%edi
+	andl	$454761243,%esi
+	movl	%ebx,%ebp
+	xorl	%edi,%esi
+	xorl	%esi,%ebx
+	roll	$24,%ebx
+	xorl	%esi,%ebx
+	rorl	$16,%ebp
+	xorl	%ebp,%ebx
+	rorl	$8,%ebp
+	xorl	%ebp,%ebx
+	movl	20(%esp),%edi
+	movl	28(%esp),%ebp
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	cmpl	24(%esp),%edi
+	movl	%edi,20(%esp)
+	jb	.L000loop
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+
+	movl	%ebx,%esi
+	andl	$255,%esi
+	shrl	$16,%ebx
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%ch,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+
+	movl	%ecx,%esi
+	andl	$255,%esi
+	shrl	$24,%ecx
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edx
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	movzbl	%ah,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$8,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	movzbl	-128(%ebp,%ecx,1),%ecx
+	shll	$24,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+
+	xorl	16(%edi),%eax
+	xorl	20(%edi),%ebx
+	xorl	24(%edi),%ecx
+	xorl	28(%edi),%edx
+	ret
+.size	_x86_AES_encrypt_compact,.-_x86_AES_encrypt_compact
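
The mask constants in the rounds above (2155905152 = 0x80808080, 4278124286 = 0xfefefefe, 454761243 = 0x1b1b1b1b) implement four byte-wise GF(2^8) doublings ("xtime") packed into one 32-bit word. A minimal C sketch of the same dataflow, not part of the committed file:

    #include <stdint.h>

    /* Double all four packed bytes of x in GF(2^8) modulo the AES
     * polynomial, mirroring the andl/leal/subl/andl/xorl sequence in
     * _x86_AES_encrypt_compact: hi marks bytes whose top bit is set,
     * and (hi - (hi >> 7)) turns each 0x80 into 0x7f so the 0x1b
     * reduction can simply be masked in. */
    static uint32_t xtime4(uint32_t x)
    {
        uint32_t hi = x & 0x80808080u;                /* andl $2155905152 */
        uint32_t lo = (x + x) & 0xfefefefeu;          /* leal + andl $4278124286 */
        return lo ^ ((hi - (hi >> 7)) & 0x1b1b1b1bu); /* subl + andl $454761243 */
    }
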
+.type	_sse_AES_encrypt_compact,@function
+.align	16
+_sse_AES_encrypt_compact:
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+	movl	$454761243,%eax
+	movl	%eax,8(%esp)
+	movl	%eax,12(%esp)
+	movl	-128(%ebp),%eax
+	movl	-96(%ebp),%ebx
+	movl	-64(%ebp),%ecx
+	movl	-32(%ebp),%edx
+	movl	(%ebp),%eax
+	movl	32(%ebp),%ebx
+	movl	64(%ebp),%ecx
+	movl	96(%ebp),%edx
+.align	16
+.L001loop:
+	pshufw	$8,%mm0,%mm1
+	pshufw	$13,%mm4,%mm5
+	movd	%mm1,%eax
+	movd	%mm5,%ebx
+	movzbl	%al,%esi
+	movzbl	-128(%ebp,%esi,1),%ecx
+	pshufw	$13,%mm0,%mm2
+	movzbl	%ah,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	shll	$8,%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$16,%esi
+	orl	%esi,%ecx
+	pshufw	$8,%mm4,%mm6
+	movzbl	%bh,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$24,%esi
+	orl	%esi,%edx
+	shrl	$16,%ebx
+	movzbl	%ah,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$8,%esi
+	orl	%esi,%ecx
+	movzbl	%bh,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$24,%esi
+	orl	%esi,%ecx
+	movd	%ecx,%mm0
+	movzbl	%al,%esi
+	movzbl	-128(%ebp,%esi,1),%ecx
+	movd	%mm2,%eax
+	movzbl	%bl,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$16,%esi
+	orl	%esi,%ecx
+	movd	%mm6,%ebx
+	movzbl	%ah,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$24,%esi
+	orl	%esi,%ecx
+	movzbl	%bh,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$8,%esi
+	orl	%esi,%ecx
+	movd	%ecx,%mm1
+	movzbl	%bl,%esi
+	movzbl	-128(%ebp,%esi,1),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$16,%esi
+	orl	%esi,%ecx
+	shrl	$16,%eax
+	punpckldq	%mm1,%mm0
+	movzbl	%ah,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$24,%esi
+	orl	%esi,%ecx
+	andl	$255,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$16,%eax
+	orl	%eax,%edx
+	movzbl	%bh,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$8,%esi
+	orl	%esi,%ecx
+	movd	%ecx,%mm4
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	orl	%ebx,%edx
+	movd	%edx,%mm5
+	punpckldq	%mm5,%mm4
+	addl	$16,%edi
+	cmpl	24(%esp),%edi
+	ja	.L002out
+	movq	8(%esp),%mm2
+	pxor	%mm3,%mm3
+	pxor	%mm7,%mm7
+	movq	%mm0,%mm1
+	movq	%mm4,%mm5
+	pcmpgtb	%mm0,%mm3
+	pcmpgtb	%mm4,%mm7
+	pand	%mm2,%mm3
+	pand	%mm2,%mm7
+	pshufw	$177,%mm0,%mm2
+	pshufw	$177,%mm4,%mm6
+	paddb	%mm0,%mm0
+	paddb	%mm4,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pshufw	$177,%mm2,%mm3
+	pshufw	$177,%mm6,%mm7
+	pxor	%mm0,%mm1
+	pxor	%mm4,%mm5
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	movq	%mm3,%mm2
+	movq	%mm7,%mm6
+	pslld	$8,%mm3
+	pslld	$8,%mm7
+	psrld	$24,%mm2
+	psrld	$24,%mm6
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	movq	%mm1,%mm3
+	movq	%mm5,%mm7
+	movq	(%edi),%mm2
+	movq	8(%edi),%mm6
+	psrld	$8,%mm1
+	psrld	$8,%mm5
+	movl	-128(%ebp),%eax
+	pslld	$24,%mm3
+	pslld	$24,%mm7
+	movl	-64(%ebp),%ebx
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	movl	(%ebp),%ecx
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	movl	64(%ebp),%edx
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	jmp	.L001loop
+.align	16
+.L002out:
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	ret
+.size	_sse_AES_encrypt_compact,.-_sse_AES_encrypt_compact
+.type	_x86_AES_encrypt,@function
+.align	16
+_x86_AES_encrypt:
+	movl	%edi,20(%esp)
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+.align	16
+.L003loop:
+	movl	%eax,%esi
+	andl	$255,%esi
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%bh,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+	movl	%esi,4(%esp)
+
+	movl	%ebx,%esi
+	andl	$255,%esi
+	shrl	$16,%ebx
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%ch,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%eax,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+	movl	%esi,8(%esp)
+
+	movl	%ecx,%esi
+	andl	$255,%esi
+	shrl	$24,%ecx
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%dh,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edx
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movzbl	%bh,%edi
+	xorl	1(%ebp,%edi,8),%esi
+
+	movl	20(%esp),%edi
+	movl	(%ebp,%edx,8),%edx
+	movzbl	%ah,%eax
+	xorl	3(%ebp,%eax,8),%edx
+	movl	4(%esp),%eax
+	andl	$255,%ebx
+	xorl	2(%ebp,%ebx,8),%edx
+	movl	8(%esp),%ebx
+	xorl	1(%ebp,%ecx,8),%edx
+	movl	%esi,%ecx
+
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	cmpl	24(%esp),%edi
+	movl	%edi,20(%esp)
+	jb	.L003loop
+	movl	%eax,%esi
+	andl	$255,%esi
+	movl	2(%ebp,%esi,8),%esi
+	andl	$255,%esi
+	movzbl	%bh,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$65280,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$16711680,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movl	2(%ebp,%edi,8),%edi
+	andl	$4278190080,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	shrl	$16,%ebx
+	movl	2(%ebp,%esi,8),%esi
+	andl	$255,%esi
+	movzbl	%ch,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$65280,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$16711680,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$24,%edi
+	movl	2(%ebp,%edi,8),%edi
+	andl	$4278190080,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	shrl	$24,%ecx
+	movl	2(%ebp,%esi,8),%esi
+	andl	$255,%esi
+	movzbl	%dh,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$65280,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edx
+	andl	$255,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$16711680,%edi
+	xorl	%edi,%esi
+	movzbl	%bh,%edi
+	movl	2(%ebp,%edi,8),%edi
+	andl	$4278190080,%edi
+	xorl	%edi,%esi
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movl	2(%ebp,%edx,8),%edx
+	andl	$255,%edx
+	movzbl	%ah,%eax
+	movl	(%ebp,%eax,8),%eax
+	andl	$65280,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	andl	$255,%ebx
+	movl	(%ebp,%ebx,8),%ebx
+	andl	$16711680,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	movl	2(%ebp,%ecx,8),%ecx
+	andl	$4278190080,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	ret
+.align	64
+.LAES_Te:
+.long	2774754246,2774754246
+.long	2222750968,2222750968
+.long	2574743534,2574743534
+.long	2373680118,2373680118
+.long	234025727,234025727
+.long	3177933782,3177933782
+.long	2976870366,2976870366
+.long	1422247313,1422247313
+.long	1345335392,1345335392
+.long	50397442,50397442
+.long	2842126286,2842126286
+.long	2099981142,2099981142
+.long	436141799,436141799
+.long	1658312629,1658312629
+.long	3870010189,3870010189
+.long	2591454956,2591454956
+.long	1170918031,1170918031
+.long	2642575903,2642575903
+.long	1086966153,1086966153
+.long	2273148410,2273148410
+.long	368769775,368769775
+.long	3948501426,3948501426
+.long	3376891790,3376891790
+.long	200339707,200339707
+.long	3970805057,3970805057
+.long	1742001331,1742001331
+.long	4255294047,4255294047
+.long	3937382213,3937382213
+.long	3214711843,3214711843
+.long	4154762323,4154762323
+.long	2524082916,2524082916
+.long	1539358875,1539358875
+.long	3266819957,3266819957
+.long	486407649,486407649
+.long	2928907069,2928907069
+.long	1780885068,1780885068
+.long	1513502316,1513502316
+.long	1094664062,1094664062
+.long	49805301,49805301
+.long	1338821763,1338821763
+.long	1546925160,1546925160
+.long	4104496465,4104496465
+.long	887481809,887481809
+.long	150073849,150073849
+.long	2473685474,2473685474
+.long	1943591083,1943591083
+.long	1395732834,1395732834
+.long	1058346282,1058346282
+.long	201589768,201589768
+.long	1388824469,1388824469
+.long	1696801606,1696801606
+.long	1589887901,1589887901
+.long	672667696,672667696
+.long	2711000631,2711000631
+.long	251987210,251987210
+.long	3046808111,3046808111
+.long	151455502,151455502
+.long	907153956,907153956
+.long	2608889883,2608889883
+.long	1038279391,1038279391
+.long	652995533,652995533
+.long	1764173646,1764173646
+.long	3451040383,3451040383
+.long	2675275242,2675275242
+.long	453576978,453576978
+.long	2659418909,2659418909
+.long	1949051992,1949051992
+.long	773462580,773462580
+.long	756751158,756751158
+.long	2993581788,2993581788
+.long	3998898868,3998898868
+.long	4221608027,4221608027
+.long	4132590244,4132590244
+.long	1295727478,1295727478
+.long	1641469623,1641469623
+.long	3467883389,3467883389
+.long	2066295122,2066295122
+.long	1055122397,1055122397
+.long	1898917726,1898917726
+.long	2542044179,2542044179
+.long	4115878822,4115878822
+.long	1758581177,1758581177
+.long	0,0
+.long	753790401,753790401
+.long	1612718144,1612718144
+.long	536673507,536673507
+.long	3367088505,3367088505
+.long	3982187446,3982187446
+.long	3194645204,3194645204
+.long	1187761037,1187761037
+.long	3653156455,3653156455
+.long	1262041458,1262041458
+.long	3729410708,3729410708
+.long	3561770136,3561770136
+.long	3898103984,3898103984
+.long	1255133061,1255133061
+.long	1808847035,1808847035
+.long	720367557,720367557
+.long	3853167183,3853167183
+.long	385612781,385612781
+.long	3309519750,3309519750
+.long	3612167578,3612167578
+.long	1429418854,1429418854
+.long	2491778321,2491778321
+.long	3477423498,3477423498
+.long	284817897,284817897
+.long	100794884,100794884
+.long	2172616702,2172616702
+.long	4031795360,4031795360
+.long	1144798328,1144798328
+.long	3131023141,3131023141
+.long	3819481163,3819481163
+.long	4082192802,4082192802
+.long	4272137053,4272137053
+.long	3225436288,3225436288
+.long	2324664069,2324664069
+.long	2912064063,2912064063
+.long	3164445985,3164445985
+.long	1211644016,1211644016
+.long	83228145,83228145
+.long	3753688163,3753688163
+.long	3249976951,3249976951
+.long	1977277103,1977277103
+.long	1663115586,1663115586
+.long	806359072,806359072
+.long	452984805,452984805
+.long	250868733,250868733
+.long	1842533055,1842533055
+.long	1288555905,1288555905
+.long	336333848,336333848
+.long	890442534,890442534
+.long	804056259,804056259
+.long	3781124030,3781124030
+.long	2727843637,2727843637
+.long	3427026056,3427026056
+.long	957814574,957814574
+.long	1472513171,1472513171
+.long	4071073621,4071073621
+.long	2189328124,2189328124
+.long	1195195770,1195195770
+.long	2892260552,2892260552
+.long	3881655738,3881655738
+.long	723065138,723065138
+.long	2507371494,2507371494
+.long	2690670784,2690670784
+.long	2558624025,2558624025
+.long	3511635870,3511635870
+.long	2145180835,2145180835
+.long	1713513028,1713513028
+.long	2116692564,2116692564
+.long	2878378043,2878378043
+.long	2206763019,2206763019
+.long	3393603212,3393603212
+.long	703524551,703524551
+.long	3552098411,3552098411
+.long	1007948840,1007948840
+.long	2044649127,2044649127
+.long	3797835452,3797835452
+.long	487262998,487262998
+.long	1994120109,1994120109
+.long	1004593371,1004593371
+.long	1446130276,1446130276
+.long	1312438900,1312438900
+.long	503974420,503974420
+.long	3679013266,3679013266
+.long	168166924,168166924
+.long	1814307912,1814307912
+.long	3831258296,3831258296
+.long	1573044895,1573044895
+.long	1859376061,1859376061
+.long	4021070915,4021070915
+.long	2791465668,2791465668
+.long	2828112185,2828112185
+.long	2761266481,2761266481
+.long	937747667,937747667
+.long	2339994098,2339994098
+.long	854058965,854058965
+.long	1137232011,1137232011
+.long	1496790894,1496790894
+.long	3077402074,3077402074
+.long	2358086913,2358086913
+.long	1691735473,1691735473
+.long	3528347292,3528347292
+.long	3769215305,3769215305
+.long	3027004632,3027004632
+.long	4199962284,4199962284
+.long	133494003,133494003
+.long	636152527,636152527
+.long	2942657994,2942657994
+.long	2390391540,2390391540
+.long	3920539207,3920539207
+.long	403179536,403179536
+.long	3585784431,3585784431
+.long	2289596656,2289596656
+.long	1864705354,1864705354
+.long	1915629148,1915629148
+.long	605822008,605822008
+.long	4054230615,4054230615
+.long	3350508659,3350508659
+.long	1371981463,1371981463
+.long	602466507,602466507
+.long	2094914977,2094914977
+.long	2624877800,2624877800
+.long	555687742,555687742
+.long	3712699286,3712699286
+.long	3703422305,3703422305
+.long	2257292045,2257292045
+.long	2240449039,2240449039
+.long	2423288032,2423288032
+.long	1111375484,1111375484
+.long	3300242801,3300242801
+.long	2858837708,2858837708
+.long	3628615824,3628615824
+.long	84083462,84083462
+.long	32962295,32962295
+.long	302911004,302911004
+.long	2741068226,2741068226
+.long	1597322602,1597322602
+.long	4183250862,4183250862
+.long	3501832553,3501832553
+.long	2441512471,2441512471
+.long	1489093017,1489093017
+.long	656219450,656219450
+.long	3114180135,3114180135
+.long	954327513,954327513
+.long	335083755,335083755
+.long	3013122091,3013122091
+.long	856756514,856756514
+.long	3144247762,3144247762
+.long	1893325225,1893325225
+.long	2307821063,2307821063
+.long	2811532339,2811532339
+.long	3063651117,3063651117
+.long	572399164,572399164
+.long	2458355477,2458355477
+.long	552200649,552200649
+.long	1238290055,1238290055
+.long	4283782570,4283782570
+.long	2015897680,2015897680
+.long	2061492133,2061492133
+.long	2408352771,2408352771
+.long	4171342169,4171342169
+.long	2156497161,2156497161
+.long	386731290,386731290
+.long	3669999461,3669999461
+.long	837215959,837215959
+.long	3326231172,3326231172
+.long	3093850320,3093850320
+.long	3275833730,3275833730
+.long	2962856233,2962856233
+.long	1999449434,1999449434
+.long	286199582,286199582
+.long	3417354363,3417354363
+.long	4233385128,4233385128
+.long	3602627437,3602627437
+.long	974525996,974525996
+.byte	99,124,119,123,242,107,111,197
+.byte	48,1,103,43,254,215,171,118
+.byte	202,130,201,125,250,89,71,240
+.byte	173,212,162,175,156,164,114,192
+.byte	183,253,147,38,54,63,247,204
+.byte	52,165,229,241,113,216,49,21
+.byte	4,199,35,195,24,150,5,154
+.byte	7,18,128,226,235,39,178,117
+.byte	9,131,44,26,27,110,90,160
+.byte	82,59,214,179,41,227,47,132
+.byte	83,209,0,237,32,252,177,91
+.byte	106,203,190,57,74,76,88,207
+.byte	208,239,170,251,67,77,51,133
+.byte	69,249,2,127,80,60,159,168
+.byte	81,163,64,143,146,157,56,245
+.byte	188,182,218,33,16,255,243,210
+.byte	205,12,19,236,95,151,68,23
+.byte	196,167,126,61,100,93,25,115
+.byte	96,129,79,220,34,42,144,136
+.byte	70,238,184,20,222,94,11,219
+.byte	224,50,58,10,73,6,36,92
+.byte	194,211,172,98,145,149,228,121
+.byte	231,200,55,109,141,213,78,169
+.byte	108,86,244,234,101,122,174,8
+.byte	186,120,37,46,28,166,180,198
+.byte	232,221,116,31,75,189,139,138
+.byte	112,62,181,102,72,3,246,14
+.byte	97,53,87,185,134,193,29,158
+.byte	225,248,152,17,105,217,142,148
+.byte	155,30,135,233,206,85,40,223
+.byte	140,161,137,13,191,230,66,104
+.byte	65,153,45,15,176,84,187,22
+.byte	99,124,119,123,242,107,111,197
+.byte	48,1,103,43,254,215,171,118
+.byte	202,130,201,125,250,89,71,240
+.byte	173,212,162,175,156,164,114,192
+.byte	183,253,147,38,54,63,247,204
+.byte	52,165,229,241,113,216,49,21
+.byte	4,199,35,195,24,150,5,154
+.byte	7,18,128,226,235,39,178,117
+.byte	9,131,44,26,27,110,90,160
+.byte	82,59,214,179,41,227,47,132
+.byte	83,209,0,237,32,252,177,91
+.byte	106,203,190,57,74,76,88,207
+.byte	208,239,170,251,67,77,51,133
+.byte	69,249,2,127,80,60,159,168
+.byte	81,163,64,143,146,157,56,245
+.byte	188,182,218,33,16,255,243,210
+.byte	205,12,19,236,95,151,68,23
+.byte	196,167,126,61,100,93,25,115
+.byte	96,129,79,220,34,42,144,136
+.byte	70,238,184,20,222,94,11,219
+.byte	224,50,58,10,73,6,36,92
+.byte	194,211,172,98,145,149,228,121
+.byte	231,200,55,109,141,213,78,169
+.byte	108,86,244,234,101,122,174,8
+.byte	186,120,37,46,28,166,180,198
+.byte	232,221,116,31,75,189,139,138
+.byte	112,62,181,102,72,3,246,14
+.byte	97,53,87,185,134,193,29,158
+.byte	225,248,152,17,105,217,142,148
+.byte	155,30,135,233,206,85,40,223
+.byte	140,161,137,13,191,230,66,104
+.byte	65,153,45,15,176,84,187,22
+.byte	99,124,119,123,242,107,111,197
+.byte	48,1,103,43,254,215,171,118
+.byte	202,130,201,125,250,89,71,240
+.byte	173,212,162,175,156,164,114,192
+.byte	183,253,147,38,54,63,247,204
+.byte	52,165,229,241,113,216,49,21
+.byte	4,199,35,195,24,150,5,154
+.byte	7,18,128,226,235,39,178,117
+.byte	9,131,44,26,27,110,90,160
+.byte	82,59,214,179,41,227,47,132
+.byte	83,209,0,237,32,252,177,91
+.byte	106,203,190,57,74,76,88,207
+.byte	208,239,170,251,67,77,51,133
+.byte	69,249,2,127,80,60,159,168
+.byte	81,163,64,143,146,157,56,245
+.byte	188,182,218,33,16,255,243,210
+.byte	205,12,19,236,95,151,68,23
+.byte	196,167,126,61,100,93,25,115
+.byte	96,129,79,220,34,42,144,136
+.byte	70,238,184,20,222,94,11,219
+.byte	224,50,58,10,73,6,36,92
+.byte	194,211,172,98,145,149,228,121
+.byte	231,200,55,109,141,213,78,169
+.byte	108,86,244,234,101,122,174,8
+.byte	186,120,37,46,28,166,180,198
+.byte	232,221,116,31,75,189,139,138
+.byte	112,62,181,102,72,3,246,14
+.byte	97,53,87,185,134,193,29,158
+.byte	225,248,152,17,105,217,142,148
+.byte	155,30,135,233,206,85,40,223
+.byte	140,161,137,13,191,230,66,104
+.byte	65,153,45,15,176,84,187,22
+.byte	99,124,119,123,242,107,111,197
+.byte	48,1,103,43,254,215,171,118
+.byte	202,130,201,125,250,89,71,240
+.byte	173,212,162,175,156,164,114,192
+.byte	183,253,147,38,54,63,247,204
+.byte	52,165,229,241,113,216,49,21
+.byte	4,199,35,195,24,150,5,154
+.byte	7,18,128,226,235,39,178,117
+.byte	9,131,44,26,27,110,90,160
+.byte	82,59,214,179,41,227,47,132
+.byte	83,209,0,237,32,252,177,91
+.byte	106,203,190,57,74,76,88,207
+.byte	208,239,170,251,67,77,51,133
+.byte	69,249,2,127,80,60,159,168
+.byte	81,163,64,143,146,157,56,245
+.byte	188,182,218,33,16,255,243,210
+.byte	205,12,19,236,95,151,68,23
+.byte	196,167,126,61,100,93,25,115
+.byte	96,129,79,220,34,42,144,136
+.byte	70,238,184,20,222,94,11,219
+.byte	224,50,58,10,73,6,36,92
+.byte	194,211,172,98,145,149,228,121
+.byte	231,200,55,109,141,213,78,169
+.byte	108,86,244,234,101,122,174,8
+.byte	186,120,37,46,28,166,180,198
+.byte	232,221,116,31,75,189,139,138
+.byte	112,62,181,102,72,3,246,14
+.byte	97,53,87,185,134,193,29,158
+.byte	225,248,152,17,105,217,142,148
+.byte	155,30,135,233,206,85,40,223
+.byte	140,161,137,13,191,230,66,104
+.byte	65,153,45,15,176,84,187,22
+.long	1,2,4,8
+.long	16,32,64,128
+.long	27,54,0,0
+.long	0,0,0,0
+.size	_x86_AES_encrypt,.-_x86_AES_encrypt
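
Each 32-bit entry of the .LAES_Te table above is emitted twice (.long v,v). On little-endian x86 a 4-byte load at byte offset k (k = 1..3) inside such an 8-byte pair returns the entry rotated right by 8*k bits, which is why the rounds in _x86_AES_encrypt can use xorl 1(...), 2(...) and 3(%ebp,%reg,8) instead of keeping four separately rotated tables. A small C illustration (hypothetical helper, not part of the committed file):

    #include <stdint.h>
    #include <string.h>

    /* Read a duplicated table entry at byte offset k; for k = 1..3 the
     * result equals pair[0] rotated right by 8*k bits. */
    static uint32_t te_load(const uint32_t pair[2], unsigned k)
    {
        uint32_t v;
        memcpy(&v, (const uint8_t *)pair + k, 4); /* unaligned 4-byte load */
        return v;
    }
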
+.globl	AES_encrypt
+.type	AES_encrypt,@function
+.align	16
+AES_encrypt:
+.L_AES_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	28(%esp),%edi
+	movl	%esp,%eax
+	subl	$36,%esp
+	andl	$-64,%esp
+	leal	-127(%edi),%ebx
+	subl	%esp,%ebx
+	negl	%ebx
+	andl	$960,%ebx
+	subl	%ebx,%esp
+	addl	$4,%esp
+	movl	%eax,28(%esp)
+	call	.L004pic_point
+.L004pic_point:
+	popl	%ebp
+	leal	_GLOBAL_OFFSET_TABLE_+[.-.L004pic_point](%ebp),%eax
+	movl	OPENSSL_ia32cap_P@GOT(%eax),%eax
+	leal	.LAES_Te-.L004pic_point(%ebp),%ebp
+	leal	764(%esp),%ebx
+	subl	%ebp,%ebx
+	andl	$768,%ebx
+	leal	2176(%ebp,%ebx,1),%ebp
+	btl	$25,(%eax)
+	jnc	.L005x86
+	movq	(%esi),%mm0
+	movq	8(%esi),%mm4
+	call	_sse_AES_encrypt_compact
+	movl	28(%esp),%esp
+	movl	24(%esp),%esi
+	movq	%mm0,(%esi)
+	movq	%mm4,8(%esi)
+	emms
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	16
+.L005x86:
+	movl	%ebp,24(%esp)
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	call	_x86_AES_encrypt_compact
+	movl	28(%esp),%esp
+	movl	24(%esp),%esi
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	AES_encrypt,.-.L_AES_encrypt_begin
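
AES_encrypt above (and AES_decrypt further down) selects an implementation at run time: btl $25,(%eax) tests bit 25, the CPUID EDX SSE flag, in the first word of OPENSSL_ia32cap_P, and the jnc falls back to the plain integer path when the pshufw-based MMX code cannot run. Roughly, in C (the two-word array shape is assumed here for illustration):

    extern unsigned int OPENSSL_ia32cap_P[2]; /* filled in by OpenSSL's cpuid setup */

    static int have_sse(void)
    {
        return (OPENSSL_ia32cap_P[0] >> 25) & 1; /* CPUID.1:EDX bit 25 = SSE */
    }
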
+.type	_x86_AES_decrypt_compact,@function
+.align	16
+_x86_AES_decrypt_compact:
+	movl	%edi,20(%esp)
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+	movl	-128(%ebp),%edi
+	movl	-96(%ebp),%esi
+	movl	-64(%ebp),%edi
+	movl	-32(%ebp),%esi
+	movl	(%ebp),%edi
+	movl	32(%ebp),%esi
+	movl	64(%ebp),%edi
+	movl	96(%ebp),%esi
+.align	16
+.L006loop:
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ebx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%ah,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	andl	$255,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	movzbl	%ch,%ecx
+	movzbl	-128(%ebp,%ecx,1),%ecx
+	shll	$8,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+	shrl	$16,%ebx
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	shrl	$24,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$24,%eax
+	xorl	%eax,%edx
+	movl	%ecx,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%ecx,%ecx,1),%eax
+	subl	%edi,%esi
+	andl	$4278124286,%eax
+	andl	$454761243,%esi
+	xorl	%eax,%esi
+	movl	%esi,%eax
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%eax,%eax,1),%ebx
+	subl	%edi,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	xorl	%ecx,%eax
+	xorl	%ebx,%esi
+	movl	%esi,%ebx
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%ebx,%ebx,1),%ebp
+	subl	%edi,%esi
+	andl	$4278124286,%ebp
+	andl	$454761243,%esi
+	xorl	%ecx,%ebx
+	roll	$8,%ecx
+	xorl	%esi,%ebp
+	xorl	%eax,%ecx
+	xorl	%ebp,%eax
+	roll	$24,%eax
+	xorl	%ebx,%ecx
+	xorl	%ebp,%ebx
+	roll	$16,%ebx
+	xorl	%ebp,%ecx
+	roll	$8,%ebp
+	xorl	%eax,%ecx
+	xorl	%ebx,%ecx
+	movl	4(%esp),%eax
+	xorl	%ebp,%ecx
+	movl	%ecx,12(%esp)
+	movl	%edx,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%edx,%edx,1),%ebx
+	subl	%edi,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	xorl	%ebx,%esi
+	movl	%esi,%ebx
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%ebx,%ebx,1),%ecx
+	subl	%edi,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%edx,%ebx
+	xorl	%ecx,%esi
+	movl	%esi,%ecx
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%ecx,%ecx,1),%ebp
+	subl	%edi,%esi
+	andl	$4278124286,%ebp
+	andl	$454761243,%esi
+	xorl	%edx,%ecx
+	roll	$8,%edx
+	xorl	%esi,%ebp
+	xorl	%ebx,%edx
+	xorl	%ebp,%ebx
+	roll	$24,%ebx
+	xorl	%ecx,%edx
+	xorl	%ebp,%ecx
+	roll	$16,%ecx
+	xorl	%ebp,%edx
+	roll	$8,%ebp
+	xorl	%ebx,%edx
+	xorl	%ecx,%edx
+	movl	8(%esp),%ebx
+	xorl	%ebp,%edx
+	movl	%edx,16(%esp)
+	movl	%eax,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%eax,%eax,1),%ecx
+	subl	%edi,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%ecx,%esi
+	movl	%esi,%ecx
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%ecx,%ecx,1),%edx
+	subl	%edi,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	xorl	%eax,%ecx
+	xorl	%edx,%esi
+	movl	%esi,%edx
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%edx,%edx,1),%ebp
+	subl	%edi,%esi
+	andl	$4278124286,%ebp
+	andl	$454761243,%esi
+	xorl	%eax,%edx
+	roll	$8,%eax
+	xorl	%esi,%ebp
+	xorl	%ecx,%eax
+	xorl	%ebp,%ecx
+	roll	$24,%ecx
+	xorl	%edx,%eax
+	xorl	%ebp,%edx
+	roll	$16,%edx
+	xorl	%ebp,%eax
+	roll	$8,%ebp
+	xorl	%ecx,%eax
+	xorl	%edx,%eax
+	xorl	%ebp,%eax
+	movl	%ebx,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%ebx,%ebx,1),%ecx
+	subl	%edi,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%ecx,%esi
+	movl	%esi,%ecx
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%ecx,%ecx,1),%edx
+	subl	%edi,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	xorl	%ebx,%ecx
+	xorl	%edx,%esi
+	movl	%esi,%edx
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%edx,%edx,1),%ebp
+	subl	%edi,%esi
+	andl	$4278124286,%ebp
+	andl	$454761243,%esi
+	xorl	%ebx,%edx
+	roll	$8,%ebx
+	xorl	%esi,%ebp
+	xorl	%ecx,%ebx
+	xorl	%ebp,%ecx
+	roll	$24,%ecx
+	xorl	%edx,%ebx
+	xorl	%ebp,%edx
+	roll	$16,%edx
+	xorl	%ebp,%ebx
+	roll	$8,%ebp
+	xorl	%ecx,%ebx
+	xorl	%edx,%ebx
+	movl	12(%esp),%ecx
+	xorl	%ebp,%ebx
+	movl	16(%esp),%edx
+	movl	20(%esp),%edi
+	movl	28(%esp),%ebp
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	cmpl	24(%esp),%edi
+	movl	%edi,20(%esp)
+	jb	.L006loop
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ebx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%ah,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	movzbl	%ch,%ecx
+	movzbl	-128(%ebp,%ecx,1),%ecx
+	shll	$8,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+	shrl	$16,%ebx
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	shrl	$24,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$24,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	xorl	16(%edi),%eax
+	xorl	20(%edi),%ebx
+	xorl	24(%edi),%ecx
+	xorl	28(%edi),%edx
+	ret
+.size	_x86_AES_decrypt_compact,.-_x86_AES_decrypt_compact
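
Each decrypt round above chains three of the mask/subtract/and doubling blocks to form 2x, 4x and 8x of every state byte; the InvMixColumns coefficients 9, 11, 13 and 14 are then XOR combinations of those. A C restatement of that dataflow (xtime4 is the packed doubling sketched earlier; not part of the committed file):

    #include <stdint.h>

    static uint32_t xtime4(uint32_t x)              /* packed GF(2^8) doubling */
    {
        uint32_t hi = x & 0x80808080u;
        return ((x + x) & 0xfefefefeu) ^ ((hi - (hi >> 7)) & 0x1b1b1b1bu);
    }

    /* Multiples used by InvMixColumns, built the same way the three
     * chained doublings above build them. */
    static void invmix_multiples(uint32_t x, uint32_t m[4])
    {
        uint32_t x2 = xtime4(x), x4 = xtime4(x2), x8 = xtime4(x4);
        m[0] = x8 ^ x;       /*  9x */
        m[1] = x8 ^ x2 ^ x;  /* 11x */
        m[2] = x8 ^ x4 ^ x;  /* 13x */
        m[3] = x8 ^ x4 ^ x2; /* 14x */
    }
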
+.type	_sse_AES_decrypt_compact,@function
+.align	16
+_sse_AES_decrypt_compact:
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+	movl	$454761243,%eax
+	movl	%eax,8(%esp)
+	movl	%eax,12(%esp)
+	movl	-128(%ebp),%eax
+	movl	-96(%ebp),%ebx
+	movl	-64(%ebp),%ecx
+	movl	-32(%ebp),%edx
+	movl	(%ebp),%eax
+	movl	32(%ebp),%ebx
+	movl	64(%ebp),%ecx
+	movl	96(%ebp),%edx
+.align	16
+.L007loop:
+	pshufw	$12,%mm0,%mm1
+	movd	%mm1,%eax
+	pshufw	$9,%mm4,%mm5
+	movzbl	%al,%esi
+	movzbl	-128(%ebp,%esi,1),%ecx
+	movd	%mm5,%ebx
+	movzbl	%ah,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	shll	$8,%edx
+	pshufw	$6,%mm0,%mm2
+	movzbl	%bl,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$16,%esi
+	orl	%esi,%ecx
+	shrl	$16,%eax
+	movzbl	%bh,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$24,%esi
+	orl	%esi,%edx
+	shrl	$16,%ebx
+	pshufw	$3,%mm4,%mm6
+	movzbl	%ah,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$24,%esi
+	orl	%esi,%ecx
+	movzbl	%bh,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$8,%esi
+	orl	%esi,%ecx
+	movd	%ecx,%mm0
+	movzbl	%al,%esi
+	movd	%mm2,%eax
+	movzbl	-128(%ebp,%esi,1),%ecx
+	shll	$16,%ecx
+	movzbl	%bl,%esi
+	movd	%mm6,%ebx
+	movzbl	-128(%ebp,%esi,1),%esi
+	orl	%esi,%ecx
+	movzbl	%al,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	orl	%esi,%edx
+	movzbl	%bl,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$16,%esi
+	orl	%esi,%edx
+	movd	%edx,%mm1
+	movzbl	%ah,%esi
+	movzbl	-128(%ebp,%esi,1),%edx
+	shll	$8,%edx
+	movzbl	%bh,%esi
+	shrl	$16,%eax
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$24,%esi
+	orl	%esi,%edx
+	shrl	$16,%ebx
+	punpckldq	%mm1,%mm0
+	movzbl	%bh,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$8,%esi
+	orl	%esi,%ecx
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	orl	%ebx,%edx
+	movzbl	%al,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$16,%esi
+	orl	%esi,%edx
+	movd	%edx,%mm4
+	movzbl	%ah,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$24,%eax
+	orl	%eax,%ecx
+	movd	%ecx,%mm5
+	punpckldq	%mm5,%mm4
+	addl	$16,%edi
+	cmpl	24(%esp),%edi
+	ja	.L008out
+	movq	%mm0,%mm3
+	movq	%mm4,%mm7
+	pshufw	$228,%mm0,%mm2
+	pshufw	$228,%mm4,%mm6
+	movq	%mm0,%mm1
+	movq	%mm4,%mm5
+	pshufw	$177,%mm0,%mm0
+	pshufw	$177,%mm4,%mm4
+	pslld	$8,%mm2
+	pslld	$8,%mm6
+	psrld	$8,%mm3
+	psrld	$8,%mm7
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pslld	$16,%mm2
+	pslld	$16,%mm6
+	psrld	$16,%mm3
+	psrld	$16,%mm7
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	movq	8(%esp),%mm3
+	pxor	%mm2,%mm2
+	pxor	%mm6,%mm6
+	pcmpgtb	%mm1,%mm2
+	pcmpgtb	%mm5,%mm6
+	pand	%mm3,%mm2
+	pand	%mm3,%mm6
+	paddb	%mm1,%mm1
+	paddb	%mm5,%mm5
+	pxor	%mm2,%mm1
+	pxor	%mm6,%mm5
+	movq	%mm1,%mm3
+	movq	%mm5,%mm7
+	movq	%mm1,%mm2
+	movq	%mm5,%mm6
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	pslld	$24,%mm3
+	pslld	$24,%mm7
+	psrld	$8,%mm2
+	psrld	$8,%mm6
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	movq	8(%esp),%mm2
+	pxor	%mm3,%mm3
+	pxor	%mm7,%mm7
+	pcmpgtb	%mm1,%mm3
+	pcmpgtb	%mm5,%mm7
+	pand	%mm2,%mm3
+	pand	%mm2,%mm7
+	paddb	%mm1,%mm1
+	paddb	%mm5,%mm5
+	pxor	%mm3,%mm1
+	pxor	%mm7,%mm5
+	pshufw	$177,%mm1,%mm3
+	pshufw	$177,%mm5,%mm7
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pxor	%mm3,%mm3
+	pxor	%mm7,%mm7
+	pcmpgtb	%mm1,%mm3
+	pcmpgtb	%mm5,%mm7
+	pand	%mm2,%mm3
+	pand	%mm2,%mm7
+	paddb	%mm1,%mm1
+	paddb	%mm5,%mm5
+	pxor	%mm3,%mm1
+	pxor	%mm7,%mm5
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	movq	%mm1,%mm3
+	movq	%mm5,%mm7
+	pshufw	$177,%mm1,%mm2
+	pshufw	$177,%mm5,%mm6
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	pslld	$8,%mm1
+	pslld	$8,%mm5
+	psrld	$8,%mm3
+	psrld	$8,%mm7
+	movq	(%edi),%mm2
+	movq	8(%edi),%mm6
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	movl	-128(%ebp),%eax
+	pslld	$16,%mm1
+	pslld	$16,%mm5
+	movl	-64(%ebp),%ebx
+	psrld	$16,%mm3
+	psrld	$16,%mm7
+	movl	(%ebp),%ecx
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	movl	64(%ebp),%edx
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	jmp	.L007loop
+.align	16
+.L008out:
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	ret
+.size	_sse_AES_decrypt_compact,.-_sse_AES_decrypt_compact
+.type	_x86_AES_decrypt,@function
+.align	16
+_x86_AES_decrypt:
+	movl	%edi,20(%esp)
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+.align	16
+.L009loop:
+	movl	%eax,%esi
+	andl	$255,%esi
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%dh,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%ebx,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+	movl	%esi,4(%esp)
+
+	movl	%ebx,%esi
+	andl	$255,%esi
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%ah,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%ecx,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+	movl	%esi,8(%esp)
+
+	movl	%ecx,%esi
+	andl	$255,%esi
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%bh,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movl	(%ebp,%edx,8),%edx
+	movzbl	%ch,%ecx
+	xorl	3(%ebp,%ecx,8),%edx
+	movl	%esi,%ecx
+	shrl	$16,%ebx
+	andl	$255,%ebx
+	xorl	2(%ebp,%ebx,8),%edx
+	movl	8(%esp),%ebx
+	shrl	$24,%eax
+	xorl	1(%ebp,%eax,8),%edx
+	movl	4(%esp),%eax
+
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	cmpl	24(%esp),%edi
+	movl	%edi,20(%esp)
+	jb	.L009loop
+	leal	2176(%ebp),%ebp
+	movl	-128(%ebp),%edi
+	movl	-96(%ebp),%esi
+	movl	-64(%ebp),%edi
+	movl	-32(%ebp),%esi
+	movl	(%ebp),%edi
+	movl	32(%ebp),%esi
+	movl	64(%ebp),%edi
+	movl	96(%ebp),%esi
+	leal	-128(%ebp),%ebp
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ebx,%edi
+	shrl	$24,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	movzbl	(%ebp,%esi,1),%esi
+	movzbl	%ah,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$24,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	movzbl	(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movzbl	(%ebp,%edx,1),%edx
+	movzbl	%ch,%ecx
+	movzbl	(%ebp,%ecx,1),%ecx
+	shll	$8,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+	shrl	$16,%ebx
+	andl	$255,%ebx
+	movzbl	(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	shrl	$24,%eax
+	movzbl	(%ebp,%eax,1),%eax
+	shll	$24,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	leal	-2048(%ebp),%ebp
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	ret
+.align	64
+.LAES_Td:
+.long	1353184337,1353184337
+.long	1399144830,1399144830
+.long	3282310938,3282310938
+.long	2522752826,2522752826
+.long	3412831035,3412831035
+.long	4047871263,4047871263
+.long	2874735276,2874735276
+.long	2466505547,2466505547
+.long	1442459680,1442459680
+.long	4134368941,4134368941
+.long	2440481928,2440481928
+.long	625738485,625738485
+.long	4242007375,4242007375
+.long	3620416197,3620416197
+.long	2151953702,2151953702
+.long	2409849525,2409849525
+.long	1230680542,1230680542
+.long	1729870373,1729870373
+.long	2551114309,2551114309
+.long	3787521629,3787521629
+.long	41234371,41234371
+.long	317738113,317738113
+.long	2744600205,2744600205
+.long	3338261355,3338261355
+.long	3881799427,3881799427
+.long	2510066197,2510066197
+.long	3950669247,3950669247
+.long	3663286933,3663286933
+.long	763608788,763608788
+.long	3542185048,3542185048
+.long	694804553,694804553
+.long	1154009486,1154009486
+.long	1787413109,1787413109
+.long	2021232372,2021232372
+.long	1799248025,1799248025
+.long	3715217703,3715217703
+.long	3058688446,3058688446
+.long	397248752,397248752
+.long	1722556617,1722556617
+.long	3023752829,3023752829
+.long	407560035,407560035
+.long	2184256229,2184256229
+.long	1613975959,1613975959
+.long	1165972322,1165972322
+.long	3765920945,3765920945
+.long	2226023355,2226023355
+.long	480281086,480281086
+.long	2485848313,2485848313
+.long	1483229296,1483229296
+.long	436028815,436028815
+.long	2272059028,2272059028
+.long	3086515026,3086515026
+.long	601060267,601060267
+.long	3791801202,3791801202
+.long	1468997603,1468997603
+.long	715871590,715871590
+.long	120122290,120122290
+.long	63092015,63092015
+.long	2591802758,2591802758
+.long	2768779219,2768779219
+.long	4068943920,4068943920
+.long	2997206819,2997206819
+.long	3127509762,3127509762
+.long	1552029421,1552029421
+.long	723308426,723308426
+.long	2461301159,2461301159
+.long	4042393587,4042393587
+.long	2715969870,2715969870
+.long	3455375973,3455375973
+.long	3586000134,3586000134
+.long	526529745,526529745
+.long	2331944644,2331944644
+.long	2639474228,2639474228
+.long	2689987490,2689987490
+.long	853641733,853641733
+.long	1978398372,1978398372
+.long	971801355,971801355
+.long	2867814464,2867814464
+.long	111112542,111112542
+.long	1360031421,1360031421
+.long	4186579262,4186579262
+.long	1023860118,1023860118
+.long	2919579357,2919579357
+.long	1186850381,1186850381
+.long	3045938321,3045938321
+.long	90031217,90031217
+.long	1876166148,1876166148
+.long	4279586912,4279586912
+.long	620468249,620468249
+.long	2548678102,2548678102
+.long	3426959497,3426959497
+.long	2006899047,2006899047
+.long	3175278768,3175278768
+.long	2290845959,2290845959
+.long	945494503,945494503
+.long	3689859193,3689859193
+.long	1191869601,1191869601
+.long	3910091388,3910091388
+.long	3374220536,3374220536
+.long	0,0
+.long	2206629897,2206629897
+.long	1223502642,1223502642
+.long	2893025566,2893025566
+.long	1316117100,1316117100
+.long	4227796733,4227796733
+.long	1446544655,1446544655
+.long	517320253,517320253
+.long	658058550,658058550
+.long	1691946762,1691946762
+.long	564550760,564550760
+.long	3511966619,3511966619
+.long	976107044,976107044
+.long	2976320012,2976320012
+.long	266819475,266819475
+.long	3533106868,3533106868
+.long	2660342555,2660342555
+.long	1338359936,1338359936
+.long	2720062561,2720062561
+.long	1766553434,1766553434
+.long	370807324,370807324
+.long	179999714,179999714
+.long	3844776128,3844776128
+.long	1138762300,1138762300
+.long	488053522,488053522
+.long	185403662,185403662
+.long	2915535858,2915535858
+.long	3114841645,3114841645
+.long	3366526484,3366526484
+.long	2233069911,2233069911
+.long	1275557295,1275557295
+.long	3151862254,3151862254
+.long	4250959779,4250959779
+.long	2670068215,2670068215
+.long	3170202204,3170202204
+.long	3309004356,3309004356
+.long	880737115,880737115
+.long	1982415755,1982415755
+.long	3703972811,3703972811
+.long	1761406390,1761406390
+.long	1676797112,1676797112
+.long	3403428311,3403428311
+.long	277177154,277177154
+.long	1076008723,1076008723
+.long	538035844,538035844
+.long	2099530373,2099530373
+.long	4164795346,4164795346
+.long	288553390,288553390
+.long	1839278535,1839278535
+.long	1261411869,1261411869
+.long	4080055004,4080055004
+.long	3964831245,3964831245
+.long	3504587127,3504587127
+.long	1813426987,1813426987
+.long	2579067049,2579067049
+.long	4199060497,4199060497
+.long	577038663,577038663
+.long	3297574056,3297574056
+.long	440397984,440397984
+.long	3626794326,3626794326
+.long	4019204898,4019204898
+.long	3343796615,3343796615
+.long	3251714265,3251714265
+.long	4272081548,4272081548
+.long	906744984,906744984
+.long	3481400742,3481400742
+.long	685669029,685669029
+.long	646887386,646887386
+.long	2764025151,2764025151
+.long	3835509292,3835509292
+.long	227702864,227702864
+.long	2613862250,2613862250
+.long	1648787028,1648787028
+.long	3256061430,3256061430
+.long	3904428176,3904428176
+.long	1593260334,1593260334
+.long	4121936770,4121936770
+.long	3196083615,3196083615
+.long	2090061929,2090061929
+.long	2838353263,2838353263
+.long	3004310991,3004310991
+.long	999926984,999926984
+.long	2809993232,2809993232
+.long	1852021992,1852021992
+.long	2075868123,2075868123
+.long	158869197,158869197
+.long	4095236462,4095236462
+.long	28809964,28809964
+.long	2828685187,2828685187
+.long	1701746150,1701746150
+.long	2129067946,2129067946
+.long	147831841,147831841
+.long	3873969647,3873969647
+.long	3650873274,3650873274
+.long	3459673930,3459673930
+.long	3557400554,3557400554
+.long	3598495785,3598495785
+.long	2947720241,2947720241
+.long	824393514,824393514
+.long	815048134,815048134
+.long	3227951669,3227951669
+.long	935087732,935087732
+.long	2798289660,2798289660
+.long	2966458592,2966458592
+.long	366520115,366520115
+.long	1251476721,1251476721
+.long	4158319681,4158319681
+.long	240176511,240176511
+.long	804688151,804688151
+.long	2379631990,2379631990
+.long	1303441219,1303441219
+.long	1414376140,1414376140
+.long	3741619940,3741619940
+.long	3820343710,3820343710
+.long	461924940,461924940
+.long	3089050817,3089050817
+.long	2136040774,2136040774
+.long	82468509,82468509
+.long	1563790337,1563790337
+.long	1937016826,1937016826
+.long	776014843,776014843
+.long	1511876531,1511876531
+.long	1389550482,1389550482
+.long	861278441,861278441
+.long	323475053,323475053
+.long	2355222426,2355222426
+.long	2047648055,2047648055
+.long	2383738969,2383738969
+.long	2302415851,2302415851
+.long	3995576782,3995576782
+.long	902390199,902390199
+.long	3991215329,3991215329
+.long	1018251130,1018251130
+.long	1507840668,1507840668
+.long	1064563285,1064563285
+.long	2043548696,2043548696
+.long	3208103795,3208103795
+.long	3939366739,3939366739
+.long	1537932639,1537932639
+.long	342834655,342834655
+.long	2262516856,2262516856
+.long	2180231114,2180231114
+.long	1053059257,1053059257
+.long	741614648,741614648
+.long	1598071746,1598071746
+.long	1925389590,1925389590
+.long	203809468,203809468
+.long	2336832552,2336832552
+.long	1100287487,1100287487
+.long	1895934009,1895934009
+.long	3736275976,3736275976
+.long	2632234200,2632234200
+.long	2428589668,2428589668
+.long	1636092795,1636092795
+.long	1890988757,1890988757
+.long	1952214088,1952214088
+.long	1113045200,1113045200
+.byte	82,9,106,213,48,54,165,56
+.byte	191,64,163,158,129,243,215,251
+.byte	124,227,57,130,155,47,255,135
+.byte	52,142,67,68,196,222,233,203
+.byte	84,123,148,50,166,194,35,61
+.byte	238,76,149,11,66,250,195,78
+.byte	8,46,161,102,40,217,36,178
+.byte	118,91,162,73,109,139,209,37
+.byte	114,248,246,100,134,104,152,22
+.byte	212,164,92,204,93,101,182,146
+.byte	108,112,72,80,253,237,185,218
+.byte	94,21,70,87,167,141,157,132
+.byte	144,216,171,0,140,188,211,10
+.byte	247,228,88,5,184,179,69,6
+.byte	208,44,30,143,202,63,15,2
+.byte	193,175,189,3,1,19,138,107
+.byte	58,145,17,65,79,103,220,234
+.byte	151,242,207,206,240,180,230,115
+.byte	150,172,116,34,231,173,53,133
+.byte	226,249,55,232,28,117,223,110
+.byte	71,241,26,113,29,41,197,137
+.byte	111,183,98,14,170,24,190,27
+.byte	252,86,62,75,198,210,121,32
+.byte	154,219,192,254,120,205,90,244
+.byte	31,221,168,51,136,7,199,49
+.byte	177,18,16,89,39,128,236,95
+.byte	96,81,127,169,25,181,74,13
+.byte	45,229,122,159,147,201,156,239
+.byte	160,224,59,77,174,42,245,176
+.byte	200,235,187,60,131,83,153,97
+.byte	23,43,4,126,186,119,214,38
+.byte	225,105,20,99,85,33,12,125
+.byte	82,9,106,213,48,54,165,56
+.byte	191,64,163,158,129,243,215,251
+.byte	124,227,57,130,155,47,255,135
+.byte	52,142,67,68,196,222,233,203
+.byte	84,123,148,50,166,194,35,61
+.byte	238,76,149,11,66,250,195,78
+.byte	8,46,161,102,40,217,36,178
+.byte	118,91,162,73,109,139,209,37
+.byte	114,248,246,100,134,104,152,22
+.byte	212,164,92,204,93,101,182,146
+.byte	108,112,72,80,253,237,185,218
+.byte	94,21,70,87,167,141,157,132
+.byte	144,216,171,0,140,188,211,10
+.byte	247,228,88,5,184,179,69,6
+.byte	208,44,30,143,202,63,15,2
+.byte	193,175,189,3,1,19,138,107
+.byte	58,145,17,65,79,103,220,234
+.byte	151,242,207,206,240,180,230,115
+.byte	150,172,116,34,231,173,53,133
+.byte	226,249,55,232,28,117,223,110
+.byte	71,241,26,113,29,41,197,137
+.byte	111,183,98,14,170,24,190,27
+.byte	252,86,62,75,198,210,121,32
+.byte	154,219,192,254,120,205,90,244
+.byte	31,221,168,51,136,7,199,49
+.byte	177,18,16,89,39,128,236,95
+.byte	96,81,127,169,25,181,74,13
+.byte	45,229,122,159,147,201,156,239
+.byte	160,224,59,77,174,42,245,176
+.byte	200,235,187,60,131,83,153,97
+.byte	23,43,4,126,186,119,214,38
+.byte	225,105,20,99,85,33,12,125
+.byte	82,9,106,213,48,54,165,56
+.byte	191,64,163,158,129,243,215,251
+.byte	124,227,57,130,155,47,255,135
+.byte	52,142,67,68,196,222,233,203
+.byte	84,123,148,50,166,194,35,61
+.byte	238,76,149,11,66,250,195,78
+.byte	8,46,161,102,40,217,36,178
+.byte	118,91,162,73,109,139,209,37
+.byte	114,248,246,100,134,104,152,22
+.byte	212,164,92,204,93,101,182,146
+.byte	108,112,72,80,253,237,185,218
+.byte	94,21,70,87,167,141,157,132
+.byte	144,216,171,0,140,188,211,10
+.byte	247,228,88,5,184,179,69,6
+.byte	208,44,30,143,202,63,15,2
+.byte	193,175,189,3,1,19,138,107
+.byte	58,145,17,65,79,103,220,234
+.byte	151,242,207,206,240,180,230,115
+.byte	150,172,116,34,231,173,53,133
+.byte	226,249,55,232,28,117,223,110
+.byte	71,241,26,113,29,41,197,137
+.byte	111,183,98,14,170,24,190,27
+.byte	252,86,62,75,198,210,121,32
+.byte	154,219,192,254,120,205,90,244
+.byte	31,221,168,51,136,7,199,49
+.byte	177,18,16,89,39,128,236,95
+.byte	96,81,127,169,25,181,74,13
+.byte	45,229,122,159,147,201,156,239
+.byte	160,224,59,77,174,42,245,176
+.byte	200,235,187,60,131,83,153,97
+.byte	23,43,4,126,186,119,214,38
+.byte	225,105,20,99,85,33,12,125
+.byte	82,9,106,213,48,54,165,56
+.byte	191,64,163,158,129,243,215,251
+.byte	124,227,57,130,155,47,255,135
+.byte	52,142,67,68,196,222,233,203
+.byte	84,123,148,50,166,194,35,61
+.byte	238,76,149,11,66,250,195,78
+.byte	8,46,161,102,40,217,36,178
+.byte	118,91,162,73,109,139,209,37
+.byte	114,248,246,100,134,104,152,22
+.byte	212,164,92,204,93,101,182,146
+.byte	108,112,72,80,253,237,185,218
+.byte	94,21,70,87,167,141,157,132
+.byte	144,216,171,0,140,188,211,10
+.byte	247,228,88,5,184,179,69,6
+.byte	208,44,30,143,202,63,15,2
+.byte	193,175,189,3,1,19,138,107
+.byte	58,145,17,65,79,103,220,234
+.byte	151,242,207,206,240,180,230,115
+.byte	150,172,116,34,231,173,53,133
+.byte	226,249,55,232,28,117,223,110
+.byte	71,241,26,113,29,41,197,137
+.byte	111,183,98,14,170,24,190,27
+.byte	252,86,62,75,198,210,121,32
+.byte	154,219,192,254,120,205,90,244
+.byte	31,221,168,51,136,7,199,49
+.byte	177,18,16,89,39,128,236,95
+.byte	96,81,127,169,25,181,74,13
+.byte	45,229,122,159,147,201,156,239
+.byte	160,224,59,77,174,42,245,176
+.byte	200,235,187,60,131,83,153,97
+.byte	23,43,4,126,186,119,214,38
+.byte	225,105,20,99,85,33,12,125
+.size	_x86_AES_decrypt,.-_x86_AES_decrypt
+.globl	AES_decrypt
+.type	AES_decrypt,@function
+.align	16
+AES_decrypt:
+.L_AES_decrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	28(%esp),%edi
+	movl	%esp,%eax
+	subl	$36,%esp
+	andl	$-64,%esp
+	leal	-127(%edi),%ebx
+	subl	%esp,%ebx
+	negl	%ebx
+	andl	$960,%ebx
+	subl	%ebx,%esp
+	addl	$4,%esp
+	movl	%eax,28(%esp)
+	call	.L010pic_point
+.L010pic_point:
+	popl	%ebp
+	leal	_GLOBAL_OFFSET_TABLE_+[.-.L010pic_point](%ebp),%eax
+	movl	OPENSSL_ia32cap_P@GOT(%eax),%eax
+	leal	.LAES_Td-.L010pic_point(%ebp),%ebp
+	leal	764(%esp),%ebx
+	subl	%ebp,%ebx
+	andl	$768,%ebx
+	leal	2176(%ebp,%ebx,1),%ebp
+	btl	$25,(%eax)
+	jnc	.L011x86
+	movq	(%esi),%mm0
+	movq	8(%esi),%mm4
+	call	_sse_AES_decrypt_compact
+	movl	28(%esp),%esp
+	movl	24(%esp),%esi
+	movq	%mm0,(%esi)
+	movq	%mm4,8(%esi)
+	emms
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	16
+.L011x86:
+	movl	%ebp,24(%esp)
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	call	_x86_AES_decrypt_compact
+	movl	28(%esp),%esp
+	movl	24(%esp),%esi
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	AES_decrypt,.-.L_AES_decrypt_begin
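
AES_cbc_encrypt below is the CBC bulk driver. Its encryption path (.L022fast_enc_loop) XORs each 16-byte block with the previous ciphertext, the IV for the first block, encrypts the result with _x86_AES_encrypt, and stores it. The chaining in C, with aes_encrypt_block() a hypothetical stand-in for the assembly routine:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    extern void aes_encrypt_block(const uint8_t in[16], uint8_t out[16],
                                  const void *key); /* hypothetical */

    static void cbc_encrypt_sketch(const uint8_t *in, uint8_t *out,
                                   size_t len, const void *key,
                                   uint8_t iv[16])
    {
        uint8_t prev[16];
        memcpy(prev, iv, 16);
        for (; len >= 16; len -= 16, in += 16, out += 16) {
            for (int i = 0; i < 16; i++)   /* chain: P XOR previous C */
                prev[i] ^= in[i];
            aes_encrypt_block(prev, prev, key);
            memcpy(out, prev, 16);
        }
        memcpy(iv, prev, 16);              /* carry the IV forward */
    }
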
+.globl	AES_cbc_encrypt
+.type	AES_cbc_encrypt,@function
+.align	16
+AES_cbc_encrypt:
+.L_AES_cbc_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%ecx
+	cmpl	$0,%ecx
+	je	.L012drop_out
+	call	.L013pic_point
+.L013pic_point:
+	popl	%ebp
+	leal	_GLOBAL_OFFSET_TABLE_+[.-.L013pic_point](%ebp),%eax
+	movl	OPENSSL_ia32cap_P@GOT(%eax),%eax
+	cmpl	$0,40(%esp)
+	leal	.LAES_Te-.L013pic_point(%ebp),%ebp
+	jne	.L014picked_te
+	leal	.LAES_Td-.LAES_Te(%ebp),%ebp
+.L014picked_te:
+	pushfl
+	cld
+	cmpl	$512,%ecx
+	jb	.L015slow_way
+	testl	$15,%ecx
+	jnz	.L015slow_way
+	btl	$28,(%eax)
+	jc	.L015slow_way
+	leal	-324(%esp),%esi
+	andl	$-64,%esi
+	movl	%ebp,%eax
+	leal	2304(%ebp),%ebx
+	movl	%esi,%edx
+	andl	$4095,%eax
+	andl	$4095,%ebx
+	andl	$4095,%edx
+	cmpl	%ebx,%edx
+	jb	.L016tbl_break_out
+	subl	%ebx,%edx
+	subl	%edx,%esi
+	jmp	.L017tbl_ok
+.align	4
+.L016tbl_break_out:
+	subl	%eax,%edx
+	andl	$4095,%edx
+	addl	$384,%edx
+	subl	%edx,%esi
+.align	4
+.L017tbl_ok:
+	leal	24(%esp),%edx
+	xchgl	%esi,%esp
+	addl	$4,%esp
+	movl	%ebp,24(%esp)
+	movl	%esi,28(%esp)
+	movl	(%edx),%eax
+	movl	4(%edx),%ebx
+	movl	12(%edx),%edi
+	movl	16(%edx),%esi
+	movl	20(%edx),%edx
+	movl	%eax,32(%esp)
+	movl	%ebx,36(%esp)
+	movl	%ecx,40(%esp)
+	movl	%edi,44(%esp)
+	movl	%esi,48(%esp)
+	movl	$0,316(%esp)
+	movl	%edi,%ebx
+	movl	$61,%ecx
+	subl	%ebp,%ebx
+	movl	%edi,%esi
+	andl	$4095,%ebx
+	leal	76(%esp),%edi
+	cmpl	$2304,%ebx
+	jb	.L018do_copy
+	cmpl	$3852,%ebx
+	jb	.L019skip_copy
+.align	4
+.L018do_copy:
+	movl	%edi,44(%esp)
+.long	2784229001
+.L019skip_copy:
+	movl	$16,%edi
+.align	4
+.L020prefetch_tbl:
+	movl	(%ebp),%eax
+	movl	32(%ebp),%ebx
+	movl	64(%ebp),%ecx
+	movl	96(%ebp),%esi
+	leal	128(%ebp),%ebp
+	subl	$1,%edi
+	jnz	.L020prefetch_tbl
+	subl	$2048,%ebp
+	movl	32(%esp),%esi
+	movl	48(%esp),%edi
+	cmpl	$0,%edx
+	je	.L021fast_decrypt
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+.align	16
+.L022fast_enc_loop:
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	xorl	(%esi),%eax
+	xorl	4(%esi),%ebx
+	xorl	8(%esi),%ecx
+	xorl	12(%esi),%edx
+	movl	44(%esp),%edi
+	call	_x86_AES_encrypt
+	movl	32(%esp),%esi
+	movl	36(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	leal	16(%esi),%esi
+	movl	40(%esp),%ecx
+	movl	%esi,32(%esp)
+	leal	16(%edi),%edx
+	movl	%edx,36(%esp)
+	subl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jnz	.L022fast_enc_loop
+	movl	48(%esp),%esi
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	cmpl	$0,316(%esp)
+	movl	44(%esp),%edi
+	je	.L023skip_ezero
+	movl	$60,%ecx
+	xorl	%eax,%eax
+.align	4
+.long	2884892297
+.L023skip_ezero:
+	movl	28(%esp),%esp
+	popfl
+.L012drop_out:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L021fast_decrypt:
+	cmpl	36(%esp),%esi
+	je	.L024fast_dec_in_place
+	movl	%edi,52(%esp)
+.align	4
+.align	16
+.L025fast_dec_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	44(%esp),%edi
+	call	_x86_AES_decrypt
+	movl	52(%esp),%edi
+	movl	40(%esp),%esi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	36(%esp),%edi
+	movl	32(%esp),%esi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	40(%esp),%ecx
+	movl	%esi,52(%esp)
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	leal	16(%edi),%edi
+	movl	%edi,36(%esp)
+	subl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jnz	.L025fast_dec_loop
+	movl	52(%esp),%edi
+	movl	48(%esp),%esi
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	jmp	.L026fast_dec_out
+.align	16
+.L024fast_dec_in_place:
+.L027fast_dec_in_place_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	leal	60(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	44(%esp),%edi
+	call	_x86_AES_decrypt
+	movl	48(%esp),%edi
+	movl	36(%esp),%esi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	leal	16(%esi),%esi
+	movl	%esi,36(%esp)
+	leal	60(%esp),%esi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	32(%esp),%esi
+	movl	40(%esp),%ecx
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	subl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jnz	.L027fast_dec_in_place_loop
+.align	4
+.L026fast_dec_out:
+	cmpl	$0,316(%esp)
+	movl	44(%esp),%edi
+	je	.L028skip_dzero
+	movl	$60,%ecx
+	xorl	%eax,%eax
+.align	4
+.long	2884892297
+.L028skip_dzero:
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L015slow_way:
+	movl	(%eax),%eax
+	movl	36(%esp),%edi
+	leal	-80(%esp),%esi
+	andl	$-64,%esi
+	leal	-143(%edi),%ebx
+	subl	%esi,%ebx
+	negl	%ebx
+	andl	$960,%ebx
+	subl	%ebx,%esi
+	leal	768(%esi),%ebx
+	subl	%ebp,%ebx
+	andl	$768,%ebx
+	leal	2176(%ebp,%ebx,1),%ebp
+	leal	24(%esp),%edx
+	xchgl	%esi,%esp
+	addl	$4,%esp
+	movl	%ebp,24(%esp)
+	movl	%esi,28(%esp)
+	movl	%eax,52(%esp)
+	movl	(%edx),%eax
+	movl	4(%edx),%ebx
+	movl	16(%edx),%esi
+	movl	20(%edx),%edx
+	movl	%eax,32(%esp)
+	movl	%ebx,36(%esp)
+	movl	%ecx,40(%esp)
+	movl	%edi,44(%esp)
+	movl	%esi,48(%esp)
+	movl	%esi,%edi
+	movl	%eax,%esi
+	cmpl	$0,%edx
+	je	.L029slow_decrypt
+	cmpl	$16,%ecx
+	movl	%ebx,%edx
+	jb	.L030slow_enc_tail
+	btl	$25,52(%esp)
+	jnc	.L031slow_enc_x86
+	movq	(%edi),%mm0
+	movq	8(%edi),%mm4
+.align	16
+.L032slow_enc_loop_sse:
+	pxor	(%esi),%mm0
+	pxor	8(%esi),%mm4
+	movl	44(%esp),%edi
+	call	_sse_AES_encrypt_compact
+	movl	32(%esp),%esi
+	movl	36(%esp),%edi
+	movl	40(%esp),%ecx
+	movq	%mm0,(%edi)
+	movq	%mm4,8(%edi)
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	leal	16(%edi),%edx
+	movl	%edx,36(%esp)
+	subl	$16,%ecx
+	cmpl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jae	.L032slow_enc_loop_sse
+	testl	$15,%ecx
+	jnz	.L030slow_enc_tail
+	movl	48(%esp),%esi
+	movq	%mm0,(%esi)
+	movq	%mm4,8(%esi)
+	emms
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L031slow_enc_x86:
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+.align	4
+.L033slow_enc_loop_x86:
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	xorl	(%esi),%eax
+	xorl	4(%esi),%ebx
+	xorl	8(%esi),%ecx
+	xorl	12(%esi),%edx
+	movl	44(%esp),%edi
+	call	_x86_AES_encrypt_compact
+	movl	32(%esp),%esi
+	movl	36(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	40(%esp),%ecx
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	leal	16(%edi),%edx
+	movl	%edx,36(%esp)
+	subl	$16,%ecx
+	cmpl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jae	.L033slow_enc_loop_x86
+	testl	$15,%ecx
+	jnz	.L030slow_enc_tail
+	movl	48(%esp),%esi
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L030slow_enc_tail:
+	emms
+	movl	%edx,%edi
+	movl	$16,%ebx
+	subl	%ecx,%ebx
+	cmpl	%esi,%edi
+	je	.L034enc_in_place
+.align	4
+.long	2767451785
+	jmp	.L035enc_skip_in_place
+.L034enc_in_place:
+	leal	(%edi,%ecx,1),%edi
+.L035enc_skip_in_place:
+	movl	%ebx,%ecx
+	xorl	%eax,%eax
+.align	4
+.long	2868115081
+	movl	48(%esp),%edi
+	movl	%edx,%esi
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	$16,40(%esp)
+	jmp	.L033slow_enc_loop_x86
+.align	16
+.L029slow_decrypt:
+	btl	$25,52(%esp)
+	jnc	.L036slow_dec_loop_x86
+.align	4
+.L037slow_dec_loop_sse:
+	movq	(%esi),%mm0
+	movq	8(%esi),%mm4
+	movl	44(%esp),%edi
+	call	_sse_AES_decrypt_compact
+	movl	32(%esp),%esi
+	leal	60(%esp),%eax
+	movl	36(%esp),%ebx
+	movl	40(%esp),%ecx
+	movl	48(%esp),%edi
+	movq	(%esi),%mm1
+	movq	8(%esi),%mm5
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	movq	%mm1,(%edi)
+	movq	%mm5,8(%edi)
+	subl	$16,%ecx
+	jc	.L038slow_dec_partial_sse
+	movq	%mm0,(%ebx)
+	movq	%mm4,8(%ebx)
+	leal	16(%ebx),%ebx
+	movl	%ebx,36(%esp)
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	movl	%ecx,40(%esp)
+	jnz	.L037slow_dec_loop_sse
+	emms
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L038slow_dec_partial_sse:
+	movq	%mm0,(%eax)
+	movq	%mm4,8(%eax)
+	emms
+	addl	$16,%ecx
+	movl	%ebx,%edi
+	movl	%eax,%esi
+.align	4
+.long	2767451785
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L036slow_dec_loop_x86:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	leal	60(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	44(%esp),%edi
+	call	_x86_AES_decrypt_compact
+	movl	48(%esp),%edi
+	movl	40(%esp),%esi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	subl	$16,%esi
+	jc	.L039slow_dec_partial_x86
+	movl	%esi,40(%esp)
+	movl	36(%esp),%esi
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	leal	16(%esi),%esi
+	movl	%esi,36(%esp)
+	leal	60(%esp),%esi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	32(%esp),%esi
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	jnz	.L036slow_dec_loop_x86
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L039slow_dec_partial_x86:
+	leal	60(%esp),%esi
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	movl	32(%esp),%esi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	40(%esp),%ecx
+	movl	36(%esp),%edi
+	leal	60(%esp),%esi
+.align	4
+.long	2767451785
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	AES_cbc_encrypt,.-.L_AES_cbc_encrypt_begin
+.type	_x86_AES_set_encrypt_key,@function
+.align	16
+_x86_AES_set_encrypt_key:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	24(%esp),%esi
+	movl	32(%esp),%edi
+	testl	$-1,%esi
+	jz	.L040badpointer
+	testl	$-1,%edi
+	jz	.L040badpointer
+	call	.L041pic_point
+.L041pic_point:
+	popl	%ebp
+	leal	.LAES_Te-.L041pic_point(%ebp),%ebp
+	leal	2176(%ebp),%ebp
+	movl	-128(%ebp),%eax
+	movl	-96(%ebp),%ebx
+	movl	-64(%ebp),%ecx
+	movl	-32(%ebp),%edx
+	movl	(%ebp),%eax
+	movl	32(%ebp),%ebx
+	movl	64(%ebp),%ecx
+	movl	96(%ebp),%edx
+	movl	28(%esp),%ecx
+	cmpl	$128,%ecx
+	je	.L04210rounds
+	cmpl	$192,%ecx
+	je	.L04312rounds
+	cmpl	$256,%ecx
+	je	.L04414rounds
+	movl	$-2,%eax
+	jmp	.L045exit
+.L04210rounds:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	xorl	%ecx,%ecx
+	jmp	.L04610shortcut
+.align	4
+.L04710loop:
+	movl	(%edi),%eax
+	movl	12(%edi),%edx
+.L04610shortcut:
+	movzbl	%dl,%esi
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shrl	$16,%edx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$8,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+	xorl	896(%ebp,%ecx,4),%eax
+	movl	%eax,16(%edi)
+	xorl	4(%edi),%eax
+	movl	%eax,20(%edi)
+	xorl	8(%edi),%eax
+	movl	%eax,24(%edi)
+	xorl	12(%edi),%eax
+	movl	%eax,28(%edi)
+	incl	%ecx
+	addl	$16,%edi
+	cmpl	$10,%ecx
+	jl	.L04710loop
+	movl	$10,80(%edi)
+	xorl	%eax,%eax
+	jmp	.L045exit
+.L04312rounds:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	16(%esi),%ecx
+	movl	20(%esi),%edx
+	movl	%ecx,16(%edi)
+	movl	%edx,20(%edi)
+	xorl	%ecx,%ecx
+	jmp	.L04812shortcut
+.align	4
+.L04912loop:
+	movl	(%edi),%eax
+	movl	20(%edi),%edx
+.L04812shortcut:
+	movzbl	%dl,%esi
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shrl	$16,%edx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$8,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+	xorl	896(%ebp,%ecx,4),%eax
+	movl	%eax,24(%edi)
+	xorl	4(%edi),%eax
+	movl	%eax,28(%edi)
+	xorl	8(%edi),%eax
+	movl	%eax,32(%edi)
+	xorl	12(%edi),%eax
+	movl	%eax,36(%edi)
+	cmpl	$7,%ecx
+	je	.L05012break
+	incl	%ecx
+	xorl	16(%edi),%eax
+	movl	%eax,40(%edi)
+	xorl	20(%edi),%eax
+	movl	%eax,44(%edi)
+	addl	$24,%edi
+	jmp	.L04912loop
+.L05012break:
+	movl	$12,72(%edi)
+	xorl	%eax,%eax
+	jmp	.L045exit
+.L04414rounds:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	16(%esi),%eax
+	movl	20(%esi),%ebx
+	movl	24(%esi),%ecx
+	movl	28(%esi),%edx
+	movl	%eax,16(%edi)
+	movl	%ebx,20(%edi)
+	movl	%ecx,24(%edi)
+	movl	%edx,28(%edi)
+	xorl	%ecx,%ecx
+	jmp	.L05114shortcut
+.align	4
+.L05214loop:
+	movl	28(%edi),%edx
+.L05114shortcut:
+	movl	(%edi),%eax
+	movzbl	%dl,%esi
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shrl	$16,%edx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$8,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+	xorl	896(%ebp,%ecx,4),%eax
+	movl	%eax,32(%edi)
+	xorl	4(%edi),%eax
+	movl	%eax,36(%edi)
+	xorl	8(%edi),%eax
+	movl	%eax,40(%edi)
+	xorl	12(%edi),%eax
+	movl	%eax,44(%edi)
+	cmpl	$6,%ecx
+	je	.L05314break
+	incl	%ecx
+	movl	%eax,%edx
+	movl	16(%edi),%eax
+	movzbl	%dl,%esi
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shrl	$16,%edx
+	shll	$8,%ebx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+	movl	%eax,48(%edi)
+	xorl	20(%edi),%eax
+	movl	%eax,52(%edi)
+	xorl	24(%edi),%eax
+	movl	%eax,56(%edi)
+	xorl	28(%edi),%eax
+	movl	%eax,60(%edi)
+	addl	$32,%edi
+	jmp	.L05214loop
+.L05314break:
+	movl	$14,48(%edi)
+	xorl	%eax,%eax
+	jmp	.L045exit
+.L040badpointer:
+	movl	$-1,%eax
+.L045exit:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	_x86_AES_set_encrypt_key,.-_x86_AES_set_encrypt_key
+.globl	private_AES_set_encrypt_key
+.type	private_AES_set_encrypt_key,@function
+.align	16
+private_AES_set_encrypt_key:
+.L_private_AES_set_encrypt_key_begin:
+	call	_x86_AES_set_encrypt_key
+	ret
+.size	private_AES_set_encrypt_key,.-.L_private_AES_set_encrypt_key_begin
+.globl	private_AES_set_decrypt_key
+.type	private_AES_set_decrypt_key,@function
+.align	16
+private_AES_set_decrypt_key:
+.L_private_AES_set_decrypt_key_begin:
+	call	_x86_AES_set_encrypt_key
+	cmpl	$0,%eax
+	je	.L054proceed
+	ret
+.L054proceed:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%esi
+	movl	240(%esi),%ecx
+	leal	(,%ecx,4),%ecx
+	leal	(%esi,%ecx,4),%edi
+.align	4
+.L055invert:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	(%edi),%ecx
+	movl	4(%edi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,(%esi)
+	movl	%edx,4(%esi)
+	movl	8(%esi),%eax
+	movl	12(%esi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,8(%edi)
+	movl	%ebx,12(%edi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	addl	$16,%esi
+	subl	$16,%edi
+	cmpl	%edi,%esi
+	jne	.L055invert
+	movl	28(%esp),%edi
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,28(%esp)
+	movl	16(%edi),%eax
+.align	4
+.L056permute:
+	addl	$16,%edi
+	movl	%eax,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%eax,%eax,1),%ebx
+	subl	%ebp,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	xorl	%ebx,%esi
+	movl	%esi,%ebx
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%ebx,%ebx,1),%ecx
+	subl	%ebp,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%eax,%ebx
+	xorl	%ecx,%esi
+	movl	%esi,%ecx
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%ecx,%ecx,1),%edx
+	xorl	%eax,%ecx
+	subl	%ebp,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	roll	$8,%eax
+	xorl	%esi,%edx
+	movl	4(%edi),%ebp
+	xorl	%ebx,%eax
+	xorl	%edx,%ebx
+	xorl	%ecx,%eax
+	roll	$24,%ebx
+	xorl	%edx,%ecx
+	xorl	%edx,%eax
+	roll	$16,%ecx
+	xorl	%ebx,%eax
+	roll	$8,%edx
+	xorl	%ecx,%eax
+	movl	%ebp,%ebx
+	xorl	%edx,%eax
+	movl	%eax,(%edi)
+	movl	%ebx,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%ebx,%ebx,1),%ecx
+	subl	%ebp,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%ecx,%esi
+	movl	%esi,%ecx
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%ecx,%ecx,1),%edx
+	subl	%ebp,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	xorl	%ebx,%ecx
+	xorl	%edx,%esi
+	movl	%esi,%edx
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%edx,%edx,1),%eax
+	xorl	%ebx,%edx
+	subl	%ebp,%esi
+	andl	$4278124286,%eax
+	andl	$454761243,%esi
+	roll	$8,%ebx
+	xorl	%esi,%eax
+	movl	8(%edi),%ebp
+	xorl	%ecx,%ebx
+	xorl	%eax,%ecx
+	xorl	%edx,%ebx
+	roll	$24,%ecx
+	xorl	%eax,%edx
+	xorl	%eax,%ebx
+	roll	$16,%edx
+	xorl	%ecx,%ebx
+	roll	$8,%eax
+	xorl	%edx,%ebx
+	movl	%ebp,%ecx
+	xorl	%eax,%ebx
+	movl	%ebx,4(%edi)
+	movl	%ecx,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%ecx,%ecx,1),%edx
+	subl	%ebp,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	xorl	%edx,%esi
+	movl	%esi,%edx
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%edx,%edx,1),%eax
+	subl	%ebp,%esi
+	andl	$4278124286,%eax
+	andl	$454761243,%esi
+	xorl	%ecx,%edx
+	xorl	%eax,%esi
+	movl	%esi,%eax
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%eax,%eax,1),%ebx
+	xorl	%ecx,%eax
+	subl	%ebp,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	roll	$8,%ecx
+	xorl	%esi,%ebx
+	movl	12(%edi),%ebp
+	xorl	%edx,%ecx
+	xorl	%ebx,%edx
+	xorl	%eax,%ecx
+	roll	$24,%edx
+	xorl	%ebx,%eax
+	xorl	%ebx,%ecx
+	roll	$16,%eax
+	xorl	%edx,%ecx
+	roll	$8,%ebx
+	xorl	%eax,%ecx
+	movl	%ebp,%edx
+	xorl	%ebx,%ecx
+	movl	%ecx,8(%edi)
+	movl	%edx,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%edx,%edx,1),%eax
+	subl	%ebp,%esi
+	andl	$4278124286,%eax
+	andl	$454761243,%esi
+	xorl	%eax,%esi
+	movl	%esi,%eax
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%eax,%eax,1),%ebx
+	subl	%ebp,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	xorl	%edx,%eax
+	xorl	%ebx,%esi
+	movl	%esi,%ebx
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%ebx,%ebx,1),%ecx
+	xorl	%edx,%ebx
+	subl	%ebp,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	roll	$8,%edx
+	xorl	%esi,%ecx
+	movl	16(%edi),%ebp
+	xorl	%eax,%edx
+	xorl	%ecx,%eax
+	xorl	%ebx,%edx
+	roll	$24,%eax
+	xorl	%ecx,%ebx
+	xorl	%ecx,%edx
+	roll	$16,%ebx
+	xorl	%eax,%edx
+	roll	$8,%ecx
+	xorl	%ebx,%edx
+	movl	%ebp,%eax
+	xorl	%ecx,%edx
+	movl	%edx,12(%edi)
+	cmpl	28(%esp),%edi
+	jb	.L056permute
+	xorl	%eax,%eax
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	private_AES_set_decrypt_key,.-.L_private_AES_set_decrypt_key_begin
+.byte	65,69,83,32,102,111,114,32,120,56,54,44,32,67,82,89
+.byte	80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
+.byte	111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.comm	OPENSSL_ia32cap_P,8,4
+#else
+.file	"aes-586.S"
+.text
+.type	_x86_AES_encrypt_compact,@function
+.align	16
+_x86_AES_encrypt_compact:
+	movl	%edi,20(%esp)
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+	movl	-128(%ebp),%edi
+	movl	-96(%ebp),%esi
+	movl	-64(%ebp),%edi
+	movl	-32(%ebp),%esi
+	movl	(%ebp),%edi
+	movl	32(%ebp),%esi
+	movl	64(%ebp),%edi
+	movl	96(%ebp),%esi
+.align	16
+.L000loop:
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+
+	movl	%ebx,%esi
+	andl	$255,%esi
+	shrl	$16,%ebx
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%ch,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+
+	movl	%ecx,%esi
+	andl	$255,%esi
+	shrl	$24,%ecx
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edx
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+
+	andl	$255,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	movzbl	%ah,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$8,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	movzbl	-128(%ebp,%ecx,1),%ecx
+	shll	$24,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+
+	movl	%ecx,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%ecx,%ecx,1),%edi
+	subl	%ebp,%esi
+	andl	$4278124286,%edi
+	andl	$454761243,%esi
+	movl	%ecx,%ebp
+	xorl	%edi,%esi
+	xorl	%esi,%ecx
+	roll	$24,%ecx
+	xorl	%esi,%ecx
+	rorl	$16,%ebp
+	xorl	%ebp,%ecx
+	rorl	$8,%ebp
+	xorl	%ebp,%ecx
+	movl	%edx,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%edx,%edx,1),%edi
+	subl	%ebp,%esi
+	andl	$4278124286,%edi
+	andl	$454761243,%esi
+	movl	%edx,%ebp
+	xorl	%edi,%esi
+	xorl	%esi,%edx
+	roll	$24,%edx
+	xorl	%esi,%edx
+	rorl	$16,%ebp
+	xorl	%ebp,%edx
+	rorl	$8,%ebp
+	xorl	%ebp,%edx
+	movl	%eax,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%eax,%eax,1),%edi
+	subl	%ebp,%esi
+	andl	$4278124286,%edi
+	andl	$454761243,%esi
+	movl	%eax,%ebp
+	xorl	%edi,%esi
+	xorl	%esi,%eax
+	roll	$24,%eax
+	xorl	%esi,%eax
+	rorl	$16,%ebp
+	xorl	%ebp,%eax
+	rorl	$8,%ebp
+	xorl	%ebp,%eax
+	movl	%ebx,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%ebx,%ebx,1),%edi
+	subl	%ebp,%esi
+	andl	$4278124286,%edi
+	andl	$454761243,%esi
+	movl	%ebx,%ebp
+	xorl	%edi,%esi
+	xorl	%esi,%ebx
+	roll	$24,%ebx
+	xorl	%esi,%ebx
+	rorl	$16,%ebp
+	xorl	%ebp,%ebx
+	rorl	$8,%ebp
+	xorl	%ebp,%ebx
+	movl	20(%esp),%edi
+	movl	28(%esp),%ebp
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	cmpl	24(%esp),%edi
+	movl	%edi,20(%esp)
+	jb	.L000loop
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+
+	movl	%ebx,%esi
+	andl	$255,%esi
+	shrl	$16,%ebx
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%ch,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+
+	movl	%ecx,%esi
+	andl	$255,%esi
+	shrl	$24,%ecx
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edx
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	movzbl	%ah,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$8,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	movzbl	-128(%ebp,%ecx,1),%ecx
+	shll	$24,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+
+	xorl	16(%edi),%eax
+	xorl	20(%edi),%ebx
+	xorl	24(%edi),%ecx
+	xorl	28(%edi),%edx
+	ret
+.size	_x86_AES_encrypt_compact,.-_x86_AES_encrypt_compact
+.type	_sse_AES_encrypt_compact,@function
+.align	16
+_sse_AES_encrypt_compact:
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+	movl	$454761243,%eax
+	movl	%eax,8(%esp)
+	movl	%eax,12(%esp)
+	movl	-128(%ebp),%eax
+	movl	-96(%ebp),%ebx
+	movl	-64(%ebp),%ecx
+	movl	-32(%ebp),%edx
+	movl	(%ebp),%eax
+	movl	32(%ebp),%ebx
+	movl	64(%ebp),%ecx
+	movl	96(%ebp),%edx
+.align	16
+.L001loop:
+	pshufw	$8,%mm0,%mm1
+	pshufw	$13,%mm4,%mm5
+	movd	%mm1,%eax
+	movd	%mm5,%ebx
+	movzbl	%al,%esi
+	movzbl	-128(%ebp,%esi,1),%ecx
+	pshufw	$13,%mm0,%mm2
+	movzbl	%ah,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	shll	$8,%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$16,%esi
+	orl	%esi,%ecx
+	pshufw	$8,%mm4,%mm6
+	movzbl	%bh,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$24,%esi
+	orl	%esi,%edx
+	shrl	$16,%ebx
+	movzbl	%ah,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$8,%esi
+	orl	%esi,%ecx
+	movzbl	%bh,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$24,%esi
+	orl	%esi,%ecx
+	movd	%ecx,%mm0
+	movzbl	%al,%esi
+	movzbl	-128(%ebp,%esi,1),%ecx
+	movd	%mm2,%eax
+	movzbl	%bl,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$16,%esi
+	orl	%esi,%ecx
+	movd	%mm6,%ebx
+	movzbl	%ah,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$24,%esi
+	orl	%esi,%ecx
+	movzbl	%bh,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$8,%esi
+	orl	%esi,%ecx
+	movd	%ecx,%mm1
+	movzbl	%bl,%esi
+	movzbl	-128(%ebp,%esi,1),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$16,%esi
+	orl	%esi,%ecx
+	shrl	$16,%eax
+	punpckldq	%mm1,%mm0
+	movzbl	%ah,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$24,%esi
+	orl	%esi,%ecx
+	andl	$255,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$16,%eax
+	orl	%eax,%edx
+	movzbl	%bh,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$8,%esi
+	orl	%esi,%ecx
+	movd	%ecx,%mm4
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	orl	%ebx,%edx
+	movd	%edx,%mm5
+	punpckldq	%mm5,%mm4
+	addl	$16,%edi
+	cmpl	24(%esp),%edi
+	ja	.L002out
+	movq	8(%esp),%mm2
+	pxor	%mm3,%mm3
+	pxor	%mm7,%mm7
+	movq	%mm0,%mm1
+	movq	%mm4,%mm5
+	pcmpgtb	%mm0,%mm3
+	pcmpgtb	%mm4,%mm7
+	pand	%mm2,%mm3
+	pand	%mm2,%mm7
+	pshufw	$177,%mm0,%mm2
+	pshufw	$177,%mm4,%mm6
+	paddb	%mm0,%mm0
+	paddb	%mm4,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pshufw	$177,%mm2,%mm3
+	pshufw	$177,%mm6,%mm7
+	pxor	%mm0,%mm1
+	pxor	%mm4,%mm5
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	movq	%mm3,%mm2
+	movq	%mm7,%mm6
+	pslld	$8,%mm3
+	pslld	$8,%mm7
+	psrld	$24,%mm2
+	psrld	$24,%mm6
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	movq	%mm1,%mm3
+	movq	%mm5,%mm7
+	movq	(%edi),%mm2
+	movq	8(%edi),%mm6
+	psrld	$8,%mm1
+	psrld	$8,%mm5
+	movl	-128(%ebp),%eax
+	pslld	$24,%mm3
+	pslld	$24,%mm7
+	movl	-64(%ebp),%ebx
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	movl	(%ebp),%ecx
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	movl	64(%ebp),%edx
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	jmp	.L001loop
+.align	16
+.L002out:
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	ret
+.size	_sse_AES_encrypt_compact,.-_sse_AES_encrypt_compact
+.type	_x86_AES_encrypt,@function
+.align	16
+_x86_AES_encrypt:
+	movl	%edi,20(%esp)
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+.align	16
+.L003loop:
+	movl	%eax,%esi
+	andl	$255,%esi
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%bh,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+	movl	%esi,4(%esp)
+
+	movl	%ebx,%esi
+	andl	$255,%esi
+	shrl	$16,%ebx
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%ch,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%eax,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+	movl	%esi,8(%esp)
+
+	movl	%ecx,%esi
+	andl	$255,%esi
+	shrl	$24,%ecx
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%dh,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edx
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movzbl	%bh,%edi
+	xorl	1(%ebp,%edi,8),%esi
+
+	movl	20(%esp),%edi
+	movl	(%ebp,%edx,8),%edx
+	movzbl	%ah,%eax
+	xorl	3(%ebp,%eax,8),%edx
+	movl	4(%esp),%eax
+	andl	$255,%ebx
+	xorl	2(%ebp,%ebx,8),%edx
+	movl	8(%esp),%ebx
+	xorl	1(%ebp,%ecx,8),%edx
+	movl	%esi,%ecx
+
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	cmpl	24(%esp),%edi
+	movl	%edi,20(%esp)
+	jb	.L003loop
+	movl	%eax,%esi
+	andl	$255,%esi
+	movl	2(%ebp,%esi,8),%esi
+	andl	$255,%esi
+	movzbl	%bh,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$65280,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$16711680,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movl	2(%ebp,%edi,8),%edi
+	andl	$4278190080,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	shrl	$16,%ebx
+	movl	2(%ebp,%esi,8),%esi
+	andl	$255,%esi
+	movzbl	%ch,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$65280,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$16711680,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$24,%edi
+	movl	2(%ebp,%edi,8),%edi
+	andl	$4278190080,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	shrl	$24,%ecx
+	movl	2(%ebp,%esi,8),%esi
+	andl	$255,%esi
+	movzbl	%dh,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$65280,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edx
+	andl	$255,%edi
+	movl	(%ebp,%edi,8),%edi
+	andl	$16711680,%edi
+	xorl	%edi,%esi
+	movzbl	%bh,%edi
+	movl	2(%ebp,%edi,8),%edi
+	andl	$4278190080,%edi
+	xorl	%edi,%esi
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movl	2(%ebp,%edx,8),%edx
+	andl	$255,%edx
+	movzbl	%ah,%eax
+	movl	(%ebp,%eax,8),%eax
+	andl	$65280,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	andl	$255,%ebx
+	movl	(%ebp,%ebx,8),%ebx
+	andl	$16711680,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	movl	2(%ebp,%ecx,8),%ecx
+	andl	$4278190080,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	ret
+.align	64
+.LAES_Te:
+.long	2774754246,2774754246
+.long	2222750968,2222750968
+.long	2574743534,2574743534
+.long	2373680118,2373680118
+.long	234025727,234025727
+.long	3177933782,3177933782
+.long	2976870366,2976870366
+.long	1422247313,1422247313
+.long	1345335392,1345335392
+.long	50397442,50397442
+.long	2842126286,2842126286
+.long	2099981142,2099981142
+.long	436141799,436141799
+.long	1658312629,1658312629
+.long	3870010189,3870010189
+.long	2591454956,2591454956
+.long	1170918031,1170918031
+.long	2642575903,2642575903
+.long	1086966153,1086966153
+.long	2273148410,2273148410
+.long	368769775,368769775
+.long	3948501426,3948501426
+.long	3376891790,3376891790
+.long	200339707,200339707
+.long	3970805057,3970805057
+.long	1742001331,1742001331
+.long	4255294047,4255294047
+.long	3937382213,3937382213
+.long	3214711843,3214711843
+.long	4154762323,4154762323
+.long	2524082916,2524082916
+.long	1539358875,1539358875
+.long	3266819957,3266819957
+.long	486407649,486407649
+.long	2928907069,2928907069
+.long	1780885068,1780885068
+.long	1513502316,1513502316
+.long	1094664062,1094664062
+.long	49805301,49805301
+.long	1338821763,1338821763
+.long	1546925160,1546925160
+.long	4104496465,4104496465
+.long	887481809,887481809
+.long	150073849,150073849
+.long	2473685474,2473685474
+.long	1943591083,1943591083
+.long	1395732834,1395732834
+.long	1058346282,1058346282
+.long	201589768,201589768
+.long	1388824469,1388824469
+.long	1696801606,1696801606
+.long	1589887901,1589887901
+.long	672667696,672667696
+.long	2711000631,2711000631
+.long	251987210,251987210
+.long	3046808111,3046808111
+.long	151455502,151455502
+.long	907153956,907153956
+.long	2608889883,2608889883
+.long	1038279391,1038279391
+.long	652995533,652995533
+.long	1764173646,1764173646
+.long	3451040383,3451040383
+.long	2675275242,2675275242
+.long	453576978,453576978
+.long	2659418909,2659418909
+.long	1949051992,1949051992
+.long	773462580,773462580
+.long	756751158,756751158
+.long	2993581788,2993581788
+.long	3998898868,3998898868
+.long	4221608027,4221608027
+.long	4132590244,4132590244
+.long	1295727478,1295727478
+.long	1641469623,1641469623
+.long	3467883389,3467883389
+.long	2066295122,2066295122
+.long	1055122397,1055122397
+.long	1898917726,1898917726
+.long	2542044179,2542044179
+.long	4115878822,4115878822
+.long	1758581177,1758581177
+.long	0,0
+.long	753790401,753790401
+.long	1612718144,1612718144
+.long	536673507,536673507
+.long	3367088505,3367088505
+.long	3982187446,3982187446
+.long	3194645204,3194645204
+.long	1187761037,1187761037
+.long	3653156455,3653156455
+.long	1262041458,1262041458
+.long	3729410708,3729410708
+.long	3561770136,3561770136
+.long	3898103984,3898103984
+.long	1255133061,1255133061
+.long	1808847035,1808847035
+.long	720367557,720367557
+.long	3853167183,3853167183
+.long	385612781,385612781
+.long	3309519750,3309519750
+.long	3612167578,3612167578
+.long	1429418854,1429418854
+.long	2491778321,2491778321
+.long	3477423498,3477423498
+.long	284817897,284817897
+.long	100794884,100794884
+.long	2172616702,2172616702
+.long	4031795360,4031795360
+.long	1144798328,1144798328
+.long	3131023141,3131023141
+.long	3819481163,3819481163
+.long	4082192802,4082192802
+.long	4272137053,4272137053
+.long	3225436288,3225436288
+.long	2324664069,2324664069
+.long	2912064063,2912064063
+.long	3164445985,3164445985
+.long	1211644016,1211644016
+.long	83228145,83228145
+.long	3753688163,3753688163
+.long	3249976951,3249976951
+.long	1977277103,1977277103
+.long	1663115586,1663115586
+.long	806359072,806359072
+.long	452984805,452984805
+.long	250868733,250868733
+.long	1842533055,1842533055
+.long	1288555905,1288555905
+.long	336333848,336333848
+.long	890442534,890442534
+.long	804056259,804056259
+.long	3781124030,3781124030
+.long	2727843637,2727843637
+.long	3427026056,3427026056
+.long	957814574,957814574
+.long	1472513171,1472513171
+.long	4071073621,4071073621
+.long	2189328124,2189328124
+.long	1195195770,1195195770
+.long	2892260552,2892260552
+.long	3881655738,3881655738
+.long	723065138,723065138
+.long	2507371494,2507371494
+.long	2690670784,2690670784
+.long	2558624025,2558624025
+.long	3511635870,3511635870
+.long	2145180835,2145180835
+.long	1713513028,1713513028
+.long	2116692564,2116692564
+.long	2878378043,2878378043
+.long	2206763019,2206763019
+.long	3393603212,3393603212
+.long	703524551,703524551
+.long	3552098411,3552098411
+.long	1007948840,1007948840
+.long	2044649127,2044649127
+.long	3797835452,3797835452
+.long	487262998,487262998
+.long	1994120109,1994120109
+.long	1004593371,1004593371
+.long	1446130276,1446130276
+.long	1312438900,1312438900
+.long	503974420,503974420
+.long	3679013266,3679013266
+.long	168166924,168166924
+.long	1814307912,1814307912
+.long	3831258296,3831258296
+.long	1573044895,1573044895
+.long	1859376061,1859376061
+.long	4021070915,4021070915
+.long	2791465668,2791465668
+.long	2828112185,2828112185
+.long	2761266481,2761266481
+.long	937747667,937747667
+.long	2339994098,2339994098
+.long	854058965,854058965
+.long	1137232011,1137232011
+.long	1496790894,1496790894
+.long	3077402074,3077402074
+.long	2358086913,2358086913
+.long	1691735473,1691735473
+.long	3528347292,3528347292
+.long	3769215305,3769215305
+.long	3027004632,3027004632
+.long	4199962284,4199962284
+.long	133494003,133494003
+.long	636152527,636152527
+.long	2942657994,2942657994
+.long	2390391540,2390391540
+.long	3920539207,3920539207
+.long	403179536,403179536
+.long	3585784431,3585784431
+.long	2289596656,2289596656
+.long	1864705354,1864705354
+.long	1915629148,1915629148
+.long	605822008,605822008
+.long	4054230615,4054230615
+.long	3350508659,3350508659
+.long	1371981463,1371981463
+.long	602466507,602466507
+.long	2094914977,2094914977
+.long	2624877800,2624877800
+.long	555687742,555687742
+.long	3712699286,3712699286
+.long	3703422305,3703422305
+.long	2257292045,2257292045
+.long	2240449039,2240449039
+.long	2423288032,2423288032
+.long	1111375484,1111375484
+.long	3300242801,3300242801
+.long	2858837708,2858837708
+.long	3628615824,3628615824
+.long	84083462,84083462
+.long	32962295,32962295
+.long	302911004,302911004
+.long	2741068226,2741068226
+.long	1597322602,1597322602
+.long	4183250862,4183250862
+.long	3501832553,3501832553
+.long	2441512471,2441512471
+.long	1489093017,1489093017
+.long	656219450,656219450
+.long	3114180135,3114180135
+.long	954327513,954327513
+.long	335083755,335083755
+.long	3013122091,3013122091
+.long	856756514,856756514
+.long	3144247762,3144247762
+.long	1893325225,1893325225
+.long	2307821063,2307821063
+.long	2811532339,2811532339
+.long	3063651117,3063651117
+.long	572399164,572399164
+.long	2458355477,2458355477
+.long	552200649,552200649
+.long	1238290055,1238290055
+.long	4283782570,4283782570
+.long	2015897680,2015897680
+.long	2061492133,2061492133
+.long	2408352771,2408352771
+.long	4171342169,4171342169
+.long	2156497161,2156497161
+.long	386731290,386731290
+.long	3669999461,3669999461
+.long	837215959,837215959
+.long	3326231172,3326231172
+.long	3093850320,3093850320
+.long	3275833730,3275833730
+.long	2962856233,2962856233
+.long	1999449434,1999449434
+.long	286199582,286199582
+.long	3417354363,3417354363
+.long	4233385128,4233385128
+.long	3602627437,3602627437
+.long	974525996,974525996
+.byte	99,124,119,123,242,107,111,197
+.byte	48,1,103,43,254,215,171,118
+.byte	202,130,201,125,250,89,71,240
+.byte	173,212,162,175,156,164,114,192
+.byte	183,253,147,38,54,63,247,204
+.byte	52,165,229,241,113,216,49,21
+.byte	4,199,35,195,24,150,5,154
+.byte	7,18,128,226,235,39,178,117
+.byte	9,131,44,26,27,110,90,160
+.byte	82,59,214,179,41,227,47,132
+.byte	83,209,0,237,32,252,177,91
+.byte	106,203,190,57,74,76,88,207
+.byte	208,239,170,251,67,77,51,133
+.byte	69,249,2,127,80,60,159,168
+.byte	81,163,64,143,146,157,56,245
+.byte	188,182,218,33,16,255,243,210
+.byte	205,12,19,236,95,151,68,23
+.byte	196,167,126,61,100,93,25,115
+.byte	96,129,79,220,34,42,144,136
+.byte	70,238,184,20,222,94,11,219
+.byte	224,50,58,10,73,6,36,92
+.byte	194,211,172,98,145,149,228,121
+.byte	231,200,55,109,141,213,78,169
+.byte	108,86,244,234,101,122,174,8
+.byte	186,120,37,46,28,166,180,198
+.byte	232,221,116,31,75,189,139,138
+.byte	112,62,181,102,72,3,246,14
+.byte	97,53,87,185,134,193,29,158
+.byte	225,248,152,17,105,217,142,148
+.byte	155,30,135,233,206,85,40,223
+.byte	140,161,137,13,191,230,66,104
+.byte	65,153,45,15,176,84,187,22
+.byte	99,124,119,123,242,107,111,197
+.byte	48,1,103,43,254,215,171,118
+.byte	202,130,201,125,250,89,71,240
+.byte	173,212,162,175,156,164,114,192
+.byte	183,253,147,38,54,63,247,204
+.byte	52,165,229,241,113,216,49,21
+.byte	4,199,35,195,24,150,5,154
+.byte	7,18,128,226,235,39,178,117
+.byte	9,131,44,26,27,110,90,160
+.byte	82,59,214,179,41,227,47,132
+.byte	83,209,0,237,32,252,177,91
+.byte	106,203,190,57,74,76,88,207
+.byte	208,239,170,251,67,77,51,133
+.byte	69,249,2,127,80,60,159,168
+.byte	81,163,64,143,146,157,56,245
+.byte	188,182,218,33,16,255,243,210
+.byte	205,12,19,236,95,151,68,23
+.byte	196,167,126,61,100,93,25,115
+.byte	96,129,79,220,34,42,144,136
+.byte	70,238,184,20,222,94,11,219
+.byte	224,50,58,10,73,6,36,92
+.byte	194,211,172,98,145,149,228,121
+.byte	231,200,55,109,141,213,78,169
+.byte	108,86,244,234,101,122,174,8
+.byte	186,120,37,46,28,166,180,198
+.byte	232,221,116,31,75,189,139,138
+.byte	112,62,181,102,72,3,246,14
+.byte	97,53,87,185,134,193,29,158
+.byte	225,248,152,17,105,217,142,148
+.byte	155,30,135,233,206,85,40,223
+.byte	140,161,137,13,191,230,66,104
+.byte	65,153,45,15,176,84,187,22
+.byte	99,124,119,123,242,107,111,197
+.byte	48,1,103,43,254,215,171,118
+.byte	202,130,201,125,250,89,71,240
+.byte	173,212,162,175,156,164,114,192
+.byte	183,253,147,38,54,63,247,204
+.byte	52,165,229,241,113,216,49,21
+.byte	4,199,35,195,24,150,5,154
+.byte	7,18,128,226,235,39,178,117
+.byte	9,131,44,26,27,110,90,160
+.byte	82,59,214,179,41,227,47,132
+.byte	83,209,0,237,32,252,177,91
+.byte	106,203,190,57,74,76,88,207
+.byte	208,239,170,251,67,77,51,133
+.byte	69,249,2,127,80,60,159,168
+.byte	81,163,64,143,146,157,56,245
+.byte	188,182,218,33,16,255,243,210
+.byte	205,12,19,236,95,151,68,23
+.byte	196,167,126,61,100,93,25,115
+.byte	96,129,79,220,34,42,144,136
+.byte	70,238,184,20,222,94,11,219
+.byte	224,50,58,10,73,6,36,92
+.byte	194,211,172,98,145,149,228,121
+.byte	231,200,55,109,141,213,78,169
+.byte	108,86,244,234,101,122,174,8
+.byte	186,120,37,46,28,166,180,198
+.byte	232,221,116,31,75,189,139,138
+.byte	112,62,181,102,72,3,246,14
+.byte	97,53,87,185,134,193,29,158
+.byte	225,248,152,17,105,217,142,148
+.byte	155,30,135,233,206,85,40,223
+.byte	140,161,137,13,191,230,66,104
+.byte	65,153,45,15,176,84,187,22
+.byte	99,124,119,123,242,107,111,197
+.byte	48,1,103,43,254,215,171,118
+.byte	202,130,201,125,250,89,71,240
+.byte	173,212,162,175,156,164,114,192
+.byte	183,253,147,38,54,63,247,204
+.byte	52,165,229,241,113,216,49,21
+.byte	4,199,35,195,24,150,5,154
+.byte	7,18,128,226,235,39,178,117
+.byte	9,131,44,26,27,110,90,160
+.byte	82,59,214,179,41,227,47,132
+.byte	83,209,0,237,32,252,177,91
+.byte	106,203,190,57,74,76,88,207
+.byte	208,239,170,251,67,77,51,133
+.byte	69,249,2,127,80,60,159,168
+.byte	81,163,64,143,146,157,56,245
+.byte	188,182,218,33,16,255,243,210
+.byte	205,12,19,236,95,151,68,23
+.byte	196,167,126,61,100,93,25,115
+.byte	96,129,79,220,34,42,144,136
+.byte	70,238,184,20,222,94,11,219
+.byte	224,50,58,10,73,6,36,92
+.byte	194,211,172,98,145,149,228,121
+.byte	231,200,55,109,141,213,78,169
+.byte	108,86,244,234,101,122,174,8
+.byte	186,120,37,46,28,166,180,198
+.byte	232,221,116,31,75,189,139,138
+.byte	112,62,181,102,72,3,246,14
+.byte	97,53,87,185,134,193,29,158
+.byte	225,248,152,17,105,217,142,148
+.byte	155,30,135,233,206,85,40,223
+.byte	140,161,137,13,191,230,66,104
+.byte	65,153,45,15,176,84,187,22
+.long	1,2,4,8
+.long	16,32,64,128
+.long	27,54,0,0
+.long	0,0,0,0
+.size	_x86_AES_encrypt,.-_x86_AES_encrypt
+.globl	AES_encrypt
+.type	AES_encrypt,@function
+.align	16
+AES_encrypt:
+.L_AES_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	28(%esp),%edi
+	movl	%esp,%eax
+	subl	$36,%esp
+	andl	$-64,%esp
+	leal	-127(%edi),%ebx
+	subl	%esp,%ebx
+	negl	%ebx
+	andl	$960,%ebx
+	subl	%ebx,%esp
+	addl	$4,%esp
+	movl	%eax,28(%esp)
+	call	.L004pic_point
+.L004pic_point:
+	popl	%ebp
+	leal	OPENSSL_ia32cap_P,%eax
+	leal	.LAES_Te-.L004pic_point(%ebp),%ebp
+	leal	764(%esp),%ebx
+	subl	%ebp,%ebx
+	andl	$768,%ebx
+	leal	2176(%ebp,%ebx,1),%ebp
+	btl	$25,(%eax)
+	jnc	.L005x86
+	movq	(%esi),%mm0
+	movq	8(%esi),%mm4
+	call	_sse_AES_encrypt_compact
+	movl	28(%esp),%esp
+	movl	24(%esp),%esi
+	movq	%mm0,(%esi)
+	movq	%mm4,8(%esi)
+	emms
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	16
+.L005x86:
+	movl	%ebp,24(%esp)
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	call	_x86_AES_encrypt_compact
+	movl	28(%esp),%esp
+	movl	24(%esp),%esi
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	AES_encrypt,.-.L_AES_encrypt_begin
+.type	_x86_AES_decrypt_compact,@function
+.align	16
+_x86_AES_decrypt_compact:
+	movl	%edi,20(%esp)
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+	movl	-128(%ebp),%edi
+	movl	-96(%ebp),%esi
+	movl	-64(%ebp),%edi
+	movl	-32(%ebp),%esi
+	movl	(%ebp),%edi
+	movl	32(%ebp),%esi
+	movl	64(%ebp),%edi
+	movl	96(%ebp),%esi
+.align	16
+.L006loop:
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ebx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%ah,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	andl	$255,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	movzbl	%ch,%ecx
+	movzbl	-128(%ebp,%ecx,1),%ecx
+	shll	$8,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+	shrl	$16,%ebx
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	shrl	$24,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$24,%eax
+	xorl	%eax,%edx
+	movl	%ecx,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%ecx,%ecx,1),%eax
+	subl	%edi,%esi
+	andl	$4278124286,%eax
+	andl	$454761243,%esi
+	xorl	%eax,%esi
+	movl	%esi,%eax
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%eax,%eax,1),%ebx
+	subl	%edi,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	xorl	%ecx,%eax
+	xorl	%ebx,%esi
+	movl	%esi,%ebx
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%ebx,%ebx,1),%ebp
+	subl	%edi,%esi
+	andl	$4278124286,%ebp
+	andl	$454761243,%esi
+	xorl	%ecx,%ebx
+	roll	$8,%ecx
+	xorl	%esi,%ebp
+	xorl	%eax,%ecx
+	xorl	%ebp,%eax
+	roll	$24,%eax
+	xorl	%ebx,%ecx
+	xorl	%ebp,%ebx
+	roll	$16,%ebx
+	xorl	%ebp,%ecx
+	roll	$8,%ebp
+	xorl	%eax,%ecx
+	xorl	%ebx,%ecx
+	movl	4(%esp),%eax
+	xorl	%ebp,%ecx
+	movl	%ecx,12(%esp)
+	movl	%edx,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%edx,%edx,1),%ebx
+	subl	%edi,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	xorl	%ebx,%esi
+	movl	%esi,%ebx
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%ebx,%ebx,1),%ecx
+	subl	%edi,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%edx,%ebx
+	xorl	%ecx,%esi
+	movl	%esi,%ecx
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%ecx,%ecx,1),%ebp
+	subl	%edi,%esi
+	andl	$4278124286,%ebp
+	andl	$454761243,%esi
+	xorl	%edx,%ecx
+	roll	$8,%edx
+	xorl	%esi,%ebp
+	xorl	%ebx,%edx
+	xorl	%ebp,%ebx
+	roll	$24,%ebx
+	xorl	%ecx,%edx
+	xorl	%ebp,%ecx
+	roll	$16,%ecx
+	xorl	%ebp,%edx
+	roll	$8,%ebp
+	xorl	%ebx,%edx
+	xorl	%ecx,%edx
+	movl	8(%esp),%ebx
+	xorl	%ebp,%edx
+	movl	%edx,16(%esp)
+	movl	%eax,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%eax,%eax,1),%ecx
+	subl	%edi,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%ecx,%esi
+	movl	%esi,%ecx
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%ecx,%ecx,1),%edx
+	subl	%edi,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	xorl	%eax,%ecx
+	xorl	%edx,%esi
+	movl	%esi,%edx
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%edx,%edx,1),%ebp
+	subl	%edi,%esi
+	andl	$4278124286,%ebp
+	andl	$454761243,%esi
+	xorl	%eax,%edx
+	roll	$8,%eax
+	xorl	%esi,%ebp
+	xorl	%ecx,%eax
+	xorl	%ebp,%ecx
+	roll	$24,%ecx
+	xorl	%edx,%eax
+	xorl	%ebp,%edx
+	roll	$16,%edx
+	xorl	%ebp,%eax
+	roll	$8,%ebp
+	xorl	%ecx,%eax
+	xorl	%edx,%eax
+	xorl	%ebp,%eax
+	movl	%ebx,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%ebx,%ebx,1),%ecx
+	subl	%edi,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%ecx,%esi
+	movl	%esi,%ecx
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%ecx,%ecx,1),%edx
+	subl	%edi,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	xorl	%ebx,%ecx
+	xorl	%edx,%esi
+	movl	%esi,%edx
+	andl	$2155905152,%esi
+	movl	%esi,%edi
+	shrl	$7,%edi
+	leal	(%edx,%edx,1),%ebp
+	subl	%edi,%esi
+	andl	$4278124286,%ebp
+	andl	$454761243,%esi
+	xorl	%ebx,%edx
+	roll	$8,%ebx
+	xorl	%esi,%ebp
+	xorl	%ecx,%ebx
+	xorl	%ebp,%ecx
+	roll	$24,%ecx
+	xorl	%edx,%ebx
+	xorl	%ebp,%edx
+	roll	$16,%edx
+	xorl	%ebp,%ebx
+	roll	$8,%ebp
+	xorl	%ecx,%ebx
+	xorl	%edx,%ebx
+	movl	12(%esp),%ecx
+	xorl	%ebp,%ebx
+	movl	16(%esp),%edx
+	movl	20(%esp),%edi
+	movl	28(%esp),%ebp
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	cmpl	24(%esp),%edi
+	movl	%edi,20(%esp)
+	jb	.L006loop
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ebx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%ah,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	-128(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	movzbl	%ch,%ecx
+	movzbl	-128(%ebp,%ecx,1),%ecx
+	shll	$8,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+	shrl	$16,%ebx
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	shrl	$24,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$24,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	xorl	16(%edi),%eax
+	xorl	20(%edi),%ebx
+	xorl	24(%edi),%ecx
+	xorl	28(%edi),%edx
+	ret
+.size	_x86_AES_decrypt_compact,.-_x86_AES_decrypt_compact
+.type	_sse_AES_decrypt_compact,@function
+.align	16
+_sse_AES_decrypt_compact:
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+	movl	$454761243,%eax
+	movl	%eax,8(%esp)
+	movl	%eax,12(%esp)
+	movl	-128(%ebp),%eax
+	movl	-96(%ebp),%ebx
+	movl	-64(%ebp),%ecx
+	movl	-32(%ebp),%edx
+	movl	(%ebp),%eax
+	movl	32(%ebp),%ebx
+	movl	64(%ebp),%ecx
+	movl	96(%ebp),%edx
+.align	16
+.L007loop:
+	pshufw	$12,%mm0,%mm1
+	movd	%mm1,%eax
+	pshufw	$9,%mm4,%mm5
+	movzbl	%al,%esi
+	movzbl	-128(%ebp,%esi,1),%ecx
+	movd	%mm5,%ebx
+	movzbl	%ah,%edx
+	movzbl	-128(%ebp,%edx,1),%edx
+	shll	$8,%edx
+	pshufw	$6,%mm0,%mm2
+	movzbl	%bl,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$16,%esi
+	orl	%esi,%ecx
+	shrl	$16,%eax
+	movzbl	%bh,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$24,%esi
+	orl	%esi,%edx
+	shrl	$16,%ebx
+	pshufw	$3,%mm4,%mm6
+	movzbl	%ah,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$24,%esi
+	orl	%esi,%ecx
+	movzbl	%bh,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$8,%esi
+	orl	%esi,%ecx
+	movd	%ecx,%mm0
+	movzbl	%al,%esi
+	movd	%mm2,%eax
+	movzbl	-128(%ebp,%esi,1),%ecx
+	shll	$16,%ecx
+	movzbl	%bl,%esi
+	movd	%mm6,%ebx
+	movzbl	-128(%ebp,%esi,1),%esi
+	orl	%esi,%ecx
+	movzbl	%al,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	orl	%esi,%edx
+	movzbl	%bl,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$16,%esi
+	orl	%esi,%edx
+	movd	%edx,%mm1
+	movzbl	%ah,%esi
+	movzbl	-128(%ebp,%esi,1),%edx
+	shll	$8,%edx
+	movzbl	%bh,%esi
+	shrl	$16,%eax
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$24,%esi
+	orl	%esi,%edx
+	shrl	$16,%ebx
+	punpckldq	%mm1,%mm0
+	movzbl	%bh,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$8,%esi
+	orl	%esi,%ecx
+	andl	$255,%ebx
+	movzbl	-128(%ebp,%ebx,1),%ebx
+	orl	%ebx,%edx
+	movzbl	%al,%esi
+	movzbl	-128(%ebp,%esi,1),%esi
+	shll	$16,%esi
+	orl	%esi,%edx
+	movd	%edx,%mm4
+	movzbl	%ah,%eax
+	movzbl	-128(%ebp,%eax,1),%eax
+	shll	$24,%eax
+	orl	%eax,%ecx
+	movd	%ecx,%mm5
+	punpckldq	%mm5,%mm4
+	addl	$16,%edi
+	cmpl	24(%esp),%edi
+	ja	.L008out
+	movq	%mm0,%mm3
+	movq	%mm4,%mm7
+	pshufw	$228,%mm0,%mm2
+	pshufw	$228,%mm4,%mm6
+	movq	%mm0,%mm1
+	movq	%mm4,%mm5
+	pshufw	$177,%mm0,%mm0
+	pshufw	$177,%mm4,%mm4
+	pslld	$8,%mm2
+	pslld	$8,%mm6
+	psrld	$8,%mm3
+	psrld	$8,%mm7
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pslld	$16,%mm2
+	pslld	$16,%mm6
+	psrld	$16,%mm3
+	psrld	$16,%mm7
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	movq	8(%esp),%mm3
+	pxor	%mm2,%mm2
+	pxor	%mm6,%mm6
+	pcmpgtb	%mm1,%mm2
+	pcmpgtb	%mm5,%mm6
+	pand	%mm3,%mm2
+	pand	%mm3,%mm6
+	paddb	%mm1,%mm1
+	paddb	%mm5,%mm5
+	pxor	%mm2,%mm1
+	pxor	%mm6,%mm5
+	movq	%mm1,%mm3
+	movq	%mm5,%mm7
+	movq	%mm1,%mm2
+	movq	%mm5,%mm6
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	pslld	$24,%mm3
+	pslld	$24,%mm7
+	psrld	$8,%mm2
+	psrld	$8,%mm6
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	movq	8(%esp),%mm2
+	pxor	%mm3,%mm3
+	pxor	%mm7,%mm7
+	pcmpgtb	%mm1,%mm3
+	pcmpgtb	%mm5,%mm7
+	pand	%mm2,%mm3
+	pand	%mm2,%mm7
+	paddb	%mm1,%mm1
+	paddb	%mm5,%mm5
+	pxor	%mm3,%mm1
+	pxor	%mm7,%mm5
+	pshufw	$177,%mm1,%mm3
+	pshufw	$177,%mm5,%mm7
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pxor	%mm3,%mm3
+	pxor	%mm7,%mm7
+	pcmpgtb	%mm1,%mm3
+	pcmpgtb	%mm5,%mm7
+	pand	%mm2,%mm3
+	pand	%mm2,%mm7
+	paddb	%mm1,%mm1
+	paddb	%mm5,%mm5
+	pxor	%mm3,%mm1
+	pxor	%mm7,%mm5
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	movq	%mm1,%mm3
+	movq	%mm5,%mm7
+	pshufw	$177,%mm1,%mm2
+	pshufw	$177,%mm5,%mm6
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	pslld	$8,%mm1
+	pslld	$8,%mm5
+	psrld	$8,%mm3
+	psrld	$8,%mm7
+	movq	(%edi),%mm2
+	movq	8(%edi),%mm6
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	movl	-128(%ebp),%eax
+	pslld	$16,%mm1
+	pslld	$16,%mm5
+	movl	-64(%ebp),%ebx
+	psrld	$16,%mm3
+	psrld	$16,%mm7
+	movl	(%ebp),%ecx
+	pxor	%mm1,%mm0
+	pxor	%mm5,%mm4
+	movl	64(%ebp),%edx
+	pxor	%mm3,%mm0
+	pxor	%mm7,%mm4
+	pxor	%mm2,%mm0
+	pxor	%mm6,%mm4
+	jmp	.L007loop
+.align	16
+.L008out:
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	ret
+.size	_sse_AES_decrypt_compact,.-_sse_AES_decrypt_compact
+.type	_x86_AES_decrypt,@function
+.align	16
+_x86_AES_decrypt:
+	movl	%edi,20(%esp)
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,24(%esp)
+.align	16
+.L009loop:
+	movl	%eax,%esi
+	andl	$255,%esi
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%dh,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%ebx,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+	movl	%esi,4(%esp)
+
+	movl	%ebx,%esi
+	andl	$255,%esi
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%ah,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%ecx,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+	movl	%esi,8(%esp)
+
+	movl	%ecx,%esi
+	andl	$255,%esi
+	movl	(%ebp,%esi,8),%esi
+	movzbl	%bh,%edi
+	xorl	3(%ebp,%edi,8),%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	xorl	2(%ebp,%edi,8),%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	xorl	1(%ebp,%edi,8),%esi
+
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movl	(%ebp,%edx,8),%edx
+	movzbl	%ch,%ecx
+	xorl	3(%ebp,%ecx,8),%edx
+	movl	%esi,%ecx
+	shrl	$16,%ebx
+	andl	$255,%ebx
+	xorl	2(%ebp,%ebx,8),%edx
+	movl	8(%esp),%ebx
+	shrl	$24,%eax
+	xorl	1(%ebp,%eax,8),%edx
+	movl	4(%esp),%eax
+
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	cmpl	24(%esp),%edi
+	movl	%edi,20(%esp)
+	jb	.L009loop
+	leal	2176(%ebp),%ebp
+	movl	-128(%ebp),%edi
+	movl	-96(%ebp),%esi
+	movl	-64(%ebp),%edi
+	movl	-32(%ebp),%esi
+	movl	(%ebp),%edi
+	movl	32(%ebp),%esi
+	movl	64(%ebp),%edi
+	movl	96(%ebp),%esi
+	leal	-128(%ebp),%ebp
+	movl	%eax,%esi
+	andl	$255,%esi
+	movzbl	(%ebp,%esi,1),%esi
+	movzbl	%dh,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ebx,%edi
+	shrl	$24,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,4(%esp)
+	movl	%ebx,%esi
+	andl	$255,%esi
+	movzbl	(%ebp,%esi,1),%esi
+	movzbl	%ah,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%ecx,%edi
+	shrl	$24,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	%esi,8(%esp)
+	movl	%ecx,%esi
+	andl	$255,%esi
+	movzbl	(%ebp,%esi,1),%esi
+	movzbl	%bh,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$8,%edi
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	shrl	$16,%edi
+	andl	$255,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$16,%edi
+	xorl	%edi,%esi
+	movl	%edx,%edi
+	shrl	$24,%edi
+	movzbl	(%ebp,%edi,1),%edi
+	shll	$24,%edi
+	xorl	%edi,%esi
+	movl	20(%esp),%edi
+	andl	$255,%edx
+	movzbl	(%ebp,%edx,1),%edx
+	movzbl	%ch,%ecx
+	movzbl	(%ebp,%ecx,1),%ecx
+	shll	$8,%ecx
+	xorl	%ecx,%edx
+	movl	%esi,%ecx
+	shrl	$16,%ebx
+	andl	$255,%ebx
+	movzbl	(%ebp,%ebx,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%edx
+	movl	8(%esp),%ebx
+	shrl	$24,%eax
+	movzbl	(%ebp,%eax,1),%eax
+	shll	$24,%eax
+	xorl	%eax,%edx
+	movl	4(%esp),%eax
+	leal	-2048(%ebp),%ebp
+	addl	$16,%edi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	ret
+.align	64
+.LAES_Td:
+.long	1353184337,1353184337
+.long	1399144830,1399144830
+.long	3282310938,3282310938
+.long	2522752826,2522752826
+.long	3412831035,3412831035
+.long	4047871263,4047871263
+.long	2874735276,2874735276
+.long	2466505547,2466505547
+.long	1442459680,1442459680
+.long	4134368941,4134368941
+.long	2440481928,2440481928
+.long	625738485,625738485
+.long	4242007375,4242007375
+.long	3620416197,3620416197
+.long	2151953702,2151953702
+.long	2409849525,2409849525
+.long	1230680542,1230680542
+.long	1729870373,1729870373
+.long	2551114309,2551114309
+.long	3787521629,3787521629
+.long	41234371,41234371
+.long	317738113,317738113
+.long	2744600205,2744600205
+.long	3338261355,3338261355
+.long	3881799427,3881799427
+.long	2510066197,2510066197
+.long	3950669247,3950669247
+.long	3663286933,3663286933
+.long	763608788,763608788
+.long	3542185048,3542185048
+.long	694804553,694804553
+.long	1154009486,1154009486
+.long	1787413109,1787413109
+.long	2021232372,2021232372
+.long	1799248025,1799248025
+.long	3715217703,3715217703
+.long	3058688446,3058688446
+.long	397248752,397248752
+.long	1722556617,1722556617
+.long	3023752829,3023752829
+.long	407560035,407560035
+.long	2184256229,2184256229
+.long	1613975959,1613975959
+.long	1165972322,1165972322
+.long	3765920945,3765920945
+.long	2226023355,2226023355
+.long	480281086,480281086
+.long	2485848313,2485848313
+.long	1483229296,1483229296
+.long	436028815,436028815
+.long	2272059028,2272059028
+.long	3086515026,3086515026
+.long	601060267,601060267
+.long	3791801202,3791801202
+.long	1468997603,1468997603
+.long	715871590,715871590
+.long	120122290,120122290
+.long	63092015,63092015
+.long	2591802758,2591802758
+.long	2768779219,2768779219
+.long	4068943920,4068943920
+.long	2997206819,2997206819
+.long	3127509762,3127509762
+.long	1552029421,1552029421
+.long	723308426,723308426
+.long	2461301159,2461301159
+.long	4042393587,4042393587
+.long	2715969870,2715969870
+.long	3455375973,3455375973
+.long	3586000134,3586000134
+.long	526529745,526529745
+.long	2331944644,2331944644
+.long	2639474228,2639474228
+.long	2689987490,2689987490
+.long	853641733,853641733
+.long	1978398372,1978398372
+.long	971801355,971801355
+.long	2867814464,2867814464
+.long	111112542,111112542
+.long	1360031421,1360031421
+.long	4186579262,4186579262
+.long	1023860118,1023860118
+.long	2919579357,2919579357
+.long	1186850381,1186850381
+.long	3045938321,3045938321
+.long	90031217,90031217
+.long	1876166148,1876166148
+.long	4279586912,4279586912
+.long	620468249,620468249
+.long	2548678102,2548678102
+.long	3426959497,3426959497
+.long	2006899047,2006899047
+.long	3175278768,3175278768
+.long	2290845959,2290845959
+.long	945494503,945494503
+.long	3689859193,3689859193
+.long	1191869601,1191869601
+.long	3910091388,3910091388
+.long	3374220536,3374220536
+.long	0,0
+.long	2206629897,2206629897
+.long	1223502642,1223502642
+.long	2893025566,2893025566
+.long	1316117100,1316117100
+.long	4227796733,4227796733
+.long	1446544655,1446544655
+.long	517320253,517320253
+.long	658058550,658058550
+.long	1691946762,1691946762
+.long	564550760,564550760
+.long	3511966619,3511966619
+.long	976107044,976107044
+.long	2976320012,2976320012
+.long	266819475,266819475
+.long	3533106868,3533106868
+.long	2660342555,2660342555
+.long	1338359936,1338359936
+.long	2720062561,2720062561
+.long	1766553434,1766553434
+.long	370807324,370807324
+.long	179999714,179999714
+.long	3844776128,3844776128
+.long	1138762300,1138762300
+.long	488053522,488053522
+.long	185403662,185403662
+.long	2915535858,2915535858
+.long	3114841645,3114841645
+.long	3366526484,3366526484
+.long	2233069911,2233069911
+.long	1275557295,1275557295
+.long	3151862254,3151862254
+.long	4250959779,4250959779
+.long	2670068215,2670068215
+.long	3170202204,3170202204
+.long	3309004356,3309004356
+.long	880737115,880737115
+.long	1982415755,1982415755
+.long	3703972811,3703972811
+.long	1761406390,1761406390
+.long	1676797112,1676797112
+.long	3403428311,3403428311
+.long	277177154,277177154
+.long	1076008723,1076008723
+.long	538035844,538035844
+.long	2099530373,2099530373
+.long	4164795346,4164795346
+.long	288553390,288553390
+.long	1839278535,1839278535
+.long	1261411869,1261411869
+.long	4080055004,4080055004
+.long	3964831245,3964831245
+.long	3504587127,3504587127
+.long	1813426987,1813426987
+.long	2579067049,2579067049
+.long	4199060497,4199060497
+.long	577038663,577038663
+.long	3297574056,3297574056
+.long	440397984,440397984
+.long	3626794326,3626794326
+.long	4019204898,4019204898
+.long	3343796615,3343796615
+.long	3251714265,3251714265
+.long	4272081548,4272081548
+.long	906744984,906744984
+.long	3481400742,3481400742
+.long	685669029,685669029
+.long	646887386,646887386
+.long	2764025151,2764025151
+.long	3835509292,3835509292
+.long	227702864,227702864
+.long	2613862250,2613862250
+.long	1648787028,1648787028
+.long	3256061430,3256061430
+.long	3904428176,3904428176
+.long	1593260334,1593260334
+.long	4121936770,4121936770
+.long	3196083615,3196083615
+.long	2090061929,2090061929
+.long	2838353263,2838353263
+.long	3004310991,3004310991
+.long	999926984,999926984
+.long	2809993232,2809993232
+.long	1852021992,1852021992
+.long	2075868123,2075868123
+.long	158869197,158869197
+.long	4095236462,4095236462
+.long	28809964,28809964
+.long	2828685187,2828685187
+.long	1701746150,1701746150
+.long	2129067946,2129067946
+.long	147831841,147831841
+.long	3873969647,3873969647
+.long	3650873274,3650873274
+.long	3459673930,3459673930
+.long	3557400554,3557400554
+.long	3598495785,3598495785
+.long	2947720241,2947720241
+.long	824393514,824393514
+.long	815048134,815048134
+.long	3227951669,3227951669
+.long	935087732,935087732
+.long	2798289660,2798289660
+.long	2966458592,2966458592
+.long	366520115,366520115
+.long	1251476721,1251476721
+.long	4158319681,4158319681
+.long	240176511,240176511
+.long	804688151,804688151
+.long	2379631990,2379631990
+.long	1303441219,1303441219
+.long	1414376140,1414376140
+.long	3741619940,3741619940
+.long	3820343710,3820343710
+.long	461924940,461924940
+.long	3089050817,3089050817
+.long	2136040774,2136040774
+.long	82468509,82468509
+.long	1563790337,1563790337
+.long	1937016826,1937016826
+.long	776014843,776014843
+.long	1511876531,1511876531
+.long	1389550482,1389550482
+.long	861278441,861278441
+.long	323475053,323475053
+.long	2355222426,2355222426
+.long	2047648055,2047648055
+.long	2383738969,2383738969
+.long	2302415851,2302415851
+.long	3995576782,3995576782
+.long	902390199,902390199
+.long	3991215329,3991215329
+.long	1018251130,1018251130
+.long	1507840668,1507840668
+.long	1064563285,1064563285
+.long	2043548696,2043548696
+.long	3208103795,3208103795
+.long	3939366739,3939366739
+.long	1537932639,1537932639
+.long	342834655,342834655
+.long	2262516856,2262516856
+.long	2180231114,2180231114
+.long	1053059257,1053059257
+.long	741614648,741614648
+.long	1598071746,1598071746
+.long	1925389590,1925389590
+.long	203809468,203809468
+.long	2336832552,2336832552
+.long	1100287487,1100287487
+.long	1895934009,1895934009
+.long	3736275976,3736275976
+.long	2632234200,2632234200
+.long	2428589668,2428589668
+.long	1636092795,1636092795
+.long	1890988757,1890988757
+.long	1952214088,1952214088
+.long	1113045200,1113045200
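+/* Td4: the inverse S-box, replicated four times (256 bytes apiece) so
+   callers can select a copy at a stack-dependent offset, apparently as
+   a cache-timing hedge (see the leal 2176(%ebp,%ebx,1) selections). */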
+.byte	82,9,106,213,48,54,165,56
+.byte	191,64,163,158,129,243,215,251
+.byte	124,227,57,130,155,47,255,135
+.byte	52,142,67,68,196,222,233,203
+.byte	84,123,148,50,166,194,35,61
+.byte	238,76,149,11,66,250,195,78
+.byte	8,46,161,102,40,217,36,178
+.byte	118,91,162,73,109,139,209,37
+.byte	114,248,246,100,134,104,152,22
+.byte	212,164,92,204,93,101,182,146
+.byte	108,112,72,80,253,237,185,218
+.byte	94,21,70,87,167,141,157,132
+.byte	144,216,171,0,140,188,211,10
+.byte	247,228,88,5,184,179,69,6
+.byte	208,44,30,143,202,63,15,2
+.byte	193,175,189,3,1,19,138,107
+.byte	58,145,17,65,79,103,220,234
+.byte	151,242,207,206,240,180,230,115
+.byte	150,172,116,34,231,173,53,133
+.byte	226,249,55,232,28,117,223,110
+.byte	71,241,26,113,29,41,197,137
+.byte	111,183,98,14,170,24,190,27
+.byte	252,86,62,75,198,210,121,32
+.byte	154,219,192,254,120,205,90,244
+.byte	31,221,168,51,136,7,199,49
+.byte	177,18,16,89,39,128,236,95
+.byte	96,81,127,169,25,181,74,13
+.byte	45,229,122,159,147,201,156,239
+.byte	160,224,59,77,174,42,245,176
+.byte	200,235,187,60,131,83,153,97
+.byte	23,43,4,126,186,119,214,38
+.byte	225,105,20,99,85,33,12,125
+.byte	82,9,106,213,48,54,165,56
+.byte	191,64,163,158,129,243,215,251
+.byte	124,227,57,130,155,47,255,135
+.byte	52,142,67,68,196,222,233,203
+.byte	84,123,148,50,166,194,35,61
+.byte	238,76,149,11,66,250,195,78
+.byte	8,46,161,102,40,217,36,178
+.byte	118,91,162,73,109,139,209,37
+.byte	114,248,246,100,134,104,152,22
+.byte	212,164,92,204,93,101,182,146
+.byte	108,112,72,80,253,237,185,218
+.byte	94,21,70,87,167,141,157,132
+.byte	144,216,171,0,140,188,211,10
+.byte	247,228,88,5,184,179,69,6
+.byte	208,44,30,143,202,63,15,2
+.byte	193,175,189,3,1,19,138,107
+.byte	58,145,17,65,79,103,220,234
+.byte	151,242,207,206,240,180,230,115
+.byte	150,172,116,34,231,173,53,133
+.byte	226,249,55,232,28,117,223,110
+.byte	71,241,26,113,29,41,197,137
+.byte	111,183,98,14,170,24,190,27
+.byte	252,86,62,75,198,210,121,32
+.byte	154,219,192,254,120,205,90,244
+.byte	31,221,168,51,136,7,199,49
+.byte	177,18,16,89,39,128,236,95
+.byte	96,81,127,169,25,181,74,13
+.byte	45,229,122,159,147,201,156,239
+.byte	160,224,59,77,174,42,245,176
+.byte	200,235,187,60,131,83,153,97
+.byte	23,43,4,126,186,119,214,38
+.byte	225,105,20,99,85,33,12,125
+.byte	82,9,106,213,48,54,165,56
+.byte	191,64,163,158,129,243,215,251
+.byte	124,227,57,130,155,47,255,135
+.byte	52,142,67,68,196,222,233,203
+.byte	84,123,148,50,166,194,35,61
+.byte	238,76,149,11,66,250,195,78
+.byte	8,46,161,102,40,217,36,178
+.byte	118,91,162,73,109,139,209,37
+.byte	114,248,246,100,134,104,152,22
+.byte	212,164,92,204,93,101,182,146
+.byte	108,112,72,80,253,237,185,218
+.byte	94,21,70,87,167,141,157,132
+.byte	144,216,171,0,140,188,211,10
+.byte	247,228,88,5,184,179,69,6
+.byte	208,44,30,143,202,63,15,2
+.byte	193,175,189,3,1,19,138,107
+.byte	58,145,17,65,79,103,220,234
+.byte	151,242,207,206,240,180,230,115
+.byte	150,172,116,34,231,173,53,133
+.byte	226,249,55,232,28,117,223,110
+.byte	71,241,26,113,29,41,197,137
+.byte	111,183,98,14,170,24,190,27
+.byte	252,86,62,75,198,210,121,32
+.byte	154,219,192,254,120,205,90,244
+.byte	31,221,168,51,136,7,199,49
+.byte	177,18,16,89,39,128,236,95
+.byte	96,81,127,169,25,181,74,13
+.byte	45,229,122,159,147,201,156,239
+.byte	160,224,59,77,174,42,245,176
+.byte	200,235,187,60,131,83,153,97
+.byte	23,43,4,126,186,119,214,38
+.byte	225,105,20,99,85,33,12,125
+.byte	82,9,106,213,48,54,165,56
+.byte	191,64,163,158,129,243,215,251
+.byte	124,227,57,130,155,47,255,135
+.byte	52,142,67,68,196,222,233,203
+.byte	84,123,148,50,166,194,35,61
+.byte	238,76,149,11,66,250,195,78
+.byte	8,46,161,102,40,217,36,178
+.byte	118,91,162,73,109,139,209,37
+.byte	114,248,246,100,134,104,152,22
+.byte	212,164,92,204,93,101,182,146
+.byte	108,112,72,80,253,237,185,218
+.byte	94,21,70,87,167,141,157,132
+.byte	144,216,171,0,140,188,211,10
+.byte	247,228,88,5,184,179,69,6
+.byte	208,44,30,143,202,63,15,2
+.byte	193,175,189,3,1,19,138,107
+.byte	58,145,17,65,79,103,220,234
+.byte	151,242,207,206,240,180,230,115
+.byte	150,172,116,34,231,173,53,133
+.byte	226,249,55,232,28,117,223,110
+.byte	71,241,26,113,29,41,197,137
+.byte	111,183,98,14,170,24,190,27
+.byte	252,86,62,75,198,210,121,32
+.byte	154,219,192,254,120,205,90,244
+.byte	31,221,168,51,136,7,199,49
+.byte	177,18,16,89,39,128,236,95
+.byte	96,81,127,169,25,181,74,13
+.byte	45,229,122,159,147,201,156,239
+.byte	160,224,59,77,174,42,245,176
+.byte	200,235,187,60,131,83,153,97
+.byte	23,43,4,126,186,119,214,38
+.byte	225,105,20,99,85,33,12,125
+.size	_x86_AES_decrypt,.-_x86_AES_decrypt
+.globl	AES_decrypt
+.type	AES_decrypt, at function
+.align	16
+AES_decrypt:
+.L_AES_decrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	28(%esp),%edi
+	movl	%esp,%eax
+	subl	$36,%esp
+	andl	$-64,%esp
+	leal	-127(%edi),%ebx
+	subl	%esp,%ebx
+	negl	%ebx
+	andl	$960,%ebx
+	subl	%ebx,%esp
+	addl	$4,%esp
+	movl	%eax,28(%esp)
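+/* The prologue above carves out a 64-byte-aligned frame and slides it
+   by a key-address-dependent amount (up to 960 bytes), presumably so
+   the frame and the key schedule do not collide in cache; the original
+   %esp is kept at 28(%esp) for the epilogue. */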
+	call	.L010pic_point
+.L010pic_point:
+	popl	%ebp
+	leal	OPENSSL_ia32cap_P,%eax
+	leal	.LAES_Td-.L010pic_point(%ebp),%ebp
+	leal	764(%esp),%ebx
+	subl	%ebp,%ebx
+	andl	$768,%ebx
+	leal	2176(%ebp,%ebx,1),%ebp
+	btl	$25,(%eax)
+	jnc	.L011x86
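+/* Bit 25 of OPENSSL_ia32cap_P is the CPUID SSE flag; when set, the
+   MMX-based compact path below is used. */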
+	movq	(%esi),%mm0
+	movq	8(%esi),%mm4
+	call	_sse_AES_decrypt_compact
+	movl	28(%esp),%esp
+	movl	24(%esp),%esi
+	movq	%mm0,(%esi)
+	movq	%mm4,8(%esi)
+	emms
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	16
+.L011x86:
+	movl	%ebp,24(%esp)
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	call	_x86_AES_decrypt_compact
+	movl	28(%esp),%esp
+	movl	24(%esp),%esi
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	AES_decrypt,.-.L_AES_decrypt_begin
+.globl	AES_cbc_encrypt
+.type	AES_cbc_encrypt, at function
+.align	16
+AES_cbc_encrypt:
+.L_AES_cbc_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%ecx
+	cmpl	$0,%ecx
+	je	.L012drop_out
+	call	.L013pic_point
+.L013pic_point:
+	popl	%ebp
+	leal	OPENSSL_ia32cap_P,%eax
+	cmpl	$0,40(%esp)
+	leal	.LAES_Te-.L013pic_point(%ebp),%ebp
+	jne	.L014picked_te
+	leal	.LAES_Td-.LAES_Te(%ebp),%ebp
+.L014picked_te:
+	pushfl
+	cld
+	cmpl	$512,%ecx
+	jb	.L015slow_way
+	testl	$15,%ecx
+	jnz	.L015slow_way
+	btl	$28,(%eax)
+	jc	.L015slow_way
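+/* Bit 28 of OPENSSL_ia32cap_P is the CPUID HTT flag: hyper-threaded
+   CPUs are steered to the compact path, presumably to limit
+   cache-timing leakage from the large lookup tables. */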
+	leal	-324(%esp),%esi
+	andl	$-64,%esi
+	movl	%ebp,%eax
+	leal	2304(%ebp),%ebx
+	movl	%esi,%edx
+	andl	$4095,%eax
+	andl	$4095,%ebx
+	andl	$4095,%edx
+	cmpl	%ebx,%edx
+	jb	.L016tbl_break_out
+	subl	%ebx,%edx
+	subl	%edx,%esi
+	jmp	.L017tbl_ok
+.align	4
+.L016tbl_break_out:
+	subl	%eax,%edx
+	andl	$4095,%edx
+	addl	$384,%edx
+	subl	%edx,%esi
+.align	4
+.L017tbl_ok:
+	leal	24(%esp),%edx
+	xchgl	%esi,%esp
+	addl	$4,%esp
+	movl	%ebp,24(%esp)
+	movl	%esi,28(%esp)
+	movl	(%edx),%eax
+	movl	4(%edx),%ebx
+	movl	12(%edx),%edi
+	movl	16(%edx),%esi
+	movl	20(%edx),%edx
+	movl	%eax,32(%esp)
+	movl	%ebx,36(%esp)
+	movl	%ecx,40(%esp)
+	movl	%edi,44(%esp)
+	movl	%esi,48(%esp)
+	movl	$0,316(%esp)
+	movl	%edi,%ebx
+	movl	$61,%ecx
+	subl	%ebp,%ebx
+	movl	%edi,%esi
+	andl	$4095,%ebx
+	leal	76(%esp),%edi
+	cmpl	$2304,%ebx
+	jb	.L018do_copy
+	cmpl	$3852,%ebx
+	jb	.L019skip_copy
+.align	4
+.L018do_copy:
+	movl	%edi,44(%esp)
+.long	2784229001
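+/* 0xA5F3F689 == movl %esi,%esi; rep movsl: copy the key schedule
+   (61 dwords, set up above) into the stack frame. */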
+.L019skip_copy:
+	movl	$16,%edi
+.align	4
+.L020prefetch_tbl:
+	movl	(%ebp),%eax
+	movl	32(%ebp),%ebx
+	movl	64(%ebp),%ecx
+	movl	96(%ebp),%esi
+	leal	128(%ebp),%ebp
+	subl	$1,%edi
+	jnz	.L020prefetch_tbl
+	subl	$2048,%ebp
+	movl	32(%esp),%esi
+	movl	48(%esp),%edi
+	cmpl	$0,%edx
+	je	.L021fast_decrypt
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+.align	16
+.L022fast_enc_loop:
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	xorl	(%esi),%eax
+	xorl	4(%esi),%ebx
+	xorl	8(%esi),%ecx
+	xorl	12(%esi),%edx
+	movl	44(%esp),%edi
+	call	_x86_AES_encrypt
+	movl	32(%esp),%esi
+	movl	36(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	leal	16(%esi),%esi
+	movl	40(%esp),%ecx
+	movl	%esi,32(%esp)
+	leal	16(%edi),%edx
+	movl	%edx,36(%esp)
+	subl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jnz	.L022fast_enc_loop
+	movl	48(%esp),%esi
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	cmpl	$0,316(%esp)
+	movl	44(%esp),%edi
+	je	.L023skip_ezero
+	movl	$60,%ecx
+	xorl	%eax,%eax
+.align	4
+.long	2884892297
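+/* 0xABF3F689 == movl %esi,%esi; rep stosl: wipe the 60-dword on-stack
+   key copy with the zeroed %eax. */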
+.L023skip_ezero:
+	movl	28(%esp),%esp
+	popfl
+.L012drop_out:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L021fast_decrypt:
+	cmpl	36(%esp),%esi
+	je	.L024fast_dec_in_place
+	movl	%edi,52(%esp)
+.align	4
+.align	16
+.L025fast_dec_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	44(%esp),%edi
+	call	_x86_AES_decrypt
+	movl	52(%esp),%edi
+	movl	40(%esp),%esi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	36(%esp),%edi
+	movl	32(%esp),%esi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	40(%esp),%ecx
+	movl	%esi,52(%esp)
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	leal	16(%edi),%edi
+	movl	%edi,36(%esp)
+	subl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jnz	.L025fast_dec_loop
+	movl	52(%esp),%edi
+	movl	48(%esp),%esi
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	jmp	.L026fast_dec_out
+.align	16
+.L024fast_dec_in_place:
+.L027fast_dec_in_place_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	leal	60(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	44(%esp),%edi
+	call	_x86_AES_decrypt
+	movl	48(%esp),%edi
+	movl	36(%esp),%esi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	leal	16(%esi),%esi
+	movl	%esi,36(%esp)
+	leal	60(%esp),%esi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	32(%esp),%esi
+	movl	40(%esp),%ecx
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	subl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jnz	.L027fast_dec_in_place_loop
+.align	4
+.L026fast_dec_out:
+	cmpl	$0,316(%esp)
+	movl	44(%esp),%edi
+	je	.L028skip_dzero
+	movl	$60,%ecx
+	xorl	%eax,%eax
+.align	4
+.long	2884892297
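+/* Same rep stosl as in the encrypt epilogue: wipe the on-stack key
+   copy. */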
+.L028skip_dzero:
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L015slow_way:
+	movl	(%eax),%eax
+	movl	36(%esp),%edi
+	leal	-80(%esp),%esi
+	andl	$-64,%esi
+	leal	-143(%edi),%ebx
+	subl	%esi,%ebx
+	negl	%ebx
+	andl	$960,%ebx
+	subl	%ebx,%esi
+	leal	768(%esi),%ebx
+	subl	%ebp,%ebx
+	andl	$768,%ebx
+	leal	2176(%ebp,%ebx,1),%ebp
+	leal	24(%esp),%edx
+	xchgl	%esi,%esp
+	addl	$4,%esp
+	movl	%ebp,24(%esp)
+	movl	%esi,28(%esp)
+	movl	%eax,52(%esp)
+	movl	(%edx),%eax
+	movl	4(%edx),%ebx
+	movl	16(%edx),%esi
+	movl	20(%edx),%edx
+	movl	%eax,32(%esp)
+	movl	%ebx,36(%esp)
+	movl	%ecx,40(%esp)
+	movl	%edi,44(%esp)
+	movl	%esi,48(%esp)
+	movl	%esi,%edi
+	movl	%eax,%esi
+	cmpl	$0,%edx
+	je	.L029slow_decrypt
+	cmpl	$16,%ecx
+	movl	%ebx,%edx
+	jb	.L030slow_enc_tail
+	btl	$25,52(%esp)
+	jnc	.L031slow_enc_x86
+	movq	(%edi),%mm0
+	movq	8(%edi),%mm4
+.align	16
+.L032slow_enc_loop_sse:
+	pxor	(%esi),%mm0
+	pxor	8(%esi),%mm4
+	movl	44(%esp),%edi
+	call	_sse_AES_encrypt_compact
+	movl	32(%esp),%esi
+	movl	36(%esp),%edi
+	movl	40(%esp),%ecx
+	movq	%mm0,(%edi)
+	movq	%mm4,8(%edi)
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	leal	16(%edi),%edx
+	movl	%edx,36(%esp)
+	subl	$16,%ecx
+	cmpl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jae	.L032slow_enc_loop_sse
+	testl	$15,%ecx
+	jnz	.L030slow_enc_tail
+	movl	48(%esp),%esi
+	movq	%mm0,(%esi)
+	movq	%mm4,8(%esi)
+	emms
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L031slow_enc_x86:
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+.align	4
+.L033slow_enc_loop_x86:
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	xorl	(%esi),%eax
+	xorl	4(%esi),%ebx
+	xorl	8(%esi),%ecx
+	xorl	12(%esi),%edx
+	movl	44(%esp),%edi
+	call	_x86_AES_encrypt_compact
+	movl	32(%esp),%esi
+	movl	36(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	40(%esp),%ecx
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	leal	16(%edi),%edx
+	movl	%edx,36(%esp)
+	subl	$16,%ecx
+	cmpl	$16,%ecx
+	movl	%ecx,40(%esp)
+	jae	.L033slow_enc_loop_x86
+	testl	$15,%ecx
+	jnz	.L030slow_enc_tail
+	movl	48(%esp),%esi
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L030slow_enc_tail:
+	emms
+	movl	%edx,%edi
+	movl	$16,%ebx
+	subl	%ecx,%ebx
+	cmpl	%esi,%edi
+	je	.L034enc_in_place
+.align	4
+.long	2767451785
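+/* 0xA4F3F689 == movl %esi,%esi; rep movsb: copy the %ecx-byte partial
+   tail into the output block. */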
+	jmp	.L035enc_skip_in_place
+.L034enc_in_place:
+	leal	(%edi,%ecx,1),%edi
+.L035enc_skip_in_place:
+	movl	%ebx,%ecx
+	xorl	%eax,%eax
+.align	4
+.long	2868115081
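+/* 0xAAF3F689 == movl %esi,%esi; rep stosb: zero-pad the tail out to a
+   full 16-byte block before the final encryption pass. */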
+	movl	48(%esp),%edi
+	movl	%edx,%esi
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	$16,40(%esp)
+	jmp	.L033slow_enc_loop_x86
+.align	16
+.L029slow_decrypt:
+	btl	$25,52(%esp)
+	jnc	.L036slow_dec_loop_x86
+.align	4
+.L037slow_dec_loop_sse:
+	movq	(%esi),%mm0
+	movq	8(%esi),%mm4
+	movl	44(%esp),%edi
+	call	_sse_AES_decrypt_compact
+	movl	32(%esp),%esi
+	leal	60(%esp),%eax
+	movl	36(%esp),%ebx
+	movl	40(%esp),%ecx
+	movl	48(%esp),%edi
+	movq	(%esi),%mm1
+	movq	8(%esi),%mm5
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm4
+	movq	%mm1,(%edi)
+	movq	%mm5,8(%edi)
+	subl	$16,%ecx
+	jc	.L038slow_dec_partial_sse
+	movq	%mm0,(%ebx)
+	movq	%mm4,8(%ebx)
+	leal	16(%ebx),%ebx
+	movl	%ebx,36(%esp)
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	movl	%ecx,40(%esp)
+	jnz	.L037slow_dec_loop_sse
+	emms
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L038slow_dec_partial_sse:
+	movq	%mm0,(%eax)
+	movq	%mm4,8(%eax)
+	emms
+	addl	$16,%ecx
+	movl	%ebx,%edi
+	movl	%eax,%esi
+.align	4
+.long	2767451785
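+/* rep movsb (as above): copy the partial plaintext from the bounce
+   buffer at 60(%esp) to the output. */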
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L036slow_dec_loop_x86:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	leal	60(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	44(%esp),%edi
+	call	_x86_AES_decrypt_compact
+	movl	48(%esp),%edi
+	movl	40(%esp),%esi
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	subl	$16,%esi
+	jc	.L039slow_dec_partial_x86
+	movl	%esi,40(%esp)
+	movl	36(%esp),%esi
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	leal	16(%esi),%esi
+	movl	%esi,36(%esp)
+	leal	60(%esp),%esi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	32(%esp),%esi
+	leal	16(%esi),%esi
+	movl	%esi,32(%esp)
+	jnz	.L036slow_dec_loop_x86
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	16
+.L039slow_dec_partial_x86:
+	leal	60(%esp),%esi
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	movl	32(%esp),%esi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	40(%esp),%ecx
+	movl	36(%esp),%edi
+	leal	60(%esp),%esi
+.align	4
+.long	2767451785
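+/* rep movsb (as above): flush the partial final block from the bounce
+   buffer to the output. */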
+	movl	28(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	AES_cbc_encrypt,.-.L_AES_cbc_encrypt_begin
+.type	_x86_AES_set_encrypt_key, at function
+.align	16
+_x86_AES_set_encrypt_key:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	24(%esp),%esi
+	movl	32(%esp),%edi
+	testl	$-1,%esi
+	jz	.L040badpointer
+	testl	$-1,%edi
+	jz	.L040badpointer
+	call	.L041pic_point
+.L041pic_point:
+	popl	%ebp
+	leal	.LAES_Te-.L041pic_point(%ebp),%ebp
+	leal	2176(%ebp),%ebp
+	movl	-128(%ebp),%eax
+	movl	-96(%ebp),%ebx
+	movl	-64(%ebp),%ecx
+	movl	-32(%ebp),%edx
+	movl	(%ebp),%eax
+	movl	32(%ebp),%ebx
+	movl	64(%ebp),%ecx
+	movl	96(%ebp),%edx
+	movl	28(%esp),%ecx
+	cmpl	$128,%ecx
+	je	.L04210rounds
+	cmpl	$192,%ecx
+	je	.L04312rounds
+	cmpl	$256,%ecx
+	je	.L04414rounds
+	movl	$-2,%eax
+	jmp	.L045exit
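+/* Return convention: 0 on success, -1 for a NULL pointer
+   (.L040badpointer), -2 for an unsupported key length. */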
+.L04210rounds:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	xorl	%ecx,%ecx
+	jmp	.L04610shortcut
+.align	4
+.L04710loop:
+	movl	(%edi),%eax
+	movl	12(%edi),%edx
+.L04610shortcut:
+	movzbl	%dl,%esi
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shrl	$16,%edx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$8,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+	xorl	896(%ebp,%ecx,4),%eax
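+/* 896(%ebp) is the rcon table; it sits immediately after the four
+   256-byte S-box copies that start at -128(%ebp). */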
+	movl	%eax,16(%edi)
+	xorl	4(%edi),%eax
+	movl	%eax,20(%edi)
+	xorl	8(%edi),%eax
+	movl	%eax,24(%edi)
+	xorl	12(%edi),%eax
+	movl	%eax,28(%edi)
+	incl	%ecx
+	addl	$16,%edi
+	cmpl	$10,%ecx
+	jl	.L04710loop
+	movl	$10,80(%edi)
+	xorl	%eax,%eax
+	jmp	.L045exit
+.L04312rounds:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	16(%esi),%ecx
+	movl	20(%esi),%edx
+	movl	%ecx,16(%edi)
+	movl	%edx,20(%edi)
+	xorl	%ecx,%ecx
+	jmp	.L04812shortcut
+.align	4
+.L04912loop:
+	movl	(%edi),%eax
+	movl	20(%edi),%edx
+.L04812shortcut:
+	movzbl	%dl,%esi
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shrl	$16,%edx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$8,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+	xorl	896(%ebp,%ecx,4),%eax
+	movl	%eax,24(%edi)
+	xorl	4(%edi),%eax
+	movl	%eax,28(%edi)
+	xorl	8(%edi),%eax
+	movl	%eax,32(%edi)
+	xorl	12(%edi),%eax
+	movl	%eax,36(%edi)
+	cmpl	$7,%ecx
+	je	.L05012break
+	incl	%ecx
+	xorl	16(%edi),%eax
+	movl	%eax,40(%edi)
+	xorl	20(%edi),%eax
+	movl	%eax,44(%edi)
+	addl	$24,%edi
+	jmp	.L04912loop
+.L05012break:
+	movl	$12,72(%edi)
+	xorl	%eax,%eax
+	jmp	.L045exit
+.L04414rounds:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	16(%esi),%eax
+	movl	20(%esi),%ebx
+	movl	24(%esi),%ecx
+	movl	28(%esi),%edx
+	movl	%eax,16(%edi)
+	movl	%ebx,20(%edi)
+	movl	%ecx,24(%edi)
+	movl	%edx,28(%edi)
+	xorl	%ecx,%ecx
+	jmp	.L05114shortcut
+.align	4
+.L05214loop:
+	movl	28(%edi),%edx
+.L05114shortcut:
+	movl	(%edi),%eax
+	movzbl	%dl,%esi
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shrl	$16,%edx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$8,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+	xorl	896(%ebp,%ecx,4),%eax
+	movl	%eax,32(%edi)
+	xorl	4(%edi),%eax
+	movl	%eax,36(%edi)
+	xorl	8(%edi),%eax
+	movl	%eax,40(%edi)
+	xorl	12(%edi),%eax
+	movl	%eax,44(%edi)
+	cmpl	$6,%ecx
+	je	.L05314break
+	incl	%ecx
+	movl	%eax,%edx
+	movl	16(%edi),%eax
+	movzbl	%dl,%esi
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shrl	$16,%edx
+	shll	$8,%ebx
+	movzbl	%dl,%esi
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	movzbl	%dh,%esi
+	shll	$16,%ebx
+	xorl	%ebx,%eax
+	movzbl	-128(%ebp,%esi,1),%ebx
+	shll	$24,%ebx
+	xorl	%ebx,%eax
+	movl	%eax,48(%edi)
+	xorl	20(%edi),%eax
+	movl	%eax,52(%edi)
+	xorl	24(%edi),%eax
+	movl	%eax,56(%edi)
+	xorl	28(%edi),%eax
+	movl	%eax,60(%edi)
+	addl	$32,%edi
+	jmp	.L05214loop
+.L05314break:
+	movl	$14,48(%edi)
+	xorl	%eax,%eax
+	jmp	.L045exit
+.L040badpointer:
+	movl	$-1,%eax
+.L045exit:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	_x86_AES_set_encrypt_key,.-_x86_AES_set_encrypt_key
+.globl	private_AES_set_encrypt_key
+.type	private_AES_set_encrypt_key, at function
+.align	16
+private_AES_set_encrypt_key:
+.L_private_AES_set_encrypt_key_begin:
+	call	_x86_AES_set_encrypt_key
+	ret
+.size	private_AES_set_encrypt_key,.-.L_private_AES_set_encrypt_key_begin
+.globl	private_AES_set_decrypt_key
+.type	private_AES_set_decrypt_key, at function
+.align	16
+private_AES_set_decrypt_key:
+.L_private_AES_set_decrypt_key_begin:
+	call	_x86_AES_set_encrypt_key
+	cmpl	$0,%eax
+	je	.L054proceed
+	ret
+.L054proceed:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%esi
+	movl	240(%esi),%ecx
+	leal	(,%ecx,4),%ecx
+	leal	(%esi,%ecx,4),%edi
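+/* %edi = key + 16*Nr, i.e. the last round key; the loop below swaps
+   the round keys end for end. */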
+.align	4
+.L055invert:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	(%edi),%ecx
+	movl	4(%edi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,(%esi)
+	movl	%edx,4(%esi)
+	movl	8(%esi),%eax
+	movl	12(%esi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,8(%edi)
+	movl	%ebx,12(%edi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	addl	$16,%esi
+	subl	$16,%edi
+	cmpl	%edi,%esi
+	jne	.L055invert
+	movl	28(%esp),%edi
+	movl	240(%edi),%esi
+	leal	-2(%esi,%esi,1),%esi
+	leal	(%edi,%esi,8),%esi
+	movl	%esi,28(%esp)
+	movl	16(%edi),%eax
+.align	4
+.L056permute:
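+/* Apply InvMixColumns to each round-key word in place. Each
+   and/shr/lea/sub cluster is a packed GF(2^8) doubling (xtime):
+   0x80808080 extracts the high bits, 0xFEFEFEFE masks the doubled
+   bytes, and 0x1B1B1B1B folds the reduction polynomial back in. */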
+	addl	$16,%edi
+	movl	%eax,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%eax,%eax,1),%ebx
+	subl	%ebp,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	xorl	%ebx,%esi
+	movl	%esi,%ebx
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%ebx,%ebx,1),%ecx
+	subl	%ebp,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%eax,%ebx
+	xorl	%ecx,%esi
+	movl	%esi,%ecx
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%ecx,%ecx,1),%edx
+	xorl	%eax,%ecx
+	subl	%ebp,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	roll	$8,%eax
+	xorl	%esi,%edx
+	movl	4(%edi),%ebp
+	xorl	%ebx,%eax
+	xorl	%edx,%ebx
+	xorl	%ecx,%eax
+	roll	$24,%ebx
+	xorl	%edx,%ecx
+	xorl	%edx,%eax
+	roll	$16,%ecx
+	xorl	%ebx,%eax
+	roll	$8,%edx
+	xorl	%ecx,%eax
+	movl	%ebp,%ebx
+	xorl	%edx,%eax
+	movl	%eax,(%edi)
+	movl	%ebx,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%ebx,%ebx,1),%ecx
+	subl	%ebp,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	xorl	%ecx,%esi
+	movl	%esi,%ecx
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%ecx,%ecx,1),%edx
+	subl	%ebp,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	xorl	%ebx,%ecx
+	xorl	%edx,%esi
+	movl	%esi,%edx
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%edx,%edx,1),%eax
+	xorl	%ebx,%edx
+	subl	%ebp,%esi
+	andl	$4278124286,%eax
+	andl	$454761243,%esi
+	roll	$8,%ebx
+	xorl	%esi,%eax
+	movl	8(%edi),%ebp
+	xorl	%ecx,%ebx
+	xorl	%eax,%ecx
+	xorl	%edx,%ebx
+	roll	$24,%ecx
+	xorl	%eax,%edx
+	xorl	%eax,%ebx
+	roll	$16,%edx
+	xorl	%ecx,%ebx
+	roll	$8,%eax
+	xorl	%edx,%ebx
+	movl	%ebp,%ecx
+	xorl	%eax,%ebx
+	movl	%ebx,4(%edi)
+	movl	%ecx,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%ecx,%ecx,1),%edx
+	subl	%ebp,%esi
+	andl	$4278124286,%edx
+	andl	$454761243,%esi
+	xorl	%edx,%esi
+	movl	%esi,%edx
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%edx,%edx,1),%eax
+	subl	%ebp,%esi
+	andl	$4278124286,%eax
+	andl	$454761243,%esi
+	xorl	%ecx,%edx
+	xorl	%eax,%esi
+	movl	%esi,%eax
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%eax,%eax,1),%ebx
+	xorl	%ecx,%eax
+	subl	%ebp,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	roll	$8,%ecx
+	xorl	%esi,%ebx
+	movl	12(%edi),%ebp
+	xorl	%edx,%ecx
+	xorl	%ebx,%edx
+	xorl	%eax,%ecx
+	roll	$24,%edx
+	xorl	%ebx,%eax
+	xorl	%ebx,%ecx
+	roll	$16,%eax
+	xorl	%edx,%ecx
+	roll	$8,%ebx
+	xorl	%eax,%ecx
+	movl	%ebp,%edx
+	xorl	%ebx,%ecx
+	movl	%ecx,8(%edi)
+	movl	%edx,%esi
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%edx,%edx,1),%eax
+	subl	%ebp,%esi
+	andl	$4278124286,%eax
+	andl	$454761243,%esi
+	xorl	%eax,%esi
+	movl	%esi,%eax
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%eax,%eax,1),%ebx
+	subl	%ebp,%esi
+	andl	$4278124286,%ebx
+	andl	$454761243,%esi
+	xorl	%edx,%eax
+	xorl	%ebx,%esi
+	movl	%esi,%ebx
+	andl	$2155905152,%esi
+	movl	%esi,%ebp
+	shrl	$7,%ebp
+	leal	(%ebx,%ebx,1),%ecx
+	xorl	%edx,%ebx
+	subl	%ebp,%esi
+	andl	$4278124286,%ecx
+	andl	$454761243,%esi
+	roll	$8,%edx
+	xorl	%esi,%ecx
+	movl	16(%edi),%ebp
+	xorl	%eax,%edx
+	xorl	%ecx,%eax
+	xorl	%ebx,%edx
+	roll	$24,%eax
+	xorl	%ecx,%ebx
+	xorl	%ecx,%edx
+	roll	$16,%ebx
+	xorl	%eax,%edx
+	roll	$8,%ecx
+	xorl	%ebx,%edx
+	movl	%ebp,%eax
+	xorl	%ecx,%edx
+	movl	%edx,12(%edi)
+	cmpl	28(%esp),%edi
+	jb	.L056permute
+	xorl	%eax,%eax
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	private_AES_set_decrypt_key,.-.L_private_AES_set_decrypt_key_begin
+.byte	65,69,83,32,102,111,114,32,120,56,54,44,32,67,82,89
+.byte	80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
+.byte	111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
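+/* The .byte strings above spell "AES for x86, CRYPTOGAMS by
+   <appro@openssl.org>". */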
+.comm	OPENSSL_ia32cap_P,8,4
+#endif


Property changes on: trunk/secure/lib/libcrypto/i386/aes-586.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Deleted: trunk/secure/lib/libcrypto/i386/aes-586.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/aes-586.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/aes-586.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,3237 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/aes-586.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"aes-586.s"
-.text
-.type	_x86_AES_encrypt_compact, at function
-.align	16
-_x86_AES_encrypt_compact:
-	movl	%edi,20(%esp)
-	xorl	(%edi),%eax
-	xorl	4(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	12(%edi),%edx
-	movl	240(%edi),%esi
-	leal	-2(%esi,%esi,1),%esi
-	leal	(%edi,%esi,8),%esi
-	movl	%esi,24(%esp)
-	movl	-128(%ebp),%edi
-	movl	-96(%ebp),%esi
-	movl	-64(%ebp),%edi
-	movl	-32(%ebp),%esi
-	movl	(%ebp),%edi
-	movl	32(%ebp),%esi
-	movl	64(%ebp),%edi
-	movl	96(%ebp),%esi
-.align	16
-.L000loop:
-	movl	%eax,%esi
-	andl	$255,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	movzbl	%bh,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$8,%edi
-	xorl	%edi,%esi
-	movl	%ecx,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$16,%edi
-	xorl	%edi,%esi
-	movl	%edx,%edi
-	shrl	$24,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$24,%edi
-	xorl	%edi,%esi
-	movl	%esi,4(%esp)
-
-	movl	%ebx,%esi
-	andl	$255,%esi
-	shrl	$16,%ebx
-	movzbl	-128(%ebp,%esi,1),%esi
-	movzbl	%ch,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$8,%edi
-	xorl	%edi,%esi
-	movl	%edx,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$16,%edi
-	xorl	%edi,%esi
-	movl	%eax,%edi
-	shrl	$24,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$24,%edi
-	xorl	%edi,%esi
-	movl	%esi,8(%esp)
-
-	movl	%ecx,%esi
-	andl	$255,%esi
-	shrl	$24,%ecx
-	movzbl	-128(%ebp,%esi,1),%esi
-	movzbl	%dh,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$8,%edi
-	xorl	%edi,%esi
-	movl	%eax,%edi
-	shrl	$16,%edi
-	andl	$255,%edx
-	andl	$255,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$16,%edi
-	xorl	%edi,%esi
-	movzbl	%bh,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$24,%edi
-	xorl	%edi,%esi
-
-	andl	$255,%edx
-	movzbl	-128(%ebp,%edx,1),%edx
-	movzbl	%ah,%eax
-	movzbl	-128(%ebp,%eax,1),%eax
-	shll	$8,%eax
-	xorl	%eax,%edx
-	movl	4(%esp),%eax
-	andl	$255,%ebx
-	movzbl	-128(%ebp,%ebx,1),%ebx
-	shll	$16,%ebx
-	xorl	%ebx,%edx
-	movl	8(%esp),%ebx
-	movzbl	-128(%ebp,%ecx,1),%ecx
-	shll	$24,%ecx
-	xorl	%ecx,%edx
-	movl	%esi,%ecx
-
-	movl	%ecx,%esi
-	andl	$2155905152,%esi
-	movl	%esi,%ebp
-	shrl	$7,%ebp
-	leal	(%ecx,%ecx,1),%edi
-	subl	%ebp,%esi
-	andl	$4278124286,%edi
-	andl	$454761243,%esi
-	movl	%ecx,%ebp
-	xorl	%edi,%esi
-	xorl	%esi,%ecx
-	roll	$24,%ecx
-	xorl	%esi,%ecx
-	rorl	$16,%ebp
-	xorl	%ebp,%ecx
-	rorl	$8,%ebp
-	xorl	%ebp,%ecx
-	movl	%edx,%esi
-	andl	$2155905152,%esi
-	movl	%esi,%ebp
-	shrl	$7,%ebp
-	leal	(%edx,%edx,1),%edi
-	subl	%ebp,%esi
-	andl	$4278124286,%edi
-	andl	$454761243,%esi
-	movl	%edx,%ebp
-	xorl	%edi,%esi
-	xorl	%esi,%edx
-	roll	$24,%edx
-	xorl	%esi,%edx
-	rorl	$16,%ebp
-	xorl	%ebp,%edx
-	rorl	$8,%ebp
-	xorl	%ebp,%edx
-	movl	%eax,%esi
-	andl	$2155905152,%esi
-	movl	%esi,%ebp
-	shrl	$7,%ebp
-	leal	(%eax,%eax,1),%edi
-	subl	%ebp,%esi
-	andl	$4278124286,%edi
-	andl	$454761243,%esi
-	movl	%eax,%ebp
-	xorl	%edi,%esi
-	xorl	%esi,%eax
-	roll	$24,%eax
-	xorl	%esi,%eax
-	rorl	$16,%ebp
-	xorl	%ebp,%eax
-	rorl	$8,%ebp
-	xorl	%ebp,%eax
-	movl	%ebx,%esi
-	andl	$2155905152,%esi
-	movl	%esi,%ebp
-	shrl	$7,%ebp
-	leal	(%ebx,%ebx,1),%edi
-	subl	%ebp,%esi
-	andl	$4278124286,%edi
-	andl	$454761243,%esi
-	movl	%ebx,%ebp
-	xorl	%edi,%esi
-	xorl	%esi,%ebx
-	roll	$24,%ebx
-	xorl	%esi,%ebx
-	rorl	$16,%ebp
-	xorl	%ebp,%ebx
-	rorl	$8,%ebp
-	xorl	%ebp,%ebx
-	movl	20(%esp),%edi
-	movl	28(%esp),%ebp
-	addl	$16,%edi
-	xorl	(%edi),%eax
-	xorl	4(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	12(%edi),%edx
-	cmpl	24(%esp),%edi
-	movl	%edi,20(%esp)
-	jb	.L000loop
-	movl	%eax,%esi
-	andl	$255,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	movzbl	%bh,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$8,%edi
-	xorl	%edi,%esi
-	movl	%ecx,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$16,%edi
-	xorl	%edi,%esi
-	movl	%edx,%edi
-	shrl	$24,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$24,%edi
-	xorl	%edi,%esi
-	movl	%esi,4(%esp)
-
-	movl	%ebx,%esi
-	andl	$255,%esi
-	shrl	$16,%ebx
-	movzbl	-128(%ebp,%esi,1),%esi
-	movzbl	%ch,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$8,%edi
-	xorl	%edi,%esi
-	movl	%edx,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$16,%edi
-	xorl	%edi,%esi
-	movl	%eax,%edi
-	shrl	$24,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$24,%edi
-	xorl	%edi,%esi
-	movl	%esi,8(%esp)
-
-	movl	%ecx,%esi
-	andl	$255,%esi
-	shrl	$24,%ecx
-	movzbl	-128(%ebp,%esi,1),%esi
-	movzbl	%dh,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$8,%edi
-	xorl	%edi,%esi
-	movl	%eax,%edi
-	shrl	$16,%edi
-	andl	$255,%edx
-	andl	$255,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$16,%edi
-	xorl	%edi,%esi
-	movzbl	%bh,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$24,%edi
-	xorl	%edi,%esi
-
-	movl	20(%esp),%edi
-	andl	$255,%edx
-	movzbl	-128(%ebp,%edx,1),%edx
-	movzbl	%ah,%eax
-	movzbl	-128(%ebp,%eax,1),%eax
-	shll	$8,%eax
-	xorl	%eax,%edx
-	movl	4(%esp),%eax
-	andl	$255,%ebx
-	movzbl	-128(%ebp,%ebx,1),%ebx
-	shll	$16,%ebx
-	xorl	%ebx,%edx
-	movl	8(%esp),%ebx
-	movzbl	-128(%ebp,%ecx,1),%ecx
-	shll	$24,%ecx
-	xorl	%ecx,%edx
-	movl	%esi,%ecx
-
-	xorl	16(%edi),%eax
-	xorl	20(%edi),%ebx
-	xorl	24(%edi),%ecx
-	xorl	28(%edi),%edx
-	ret
-.size	_x86_AES_encrypt_compact,.-_x86_AES_encrypt_compact
-.type	_sse_AES_encrypt_compact, at function
-.align	16
-_sse_AES_encrypt_compact:
-	pxor	(%edi),%mm0
-	pxor	8(%edi),%mm4
-	movl	240(%edi),%esi
-	leal	-2(%esi,%esi,1),%esi
-	leal	(%edi,%esi,8),%esi
-	movl	%esi,24(%esp)
-	movl	$454761243,%eax
-	movl	%eax,8(%esp)
-	movl	%eax,12(%esp)
-	movl	-128(%ebp),%eax
-	movl	-96(%ebp),%ebx
-	movl	-64(%ebp),%ecx
-	movl	-32(%ebp),%edx
-	movl	(%ebp),%eax
-	movl	32(%ebp),%ebx
-	movl	64(%ebp),%ecx
-	movl	96(%ebp),%edx
-.align	16
-.L001loop:
-	pshufw	$8,%mm0,%mm1
-	pshufw	$13,%mm4,%mm5
-	movd	%mm1,%eax
-	movd	%mm5,%ebx
-	movzbl	%al,%esi
-	movzbl	-128(%ebp,%esi,1),%ecx
-	pshufw	$13,%mm0,%mm2
-	movzbl	%ah,%edx
-	movzbl	-128(%ebp,%edx,1),%edx
-	shll	$8,%edx
-	shrl	$16,%eax
-	movzbl	%bl,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	shll	$16,%esi
-	orl	%esi,%ecx
-	pshufw	$8,%mm4,%mm6
-	movzbl	%bh,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	shll	$24,%esi
-	orl	%esi,%edx
-	shrl	$16,%ebx
-	movzbl	%ah,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	shll	$8,%esi
-	orl	%esi,%ecx
-	movzbl	%bh,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	shll	$24,%esi
-	orl	%esi,%ecx
-	movd	%ecx,%mm0
-	movzbl	%al,%esi
-	movzbl	-128(%ebp,%esi,1),%ecx
-	movd	%mm2,%eax
-	movzbl	%bl,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	shll	$16,%esi
-	orl	%esi,%ecx
-	movd	%mm6,%ebx
-	movzbl	%ah,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	shll	$24,%esi
-	orl	%esi,%ecx
-	movzbl	%bh,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	shll	$8,%esi
-	orl	%esi,%ecx
-	movd	%ecx,%mm1
-	movzbl	%bl,%esi
-	movzbl	-128(%ebp,%esi,1),%ecx
-	shrl	$16,%ebx
-	movzbl	%al,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	shll	$16,%esi
-	orl	%esi,%ecx
-	shrl	$16,%eax
-	punpckldq	%mm1,%mm0
-	movzbl	%ah,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	shll	$24,%esi
-	orl	%esi,%ecx
-	andl	$255,%eax
-	movzbl	-128(%ebp,%eax,1),%eax
-	shll	$16,%eax
-	orl	%eax,%edx
-	movzbl	%bh,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	shll	$8,%esi
-	orl	%esi,%ecx
-	movd	%ecx,%mm4
-	andl	$255,%ebx
-	movzbl	-128(%ebp,%ebx,1),%ebx
-	orl	%ebx,%edx
-	movd	%edx,%mm5
-	punpckldq	%mm5,%mm4
-	addl	$16,%edi
-	cmpl	24(%esp),%edi
-	ja	.L002out
-	movq	8(%esp),%mm2
-	pxor	%mm3,%mm3
-	pxor	%mm7,%mm7
-	movq	%mm0,%mm1
-	movq	%mm4,%mm5
-	pcmpgtb	%mm0,%mm3
-	pcmpgtb	%mm4,%mm7
-	pand	%mm2,%mm3
-	pand	%mm2,%mm7
-	pshufw	$177,%mm0,%mm2
-	pshufw	$177,%mm4,%mm6
-	paddb	%mm0,%mm0
-	paddb	%mm4,%mm4
-	pxor	%mm3,%mm0
-	pxor	%mm7,%mm4
-	pshufw	$177,%mm2,%mm3
-	pshufw	$177,%mm6,%mm7
-	pxor	%mm0,%mm1
-	pxor	%mm4,%mm5
-	pxor	%mm2,%mm0
-	pxor	%mm6,%mm4
-	movq	%mm3,%mm2
-	movq	%mm7,%mm6
-	pslld	$8,%mm3
-	pslld	$8,%mm7
-	psrld	$24,%mm2
-	psrld	$24,%mm6
-	pxor	%mm3,%mm0
-	pxor	%mm7,%mm4
-	pxor	%mm2,%mm0
-	pxor	%mm6,%mm4
-	movq	%mm1,%mm3
-	movq	%mm5,%mm7
-	movq	(%edi),%mm2
-	movq	8(%edi),%mm6
-	psrld	$8,%mm1
-	psrld	$8,%mm5
-	movl	-128(%ebp),%eax
-	pslld	$24,%mm3
-	pslld	$24,%mm7
-	movl	-64(%ebp),%ebx
-	pxor	%mm1,%mm0
-	pxor	%mm5,%mm4
-	movl	(%ebp),%ecx
-	pxor	%mm3,%mm0
-	pxor	%mm7,%mm4
-	movl	64(%ebp),%edx
-	pxor	%mm2,%mm0
-	pxor	%mm6,%mm4
-	jmp	.L001loop
-.align	16
-.L002out:
-	pxor	(%edi),%mm0
-	pxor	8(%edi),%mm4
-	ret
-.size	_sse_AES_encrypt_compact,.-_sse_AES_encrypt_compact
-.type	_x86_AES_encrypt, at function
-.align	16
-_x86_AES_encrypt:
-	movl	%edi,20(%esp)
-	xorl	(%edi),%eax
-	xorl	4(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	12(%edi),%edx
-	movl	240(%edi),%esi
-	leal	-2(%esi,%esi,1),%esi
-	leal	(%edi,%esi,8),%esi
-	movl	%esi,24(%esp)
-.align	16
-.L003loop:
-	movl	%eax,%esi
-	andl	$255,%esi
-	movl	(%ebp,%esi,8),%esi
-	movzbl	%bh,%edi
-	xorl	3(%ebp,%edi,8),%esi
-	movl	%ecx,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	xorl	2(%ebp,%edi,8),%esi
-	movl	%edx,%edi
-	shrl	$24,%edi
-	xorl	1(%ebp,%edi,8),%esi
-	movl	%esi,4(%esp)
-
-	movl	%ebx,%esi
-	andl	$255,%esi
-	shrl	$16,%ebx
-	movl	(%ebp,%esi,8),%esi
-	movzbl	%ch,%edi
-	xorl	3(%ebp,%edi,8),%esi
-	movl	%edx,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	xorl	2(%ebp,%edi,8),%esi
-	movl	%eax,%edi
-	shrl	$24,%edi
-	xorl	1(%ebp,%edi,8),%esi
-	movl	%esi,8(%esp)
-
-	movl	%ecx,%esi
-	andl	$255,%esi
-	shrl	$24,%ecx
-	movl	(%ebp,%esi,8),%esi
-	movzbl	%dh,%edi
-	xorl	3(%ebp,%edi,8),%esi
-	movl	%eax,%edi
-	shrl	$16,%edi
-	andl	$255,%edx
-	andl	$255,%edi
-	xorl	2(%ebp,%edi,8),%esi
-	movzbl	%bh,%edi
-	xorl	1(%ebp,%edi,8),%esi
-
-	movl	20(%esp),%edi
-	movl	(%ebp,%edx,8),%edx
-	movzbl	%ah,%eax
-	xorl	3(%ebp,%eax,8),%edx
-	movl	4(%esp),%eax
-	andl	$255,%ebx
-	xorl	2(%ebp,%ebx,8),%edx
-	movl	8(%esp),%ebx
-	xorl	1(%ebp,%ecx,8),%edx
-	movl	%esi,%ecx
-
-	addl	$16,%edi
-	xorl	(%edi),%eax
-	xorl	4(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	12(%edi),%edx
-	cmpl	24(%esp),%edi
-	movl	%edi,20(%esp)
-	jb	.L003loop
-	movl	%eax,%esi
-	andl	$255,%esi
-	movl	2(%ebp,%esi,8),%esi
-	andl	$255,%esi
-	movzbl	%bh,%edi
-	movl	(%ebp,%edi,8),%edi
-	andl	$65280,%edi
-	xorl	%edi,%esi
-	movl	%ecx,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	movl	(%ebp,%edi,8),%edi
-	andl	$16711680,%edi
-	xorl	%edi,%esi
-	movl	%edx,%edi
-	shrl	$24,%edi
-	movl	2(%ebp,%edi,8),%edi
-	andl	$4278190080,%edi
-	xorl	%edi,%esi
-	movl	%esi,4(%esp)
-	movl	%ebx,%esi
-	andl	$255,%esi
-	shrl	$16,%ebx
-	movl	2(%ebp,%esi,8),%esi
-	andl	$255,%esi
-	movzbl	%ch,%edi
-	movl	(%ebp,%edi,8),%edi
-	andl	$65280,%edi
-	xorl	%edi,%esi
-	movl	%edx,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	movl	(%ebp,%edi,8),%edi
-	andl	$16711680,%edi
-	xorl	%edi,%esi
-	movl	%eax,%edi
-	shrl	$24,%edi
-	movl	2(%ebp,%edi,8),%edi
-	andl	$4278190080,%edi
-	xorl	%edi,%esi
-	movl	%esi,8(%esp)
-	movl	%ecx,%esi
-	andl	$255,%esi
-	shrl	$24,%ecx
-	movl	2(%ebp,%esi,8),%esi
-	andl	$255,%esi
-	movzbl	%dh,%edi
-	movl	(%ebp,%edi,8),%edi
-	andl	$65280,%edi
-	xorl	%edi,%esi
-	movl	%eax,%edi
-	shrl	$16,%edi
-	andl	$255,%edx
-	andl	$255,%edi
-	movl	(%ebp,%edi,8),%edi
-	andl	$16711680,%edi
-	xorl	%edi,%esi
-	movzbl	%bh,%edi
-	movl	2(%ebp,%edi,8),%edi
-	andl	$4278190080,%edi
-	xorl	%edi,%esi
-	movl	20(%esp),%edi
-	andl	$255,%edx
-	movl	2(%ebp,%edx,8),%edx
-	andl	$255,%edx
-	movzbl	%ah,%eax
-	movl	(%ebp,%eax,8),%eax
-	andl	$65280,%eax
-	xorl	%eax,%edx
-	movl	4(%esp),%eax
-	andl	$255,%ebx
-	movl	(%ebp,%ebx,8),%ebx
-	andl	$16711680,%ebx
-	xorl	%ebx,%edx
-	movl	8(%esp),%ebx
-	movl	2(%ebp,%ecx,8),%ecx
-	andl	$4278190080,%ecx
-	xorl	%ecx,%edx
-	movl	%esi,%ecx
-	addl	$16,%edi
-	xorl	(%edi),%eax
-	xorl	4(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	12(%edi),%edx
-	ret
-.align	64
-.LAES_Te:
-.long	2774754246,2774754246
-.long	2222750968,2222750968
-.long	2574743534,2574743534
-.long	2373680118,2373680118
-.long	234025727,234025727
-.long	3177933782,3177933782
-.long	2976870366,2976870366
-.long	1422247313,1422247313
-.long	1345335392,1345335392
-.long	50397442,50397442
-.long	2842126286,2842126286
-.long	2099981142,2099981142
-.long	436141799,436141799
-.long	1658312629,1658312629
-.long	3870010189,3870010189
-.long	2591454956,2591454956
-.long	1170918031,1170918031
-.long	2642575903,2642575903
-.long	1086966153,1086966153
-.long	2273148410,2273148410
-.long	368769775,368769775
-.long	3948501426,3948501426
-.long	3376891790,3376891790
-.long	200339707,200339707
-.long	3970805057,3970805057
-.long	1742001331,1742001331
-.long	4255294047,4255294047
-.long	3937382213,3937382213
-.long	3214711843,3214711843
-.long	4154762323,4154762323
-.long	2524082916,2524082916
-.long	1539358875,1539358875
-.long	3266819957,3266819957
-.long	486407649,486407649
-.long	2928907069,2928907069
-.long	1780885068,1780885068
-.long	1513502316,1513502316
-.long	1094664062,1094664062
-.long	49805301,49805301
-.long	1338821763,1338821763
-.long	1546925160,1546925160
-.long	4104496465,4104496465
-.long	887481809,887481809
-.long	150073849,150073849
-.long	2473685474,2473685474
-.long	1943591083,1943591083
-.long	1395732834,1395732834
-.long	1058346282,1058346282
-.long	201589768,201589768
-.long	1388824469,1388824469
-.long	1696801606,1696801606
-.long	1589887901,1589887901
-.long	672667696,672667696
-.long	2711000631,2711000631
-.long	251987210,251987210
-.long	3046808111,3046808111
-.long	151455502,151455502
-.long	907153956,907153956
-.long	2608889883,2608889883
-.long	1038279391,1038279391
-.long	652995533,652995533
-.long	1764173646,1764173646
-.long	3451040383,3451040383
-.long	2675275242,2675275242
-.long	453576978,453576978
-.long	2659418909,2659418909
-.long	1949051992,1949051992
-.long	773462580,773462580
-.long	756751158,756751158
-.long	2993581788,2993581788
-.long	3998898868,3998898868
-.long	4221608027,4221608027
-.long	4132590244,4132590244
-.long	1295727478,1295727478
-.long	1641469623,1641469623
-.long	3467883389,3467883389
-.long	2066295122,2066295122
-.long	1055122397,1055122397
-.long	1898917726,1898917726
-.long	2542044179,2542044179
-.long	4115878822,4115878822
-.long	1758581177,1758581177
-.long	0,0
-.long	753790401,753790401
-.long	1612718144,1612718144
-.long	536673507,536673507
-.long	3367088505,3367088505
-.long	3982187446,3982187446
-.long	3194645204,3194645204
-.long	1187761037,1187761037
-.long	3653156455,3653156455
-.long	1262041458,1262041458
-.long	3729410708,3729410708
-.long	3561770136,3561770136
-.long	3898103984,3898103984
-.long	1255133061,1255133061
-.long	1808847035,1808847035
-.long	720367557,720367557
-.long	3853167183,3853167183
-.long	385612781,385612781
-.long	3309519750,3309519750
-.long	3612167578,3612167578
-.long	1429418854,1429418854
-.long	2491778321,2491778321
-.long	3477423498,3477423498
-.long	284817897,284817897
-.long	100794884,100794884
-.long	2172616702,2172616702
-.long	4031795360,4031795360
-.long	1144798328,1144798328
-.long	3131023141,3131023141
-.long	3819481163,3819481163
-.long	4082192802,4082192802
-.long	4272137053,4272137053
-.long	3225436288,3225436288
-.long	2324664069,2324664069
-.long	2912064063,2912064063
-.long	3164445985,3164445985
-.long	1211644016,1211644016
-.long	83228145,83228145
-.long	3753688163,3753688163
-.long	3249976951,3249976951
-.long	1977277103,1977277103
-.long	1663115586,1663115586
-.long	806359072,806359072
-.long	452984805,452984805
-.long	250868733,250868733
-.long	1842533055,1842533055
-.long	1288555905,1288555905
-.long	336333848,336333848
-.long	890442534,890442534
-.long	804056259,804056259
-.long	3781124030,3781124030
-.long	2727843637,2727843637
-.long	3427026056,3427026056
-.long	957814574,957814574
-.long	1472513171,1472513171
-.long	4071073621,4071073621
-.long	2189328124,2189328124
-.long	1195195770,1195195770
-.long	2892260552,2892260552
-.long	3881655738,3881655738
-.long	723065138,723065138
-.long	2507371494,2507371494
-.long	2690670784,2690670784
-.long	2558624025,2558624025
-.long	3511635870,3511635870
-.long	2145180835,2145180835
-.long	1713513028,1713513028
-.long	2116692564,2116692564
-.long	2878378043,2878378043
-.long	2206763019,2206763019
-.long	3393603212,3393603212
-.long	703524551,703524551
-.long	3552098411,3552098411
-.long	1007948840,1007948840
-.long	2044649127,2044649127
-.long	3797835452,3797835452
-.long	487262998,487262998
-.long	1994120109,1994120109
-.long	1004593371,1004593371
-.long	1446130276,1446130276
-.long	1312438900,1312438900
-.long	503974420,503974420
-.long	3679013266,3679013266
-.long	168166924,168166924
-.long	1814307912,1814307912
-.long	3831258296,3831258296
-.long	1573044895,1573044895
-.long	1859376061,1859376061
-.long	4021070915,4021070915
-.long	2791465668,2791465668
-.long	2828112185,2828112185
-.long	2761266481,2761266481
-.long	937747667,937747667
-.long	2339994098,2339994098
-.long	854058965,854058965
-.long	1137232011,1137232011
-.long	1496790894,1496790894
-.long	3077402074,3077402074
-.long	2358086913,2358086913
-.long	1691735473,1691735473
-.long	3528347292,3528347292
-.long	3769215305,3769215305
-.long	3027004632,3027004632
-.long	4199962284,4199962284
-.long	133494003,133494003
-.long	636152527,636152527
-.long	2942657994,2942657994
-.long	2390391540,2390391540
-.long	3920539207,3920539207
-.long	403179536,403179536
-.long	3585784431,3585784431
-.long	2289596656,2289596656
-.long	1864705354,1864705354
-.long	1915629148,1915629148
-.long	605822008,605822008
-.long	4054230615,4054230615
-.long	3350508659,3350508659
-.long	1371981463,1371981463
-.long	602466507,602466507
-.long	2094914977,2094914977
-.long	2624877800,2624877800
-.long	555687742,555687742
-.long	3712699286,3712699286
-.long	3703422305,3703422305
-.long	2257292045,2257292045
-.long	2240449039,2240449039
-.long	2423288032,2423288032
-.long	1111375484,1111375484
-.long	3300242801,3300242801
-.long	2858837708,2858837708
-.long	3628615824,3628615824
-.long	84083462,84083462
-.long	32962295,32962295
-.long	302911004,302911004
-.long	2741068226,2741068226
-.long	1597322602,1597322602
-.long	4183250862,4183250862
-.long	3501832553,3501832553
-.long	2441512471,2441512471
-.long	1489093017,1489093017
-.long	656219450,656219450
-.long	3114180135,3114180135
-.long	954327513,954327513
-.long	335083755,335083755
-.long	3013122091,3013122091
-.long	856756514,856756514
-.long	3144247762,3144247762
-.long	1893325225,1893325225
-.long	2307821063,2307821063
-.long	2811532339,2811532339
-.long	3063651117,3063651117
-.long	572399164,572399164
-.long	2458355477,2458355477
-.long	552200649,552200649
-.long	1238290055,1238290055
-.long	4283782570,4283782570
-.long	2015897680,2015897680
-.long	2061492133,2061492133
-.long	2408352771,2408352771
-.long	4171342169,4171342169
-.long	2156497161,2156497161
-.long	386731290,386731290
-.long	3669999461,3669999461
-.long	837215959,837215959
-.long	3326231172,3326231172
-.long	3093850320,3093850320
-.long	3275833730,3275833730
-.long	2962856233,2962856233
-.long	1999449434,1999449434
-.long	286199582,286199582
-.long	3417354363,3417354363
-.long	4233385128,4233385128
-.long	3602627437,3602627437
-.long	974525996,974525996
-.byte	99,124,119,123,242,107,111,197
-.byte	48,1,103,43,254,215,171,118
-.byte	202,130,201,125,250,89,71,240
-.byte	173,212,162,175,156,164,114,192
-.byte	183,253,147,38,54,63,247,204
-.byte	52,165,229,241,113,216,49,21
-.byte	4,199,35,195,24,150,5,154
-.byte	7,18,128,226,235,39,178,117
-.byte	9,131,44,26,27,110,90,160
-.byte	82,59,214,179,41,227,47,132
-.byte	83,209,0,237,32,252,177,91
-.byte	106,203,190,57,74,76,88,207
-.byte	208,239,170,251,67,77,51,133
-.byte	69,249,2,127,80,60,159,168
-.byte	81,163,64,143,146,157,56,245
-.byte	188,182,218,33,16,255,243,210
-.byte	205,12,19,236,95,151,68,23
-.byte	196,167,126,61,100,93,25,115
-.byte	96,129,79,220,34,42,144,136
-.byte	70,238,184,20,222,94,11,219
-.byte	224,50,58,10,73,6,36,92
-.byte	194,211,172,98,145,149,228,121
-.byte	231,200,55,109,141,213,78,169
-.byte	108,86,244,234,101,122,174,8
-.byte	186,120,37,46,28,166,180,198
-.byte	232,221,116,31,75,189,139,138
-.byte	112,62,181,102,72,3,246,14
-.byte	97,53,87,185,134,193,29,158
-.byte	225,248,152,17,105,217,142,148
-.byte	155,30,135,233,206,85,40,223
-.byte	140,161,137,13,191,230,66,104
-.byte	65,153,45,15,176,84,187,22
-.byte	99,124,119,123,242,107,111,197
-.byte	48,1,103,43,254,215,171,118
-.byte	202,130,201,125,250,89,71,240
-.byte	173,212,162,175,156,164,114,192
-.byte	183,253,147,38,54,63,247,204
-.byte	52,165,229,241,113,216,49,21
-.byte	4,199,35,195,24,150,5,154
-.byte	7,18,128,226,235,39,178,117
-.byte	9,131,44,26,27,110,90,160
-.byte	82,59,214,179,41,227,47,132
-.byte	83,209,0,237,32,252,177,91
-.byte	106,203,190,57,74,76,88,207
-.byte	208,239,170,251,67,77,51,133
-.byte	69,249,2,127,80,60,159,168
-.byte	81,163,64,143,146,157,56,245
-.byte	188,182,218,33,16,255,243,210
-.byte	205,12,19,236,95,151,68,23
-.byte	196,167,126,61,100,93,25,115
-.byte	96,129,79,220,34,42,144,136
-.byte	70,238,184,20,222,94,11,219
-.byte	224,50,58,10,73,6,36,92
-.byte	194,211,172,98,145,149,228,121
-.byte	231,200,55,109,141,213,78,169
-.byte	108,86,244,234,101,122,174,8
-.byte	186,120,37,46,28,166,180,198
-.byte	232,221,116,31,75,189,139,138
-.byte	112,62,181,102,72,3,246,14
-.byte	97,53,87,185,134,193,29,158
-.byte	225,248,152,17,105,217,142,148
-.byte	155,30,135,233,206,85,40,223
-.byte	140,161,137,13,191,230,66,104
-.byte	65,153,45,15,176,84,187,22
-.byte	99,124,119,123,242,107,111,197
-.byte	48,1,103,43,254,215,171,118
-.byte	202,130,201,125,250,89,71,240
-.byte	173,212,162,175,156,164,114,192
-.byte	183,253,147,38,54,63,247,204
-.byte	52,165,229,241,113,216,49,21
-.byte	4,199,35,195,24,150,5,154
-.byte	7,18,128,226,235,39,178,117
-.byte	9,131,44,26,27,110,90,160
-.byte	82,59,214,179,41,227,47,132
-.byte	83,209,0,237,32,252,177,91
-.byte	106,203,190,57,74,76,88,207
-.byte	208,239,170,251,67,77,51,133
-.byte	69,249,2,127,80,60,159,168
-.byte	81,163,64,143,146,157,56,245
-.byte	188,182,218,33,16,255,243,210
-.byte	205,12,19,236,95,151,68,23
-.byte	196,167,126,61,100,93,25,115
-.byte	96,129,79,220,34,42,144,136
-.byte	70,238,184,20,222,94,11,219
-.byte	224,50,58,10,73,6,36,92
-.byte	194,211,172,98,145,149,228,121
-.byte	231,200,55,109,141,213,78,169
-.byte	108,86,244,234,101,122,174,8
-.byte	186,120,37,46,28,166,180,198
-.byte	232,221,116,31,75,189,139,138
-.byte	112,62,181,102,72,3,246,14
-.byte	97,53,87,185,134,193,29,158
-.byte	225,248,152,17,105,217,142,148
-.byte	155,30,135,233,206,85,40,223
-.byte	140,161,137,13,191,230,66,104
-.byte	65,153,45,15,176,84,187,22
-.byte	99,124,119,123,242,107,111,197
-.byte	48,1,103,43,254,215,171,118
-.byte	202,130,201,125,250,89,71,240
-.byte	173,212,162,175,156,164,114,192
-.byte	183,253,147,38,54,63,247,204
-.byte	52,165,229,241,113,216,49,21
-.byte	4,199,35,195,24,150,5,154
-.byte	7,18,128,226,235,39,178,117
-.byte	9,131,44,26,27,110,90,160
-.byte	82,59,214,179,41,227,47,132
-.byte	83,209,0,237,32,252,177,91
-.byte	106,203,190,57,74,76,88,207
-.byte	208,239,170,251,67,77,51,133
-.byte	69,249,2,127,80,60,159,168
-.byte	81,163,64,143,146,157,56,245
-.byte	188,182,218,33,16,255,243,210
-.byte	205,12,19,236,95,151,68,23
-.byte	196,167,126,61,100,93,25,115
-.byte	96,129,79,220,34,42,144,136
-.byte	70,238,184,20,222,94,11,219
-.byte	224,50,58,10,73,6,36,92
-.byte	194,211,172,98,145,149,228,121
-.byte	231,200,55,109,141,213,78,169
-.byte	108,86,244,234,101,122,174,8
-.byte	186,120,37,46,28,166,180,198
-.byte	232,221,116,31,75,189,139,138
-.byte	112,62,181,102,72,3,246,14
-.byte	97,53,87,185,134,193,29,158
-.byte	225,248,152,17,105,217,142,148
-.byte	155,30,135,233,206,85,40,223
-.byte	140,161,137,13,191,230,66,104
-.byte	65,153,45,15,176,84,187,22
-.long	1,2,4,8
-.long	16,32,64,128
-.long	27,54,0,0
-.long	0,0,0,0
-.size	_x86_AES_encrypt,.-_x86_AES_encrypt
-.globl	AES_encrypt
-.type	AES_encrypt, at function
-.align	16
-AES_encrypt:
-.L_AES_encrypt_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%esi
-	movl	28(%esp),%edi
-	movl	%esp,%eax
-	subl	$36,%esp
-	andl	$-64,%esp
-	leal	-127(%edi),%ebx
-	subl	%esp,%ebx
-	negl	%ebx
-	andl	$960,%ebx
-	subl	%ebx,%esp
-	addl	$4,%esp
-	movl	%eax,28(%esp)
-	call	.L004pic_point
-.L004pic_point:
-	popl	%ebp
-	leal	OPENSSL_ia32cap_P,%eax
-	leal	.LAES_Te-.L004pic_point(%ebp),%ebp
-	leal	764(%esp),%ebx
-	subl	%ebp,%ebx
-	andl	$768,%ebx
-	leal	2176(%ebp,%ebx,1),%ebp
-	btl	$25,(%eax)
-	jnc	.L005x86
-	movq	(%esi),%mm0
-	movq	8(%esi),%mm4
-	call	_sse_AES_encrypt_compact
-	movl	28(%esp),%esp
-	movl	24(%esp),%esi
-	movq	%mm0,(%esi)
-	movq	%mm4,8(%esi)
-	emms
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.align	16
-.L005x86:
-	movl	%ebp,24(%esp)
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	movl	12(%esi),%edx
-	call	_x86_AES_encrypt_compact
-	movl	28(%esp),%esp
-	movl	24(%esp),%esi
-	movl	%eax,(%esi)
-	movl	%ebx,4(%esi)
-	movl	%ecx,8(%esi)
-	movl	%edx,12(%esi)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	AES_encrypt,.-.L_AES_encrypt_begin
-.type	_x86_AES_decrypt_compact,@function
-.align	16
-_x86_AES_decrypt_compact:
-	movl	%edi,20(%esp)
-	xorl	(%edi),%eax
-	xorl	4(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	12(%edi),%edx
-	movl	240(%edi),%esi
-	leal	-2(%esi,%esi,1),%esi
-	leal	(%edi,%esi,8),%esi
-	movl	%esi,24(%esp)
-	movl	-128(%ebp),%edi
-	movl	-96(%ebp),%esi
-	movl	-64(%ebp),%edi
-	movl	-32(%ebp),%esi
-	movl	(%ebp),%edi
-	movl	32(%ebp),%esi
-	movl	64(%ebp),%edi
-	movl	96(%ebp),%esi
-.align	16
-.L006loop:
-	movl	%eax,%esi
-	andl	$255,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	movzbl	%dh,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$8,%edi
-	xorl	%edi,%esi
-	movl	%ecx,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$16,%edi
-	xorl	%edi,%esi
-	movl	%ebx,%edi
-	shrl	$24,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$24,%edi
-	xorl	%edi,%esi
-	movl	%esi,4(%esp)
-	movl	%ebx,%esi
-	andl	$255,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	movzbl	%ah,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$8,%edi
-	xorl	%edi,%esi
-	movl	%edx,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$16,%edi
-	xorl	%edi,%esi
-	movl	%ecx,%edi
-	shrl	$24,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$24,%edi
-	xorl	%edi,%esi
-	movl	%esi,8(%esp)
-	movl	%ecx,%esi
-	andl	$255,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	movzbl	%bh,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$8,%edi
-	xorl	%edi,%esi
-	movl	%eax,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$16,%edi
-	xorl	%edi,%esi
-	movl	%edx,%edi
-	shrl	$24,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$24,%edi
-	xorl	%edi,%esi
-	andl	$255,%edx
-	movzbl	-128(%ebp,%edx,1),%edx
-	movzbl	%ch,%ecx
-	movzbl	-128(%ebp,%ecx,1),%ecx
-	shll	$8,%ecx
-	xorl	%ecx,%edx
-	movl	%esi,%ecx
-	shrl	$16,%ebx
-	andl	$255,%ebx
-	movzbl	-128(%ebp,%ebx,1),%ebx
-	shll	$16,%ebx
-	xorl	%ebx,%edx
-	shrl	$24,%eax
-	movzbl	-128(%ebp,%eax,1),%eax
-	shll	$24,%eax
-	xorl	%eax,%edx
-	movl	%ecx,%esi
-	andl	$2155905152,%esi
-	movl	%esi,%edi
-	shrl	$7,%edi
-	leal	(%ecx,%ecx,1),%eax
-	subl	%edi,%esi
-	andl	$4278124286,%eax
-	andl	$454761243,%esi
-	xorl	%eax,%esi
-	movl	%esi,%eax
-	andl	$2155905152,%esi
-	movl	%esi,%edi
-	shrl	$7,%edi
-	leal	(%eax,%eax,1),%ebx
-	subl	%edi,%esi
-	andl	$4278124286,%ebx
-	andl	$454761243,%esi
-	xorl	%ecx,%eax
-	xorl	%ebx,%esi
-	movl	%esi,%ebx
-	andl	$2155905152,%esi
-	movl	%esi,%edi
-	shrl	$7,%edi
-	leal	(%ebx,%ebx,1),%ebp
-	subl	%edi,%esi
-	andl	$4278124286,%ebp
-	andl	$454761243,%esi
-	xorl	%ecx,%ebx
-	roll	$8,%ecx
-	xorl	%esi,%ebp
-	xorl	%eax,%ecx
-	xorl	%ebp,%eax
-	roll	$24,%eax
-	xorl	%ebx,%ecx
-	xorl	%ebp,%ebx
-	roll	$16,%ebx
-	xorl	%ebp,%ecx
-	roll	$8,%ebp
-	xorl	%eax,%ecx
-	xorl	%ebx,%ecx
-	movl	4(%esp),%eax
-	xorl	%ebp,%ecx
-	movl	%ecx,12(%esp)
-	movl	%edx,%esi
-	andl	$2155905152,%esi
-	movl	%esi,%edi
-	shrl	$7,%edi
-	leal	(%edx,%edx,1),%ebx
-	subl	%edi,%esi
-	andl	$4278124286,%ebx
-	andl	$454761243,%esi
-	xorl	%ebx,%esi
-	movl	%esi,%ebx
-	andl	$2155905152,%esi
-	movl	%esi,%edi
-	shrl	$7,%edi
-	leal	(%ebx,%ebx,1),%ecx
-	subl	%edi,%esi
-	andl	$4278124286,%ecx
-	andl	$454761243,%esi
-	xorl	%edx,%ebx
-	xorl	%ecx,%esi
-	movl	%esi,%ecx
-	andl	$2155905152,%esi
-	movl	%esi,%edi
-	shrl	$7,%edi
-	leal	(%ecx,%ecx,1),%ebp
-	subl	%edi,%esi
-	andl	$4278124286,%ebp
-	andl	$454761243,%esi
-	xorl	%edx,%ecx
-	roll	$8,%edx
-	xorl	%esi,%ebp
-	xorl	%ebx,%edx
-	xorl	%ebp,%ebx
-	roll	$24,%ebx
-	xorl	%ecx,%edx
-	xorl	%ebp,%ecx
-	roll	$16,%ecx
-	xorl	%ebp,%edx
-	roll	$8,%ebp
-	xorl	%ebx,%edx
-	xorl	%ecx,%edx
-	movl	8(%esp),%ebx
-	xorl	%ebp,%edx
-	movl	%edx,16(%esp)
-	movl	%eax,%esi
-	andl	$2155905152,%esi
-	movl	%esi,%edi
-	shrl	$7,%edi
-	leal	(%eax,%eax,1),%ecx
-	subl	%edi,%esi
-	andl	$4278124286,%ecx
-	andl	$454761243,%esi
-	xorl	%ecx,%esi
-	movl	%esi,%ecx
-	andl	$2155905152,%esi
-	movl	%esi,%edi
-	shrl	$7,%edi
-	leal	(%ecx,%ecx,1),%edx
-	subl	%edi,%esi
-	andl	$4278124286,%edx
-	andl	$454761243,%esi
-	xorl	%eax,%ecx
-	xorl	%edx,%esi
-	movl	%esi,%edx
-	andl	$2155905152,%esi
-	movl	%esi,%edi
-	shrl	$7,%edi
-	leal	(%edx,%edx,1),%ebp
-	subl	%edi,%esi
-	andl	$4278124286,%ebp
-	andl	$454761243,%esi
-	xorl	%eax,%edx
-	roll	$8,%eax
-	xorl	%esi,%ebp
-	xorl	%ecx,%eax
-	xorl	%ebp,%ecx
-	roll	$24,%ecx
-	xorl	%edx,%eax
-	xorl	%ebp,%edx
-	roll	$16,%edx
-	xorl	%ebp,%eax
-	roll	$8,%ebp
-	xorl	%ecx,%eax
-	xorl	%edx,%eax
-	xorl	%ebp,%eax
-	movl	%ebx,%esi
-	andl	$2155905152,%esi
-	movl	%esi,%edi
-	shrl	$7,%edi
-	leal	(%ebx,%ebx,1),%ecx
-	subl	%edi,%esi
-	andl	$4278124286,%ecx
-	andl	$454761243,%esi
-	xorl	%ecx,%esi
-	movl	%esi,%ecx
-	andl	$2155905152,%esi
-	movl	%esi,%edi
-	shrl	$7,%edi
-	leal	(%ecx,%ecx,1),%edx
-	subl	%edi,%esi
-	andl	$4278124286,%edx
-	andl	$454761243,%esi
-	xorl	%ebx,%ecx
-	xorl	%edx,%esi
-	movl	%esi,%edx
-	andl	$2155905152,%esi
-	movl	%esi,%edi
-	shrl	$7,%edi
-	leal	(%edx,%edx,1),%ebp
-	subl	%edi,%esi
-	andl	$4278124286,%ebp
-	andl	$454761243,%esi
-	xorl	%ebx,%edx
-	roll	$8,%ebx
-	xorl	%esi,%ebp
-	xorl	%ecx,%ebx
-	xorl	%ebp,%ecx
-	roll	$24,%ecx
-	xorl	%edx,%ebx
-	xorl	%ebp,%edx
-	roll	$16,%edx
-	xorl	%ebp,%ebx
-	roll	$8,%ebp
-	xorl	%ecx,%ebx
-	xorl	%edx,%ebx
-	movl	12(%esp),%ecx
-	xorl	%ebp,%ebx
-	movl	16(%esp),%edx
-	movl	20(%esp),%edi
-	movl	28(%esp),%ebp
-	addl	$16,%edi
-	xorl	(%edi),%eax
-	xorl	4(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	12(%edi),%edx
-	cmpl	24(%esp),%edi
-	movl	%edi,20(%esp)
-	jb	.L006loop
-	movl	%eax,%esi
-	andl	$255,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	movzbl	%dh,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$8,%edi
-	xorl	%edi,%esi
-	movl	%ecx,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$16,%edi
-	xorl	%edi,%esi
-	movl	%ebx,%edi
-	shrl	$24,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$24,%edi
-	xorl	%edi,%esi
-	movl	%esi,4(%esp)
-	movl	%ebx,%esi
-	andl	$255,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	movzbl	%ah,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$8,%edi
-	xorl	%edi,%esi
-	movl	%edx,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$16,%edi
-	xorl	%edi,%esi
-	movl	%ecx,%edi
-	shrl	$24,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$24,%edi
-	xorl	%edi,%esi
-	movl	%esi,8(%esp)
-	movl	%ecx,%esi
-	andl	$255,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	movzbl	%bh,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$8,%edi
-	xorl	%edi,%esi
-	movl	%eax,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$16,%edi
-	xorl	%edi,%esi
-	movl	%edx,%edi
-	shrl	$24,%edi
-	movzbl	-128(%ebp,%edi,1),%edi
-	shll	$24,%edi
-	xorl	%edi,%esi
-	movl	20(%esp),%edi
-	andl	$255,%edx
-	movzbl	-128(%ebp,%edx,1),%edx
-	movzbl	%ch,%ecx
-	movzbl	-128(%ebp,%ecx,1),%ecx
-	shll	$8,%ecx
-	xorl	%ecx,%edx
-	movl	%esi,%ecx
-	shrl	$16,%ebx
-	andl	$255,%ebx
-	movzbl	-128(%ebp,%ebx,1),%ebx
-	shll	$16,%ebx
-	xorl	%ebx,%edx
-	movl	8(%esp),%ebx
-	shrl	$24,%eax
-	movzbl	-128(%ebp,%eax,1),%eax
-	shll	$24,%eax
-	xorl	%eax,%edx
-	movl	4(%esp),%eax
-	xorl	16(%edi),%eax
-	xorl	20(%edi),%ebx
-	xorl	24(%edi),%ecx
-	xorl	28(%edi),%edx
-	ret
-.size	_x86_AES_decrypt_compact,.-_x86_AES_decrypt_compact
-.type	_sse_AES_decrypt_compact,@function
-.align	16
-_sse_AES_decrypt_compact:
-	pxor	(%edi),%mm0
-	pxor	8(%edi),%mm4
-	movl	240(%edi),%esi
-	leal	-2(%esi,%esi,1),%esi
-	leal	(%edi,%esi,8),%esi
-	movl	%esi,24(%esp)
-	movl	$454761243,%eax
-	movl	%eax,8(%esp)
-	movl	%eax,12(%esp)
-	movl	-128(%ebp),%eax
-	movl	-96(%ebp),%ebx
-	movl	-64(%ebp),%ecx
-	movl	-32(%ebp),%edx
-	movl	(%ebp),%eax
-	movl	32(%ebp),%ebx
-	movl	64(%ebp),%ecx
-	movl	96(%ebp),%edx
-.align	16
-.L007loop:
-	pshufw	$12,%mm0,%mm1
-	movd	%mm1,%eax
-	pshufw	$9,%mm4,%mm5
-	movzbl	%al,%esi
-	movzbl	-128(%ebp,%esi,1),%ecx
-	movd	%mm5,%ebx
-	movzbl	%ah,%edx
-	movzbl	-128(%ebp,%edx,1),%edx
-	shll	$8,%edx
-	pshufw	$6,%mm0,%mm2
-	movzbl	%bl,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	shll	$16,%esi
-	orl	%esi,%ecx
-	shrl	$16,%eax
-	movzbl	%bh,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	shll	$24,%esi
-	orl	%esi,%edx
-	shrl	$16,%ebx
-	pshufw	$3,%mm4,%mm6
-	movzbl	%ah,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	shll	$24,%esi
-	orl	%esi,%ecx
-	movzbl	%bh,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	shll	$8,%esi
-	orl	%esi,%ecx
-	movd	%ecx,%mm0
-	movzbl	%al,%esi
-	movd	%mm2,%eax
-	movzbl	-128(%ebp,%esi,1),%ecx
-	shll	$16,%ecx
-	movzbl	%bl,%esi
-	movd	%mm6,%ebx
-	movzbl	-128(%ebp,%esi,1),%esi
-	orl	%esi,%ecx
-	movzbl	%al,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	orl	%esi,%edx
-	movzbl	%bl,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	shll	$16,%esi
-	orl	%esi,%edx
-	movd	%edx,%mm1
-	movzbl	%ah,%esi
-	movzbl	-128(%ebp,%esi,1),%edx
-	shll	$8,%edx
-	movzbl	%bh,%esi
-	shrl	$16,%eax
-	movzbl	-128(%ebp,%esi,1),%esi
-	shll	$24,%esi
-	orl	%esi,%edx
-	shrl	$16,%ebx
-	punpckldq	%mm1,%mm0
-	movzbl	%bh,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	shll	$8,%esi
-	orl	%esi,%ecx
-	andl	$255,%ebx
-	movzbl	-128(%ebp,%ebx,1),%ebx
-	orl	%ebx,%edx
-	movzbl	%al,%esi
-	movzbl	-128(%ebp,%esi,1),%esi
-	shll	$16,%esi
-	orl	%esi,%edx
-	movd	%edx,%mm4
-	movzbl	%ah,%eax
-	movzbl	-128(%ebp,%eax,1),%eax
-	shll	$24,%eax
-	orl	%eax,%ecx
-	movd	%ecx,%mm5
-	punpckldq	%mm5,%mm4
-	addl	$16,%edi
-	cmpl	24(%esp),%edi
-	ja	.L008out
-	movq	%mm0,%mm3
-	movq	%mm4,%mm7
-	pshufw	$228,%mm0,%mm2
-	pshufw	$228,%mm4,%mm6
-	movq	%mm0,%mm1
-	movq	%mm4,%mm5
-	pshufw	$177,%mm0,%mm0
-	pshufw	$177,%mm4,%mm4
-	pslld	$8,%mm2
-	pslld	$8,%mm6
-	psrld	$8,%mm3
-	psrld	$8,%mm7
-	pxor	%mm2,%mm0
-	pxor	%mm6,%mm4
-	pxor	%mm3,%mm0
-	pxor	%mm7,%mm4
-	pslld	$16,%mm2
-	pslld	$16,%mm6
-	psrld	$16,%mm3
-	psrld	$16,%mm7
-	pxor	%mm2,%mm0
-	pxor	%mm6,%mm4
-	pxor	%mm3,%mm0
-	pxor	%mm7,%mm4
-	movq	8(%esp),%mm3
-	pxor	%mm2,%mm2
-	pxor	%mm6,%mm6
-	pcmpgtb	%mm1,%mm2
-	pcmpgtb	%mm5,%mm6
-	pand	%mm3,%mm2
-	pand	%mm3,%mm6
-	paddb	%mm1,%mm1
-	paddb	%mm5,%mm5
-	pxor	%mm2,%mm1
-	pxor	%mm6,%mm5
-	movq	%mm1,%mm3
-	movq	%mm5,%mm7
-	movq	%mm1,%mm2
-	movq	%mm5,%mm6
-	pxor	%mm1,%mm0
-	pxor	%mm5,%mm4
-	pslld	$24,%mm3
-	pslld	$24,%mm7
-	psrld	$8,%mm2
-	psrld	$8,%mm6
-	pxor	%mm3,%mm0
-	pxor	%mm7,%mm4
-	pxor	%mm2,%mm0
-	pxor	%mm6,%mm4
-	movq	8(%esp),%mm2
-	pxor	%mm3,%mm3
-	pxor	%mm7,%mm7
-	pcmpgtb	%mm1,%mm3
-	pcmpgtb	%mm5,%mm7
-	pand	%mm2,%mm3
-	pand	%mm2,%mm7
-	paddb	%mm1,%mm1
-	paddb	%mm5,%mm5
-	pxor	%mm3,%mm1
-	pxor	%mm7,%mm5
-	pshufw	$177,%mm1,%mm3
-	pshufw	$177,%mm5,%mm7
-	pxor	%mm1,%mm0
-	pxor	%mm5,%mm4
-	pxor	%mm3,%mm0
-	pxor	%mm7,%mm4
-	pxor	%mm3,%mm3
-	pxor	%mm7,%mm7
-	pcmpgtb	%mm1,%mm3
-	pcmpgtb	%mm5,%mm7
-	pand	%mm2,%mm3
-	pand	%mm2,%mm7
-	paddb	%mm1,%mm1
-	paddb	%mm5,%mm5
-	pxor	%mm3,%mm1
-	pxor	%mm7,%mm5
-	pxor	%mm1,%mm0
-	pxor	%mm5,%mm4
-	movq	%mm1,%mm3
-	movq	%mm5,%mm7
-	pshufw	$177,%mm1,%mm2
-	pshufw	$177,%mm5,%mm6
-	pxor	%mm2,%mm0
-	pxor	%mm6,%mm4
-	pslld	$8,%mm1
-	pslld	$8,%mm5
-	psrld	$8,%mm3
-	psrld	$8,%mm7
-	movq	(%edi),%mm2
-	movq	8(%edi),%mm6
-	pxor	%mm1,%mm0
-	pxor	%mm5,%mm4
-	pxor	%mm3,%mm0
-	pxor	%mm7,%mm4
-	movl	-128(%ebp),%eax
-	pslld	$16,%mm1
-	pslld	$16,%mm5
-	movl	-64(%ebp),%ebx
-	psrld	$16,%mm3
-	psrld	$16,%mm7
-	movl	(%ebp),%ecx
-	pxor	%mm1,%mm0
-	pxor	%mm5,%mm4
-	movl	64(%ebp),%edx
-	pxor	%mm3,%mm0
-	pxor	%mm7,%mm4
-	pxor	%mm2,%mm0
-	pxor	%mm6,%mm4
-	jmp	.L007loop
-.align	16
-.L008out:
-	pxor	(%edi),%mm0
-	pxor	8(%edi),%mm4
-	ret
-.size	_sse_AES_decrypt_compact,.-_sse_AES_decrypt_compact
-.type	_x86_AES_decrypt,@function
-.align	16
-_x86_AES_decrypt:
-	movl	%edi,20(%esp)
-	xorl	(%edi),%eax
-	xorl	4(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	12(%edi),%edx
-	movl	240(%edi),%esi
-	leal	-2(%esi,%esi,1),%esi
-	leal	(%edi,%esi,8),%esi
-	movl	%esi,24(%esp)
-.align	16
-.L009loop:
-	movl	%eax,%esi
-	andl	$255,%esi
-	movl	(%ebp,%esi,8),%esi
-	movzbl	%dh,%edi
-	xorl	3(%ebp,%edi,8),%esi
-	movl	%ecx,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	xorl	2(%ebp,%edi,8),%esi
-	movl	%ebx,%edi
-	shrl	$24,%edi
-	xorl	1(%ebp,%edi,8),%esi
-	movl	%esi,4(%esp)
-
-	movl	%ebx,%esi
-	andl	$255,%esi
-	movl	(%ebp,%esi,8),%esi
-	movzbl	%ah,%edi
-	xorl	3(%ebp,%edi,8),%esi
-	movl	%edx,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	xorl	2(%ebp,%edi,8),%esi
-	movl	%ecx,%edi
-	shrl	$24,%edi
-	xorl	1(%ebp,%edi,8),%esi
-	movl	%esi,8(%esp)
-
-	movl	%ecx,%esi
-	andl	$255,%esi
-	movl	(%ebp,%esi,8),%esi
-	movzbl	%bh,%edi
-	xorl	3(%ebp,%edi,8),%esi
-	movl	%eax,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	xorl	2(%ebp,%edi,8),%esi
-	movl	%edx,%edi
-	shrl	$24,%edi
-	xorl	1(%ebp,%edi,8),%esi
-
-	movl	20(%esp),%edi
-	andl	$255,%edx
-	movl	(%ebp,%edx,8),%edx
-	movzbl	%ch,%ecx
-	xorl	3(%ebp,%ecx,8),%edx
-	movl	%esi,%ecx
-	shrl	$16,%ebx
-	andl	$255,%ebx
-	xorl	2(%ebp,%ebx,8),%edx
-	movl	8(%esp),%ebx
-	shrl	$24,%eax
-	xorl	1(%ebp,%eax,8),%edx
-	movl	4(%esp),%eax
-
-	addl	$16,%edi
-	xorl	(%edi),%eax
-	xorl	4(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	12(%edi),%edx
-	cmpl	24(%esp),%edi
-	movl	%edi,20(%esp)
-	jb	.L009loop
-	leal	2176(%ebp),%ebp
-	movl	-128(%ebp),%edi
-	movl	-96(%ebp),%esi
-	movl	-64(%ebp),%edi
-	movl	-32(%ebp),%esi
-	movl	(%ebp),%edi
-	movl	32(%ebp),%esi
-	movl	64(%ebp),%edi
-	movl	96(%ebp),%esi
-	leal	-128(%ebp),%ebp
-	movl	%eax,%esi
-	andl	$255,%esi
-	movzbl	(%ebp,%esi,1),%esi
-	movzbl	%dh,%edi
-	movzbl	(%ebp,%edi,1),%edi
-	shll	$8,%edi
-	xorl	%edi,%esi
-	movl	%ecx,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	movzbl	(%ebp,%edi,1),%edi
-	shll	$16,%edi
-	xorl	%edi,%esi
-	movl	%ebx,%edi
-	shrl	$24,%edi
-	movzbl	(%ebp,%edi,1),%edi
-	shll	$24,%edi
-	xorl	%edi,%esi
-	movl	%esi,4(%esp)
-	movl	%ebx,%esi
-	andl	$255,%esi
-	movzbl	(%ebp,%esi,1),%esi
-	movzbl	%ah,%edi
-	movzbl	(%ebp,%edi,1),%edi
-	shll	$8,%edi
-	xorl	%edi,%esi
-	movl	%edx,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	movzbl	(%ebp,%edi,1),%edi
-	shll	$16,%edi
-	xorl	%edi,%esi
-	movl	%ecx,%edi
-	shrl	$24,%edi
-	movzbl	(%ebp,%edi,1),%edi
-	shll	$24,%edi
-	xorl	%edi,%esi
-	movl	%esi,8(%esp)
-	movl	%ecx,%esi
-	andl	$255,%esi
-	movzbl	(%ebp,%esi,1),%esi
-	movzbl	%bh,%edi
-	movzbl	(%ebp,%edi,1),%edi
-	shll	$8,%edi
-	xorl	%edi,%esi
-	movl	%eax,%edi
-	shrl	$16,%edi
-	andl	$255,%edi
-	movzbl	(%ebp,%edi,1),%edi
-	shll	$16,%edi
-	xorl	%edi,%esi
-	movl	%edx,%edi
-	shrl	$24,%edi
-	movzbl	(%ebp,%edi,1),%edi
-	shll	$24,%edi
-	xorl	%edi,%esi
-	movl	20(%esp),%edi
-	andl	$255,%edx
-	movzbl	(%ebp,%edx,1),%edx
-	movzbl	%ch,%ecx
-	movzbl	(%ebp,%ecx,1),%ecx
-	shll	$8,%ecx
-	xorl	%ecx,%edx
-	movl	%esi,%ecx
-	shrl	$16,%ebx
-	andl	$255,%ebx
-	movzbl	(%ebp,%ebx,1),%ebx
-	shll	$16,%ebx
-	xorl	%ebx,%edx
-	movl	8(%esp),%ebx
-	shrl	$24,%eax
-	movzbl	(%ebp,%eax,1),%eax
-	shll	$24,%eax
-	xorl	%eax,%edx
-	movl	4(%esp),%eax
-	leal	-2048(%ebp),%ebp
-	addl	$16,%edi
-	xorl	(%edi),%eax
-	xorl	4(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	12(%edi),%edx
-	ret
-.align	64
-.LAES_Td:
-.long	1353184337,1353184337
-.long	1399144830,1399144830
-.long	3282310938,3282310938
-.long	2522752826,2522752826
-.long	3412831035,3412831035
-.long	4047871263,4047871263
-.long	2874735276,2874735276
-.long	2466505547,2466505547
-.long	1442459680,1442459680
-.long	4134368941,4134368941
-.long	2440481928,2440481928
-.long	625738485,625738485
-.long	4242007375,4242007375
-.long	3620416197,3620416197
-.long	2151953702,2151953702
-.long	2409849525,2409849525
-.long	1230680542,1230680542
-.long	1729870373,1729870373
-.long	2551114309,2551114309
-.long	3787521629,3787521629
-.long	41234371,41234371
-.long	317738113,317738113
-.long	2744600205,2744600205
-.long	3338261355,3338261355
-.long	3881799427,3881799427
-.long	2510066197,2510066197
-.long	3950669247,3950669247
-.long	3663286933,3663286933
-.long	763608788,763608788
-.long	3542185048,3542185048
-.long	694804553,694804553
-.long	1154009486,1154009486
-.long	1787413109,1787413109
-.long	2021232372,2021232372
-.long	1799248025,1799248025
-.long	3715217703,3715217703
-.long	3058688446,3058688446
-.long	397248752,397248752
-.long	1722556617,1722556617
-.long	3023752829,3023752829
-.long	407560035,407560035
-.long	2184256229,2184256229
-.long	1613975959,1613975959
-.long	1165972322,1165972322
-.long	3765920945,3765920945
-.long	2226023355,2226023355
-.long	480281086,480281086
-.long	2485848313,2485848313
-.long	1483229296,1483229296
-.long	436028815,436028815
-.long	2272059028,2272059028
-.long	3086515026,3086515026
-.long	601060267,601060267
-.long	3791801202,3791801202
-.long	1468997603,1468997603
-.long	715871590,715871590
-.long	120122290,120122290
-.long	63092015,63092015
-.long	2591802758,2591802758
-.long	2768779219,2768779219
-.long	4068943920,4068943920
-.long	2997206819,2997206819
-.long	3127509762,3127509762
-.long	1552029421,1552029421
-.long	723308426,723308426
-.long	2461301159,2461301159
-.long	4042393587,4042393587
-.long	2715969870,2715969870
-.long	3455375973,3455375973
-.long	3586000134,3586000134
-.long	526529745,526529745
-.long	2331944644,2331944644
-.long	2639474228,2639474228
-.long	2689987490,2689987490
-.long	853641733,853641733
-.long	1978398372,1978398372
-.long	971801355,971801355
-.long	2867814464,2867814464
-.long	111112542,111112542
-.long	1360031421,1360031421
-.long	4186579262,4186579262
-.long	1023860118,1023860118
-.long	2919579357,2919579357
-.long	1186850381,1186850381
-.long	3045938321,3045938321
-.long	90031217,90031217
-.long	1876166148,1876166148
-.long	4279586912,4279586912
-.long	620468249,620468249
-.long	2548678102,2548678102
-.long	3426959497,3426959497
-.long	2006899047,2006899047
-.long	3175278768,3175278768
-.long	2290845959,2290845959
-.long	945494503,945494503
-.long	3689859193,3689859193
-.long	1191869601,1191869601
-.long	3910091388,3910091388
-.long	3374220536,3374220536
-.long	0,0
-.long	2206629897,2206629897
-.long	1223502642,1223502642
-.long	2893025566,2893025566
-.long	1316117100,1316117100
-.long	4227796733,4227796733
-.long	1446544655,1446544655
-.long	517320253,517320253
-.long	658058550,658058550
-.long	1691946762,1691946762
-.long	564550760,564550760
-.long	3511966619,3511966619
-.long	976107044,976107044
-.long	2976320012,2976320012
-.long	266819475,266819475
-.long	3533106868,3533106868
-.long	2660342555,2660342555
-.long	1338359936,1338359936
-.long	2720062561,2720062561
-.long	1766553434,1766553434
-.long	370807324,370807324
-.long	179999714,179999714
-.long	3844776128,3844776128
-.long	1138762300,1138762300
-.long	488053522,488053522
-.long	185403662,185403662
-.long	2915535858,2915535858
-.long	3114841645,3114841645
-.long	3366526484,3366526484
-.long	2233069911,2233069911
-.long	1275557295,1275557295
-.long	3151862254,3151862254
-.long	4250959779,4250959779
-.long	2670068215,2670068215
-.long	3170202204,3170202204
-.long	3309004356,3309004356
-.long	880737115,880737115
-.long	1982415755,1982415755
-.long	3703972811,3703972811
-.long	1761406390,1761406390
-.long	1676797112,1676797112
-.long	3403428311,3403428311
-.long	277177154,277177154
-.long	1076008723,1076008723
-.long	538035844,538035844
-.long	2099530373,2099530373
-.long	4164795346,4164795346
-.long	288553390,288553390
-.long	1839278535,1839278535
-.long	1261411869,1261411869
-.long	4080055004,4080055004
-.long	3964831245,3964831245
-.long	3504587127,3504587127
-.long	1813426987,1813426987
-.long	2579067049,2579067049
-.long	4199060497,4199060497
-.long	577038663,577038663
-.long	3297574056,3297574056
-.long	440397984,440397984
-.long	3626794326,3626794326
-.long	4019204898,4019204898
-.long	3343796615,3343796615
-.long	3251714265,3251714265
-.long	4272081548,4272081548
-.long	906744984,906744984
-.long	3481400742,3481400742
-.long	685669029,685669029
-.long	646887386,646887386
-.long	2764025151,2764025151
-.long	3835509292,3835509292
-.long	227702864,227702864
-.long	2613862250,2613862250
-.long	1648787028,1648787028
-.long	3256061430,3256061430
-.long	3904428176,3904428176
-.long	1593260334,1593260334
-.long	4121936770,4121936770
-.long	3196083615,3196083615
-.long	2090061929,2090061929
-.long	2838353263,2838353263
-.long	3004310991,3004310991
-.long	999926984,999926984
-.long	2809993232,2809993232
-.long	1852021992,1852021992
-.long	2075868123,2075868123
-.long	158869197,158869197
-.long	4095236462,4095236462
-.long	28809964,28809964
-.long	2828685187,2828685187
-.long	1701746150,1701746150
-.long	2129067946,2129067946
-.long	147831841,147831841
-.long	3873969647,3873969647
-.long	3650873274,3650873274
-.long	3459673930,3459673930
-.long	3557400554,3557400554
-.long	3598495785,3598495785
-.long	2947720241,2947720241
-.long	824393514,824393514
-.long	815048134,815048134
-.long	3227951669,3227951669
-.long	935087732,935087732
-.long	2798289660,2798289660
-.long	2966458592,2966458592
-.long	366520115,366520115
-.long	1251476721,1251476721
-.long	4158319681,4158319681
-.long	240176511,240176511
-.long	804688151,804688151
-.long	2379631990,2379631990
-.long	1303441219,1303441219
-.long	1414376140,1414376140
-.long	3741619940,3741619940
-.long	3820343710,3820343710
-.long	461924940,461924940
-.long	3089050817,3089050817
-.long	2136040774,2136040774
-.long	82468509,82468509
-.long	1563790337,1563790337
-.long	1937016826,1937016826
-.long	776014843,776014843
-.long	1511876531,1511876531
-.long	1389550482,1389550482
-.long	861278441,861278441
-.long	323475053,323475053
-.long	2355222426,2355222426
-.long	2047648055,2047648055
-.long	2383738969,2383738969
-.long	2302415851,2302415851
-.long	3995576782,3995576782
-.long	902390199,902390199
-.long	3991215329,3991215329
-.long	1018251130,1018251130
-.long	1507840668,1507840668
-.long	1064563285,1064563285
-.long	2043548696,2043548696
-.long	3208103795,3208103795
-.long	3939366739,3939366739
-.long	1537932639,1537932639
-.long	342834655,342834655
-.long	2262516856,2262516856
-.long	2180231114,2180231114
-.long	1053059257,1053059257
-.long	741614648,741614648
-.long	1598071746,1598071746
-.long	1925389590,1925389590
-.long	203809468,203809468
-.long	2336832552,2336832552
-.long	1100287487,1100287487
-.long	1895934009,1895934009
-.long	3736275976,3736275976
-.long	2632234200,2632234200
-.long	2428589668,2428589668
-.long	1636092795,1636092795
-.long	1890988757,1890988757
-.long	1952214088,1952214088
-.long	1113045200,1113045200
-.byte	82,9,106,213,48,54,165,56
-.byte	191,64,163,158,129,243,215,251
-.byte	124,227,57,130,155,47,255,135
-.byte	52,142,67,68,196,222,233,203
-.byte	84,123,148,50,166,194,35,61
-.byte	238,76,149,11,66,250,195,78
-.byte	8,46,161,102,40,217,36,178
-.byte	118,91,162,73,109,139,209,37
-.byte	114,248,246,100,134,104,152,22
-.byte	212,164,92,204,93,101,182,146
-.byte	108,112,72,80,253,237,185,218
-.byte	94,21,70,87,167,141,157,132
-.byte	144,216,171,0,140,188,211,10
-.byte	247,228,88,5,184,179,69,6
-.byte	208,44,30,143,202,63,15,2
-.byte	193,175,189,3,1,19,138,107
-.byte	58,145,17,65,79,103,220,234
-.byte	151,242,207,206,240,180,230,115
-.byte	150,172,116,34,231,173,53,133
-.byte	226,249,55,232,28,117,223,110
-.byte	71,241,26,113,29,41,197,137
-.byte	111,183,98,14,170,24,190,27
-.byte	252,86,62,75,198,210,121,32
-.byte	154,219,192,254,120,205,90,244
-.byte	31,221,168,51,136,7,199,49
-.byte	177,18,16,89,39,128,236,95
-.byte	96,81,127,169,25,181,74,13
-.byte	45,229,122,159,147,201,156,239
-.byte	160,224,59,77,174,42,245,176
-.byte	200,235,187,60,131,83,153,97
-.byte	23,43,4,126,186,119,214,38
-.byte	225,105,20,99,85,33,12,125
-.byte	82,9,106,213,48,54,165,56
-.byte	191,64,163,158,129,243,215,251
-.byte	124,227,57,130,155,47,255,135
-.byte	52,142,67,68,196,222,233,203
-.byte	84,123,148,50,166,194,35,61
-.byte	238,76,149,11,66,250,195,78
-.byte	8,46,161,102,40,217,36,178
-.byte	118,91,162,73,109,139,209,37
-.byte	114,248,246,100,134,104,152,22
-.byte	212,164,92,204,93,101,182,146
-.byte	108,112,72,80,253,237,185,218
-.byte	94,21,70,87,167,141,157,132
-.byte	144,216,171,0,140,188,211,10
-.byte	247,228,88,5,184,179,69,6
-.byte	208,44,30,143,202,63,15,2
-.byte	193,175,189,3,1,19,138,107
-.byte	58,145,17,65,79,103,220,234
-.byte	151,242,207,206,240,180,230,115
-.byte	150,172,116,34,231,173,53,133
-.byte	226,249,55,232,28,117,223,110
-.byte	71,241,26,113,29,41,197,137
-.byte	111,183,98,14,170,24,190,27
-.byte	252,86,62,75,198,210,121,32
-.byte	154,219,192,254,120,205,90,244
-.byte	31,221,168,51,136,7,199,49
-.byte	177,18,16,89,39,128,236,95
-.byte	96,81,127,169,25,181,74,13
-.byte	45,229,122,159,147,201,156,239
-.byte	160,224,59,77,174,42,245,176
-.byte	200,235,187,60,131,83,153,97
-.byte	23,43,4,126,186,119,214,38
-.byte	225,105,20,99,85,33,12,125
-.byte	82,9,106,213,48,54,165,56
-.byte	191,64,163,158,129,243,215,251
-.byte	124,227,57,130,155,47,255,135
-.byte	52,142,67,68,196,222,233,203
-.byte	84,123,148,50,166,194,35,61
-.byte	238,76,149,11,66,250,195,78
-.byte	8,46,161,102,40,217,36,178
-.byte	118,91,162,73,109,139,209,37
-.byte	114,248,246,100,134,104,152,22
-.byte	212,164,92,204,93,101,182,146
-.byte	108,112,72,80,253,237,185,218
-.byte	94,21,70,87,167,141,157,132
-.byte	144,216,171,0,140,188,211,10
-.byte	247,228,88,5,184,179,69,6
-.byte	208,44,30,143,202,63,15,2
-.byte	193,175,189,3,1,19,138,107
-.byte	58,145,17,65,79,103,220,234
-.byte	151,242,207,206,240,180,230,115
-.byte	150,172,116,34,231,173,53,133
-.byte	226,249,55,232,28,117,223,110
-.byte	71,241,26,113,29,41,197,137
-.byte	111,183,98,14,170,24,190,27
-.byte	252,86,62,75,198,210,121,32
-.byte	154,219,192,254,120,205,90,244
-.byte	31,221,168,51,136,7,199,49
-.byte	177,18,16,89,39,128,236,95
-.byte	96,81,127,169,25,181,74,13
-.byte	45,229,122,159,147,201,156,239
-.byte	160,224,59,77,174,42,245,176
-.byte	200,235,187,60,131,83,153,97
-.byte	23,43,4,126,186,119,214,38
-.byte	225,105,20,99,85,33,12,125
-.byte	82,9,106,213,48,54,165,56
-.byte	191,64,163,158,129,243,215,251
-.byte	124,227,57,130,155,47,255,135
-.byte	52,142,67,68,196,222,233,203
-.byte	84,123,148,50,166,194,35,61
-.byte	238,76,149,11,66,250,195,78
-.byte	8,46,161,102,40,217,36,178
-.byte	118,91,162,73,109,139,209,37
-.byte	114,248,246,100,134,104,152,22
-.byte	212,164,92,204,93,101,182,146
-.byte	108,112,72,80,253,237,185,218
-.byte	94,21,70,87,167,141,157,132
-.byte	144,216,171,0,140,188,211,10
-.byte	247,228,88,5,184,179,69,6
-.byte	208,44,30,143,202,63,15,2
-.byte	193,175,189,3,1,19,138,107
-.byte	58,145,17,65,79,103,220,234
-.byte	151,242,207,206,240,180,230,115
-.byte	150,172,116,34,231,173,53,133
-.byte	226,249,55,232,28,117,223,110
-.byte	71,241,26,113,29,41,197,137
-.byte	111,183,98,14,170,24,190,27
-.byte	252,86,62,75,198,210,121,32
-.byte	154,219,192,254,120,205,90,244
-.byte	31,221,168,51,136,7,199,49
-.byte	177,18,16,89,39,128,236,95
-.byte	96,81,127,169,25,181,74,13
-.byte	45,229,122,159,147,201,156,239
-.byte	160,224,59,77,174,42,245,176
-.byte	200,235,187,60,131,83,153,97
-.byte	23,43,4,126,186,119,214,38
-.byte	225,105,20,99,85,33,12,125
-.size	_x86_AES_decrypt,.-_x86_AES_decrypt
-.globl	AES_decrypt
-.type	AES_decrypt,@function
-.align	16
-AES_decrypt:
-.L_AES_decrypt_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%esi
-	movl	28(%esp),%edi
-	movl	%esp,%eax
-	subl	$36,%esp
-	andl	$-64,%esp
-	leal	-127(%edi),%ebx
-	subl	%esp,%ebx
-	negl	%ebx
-	andl	$960,%ebx
-	subl	%ebx,%esp
-	addl	$4,%esp
-	movl	%eax,28(%esp)
-	call	.L010pic_point
-.L010pic_point:
-	popl	%ebp
-	leal	OPENSSL_ia32cap_P,%eax
-	leal	.LAES_Td-.L010pic_point(%ebp),%ebp
-	leal	764(%esp),%ebx
-	subl	%ebp,%ebx
-	andl	$768,%ebx
-	leal	2176(%ebp,%ebx,1),%ebp
-	btl	$25,(%eax)
-	jnc	.L011x86
-	movq	(%esi),%mm0
-	movq	8(%esi),%mm4
-	call	_sse_AES_decrypt_compact
-	movl	28(%esp),%esp
-	movl	24(%esp),%esi
-	movq	%mm0,(%esi)
-	movq	%mm4,8(%esi)
-	emms
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.align	16
-.L011x86:
-	movl	%ebp,24(%esp)
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	movl	12(%esi),%edx
-	call	_x86_AES_decrypt_compact
-	movl	28(%esp),%esp
-	movl	24(%esp),%esi
-	movl	%eax,(%esi)
-	movl	%ebx,4(%esi)
-	movl	%ecx,8(%esi)
-	movl	%edx,12(%esi)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	AES_decrypt,.-.L_AES_decrypt_begin
-.globl	AES_cbc_encrypt
-.type	AES_cbc_encrypt,@function
-.align	16
-AES_cbc_encrypt:
-.L_AES_cbc_encrypt_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	28(%esp),%ecx
-	cmpl	$0,%ecx
-	je	.L012drop_out
-	call	.L013pic_point
-.L013pic_point:
-	popl	%ebp
-	leal	OPENSSL_ia32cap_P,%eax
-	cmpl	$0,40(%esp)
-	leal	.LAES_Te-.L013pic_point(%ebp),%ebp
-	jne	.L014picked_te
-	leal	.LAES_Td-.LAES_Te(%ebp),%ebp
-.L014picked_te:
-	pushfl
-	cld
-	cmpl	$512,%ecx
-	jb	.L015slow_way
-	testl	$15,%ecx
-	jnz	.L015slow_way
-	btl	$28,(%eax)
-	jc	.L015slow_way
-	leal	-324(%esp),%esi
-	andl	$-64,%esi
-	movl	%ebp,%eax
-	leal	2304(%ebp),%ebx
-	movl	%esi,%edx
-	andl	$4095,%eax
-	andl	$4095,%ebx
-	andl	$4095,%edx
-	cmpl	%ebx,%edx
-	jb	.L016tbl_break_out
-	subl	%ebx,%edx
-	subl	%edx,%esi
-	jmp	.L017tbl_ok
-.align	4
-.L016tbl_break_out:
-	subl	%eax,%edx
-	andl	$4095,%edx
-	addl	$384,%edx
-	subl	%edx,%esi
-.align	4
-.L017tbl_ok:
-	leal	24(%esp),%edx
-	xchgl	%esi,%esp
-	addl	$4,%esp
-	movl	%ebp,24(%esp)
-	movl	%esi,28(%esp)
-	movl	(%edx),%eax
-	movl	4(%edx),%ebx
-	movl	12(%edx),%edi
-	movl	16(%edx),%esi
-	movl	20(%edx),%edx
-	movl	%eax,32(%esp)
-	movl	%ebx,36(%esp)
-	movl	%ecx,40(%esp)
-	movl	%edi,44(%esp)
-	movl	%esi,48(%esp)
-	movl	$0,316(%esp)
-	movl	%edi,%ebx
-	movl	$61,%ecx
-	subl	%ebp,%ebx
-	movl	%edi,%esi
-	andl	$4095,%ebx
-	leal	76(%esp),%edi
-	cmpl	$2304,%ebx
-	jb	.L018do_copy
-	cmpl	$3852,%ebx
-	jb	.L019skip_copy
-.align	4
-.L018do_copy:
-	movl	%edi,44(%esp)
-.long	2784229001
-.L019skip_copy:
-	movl	$16,%edi
-.align	4
-.L020prefetch_tbl:
-	movl	(%ebp),%eax
-	movl	32(%ebp),%ebx
-	movl	64(%ebp),%ecx
-	movl	96(%ebp),%esi
-	leal	128(%ebp),%ebp
-	subl	$1,%edi
-	jnz	.L020prefetch_tbl
-	subl	$2048,%ebp
-	movl	32(%esp),%esi
-	movl	48(%esp),%edi
-	cmpl	$0,%edx
-	je	.L021fast_decrypt
-	movl	(%edi),%eax
-	movl	4(%edi),%ebx
-.align	16
-.L022fast_enc_loop:
-	movl	8(%edi),%ecx
-	movl	12(%edi),%edx
-	xorl	(%esi),%eax
-	xorl	4(%esi),%ebx
-	xorl	8(%esi),%ecx
-	xorl	12(%esi),%edx
-	movl	44(%esp),%edi
-	call	_x86_AES_encrypt
-	movl	32(%esp),%esi
-	movl	36(%esp),%edi
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	movl	%ecx,8(%edi)
-	movl	%edx,12(%edi)
-	leal	16(%esi),%esi
-	movl	40(%esp),%ecx
-	movl	%esi,32(%esp)
-	leal	16(%edi),%edx
-	movl	%edx,36(%esp)
-	subl	$16,%ecx
-	movl	%ecx,40(%esp)
-	jnz	.L022fast_enc_loop
-	movl	48(%esp),%esi
-	movl	8(%edi),%ecx
-	movl	12(%edi),%edx
-	movl	%eax,(%esi)
-	movl	%ebx,4(%esi)
-	movl	%ecx,8(%esi)
-	movl	%edx,12(%esi)
-	cmpl	$0,316(%esp)
-	movl	44(%esp),%edi
-	je	.L023skip_ezero
-	movl	$60,%ecx
-	xorl	%eax,%eax
-.align	4
-.long	2884892297
-.L023skip_ezero:
-	movl	28(%esp),%esp
-	popfl
-.L012drop_out:
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-	pushfl
-.align	16
-.L021fast_decrypt:
-	cmpl	36(%esp),%esi
-	je	.L024fast_dec_in_place
-	movl	%edi,52(%esp)
-.align	4
-.align	16
-.L025fast_dec_loop:
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	movl	12(%esi),%edx
-	movl	44(%esp),%edi
-	call	_x86_AES_decrypt
-	movl	52(%esp),%edi
-	movl	40(%esp),%esi
-	xorl	(%edi),%eax
-	xorl	4(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	12(%edi),%edx
-	movl	36(%esp),%edi
-	movl	32(%esp),%esi
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	movl	%ecx,8(%edi)
-	movl	%edx,12(%edi)
-	movl	40(%esp),%ecx
-	movl	%esi,52(%esp)
-	leal	16(%esi),%esi
-	movl	%esi,32(%esp)
-	leal	16(%edi),%edi
-	movl	%edi,36(%esp)
-	subl	$16,%ecx
-	movl	%ecx,40(%esp)
-	jnz	.L025fast_dec_loop
-	movl	52(%esp),%edi
-	movl	48(%esp),%esi
-	movl	(%edi),%eax
-	movl	4(%edi),%ebx
-	movl	8(%edi),%ecx
-	movl	12(%edi),%edx
-	movl	%eax,(%esi)
-	movl	%ebx,4(%esi)
-	movl	%ecx,8(%esi)
-	movl	%edx,12(%esi)
-	jmp	.L026fast_dec_out
-.align	16
-.L024fast_dec_in_place:
-.L027fast_dec_in_place_loop:
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	movl	12(%esi),%edx
-	leal	60(%esp),%edi
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	movl	%ecx,8(%edi)
-	movl	%edx,12(%edi)
-	movl	44(%esp),%edi
-	call	_x86_AES_decrypt
-	movl	48(%esp),%edi
-	movl	36(%esp),%esi
-	xorl	(%edi),%eax
-	xorl	4(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	12(%edi),%edx
-	movl	%eax,(%esi)
-	movl	%ebx,4(%esi)
-	movl	%ecx,8(%esi)
-	movl	%edx,12(%esi)
-	leal	16(%esi),%esi
-	movl	%esi,36(%esp)
-	leal	60(%esp),%esi
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	movl	12(%esi),%edx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	movl	%ecx,8(%edi)
-	movl	%edx,12(%edi)
-	movl	32(%esp),%esi
-	movl	40(%esp),%ecx
-	leal	16(%esi),%esi
-	movl	%esi,32(%esp)
-	subl	$16,%ecx
-	movl	%ecx,40(%esp)
-	jnz	.L027fast_dec_in_place_loop
-.align	4
-.L026fast_dec_out:
-	cmpl	$0,316(%esp)
-	movl	44(%esp),%edi
-	je	.L028skip_dzero
-	movl	$60,%ecx
-	xorl	%eax,%eax
-.align	4
-.long	2884892297
-.L028skip_dzero:
-	movl	28(%esp),%esp
-	popfl
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-	pushfl
-.align	16
-.L015slow_way:
-	movl	(%eax),%eax
-	movl	36(%esp),%edi
-	leal	-80(%esp),%esi
-	andl	$-64,%esi
-	leal	-143(%edi),%ebx
-	subl	%esi,%ebx
-	negl	%ebx
-	andl	$960,%ebx
-	subl	%ebx,%esi
-	leal	768(%esi),%ebx
-	subl	%ebp,%ebx
-	andl	$768,%ebx
-	leal	2176(%ebp,%ebx,1),%ebp
-	leal	24(%esp),%edx
-	xchgl	%esi,%esp
-	addl	$4,%esp
-	movl	%ebp,24(%esp)
-	movl	%esi,28(%esp)
-	movl	%eax,52(%esp)
-	movl	(%edx),%eax
-	movl	4(%edx),%ebx
-	movl	16(%edx),%esi
-	movl	20(%edx),%edx
-	movl	%eax,32(%esp)
-	movl	%ebx,36(%esp)
-	movl	%ecx,40(%esp)
-	movl	%edi,44(%esp)
-	movl	%esi,48(%esp)
-	movl	%esi,%edi
-	movl	%eax,%esi
-	cmpl	$0,%edx
-	je	.L029slow_decrypt
-	cmpl	$16,%ecx
-	movl	%ebx,%edx
-	jb	.L030slow_enc_tail
-	btl	$25,52(%esp)
-	jnc	.L031slow_enc_x86
-	movq	(%edi),%mm0
-	movq	8(%edi),%mm4
-.align	16
-.L032slow_enc_loop_sse:
-	pxor	(%esi),%mm0
-	pxor	8(%esi),%mm4
-	movl	44(%esp),%edi
-	call	_sse_AES_encrypt_compact
-	movl	32(%esp),%esi
-	movl	36(%esp),%edi
-	movl	40(%esp),%ecx
-	movq	%mm0,(%edi)
-	movq	%mm4,8(%edi)
-	leal	16(%esi),%esi
-	movl	%esi,32(%esp)
-	leal	16(%edi),%edx
-	movl	%edx,36(%esp)
-	subl	$16,%ecx
-	cmpl	$16,%ecx
-	movl	%ecx,40(%esp)
-	jae	.L032slow_enc_loop_sse
-	testl	$15,%ecx
-	jnz	.L030slow_enc_tail
-	movl	48(%esp),%esi
-	movq	%mm0,(%esi)
-	movq	%mm4,8(%esi)
-	emms
-	movl	28(%esp),%esp
-	popfl
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-	pushfl
-.align	16
-.L031slow_enc_x86:
-	movl	(%edi),%eax
-	movl	4(%edi),%ebx
-.align	4
-.L033slow_enc_loop_x86:
-	movl	8(%edi),%ecx
-	movl	12(%edi),%edx
-	xorl	(%esi),%eax
-	xorl	4(%esi),%ebx
-	xorl	8(%esi),%ecx
-	xorl	12(%esi),%edx
-	movl	44(%esp),%edi
-	call	_x86_AES_encrypt_compact
-	movl	32(%esp),%esi
-	movl	36(%esp),%edi
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	movl	%ecx,8(%edi)
-	movl	%edx,12(%edi)
-	movl	40(%esp),%ecx
-	leal	16(%esi),%esi
-	movl	%esi,32(%esp)
-	leal	16(%edi),%edx
-	movl	%edx,36(%esp)
-	subl	$16,%ecx
-	cmpl	$16,%ecx
-	movl	%ecx,40(%esp)
-	jae	.L033slow_enc_loop_x86
-	testl	$15,%ecx
-	jnz	.L030slow_enc_tail
-	movl	48(%esp),%esi
-	movl	8(%edi),%ecx
-	movl	12(%edi),%edx
-	movl	%eax,(%esi)
-	movl	%ebx,4(%esi)
-	movl	%ecx,8(%esi)
-	movl	%edx,12(%esi)
-	movl	28(%esp),%esp
-	popfl
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-	pushfl
-.align	16
-.L030slow_enc_tail:
-	emms
-	movl	%edx,%edi
-	movl	$16,%ebx
-	subl	%ecx,%ebx
-	cmpl	%esi,%edi
-	je	.L034enc_in_place
-.align	4
-.long	2767451785
-	jmp	.L035enc_skip_in_place
-.L034enc_in_place:
-	leal	(%edi,%ecx,1),%edi
-.L035enc_skip_in_place:
-	movl	%ebx,%ecx
-	xorl	%eax,%eax
-.align	4
-.long	2868115081
-	movl	48(%esp),%edi
-	movl	%edx,%esi
-	movl	(%edi),%eax
-	movl	4(%edi),%ebx
-	movl	$16,40(%esp)
-	jmp	.L033slow_enc_loop_x86
-.align	16
-.L029slow_decrypt:
-	btl	$25,52(%esp)
-	jnc	.L036slow_dec_loop_x86
-.align	4
-.L037slow_dec_loop_sse:
-	movq	(%esi),%mm0
-	movq	8(%esi),%mm4
-	movl	44(%esp),%edi
-	call	_sse_AES_decrypt_compact
-	movl	32(%esp),%esi
-	leal	60(%esp),%eax
-	movl	36(%esp),%ebx
-	movl	40(%esp),%ecx
-	movl	48(%esp),%edi
-	movq	(%esi),%mm1
-	movq	8(%esi),%mm5
-	pxor	(%edi),%mm0
-	pxor	8(%edi),%mm4
-	movq	%mm1,(%edi)
-	movq	%mm5,8(%edi)
-	subl	$16,%ecx
-	jc	.L038slow_dec_partial_sse
-	movq	%mm0,(%ebx)
-	movq	%mm4,8(%ebx)
-	leal	16(%ebx),%ebx
-	movl	%ebx,36(%esp)
-	leal	16(%esi),%esi
-	movl	%esi,32(%esp)
-	movl	%ecx,40(%esp)
-	jnz	.L037slow_dec_loop_sse
-	emms
-	movl	28(%esp),%esp
-	popfl
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-	pushfl
-.align	16
-.L038slow_dec_partial_sse:
-	movq	%mm0,(%eax)
-	movq	%mm4,8(%eax)
-	emms
-	addl	$16,%ecx
-	movl	%ebx,%edi
-	movl	%eax,%esi
-.align	4
-.long	2767451785
-	movl	28(%esp),%esp
-	popfl
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-	pushfl
-.align	16
-.L036slow_dec_loop_x86:
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	movl	12(%esi),%edx
-	leal	60(%esp),%edi
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	movl	%ecx,8(%edi)
-	movl	%edx,12(%edi)
-	movl	44(%esp),%edi
-	call	_x86_AES_decrypt_compact
-	movl	48(%esp),%edi
-	movl	40(%esp),%esi
-	xorl	(%edi),%eax
-	xorl	4(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	12(%edi),%edx
-	subl	$16,%esi
-	jc	.L039slow_dec_partial_x86
-	movl	%esi,40(%esp)
-	movl	36(%esp),%esi
-	movl	%eax,(%esi)
-	movl	%ebx,4(%esi)
-	movl	%ecx,8(%esi)
-	movl	%edx,12(%esi)
-	leal	16(%esi),%esi
-	movl	%esi,36(%esp)
-	leal	60(%esp),%esi
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	movl	12(%esi),%edx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	movl	%ecx,8(%edi)
-	movl	%edx,12(%edi)
-	movl	32(%esp),%esi
-	leal	16(%esi),%esi
-	movl	%esi,32(%esp)
-	jnz	.L036slow_dec_loop_x86
-	movl	28(%esp),%esp
-	popfl
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-	pushfl
-.align	16
-.L039slow_dec_partial_x86:
-	leal	60(%esp),%esi
-	movl	%eax,(%esi)
-	movl	%ebx,4(%esi)
-	movl	%ecx,8(%esi)
-	movl	%edx,12(%esi)
-	movl	32(%esp),%esi
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	movl	12(%esi),%edx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	movl	%ecx,8(%edi)
-	movl	%edx,12(%edi)
-	movl	40(%esp),%ecx
-	movl	36(%esp),%edi
-	leal	60(%esp),%esi
-.align	4
-.long	2767451785
-	movl	28(%esp),%esp
-	popfl
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	AES_cbc_encrypt,.-.L_AES_cbc_encrypt_begin
-.type	_x86_AES_set_encrypt_key,@function
-.align	16
-_x86_AES_set_encrypt_key:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	24(%esp),%esi
-	movl	32(%esp),%edi
-	testl	$-1,%esi
-	jz	.L040badpointer
-	testl	$-1,%edi
-	jz	.L040badpointer
-	call	.L041pic_point
-.L041pic_point:
-	popl	%ebp
-	leal	.LAES_Te-.L041pic_point(%ebp),%ebp
-	leal	2176(%ebp),%ebp
-	movl	-128(%ebp),%eax
-	movl	-96(%ebp),%ebx
-	movl	-64(%ebp),%ecx
-	movl	-32(%ebp),%edx
-	movl	(%ebp),%eax
-	movl	32(%ebp),%ebx
-	movl	64(%ebp),%ecx
-	movl	96(%ebp),%edx
-	movl	28(%esp),%ecx
-	cmpl	$128,%ecx
-	je	.L04210rounds
-	cmpl	$192,%ecx
-	je	.L04312rounds
-	cmpl	$256,%ecx
-	je	.L04414rounds
-	movl	$-2,%eax
-	jmp	.L045exit
-.L04210rounds:
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	movl	12(%esi),%edx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	movl	%ecx,8(%edi)
-	movl	%edx,12(%edi)
-	xorl	%ecx,%ecx
-	jmp	.L04610shortcut
-.align	4
-.L04710loop:
-	movl	(%edi),%eax
-	movl	12(%edi),%edx
-.L04610shortcut:
-	movzbl	%dl,%esi
-	movzbl	-128(%ebp,%esi,1),%ebx
-	movzbl	%dh,%esi
-	shll	$24,%ebx
-	xorl	%ebx,%eax
-	movzbl	-128(%ebp,%esi,1),%ebx
-	shrl	$16,%edx
-	movzbl	%dl,%esi
-	xorl	%ebx,%eax
-	movzbl	-128(%ebp,%esi,1),%ebx
-	movzbl	%dh,%esi
-	shll	$8,%ebx
-	xorl	%ebx,%eax
-	movzbl	-128(%ebp,%esi,1),%ebx
-	shll	$16,%ebx
-	xorl	%ebx,%eax
-	xorl	896(%ebp,%ecx,4),%eax
-	movl	%eax,16(%edi)
-	xorl	4(%edi),%eax
-	movl	%eax,20(%edi)
-	xorl	8(%edi),%eax
-	movl	%eax,24(%edi)
-	xorl	12(%edi),%eax
-	movl	%eax,28(%edi)
-	incl	%ecx
-	addl	$16,%edi
-	cmpl	$10,%ecx
-	jl	.L04710loop
-	movl	$10,80(%edi)
-	xorl	%eax,%eax
-	jmp	.L045exit
-.L04312rounds:
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	movl	12(%esi),%edx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	movl	%ecx,8(%edi)
-	movl	%edx,12(%edi)
-	movl	16(%esi),%ecx
-	movl	20(%esi),%edx
-	movl	%ecx,16(%edi)
-	movl	%edx,20(%edi)
-	xorl	%ecx,%ecx
-	jmp	.L04812shortcut
-.align	4
-.L04912loop:
-	movl	(%edi),%eax
-	movl	20(%edi),%edx
-.L04812shortcut:
-	movzbl	%dl,%esi
-	movzbl	-128(%ebp,%esi,1),%ebx
-	movzbl	%dh,%esi
-	shll	$24,%ebx
-	xorl	%ebx,%eax
-	movzbl	-128(%ebp,%esi,1),%ebx
-	shrl	$16,%edx
-	movzbl	%dl,%esi
-	xorl	%ebx,%eax
-	movzbl	-128(%ebp,%esi,1),%ebx
-	movzbl	%dh,%esi
-	shll	$8,%ebx
-	xorl	%ebx,%eax
-	movzbl	-128(%ebp,%esi,1),%ebx
-	shll	$16,%ebx
-	xorl	%ebx,%eax
-	xorl	896(%ebp,%ecx,4),%eax
-	movl	%eax,24(%edi)
-	xorl	4(%edi),%eax
-	movl	%eax,28(%edi)
-	xorl	8(%edi),%eax
-	movl	%eax,32(%edi)
-	xorl	12(%edi),%eax
-	movl	%eax,36(%edi)
-	cmpl	$7,%ecx
-	je	.L05012break
-	incl	%ecx
-	xorl	16(%edi),%eax
-	movl	%eax,40(%edi)
-	xorl	20(%edi),%eax
-	movl	%eax,44(%edi)
-	addl	$24,%edi
-	jmp	.L04912loop
-.L05012break:
-	movl	$12,72(%edi)
-	xorl	%eax,%eax
-	jmp	.L045exit
-.L04414rounds:
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	movl	12(%esi),%edx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	movl	%ecx,8(%edi)
-	movl	%edx,12(%edi)
-	movl	16(%esi),%eax
-	movl	20(%esi),%ebx
-	movl	24(%esi),%ecx
-	movl	28(%esi),%edx
-	movl	%eax,16(%edi)
-	movl	%ebx,20(%edi)
-	movl	%ecx,24(%edi)
-	movl	%edx,28(%edi)
-	xorl	%ecx,%ecx
-	jmp	.L05114shortcut
-.align	4
-.L05214loop:
-	movl	28(%edi),%edx
-.L05114shortcut:
-	movl	(%edi),%eax
-	movzbl	%dl,%esi
-	movzbl	-128(%ebp,%esi,1),%ebx
-	movzbl	%dh,%esi
-	shll	$24,%ebx
-	xorl	%ebx,%eax
-	movzbl	-128(%ebp,%esi,1),%ebx
-	shrl	$16,%edx
-	movzbl	%dl,%esi
-	xorl	%ebx,%eax
-	movzbl	-128(%ebp,%esi,1),%ebx
-	movzbl	%dh,%esi
-	shll	$8,%ebx
-	xorl	%ebx,%eax
-	movzbl	-128(%ebp,%esi,1),%ebx
-	shll	$16,%ebx
-	xorl	%ebx,%eax
-	xorl	896(%ebp,%ecx,4),%eax
-	movl	%eax,32(%edi)
-	xorl	4(%edi),%eax
-	movl	%eax,36(%edi)
-	xorl	8(%edi),%eax
-	movl	%eax,40(%edi)
-	xorl	12(%edi),%eax
-	movl	%eax,44(%edi)
-	cmpl	$6,%ecx
-	je	.L05314break
-	incl	%ecx
-	movl	%eax,%edx
-	movl	16(%edi),%eax
-	movzbl	%dl,%esi
-	movzbl	-128(%ebp,%esi,1),%ebx
-	movzbl	%dh,%esi
-	xorl	%ebx,%eax
-	movzbl	-128(%ebp,%esi,1),%ebx
-	shrl	$16,%edx
-	shll	$8,%ebx
-	movzbl	%dl,%esi
-	xorl	%ebx,%eax
-	movzbl	-128(%ebp,%esi,1),%ebx
-	movzbl	%dh,%esi
-	shll	$16,%ebx
-	xorl	%ebx,%eax
-	movzbl	-128(%ebp,%esi,1),%ebx
-	shll	$24,%ebx
-	xorl	%ebx,%eax
-	movl	%eax,48(%edi)
-	xorl	20(%edi),%eax
-	movl	%eax,52(%edi)
-	xorl	24(%edi),%eax
-	movl	%eax,56(%edi)
-	xorl	28(%edi),%eax
-	movl	%eax,60(%edi)
-	addl	$32,%edi
-	jmp	.L05214loop
-.L05314break:
-	movl	$14,48(%edi)
-	xorl	%eax,%eax
-	jmp	.L045exit
-.L040badpointer:
-	movl	$-1,%eax
-.L045exit:
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	_x86_AES_set_encrypt_key,.-_x86_AES_set_encrypt_key
-.globl	private_AES_set_encrypt_key
-.type	private_AES_set_encrypt_key,@function
-.align	16
-private_AES_set_encrypt_key:
-.L_private_AES_set_encrypt_key_begin:
-	call	_x86_AES_set_encrypt_key
-	ret
-.size	private_AES_set_encrypt_key,.-.L_private_AES_set_encrypt_key_begin
-.globl	private_AES_set_decrypt_key
-.type	private_AES_set_decrypt_key,@function
-.align	16
-private_AES_set_decrypt_key:
-.L_private_AES_set_decrypt_key_begin:
-	call	_x86_AES_set_encrypt_key
-	cmpl	$0,%eax
-	je	.L054proceed
-	ret
-.L054proceed:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	28(%esp),%esi
-	movl	240(%esi),%ecx
-	leal	(,%ecx,4),%ecx
-	leal	(%esi,%ecx,4),%edi
-.align	4
-.L055invert:
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	(%edi),%ecx
-	movl	4(%edi),%edx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	movl	%ecx,(%esi)
-	movl	%edx,4(%esi)
-	movl	8(%esi),%eax
-	movl	12(%esi),%ebx
-	movl	8(%edi),%ecx
-	movl	12(%edi),%edx
-	movl	%eax,8(%edi)
-	movl	%ebx,12(%edi)
-	movl	%ecx,8(%esi)
-	movl	%edx,12(%esi)
-	addl	$16,%esi
-	subl	$16,%edi
-	cmpl	%edi,%esi
-	jne	.L055invert
-	movl	28(%esp),%edi
-	movl	240(%edi),%esi
-	leal	-2(%esi,%esi,1),%esi
-	leal	(%edi,%esi,8),%esi
-	movl	%esi,28(%esp)
-	movl	16(%edi),%eax
-.align	4
-.L056permute:
-	addl	$16,%edi
-	movl	%eax,%esi
-	andl	$2155905152,%esi
-	movl	%esi,%ebp
-	shrl	$7,%ebp
-	leal	(%eax,%eax,1),%ebx
-	subl	%ebp,%esi
-	andl	$4278124286,%ebx
-	andl	$454761243,%esi
-	xorl	%ebx,%esi
-	movl	%esi,%ebx
-	andl	$2155905152,%esi
-	movl	%esi,%ebp
-	shrl	$7,%ebp
-	leal	(%ebx,%ebx,1),%ecx
-	subl	%ebp,%esi
-	andl	$4278124286,%ecx
-	andl	$454761243,%esi
-	xorl	%eax,%ebx
-	xorl	%ecx,%esi
-	movl	%esi,%ecx
-	andl	$2155905152,%esi
-	movl	%esi,%ebp
-	shrl	$7,%ebp
-	leal	(%ecx,%ecx,1),%edx
-	xorl	%eax,%ecx
-	subl	%ebp,%esi
-	andl	$4278124286,%edx
-	andl	$454761243,%esi
-	roll	$8,%eax
-	xorl	%esi,%edx
-	movl	4(%edi),%ebp
-	xorl	%ebx,%eax
-	xorl	%edx,%ebx
-	xorl	%ecx,%eax
-	roll	$24,%ebx
-	xorl	%edx,%ecx
-	xorl	%edx,%eax
-	roll	$16,%ecx
-	xorl	%ebx,%eax
-	roll	$8,%edx
-	xorl	%ecx,%eax
-	movl	%ebp,%ebx
-	xorl	%edx,%eax
-	movl	%eax,(%edi)
-	movl	%ebx,%esi
-	andl	$2155905152,%esi
-	movl	%esi,%ebp
-	shrl	$7,%ebp
-	leal	(%ebx,%ebx,1),%ecx
-	subl	%ebp,%esi
-	andl	$4278124286,%ecx
-	andl	$454761243,%esi
-	xorl	%ecx,%esi
-	movl	%esi,%ecx
-	andl	$2155905152,%esi
-	movl	%esi,%ebp
-	shrl	$7,%ebp
-	leal	(%ecx,%ecx,1),%edx
-	subl	%ebp,%esi
-	andl	$4278124286,%edx
-	andl	$454761243,%esi
-	xorl	%ebx,%ecx
-	xorl	%edx,%esi
-	movl	%esi,%edx
-	andl	$2155905152,%esi
-	movl	%esi,%ebp
-	shrl	$7,%ebp
-	leal	(%edx,%edx,1),%eax
-	xorl	%ebx,%edx
-	subl	%ebp,%esi
-	andl	$4278124286,%eax
-	andl	$454761243,%esi
-	roll	$8,%ebx
-	xorl	%esi,%eax
-	movl	8(%edi),%ebp
-	xorl	%ecx,%ebx
-	xorl	%eax,%ecx
-	xorl	%edx,%ebx
-	roll	$24,%ecx
-	xorl	%eax,%edx
-	xorl	%eax,%ebx
-	roll	$16,%edx
-	xorl	%ecx,%ebx
-	roll	$8,%eax
-	xorl	%edx,%ebx
-	movl	%ebp,%ecx
-	xorl	%eax,%ebx
-	movl	%ebx,4(%edi)
-	movl	%ecx,%esi
-	andl	$2155905152,%esi
-	movl	%esi,%ebp
-	shrl	$7,%ebp
-	leal	(%ecx,%ecx,1),%edx
-	subl	%ebp,%esi
-	andl	$4278124286,%edx
-	andl	$454761243,%esi
-	xorl	%edx,%esi
-	movl	%esi,%edx
-	andl	$2155905152,%esi
-	movl	%esi,%ebp
-	shrl	$7,%ebp
-	leal	(%edx,%edx,1),%eax
-	subl	%ebp,%esi
-	andl	$4278124286,%eax
-	andl	$454761243,%esi
-	xorl	%ecx,%edx
-	xorl	%eax,%esi
-	movl	%esi,%eax
-	andl	$2155905152,%esi
-	movl	%esi,%ebp
-	shrl	$7,%ebp
-	leal	(%eax,%eax,1),%ebx
-	xorl	%ecx,%eax
-	subl	%ebp,%esi
-	andl	$4278124286,%ebx
-	andl	$454761243,%esi
-	roll	$8,%ecx
-	xorl	%esi,%ebx
-	movl	12(%edi),%ebp
-	xorl	%edx,%ecx
-	xorl	%ebx,%edx
-	xorl	%eax,%ecx
-	roll	$24,%edx
-	xorl	%ebx,%eax
-	xorl	%ebx,%ecx
-	roll	$16,%eax
-	xorl	%edx,%ecx
-	roll	$8,%ebx
-	xorl	%eax,%ecx
-	movl	%ebp,%edx
-	xorl	%ebx,%ecx
-	movl	%ecx,8(%edi)
-	movl	%edx,%esi
-	andl	$2155905152,%esi
-	movl	%esi,%ebp
-	shrl	$7,%ebp
-	leal	(%edx,%edx,1),%eax
-	subl	%ebp,%esi
-	andl	$4278124286,%eax
-	andl	$454761243,%esi
-	xorl	%eax,%esi
-	movl	%esi,%eax
-	andl	$2155905152,%esi
-	movl	%esi,%ebp
-	shrl	$7,%ebp
-	leal	(%eax,%eax,1),%ebx
-	subl	%ebp,%esi
-	andl	$4278124286,%ebx
-	andl	$454761243,%esi
-	xorl	%edx,%eax
-	xorl	%ebx,%esi
-	movl	%esi,%ebx
-	andl	$2155905152,%esi
-	movl	%esi,%ebp
-	shrl	$7,%ebp
-	leal	(%ebx,%ebx,1),%ecx
-	xorl	%edx,%ebx
-	subl	%ebp,%esi
-	andl	$4278124286,%ecx
-	andl	$454761243,%esi
-	roll	$8,%edx
-	xorl	%esi,%ecx
-	movl	16(%edi),%ebp
-	xorl	%eax,%edx
-	xorl	%ecx,%eax
-	xorl	%ebx,%edx
-	roll	$24,%eax
-	xorl	%ecx,%ebx
-	xorl	%ecx,%edx
-	roll	$16,%ebx
-	xorl	%eax,%edx
-	roll	$8,%ecx
-	xorl	%ebx,%edx
-	movl	%ebp,%eax
-	xorl	%ecx,%edx
-	movl	%edx,12(%edi)
-	cmpl	28(%esp),%edi
-	jb	.L056permute
-	xorl	%eax,%eax
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	private_AES_set_decrypt_key,.-.L_private_AES_set_decrypt_key_begin
-.byte	65,69,83,32,102,111,114,32,120,56,54,44,32,67,82,89
-.byte	80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
-.byte	111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
-.comm	OPENSSL_ia32cap_P,8,4

Added: trunk/secure/lib/libcrypto/i386/aesni-x86.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/aesni-x86.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/aesni-x86.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,4292 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/aesni-x86.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from aesni-x86.pl.
+#ifdef PIC
+.file	"aesni-x86.S"
+.text
+.globl	aesni_encrypt
+.type	aesni_encrypt,@function
+.align	16
+aesni_encrypt:
+.L_aesni_encrypt_begin:
+	movl	4(%esp),%eax
+	movl	12(%esp),%edx
+	movups	(%eax),%xmm2
+	movl	240(%edx),%ecx
+	movl	8(%esp),%eax
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L000enc1_loop_1:
+.byte	102,15,56,220,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L000enc1_loop_1
+.byte	102,15,56,221,209
+	movups	%xmm2,(%eax)
+	ret
+.size	aesni_encrypt,.-.L_aesni_encrypt_begin
+.globl	aesni_decrypt
+.type	aesni_decrypt,@function
+.align	16
+aesni_decrypt:
+.L_aesni_decrypt_begin:
+	movl	4(%esp),%eax
+	movl	12(%esp),%edx
+	movups	(%eax),%xmm2
+	movl	240(%edx),%ecx
+	movl	8(%esp),%eax
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L001dec1_loop_2:
+.byte	102,15,56,222,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L001dec1_loop_2
+.byte	102,15,56,223,209
+	movups	%xmm2,(%eax)
+	ret
+.size	aesni_decrypt,.-.L_aesni_decrypt_begin
+.type	_aesni_encrypt3,@function
+.align	16
+_aesni_encrypt3:
+	movups	(%edx),%xmm0
+	shrl	$1,%ecx
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+	pxor	%xmm0,%xmm3
+	pxor	%xmm0,%xmm4
+	movups	(%edx),%xmm0
+.L002enc3_loop:
+.byte	102,15,56,220,209
+.byte	102,15,56,220,217
+	decl	%ecx
+.byte	102,15,56,220,225
+	movups	16(%edx),%xmm1
+.byte	102,15,56,220,208
+.byte	102,15,56,220,216
+	leal	32(%edx),%edx
+.byte	102,15,56,220,224
+	movups	(%edx),%xmm0
+	jnz	.L002enc3_loop
+.byte	102,15,56,220,209
+.byte	102,15,56,220,217
+.byte	102,15,56,220,225
+.byte	102,15,56,221,208
+.byte	102,15,56,221,216
+.byte	102,15,56,221,224
+	ret
+.size	_aesni_encrypt3,.-_aesni_encrypt3
+.type	_aesni_decrypt3,@function
+.align	16
+_aesni_decrypt3:
+	movups	(%edx),%xmm0
+	shrl	$1,%ecx
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+	pxor	%xmm0,%xmm3
+	pxor	%xmm0,%xmm4
+	movups	(%edx),%xmm0
+.L003dec3_loop:
+.byte	102,15,56,222,209
+.byte	102,15,56,222,217
+	decl	%ecx
+.byte	102,15,56,222,225
+	movups	16(%edx),%xmm1
+.byte	102,15,56,222,208
+.byte	102,15,56,222,216
+	leal	32(%edx),%edx
+.byte	102,15,56,222,224
+	movups	(%edx),%xmm0
+	jnz	.L003dec3_loop
+.byte	102,15,56,222,209
+.byte	102,15,56,222,217
+.byte	102,15,56,222,225
+.byte	102,15,56,223,208
+.byte	102,15,56,223,216
+.byte	102,15,56,223,224
+	ret
+.size	_aesni_decrypt3,.-_aesni_decrypt3
+.type	_aesni_encrypt4,@function
+.align	16
+_aesni_encrypt4:
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	shrl	$1,%ecx
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+	pxor	%xmm0,%xmm3
+	pxor	%xmm0,%xmm4
+	pxor	%xmm0,%xmm5
+	movups	(%edx),%xmm0
+.L004enc4_loop:
+.byte	102,15,56,220,209
+.byte	102,15,56,220,217
+	decl	%ecx
+.byte	102,15,56,220,225
+.byte	102,15,56,220,233
+	movups	16(%edx),%xmm1
+.byte	102,15,56,220,208
+.byte	102,15,56,220,216
+	leal	32(%edx),%edx
+.byte	102,15,56,220,224
+.byte	102,15,56,220,232
+	movups	(%edx),%xmm0
+	jnz	.L004enc4_loop
+.byte	102,15,56,220,209
+.byte	102,15,56,220,217
+.byte	102,15,56,220,225
+.byte	102,15,56,220,233
+.byte	102,15,56,221,208
+.byte	102,15,56,221,216
+.byte	102,15,56,221,224
+.byte	102,15,56,221,232
+	ret
+.size	_aesni_encrypt4,.-_aesni_encrypt4
+.type	_aesni_decrypt4,@function
+.align	16
+_aesni_decrypt4:
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	shrl	$1,%ecx
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+	pxor	%xmm0,%xmm3
+	pxor	%xmm0,%xmm4
+	pxor	%xmm0,%xmm5
+	movups	(%edx),%xmm0
+.L005dec4_loop:
+.byte	102,15,56,222,209
+.byte	102,15,56,222,217
+	decl	%ecx
+.byte	102,15,56,222,225
+.byte	102,15,56,222,233
+	movups	16(%edx),%xmm1
+.byte	102,15,56,222,208
+.byte	102,15,56,222,216
+	leal	32(%edx),%edx
+.byte	102,15,56,222,224
+.byte	102,15,56,222,232
+	movups	(%edx),%xmm0
+	jnz	.L005dec4_loop
+.byte	102,15,56,222,209
+.byte	102,15,56,222,217
+.byte	102,15,56,222,225
+.byte	102,15,56,222,233
+.byte	102,15,56,223,208
+.byte	102,15,56,223,216
+.byte	102,15,56,223,224
+.byte	102,15,56,223,232
+	ret
+.size	_aesni_decrypt4,.-_aesni_decrypt4
+.type	_aesni_encrypt6,@function
+.align	16
+_aesni_encrypt6:
+	movups	(%edx),%xmm0
+	shrl	$1,%ecx
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+	pxor	%xmm0,%xmm3
+.byte	102,15,56,220,209
+	pxor	%xmm0,%xmm4
+.byte	102,15,56,220,217
+	pxor	%xmm0,%xmm5
+	decl	%ecx
+.byte	102,15,56,220,225
+	pxor	%xmm0,%xmm6
+.byte	102,15,56,220,233
+	pxor	%xmm0,%xmm7
+.byte	102,15,56,220,241
+	movups	(%edx),%xmm0
+.byte	102,15,56,220,249
+	jmp	.L_aesni_encrypt6_enter
+.align	16
+.L006enc6_loop:
+.byte	102,15,56,220,209
+.byte	102,15,56,220,217
+	decl	%ecx
+.byte	102,15,56,220,225
+.byte	102,15,56,220,233
+.byte	102,15,56,220,241
+.byte	102,15,56,220,249
+.align	16
+.L_aesni_encrypt6_enter:
+	movups	16(%edx),%xmm1
+.byte	102,15,56,220,208
+.byte	102,15,56,220,216
+	leal	32(%edx),%edx
+.byte	102,15,56,220,224
+.byte	102,15,56,220,232
+.byte	102,15,56,220,240
+.byte	102,15,56,220,248
+	movups	(%edx),%xmm0
+	jnz	.L006enc6_loop
+.byte	102,15,56,220,209
+.byte	102,15,56,220,217
+.byte	102,15,56,220,225
+.byte	102,15,56,220,233
+.byte	102,15,56,220,241
+.byte	102,15,56,220,249
+.byte	102,15,56,221,208
+.byte	102,15,56,221,216
+.byte	102,15,56,221,224
+.byte	102,15,56,221,232
+.byte	102,15,56,221,240
+.byte	102,15,56,221,248
+	ret
+.size	_aesni_encrypt6,.-_aesni_encrypt6
+.type	_aesni_decrypt6,@function
+.align	16
+_aesni_decrypt6:
+	movups	(%edx),%xmm0
+	shrl	$1,%ecx
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+	pxor	%xmm0,%xmm3
+.byte	102,15,56,222,209
+	pxor	%xmm0,%xmm4
+.byte	102,15,56,222,217
+	pxor	%xmm0,%xmm5
+	decl	%ecx
+.byte	102,15,56,222,225
+	pxor	%xmm0,%xmm6
+.byte	102,15,56,222,233
+	pxor	%xmm0,%xmm7
+.byte	102,15,56,222,241
+	movups	(%edx),%xmm0
+.byte	102,15,56,222,249
+	jmp	.L_aesni_decrypt6_enter
+.align	16
+.L007dec6_loop:
+.byte	102,15,56,222,209
+.byte	102,15,56,222,217
+	decl	%ecx
+.byte	102,15,56,222,225
+.byte	102,15,56,222,233
+.byte	102,15,56,222,241
+.byte	102,15,56,222,249
+.align	16
+.L_aesni_decrypt6_enter:
+	movups	16(%edx),%xmm1
+.byte	102,15,56,222,208
+.byte	102,15,56,222,216
+	leal	32(%edx),%edx
+.byte	102,15,56,222,224
+.byte	102,15,56,222,232
+.byte	102,15,56,222,240
+.byte	102,15,56,222,248
+	movups	(%edx),%xmm0
+	jnz	.L007dec6_loop
+.byte	102,15,56,222,209
+.byte	102,15,56,222,217
+.byte	102,15,56,222,225
+.byte	102,15,56,222,233
+.byte	102,15,56,222,241
+.byte	102,15,56,222,249
+.byte	102,15,56,223,208
+.byte	102,15,56,223,216
+.byte	102,15,56,223,224
+.byte	102,15,56,223,232
+.byte	102,15,56,223,240
+.byte	102,15,56,223,248
+	ret
+.size	_aesni_decrypt6,.-_aesni_decrypt6
+.globl	aesni_ecb_encrypt
+.type	aesni_ecb_encrypt,@function
+.align	16
+aesni_ecb_encrypt:
+.L_aesni_ecb_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%eax
+	movl	32(%esp),%edx
+	movl	36(%esp),%ebx
+	andl	$-16,%eax
+	jz	.L008ecb_ret
+	movl	240(%edx),%ecx
+	testl	%ebx,%ebx
+	jz	.L009ecb_decrypt
+	movl	%edx,%ebp
+	movl	%ecx,%ebx
+	cmpl	$96,%eax
+	jb	.L010ecb_enc_tail
+	movdqu	(%esi),%xmm2
+	movdqu	16(%esi),%xmm3
+	movdqu	32(%esi),%xmm4
+	movdqu	48(%esi),%xmm5
+	movdqu	64(%esi),%xmm6
+	movdqu	80(%esi),%xmm7
+	leal	96(%esi),%esi
+	subl	$96,%eax
+	jmp	.L011ecb_enc_loop6_enter
+.align	16
+.L012ecb_enc_loop6:
+	movups	%xmm2,(%edi)
+	movdqu	(%esi),%xmm2
+	movups	%xmm3,16(%edi)
+	movdqu	16(%esi),%xmm3
+	movups	%xmm4,32(%edi)
+	movdqu	32(%esi),%xmm4
+	movups	%xmm5,48(%edi)
+	movdqu	48(%esi),%xmm5
+	movups	%xmm6,64(%edi)
+	movdqu	64(%esi),%xmm6
+	movups	%xmm7,80(%edi)
+	leal	96(%edi),%edi
+	movdqu	80(%esi),%xmm7
+	leal	96(%esi),%esi
+.L011ecb_enc_loop6_enter:
+	call	_aesni_encrypt6
+	movl	%ebp,%edx
+	movl	%ebx,%ecx
+	subl	$96,%eax
+	jnc	.L012ecb_enc_loop6
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	movups	%xmm6,64(%edi)
+	movups	%xmm7,80(%edi)
+	leal	96(%edi),%edi
+	addl	$96,%eax
+	jz	.L008ecb_ret
+.L010ecb_enc_tail:
+	movups	(%esi),%xmm2
+	cmpl	$32,%eax
+	jb	.L013ecb_enc_one
+	movups	16(%esi),%xmm3
+	je	.L014ecb_enc_two
+	movups	32(%esi),%xmm4
+	cmpl	$64,%eax
+	jb	.L015ecb_enc_three
+	movups	48(%esi),%xmm5
+	je	.L016ecb_enc_four
+	movups	64(%esi),%xmm6
+	xorps	%xmm7,%xmm7
+	call	_aesni_encrypt6
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	movups	%xmm6,64(%edi)
+	jmp	.L008ecb_ret
+.align	16
+.L013ecb_enc_one:
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L017enc1_loop_3:
+.byte	102,15,56,220,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L017enc1_loop_3
+.byte	102,15,56,221,209
+	movups	%xmm2,(%edi)
+	jmp	.L008ecb_ret
+.align	16
+.L014ecb_enc_two:
+	xorps	%xmm4,%xmm4
+	call	_aesni_encrypt3
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	jmp	.L008ecb_ret
+.align	16
+.L015ecb_enc_three:
+	call	_aesni_encrypt3
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	jmp	.L008ecb_ret
+.align	16
+.L016ecb_enc_four:
+	call	_aesni_encrypt4
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	jmp	.L008ecb_ret
+.align	16
+.L009ecb_decrypt:
+	movl	%edx,%ebp
+	movl	%ecx,%ebx
+	cmpl	$96,%eax
+	jb	.L018ecb_dec_tail
+	movdqu	(%esi),%xmm2
+	movdqu	16(%esi),%xmm3
+	movdqu	32(%esi),%xmm4
+	movdqu	48(%esi),%xmm5
+	movdqu	64(%esi),%xmm6
+	movdqu	80(%esi),%xmm7
+	leal	96(%esi),%esi
+	subl	$96,%eax
+	jmp	.L019ecb_dec_loop6_enter
+.align	16
+.L020ecb_dec_loop6:
+	movups	%xmm2,(%edi)
+	movdqu	(%esi),%xmm2
+	movups	%xmm3,16(%edi)
+	movdqu	16(%esi),%xmm3
+	movups	%xmm4,32(%edi)
+	movdqu	32(%esi),%xmm4
+	movups	%xmm5,48(%edi)
+	movdqu	48(%esi),%xmm5
+	movups	%xmm6,64(%edi)
+	movdqu	64(%esi),%xmm6
+	movups	%xmm7,80(%edi)
+	leal	96(%edi),%edi
+	movdqu	80(%esi),%xmm7
+	leal	96(%esi),%esi
+.L019ecb_dec_loop6_enter:
+	call	_aesni_decrypt6
+	movl	%ebp,%edx
+	movl	%ebx,%ecx
+	subl	$96,%eax
+	jnc	.L020ecb_dec_loop6
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	movups	%xmm6,64(%edi)
+	movups	%xmm7,80(%edi)
+	leal	96(%edi),%edi
+	addl	$96,%eax
+	jz	.L008ecb_ret
+.L018ecb_dec_tail:
+	movups	(%esi),%xmm2
+	cmpl	$32,%eax
+	jb	.L021ecb_dec_one
+	movups	16(%esi),%xmm3
+	je	.L022ecb_dec_two
+	movups	32(%esi),%xmm4
+	cmpl	$64,%eax
+	jb	.L023ecb_dec_three
+	movups	48(%esi),%xmm5
+	je	.L024ecb_dec_four
+	movups	64(%esi),%xmm6
+	xorps	%xmm7,%xmm7
+	call	_aesni_decrypt6
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	movups	%xmm6,64(%edi)
+	jmp	.L008ecb_ret
+.align	16
+.L021ecb_dec_one:
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L025dec1_loop_4:
+.byte	102,15,56,222,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L025dec1_loop_4
+.byte	102,15,56,223,209
+	movups	%xmm2,(%edi)
+	jmp	.L008ecb_ret
+.align	16
+.L022ecb_dec_two:
+	xorps	%xmm4,%xmm4
+	call	_aesni_decrypt3
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	jmp	.L008ecb_ret
+.align	16
+.L023ecb_dec_three:
+	call	_aesni_decrypt3
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	jmp	.L008ecb_ret
+.align	16
+.L024ecb_dec_four:
+	call	_aesni_decrypt4
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+.L008ecb_ret:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	aesni_ecb_encrypt,.-.L_aesni_ecb_encrypt_begin
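+# aesni_ccm64_encrypt_blocks is the block-processing core of CCM with a
+# 64-bit counter: each plaintext block is folded into the running CBC-MAC
+# (%xmm3) while the counter (%xmm7) is encrypted, and both pipelines share
+# the two-rounds-per-iteration loop. The four stack words 202182159/
+# 134810123/67438087/66051 spell out an endianness byte-swap mask for
+# pshufb (the .byte 102,15,56,0,... opcodes).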
+.globl	aesni_ccm64_encrypt_blocks
+.type	aesni_ccm64_encrypt_blocks,@function
+.align	16
+aesni_ccm64_encrypt_blocks:
+.L_aesni_ccm64_encrypt_blocks_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%eax
+	movl	32(%esp),%edx
+	movl	36(%esp),%ebx
+	movl	40(%esp),%ecx
+	movl	%esp,%ebp
+	subl	$60,%esp
+	andl	$-16,%esp
+	movl	%ebp,48(%esp)
+	movdqu	(%ebx),%xmm7
+	movdqu	(%ecx),%xmm3
+	movl	240(%edx),%ecx
+	movl	$202182159,(%esp)
+	movl	$134810123,4(%esp)
+	movl	$67438087,8(%esp)
+	movl	$66051,12(%esp)
+	movl	$1,%ebx
+	xorl	%ebp,%ebp
+	movl	%ebx,16(%esp)
+	movl	%ebp,20(%esp)
+	movl	%ebp,24(%esp)
+	movl	%ebp,28(%esp)
+	shrl	$1,%ecx
+	leal	(%edx),%ebp
+	movdqa	(%esp),%xmm5
+	movdqa	%xmm7,%xmm2
+	movl	%ecx,%ebx
+.byte	102,15,56,0,253
+.L026ccm64_enc_outer:
+	movups	(%ebp),%xmm0
+	movl	%ebx,%ecx
+	movups	(%esi),%xmm6
+	xorps	%xmm0,%xmm2
+	movups	16(%ebp),%xmm1
+	xorps	%xmm6,%xmm0
+	leal	32(%ebp),%edx
+	xorps	%xmm0,%xmm3
+	movups	(%edx),%xmm0
+.L027ccm64_enc2_loop:
+.byte	102,15,56,220,209
+	decl	%ecx
+.byte	102,15,56,220,217
+	movups	16(%edx),%xmm1
+.byte	102,15,56,220,208
+	leal	32(%edx),%edx
+.byte	102,15,56,220,216
+	movups	(%edx),%xmm0
+	jnz	.L027ccm64_enc2_loop
+.byte	102,15,56,220,209
+.byte	102,15,56,220,217
+	paddq	16(%esp),%xmm7
+.byte	102,15,56,221,208
+.byte	102,15,56,221,216
+	decl	%eax
+	leal	16(%esi),%esi
+	xorps	%xmm2,%xmm6
+	movdqa	%xmm7,%xmm2
+	movups	%xmm6,(%edi)
+	leal	16(%edi),%edi
+.byte	102,15,56,0,213
+	jnz	.L026ccm64_enc_outer
+	movl	48(%esp),%esp
+	movl	40(%esp),%edi
+	movups	%xmm3,(%edi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	aesni_ccm64_encrypt_blocks,.-.L_aesni_ccm64_encrypt_blocks_begin
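+# aesni_ccm64_decrypt_blocks is the decrypt-side twin: CCM is CTR-based,
+# so the counter is still *encrypted*, the keystream is XORed with the
+# ciphertext, and only the recovered plaintext is folded into the MAC.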
+.globl	aesni_ccm64_decrypt_blocks
+.type	aesni_ccm64_decrypt_blocks,@function
+.align	16
+aesni_ccm64_decrypt_blocks:
+.L_aesni_ccm64_decrypt_blocks_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%eax
+	movl	32(%esp),%edx
+	movl	36(%esp),%ebx
+	movl	40(%esp),%ecx
+	movl	%esp,%ebp
+	subl	$60,%esp
+	andl	$-16,%esp
+	movl	%ebp,48(%esp)
+	movdqu	(%ebx),%xmm7
+	movdqu	(%ecx),%xmm3
+	movl	240(%edx),%ecx
+	movl	$202182159,(%esp)
+	movl	$134810123,4(%esp)
+	movl	$67438087,8(%esp)
+	movl	$66051,12(%esp)
+	movl	$1,%ebx
+	xorl	%ebp,%ebp
+	movl	%ebx,16(%esp)
+	movl	%ebp,20(%esp)
+	movl	%ebp,24(%esp)
+	movl	%ebp,28(%esp)
+	movdqa	(%esp),%xmm5
+	movdqa	%xmm7,%xmm2
+	movl	%edx,%ebp
+	movl	%ecx,%ebx
+.byte	102,15,56,0,253
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L028enc1_loop_5:
+.byte	102,15,56,220,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L028enc1_loop_5
+.byte	102,15,56,221,209
+	movups	(%esi),%xmm6
+	paddq	16(%esp),%xmm7
+	leal	16(%esi),%esi
+	jmp	.L029ccm64_dec_outer
+.align	16
+.L029ccm64_dec_outer:
+	xorps	%xmm2,%xmm6
+	movdqa	%xmm7,%xmm2
+	movl	%ebx,%ecx
+	movups	%xmm6,(%edi)
+	leal	16(%edi),%edi
+.byte	102,15,56,0,213
+	subl	$1,%eax
+	jz	.L030ccm64_dec_break
+	movups	(%ebp),%xmm0
+	shrl	$1,%ecx
+	movups	16(%ebp),%xmm1
+	xorps	%xmm0,%xmm6
+	leal	32(%ebp),%edx
+	xorps	%xmm0,%xmm2
+	xorps	%xmm6,%xmm3
+	movups	(%edx),%xmm0
+.L031ccm64_dec2_loop:
+.byte	102,15,56,220,209
+	decl	%ecx
+.byte	102,15,56,220,217
+	movups	16(%edx),%xmm1
+.byte	102,15,56,220,208
+	leal	32(%edx),%edx
+.byte	102,15,56,220,216
+	movups	(%edx),%xmm0
+	jnz	.L031ccm64_dec2_loop
+	movups	(%esi),%xmm6
+	paddq	16(%esp),%xmm7
+.byte	102,15,56,220,209
+.byte	102,15,56,220,217
+	leal	16(%esi),%esi
+.byte	102,15,56,221,208
+.byte	102,15,56,221,216
+	jmp	.L029ccm64_dec_outer
+.align	16
+.L030ccm64_dec_break:
+	movl	%ebp,%edx
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	xorps	%xmm0,%xmm6
+	leal	32(%edx),%edx
+	xorps	%xmm6,%xmm3
+.L032enc1_loop_6:
+.byte	102,15,56,220,217
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L032enc1_loop_6
+.byte	102,15,56,221,217
+	movl	48(%esp),%esp
+	movl	40(%esp),%edi
+	movups	%xmm3,(%edi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	aesni_ccm64_decrypt_blocks,.-.L_aesni_ccm64_decrypt_blocks_begin
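+# aesni_ctr32_encrypt_blocks: CTR mode with a 32-bit big-endian counter
+# in the last word of the IV (extracted with pextrd/bswap, the .byte
+# 102,15,58,22,... sequence). Six counter blocks per iteration are built
+# with pinsrd (.byte 102,15,58,34,...) and pushed through
+# _aesni_encrypt6; the stack mask converts between the big-endian wire
+# format and the little-endian increments.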
+.globl	aesni_ctr32_encrypt_blocks
+.type	aesni_ctr32_encrypt_blocks,@function
+.align	16
+aesni_ctr32_encrypt_blocks:
+.L_aesni_ctr32_encrypt_blocks_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%eax
+	movl	32(%esp),%edx
+	movl	36(%esp),%ebx
+	movl	%esp,%ebp
+	subl	$88,%esp
+	andl	$-16,%esp
+	movl	%ebp,80(%esp)
+	cmpl	$1,%eax
+	je	.L033ctr32_one_shortcut
+	movdqu	(%ebx),%xmm7
+	movl	$202182159,(%esp)
+	movl	$134810123,4(%esp)
+	movl	$67438087,8(%esp)
+	movl	$66051,12(%esp)
+	movl	$6,%ecx
+	xorl	%ebp,%ebp
+	movl	%ecx,16(%esp)
+	movl	%ecx,20(%esp)
+	movl	%ecx,24(%esp)
+	movl	%ebp,28(%esp)
+.byte	102,15,58,22,251,3
+.byte	102,15,58,34,253,3
+	movl	240(%edx),%ecx
+	bswap	%ebx
+	pxor	%xmm1,%xmm1
+	pxor	%xmm0,%xmm0
+	movdqa	(%esp),%xmm2
+.byte	102,15,58,34,203,0
+	leal	3(%ebx),%ebp
+.byte	102,15,58,34,197,0
+	incl	%ebx
+.byte	102,15,58,34,203,1
+	incl	%ebp
+.byte	102,15,58,34,197,1
+	incl	%ebx
+.byte	102,15,58,34,203,2
+	incl	%ebp
+.byte	102,15,58,34,197,2
+	movdqa	%xmm1,48(%esp)
+.byte	102,15,56,0,202
+	movdqa	%xmm0,64(%esp)
+.byte	102,15,56,0,194
+	pshufd	$192,%xmm1,%xmm2
+	pshufd	$128,%xmm1,%xmm3
+	cmpl	$6,%eax
+	jb	.L034ctr32_tail
+	movdqa	%xmm7,32(%esp)
+	shrl	$1,%ecx
+	movl	%edx,%ebp
+	movl	%ecx,%ebx
+	subl	$6,%eax
+	jmp	.L035ctr32_loop6
+.align	16
+.L035ctr32_loop6:
+	pshufd	$64,%xmm1,%xmm4
+	movdqa	32(%esp),%xmm1
+	pshufd	$192,%xmm0,%xmm5
+	por	%xmm1,%xmm2
+	pshufd	$128,%xmm0,%xmm6
+	por	%xmm1,%xmm3
+	pshufd	$64,%xmm0,%xmm7
+	por	%xmm1,%xmm4
+	por	%xmm1,%xmm5
+	por	%xmm1,%xmm6
+	por	%xmm1,%xmm7
+	movups	(%ebp),%xmm0
+	movups	16(%ebp),%xmm1
+	leal	32(%ebp),%edx
+	decl	%ecx
+	pxor	%xmm0,%xmm2
+	pxor	%xmm0,%xmm3
+.byte	102,15,56,220,209
+	pxor	%xmm0,%xmm4
+.byte	102,15,56,220,217
+	pxor	%xmm0,%xmm5
+.byte	102,15,56,220,225
+	pxor	%xmm0,%xmm6
+.byte	102,15,56,220,233
+	pxor	%xmm0,%xmm7
+.byte	102,15,56,220,241
+	movups	(%edx),%xmm0
+.byte	102,15,56,220,249
+	call	.L_aesni_encrypt6_enter
+	movups	(%esi),%xmm1
+	movups	16(%esi),%xmm0
+	xorps	%xmm1,%xmm2
+	movups	32(%esi),%xmm1
+	xorps	%xmm0,%xmm3
+	movups	%xmm2,(%edi)
+	movdqa	16(%esp),%xmm0
+	xorps	%xmm1,%xmm4
+	movdqa	48(%esp),%xmm1
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	paddd	%xmm0,%xmm1
+	paddd	64(%esp),%xmm0
+	movdqa	(%esp),%xmm2
+	movups	48(%esi),%xmm3
+	movups	64(%esi),%xmm4
+	xorps	%xmm3,%xmm5
+	movups	80(%esi),%xmm3
+	leal	96(%esi),%esi
+	movdqa	%xmm1,48(%esp)
+.byte	102,15,56,0,202
+	xorps	%xmm4,%xmm6
+	movups	%xmm5,48(%edi)
+	xorps	%xmm3,%xmm7
+	movdqa	%xmm0,64(%esp)
+.byte	102,15,56,0,194
+	movups	%xmm6,64(%edi)
+	pshufd	$192,%xmm1,%xmm2
+	movups	%xmm7,80(%edi)
+	leal	96(%edi),%edi
+	movl	%ebx,%ecx
+	pshufd	$128,%xmm1,%xmm3
+	subl	$6,%eax
+	jnc	.L035ctr32_loop6
+	addl	$6,%eax
+	jz	.L036ctr32_ret
+	movl	%ebp,%edx
+	leal	1(,%ecx,2),%ecx
+	movdqa	32(%esp),%xmm7
+.L034ctr32_tail:
+	por	%xmm7,%xmm2
+	cmpl	$2,%eax
+	jb	.L037ctr32_one
+	pshufd	$64,%xmm1,%xmm4
+	por	%xmm7,%xmm3
+	je	.L038ctr32_two
+	pshufd	$192,%xmm0,%xmm5
+	por	%xmm7,%xmm4
+	cmpl	$4,%eax
+	jb	.L039ctr32_three
+	pshufd	$128,%xmm0,%xmm6
+	por	%xmm7,%xmm5
+	je	.L040ctr32_four
+	por	%xmm7,%xmm6
+	call	_aesni_encrypt6
+	movups	(%esi),%xmm1
+	movups	16(%esi),%xmm0
+	xorps	%xmm1,%xmm2
+	movups	32(%esi),%xmm1
+	xorps	%xmm0,%xmm3
+	movups	48(%esi),%xmm0
+	xorps	%xmm1,%xmm4
+	movups	64(%esi),%xmm1
+	xorps	%xmm0,%xmm5
+	movups	%xmm2,(%edi)
+	xorps	%xmm1,%xmm6
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	movups	%xmm6,64(%edi)
+	jmp	.L036ctr32_ret
+.align	16
+.L033ctr32_one_shortcut:
+	movups	(%ebx),%xmm2
+	movl	240(%edx),%ecx
+.L037ctr32_one:
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L041enc1_loop_7:
+.byte	102,15,56,220,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L041enc1_loop_7
+.byte	102,15,56,221,209
+	movups	(%esi),%xmm6
+	xorps	%xmm2,%xmm6
+	movups	%xmm6,(%edi)
+	jmp	.L036ctr32_ret
+.align	16
+.L038ctr32_two:
+	call	_aesni_encrypt3
+	movups	(%esi),%xmm5
+	movups	16(%esi),%xmm6
+	xorps	%xmm5,%xmm2
+	xorps	%xmm6,%xmm3
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	jmp	.L036ctr32_ret
+.align	16
+.L039ctr32_three:
+	call	_aesni_encrypt3
+	movups	(%esi),%xmm5
+	movups	16(%esi),%xmm6
+	xorps	%xmm5,%xmm2
+	movups	32(%esi),%xmm7
+	xorps	%xmm6,%xmm3
+	movups	%xmm2,(%edi)
+	xorps	%xmm7,%xmm4
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	jmp	.L036ctr32_ret
+.align	16
+.L040ctr32_four:
+	call	_aesni_encrypt4
+	movups	(%esi),%xmm6
+	movups	16(%esi),%xmm7
+	movups	32(%esi),%xmm1
+	xorps	%xmm6,%xmm2
+	movups	48(%esi),%xmm0
+	xorps	%xmm7,%xmm3
+	movups	%xmm2,(%edi)
+	xorps	%xmm1,%xmm4
+	movups	%xmm3,16(%edi)
+	xorps	%xmm0,%xmm5
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+.L036ctr32_ret:
+	movl	80(%esp),%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	aesni_ctr32_encrypt_blocks,.-.L_aesni_ctr32_encrypt_blocks_begin
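+# aesni_xts_encrypt: IEEE P1619 XTS. The tweak is produced first by
+# encrypting the IV with the second key (the enc1 loop just below); each
+# subsequent tweak is the previous one doubled in GF(2^128) - the
+# pshufd/pand/paddq/pxor sequence with the 0x87 reduction constant stored
+# as $135 at 96(%esp). A ragged tail is handled by ciphertext stealing in
+# the xts_enc_steal loop.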
+.globl	aesni_xts_encrypt
+.type	aesni_xts_encrypt,@function
+.align	16
+aesni_xts_encrypt:
+.L_aesni_xts_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	36(%esp),%edx
+	movl	40(%esp),%esi
+	movl	240(%edx),%ecx
+	movups	(%esi),%xmm2
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L042enc1_loop_8:
+.byte	102,15,56,220,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L042enc1_loop_8
+.byte	102,15,56,221,209
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%eax
+	movl	32(%esp),%edx
+	movl	%esp,%ebp
+	subl	$120,%esp
+	movl	240(%edx),%ecx
+	andl	$-16,%esp
+	movl	$135,96(%esp)
+	movl	$0,100(%esp)
+	movl	$1,104(%esp)
+	movl	$0,108(%esp)
+	movl	%eax,112(%esp)
+	movl	%ebp,116(%esp)
+	movdqa	%xmm2,%xmm1
+	pxor	%xmm0,%xmm0
+	movdqa	96(%esp),%xmm3
+	pcmpgtd	%xmm1,%xmm0
+	andl	$-16,%eax
+	movl	%edx,%ebp
+	movl	%ecx,%ebx
+	subl	$96,%eax
+	jc	.L043xts_enc_short
+	shrl	$1,%ecx
+	movl	%ecx,%ebx
+	jmp	.L044xts_enc_loop6
+.align	16
+.L044xts_enc_loop6:
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,16(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,32(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,48(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	pshufd	$19,%xmm0,%xmm7
+	movdqa	%xmm1,64(%esp)
+	paddq	%xmm1,%xmm1
+	movups	(%ebp),%xmm0
+	pand	%xmm3,%xmm7
+	movups	(%esi),%xmm2
+	pxor	%xmm1,%xmm7
+	movdqu	16(%esi),%xmm3
+	xorps	%xmm0,%xmm2
+	movdqu	32(%esi),%xmm4
+	pxor	%xmm0,%xmm3
+	movdqu	48(%esi),%xmm5
+	pxor	%xmm0,%xmm4
+	movdqu	64(%esi),%xmm6
+	pxor	%xmm0,%xmm5
+	movdqu	80(%esi),%xmm1
+	pxor	%xmm0,%xmm6
+	leal	96(%esi),%esi
+	pxor	(%esp),%xmm2
+	movdqa	%xmm7,80(%esp)
+	pxor	%xmm1,%xmm7
+	movups	16(%ebp),%xmm1
+	leal	32(%ebp),%edx
+	pxor	16(%esp),%xmm3
+.byte	102,15,56,220,209
+	pxor	32(%esp),%xmm4
+.byte	102,15,56,220,217
+	pxor	48(%esp),%xmm5
+	decl	%ecx
+.byte	102,15,56,220,225
+	pxor	64(%esp),%xmm6
+.byte	102,15,56,220,233
+	pxor	%xmm0,%xmm7
+.byte	102,15,56,220,241
+	movups	(%edx),%xmm0
+.byte	102,15,56,220,249
+	call	.L_aesni_encrypt6_enter
+	movdqa	80(%esp),%xmm1
+	pxor	%xmm0,%xmm0
+	xorps	(%esp),%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	xorps	16(%esp),%xmm3
+	movups	%xmm2,(%edi)
+	xorps	32(%esp),%xmm4
+	movups	%xmm3,16(%edi)
+	xorps	48(%esp),%xmm5
+	movups	%xmm4,32(%edi)
+	xorps	64(%esp),%xmm6
+	movups	%xmm5,48(%edi)
+	xorps	%xmm1,%xmm7
+	movups	%xmm6,64(%edi)
+	pshufd	$19,%xmm0,%xmm2
+	movups	%xmm7,80(%edi)
+	leal	96(%edi),%edi
+	movdqa	96(%esp),%xmm3
+	pxor	%xmm0,%xmm0
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	movl	%ebx,%ecx
+	pxor	%xmm2,%xmm1
+	subl	$96,%eax
+	jnc	.L044xts_enc_loop6
+	leal	1(,%ecx,2),%ecx
+	movl	%ebp,%edx
+	movl	%ecx,%ebx
+.L043xts_enc_short:
+	addl	$96,%eax
+	jz	.L045xts_enc_done6x
+	movdqa	%xmm1,%xmm5
+	cmpl	$32,%eax
+	jb	.L046xts_enc_one
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	je	.L047xts_enc_two
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,%xmm6
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	cmpl	$64,%eax
+	jb	.L048xts_enc_three
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,%xmm7
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	movdqa	%xmm5,(%esp)
+	movdqa	%xmm6,16(%esp)
+	je	.L049xts_enc_four
+	movdqa	%xmm7,32(%esp)
+	pshufd	$19,%xmm0,%xmm7
+	movdqa	%xmm1,48(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm7
+	pxor	%xmm1,%xmm7
+	movdqu	(%esi),%xmm2
+	movdqu	16(%esi),%xmm3
+	movdqu	32(%esi),%xmm4
+	pxor	(%esp),%xmm2
+	movdqu	48(%esi),%xmm5
+	pxor	16(%esp),%xmm3
+	movdqu	64(%esi),%xmm6
+	pxor	32(%esp),%xmm4
+	leal	80(%esi),%esi
+	pxor	48(%esp),%xmm5
+	movdqa	%xmm7,64(%esp)
+	pxor	%xmm7,%xmm6
+	call	_aesni_encrypt6
+	movaps	64(%esp),%xmm1
+	xorps	(%esp),%xmm2
+	xorps	16(%esp),%xmm3
+	xorps	32(%esp),%xmm4
+	movups	%xmm2,(%edi)
+	xorps	48(%esp),%xmm5
+	movups	%xmm3,16(%edi)
+	xorps	%xmm1,%xmm6
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	movups	%xmm6,64(%edi)
+	leal	80(%edi),%edi
+	jmp	.L050xts_enc_done
+.align	16
+.L046xts_enc_one:
+	movups	(%esi),%xmm2
+	leal	16(%esi),%esi
+	xorps	%xmm5,%xmm2
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L051enc1_loop_9:
+.byte	102,15,56,220,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L051enc1_loop_9
+.byte	102,15,56,221,209
+	xorps	%xmm5,%xmm2
+	movups	%xmm2,(%edi)
+	leal	16(%edi),%edi
+	movdqa	%xmm5,%xmm1
+	jmp	.L050xts_enc_done
+.align	16
+.L047xts_enc_two:
+	movaps	%xmm1,%xmm6
+	movups	(%esi),%xmm2
+	movups	16(%esi),%xmm3
+	leal	32(%esi),%esi
+	xorps	%xmm5,%xmm2
+	xorps	%xmm6,%xmm3
+	xorps	%xmm4,%xmm4
+	call	_aesni_encrypt3
+	xorps	%xmm5,%xmm2
+	xorps	%xmm6,%xmm3
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	leal	32(%edi),%edi
+	movdqa	%xmm6,%xmm1
+	jmp	.L050xts_enc_done
+.align	16
+.L048xts_enc_three:
+	movaps	%xmm1,%xmm7
+	movups	(%esi),%xmm2
+	movups	16(%esi),%xmm3
+	movups	32(%esi),%xmm4
+	leal	48(%esi),%esi
+	xorps	%xmm5,%xmm2
+	xorps	%xmm6,%xmm3
+	xorps	%xmm7,%xmm4
+	call	_aesni_encrypt3
+	xorps	%xmm5,%xmm2
+	xorps	%xmm6,%xmm3
+	xorps	%xmm7,%xmm4
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	leal	48(%edi),%edi
+	movdqa	%xmm7,%xmm1
+	jmp	.L050xts_enc_done
+.align	16
+.L049xts_enc_four:
+	movaps	%xmm1,%xmm6
+	movups	(%esi),%xmm2
+	movups	16(%esi),%xmm3
+	movups	32(%esi),%xmm4
+	xorps	(%esp),%xmm2
+	movups	48(%esi),%xmm5
+	leal	64(%esi),%esi
+	xorps	16(%esp),%xmm3
+	xorps	%xmm7,%xmm4
+	xorps	%xmm6,%xmm5
+	call	_aesni_encrypt4
+	xorps	(%esp),%xmm2
+	xorps	16(%esp),%xmm3
+	xorps	%xmm7,%xmm4
+	movups	%xmm2,(%edi)
+	xorps	%xmm6,%xmm5
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	leal	64(%edi),%edi
+	movdqa	%xmm6,%xmm1
+	jmp	.L050xts_enc_done
+.align	16
+.L045xts_enc_done6x:
+	movl	112(%esp),%eax
+	andl	$15,%eax
+	jz	.L052xts_enc_ret
+	movdqa	%xmm1,%xmm5
+	movl	%eax,112(%esp)
+	jmp	.L053xts_enc_steal
+.align	16
+.L050xts_enc_done:
+	movl	112(%esp),%eax
+	pxor	%xmm0,%xmm0
+	andl	$15,%eax
+	jz	.L052xts_enc_ret
+	pcmpgtd	%xmm1,%xmm0
+	movl	%eax,112(%esp)
+	pshufd	$19,%xmm0,%xmm5
+	paddq	%xmm1,%xmm1
+	pand	96(%esp),%xmm5
+	pxor	%xmm1,%xmm5
+.L053xts_enc_steal:
+	movzbl	(%esi),%ecx
+	movzbl	-16(%edi),%edx
+	leal	1(%esi),%esi
+	movb	%cl,-16(%edi)
+	movb	%dl,(%edi)
+	leal	1(%edi),%edi
+	subl	$1,%eax
+	jnz	.L053xts_enc_steal
+	subl	112(%esp),%edi
+	movl	%ebp,%edx
+	movl	%ebx,%ecx
+	movups	-16(%edi),%xmm2
+	xorps	%xmm5,%xmm2
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L054enc1_loop_10:
+.byte	102,15,56,220,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L054enc1_loop_10
+.byte	102,15,56,221,209
+	xorps	%xmm5,%xmm2
+	movups	%xmm2,-16(%edi)
+.L052xts_enc_ret:
+	movl	116(%esp),%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	aesni_xts_encrypt,.-.L_aesni_xts_encrypt_begin
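+# aesni_xts_decrypt mirrors the encrypt path; the main structural
+# difference is the ciphertext-stealing order, which needs one extra
+# tweak (%xmm6) ahead of the final short block.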
+.globl	aesni_xts_decrypt
+.type	aesni_xts_decrypt,@function
+.align	16
+aesni_xts_decrypt:
+.L_aesni_xts_decrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	36(%esp),%edx
+	movl	40(%esp),%esi
+	movl	240(%edx),%ecx
+	movups	(%esi),%xmm2
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L055enc1_loop_11:
+.byte	102,15,56,220,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L055enc1_loop_11
+.byte	102,15,56,221,209
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%eax
+	movl	32(%esp),%edx
+	movl	%esp,%ebp
+	subl	$120,%esp
+	andl	$-16,%esp
+	xorl	%ebx,%ebx
+	testl	$15,%eax
+	setnz	%bl
+	shll	$4,%ebx
+	subl	%ebx,%eax
+	movl	$135,96(%esp)
+	movl	$0,100(%esp)
+	movl	$1,104(%esp)
+	movl	$0,108(%esp)
+	movl	%eax,112(%esp)
+	movl	%ebp,116(%esp)
+	movl	240(%edx),%ecx
+	movl	%edx,%ebp
+	movl	%ecx,%ebx
+	movdqa	%xmm2,%xmm1
+	pxor	%xmm0,%xmm0
+	movdqa	96(%esp),%xmm3
+	pcmpgtd	%xmm1,%xmm0
+	andl	$-16,%eax
+	subl	$96,%eax
+	jc	.L056xts_dec_short
+	shrl	$1,%ecx
+	movl	%ecx,%ebx
+	jmp	.L057xts_dec_loop6
+.align	16
+.L057xts_dec_loop6:
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,16(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,32(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,48(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	pshufd	$19,%xmm0,%xmm7
+	movdqa	%xmm1,64(%esp)
+	paddq	%xmm1,%xmm1
+	movups	(%ebp),%xmm0
+	pand	%xmm3,%xmm7
+	movups	(%esi),%xmm2
+	pxor	%xmm1,%xmm7
+	movdqu	16(%esi),%xmm3
+	xorps	%xmm0,%xmm2
+	movdqu	32(%esi),%xmm4
+	pxor	%xmm0,%xmm3
+	movdqu	48(%esi),%xmm5
+	pxor	%xmm0,%xmm4
+	movdqu	64(%esi),%xmm6
+	pxor	%xmm0,%xmm5
+	movdqu	80(%esi),%xmm1
+	pxor	%xmm0,%xmm6
+	leal	96(%esi),%esi
+	pxor	(%esp),%xmm2
+	movdqa	%xmm7,80(%esp)
+	pxor	%xmm1,%xmm7
+	movups	16(%ebp),%xmm1
+	leal	32(%ebp),%edx
+	pxor	16(%esp),%xmm3
+.byte	102,15,56,222,209
+	pxor	32(%esp),%xmm4
+.byte	102,15,56,222,217
+	pxor	48(%esp),%xmm5
+	decl	%ecx
+.byte	102,15,56,222,225
+	pxor	64(%esp),%xmm6
+.byte	102,15,56,222,233
+	pxor	%xmm0,%xmm7
+.byte	102,15,56,222,241
+	movups	(%edx),%xmm0
+.byte	102,15,56,222,249
+	call	.L_aesni_decrypt6_enter
+	movdqa	80(%esp),%xmm1
+	pxor	%xmm0,%xmm0
+	xorps	(%esp),%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	xorps	16(%esp),%xmm3
+	movups	%xmm2,(%edi)
+	xorps	32(%esp),%xmm4
+	movups	%xmm3,16(%edi)
+	xorps	48(%esp),%xmm5
+	movups	%xmm4,32(%edi)
+	xorps	64(%esp),%xmm6
+	movups	%xmm5,48(%edi)
+	xorps	%xmm1,%xmm7
+	movups	%xmm6,64(%edi)
+	pshufd	$19,%xmm0,%xmm2
+	movups	%xmm7,80(%edi)
+	leal	96(%edi),%edi
+	movdqa	96(%esp),%xmm3
+	pxor	%xmm0,%xmm0
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	movl	%ebx,%ecx
+	pxor	%xmm2,%xmm1
+	subl	$96,%eax
+	jnc	.L057xts_dec_loop6
+	leal	1(,%ecx,2),%ecx
+	movl	%ebp,%edx
+	movl	%ecx,%ebx
+.L056xts_dec_short:
+	addl	$96,%eax
+	jz	.L058xts_dec_done6x
+	movdqa	%xmm1,%xmm5
+	cmpl	$32,%eax
+	jb	.L059xts_dec_one
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	je	.L060xts_dec_two
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,%xmm6
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	cmpl	$64,%eax
+	jb	.L061xts_dec_three
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,%xmm7
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	movdqa	%xmm5,(%esp)
+	movdqa	%xmm6,16(%esp)
+	je	.L062xts_dec_four
+	movdqa	%xmm7,32(%esp)
+	pshufd	$19,%xmm0,%xmm7
+	movdqa	%xmm1,48(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm7
+	pxor	%xmm1,%xmm7
+	movdqu	(%esi),%xmm2
+	movdqu	16(%esi),%xmm3
+	movdqu	32(%esi),%xmm4
+	pxor	(%esp),%xmm2
+	movdqu	48(%esi),%xmm5
+	pxor	16(%esp),%xmm3
+	movdqu	64(%esi),%xmm6
+	pxor	32(%esp),%xmm4
+	leal	80(%esi),%esi
+	pxor	48(%esp),%xmm5
+	movdqa	%xmm7,64(%esp)
+	pxor	%xmm7,%xmm6
+	call	_aesni_decrypt6
+	movaps	64(%esp),%xmm1
+	xorps	(%esp),%xmm2
+	xorps	16(%esp),%xmm3
+	xorps	32(%esp),%xmm4
+	movups	%xmm2,(%edi)
+	xorps	48(%esp),%xmm5
+	movups	%xmm3,16(%edi)
+	xorps	%xmm1,%xmm6
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	movups	%xmm6,64(%edi)
+	leal	80(%edi),%edi
+	jmp	.L063xts_dec_done
+.align	16
+.L059xts_dec_one:
+	movups	(%esi),%xmm2
+	leal	16(%esi),%esi
+	xorps	%xmm5,%xmm2
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L064dec1_loop_12:
+.byte	102,15,56,222,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L064dec1_loop_12
+.byte	102,15,56,223,209
+	xorps	%xmm5,%xmm2
+	movups	%xmm2,(%edi)
+	leal	16(%edi),%edi
+	movdqa	%xmm5,%xmm1
+	jmp	.L063xts_dec_done
+.align	16
+.L060xts_dec_two:
+	movaps	%xmm1,%xmm6
+	movups	(%esi),%xmm2
+	movups	16(%esi),%xmm3
+	leal	32(%esi),%esi
+	xorps	%xmm5,%xmm2
+	xorps	%xmm6,%xmm3
+	call	_aesni_decrypt3
+	xorps	%xmm5,%xmm2
+	xorps	%xmm6,%xmm3
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	leal	32(%edi),%edi
+	movdqa	%xmm6,%xmm1
+	jmp	.L063xts_dec_done
+.align	16
+.L061xts_dec_three:
+	movaps	%xmm1,%xmm7
+	movups	(%esi),%xmm2
+	movups	16(%esi),%xmm3
+	movups	32(%esi),%xmm4
+	leal	48(%esi),%esi
+	xorps	%xmm5,%xmm2
+	xorps	%xmm6,%xmm3
+	xorps	%xmm7,%xmm4
+	call	_aesni_decrypt3
+	xorps	%xmm5,%xmm2
+	xorps	%xmm6,%xmm3
+	xorps	%xmm7,%xmm4
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	leal	48(%edi),%edi
+	movdqa	%xmm7,%xmm1
+	jmp	.L063xts_dec_done
+.align	16
+.L062xts_dec_four:
+	movaps	%xmm1,%xmm6
+	movups	(%esi),%xmm2
+	movups	16(%esi),%xmm3
+	movups	32(%esi),%xmm4
+	xorps	(%esp),%xmm2
+	movups	48(%esi),%xmm5
+	leal	64(%esi),%esi
+	xorps	16(%esp),%xmm3
+	xorps	%xmm7,%xmm4
+	xorps	%xmm6,%xmm5
+	call	_aesni_decrypt4
+	xorps	(%esp),%xmm2
+	xorps	16(%esp),%xmm3
+	xorps	%xmm7,%xmm4
+	movups	%xmm2,(%edi)
+	xorps	%xmm6,%xmm5
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	leal	64(%edi),%edi
+	movdqa	%xmm6,%xmm1
+	jmp	.L063xts_dec_done
+.align	16
+.L058xts_dec_done6x:
+	movl	112(%esp),%eax
+	andl	$15,%eax
+	jz	.L065xts_dec_ret
+	movl	%eax,112(%esp)
+	jmp	.L066xts_dec_only_one_more
+.align	16
+.L063xts_dec_done:
+	movl	112(%esp),%eax
+	pxor	%xmm0,%xmm0
+	andl	$15,%eax
+	jz	.L065xts_dec_ret
+	pcmpgtd	%xmm1,%xmm0
+	movl	%eax,112(%esp)
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	96(%esp),%xmm3
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+.L066xts_dec_only_one_more:
+	pshufd	$19,%xmm0,%xmm5
+	movdqa	%xmm1,%xmm6
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm5
+	pxor	%xmm1,%xmm5
+	movl	%ebp,%edx
+	movl	%ebx,%ecx
+	movups	(%esi),%xmm2
+	xorps	%xmm5,%xmm2
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L067dec1_loop_13:
+.byte	102,15,56,222,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L067dec1_loop_13
+.byte	102,15,56,223,209
+	xorps	%xmm5,%xmm2
+	movups	%xmm2,(%edi)
+.L068xts_dec_steal:
+	movzbl	16(%esi),%ecx
+	movzbl	(%edi),%edx
+	leal	1(%esi),%esi
+	movb	%cl,(%edi)
+	movb	%dl,16(%edi)
+	leal	1(%edi),%edi
+	subl	$1,%eax
+	jnz	.L068xts_dec_steal
+	subl	112(%esp),%edi
+	movl	%ebp,%edx
+	movl	%ebx,%ecx
+	movups	(%edi),%xmm2
+	xorps	%xmm6,%xmm2
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L069dec1_loop_14:
+.byte	102,15,56,222,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L069dec1_loop_14
+.byte	102,15,56,223,209
+	xorps	%xmm6,%xmm2
+	movups	%xmm2,(%edi)
+.L065xts_dec_ret:
+	movl	116(%esp),%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	aesni_xts_decrypt,.-.L_aesni_xts_decrypt_begin
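+# aesni_cbc_encrypt is the classic bulk entry point; per the generator
+# source its prototype is
+#   void aesni_cbc_encrypt(const unsigned char *in, unsigned char *out,
+#                          size_t length, const AES_KEY *key,
+#                          unsigned char *ivec, int enc);
+# CBC encryption is inherently serial (each block chains into the next),
+# so it runs one block at a time; decryption has no such dependency and
+# uses the six-wide helper.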
+.globl	aesni_cbc_encrypt
+.type	aesni_cbc_encrypt,@function
+.align	16
+aesni_cbc_encrypt:
+.L_aesni_cbc_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	%esp,%ebx
+	movl	24(%esp),%edi
+	subl	$24,%ebx
+	movl	28(%esp),%eax
+	andl	$-16,%ebx
+	movl	32(%esp),%edx
+	movl	36(%esp),%ebp
+	testl	%eax,%eax
+	jz	.L070cbc_abort
+	cmpl	$0,40(%esp)
+	xchgl	%esp,%ebx
+	movups	(%ebp),%xmm7
+	movl	240(%edx),%ecx
+	movl	%edx,%ebp
+	movl	%ebx,16(%esp)
+	movl	%ecx,%ebx
+	je	.L071cbc_decrypt
+	movaps	%xmm7,%xmm2
+	cmpl	$16,%eax
+	jb	.L072cbc_enc_tail
+	subl	$16,%eax
+	jmp	.L073cbc_enc_loop
+.align	16
+.L073cbc_enc_loop:
+	movups	(%esi),%xmm7
+	leal	16(%esi),%esi
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	xorps	%xmm0,%xmm7
+	leal	32(%edx),%edx
+	xorps	%xmm7,%xmm2
+.L074enc1_loop_15:
+.byte	102,15,56,220,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L074enc1_loop_15
+.byte	102,15,56,221,209
+	movl	%ebx,%ecx
+	movl	%ebp,%edx
+	movups	%xmm2,(%edi)
+	leal	16(%edi),%edi
+	subl	$16,%eax
+	jnc	.L073cbc_enc_loop
+	addl	$16,%eax
+	jnz	.L072cbc_enc_tail
+	movaps	%xmm2,%xmm7
+	jmp	.L075cbc_ret
+.L072cbc_enc_tail:
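+# The two .long constants below are hand-encoded string instructions:
+# 2767451785 decodes to "mov %esi,%esi; rep movsb" (copy the partial
+# block) and 2868115081 to "mov %esi,%esi; rep stosb" (pad the rest).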
+	movl	%eax,%ecx
+.long	2767451785
+	movl	$16,%ecx
+	subl	%eax,%ecx
+	xorl	%eax,%eax
+.long	2868115081
+	leal	-16(%edi),%edi
+	movl	%ebx,%ecx
+	movl	%edi,%esi
+	movl	%ebp,%edx
+	jmp	.L073cbc_enc_loop
+.align	16
+.L071cbc_decrypt:
+	cmpl	$80,%eax
+	jbe	.L076cbc_dec_tail
+	movaps	%xmm7,(%esp)
+	subl	$80,%eax
+	jmp	.L077cbc_dec_loop6_enter
+.align	16
+.L078cbc_dec_loop6:
+	movaps	%xmm0,(%esp)
+	movups	%xmm7,(%edi)
+	leal	16(%edi),%edi
+.L077cbc_dec_loop6_enter:
+	movdqu	(%esi),%xmm2
+	movdqu	16(%esi),%xmm3
+	movdqu	32(%esi),%xmm4
+	movdqu	48(%esi),%xmm5
+	movdqu	64(%esi),%xmm6
+	movdqu	80(%esi),%xmm7
+	call	_aesni_decrypt6
+	movups	(%esi),%xmm1
+	movups	16(%esi),%xmm0
+	xorps	(%esp),%xmm2
+	xorps	%xmm1,%xmm3
+	movups	32(%esi),%xmm1
+	xorps	%xmm0,%xmm4
+	movups	48(%esi),%xmm0
+	xorps	%xmm1,%xmm5
+	movups	64(%esi),%xmm1
+	xorps	%xmm0,%xmm6
+	movups	80(%esi),%xmm0
+	xorps	%xmm1,%xmm7
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	leal	96(%esi),%esi
+	movups	%xmm4,32(%edi)
+	movl	%ebx,%ecx
+	movups	%xmm5,48(%edi)
+	movl	%ebp,%edx
+	movups	%xmm6,64(%edi)
+	leal	80(%edi),%edi
+	subl	$96,%eax
+	ja	.L078cbc_dec_loop6
+	movaps	%xmm7,%xmm2
+	movaps	%xmm0,%xmm7
+	addl	$80,%eax
+	jle	.L079cbc_dec_tail_collected
+	movups	%xmm2,(%edi)
+	leal	16(%edi),%edi
+.L076cbc_dec_tail:
+	movups	(%esi),%xmm2
+	movaps	%xmm2,%xmm6
+	cmpl	$16,%eax
+	jbe	.L080cbc_dec_one
+	movups	16(%esi),%xmm3
+	movaps	%xmm3,%xmm5
+	cmpl	$32,%eax
+	jbe	.L081cbc_dec_two
+	movups	32(%esi),%xmm4
+	cmpl	$48,%eax
+	jbe	.L082cbc_dec_three
+	movups	48(%esi),%xmm5
+	cmpl	$64,%eax
+	jbe	.L083cbc_dec_four
+	movups	64(%esi),%xmm6
+	movaps	%xmm7,(%esp)
+	movups	(%esi),%xmm2
+	xorps	%xmm7,%xmm7
+	call	_aesni_decrypt6
+	movups	(%esi),%xmm1
+	movups	16(%esi),%xmm0
+	xorps	(%esp),%xmm2
+	xorps	%xmm1,%xmm3
+	movups	32(%esi),%xmm1
+	xorps	%xmm0,%xmm4
+	movups	48(%esi),%xmm0
+	xorps	%xmm1,%xmm5
+	movups	64(%esi),%xmm7
+	xorps	%xmm0,%xmm6
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	leal	64(%edi),%edi
+	movaps	%xmm6,%xmm2
+	subl	$80,%eax
+	jmp	.L079cbc_dec_tail_collected
+.align	16
+.L080cbc_dec_one:
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L084dec1_loop_16:
+.byte	102,15,56,222,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L084dec1_loop_16
+.byte	102,15,56,223,209
+	xorps	%xmm7,%xmm2
+	movaps	%xmm6,%xmm7
+	subl	$16,%eax
+	jmp	.L079cbc_dec_tail_collected
+.align	16
+.L081cbc_dec_two:
+	xorps	%xmm4,%xmm4
+	call	_aesni_decrypt3
+	xorps	%xmm7,%xmm2
+	xorps	%xmm6,%xmm3
+	movups	%xmm2,(%edi)
+	movaps	%xmm3,%xmm2
+	leal	16(%edi),%edi
+	movaps	%xmm5,%xmm7
+	subl	$32,%eax
+	jmp	.L079cbc_dec_tail_collected
+.align	16
+.L082cbc_dec_three:
+	call	_aesni_decrypt3
+	xorps	%xmm7,%xmm2
+	xorps	%xmm6,%xmm3
+	xorps	%xmm5,%xmm4
+	movups	%xmm2,(%edi)
+	movaps	%xmm4,%xmm2
+	movups	%xmm3,16(%edi)
+	leal	32(%edi),%edi
+	movups	32(%esi),%xmm7
+	subl	$48,%eax
+	jmp	.L079cbc_dec_tail_collected
+.align	16
+.L083cbc_dec_four:
+	call	_aesni_decrypt4
+	movups	16(%esi),%xmm1
+	movups	32(%esi),%xmm0
+	xorps	%xmm7,%xmm2
+	movups	48(%esi),%xmm7
+	xorps	%xmm6,%xmm3
+	movups	%xmm2,(%edi)
+	xorps	%xmm1,%xmm4
+	movups	%xmm3,16(%edi)
+	xorps	%xmm0,%xmm5
+	movups	%xmm4,32(%edi)
+	leal	48(%edi),%edi
+	movaps	%xmm5,%xmm2
+	subl	$64,%eax
+.L079cbc_dec_tail_collected:
+	andl	$15,%eax
+	jnz	.L085cbc_dec_tail_partial
+	movups	%xmm2,(%edi)
+	jmp	.L075cbc_ret
+.align	16
+.L085cbc_dec_tail_partial:
+	movaps	%xmm2,(%esp)
+	movl	$16,%ecx
+	movl	%esp,%esi
+	subl	%eax,%ecx
+.long	2767451785
+.L075cbc_ret:
+	movl	16(%esp),%esp
+	movl	36(%esp),%ebp
+	movups	%xmm7,(%ebp)
+.L070cbc_abort:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	aesni_cbc_encrypt,.-.L_aesni_cbc_encrypt_begin
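+# _aesni_set_encrypt_key expands a user key into the round-key schedule
+# with AESKEYGENASSIST (the .byte 102,15,58,223,... opcodes). Arguments
+# arrive in registers - key bytes at %eax, bit count in %ecx, schedule at
+# %edx - and it returns 0 on success, -1 for a NULL pointer and -2 for an
+# unsupported key size.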
+.type	_aesni_set_encrypt_key,@function
+.align	16
+_aesni_set_encrypt_key:
+	testl	%eax,%eax
+	jz	.L086bad_pointer
+	testl	%edx,%edx
+	jz	.L086bad_pointer
+	movups	(%eax),%xmm0
+	xorps	%xmm4,%xmm4
+	leal	16(%edx),%edx
+	cmpl	$256,%ecx
+	je	.L08714rounds
+	cmpl	$192,%ecx
+	je	.L08812rounds
+	cmpl	$128,%ecx
+	jne	.L089bad_keybits
+.align	16
+.L09010rounds:
+	movl	$9,%ecx
+	movups	%xmm0,-16(%edx)
+.byte	102,15,58,223,200,1
+	call	.L091key_128_cold
+.byte	102,15,58,223,200,2
+	call	.L092key_128
+.byte	102,15,58,223,200,4
+	call	.L092key_128
+.byte	102,15,58,223,200,8
+	call	.L092key_128
+.byte	102,15,58,223,200,16
+	call	.L092key_128
+.byte	102,15,58,223,200,32
+	call	.L092key_128
+.byte	102,15,58,223,200,64
+	call	.L092key_128
+.byte	102,15,58,223,200,128
+	call	.L092key_128
+.byte	102,15,58,223,200,27
+	call	.L092key_128
+.byte	102,15,58,223,200,54
+	call	.L092key_128
+	movups	%xmm0,(%edx)
+	movl	%ecx,80(%edx)
+	xorl	%eax,%eax
+	ret
+.align	16
+.L092key_128:
+	movups	%xmm0,(%edx)
+	leal	16(%edx),%edx
+.L091key_128_cold:
+	shufps	$16,%xmm0,%xmm4
+	xorps	%xmm4,%xmm0
+	shufps	$140,%xmm0,%xmm4
+	xorps	%xmm4,%xmm0
+	shufps	$255,%xmm1,%xmm1
+	xorps	%xmm1,%xmm0
+	ret
+.align	16
+.L08812rounds:
+	movq	16(%eax),%xmm2
+	movl	$11,%ecx
+	movups	%xmm0,-16(%edx)
+.byte	102,15,58,223,202,1
+	call	.L093key_192a_cold
+.byte	102,15,58,223,202,2
+	call	.L094key_192b
+.byte	102,15,58,223,202,4
+	call	.L095key_192a
+.byte	102,15,58,223,202,8
+	call	.L094key_192b
+.byte	102,15,58,223,202,16
+	call	.L095key_192a
+.byte	102,15,58,223,202,32
+	call	.L094key_192b
+.byte	102,15,58,223,202,64
+	call	.L095key_192a
+.byte	102,15,58,223,202,128
+	call	.L094key_192b
+	movups	%xmm0,(%edx)
+	movl	%ecx,48(%edx)
+	xorl	%eax,%eax
+	ret
+.align	16
+.L095key_192a:
+	movups	%xmm0,(%edx)
+	leal	16(%edx),%edx
+.align	16
+.L093key_192a_cold:
+	movaps	%xmm2,%xmm5
+.L096key_192b_warm:
+	shufps	$16,%xmm0,%xmm4
+	movdqa	%xmm2,%xmm3
+	xorps	%xmm4,%xmm0
+	shufps	$140,%xmm0,%xmm4
+	pslldq	$4,%xmm3
+	xorps	%xmm4,%xmm0
+	pshufd	$85,%xmm1,%xmm1
+	pxor	%xmm3,%xmm2
+	pxor	%xmm1,%xmm0
+	pshufd	$255,%xmm0,%xmm3
+	pxor	%xmm3,%xmm2
+	ret
+.align	16
+.L094key_192b:
+	movaps	%xmm0,%xmm3
+	shufps	$68,%xmm0,%xmm5
+	movups	%xmm5,(%edx)
+	shufps	$78,%xmm2,%xmm3
+	movups	%xmm3,16(%edx)
+	leal	32(%edx),%edx
+	jmp	.L096key_192b_warm
+.align	16
+.L08714rounds:
+	movups	16(%eax),%xmm2
+	movl	$13,%ecx
+	leal	16(%edx),%edx
+	movups	%xmm0,-32(%edx)
+	movups	%xmm2,-16(%edx)
+.byte	102,15,58,223,202,1
+	call	.L097key_256a_cold
+.byte	102,15,58,223,200,1
+	call	.L098key_256b
+.byte	102,15,58,223,202,2
+	call	.L099key_256a
+.byte	102,15,58,223,200,2
+	call	.L098key_256b
+.byte	102,15,58,223,202,4
+	call	.L099key_256a
+.byte	102,15,58,223,200,4
+	call	.L098key_256b
+.byte	102,15,58,223,202,8
+	call	.L099key_256a
+.byte	102,15,58,223,200,8
+	call	.L098key_256b
+.byte	102,15,58,223,202,16
+	call	.L099key_256a
+.byte	102,15,58,223,200,16
+	call	.L098key_256b
+.byte	102,15,58,223,202,32
+	call	.L099key_256a
+.byte	102,15,58,223,200,32
+	call	.L098key_256b
+.byte	102,15,58,223,202,64
+	call	.L099key_256a
+	movups	%xmm0,(%edx)
+	movl	%ecx,16(%edx)
+	xorl	%eax,%eax
+	ret
+.align	16
+.L099key_256a:
+	movups	%xmm2,(%edx)
+	leal	16(%edx),%edx
+.L097key_256a_cold:
+	shufps	$16,%xmm0,%xmm4
+	xorps	%xmm4,%xmm0
+	shufps	$140,%xmm0,%xmm4
+	xorps	%xmm4,%xmm0
+	shufps	$255,%xmm1,%xmm1
+	xorps	%xmm1,%xmm0
+	ret
+.align	16
+.L098key_256b:
+	movups	%xmm0,(%edx)
+	leal	16(%edx),%edx
+	shufps	$16,%xmm2,%xmm4
+	xorps	%xmm4,%xmm2
+	shufps	$140,%xmm2,%xmm4
+	xorps	%xmm4,%xmm2
+	shufps	$170,%xmm1,%xmm1
+	xorps	%xmm1,%xmm2
+	ret
+.align	4
+.L086bad_pointer:
+	movl	$-1,%eax
+	ret
+.align	4
+.L089bad_keybits:
+	movl	$-2,%eax
+	ret
+.size	_aesni_set_encrypt_key,.-_aesni_set_encrypt_key
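+# aesni_set_encrypt_key is the C-callable wrapper; per the generator
+# source the prototype is
+#   int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
+#                             AES_KEY *key);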
+.globl	aesni_set_encrypt_key
+.type	aesni_set_encrypt_key,@function
+.align	16
+aesni_set_encrypt_key:
+.L_aesni_set_encrypt_key_begin:
+	movl	4(%esp),%eax
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edx
+	call	_aesni_set_encrypt_key
+	ret
+.size	aesni_set_encrypt_key,.-.L_aesni_set_encrypt_key_begin
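+# aesni_set_decrypt_key builds the encrypt schedule, then converts it in
+# place: the round keys are swapped end-for-end and each inner key is run
+# through AESIMC (.byte 102,15,56,219,...) to match the Equivalent
+# Inverse Cipher form that AESDEC expects.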
+.globl	aesni_set_decrypt_key
+.type	aesni_set_decrypt_key,@function
+.align	16
+aesni_set_decrypt_key:
+.L_aesni_set_decrypt_key_begin:
+	movl	4(%esp),%eax
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edx
+	call	_aesni_set_encrypt_key
+	movl	12(%esp),%edx
+	shll	$4,%ecx
+	testl	%eax,%eax
+	jnz	.L100dec_key_ret
+	leal	16(%edx,%ecx,1),%eax
+	movups	(%edx),%xmm0
+	movups	(%eax),%xmm1
+	movups	%xmm0,(%eax)
+	movups	%xmm1,(%edx)
+	leal	16(%edx),%edx
+	leal	-16(%eax),%eax
+.L101dec_key_inverse:
+	movups	(%edx),%xmm0
+	movups	(%eax),%xmm1
+.byte	102,15,56,219,192
+.byte	102,15,56,219,201
+	leal	16(%edx),%edx
+	leal	-16(%eax),%eax
+	movups	%xmm0,16(%eax)
+	movups	%xmm1,-16(%edx)
+	cmpl	%edx,%eax
+	ja	.L101dec_key_inverse
+	movups	(%edx),%xmm0
+.byte	102,15,56,219,192
+	movups	%xmm0,(%edx)
+	xorl	%eax,%eax
+.L100dec_key_ret:
+	ret
+.size	aesni_set_decrypt_key,.-.L_aesni_set_decrypt_key_begin
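+# The .byte run below is the generator's embedded ID string:
+# "AES for Intel AES-NI, CRYPTOGAMS by <appro@openssl.org>".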
+.byte	65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69
+.byte	83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83
+.byte	32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
+.byte	115,108,46,111,114,103,62,0
+#else
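+# The non-PIC branch below appears to be the same generated code over
+# again; this module has no position-dependent data, so the two halves
+# should differ only in local label numbering.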
+.file	"aesni-x86.S"
+.text
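+# aesni_encrypt/aesni_decrypt are the single-block primitives, e.g.
+#   void aesni_encrypt(const unsigned char *in, unsigned char *out,
+#                      const AES_KEY *key);
+# The round count comes from 240(%edx): the rounds field sits at offset
+# 240 in AES_KEY, after its 60 four-byte round-key words.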
+.globl	aesni_encrypt
+.type	aesni_encrypt,@function
+.align	16
+aesni_encrypt:
+.L_aesni_encrypt_begin:
+	movl	4(%esp),%eax
+	movl	12(%esp),%edx
+	movups	(%eax),%xmm2
+	movl	240(%edx),%ecx
+	movl	8(%esp),%eax
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L000enc1_loop_1:
+.byte	102,15,56,220,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L000enc1_loop_1
+.byte	102,15,56,221,209
+	movups	%xmm2,(%eax)
+	ret
+.size	aesni_encrypt,.-.L_aesni_encrypt_begin
+.globl	aesni_decrypt
+.type	aesni_decrypt,@function
+.align	16
+aesni_decrypt:
+.L_aesni_decrypt_begin:
+	movl	4(%esp),%eax
+	movl	12(%esp),%edx
+	movups	(%eax),%xmm2
+	movl	240(%edx),%ecx
+	movl	8(%esp),%eax
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L001dec1_loop_2:
+.byte	102,15,56,222,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L001dec1_loop_2
+.byte	102,15,56,223,209
+	movups	%xmm2,(%eax)
+	ret
+.size	aesni_decrypt,.-.L_aesni_decrypt_begin
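+# _aesni_encrypt3/4/6 and their decrypt twins share one trick: %ecx is
+# halved up front (shrl $1) because every loop iteration issues two
+# rounds per block - one key from %xmm1, one from %xmm0 - so the counter
+# ticks once per pair of rounds, with the final rounds finished outside
+# the loop by aesenclast/aesdeclast.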
+.type	_aesni_encrypt3,@function
+.align	16
+_aesni_encrypt3:
+	movups	(%edx),%xmm0
+	shrl	$1,%ecx
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+	pxor	%xmm0,%xmm3
+	pxor	%xmm0,%xmm4
+	movups	(%edx),%xmm0
+.L002enc3_loop:
+.byte	102,15,56,220,209
+.byte	102,15,56,220,217
+	decl	%ecx
+.byte	102,15,56,220,225
+	movups	16(%edx),%xmm1
+.byte	102,15,56,220,208
+.byte	102,15,56,220,216
+	leal	32(%edx),%edx
+.byte	102,15,56,220,224
+	movups	(%edx),%xmm0
+	jnz	.L002enc3_loop
+.byte	102,15,56,220,209
+.byte	102,15,56,220,217
+.byte	102,15,56,220,225
+.byte	102,15,56,221,208
+.byte	102,15,56,221,216
+.byte	102,15,56,221,224
+	ret
+.size	_aesni_encrypt3,.-_aesni_encrypt3
+.type	_aesni_decrypt3,@function
+.align	16
+_aesni_decrypt3:
+	movups	(%edx),%xmm0
+	shrl	$1,%ecx
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+	pxor	%xmm0,%xmm3
+	pxor	%xmm0,%xmm4
+	movups	(%edx),%xmm0
+.L003dec3_loop:
+.byte	102,15,56,222,209
+.byte	102,15,56,222,217
+	decl	%ecx
+.byte	102,15,56,222,225
+	movups	16(%edx),%xmm1
+.byte	102,15,56,222,208
+.byte	102,15,56,222,216
+	leal	32(%edx),%edx
+.byte	102,15,56,222,224
+	movups	(%edx),%xmm0
+	jnz	.L003dec3_loop
+.byte	102,15,56,222,209
+.byte	102,15,56,222,217
+.byte	102,15,56,222,225
+.byte	102,15,56,223,208
+.byte	102,15,56,223,216
+.byte	102,15,56,223,224
+	ret
+.size	_aesni_decrypt3,.-_aesni_decrypt3
+.type	_aesni_encrypt4,@function
+.align	16
+_aesni_encrypt4:
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	shrl	$1,%ecx
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+	pxor	%xmm0,%xmm3
+	pxor	%xmm0,%xmm4
+	pxor	%xmm0,%xmm5
+	movups	(%edx),%xmm0
+.L004enc4_loop:
+.byte	102,15,56,220,209
+.byte	102,15,56,220,217
+	decl	%ecx
+.byte	102,15,56,220,225
+.byte	102,15,56,220,233
+	movups	16(%edx),%xmm1
+.byte	102,15,56,220,208
+.byte	102,15,56,220,216
+	leal	32(%edx),%edx
+.byte	102,15,56,220,224
+.byte	102,15,56,220,232
+	movups	(%edx),%xmm0
+	jnz	.L004enc4_loop
+.byte	102,15,56,220,209
+.byte	102,15,56,220,217
+.byte	102,15,56,220,225
+.byte	102,15,56,220,233
+.byte	102,15,56,221,208
+.byte	102,15,56,221,216
+.byte	102,15,56,221,224
+.byte	102,15,56,221,232
+	ret
+.size	_aesni_encrypt4,.-_aesni_encrypt4
+.type	_aesni_decrypt4,@function
+.align	16
+_aesni_decrypt4:
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	shrl	$1,%ecx
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+	pxor	%xmm0,%xmm3
+	pxor	%xmm0,%xmm4
+	pxor	%xmm0,%xmm5
+	movups	(%edx),%xmm0
+.L005dec4_loop:
+.byte	102,15,56,222,209
+.byte	102,15,56,222,217
+	decl	%ecx
+.byte	102,15,56,222,225
+.byte	102,15,56,222,233
+	movups	16(%edx),%xmm1
+.byte	102,15,56,222,208
+.byte	102,15,56,222,216
+	leal	32(%edx),%edx
+.byte	102,15,56,222,224
+.byte	102,15,56,222,232
+	movups	(%edx),%xmm0
+	jnz	.L005dec4_loop
+.byte	102,15,56,222,209
+.byte	102,15,56,222,217
+.byte	102,15,56,222,225
+.byte	102,15,56,222,233
+.byte	102,15,56,223,208
+.byte	102,15,56,223,216
+.byte	102,15,56,223,224
+.byte	102,15,56,223,232
+	ret
+.size	_aesni_decrypt4,.-_aesni_decrypt4
+.type	_aesni_encrypt6,@function
+.align	16
+_aesni_encrypt6:
+	movups	(%edx),%xmm0
+	shrl	$1,%ecx
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+	pxor	%xmm0,%xmm3
+.byte	102,15,56,220,209
+	pxor	%xmm0,%xmm4
+.byte	102,15,56,220,217
+	pxor	%xmm0,%xmm5
+	decl	%ecx
+.byte	102,15,56,220,225
+	pxor	%xmm0,%xmm6
+.byte	102,15,56,220,233
+	pxor	%xmm0,%xmm7
+.byte	102,15,56,220,241
+	movups	(%edx),%xmm0
+.byte	102,15,56,220,249
+	jmp	.L_aesni_encrypt6_enter
+.align	16
+.L006enc6_loop:
+.byte	102,15,56,220,209
+.byte	102,15,56,220,217
+	decl	%ecx
+.byte	102,15,56,220,225
+.byte	102,15,56,220,233
+.byte	102,15,56,220,241
+.byte	102,15,56,220,249
+.align	16
+.L_aesni_encrypt6_enter:
+	movups	16(%edx),%xmm1
+.byte	102,15,56,220,208
+.byte	102,15,56,220,216
+	leal	32(%edx),%edx
+.byte	102,15,56,220,224
+.byte	102,15,56,220,232
+.byte	102,15,56,220,240
+.byte	102,15,56,220,248
+	movups	(%edx),%xmm0
+	jnz	.L006enc6_loop
+.byte	102,15,56,220,209
+.byte	102,15,56,220,217
+.byte	102,15,56,220,225
+.byte	102,15,56,220,233
+.byte	102,15,56,220,241
+.byte	102,15,56,220,249
+.byte	102,15,56,221,208
+.byte	102,15,56,221,216
+.byte	102,15,56,221,224
+.byte	102,15,56,221,232
+.byte	102,15,56,221,240
+.byte	102,15,56,221,248
+	ret
+.size	_aesni_encrypt6,.-_aesni_encrypt6
+.type	_aesni_decrypt6,@function
+.align	16
+_aesni_decrypt6:
+	movups	(%edx),%xmm0
+	shrl	$1,%ecx
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+	pxor	%xmm0,%xmm3
+.byte	102,15,56,222,209
+	pxor	%xmm0,%xmm4
+.byte	102,15,56,222,217
+	pxor	%xmm0,%xmm5
+	decl	%ecx
+.byte	102,15,56,222,225
+	pxor	%xmm0,%xmm6
+.byte	102,15,56,222,233
+	pxor	%xmm0,%xmm7
+.byte	102,15,56,222,241
+	movups	(%edx),%xmm0
+.byte	102,15,56,222,249
+	jmp	.L_aesni_decrypt6_enter
+.align	16
+.L007dec6_loop:
+.byte	102,15,56,222,209
+.byte	102,15,56,222,217
+	decl	%ecx
+.byte	102,15,56,222,225
+.byte	102,15,56,222,233
+.byte	102,15,56,222,241
+.byte	102,15,56,222,249
+.align	16
+.L_aesni_decrypt6_enter:
+	movups	16(%edx),%xmm1
+.byte	102,15,56,222,208
+.byte	102,15,56,222,216
+	leal	32(%edx),%edx
+.byte	102,15,56,222,224
+.byte	102,15,56,222,232
+.byte	102,15,56,222,240
+.byte	102,15,56,222,248
+	movups	(%edx),%xmm0
+	jnz	.L007dec6_loop
+.byte	102,15,56,222,209
+.byte	102,15,56,222,217
+.byte	102,15,56,222,225
+.byte	102,15,56,222,233
+.byte	102,15,56,222,241
+.byte	102,15,56,222,249
+.byte	102,15,56,223,208
+.byte	102,15,56,223,216
+.byte	102,15,56,223,224
+.byte	102,15,56,223,232
+.byte	102,15,56,223,240
+.byte	102,15,56,223,248
+	ret
+.size	_aesni_decrypt6,.-_aesni_decrypt6
+.globl	aesni_ecb_encrypt
+.type	aesni_ecb_encrypt,@function
+.align	16
+aesni_ecb_encrypt:
+.L_aesni_ecb_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%eax
+	movl	32(%esp),%edx
+	movl	36(%esp),%ebx
+	andl	$-16,%eax
+	jz	.L008ecb_ret
+	movl	240(%edx),%ecx
+	testl	%ebx,%ebx
+	jz	.L009ecb_decrypt
+	movl	%edx,%ebp
+	movl	%ecx,%ebx
+	cmpl	$96,%eax
+	jb	.L010ecb_enc_tail
+	movdqu	(%esi),%xmm2
+	movdqu	16(%esi),%xmm3
+	movdqu	32(%esi),%xmm4
+	movdqu	48(%esi),%xmm5
+	movdqu	64(%esi),%xmm6
+	movdqu	80(%esi),%xmm7
+	leal	96(%esi),%esi
+	subl	$96,%eax
+	jmp	.L011ecb_enc_loop6_enter
+.align	16
+.L012ecb_enc_loop6:
+	movups	%xmm2,(%edi)
+	movdqu	(%esi),%xmm2
+	movups	%xmm3,16(%edi)
+	movdqu	16(%esi),%xmm3
+	movups	%xmm4,32(%edi)
+	movdqu	32(%esi),%xmm4
+	movups	%xmm5,48(%edi)
+	movdqu	48(%esi),%xmm5
+	movups	%xmm6,64(%edi)
+	movdqu	64(%esi),%xmm6
+	movups	%xmm7,80(%edi)
+	leal	96(%edi),%edi
+	movdqu	80(%esi),%xmm7
+	leal	96(%esi),%esi
+.L011ecb_enc_loop6_enter:
+	call	_aesni_encrypt6
+	movl	%ebp,%edx
+	movl	%ebx,%ecx
+	subl	$96,%eax
+	jnc	.L012ecb_enc_loop6
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	movups	%xmm6,64(%edi)
+	movups	%xmm7,80(%edi)
+	leal	96(%edi),%edi
+	addl	$96,%eax
+	jz	.L008ecb_ret
+.L010ecb_enc_tail:
+	movups	(%esi),%xmm2
+	cmpl	$32,%eax
+	jb	.L013ecb_enc_one
+	movups	16(%esi),%xmm3
+	je	.L014ecb_enc_two
+	movups	32(%esi),%xmm4
+	cmpl	$64,%eax
+	jb	.L015ecb_enc_three
+	movups	48(%esi),%xmm5
+	je	.L016ecb_enc_four
+	movups	64(%esi),%xmm6
+	xorps	%xmm7,%xmm7
+	call	_aesni_encrypt6
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	movups	%xmm6,64(%edi)
+	jmp	.L008ecb_ret
+.align	16
+.L013ecb_enc_one:
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L017enc1_loop_3:
+.byte	102,15,56,220,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L017enc1_loop_3
+.byte	102,15,56,221,209
+	movups	%xmm2,(%edi)
+	jmp	.L008ecb_ret
+.align	16
+.L014ecb_enc_two:
+	xorps	%xmm4,%xmm4
+	call	_aesni_encrypt3
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	jmp	.L008ecb_ret
+.align	16
+.L015ecb_enc_three:
+	call	_aesni_encrypt3
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	jmp	.L008ecb_ret
+.align	16
+.L016ecb_enc_four:
+	call	_aesni_encrypt4
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	jmp	.L008ecb_ret
+.align	16
+.L009ecb_decrypt:
+	movl	%edx,%ebp
+	movl	%ecx,%ebx
+	cmpl	$96,%eax
+	jb	.L018ecb_dec_tail
+	movdqu	(%esi),%xmm2
+	movdqu	16(%esi),%xmm3
+	movdqu	32(%esi),%xmm4
+	movdqu	48(%esi),%xmm5
+	movdqu	64(%esi),%xmm6
+	movdqu	80(%esi),%xmm7
+	leal	96(%esi),%esi
+	subl	$96,%eax
+	jmp	.L019ecb_dec_loop6_enter
+.align	16
+.L020ecb_dec_loop6:
+	movups	%xmm2,(%edi)
+	movdqu	(%esi),%xmm2
+	movups	%xmm3,16(%edi)
+	movdqu	16(%esi),%xmm3
+	movups	%xmm4,32(%edi)
+	movdqu	32(%esi),%xmm4
+	movups	%xmm5,48(%edi)
+	movdqu	48(%esi),%xmm5
+	movups	%xmm6,64(%edi)
+	movdqu	64(%esi),%xmm6
+	movups	%xmm7,80(%edi)
+	leal	96(%edi),%edi
+	movdqu	80(%esi),%xmm7
+	leal	96(%esi),%esi
+.L019ecb_dec_loop6_enter:
+	call	_aesni_decrypt6
+	movl	%ebp,%edx
+	movl	%ebx,%ecx
+	subl	$96,%eax
+	jnc	.L020ecb_dec_loop6
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	movups	%xmm6,64(%edi)
+	movups	%xmm7,80(%edi)
+	leal	96(%edi),%edi
+	addl	$96,%eax
+	jz	.L008ecb_ret
+.L018ecb_dec_tail:
+	movups	(%esi),%xmm2
+	cmpl	$32,%eax
+	jb	.L021ecb_dec_one
+	movups	16(%esi),%xmm3
+	je	.L022ecb_dec_two
+	movups	32(%esi),%xmm4
+	cmpl	$64,%eax
+	jb	.L023ecb_dec_three
+	movups	48(%esi),%xmm5
+	je	.L024ecb_dec_four
+	movups	64(%esi),%xmm6
+	xorps	%xmm7,%xmm7
+	call	_aesni_decrypt6
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	movups	%xmm6,64(%edi)
+	jmp	.L008ecb_ret
+.align	16
+.L021ecb_dec_one:
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L025dec1_loop_4:
+.byte	102,15,56,222,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L025dec1_loop_4
+.byte	102,15,56,223,209
+	movups	%xmm2,(%edi)
+	jmp	.L008ecb_ret
+.align	16
+.L022ecb_dec_two:
+	xorps	%xmm4,%xmm4
+	call	_aesni_decrypt3
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	jmp	.L008ecb_ret
+.align	16
+.L023ecb_dec_three:
+	call	_aesni_decrypt3
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	jmp	.L008ecb_ret
+.align	16
+.L024ecb_dec_four:
+	call	_aesni_decrypt4
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+.L008ecb_ret:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	aesni_ecb_encrypt,.-.L_aesni_ecb_encrypt_begin
+.globl	aesni_ccm64_encrypt_blocks
+.type	aesni_ccm64_encrypt_blocks,@function
+.align	16
+aesni_ccm64_encrypt_blocks:
+.L_aesni_ccm64_encrypt_blocks_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%eax
+	movl	32(%esp),%edx
+	movl	36(%esp),%ebx
+	movl	40(%esp),%ecx
+	movl	%esp,%ebp
+	subl	$60,%esp
+	andl	$-16,%esp
+	movl	%ebp,48(%esp)
+	movdqu	(%ebx),%xmm7
+	movdqu	(%ecx),%xmm3
+	movl	240(%edx),%ecx
+	movl	$202182159,(%esp)
+	movl	$134810123,4(%esp)
+	movl	$67438087,8(%esp)
+	movl	$66051,12(%esp)
+	movl	$1,%ebx
+	xorl	%ebp,%ebp
+	movl	%ebx,16(%esp)
+	movl	%ebp,20(%esp)
+	movl	%ebp,24(%esp)
+	movl	%ebp,28(%esp)
+	shrl	$1,%ecx
+	leal	(%edx),%ebp
+	movdqa	(%esp),%xmm5
+	movdqa	%xmm7,%xmm2
+	movl	%ecx,%ebx
+.byte	102,15,56,0,253
+.L026ccm64_enc_outer:
+	movups	(%ebp),%xmm0
+	movl	%ebx,%ecx
+	movups	(%esi),%xmm6
+	xorps	%xmm0,%xmm2
+	movups	16(%ebp),%xmm1
+	xorps	%xmm6,%xmm0
+	leal	32(%ebp),%edx
+	xorps	%xmm0,%xmm3
+	movups	(%edx),%xmm0
+.L027ccm64_enc2_loop:
+.byte	102,15,56,220,209
+	decl	%ecx
+.byte	102,15,56,220,217
+	movups	16(%edx),%xmm1
+.byte	102,15,56,220,208
+	leal	32(%edx),%edx
+.byte	102,15,56,220,216
+	movups	(%edx),%xmm0
+	jnz	.L027ccm64_enc2_loop
+.byte	102,15,56,220,209
+.byte	102,15,56,220,217
+	paddq	16(%esp),%xmm7
+.byte	102,15,56,221,208
+.byte	102,15,56,221,216
+	decl	%eax
+	leal	16(%esi),%esi
+	xorps	%xmm2,%xmm6
+	movdqa	%xmm7,%xmm2
+	movups	%xmm6,(%edi)
+	leal	16(%edi),%edi
+.byte	102,15,56,0,213
+	jnz	.L026ccm64_enc_outer
+	movl	48(%esp),%esp
+	movl	40(%esp),%edi
+	movups	%xmm3,(%edi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	aesni_ccm64_encrypt_blocks,.-.L_aesni_ccm64_encrypt_blocks_begin
+.globl	aesni_ccm64_decrypt_blocks
+.type	aesni_ccm64_decrypt_blocks,@function
+.align	16
+aesni_ccm64_decrypt_blocks:
+.L_aesni_ccm64_decrypt_blocks_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%eax
+	movl	32(%esp),%edx
+	movl	36(%esp),%ebx
+	movl	40(%esp),%ecx
+	movl	%esp,%ebp
+	subl	$60,%esp
+	andl	$-16,%esp
+	movl	%ebp,48(%esp)
+	movdqu	(%ebx),%xmm7
+	movdqu	(%ecx),%xmm3
+	movl	240(%edx),%ecx
+	movl	$202182159,(%esp)
+	movl	$134810123,4(%esp)
+	movl	$67438087,8(%esp)
+	movl	$66051,12(%esp)
+	movl	$1,%ebx
+	xorl	%ebp,%ebp
+	movl	%ebx,16(%esp)
+	movl	%ebp,20(%esp)
+	movl	%ebp,24(%esp)
+	movl	%ebp,28(%esp)
+	movdqa	(%esp),%xmm5
+	movdqa	%xmm7,%xmm2
+	movl	%edx,%ebp
+	movl	%ecx,%ebx
+.byte	102,15,56,0,253
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L028enc1_loop_5:
+.byte	102,15,56,220,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L028enc1_loop_5
+.byte	102,15,56,221,209
+	movups	(%esi),%xmm6
+	paddq	16(%esp),%xmm7
+	leal	16(%esi),%esi
+	jmp	.L029ccm64_dec_outer
+.align	16
+.L029ccm64_dec_outer:
+	xorps	%xmm2,%xmm6
+	movdqa	%xmm7,%xmm2
+	movl	%ebx,%ecx
+	movups	%xmm6,(%edi)
+	leal	16(%edi),%edi
+.byte	102,15,56,0,213
+	subl	$1,%eax
+	jz	.L030ccm64_dec_break
+	movups	(%ebp),%xmm0
+	shrl	$1,%ecx
+	movups	16(%ebp),%xmm1
+	xorps	%xmm0,%xmm6
+	leal	32(%ebp),%edx
+	xorps	%xmm0,%xmm2
+	xorps	%xmm6,%xmm3
+	movups	(%edx),%xmm0
+.L031ccm64_dec2_loop:
+.byte	102,15,56,220,209
+	decl	%ecx
+.byte	102,15,56,220,217
+	movups	16(%edx),%xmm1
+.byte	102,15,56,220,208
+	leal	32(%edx),%edx
+.byte	102,15,56,220,216
+	movups	(%edx),%xmm0
+	jnz	.L031ccm64_dec2_loop
+	movups	(%esi),%xmm6
+	paddq	16(%esp),%xmm7
+.byte	102,15,56,220,209
+.byte	102,15,56,220,217
+	leal	16(%esi),%esi
+.byte	102,15,56,221,208
+.byte	102,15,56,221,216
+	jmp	.L029ccm64_dec_outer
+.align	16
+.L030ccm64_dec_break:
+	movl	%ebp,%edx
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	xorps	%xmm0,%xmm6
+	leal	32(%edx),%edx
+	xorps	%xmm6,%xmm3
+.L032enc1_loop_6:
+.byte	102,15,56,220,217
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L032enc1_loop_6
+.byte	102,15,56,221,217
+	movl	48(%esp),%esp
+	movl	40(%esp),%edi
+	movups	%xmm3,(%edi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	aesni_ccm64_decrypt_blocks,.-.L_aesni_ccm64_decrypt_blocks_begin
+.globl	aesni_ctr32_encrypt_blocks
+.type	aesni_ctr32_encrypt_blocks,@function
+.align	16
+aesni_ctr32_encrypt_blocks:
+.L_aesni_ctr32_encrypt_blocks_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%eax
+	movl	32(%esp),%edx
+	movl	36(%esp),%ebx
+	movl	%esp,%ebp
+	subl	$88,%esp
+	andl	$-16,%esp
+	movl	%ebp,80(%esp)
+	cmpl	$1,%eax
+	je	.L033ctr32_one_shortcut
+	movdqu	(%ebx),%xmm7
+	movl	$202182159,(%esp)
+	movl	$134810123,4(%esp)
+	movl	$67438087,8(%esp)
+	movl	$66051,12(%esp)
+	movl	$6,%ecx
+	xorl	%ebp,%ebp
+	movl	%ecx,16(%esp)
+	movl	%ecx,20(%esp)
+	movl	%ecx,24(%esp)
+	movl	%ebp,28(%esp)
+.byte	102,15,58,22,251,3
+.byte	102,15,58,34,253,3
+	movl	240(%edx),%ecx
+	bswap	%ebx
+	pxor	%xmm1,%xmm1
+	pxor	%xmm0,%xmm0
+	movdqa	(%esp),%xmm2
+.byte	102,15,58,34,203,0
+	leal	3(%ebx),%ebp
+.byte	102,15,58,34,197,0
+	incl	%ebx
+.byte	102,15,58,34,203,1
+	incl	%ebp
+.byte	102,15,58,34,197,1
+	incl	%ebx
+.byte	102,15,58,34,203,2
+	incl	%ebp
+.byte	102,15,58,34,197,2
+	movdqa	%xmm1,48(%esp)
+.byte	102,15,56,0,202
+	movdqa	%xmm0,64(%esp)
+.byte	102,15,56,0,194
+	pshufd	$192,%xmm1,%xmm2
+	pshufd	$128,%xmm1,%xmm3
+	cmpl	$6,%eax
+	jb	.L034ctr32_tail
+	movdqa	%xmm7,32(%esp)
+	shrl	$1,%ecx
+	movl	%edx,%ebp
+	movl	%ecx,%ebx
+	subl	$6,%eax
+	jmp	.L035ctr32_loop6
+.align	16
+.L035ctr32_loop6:
+	pshufd	$64,%xmm1,%xmm4
+	movdqa	32(%esp),%xmm1
+	pshufd	$192,%xmm0,%xmm5
+	por	%xmm1,%xmm2
+	pshufd	$128,%xmm0,%xmm6
+	por	%xmm1,%xmm3
+	pshufd	$64,%xmm0,%xmm7
+	por	%xmm1,%xmm4
+	por	%xmm1,%xmm5
+	por	%xmm1,%xmm6
+	por	%xmm1,%xmm7
+	movups	(%ebp),%xmm0
+	movups	16(%ebp),%xmm1
+	leal	32(%ebp),%edx
+	decl	%ecx
+	pxor	%xmm0,%xmm2
+	pxor	%xmm0,%xmm3
+.byte	102,15,56,220,209
+	pxor	%xmm0,%xmm4
+.byte	102,15,56,220,217
+	pxor	%xmm0,%xmm5
+.byte	102,15,56,220,225
+	pxor	%xmm0,%xmm6
+.byte	102,15,56,220,233
+	pxor	%xmm0,%xmm7
+.byte	102,15,56,220,241
+	movups	(%edx),%xmm0
+.byte	102,15,56,220,249
+	call	.L_aesni_encrypt6_enter
+	movups	(%esi),%xmm1
+	movups	16(%esi),%xmm0
+	xorps	%xmm1,%xmm2
+	movups	32(%esi),%xmm1
+	xorps	%xmm0,%xmm3
+	movups	%xmm2,(%edi)
+	movdqa	16(%esp),%xmm0
+	xorps	%xmm1,%xmm4
+	movdqa	48(%esp),%xmm1
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	paddd	%xmm0,%xmm1
+	paddd	64(%esp),%xmm0
+	movdqa	(%esp),%xmm2
+	movups	48(%esi),%xmm3
+	movups	64(%esi),%xmm4
+	xorps	%xmm3,%xmm5
+	movups	80(%esi),%xmm3
+	leal	96(%esi),%esi
+	movdqa	%xmm1,48(%esp)
+.byte	102,15,56,0,202
+	xorps	%xmm4,%xmm6
+	movups	%xmm5,48(%edi)
+	xorps	%xmm3,%xmm7
+	movdqa	%xmm0,64(%esp)
+.byte	102,15,56,0,194
+	movups	%xmm6,64(%edi)
+	pshufd	$192,%xmm1,%xmm2
+	movups	%xmm7,80(%edi)
+	leal	96(%edi),%edi
+	movl	%ebx,%ecx
+	pshufd	$128,%xmm1,%xmm3
+	subl	$6,%eax
+	jnc	.L035ctr32_loop6
+	addl	$6,%eax
+	jz	.L036ctr32_ret
+	movl	%ebp,%edx
+	leal	1(,%ecx,2),%ecx
+	movdqa	32(%esp),%xmm7
+.L034ctr32_tail:
+	por	%xmm7,%xmm2
+	cmpl	$2,%eax
+	jb	.L037ctr32_one
+	pshufd	$64,%xmm1,%xmm4
+	por	%xmm7,%xmm3
+	je	.L038ctr32_two
+	pshufd	$192,%xmm0,%xmm5
+	por	%xmm7,%xmm4
+	cmpl	$4,%eax
+	jb	.L039ctr32_three
+	pshufd	$128,%xmm0,%xmm6
+	por	%xmm7,%xmm5
+	je	.L040ctr32_four
+	por	%xmm7,%xmm6
+	call	_aesni_encrypt6
+	movups	(%esi),%xmm1
+	movups	16(%esi),%xmm0
+	xorps	%xmm1,%xmm2
+	movups	32(%esi),%xmm1
+	xorps	%xmm0,%xmm3
+	movups	48(%esi),%xmm0
+	xorps	%xmm1,%xmm4
+	movups	64(%esi),%xmm1
+	xorps	%xmm0,%xmm5
+	movups	%xmm2,(%edi)
+	xorps	%xmm1,%xmm6
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	movups	%xmm6,64(%edi)
+	jmp	.L036ctr32_ret
+.align	16
+.L033ctr32_one_shortcut:
+	movups	(%ebx),%xmm2
+	movl	240(%edx),%ecx
+.L037ctr32_one:
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L041enc1_loop_7:
+.byte	102,15,56,220,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L041enc1_loop_7
+.byte	102,15,56,221,209
+	movups	(%esi),%xmm6
+	xorps	%xmm2,%xmm6
+	movups	%xmm6,(%edi)
+	jmp	.L036ctr32_ret
+.align	16
+.L038ctr32_two:
+	call	_aesni_encrypt3
+	movups	(%esi),%xmm5
+	movups	16(%esi),%xmm6
+	xorps	%xmm5,%xmm2
+	xorps	%xmm6,%xmm3
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	jmp	.L036ctr32_ret
+.align	16
+.L039ctr32_three:
+	call	_aesni_encrypt3
+	movups	(%esi),%xmm5
+	movups	16(%esi),%xmm6
+	xorps	%xmm5,%xmm2
+	movups	32(%esi),%xmm7
+	xorps	%xmm6,%xmm3
+	movups	%xmm2,(%edi)
+	xorps	%xmm7,%xmm4
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	jmp	.L036ctr32_ret
+.align	16
+.L040ctr32_four:
+	call	_aesni_encrypt4
+	movups	(%esi),%xmm6
+	movups	16(%esi),%xmm7
+	movups	32(%esi),%xmm1
+	xorps	%xmm6,%xmm2
+	movups	48(%esi),%xmm0
+	xorps	%xmm7,%xmm3
+	movups	%xmm2,(%edi)
+	xorps	%xmm1,%xmm4
+	movups	%xmm3,16(%edi)
+	xorps	%xmm0,%xmm5
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+.L036ctr32_ret:
+	movl	80(%esp),%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	aesni_ctr32_encrypt_blocks,.-.L_aesni_ctr32_encrypt_blocks_begin
+.globl	aesni_xts_encrypt
+.type	aesni_xts_encrypt,@function
+.align	16
+aesni_xts_encrypt:
+.L_aesni_xts_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	36(%esp),%edx
+	movl	40(%esp),%esi
+	movl	240(%edx),%ecx
+	movups	(%esi),%xmm2
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L042enc1_loop_8:
+.byte	102,15,56,220,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L042enc1_loop_8
+.byte	102,15,56,221,209
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%eax
+	movl	32(%esp),%edx
+	movl	%esp,%ebp
+	subl	$120,%esp
+	movl	240(%edx),%ecx
+	andl	$-16,%esp
+	movl	$135,96(%esp)
+	movl	$0,100(%esp)
+	movl	$1,104(%esp)
+	movl	$0,108(%esp)
+	movl	%eax,112(%esp)
+	movl	%ebp,116(%esp)
+	movdqa	%xmm2,%xmm1
+	pxor	%xmm0,%xmm0
+	movdqa	96(%esp),%xmm3
+	pcmpgtd	%xmm1,%xmm0
+	andl	$-16,%eax
+	movl	%edx,%ebp
+	movl	%ecx,%ebx
+	subl	$96,%eax
+	jc	.L043xts_enc_short
+	shrl	$1,%ecx
+	movl	%ecx,%ebx
+	jmp	.L044xts_enc_loop6
+.align	16
+.L044xts_enc_loop6:
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,16(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,32(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,48(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	pshufd	$19,%xmm0,%xmm7
+	movdqa	%xmm1,64(%esp)
+	paddq	%xmm1,%xmm1
+	movups	(%ebp),%xmm0
+	pand	%xmm3,%xmm7
+	movups	(%esi),%xmm2
+	pxor	%xmm1,%xmm7
+	movdqu	16(%esi),%xmm3
+	xorps	%xmm0,%xmm2
+	movdqu	32(%esi),%xmm4
+	pxor	%xmm0,%xmm3
+	movdqu	48(%esi),%xmm5
+	pxor	%xmm0,%xmm4
+	movdqu	64(%esi),%xmm6
+	pxor	%xmm0,%xmm5
+	movdqu	80(%esi),%xmm1
+	pxor	%xmm0,%xmm6
+	leal	96(%esi),%esi
+	pxor	(%esp),%xmm2
+	movdqa	%xmm7,80(%esp)
+	pxor	%xmm1,%xmm7
+	movups	16(%ebp),%xmm1
+	leal	32(%ebp),%edx
+	pxor	16(%esp),%xmm3
+.byte	102,15,56,220,209
+	pxor	32(%esp),%xmm4
+.byte	102,15,56,220,217
+	pxor	48(%esp),%xmm5
+	decl	%ecx
+.byte	102,15,56,220,225
+	pxor	64(%esp),%xmm6
+.byte	102,15,56,220,233
+	pxor	%xmm0,%xmm7
+.byte	102,15,56,220,241
+	movups	(%edx),%xmm0
+.byte	102,15,56,220,249
+	call	.L_aesni_encrypt6_enter
+	movdqa	80(%esp),%xmm1
+	pxor	%xmm0,%xmm0
+	xorps	(%esp),%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	xorps	16(%esp),%xmm3
+	movups	%xmm2,(%edi)
+	xorps	32(%esp),%xmm4
+	movups	%xmm3,16(%edi)
+	xorps	48(%esp),%xmm5
+	movups	%xmm4,32(%edi)
+	xorps	64(%esp),%xmm6
+	movups	%xmm5,48(%edi)
+	xorps	%xmm1,%xmm7
+	movups	%xmm6,64(%edi)
+	pshufd	$19,%xmm0,%xmm2
+	movups	%xmm7,80(%edi)
+	leal	96(%edi),%edi
+	movdqa	96(%esp),%xmm3
+	pxor	%xmm0,%xmm0
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	movl	%ebx,%ecx
+	pxor	%xmm2,%xmm1
+	subl	$96,%eax
+	jnc	.L044xts_enc_loop6
+	leal	1(,%ecx,2),%ecx
+	movl	%ebp,%edx
+	movl	%ecx,%ebx
+.L043xts_enc_short:
+	addl	$96,%eax
+	jz	.L045xts_enc_done6x
+	movdqa	%xmm1,%xmm5
+	cmpl	$32,%eax
+	jb	.L046xts_enc_one
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	je	.L047xts_enc_two
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,%xmm6
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	cmpl	$64,%eax
+	jb	.L048xts_enc_three
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,%xmm7
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	movdqa	%xmm5,(%esp)
+	movdqa	%xmm6,16(%esp)
+	je	.L049xts_enc_four
+	movdqa	%xmm7,32(%esp)
+	pshufd	$19,%xmm0,%xmm7
+	movdqa	%xmm1,48(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm7
+	pxor	%xmm1,%xmm7
+	movdqu	(%esi),%xmm2
+	movdqu	16(%esi),%xmm3
+	movdqu	32(%esi),%xmm4
+	pxor	(%esp),%xmm2
+	movdqu	48(%esi),%xmm5
+	pxor	16(%esp),%xmm3
+	movdqu	64(%esi),%xmm6
+	pxor	32(%esp),%xmm4
+	leal	80(%esi),%esi
+	pxor	48(%esp),%xmm5
+	movdqa	%xmm7,64(%esp)
+	pxor	%xmm7,%xmm6
+	call	_aesni_encrypt6
+	movaps	64(%esp),%xmm1
+	xorps	(%esp),%xmm2
+	xorps	16(%esp),%xmm3
+	xorps	32(%esp),%xmm4
+	movups	%xmm2,(%edi)
+	xorps	48(%esp),%xmm5
+	movups	%xmm3,16(%edi)
+	xorps	%xmm1,%xmm6
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	movups	%xmm6,64(%edi)
+	leal	80(%edi),%edi
+	jmp	.L050xts_enc_done
+.align	16
+.L046xts_enc_one:
+	movups	(%esi),%xmm2
+	leal	16(%esi),%esi
+	xorps	%xmm5,%xmm2
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L051enc1_loop_9:
+.byte	102,15,56,220,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L051enc1_loop_9
+.byte	102,15,56,221,209
+	xorps	%xmm5,%xmm2
+	movups	%xmm2,(%edi)
+	leal	16(%edi),%edi
+	movdqa	%xmm5,%xmm1
+	jmp	.L050xts_enc_done
+.align	16
+.L047xts_enc_two:
+	movaps	%xmm1,%xmm6
+	movups	(%esi),%xmm2
+	movups	16(%esi),%xmm3
+	leal	32(%esi),%esi
+	xorps	%xmm5,%xmm2
+	xorps	%xmm6,%xmm3
+	xorps	%xmm4,%xmm4
+	call	_aesni_encrypt3
+	xorps	%xmm5,%xmm2
+	xorps	%xmm6,%xmm3
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	leal	32(%edi),%edi
+	movdqa	%xmm6,%xmm1
+	jmp	.L050xts_enc_done
+.align	16
+.L048xts_enc_three:
+	movaps	%xmm1,%xmm7
+	movups	(%esi),%xmm2
+	movups	16(%esi),%xmm3
+	movups	32(%esi),%xmm4
+	leal	48(%esi),%esi
+	xorps	%xmm5,%xmm2
+	xorps	%xmm6,%xmm3
+	xorps	%xmm7,%xmm4
+	call	_aesni_encrypt3
+	xorps	%xmm5,%xmm2
+	xorps	%xmm6,%xmm3
+	xorps	%xmm7,%xmm4
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	leal	48(%edi),%edi
+	movdqa	%xmm7,%xmm1
+	jmp	.L050xts_enc_done
+.align	16
+.L049xts_enc_four:
+	movaps	%xmm1,%xmm6
+	movups	(%esi),%xmm2
+	movups	16(%esi),%xmm3
+	movups	32(%esi),%xmm4
+	xorps	(%esp),%xmm2
+	movups	48(%esi),%xmm5
+	leal	64(%esi),%esi
+	xorps	16(%esp),%xmm3
+	xorps	%xmm7,%xmm4
+	xorps	%xmm6,%xmm5
+	call	_aesni_encrypt4
+	xorps	(%esp),%xmm2
+	xorps	16(%esp),%xmm3
+	xorps	%xmm7,%xmm4
+	movups	%xmm2,(%edi)
+	xorps	%xmm6,%xmm5
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	leal	64(%edi),%edi
+	movdqa	%xmm6,%xmm1
+	jmp	.L050xts_enc_done
+.align	16
+.L045xts_enc_done6x:
+	movl	112(%esp),%eax
+	andl	$15,%eax
+	jz	.L052xts_enc_ret
+	movdqa	%xmm1,%xmm5
+	movl	%eax,112(%esp)
+	jmp	.L053xts_enc_steal
+.align	16
+.L050xts_enc_done:
+	movl	112(%esp),%eax
+	pxor	%xmm0,%xmm0
+	andl	$15,%eax
+	jz	.L052xts_enc_ret
+	pcmpgtd	%xmm1,%xmm0
+	movl	%eax,112(%esp)
+	pshufd	$19,%xmm0,%xmm5
+	paddq	%xmm1,%xmm1
+	pand	96(%esp),%xmm5
+	pxor	%xmm1,%xmm5
+.L053xts_enc_steal:
+	movzbl	(%esi),%ecx
+	movzbl	-16(%edi),%edx
+	leal	1(%esi),%esi
+	movb	%cl,-16(%edi)
+	movb	%dl,(%edi)
+	leal	1(%edi),%edi
+	subl	$1,%eax
+	jnz	.L053xts_enc_steal
+	subl	112(%esp),%edi
+	movl	%ebp,%edx
+	movl	%ebx,%ecx
+	movups	-16(%edi),%xmm2
+	xorps	%xmm5,%xmm2
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L054enc1_loop_10:
+.byte	102,15,56,220,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L054enc1_loop_10
+.byte	102,15,56,221,209
+	xorps	%xmm5,%xmm2
+	movups	%xmm2,-16(%edi)
+.L052xts_enc_ret:
+	movl	116(%esp),%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	aesni_xts_encrypt,.-.L_aesni_xts_encrypt_begin
+.globl	aesni_xts_decrypt
+.type	aesni_xts_decrypt,@function
+.align	16
+aesni_xts_decrypt:
+.L_aesni_xts_decrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	36(%esp),%edx
+	movl	40(%esp),%esi
+	movl	240(%edx),%ecx
+	movups	(%esi),%xmm2
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L055enc1_loop_11:
+.byte	102,15,56,220,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L055enc1_loop_11
+.byte	102,15,56,221,209
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%eax
+	movl	32(%esp),%edx
+	movl	%esp,%ebp
+	subl	$120,%esp
+	andl	$-16,%esp
+	xorl	%ebx,%ebx
+	testl	$15,%eax
+	setnz	%bl
+	shll	$4,%ebx
+	subl	%ebx,%eax
+	movl	$135,96(%esp)
+	movl	$0,100(%esp)
+	movl	$1,104(%esp)
+	movl	$0,108(%esp)
+	movl	%eax,112(%esp)
+	movl	%ebp,116(%esp)
+	movl	240(%edx),%ecx
+	movl	%edx,%ebp
+	movl	%ecx,%ebx
+	movdqa	%xmm2,%xmm1
+	pxor	%xmm0,%xmm0
+	movdqa	96(%esp),%xmm3
+	pcmpgtd	%xmm1,%xmm0
+	andl	$-16,%eax
+	subl	$96,%eax
+	jc	.L056xts_dec_short
+	shrl	$1,%ecx
+	movl	%ecx,%ebx
+	jmp	.L057xts_dec_loop6
+.align	16
+.L057xts_dec_loop6:
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,16(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,32(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,48(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	pshufd	$19,%xmm0,%xmm7
+	movdqa	%xmm1,64(%esp)
+	paddq	%xmm1,%xmm1
+	movups	(%ebp),%xmm0
+	pand	%xmm3,%xmm7
+	movups	(%esi),%xmm2
+	pxor	%xmm1,%xmm7
+	movdqu	16(%esi),%xmm3
+	xorps	%xmm0,%xmm2
+	movdqu	32(%esi),%xmm4
+	pxor	%xmm0,%xmm3
+	movdqu	48(%esi),%xmm5
+	pxor	%xmm0,%xmm4
+	movdqu	64(%esi),%xmm6
+	pxor	%xmm0,%xmm5
+	movdqu	80(%esi),%xmm1
+	pxor	%xmm0,%xmm6
+	leal	96(%esi),%esi
+	pxor	(%esp),%xmm2
+	movdqa	%xmm7,80(%esp)
+	pxor	%xmm1,%xmm7
+	movups	16(%ebp),%xmm1
+	leal	32(%ebp),%edx
+	pxor	16(%esp),%xmm3
+.byte	102,15,56,222,209
+	pxor	32(%esp),%xmm4
+.byte	102,15,56,222,217
+	pxor	48(%esp),%xmm5
+	decl	%ecx
+.byte	102,15,56,222,225
+	pxor	64(%esp),%xmm6
+.byte	102,15,56,222,233
+	pxor	%xmm0,%xmm7
+.byte	102,15,56,222,241
+	movups	(%edx),%xmm0
+.byte	102,15,56,222,249
+	call	.L_aesni_decrypt6_enter
+	movdqa	80(%esp),%xmm1
+	pxor	%xmm0,%xmm0
+	xorps	(%esp),%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	xorps	16(%esp),%xmm3
+	movups	%xmm2,(%edi)
+	xorps	32(%esp),%xmm4
+	movups	%xmm3,16(%edi)
+	xorps	48(%esp),%xmm5
+	movups	%xmm4,32(%edi)
+	xorps	64(%esp),%xmm6
+	movups	%xmm5,48(%edi)
+	xorps	%xmm1,%xmm7
+	movups	%xmm6,64(%edi)
+	pshufd	$19,%xmm0,%xmm2
+	movups	%xmm7,80(%edi)
+	leal	96(%edi),%edi
+	movdqa	96(%esp),%xmm3
+	pxor	%xmm0,%xmm0
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	movl	%ebx,%ecx
+	pxor	%xmm2,%xmm1
+	subl	$96,%eax
+	jnc	.L057xts_dec_loop6
+	leal	1(,%ecx,2),%ecx
+	movl	%ebp,%edx
+	movl	%ecx,%ebx
+.L056xts_dec_short:
+	addl	$96,%eax
+	jz	.L058xts_dec_done6x
+	movdqa	%xmm1,%xmm5
+	cmpl	$32,%eax
+	jb	.L059xts_dec_one
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	je	.L060xts_dec_two
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,%xmm6
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	cmpl	$64,%eax
+	jb	.L061xts_dec_three
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	%xmm1,%xmm7
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+	movdqa	%xmm5,(%esp)
+	movdqa	%xmm6,16(%esp)
+	je	.L062xts_dec_four
+	movdqa	%xmm7,32(%esp)
+	pshufd	$19,%xmm0,%xmm7
+	movdqa	%xmm1,48(%esp)
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm7
+	pxor	%xmm1,%xmm7
+	movdqu	(%esi),%xmm2
+	movdqu	16(%esi),%xmm3
+	movdqu	32(%esi),%xmm4
+	pxor	(%esp),%xmm2
+	movdqu	48(%esi),%xmm5
+	pxor	16(%esp),%xmm3
+	movdqu	64(%esi),%xmm6
+	pxor	32(%esp),%xmm4
+	leal	80(%esi),%esi
+	pxor	48(%esp),%xmm5
+	movdqa	%xmm7,64(%esp)
+	pxor	%xmm7,%xmm6
+	call	_aesni_decrypt6
+	movaps	64(%esp),%xmm1
+	xorps	(%esp),%xmm2
+	xorps	16(%esp),%xmm3
+	xorps	32(%esp),%xmm4
+	movups	%xmm2,(%edi)
+	xorps	48(%esp),%xmm5
+	movups	%xmm3,16(%edi)
+	xorps	%xmm1,%xmm6
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	movups	%xmm6,64(%edi)
+	leal	80(%edi),%edi
+	jmp	.L063xts_dec_done
+.align	16
+.L059xts_dec_one:
+	movups	(%esi),%xmm2
+	leal	16(%esi),%esi
+	xorps	%xmm5,%xmm2
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L064dec1_loop_12:
+.byte	102,15,56,222,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L064dec1_loop_12
+.byte	102,15,56,223,209
+	xorps	%xmm5,%xmm2
+	movups	%xmm2,(%edi)
+	leal	16(%edi),%edi
+	movdqa	%xmm5,%xmm1
+	jmp	.L063xts_dec_done
+.align	16
+.L060xts_dec_two:
+	movaps	%xmm1,%xmm6
+	movups	(%esi),%xmm2
+	movups	16(%esi),%xmm3
+	leal	32(%esi),%esi
+	xorps	%xmm5,%xmm2
+	xorps	%xmm6,%xmm3
+	call	_aesni_decrypt3
+	xorps	%xmm5,%xmm2
+	xorps	%xmm6,%xmm3
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	leal	32(%edi),%edi
+	movdqa	%xmm6,%xmm1
+	jmp	.L063xts_dec_done
+.align	16
+.L061xts_dec_three:
+	movaps	%xmm1,%xmm7
+	movups	(%esi),%xmm2
+	movups	16(%esi),%xmm3
+	movups	32(%esi),%xmm4
+	leal	48(%esi),%esi
+	xorps	%xmm5,%xmm2
+	xorps	%xmm6,%xmm3
+	xorps	%xmm7,%xmm4
+	call	_aesni_decrypt3
+	xorps	%xmm5,%xmm2
+	xorps	%xmm6,%xmm3
+	xorps	%xmm7,%xmm4
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	leal	48(%edi),%edi
+	movdqa	%xmm7,%xmm1
+	jmp	.L063xts_dec_done
+.align	16
+.L062xts_dec_four:
+	movaps	%xmm1,%xmm6
+	movups	(%esi),%xmm2
+	movups	16(%esi),%xmm3
+	movups	32(%esi),%xmm4
+	xorps	(%esp),%xmm2
+	movups	48(%esi),%xmm5
+	leal	64(%esi),%esi
+	xorps	16(%esp),%xmm3
+	xorps	%xmm7,%xmm4
+	xorps	%xmm6,%xmm5
+	call	_aesni_decrypt4
+	xorps	(%esp),%xmm2
+	xorps	16(%esp),%xmm3
+	xorps	%xmm7,%xmm4
+	movups	%xmm2,(%edi)
+	xorps	%xmm6,%xmm5
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	leal	64(%edi),%edi
+	movdqa	%xmm6,%xmm1
+	jmp	.L063xts_dec_done
+.align	16
+.L058xts_dec_done6x:
+	movl	112(%esp),%eax
+	andl	$15,%eax
+	jz	.L065xts_dec_ret
+	movl	%eax,112(%esp)
+	jmp	.L066xts_dec_only_one_more
+.align	16
+.L063xts_dec_done:
+	movl	112(%esp),%eax
+	pxor	%xmm0,%xmm0
+	andl	$15,%eax
+	jz	.L065xts_dec_ret
+	pcmpgtd	%xmm1,%xmm0
+	movl	%eax,112(%esp)
+	pshufd	$19,%xmm0,%xmm2
+	pxor	%xmm0,%xmm0
+	movdqa	96(%esp),%xmm3
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm2
+	pcmpgtd	%xmm1,%xmm0
+	pxor	%xmm2,%xmm1
+.L066xts_dec_only_one_more:
+	pshufd	$19,%xmm0,%xmm5
+	movdqa	%xmm1,%xmm6
+	paddq	%xmm1,%xmm1
+	pand	%xmm3,%xmm5
+	pxor	%xmm1,%xmm5
+	movl	%ebp,%edx
+	movl	%ebx,%ecx
+	movups	(%esi),%xmm2
+	xorps	%xmm5,%xmm2
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L067dec1_loop_13:
+.byte	102,15,56,222,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L067dec1_loop_13
+.byte	102,15,56,223,209
+	xorps	%xmm5,%xmm2
+	movups	%xmm2,(%edi)
+.L068xts_dec_steal:
+	movzbl	16(%esi),%ecx
+	movzbl	(%edi),%edx
+	leal	1(%esi),%esi
+	movb	%cl,(%edi)
+	movb	%dl,16(%edi)
+	leal	1(%edi),%edi
+	subl	$1,%eax
+	jnz	.L068xts_dec_steal
+	subl	112(%esp),%edi
+	movl	%ebp,%edx
+	movl	%ebx,%ecx
+	movups	(%edi),%xmm2
+	xorps	%xmm6,%xmm2
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L069dec1_loop_14:
+.byte	102,15,56,222,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L069dec1_loop_14
+.byte	102,15,56,223,209
+	xorps	%xmm6,%xmm2
+	movups	%xmm2,(%edi)
+.L065xts_dec_ret:
+	movl	116(%esp),%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	aesni_xts_decrypt,.-.L_aesni_xts_decrypt_begin
+.globl	aesni_cbc_encrypt
+.type	aesni_cbc_encrypt,@function
+.align	16
+aesni_cbc_encrypt:
+.L_aesni_cbc_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	%esp,%ebx
+	movl	24(%esp),%edi
+	subl	$24,%ebx
+	movl	28(%esp),%eax
+	andl	$-16,%ebx
+	movl	32(%esp),%edx
+	movl	36(%esp),%ebp
+	testl	%eax,%eax
+	jz	.L070cbc_abort
+	cmpl	$0,40(%esp)
+	xchgl	%esp,%ebx
+	movups	(%ebp),%xmm7
+	movl	240(%edx),%ecx
+	movl	%edx,%ebp
+	movl	%ebx,16(%esp)
+	movl	%ecx,%ebx
+	je	.L071cbc_decrypt
+	movaps	%xmm7,%xmm2
+	cmpl	$16,%eax
+	jb	.L072cbc_enc_tail
+	subl	$16,%eax
+	jmp	.L073cbc_enc_loop
+.align	16
+.L073cbc_enc_loop:
+	movups	(%esi),%xmm7
+	leal	16(%esi),%esi
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	xorps	%xmm0,%xmm7
+	leal	32(%edx),%edx
+	xorps	%xmm7,%xmm2
+.L074enc1_loop_15:
+.byte	102,15,56,220,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L074enc1_loop_15
+.byte	102,15,56,221,209
+	movl	%ebx,%ecx
+	movl	%ebp,%edx
+	movups	%xmm2,(%edi)
+	leal	16(%edi),%edi
+	subl	$16,%eax
+	jnc	.L073cbc_enc_loop
+	addl	$16,%eax
+	jnz	.L072cbc_enc_tail
+	movaps	%xmm2,%xmm7
+	jmp	.L075cbc_ret
+.L072cbc_enc_tail:
+	movl	%eax,%ecx
+.long	2767451785
+	movl	$16,%ecx
+	subl	%eax,%ecx
+	xorl	%eax,%eax
+.long	2868115081
+	leal	-16(%edi),%edi
+	movl	%ebx,%ecx
+	movl	%edi,%esi
+	movl	%ebp,%edx
+	jmp	.L073cbc_enc_loop
+.align	16
+.L071cbc_decrypt:
+	cmpl	$80,%eax
+	jbe	.L076cbc_dec_tail
+	movaps	%xmm7,(%esp)
+	subl	$80,%eax
+	jmp	.L077cbc_dec_loop6_enter
+.align	16
+.L078cbc_dec_loop6:
+	movaps	%xmm0,(%esp)
+	movups	%xmm7,(%edi)
+	leal	16(%edi),%edi
+.L077cbc_dec_loop6_enter:
+	movdqu	(%esi),%xmm2
+	movdqu	16(%esi),%xmm3
+	movdqu	32(%esi),%xmm4
+	movdqu	48(%esi),%xmm5
+	movdqu	64(%esi),%xmm6
+	movdqu	80(%esi),%xmm7
+	call	_aesni_decrypt6
+	movups	(%esi),%xmm1
+	movups	16(%esi),%xmm0
+	xorps	(%esp),%xmm2
+	xorps	%xmm1,%xmm3
+	movups	32(%esi),%xmm1
+	xorps	%xmm0,%xmm4
+	movups	48(%esi),%xmm0
+	xorps	%xmm1,%xmm5
+	movups	64(%esi),%xmm1
+	xorps	%xmm0,%xmm6
+	movups	80(%esi),%xmm0
+	xorps	%xmm1,%xmm7
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	leal	96(%esi),%esi
+	movups	%xmm4,32(%edi)
+	movl	%ebx,%ecx
+	movups	%xmm5,48(%edi)
+	movl	%ebp,%edx
+	movups	%xmm6,64(%edi)
+	leal	80(%edi),%edi
+	subl	$96,%eax
+	ja	.L078cbc_dec_loop6
+	movaps	%xmm7,%xmm2
+	movaps	%xmm0,%xmm7
+	addl	$80,%eax
+	jle	.L079cbc_dec_tail_collected
+	movups	%xmm2,(%edi)
+	leal	16(%edi),%edi
+.L076cbc_dec_tail:
+	movups	(%esi),%xmm2
+	movaps	%xmm2,%xmm6
+	cmpl	$16,%eax
+	jbe	.L080cbc_dec_one
+	movups	16(%esi),%xmm3
+	movaps	%xmm3,%xmm5
+	cmpl	$32,%eax
+	jbe	.L081cbc_dec_two
+	movups	32(%esi),%xmm4
+	cmpl	$48,%eax
+	jbe	.L082cbc_dec_three
+	movups	48(%esi),%xmm5
+	cmpl	$64,%eax
+	jbe	.L083cbc_dec_four
+	movups	64(%esi),%xmm6
+	movaps	%xmm7,(%esp)
+	movups	(%esi),%xmm2
+	xorps	%xmm7,%xmm7
+	call	_aesni_decrypt6
+	movups	(%esi),%xmm1
+	movups	16(%esi),%xmm0
+	xorps	(%esp),%xmm2
+	xorps	%xmm1,%xmm3
+	movups	32(%esi),%xmm1
+	xorps	%xmm0,%xmm4
+	movups	48(%esi),%xmm0
+	xorps	%xmm1,%xmm5
+	movups	64(%esi),%xmm7
+	xorps	%xmm0,%xmm6
+	movups	%xmm2,(%edi)
+	movups	%xmm3,16(%edi)
+	movups	%xmm4,32(%edi)
+	movups	%xmm5,48(%edi)
+	leal	64(%edi),%edi
+	movaps	%xmm6,%xmm2
+	subl	$80,%eax
+	jmp	.L079cbc_dec_tail_collected
+.align	16
+.L080cbc_dec_one:
+	movups	(%edx),%xmm0
+	movups	16(%edx),%xmm1
+	leal	32(%edx),%edx
+	xorps	%xmm0,%xmm2
+.L084dec1_loop_16:
+.byte	102,15,56,222,209
+	decl	%ecx
+	movups	(%edx),%xmm1
+	leal	16(%edx),%edx
+	jnz	.L084dec1_loop_16
+.byte	102,15,56,223,209
+	xorps	%xmm7,%xmm2
+	movaps	%xmm6,%xmm7
+	subl	$16,%eax
+	jmp	.L079cbc_dec_tail_collected
+.align	16
+.L081cbc_dec_two:
+	xorps	%xmm4,%xmm4
+	call	_aesni_decrypt3
+	xorps	%xmm7,%xmm2
+	xorps	%xmm6,%xmm3
+	movups	%xmm2,(%edi)
+	movaps	%xmm3,%xmm2
+	leal	16(%edi),%edi
+	movaps	%xmm5,%xmm7
+	subl	$32,%eax
+	jmp	.L079cbc_dec_tail_collected
+.align	16
+.L082cbc_dec_three:
+	call	_aesni_decrypt3
+	xorps	%xmm7,%xmm2
+	xorps	%xmm6,%xmm3
+	xorps	%xmm5,%xmm4
+	movups	%xmm2,(%edi)
+	movaps	%xmm4,%xmm2
+	movups	%xmm3,16(%edi)
+	leal	32(%edi),%edi
+	movups	32(%esi),%xmm7
+	subl	$48,%eax
+	jmp	.L079cbc_dec_tail_collected
+.align	16
+.L083cbc_dec_four:
+	call	_aesni_decrypt4
+	movups	16(%esi),%xmm1
+	movups	32(%esi),%xmm0
+	xorps	%xmm7,%xmm2
+	movups	48(%esi),%xmm7
+	xorps	%xmm6,%xmm3
+	movups	%xmm2,(%edi)
+	xorps	%xmm1,%xmm4
+	movups	%xmm3,16(%edi)
+	xorps	%xmm0,%xmm5
+	movups	%xmm4,32(%edi)
+	leal	48(%edi),%edi
+	movaps	%xmm5,%xmm2
+	subl	$64,%eax
+.L079cbc_dec_tail_collected:
+	andl	$15,%eax
+	jnz	.L085cbc_dec_tail_partial
+	movups	%xmm2,(%edi)
+	jmp	.L075cbc_ret
+.align	16
+.L085cbc_dec_tail_partial:
+	movaps	%xmm2,(%esp)
+	movl	$16,%ecx
+	movl	%esp,%esi
+	subl	%eax,%ecx
+.long	2767451785
+.L075cbc_ret:
+	movl	16(%esp),%esp
+	movl	36(%esp),%ebp
+	movups	%xmm7,(%ebp)
+.L070cbc_abort:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	aesni_cbc_encrypt,.-.L_aesni_cbc_encrypt_begin
+.type	_aesni_set_encrypt_key,@function
+.align	16
+_aesni_set_encrypt_key:
+	testl	%eax,%eax
+	jz	.L086bad_pointer
+	testl	%edx,%edx
+	jz	.L086bad_pointer
+	movups	(%eax),%xmm0
+	xorps	%xmm4,%xmm4
+	leal	16(%edx),%edx
+	cmpl	$256,%ecx
+	je	.L08714rounds
+	cmpl	$192,%ecx
+	je	.L08812rounds
+	cmpl	$128,%ecx
+	jne	.L089bad_keybits
+.align	16
+.L09010rounds:
+	movl	$9,%ecx
+	movups	%xmm0,-16(%edx)
+.byte	102,15,58,223,200,1
+	call	.L091key_128_cold
+.byte	102,15,58,223,200,2
+	call	.L092key_128
+.byte	102,15,58,223,200,4
+	call	.L092key_128
+.byte	102,15,58,223,200,8
+	call	.L092key_128
+.byte	102,15,58,223,200,16
+	call	.L092key_128
+.byte	102,15,58,223,200,32
+	call	.L092key_128
+.byte	102,15,58,223,200,64
+	call	.L092key_128
+.byte	102,15,58,223,200,128
+	call	.L092key_128
+.byte	102,15,58,223,200,27
+	call	.L092key_128
+.byte	102,15,58,223,200,54
+	call	.L092key_128
+	movups	%xmm0,(%edx)
+	movl	%ecx,80(%edx)
+	xorl	%eax,%eax
+	ret
+.align	16
+.L092key_128:
+	movups	%xmm0,(%edx)
+	leal	16(%edx),%edx
+.L091key_128_cold:
+	shufps	$16,%xmm0,%xmm4
+	xorps	%xmm4,%xmm0
+	shufps	$140,%xmm0,%xmm4
+	xorps	%xmm4,%xmm0
+	shufps	$255,%xmm1,%xmm1
+	xorps	%xmm1,%xmm0
+	ret
+.align	16
+.L08812rounds:
+	movq	16(%eax),%xmm2
+	movl	$11,%ecx
+	movups	%xmm0,-16(%edx)
+.byte	102,15,58,223,202,1
+	call	.L093key_192a_cold
+.byte	102,15,58,223,202,2
+	call	.L094key_192b
+.byte	102,15,58,223,202,4
+	call	.L095key_192a
+.byte	102,15,58,223,202,8
+	call	.L094key_192b
+.byte	102,15,58,223,202,16
+	call	.L095key_192a
+.byte	102,15,58,223,202,32
+	call	.L094key_192b
+.byte	102,15,58,223,202,64
+	call	.L095key_192a
+.byte	102,15,58,223,202,128
+	call	.L094key_192b
+	movups	%xmm0,(%edx)
+	movl	%ecx,48(%edx)
+	xorl	%eax,%eax
+	ret
+.align	16
+.L095key_192a:
+	movups	%xmm0,(%edx)
+	leal	16(%edx),%edx
+.align	16
+.L093key_192a_cold:
+	movaps	%xmm2,%xmm5
+.L096key_192b_warm:
+	shufps	$16,%xmm0,%xmm4
+	movdqa	%xmm2,%xmm3
+	xorps	%xmm4,%xmm0
+	shufps	$140,%xmm0,%xmm4
+	pslldq	$4,%xmm3
+	xorps	%xmm4,%xmm0
+	pshufd	$85,%xmm1,%xmm1
+	pxor	%xmm3,%xmm2
+	pxor	%xmm1,%xmm0
+	pshufd	$255,%xmm0,%xmm3
+	pxor	%xmm3,%xmm2
+	ret
+.align	16
+.L094key_192b:
+	movaps	%xmm0,%xmm3
+	shufps	$68,%xmm0,%xmm5
+	movups	%xmm5,(%edx)
+	shufps	$78,%xmm2,%xmm3
+	movups	%xmm3,16(%edx)
+	leal	32(%edx),%edx
+	jmp	.L096key_192b_warm
+.align	16
+.L08714rounds:
+	movups	16(%eax),%xmm2
+	movl	$13,%ecx
+	leal	16(%edx),%edx
+	movups	%xmm0,-32(%edx)
+	movups	%xmm2,-16(%edx)
+.byte	102,15,58,223,202,1
+	call	.L097key_256a_cold
+.byte	102,15,58,223,200,1
+	call	.L098key_256b
+.byte	102,15,58,223,202,2
+	call	.L099key_256a
+.byte	102,15,58,223,200,2
+	call	.L098key_256b
+.byte	102,15,58,223,202,4
+	call	.L099key_256a
+.byte	102,15,58,223,200,4
+	call	.L098key_256b
+.byte	102,15,58,223,202,8
+	call	.L099key_256a
+.byte	102,15,58,223,200,8
+	call	.L098key_256b
+.byte	102,15,58,223,202,16
+	call	.L099key_256a
+.byte	102,15,58,223,200,16
+	call	.L098key_256b
+.byte	102,15,58,223,202,32
+	call	.L099key_256a
+.byte	102,15,58,223,200,32
+	call	.L098key_256b
+.byte	102,15,58,223,202,64
+	call	.L099key_256a
+	movups	%xmm0,(%edx)
+	movl	%ecx,16(%edx)
+	xorl	%eax,%eax
+	ret
+.align	16
+.L099key_256a:
+	movups	%xmm2,(%edx)
+	leal	16(%edx),%edx
+.L097key_256a_cold:
+	shufps	$16,%xmm0,%xmm4
+	xorps	%xmm4,%xmm0
+	shufps	$140,%xmm0,%xmm4
+	xorps	%xmm4,%xmm0
+	shufps	$255,%xmm1,%xmm1
+	xorps	%xmm1,%xmm0
+	ret
+.align	16
+.L098key_256b:
+	movups	%xmm0,(%edx)
+	leal	16(%edx),%edx
+	shufps	$16,%xmm2,%xmm4
+	xorps	%xmm4,%xmm2
+	shufps	$140,%xmm2,%xmm4
+	xorps	%xmm4,%xmm2
+	shufps	$170,%xmm1,%xmm1
+	xorps	%xmm1,%xmm2
+	ret
+.align	4
+.L086bad_pointer:
+	movl	$-1,%eax
+	ret
+.align	4
+.L089bad_keybits:
+	movl	$-2,%eax
+	ret
+.size	_aesni_set_encrypt_key,.-_aesni_set_encrypt_key
+.globl	aesni_set_encrypt_key
+.type	aesni_set_encrypt_key,@function
+.align	16
+aesni_set_encrypt_key:
+.L_aesni_set_encrypt_key_begin:
+	movl	4(%esp),%eax
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edx
+	call	_aesni_set_encrypt_key
+	ret
+.size	aesni_set_encrypt_key,.-.L_aesni_set_encrypt_key_begin
+.globl	aesni_set_decrypt_key
+.type	aesni_set_decrypt_key,@function
+.align	16
+aesni_set_decrypt_key:
+.L_aesni_set_decrypt_key_begin:
+	movl	4(%esp),%eax
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edx
+	call	_aesni_set_encrypt_key
+	movl	12(%esp),%edx
+	shll	$4,%ecx
+	testl	%eax,%eax
+	jnz	.L100dec_key_ret
+	leal	16(%edx,%ecx,1),%eax
+	movups	(%edx),%xmm0
+	movups	(%eax),%xmm1
+	movups	%xmm0,(%eax)
+	movups	%xmm1,(%edx)
+	leal	16(%edx),%edx
+	leal	-16(%eax),%eax
+.L101dec_key_inverse:
+	movups	(%edx),%xmm0
+	movups	(%eax),%xmm1
+.byte	102,15,56,219,192
+.byte	102,15,56,219,201
+	leal	16(%edx),%edx
+	leal	-16(%eax),%eax
+	movups	%xmm0,16(%eax)
+	movups	%xmm1,-16(%edx)
+	cmpl	%edx,%eax
+	ja	.L101dec_key_inverse
+	movups	(%edx),%xmm0
+.byte	102,15,56,219,192
+	movups	%xmm0,(%edx)
+	xorl	%eax,%eax
+.L100dec_key_ret:
+	ret
+.size	aesni_set_decrypt_key,.-.L_aesni_set_decrypt_key_begin
+.byte	65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69
+.byte	83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83
+.byte	32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
+.byte	115,108,46,111,114,103,62,0
+#endif


Property changes on: trunk/secure/lib/libcrypto/i386/aesni-x86.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Deleted: trunk/secure/lib/libcrypto/i386/aesni-x86.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/aesni-x86.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/aesni-x86.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,2144 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/aesni-x86.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"aesni-x86.s"
-.text
-.globl	aesni_encrypt
-.type	aesni_encrypt,@function
-.align	16
-aesni_encrypt:
-.L_aesni_encrypt_begin:
-	movl	4(%esp),%eax
-	movl	12(%esp),%edx
-	movups	(%eax),%xmm2
-	movl	240(%edx),%ecx
-	movl	8(%esp),%eax
-	movups	(%edx),%xmm0
-	movups	16(%edx),%xmm1
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-.L000enc1_loop_1:
-.byte	102,15,56,220,209
-	decl	%ecx
-	movups	(%edx),%xmm1
-	leal	16(%edx),%edx
-	jnz	.L000enc1_loop_1
-.byte	102,15,56,221,209
-	movups	%xmm2,(%eax)
-	ret
-.size	aesni_encrypt,.-.L_aesni_encrypt_begin
-.globl	aesni_decrypt
-.type	aesni_decrypt,@function
-.align	16
-aesni_decrypt:
-.L_aesni_decrypt_begin:
-	movl	4(%esp),%eax
-	movl	12(%esp),%edx
-	movups	(%eax),%xmm2
-	movl	240(%edx),%ecx
-	movl	8(%esp),%eax
-	movups	(%edx),%xmm0
-	movups	16(%edx),%xmm1
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-.L001dec1_loop_2:
-.byte	102,15,56,222,209
-	decl	%ecx
-	movups	(%edx),%xmm1
-	leal	16(%edx),%edx
-	jnz	.L001dec1_loop_2
-.byte	102,15,56,223,209
-	movups	%xmm2,(%eax)
-	ret
-.size	aesni_decrypt,.-.L_aesni_decrypt_begin
-.type	_aesni_encrypt3,@function
-.align	16
-_aesni_encrypt3:
-	movups	(%edx),%xmm0
-	shrl	$1,%ecx
-	movups	16(%edx),%xmm1
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-	pxor	%xmm0,%xmm3
-	pxor	%xmm0,%xmm4
-	movups	(%edx),%xmm0
-.L002enc3_loop:
-.byte	102,15,56,220,209
-.byte	102,15,56,220,217
-	decl	%ecx
-.byte	102,15,56,220,225
-	movups	16(%edx),%xmm1
-.byte	102,15,56,220,208
-.byte	102,15,56,220,216
-	leal	32(%edx),%edx
-.byte	102,15,56,220,224
-	movups	(%edx),%xmm0
-	jnz	.L002enc3_loop
-.byte	102,15,56,220,209
-.byte	102,15,56,220,217
-.byte	102,15,56,220,225
-.byte	102,15,56,221,208
-.byte	102,15,56,221,216
-.byte	102,15,56,221,224
-	ret
-.size	_aesni_encrypt3,.-_aesni_encrypt3
-.type	_aesni_decrypt3,@function
-.align	16
-_aesni_decrypt3:
-	movups	(%edx),%xmm0
-	shrl	$1,%ecx
-	movups	16(%edx),%xmm1
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-	pxor	%xmm0,%xmm3
-	pxor	%xmm0,%xmm4
-	movups	(%edx),%xmm0
-.L003dec3_loop:
-.byte	102,15,56,222,209
-.byte	102,15,56,222,217
-	decl	%ecx
-.byte	102,15,56,222,225
-	movups	16(%edx),%xmm1
-.byte	102,15,56,222,208
-.byte	102,15,56,222,216
-	leal	32(%edx),%edx
-.byte	102,15,56,222,224
-	movups	(%edx),%xmm0
-	jnz	.L003dec3_loop
-.byte	102,15,56,222,209
-.byte	102,15,56,222,217
-.byte	102,15,56,222,225
-.byte	102,15,56,223,208
-.byte	102,15,56,223,216
-.byte	102,15,56,223,224
-	ret
-.size	_aesni_decrypt3,.-_aesni_decrypt3
-.type	_aesni_encrypt4,@function
-.align	16
-_aesni_encrypt4:
-	movups	(%edx),%xmm0
-	movups	16(%edx),%xmm1
-	shrl	$1,%ecx
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-	pxor	%xmm0,%xmm3
-	pxor	%xmm0,%xmm4
-	pxor	%xmm0,%xmm5
-	movups	(%edx),%xmm0
-.L004enc4_loop:
-.byte	102,15,56,220,209
-.byte	102,15,56,220,217
-	decl	%ecx
-.byte	102,15,56,220,225
-.byte	102,15,56,220,233
-	movups	16(%edx),%xmm1
-.byte	102,15,56,220,208
-.byte	102,15,56,220,216
-	leal	32(%edx),%edx
-.byte	102,15,56,220,224
-.byte	102,15,56,220,232
-	movups	(%edx),%xmm0
-	jnz	.L004enc4_loop
-.byte	102,15,56,220,209
-.byte	102,15,56,220,217
-.byte	102,15,56,220,225
-.byte	102,15,56,220,233
-.byte	102,15,56,221,208
-.byte	102,15,56,221,216
-.byte	102,15,56,221,224
-.byte	102,15,56,221,232
-	ret
-.size	_aesni_encrypt4,.-_aesni_encrypt4
-.type	_aesni_decrypt4,@function
-.align	16
-_aesni_decrypt4:
-	movups	(%edx),%xmm0
-	movups	16(%edx),%xmm1
-	shrl	$1,%ecx
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-	pxor	%xmm0,%xmm3
-	pxor	%xmm0,%xmm4
-	pxor	%xmm0,%xmm5
-	movups	(%edx),%xmm0
-.L005dec4_loop:
-.byte	102,15,56,222,209
-.byte	102,15,56,222,217
-	decl	%ecx
-.byte	102,15,56,222,225
-.byte	102,15,56,222,233
-	movups	16(%edx),%xmm1
-.byte	102,15,56,222,208
-.byte	102,15,56,222,216
-	leal	32(%edx),%edx
-.byte	102,15,56,222,224
-.byte	102,15,56,222,232
-	movups	(%edx),%xmm0
-	jnz	.L005dec4_loop
-.byte	102,15,56,222,209
-.byte	102,15,56,222,217
-.byte	102,15,56,222,225
-.byte	102,15,56,222,233
-.byte	102,15,56,223,208
-.byte	102,15,56,223,216
-.byte	102,15,56,223,224
-.byte	102,15,56,223,232
-	ret
-.size	_aesni_decrypt4,.-_aesni_decrypt4
-.type	_aesni_encrypt6,@function
-.align	16
-_aesni_encrypt6:
-	movups	(%edx),%xmm0
-	shrl	$1,%ecx
-	movups	16(%edx),%xmm1
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-	pxor	%xmm0,%xmm3
-.byte	102,15,56,220,209
-	pxor	%xmm0,%xmm4
-.byte	102,15,56,220,217
-	pxor	%xmm0,%xmm5
-	decl	%ecx
-.byte	102,15,56,220,225
-	pxor	%xmm0,%xmm6
-.byte	102,15,56,220,233
-	pxor	%xmm0,%xmm7
-.byte	102,15,56,220,241
-	movups	(%edx),%xmm0
-.byte	102,15,56,220,249
-	jmp	.L_aesni_encrypt6_enter
-.align	16
-.L006enc6_loop:
-.byte	102,15,56,220,209
-.byte	102,15,56,220,217
-	decl	%ecx
-.byte	102,15,56,220,225
-.byte	102,15,56,220,233
-.byte	102,15,56,220,241
-.byte	102,15,56,220,249
-.align	16
-.L_aesni_encrypt6_enter:
-	movups	16(%edx),%xmm1
-.byte	102,15,56,220,208
-.byte	102,15,56,220,216
-	leal	32(%edx),%edx
-.byte	102,15,56,220,224
-.byte	102,15,56,220,232
-.byte	102,15,56,220,240
-.byte	102,15,56,220,248
-	movups	(%edx),%xmm0
-	jnz	.L006enc6_loop
-.byte	102,15,56,220,209
-.byte	102,15,56,220,217
-.byte	102,15,56,220,225
-.byte	102,15,56,220,233
-.byte	102,15,56,220,241
-.byte	102,15,56,220,249
-.byte	102,15,56,221,208
-.byte	102,15,56,221,216
-.byte	102,15,56,221,224
-.byte	102,15,56,221,232
-.byte	102,15,56,221,240
-.byte	102,15,56,221,248
-	ret
-.size	_aesni_encrypt6,.-_aesni_encrypt6
-.type	_aesni_decrypt6,@function
-.align	16
-_aesni_decrypt6:
-	movups	(%edx),%xmm0
-	shrl	$1,%ecx
-	movups	16(%edx),%xmm1
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-	pxor	%xmm0,%xmm3
-.byte	102,15,56,222,209
-	pxor	%xmm0,%xmm4
-.byte	102,15,56,222,217
-	pxor	%xmm0,%xmm5
-	decl	%ecx
-.byte	102,15,56,222,225
-	pxor	%xmm0,%xmm6
-.byte	102,15,56,222,233
-	pxor	%xmm0,%xmm7
-.byte	102,15,56,222,241
-	movups	(%edx),%xmm0
-.byte	102,15,56,222,249
-	jmp	.L_aesni_decrypt6_enter
-.align	16
-.L007dec6_loop:
-.byte	102,15,56,222,209
-.byte	102,15,56,222,217
-	decl	%ecx
-.byte	102,15,56,222,225
-.byte	102,15,56,222,233
-.byte	102,15,56,222,241
-.byte	102,15,56,222,249
-.align	16
-.L_aesni_decrypt6_enter:
-	movups	16(%edx),%xmm1
-.byte	102,15,56,222,208
-.byte	102,15,56,222,216
-	leal	32(%edx),%edx
-.byte	102,15,56,222,224
-.byte	102,15,56,222,232
-.byte	102,15,56,222,240
-.byte	102,15,56,222,248
-	movups	(%edx),%xmm0
-	jnz	.L007dec6_loop
-.byte	102,15,56,222,209
-.byte	102,15,56,222,217
-.byte	102,15,56,222,225
-.byte	102,15,56,222,233
-.byte	102,15,56,222,241
-.byte	102,15,56,222,249
-.byte	102,15,56,223,208
-.byte	102,15,56,223,216
-.byte	102,15,56,223,224
-.byte	102,15,56,223,232
-.byte	102,15,56,223,240
-.byte	102,15,56,223,248
-	ret
-.size	_aesni_decrypt6,.-_aesni_decrypt6
-.globl	aesni_ecb_encrypt
-.type	aesni_ecb_encrypt,@function
-.align	16
-aesni_ecb_encrypt:
-.L_aesni_ecb_encrypt_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%esi
-	movl	24(%esp),%edi
-	movl	28(%esp),%eax
-	movl	32(%esp),%edx
-	movl	36(%esp),%ebx
-	andl	$-16,%eax
-	jz	.L008ecb_ret
-	movl	240(%edx),%ecx
-	testl	%ebx,%ebx
-	jz	.L009ecb_decrypt
-	movl	%edx,%ebp
-	movl	%ecx,%ebx
-	cmpl	$96,%eax
-	jb	.L010ecb_enc_tail
-	movdqu	(%esi),%xmm2
-	movdqu	16(%esi),%xmm3
-	movdqu	32(%esi),%xmm4
-	movdqu	48(%esi),%xmm5
-	movdqu	64(%esi),%xmm6
-	movdqu	80(%esi),%xmm7
-	leal	96(%esi),%esi
-	subl	$96,%eax
-	jmp	.L011ecb_enc_loop6_enter
-.align	16
-.L012ecb_enc_loop6:
-	movups	%xmm2,(%edi)
-	movdqu	(%esi),%xmm2
-	movups	%xmm3,16(%edi)
-	movdqu	16(%esi),%xmm3
-	movups	%xmm4,32(%edi)
-	movdqu	32(%esi),%xmm4
-	movups	%xmm5,48(%edi)
-	movdqu	48(%esi),%xmm5
-	movups	%xmm6,64(%edi)
-	movdqu	64(%esi),%xmm6
-	movups	%xmm7,80(%edi)
-	leal	96(%edi),%edi
-	movdqu	80(%esi),%xmm7
-	leal	96(%esi),%esi
-.L011ecb_enc_loop6_enter:
-	call	_aesni_encrypt6
-	movl	%ebp,%edx
-	movl	%ebx,%ecx
-	subl	$96,%eax
-	jnc	.L012ecb_enc_loop6
-	movups	%xmm2,(%edi)
-	movups	%xmm3,16(%edi)
-	movups	%xmm4,32(%edi)
-	movups	%xmm5,48(%edi)
-	movups	%xmm6,64(%edi)
-	movups	%xmm7,80(%edi)
-	leal	96(%edi),%edi
-	addl	$96,%eax
-	jz	.L008ecb_ret
-.L010ecb_enc_tail:
-	movups	(%esi),%xmm2
-	cmpl	$32,%eax
-	jb	.L013ecb_enc_one
-	movups	16(%esi),%xmm3
-	je	.L014ecb_enc_two
-	movups	32(%esi),%xmm4
-	cmpl	$64,%eax
-	jb	.L015ecb_enc_three
-	movups	48(%esi),%xmm5
-	je	.L016ecb_enc_four
-	movups	64(%esi),%xmm6
-	xorps	%xmm7,%xmm7
-	call	_aesni_encrypt6
-	movups	%xmm2,(%edi)
-	movups	%xmm3,16(%edi)
-	movups	%xmm4,32(%edi)
-	movups	%xmm5,48(%edi)
-	movups	%xmm6,64(%edi)
-	jmp	.L008ecb_ret
-.align	16
-.L013ecb_enc_one:
-	movups	(%edx),%xmm0
-	movups	16(%edx),%xmm1
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-.L017enc1_loop_3:
-.byte	102,15,56,220,209
-	decl	%ecx
-	movups	(%edx),%xmm1
-	leal	16(%edx),%edx
-	jnz	.L017enc1_loop_3
-.byte	102,15,56,221,209
-	movups	%xmm2,(%edi)
-	jmp	.L008ecb_ret
-.align	16
-.L014ecb_enc_two:
-	xorps	%xmm4,%xmm4
-	call	_aesni_encrypt3
-	movups	%xmm2,(%edi)
-	movups	%xmm3,16(%edi)
-	jmp	.L008ecb_ret
-.align	16
-.L015ecb_enc_three:
-	call	_aesni_encrypt3
-	movups	%xmm2,(%edi)
-	movups	%xmm3,16(%edi)
-	movups	%xmm4,32(%edi)
-	jmp	.L008ecb_ret
-.align	16
-.L016ecb_enc_four:
-	call	_aesni_encrypt4
-	movups	%xmm2,(%edi)
-	movups	%xmm3,16(%edi)
-	movups	%xmm4,32(%edi)
-	movups	%xmm5,48(%edi)
-	jmp	.L008ecb_ret
-.align	16
-.L009ecb_decrypt:
-	movl	%edx,%ebp
-	movl	%ecx,%ebx
-	cmpl	$96,%eax
-	jb	.L018ecb_dec_tail
-	movdqu	(%esi),%xmm2
-	movdqu	16(%esi),%xmm3
-	movdqu	32(%esi),%xmm4
-	movdqu	48(%esi),%xmm5
-	movdqu	64(%esi),%xmm6
-	movdqu	80(%esi),%xmm7
-	leal	96(%esi),%esi
-	subl	$96,%eax
-	jmp	.L019ecb_dec_loop6_enter
-.align	16
-.L020ecb_dec_loop6:
-	movups	%xmm2,(%edi)
-	movdqu	(%esi),%xmm2
-	movups	%xmm3,16(%edi)
-	movdqu	16(%esi),%xmm3
-	movups	%xmm4,32(%edi)
-	movdqu	32(%esi),%xmm4
-	movups	%xmm5,48(%edi)
-	movdqu	48(%esi),%xmm5
-	movups	%xmm6,64(%edi)
-	movdqu	64(%esi),%xmm6
-	movups	%xmm7,80(%edi)
-	leal	96(%edi),%edi
-	movdqu	80(%esi),%xmm7
-	leal	96(%esi),%esi
-.L019ecb_dec_loop6_enter:
-	call	_aesni_decrypt6
-	movl	%ebp,%edx
-	movl	%ebx,%ecx
-	subl	$96,%eax
-	jnc	.L020ecb_dec_loop6
-	movups	%xmm2,(%edi)
-	movups	%xmm3,16(%edi)
-	movups	%xmm4,32(%edi)
-	movups	%xmm5,48(%edi)
-	movups	%xmm6,64(%edi)
-	movups	%xmm7,80(%edi)
-	leal	96(%edi),%edi
-	addl	$96,%eax
-	jz	.L008ecb_ret
-.L018ecb_dec_tail:
-	movups	(%esi),%xmm2
-	cmpl	$32,%eax
-	jb	.L021ecb_dec_one
-	movups	16(%esi),%xmm3
-	je	.L022ecb_dec_two
-	movups	32(%esi),%xmm4
-	cmpl	$64,%eax
-	jb	.L023ecb_dec_three
-	movups	48(%esi),%xmm5
-	je	.L024ecb_dec_four
-	movups	64(%esi),%xmm6
-	xorps	%xmm7,%xmm7
-	call	_aesni_decrypt6
-	movups	%xmm2,(%edi)
-	movups	%xmm3,16(%edi)
-	movups	%xmm4,32(%edi)
-	movups	%xmm5,48(%edi)
-	movups	%xmm6,64(%edi)
-	jmp	.L008ecb_ret
-.align	16
-.L021ecb_dec_one:
-	movups	(%edx),%xmm0
-	movups	16(%edx),%xmm1
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-.L025dec1_loop_4:
-.byte	102,15,56,222,209
-	decl	%ecx
-	movups	(%edx),%xmm1
-	leal	16(%edx),%edx
-	jnz	.L025dec1_loop_4
-.byte	102,15,56,223,209
-	movups	%xmm2,(%edi)
-	jmp	.L008ecb_ret
-.align	16
-.L022ecb_dec_two:
-	xorps	%xmm4,%xmm4
-	call	_aesni_decrypt3
-	movups	%xmm2,(%edi)
-	movups	%xmm3,16(%edi)
-	jmp	.L008ecb_ret
-.align	16
-.L023ecb_dec_three:
-	call	_aesni_decrypt3
-	movups	%xmm2,(%edi)
-	movups	%xmm3,16(%edi)
-	movups	%xmm4,32(%edi)
-	jmp	.L008ecb_ret
-.align	16
-.L024ecb_dec_four:
-	call	_aesni_decrypt4
-	movups	%xmm2,(%edi)
-	movups	%xmm3,16(%edi)
-	movups	%xmm4,32(%edi)
-	movups	%xmm5,48(%edi)
-.L008ecb_ret:
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	aesni_ecb_encrypt,.-.L_aesni_ecb_encrypt_begin
-.globl	aesni_ccm64_encrypt_blocks
-.type	aesni_ccm64_encrypt_blocks,@function
-.align	16
-aesni_ccm64_encrypt_blocks:
-.L_aesni_ccm64_encrypt_blocks_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%esi
-	movl	24(%esp),%edi
-	movl	28(%esp),%eax
-	movl	32(%esp),%edx
-	movl	36(%esp),%ebx
-	movl	40(%esp),%ecx
-	movl	%esp,%ebp
-	subl	$60,%esp
-	andl	$-16,%esp
-	movl	%ebp,48(%esp)
-	movdqu	(%ebx),%xmm7
-	movdqu	(%ecx),%xmm3
-	movl	240(%edx),%ecx
-	movl	$202182159,(%esp)
-	movl	$134810123,4(%esp)
-	movl	$67438087,8(%esp)
-	movl	$66051,12(%esp)
-	movl	$1,%ebx
-	xorl	%ebp,%ebp
-	movl	%ebx,16(%esp)
-	movl	%ebp,20(%esp)
-	movl	%ebp,24(%esp)
-	movl	%ebp,28(%esp)
-	shrl	$1,%ecx
-	leal	(%edx),%ebp
-	movdqa	(%esp),%xmm5
-	movdqa	%xmm7,%xmm2
-	movl	%ecx,%ebx
-.byte	102,15,56,0,253
-.L026ccm64_enc_outer:
-	movups	(%ebp),%xmm0
-	movl	%ebx,%ecx
-	movups	(%esi),%xmm6
-	xorps	%xmm0,%xmm2
-	movups	16(%ebp),%xmm1
-	xorps	%xmm6,%xmm0
-	leal	32(%ebp),%edx
-	xorps	%xmm0,%xmm3
-	movups	(%edx),%xmm0
-.L027ccm64_enc2_loop:
-.byte	102,15,56,220,209
-	decl	%ecx
-.byte	102,15,56,220,217
-	movups	16(%edx),%xmm1
-.byte	102,15,56,220,208
-	leal	32(%edx),%edx
-.byte	102,15,56,220,216
-	movups	(%edx),%xmm0
-	jnz	.L027ccm64_enc2_loop
-.byte	102,15,56,220,209
-.byte	102,15,56,220,217
-	paddq	16(%esp),%xmm7
-.byte	102,15,56,221,208
-.byte	102,15,56,221,216
-	decl	%eax
-	leal	16(%esi),%esi
-	xorps	%xmm2,%xmm6
-	movdqa	%xmm7,%xmm2
-	movups	%xmm6,(%edi)
-	leal	16(%edi),%edi
-.byte	102,15,56,0,213
-	jnz	.L026ccm64_enc_outer
-	movl	48(%esp),%esp
-	movl	40(%esp),%edi
-	movups	%xmm3,(%edi)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	aesni_ccm64_encrypt_blocks,.-.L_aesni_ccm64_encrypt_blocks_begin
-.globl	aesni_ccm64_decrypt_blocks
-.type	aesni_ccm64_decrypt_blocks,@function
-.align	16
-aesni_ccm64_decrypt_blocks:
-.L_aesni_ccm64_decrypt_blocks_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%esi
-	movl	24(%esp),%edi
-	movl	28(%esp),%eax
-	movl	32(%esp),%edx
-	movl	36(%esp),%ebx
-	movl	40(%esp),%ecx
-	movl	%esp,%ebp
-	subl	$60,%esp
-	andl	$-16,%esp
-	movl	%ebp,48(%esp)
-	movdqu	(%ebx),%xmm7
-	movdqu	(%ecx),%xmm3
-	movl	240(%edx),%ecx
-	movl	$202182159,(%esp)
-	movl	$134810123,4(%esp)
-	movl	$67438087,8(%esp)
-	movl	$66051,12(%esp)
-	movl	$1,%ebx
-	xorl	%ebp,%ebp
-	movl	%ebx,16(%esp)
-	movl	%ebp,20(%esp)
-	movl	%ebp,24(%esp)
-	movl	%ebp,28(%esp)
-	movdqa	(%esp),%xmm5
-	movdqa	%xmm7,%xmm2
-	movl	%edx,%ebp
-	movl	%ecx,%ebx
-.byte	102,15,56,0,253
-	movups	(%edx),%xmm0
-	movups	16(%edx),%xmm1
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-.L028enc1_loop_5:
-.byte	102,15,56,220,209
-	decl	%ecx
-	movups	(%edx),%xmm1
-	leal	16(%edx),%edx
-	jnz	.L028enc1_loop_5
-.byte	102,15,56,221,209
-	movups	(%esi),%xmm6
-	paddq	16(%esp),%xmm7
-	leal	16(%esi),%esi
-	jmp	.L029ccm64_dec_outer
-.align	16
-.L029ccm64_dec_outer:
-	xorps	%xmm2,%xmm6
-	movdqa	%xmm7,%xmm2
-	movl	%ebx,%ecx
-	movups	%xmm6,(%edi)
-	leal	16(%edi),%edi
-.byte	102,15,56,0,213
-	subl	$1,%eax
-	jz	.L030ccm64_dec_break
-	movups	(%ebp),%xmm0
-	shrl	$1,%ecx
-	movups	16(%ebp),%xmm1
-	xorps	%xmm0,%xmm6
-	leal	32(%ebp),%edx
-	xorps	%xmm0,%xmm2
-	xorps	%xmm6,%xmm3
-	movups	(%edx),%xmm0
-.L031ccm64_dec2_loop:
-.byte	102,15,56,220,209
-	decl	%ecx
-.byte	102,15,56,220,217
-	movups	16(%edx),%xmm1
-.byte	102,15,56,220,208
-	leal	32(%edx),%edx
-.byte	102,15,56,220,216
-	movups	(%edx),%xmm0
-	jnz	.L031ccm64_dec2_loop
-	movups	(%esi),%xmm6
-	paddq	16(%esp),%xmm7
-.byte	102,15,56,220,209
-.byte	102,15,56,220,217
-	leal	16(%esi),%esi
-.byte	102,15,56,221,208
-.byte	102,15,56,221,216
-	jmp	.L029ccm64_dec_outer
-.align	16
-.L030ccm64_dec_break:
-	movl	%ebp,%edx
-	movups	(%edx),%xmm0
-	movups	16(%edx),%xmm1
-	xorps	%xmm0,%xmm6
-	leal	32(%edx),%edx
-	xorps	%xmm6,%xmm3
-.L032enc1_loop_6:
-.byte	102,15,56,220,217
-	decl	%ecx
-	movups	(%edx),%xmm1
-	leal	16(%edx),%edx
-	jnz	.L032enc1_loop_6
-.byte	102,15,56,221,217
-	movl	48(%esp),%esp
-	movl	40(%esp),%edi
-	movups	%xmm3,(%edi)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	aesni_ccm64_decrypt_blocks,.-.L_aesni_ccm64_decrypt_blocks_begin
-.globl	aesni_ctr32_encrypt_blocks
-.type	aesni_ctr32_encrypt_blocks,@function
-.align	16
-aesni_ctr32_encrypt_blocks:
-.L_aesni_ctr32_encrypt_blocks_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%esi
-	movl	24(%esp),%edi
-	movl	28(%esp),%eax
-	movl	32(%esp),%edx
-	movl	36(%esp),%ebx
-	movl	%esp,%ebp
-	subl	$88,%esp
-	andl	$-16,%esp
-	movl	%ebp,80(%esp)
-	cmpl	$1,%eax
-	je	.L033ctr32_one_shortcut
-	movdqu	(%ebx),%xmm7
-	movl	$202182159,(%esp)
-	movl	$134810123,4(%esp)
-	movl	$67438087,8(%esp)
-	movl	$66051,12(%esp)
-	movl	$6,%ecx
-	xorl	%ebp,%ebp
-	movl	%ecx,16(%esp)
-	movl	%ecx,20(%esp)
-	movl	%ecx,24(%esp)
-	movl	%ebp,28(%esp)
-.byte	102,15,58,22,251,3
-.byte	102,15,58,34,253,3
-	movl	240(%edx),%ecx
-	bswap	%ebx
-	pxor	%xmm1,%xmm1
-	pxor	%xmm0,%xmm0
-	movdqa	(%esp),%xmm2
-.byte	102,15,58,34,203,0
-	leal	3(%ebx),%ebp
-.byte	102,15,58,34,197,0
-	incl	%ebx
-.byte	102,15,58,34,203,1
-	incl	%ebp
-.byte	102,15,58,34,197,1
-	incl	%ebx
-.byte	102,15,58,34,203,2
-	incl	%ebp
-.byte	102,15,58,34,197,2
-	movdqa	%xmm1,48(%esp)
-.byte	102,15,56,0,202
-	movdqa	%xmm0,64(%esp)
-.byte	102,15,56,0,194
-	pshufd	$192,%xmm1,%xmm2
-	pshufd	$128,%xmm1,%xmm3
-	cmpl	$6,%eax
-	jb	.L034ctr32_tail
-	movdqa	%xmm7,32(%esp)
-	shrl	$1,%ecx
-	movl	%edx,%ebp
-	movl	%ecx,%ebx
-	subl	$6,%eax
-	jmp	.L035ctr32_loop6
-.align	16
-.L035ctr32_loop6:
-	pshufd	$64,%xmm1,%xmm4
-	movdqa	32(%esp),%xmm1
-	pshufd	$192,%xmm0,%xmm5
-	por	%xmm1,%xmm2
-	pshufd	$128,%xmm0,%xmm6
-	por	%xmm1,%xmm3
-	pshufd	$64,%xmm0,%xmm7
-	por	%xmm1,%xmm4
-	por	%xmm1,%xmm5
-	por	%xmm1,%xmm6
-	por	%xmm1,%xmm7
-	movups	(%ebp),%xmm0
-	movups	16(%ebp),%xmm1
-	leal	32(%ebp),%edx
-	decl	%ecx
-	pxor	%xmm0,%xmm2
-	pxor	%xmm0,%xmm3
-.byte	102,15,56,220,209
-	pxor	%xmm0,%xmm4
-.byte	102,15,56,220,217
-	pxor	%xmm0,%xmm5
-.byte	102,15,56,220,225
-	pxor	%xmm0,%xmm6
-.byte	102,15,56,220,233
-	pxor	%xmm0,%xmm7
-.byte	102,15,56,220,241
-	movups	(%edx),%xmm0
-.byte	102,15,56,220,249
-	call	.L_aesni_encrypt6_enter
-	movups	(%esi),%xmm1
-	movups	16(%esi),%xmm0
-	xorps	%xmm1,%xmm2
-	movups	32(%esi),%xmm1
-	xorps	%xmm0,%xmm3
-	movups	%xmm2,(%edi)
-	movdqa	16(%esp),%xmm0
-	xorps	%xmm1,%xmm4
-	movdqa	48(%esp),%xmm1
-	movups	%xmm3,16(%edi)
-	movups	%xmm4,32(%edi)
-	paddd	%xmm0,%xmm1
-	paddd	64(%esp),%xmm0
-	movdqa	(%esp),%xmm2
-	movups	48(%esi),%xmm3
-	movups	64(%esi),%xmm4
-	xorps	%xmm3,%xmm5
-	movups	80(%esi),%xmm3
-	leal	96(%esi),%esi
-	movdqa	%xmm1,48(%esp)
-.byte	102,15,56,0,202
-	xorps	%xmm4,%xmm6
-	movups	%xmm5,48(%edi)
-	xorps	%xmm3,%xmm7
-	movdqa	%xmm0,64(%esp)
-.byte	102,15,56,0,194
-	movups	%xmm6,64(%edi)
-	pshufd	$192,%xmm1,%xmm2
-	movups	%xmm7,80(%edi)
-	leal	96(%edi),%edi
-	movl	%ebx,%ecx
-	pshufd	$128,%xmm1,%xmm3
-	subl	$6,%eax
-	jnc	.L035ctr32_loop6
-	addl	$6,%eax
-	jz	.L036ctr32_ret
-	movl	%ebp,%edx
-	leal	1(,%ecx,2),%ecx
-	movdqa	32(%esp),%xmm7
-.L034ctr32_tail:
-	por	%xmm7,%xmm2
-	cmpl	$2,%eax
-	jb	.L037ctr32_one
-	pshufd	$64,%xmm1,%xmm4
-	por	%xmm7,%xmm3
-	je	.L038ctr32_two
-	pshufd	$192,%xmm0,%xmm5
-	por	%xmm7,%xmm4
-	cmpl	$4,%eax
-	jb	.L039ctr32_three
-	pshufd	$128,%xmm0,%xmm6
-	por	%xmm7,%xmm5
-	je	.L040ctr32_four
-	por	%xmm7,%xmm6
-	call	_aesni_encrypt6
-	movups	(%esi),%xmm1
-	movups	16(%esi),%xmm0
-	xorps	%xmm1,%xmm2
-	movups	32(%esi),%xmm1
-	xorps	%xmm0,%xmm3
-	movups	48(%esi),%xmm0
-	xorps	%xmm1,%xmm4
-	movups	64(%esi),%xmm1
-	xorps	%xmm0,%xmm5
-	movups	%xmm2,(%edi)
-	xorps	%xmm1,%xmm6
-	movups	%xmm3,16(%edi)
-	movups	%xmm4,32(%edi)
-	movups	%xmm5,48(%edi)
-	movups	%xmm6,64(%edi)
-	jmp	.L036ctr32_ret
-.align	16
-.L033ctr32_one_shortcut:
-	movups	(%ebx),%xmm2
-	movl	240(%edx),%ecx
-.L037ctr32_one:
-	movups	(%edx),%xmm0
-	movups	16(%edx),%xmm1
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-.L041enc1_loop_7:
-.byte	102,15,56,220,209
-	decl	%ecx
-	movups	(%edx),%xmm1
-	leal	16(%edx),%edx
-	jnz	.L041enc1_loop_7
-.byte	102,15,56,221,209
-	movups	(%esi),%xmm6
-	xorps	%xmm2,%xmm6
-	movups	%xmm6,(%edi)
-	jmp	.L036ctr32_ret
-.align	16
-.L038ctr32_two:
-	call	_aesni_encrypt3
-	movups	(%esi),%xmm5
-	movups	16(%esi),%xmm6
-	xorps	%xmm5,%xmm2
-	xorps	%xmm6,%xmm3
-	movups	%xmm2,(%edi)
-	movups	%xmm3,16(%edi)
-	jmp	.L036ctr32_ret
-.align	16
-.L039ctr32_three:
-	call	_aesni_encrypt3
-	movups	(%esi),%xmm5
-	movups	16(%esi),%xmm6
-	xorps	%xmm5,%xmm2
-	movups	32(%esi),%xmm7
-	xorps	%xmm6,%xmm3
-	movups	%xmm2,(%edi)
-	xorps	%xmm7,%xmm4
-	movups	%xmm3,16(%edi)
-	movups	%xmm4,32(%edi)
-	jmp	.L036ctr32_ret
-.align	16
-.L040ctr32_four:
-	call	_aesni_encrypt4
-	movups	(%esi),%xmm6
-	movups	16(%esi),%xmm7
-	movups	32(%esi),%xmm1
-	xorps	%xmm6,%xmm2
-	movups	48(%esi),%xmm0
-	xorps	%xmm7,%xmm3
-	movups	%xmm2,(%edi)
-	xorps	%xmm1,%xmm4
-	movups	%xmm3,16(%edi)
-	xorps	%xmm0,%xmm5
-	movups	%xmm4,32(%edi)
-	movups	%xmm5,48(%edi)
-.L036ctr32_ret:
-	movl	80(%esp),%esp
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	aesni_ctr32_encrypt_blocks,.-.L_aesni_ctr32_encrypt_blocks_begin
-.globl	aesni_xts_encrypt
-.type	aesni_xts_encrypt,@function
-.align	16
-aesni_xts_encrypt:
-.L_aesni_xts_encrypt_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	36(%esp),%edx
-	movl	40(%esp),%esi
-	movl	240(%edx),%ecx
-	movups	(%esi),%xmm2
-	movups	(%edx),%xmm0
-	movups	16(%edx),%xmm1
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-.L042enc1_loop_8:
-.byte	102,15,56,220,209
-	decl	%ecx
-	movups	(%edx),%xmm1
-	leal	16(%edx),%edx
-	jnz	.L042enc1_loop_8
-.byte	102,15,56,221,209
-	movl	20(%esp),%esi
-	movl	24(%esp),%edi
-	movl	28(%esp),%eax
-	movl	32(%esp),%edx
-	movl	%esp,%ebp
-	subl	$120,%esp
-	movl	240(%edx),%ecx
-	andl	$-16,%esp
-	movl	$135,96(%esp)
-	movl	$0,100(%esp)
-	movl	$1,104(%esp)
-	movl	$0,108(%esp)
-	movl	%eax,112(%esp)
-	movl	%ebp,116(%esp)
-	movdqa	%xmm2,%xmm1
-	pxor	%xmm0,%xmm0
-	movdqa	96(%esp),%xmm3
-	pcmpgtd	%xmm1,%xmm0
-	andl	$-16,%eax
-	movl	%edx,%ebp
-	movl	%ecx,%ebx
-	subl	$96,%eax
-	jc	.L043xts_enc_short
-	shrl	$1,%ecx
-	movl	%ecx,%ebx
-	jmp	.L044xts_enc_loop6
-.align	16
-.L044xts_enc_loop6:
-	pshufd	$19,%xmm0,%xmm2
-	pxor	%xmm0,%xmm0
-	movdqa	%xmm1,(%esp)
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm2
-	pcmpgtd	%xmm1,%xmm0
-	pxor	%xmm2,%xmm1
-	pshufd	$19,%xmm0,%xmm2
-	pxor	%xmm0,%xmm0
-	movdqa	%xmm1,16(%esp)
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm2
-	pcmpgtd	%xmm1,%xmm0
-	pxor	%xmm2,%xmm1
-	pshufd	$19,%xmm0,%xmm2
-	pxor	%xmm0,%xmm0
-	movdqa	%xmm1,32(%esp)
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm2
-	pcmpgtd	%xmm1,%xmm0
-	pxor	%xmm2,%xmm1
-	pshufd	$19,%xmm0,%xmm2
-	pxor	%xmm0,%xmm0
-	movdqa	%xmm1,48(%esp)
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm2
-	pcmpgtd	%xmm1,%xmm0
-	pxor	%xmm2,%xmm1
-	pshufd	$19,%xmm0,%xmm7
-	movdqa	%xmm1,64(%esp)
-	paddq	%xmm1,%xmm1
-	movups	(%ebp),%xmm0
-	pand	%xmm3,%xmm7
-	movups	(%esi),%xmm2
-	pxor	%xmm1,%xmm7
-	movdqu	16(%esi),%xmm3
-	xorps	%xmm0,%xmm2
-	movdqu	32(%esi),%xmm4
-	pxor	%xmm0,%xmm3
-	movdqu	48(%esi),%xmm5
-	pxor	%xmm0,%xmm4
-	movdqu	64(%esi),%xmm6
-	pxor	%xmm0,%xmm5
-	movdqu	80(%esi),%xmm1
-	pxor	%xmm0,%xmm6
-	leal	96(%esi),%esi
-	pxor	(%esp),%xmm2
-	movdqa	%xmm7,80(%esp)
-	pxor	%xmm1,%xmm7
-	movups	16(%ebp),%xmm1
-	leal	32(%ebp),%edx
-	pxor	16(%esp),%xmm3
-.byte	102,15,56,220,209
-	pxor	32(%esp),%xmm4
-.byte	102,15,56,220,217
-	pxor	48(%esp),%xmm5
-	decl	%ecx
-.byte	102,15,56,220,225
-	pxor	64(%esp),%xmm6
-.byte	102,15,56,220,233
-	pxor	%xmm0,%xmm7
-.byte	102,15,56,220,241
-	movups	(%edx),%xmm0
-.byte	102,15,56,220,249
-	call	.L_aesni_encrypt6_enter
-	movdqa	80(%esp),%xmm1
-	pxor	%xmm0,%xmm0
-	xorps	(%esp),%xmm2
-	pcmpgtd	%xmm1,%xmm0
-	xorps	16(%esp),%xmm3
-	movups	%xmm2,(%edi)
-	xorps	32(%esp),%xmm4
-	movups	%xmm3,16(%edi)
-	xorps	48(%esp),%xmm5
-	movups	%xmm4,32(%edi)
-	xorps	64(%esp),%xmm6
-	movups	%xmm5,48(%edi)
-	xorps	%xmm1,%xmm7
-	movups	%xmm6,64(%edi)
-	pshufd	$19,%xmm0,%xmm2
-	movups	%xmm7,80(%edi)
-	leal	96(%edi),%edi
-	movdqa	96(%esp),%xmm3
-	pxor	%xmm0,%xmm0
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm2
-	pcmpgtd	%xmm1,%xmm0
-	movl	%ebx,%ecx
-	pxor	%xmm2,%xmm1
-	subl	$96,%eax
-	jnc	.L044xts_enc_loop6
-	leal	1(,%ecx,2),%ecx
-	movl	%ebp,%edx
-	movl	%ecx,%ebx
-.L043xts_enc_short:
-	addl	$96,%eax
-	jz	.L045xts_enc_done6x
-	movdqa	%xmm1,%xmm5
-	cmpl	$32,%eax
-	jb	.L046xts_enc_one
-	pshufd	$19,%xmm0,%xmm2
-	pxor	%xmm0,%xmm0
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm2
-	pcmpgtd	%xmm1,%xmm0
-	pxor	%xmm2,%xmm1
-	je	.L047xts_enc_two
-	pshufd	$19,%xmm0,%xmm2
-	pxor	%xmm0,%xmm0
-	movdqa	%xmm1,%xmm6
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm2
-	pcmpgtd	%xmm1,%xmm0
-	pxor	%xmm2,%xmm1
-	cmpl	$64,%eax
-	jb	.L048xts_enc_three
-	pshufd	$19,%xmm0,%xmm2
-	pxor	%xmm0,%xmm0
-	movdqa	%xmm1,%xmm7
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm2
-	pcmpgtd	%xmm1,%xmm0
-	pxor	%xmm2,%xmm1
-	movdqa	%xmm5,(%esp)
-	movdqa	%xmm6,16(%esp)
-	je	.L049xts_enc_four
-	movdqa	%xmm7,32(%esp)
-	pshufd	$19,%xmm0,%xmm7
-	movdqa	%xmm1,48(%esp)
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm7
-	pxor	%xmm1,%xmm7
-	movdqu	(%esi),%xmm2
-	movdqu	16(%esi),%xmm3
-	movdqu	32(%esi),%xmm4
-	pxor	(%esp),%xmm2
-	movdqu	48(%esi),%xmm5
-	pxor	16(%esp),%xmm3
-	movdqu	64(%esi),%xmm6
-	pxor	32(%esp),%xmm4
-	leal	80(%esi),%esi
-	pxor	48(%esp),%xmm5
-	movdqa	%xmm7,64(%esp)
-	pxor	%xmm7,%xmm6
-	call	_aesni_encrypt6
-	movaps	64(%esp),%xmm1
-	xorps	(%esp),%xmm2
-	xorps	16(%esp),%xmm3
-	xorps	32(%esp),%xmm4
-	movups	%xmm2,(%edi)
-	xorps	48(%esp),%xmm5
-	movups	%xmm3,16(%edi)
-	xorps	%xmm1,%xmm6
-	movups	%xmm4,32(%edi)
-	movups	%xmm5,48(%edi)
-	movups	%xmm6,64(%edi)
-	leal	80(%edi),%edi
-	jmp	.L050xts_enc_done
-.align	16
-.L046xts_enc_one:
-	movups	(%esi),%xmm2
-	leal	16(%esi),%esi
-	xorps	%xmm5,%xmm2
-	movups	(%edx),%xmm0
-	movups	16(%edx),%xmm1
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-.L051enc1_loop_9:
-.byte	102,15,56,220,209
-	decl	%ecx
-	movups	(%edx),%xmm1
-	leal	16(%edx),%edx
-	jnz	.L051enc1_loop_9
-.byte	102,15,56,221,209
-	xorps	%xmm5,%xmm2
-	movups	%xmm2,(%edi)
-	leal	16(%edi),%edi
-	movdqa	%xmm5,%xmm1
-	jmp	.L050xts_enc_done
-.align	16
-.L047xts_enc_two:
-	movaps	%xmm1,%xmm6
-	movups	(%esi),%xmm2
-	movups	16(%esi),%xmm3
-	leal	32(%esi),%esi
-	xorps	%xmm5,%xmm2
-	xorps	%xmm6,%xmm3
-	xorps	%xmm4,%xmm4
-	call	_aesni_encrypt3
-	xorps	%xmm5,%xmm2
-	xorps	%xmm6,%xmm3
-	movups	%xmm2,(%edi)
-	movups	%xmm3,16(%edi)
-	leal	32(%edi),%edi
-	movdqa	%xmm6,%xmm1
-	jmp	.L050xts_enc_done
-.align	16
-.L048xts_enc_three:
-	movaps	%xmm1,%xmm7
-	movups	(%esi),%xmm2
-	movups	16(%esi),%xmm3
-	movups	32(%esi),%xmm4
-	leal	48(%esi),%esi
-	xorps	%xmm5,%xmm2
-	xorps	%xmm6,%xmm3
-	xorps	%xmm7,%xmm4
-	call	_aesni_encrypt3
-	xorps	%xmm5,%xmm2
-	xorps	%xmm6,%xmm3
-	xorps	%xmm7,%xmm4
-	movups	%xmm2,(%edi)
-	movups	%xmm3,16(%edi)
-	movups	%xmm4,32(%edi)
-	leal	48(%edi),%edi
-	movdqa	%xmm7,%xmm1
-	jmp	.L050xts_enc_done
-.align	16
-.L049xts_enc_four:
-	movaps	%xmm1,%xmm6
-	movups	(%esi),%xmm2
-	movups	16(%esi),%xmm3
-	movups	32(%esi),%xmm4
-	xorps	(%esp),%xmm2
-	movups	48(%esi),%xmm5
-	leal	64(%esi),%esi
-	xorps	16(%esp),%xmm3
-	xorps	%xmm7,%xmm4
-	xorps	%xmm6,%xmm5
-	call	_aesni_encrypt4
-	xorps	(%esp),%xmm2
-	xorps	16(%esp),%xmm3
-	xorps	%xmm7,%xmm4
-	movups	%xmm2,(%edi)
-	xorps	%xmm6,%xmm5
-	movups	%xmm3,16(%edi)
-	movups	%xmm4,32(%edi)
-	movups	%xmm5,48(%edi)
-	leal	64(%edi),%edi
-	movdqa	%xmm6,%xmm1
-	jmp	.L050xts_enc_done
-.align	16
-.L045xts_enc_done6x:
-	movl	112(%esp),%eax
-	andl	$15,%eax
-	jz	.L052xts_enc_ret
-	movdqa	%xmm1,%xmm5
-	movl	%eax,112(%esp)
-	jmp	.L053xts_enc_steal
-.align	16
-.L050xts_enc_done:
-	movl	112(%esp),%eax
-	pxor	%xmm0,%xmm0
-	andl	$15,%eax
-	jz	.L052xts_enc_ret
-	pcmpgtd	%xmm1,%xmm0
-	movl	%eax,112(%esp)
-	pshufd	$19,%xmm0,%xmm5
-	paddq	%xmm1,%xmm1
-	pand	96(%esp),%xmm5
-	pxor	%xmm1,%xmm5
-.L053xts_enc_steal:
-	movzbl	(%esi),%ecx
-	movzbl	-16(%edi),%edx
-	leal	1(%esi),%esi
-	movb	%cl,-16(%edi)
-	movb	%dl,(%edi)
-	leal	1(%edi),%edi
-	subl	$1,%eax
-	jnz	.L053xts_enc_steal
-	subl	112(%esp),%edi
-	movl	%ebp,%edx
-	movl	%ebx,%ecx
-	movups	-16(%edi),%xmm2
-	xorps	%xmm5,%xmm2
-	movups	(%edx),%xmm0
-	movups	16(%edx),%xmm1
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-.L054enc1_loop_10:
-.byte	102,15,56,220,209
-	decl	%ecx
-	movups	(%edx),%xmm1
-	leal	16(%edx),%edx
-	jnz	.L054enc1_loop_10
-.byte	102,15,56,221,209
-	xorps	%xmm5,%xmm2
-	movups	%xmm2,-16(%edi)
-.L052xts_enc_ret:
-	movl	116(%esp),%esp
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	aesni_xts_encrypt,.-.L_aesni_xts_encrypt_begin
-.globl	aesni_xts_decrypt
-.type	aesni_xts_decrypt,@function
-.align	16
-aesni_xts_decrypt:
-.L_aesni_xts_decrypt_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	36(%esp),%edx
-	movl	40(%esp),%esi
-	movl	240(%edx),%ecx
-	movups	(%esi),%xmm2
-	movups	(%edx),%xmm0
-	movups	16(%edx),%xmm1
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-.L055enc1_loop_11:
-.byte	102,15,56,220,209
-	decl	%ecx
-	movups	(%edx),%xmm1
-	leal	16(%edx),%edx
-	jnz	.L055enc1_loop_11
-.byte	102,15,56,221,209
-	movl	20(%esp),%esi
-	movl	24(%esp),%edi
-	movl	28(%esp),%eax
-	movl	32(%esp),%edx
-	movl	%esp,%ebp
-	subl	$120,%esp
-	andl	$-16,%esp
-	xorl	%ebx,%ebx
-	testl	$15,%eax
-	setnz	%bl
-	shll	$4,%ebx
-	subl	%ebx,%eax
-	movl	$135,96(%esp)
-	movl	$0,100(%esp)
-	movl	$1,104(%esp)
-	movl	$0,108(%esp)
-	movl	%eax,112(%esp)
-	movl	%ebp,116(%esp)
-	movl	240(%edx),%ecx
-	movl	%edx,%ebp
-	movl	%ecx,%ebx
-	movdqa	%xmm2,%xmm1
-	pxor	%xmm0,%xmm0
-	movdqa	96(%esp),%xmm3
-	pcmpgtd	%xmm1,%xmm0
-	andl	$-16,%eax
-	subl	$96,%eax
-	jc	.L056xts_dec_short
-	shrl	$1,%ecx
-	movl	%ecx,%ebx
-	jmp	.L057xts_dec_loop6
-.align	16
-.L057xts_dec_loop6:
-	pshufd	$19,%xmm0,%xmm2
-	pxor	%xmm0,%xmm0
-	movdqa	%xmm1,(%esp)
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm2
-	pcmpgtd	%xmm1,%xmm0
-	pxor	%xmm2,%xmm1
-	pshufd	$19,%xmm0,%xmm2
-	pxor	%xmm0,%xmm0
-	movdqa	%xmm1,16(%esp)
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm2
-	pcmpgtd	%xmm1,%xmm0
-	pxor	%xmm2,%xmm1
-	pshufd	$19,%xmm0,%xmm2
-	pxor	%xmm0,%xmm0
-	movdqa	%xmm1,32(%esp)
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm2
-	pcmpgtd	%xmm1,%xmm0
-	pxor	%xmm2,%xmm1
-	pshufd	$19,%xmm0,%xmm2
-	pxor	%xmm0,%xmm0
-	movdqa	%xmm1,48(%esp)
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm2
-	pcmpgtd	%xmm1,%xmm0
-	pxor	%xmm2,%xmm1
-	pshufd	$19,%xmm0,%xmm7
-	movdqa	%xmm1,64(%esp)
-	paddq	%xmm1,%xmm1
-	movups	(%ebp),%xmm0
-	pand	%xmm3,%xmm7
-	movups	(%esi),%xmm2
-	pxor	%xmm1,%xmm7
-	movdqu	16(%esi),%xmm3
-	xorps	%xmm0,%xmm2
-	movdqu	32(%esi),%xmm4
-	pxor	%xmm0,%xmm3
-	movdqu	48(%esi),%xmm5
-	pxor	%xmm0,%xmm4
-	movdqu	64(%esi),%xmm6
-	pxor	%xmm0,%xmm5
-	movdqu	80(%esi),%xmm1
-	pxor	%xmm0,%xmm6
-	leal	96(%esi),%esi
-	pxor	(%esp),%xmm2
-	movdqa	%xmm7,80(%esp)
-	pxor	%xmm1,%xmm7
-	movups	16(%ebp),%xmm1
-	leal	32(%ebp),%edx
-	pxor	16(%esp),%xmm3
-.byte	102,15,56,222,209
-	pxor	32(%esp),%xmm4
-.byte	102,15,56,222,217
-	pxor	48(%esp),%xmm5
-	decl	%ecx
-.byte	102,15,56,222,225
-	pxor	64(%esp),%xmm6
-.byte	102,15,56,222,233
-	pxor	%xmm0,%xmm7
-.byte	102,15,56,222,241
-	movups	(%edx),%xmm0
-.byte	102,15,56,222,249
-	call	.L_aesni_decrypt6_enter
-	movdqa	80(%esp),%xmm1
-	pxor	%xmm0,%xmm0
-	xorps	(%esp),%xmm2
-	pcmpgtd	%xmm1,%xmm0
-	xorps	16(%esp),%xmm3
-	movups	%xmm2,(%edi)
-	xorps	32(%esp),%xmm4
-	movups	%xmm3,16(%edi)
-	xorps	48(%esp),%xmm5
-	movups	%xmm4,32(%edi)
-	xorps	64(%esp),%xmm6
-	movups	%xmm5,48(%edi)
-	xorps	%xmm1,%xmm7
-	movups	%xmm6,64(%edi)
-	pshufd	$19,%xmm0,%xmm2
-	movups	%xmm7,80(%edi)
-	leal	96(%edi),%edi
-	movdqa	96(%esp),%xmm3
-	pxor	%xmm0,%xmm0
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm2
-	pcmpgtd	%xmm1,%xmm0
-	movl	%ebx,%ecx
-	pxor	%xmm2,%xmm1
-	subl	$96,%eax
-	jnc	.L057xts_dec_loop6
-	leal	1(,%ecx,2),%ecx
-	movl	%ebp,%edx
-	movl	%ecx,%ebx
-.L056xts_dec_short:
-	addl	$96,%eax
-	jz	.L058xts_dec_done6x
-	movdqa	%xmm1,%xmm5
-	cmpl	$32,%eax
-	jb	.L059xts_dec_one
-	pshufd	$19,%xmm0,%xmm2
-	pxor	%xmm0,%xmm0
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm2
-	pcmpgtd	%xmm1,%xmm0
-	pxor	%xmm2,%xmm1
-	je	.L060xts_dec_two
-	pshufd	$19,%xmm0,%xmm2
-	pxor	%xmm0,%xmm0
-	movdqa	%xmm1,%xmm6
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm2
-	pcmpgtd	%xmm1,%xmm0
-	pxor	%xmm2,%xmm1
-	cmpl	$64,%eax
-	jb	.L061xts_dec_three
-	pshufd	$19,%xmm0,%xmm2
-	pxor	%xmm0,%xmm0
-	movdqa	%xmm1,%xmm7
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm2
-	pcmpgtd	%xmm1,%xmm0
-	pxor	%xmm2,%xmm1
-	movdqa	%xmm5,(%esp)
-	movdqa	%xmm6,16(%esp)
-	je	.L062xts_dec_four
-	movdqa	%xmm7,32(%esp)
-	pshufd	$19,%xmm0,%xmm7
-	movdqa	%xmm1,48(%esp)
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm7
-	pxor	%xmm1,%xmm7
-	movdqu	(%esi),%xmm2
-	movdqu	16(%esi),%xmm3
-	movdqu	32(%esi),%xmm4
-	pxor	(%esp),%xmm2
-	movdqu	48(%esi),%xmm5
-	pxor	16(%esp),%xmm3
-	movdqu	64(%esi),%xmm6
-	pxor	32(%esp),%xmm4
-	leal	80(%esi),%esi
-	pxor	48(%esp),%xmm5
-	movdqa	%xmm7,64(%esp)
-	pxor	%xmm7,%xmm6
-	call	_aesni_decrypt6
-	movaps	64(%esp),%xmm1
-	xorps	(%esp),%xmm2
-	xorps	16(%esp),%xmm3
-	xorps	32(%esp),%xmm4
-	movups	%xmm2,(%edi)
-	xorps	48(%esp),%xmm5
-	movups	%xmm3,16(%edi)
-	xorps	%xmm1,%xmm6
-	movups	%xmm4,32(%edi)
-	movups	%xmm5,48(%edi)
-	movups	%xmm6,64(%edi)
-	leal	80(%edi),%edi
-	jmp	.L063xts_dec_done
-.align	16
-.L059xts_dec_one:
-	movups	(%esi),%xmm2
-	leal	16(%esi),%esi
-	xorps	%xmm5,%xmm2
-	movups	(%edx),%xmm0
-	movups	16(%edx),%xmm1
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-.L064dec1_loop_12:
-.byte	102,15,56,222,209
-	decl	%ecx
-	movups	(%edx),%xmm1
-	leal	16(%edx),%edx
-	jnz	.L064dec1_loop_12
-.byte	102,15,56,223,209
-	xorps	%xmm5,%xmm2
-	movups	%xmm2,(%edi)
-	leal	16(%edi),%edi
-	movdqa	%xmm5,%xmm1
-	jmp	.L063xts_dec_done
-.align	16
-.L060xts_dec_two:
-	movaps	%xmm1,%xmm6
-	movups	(%esi),%xmm2
-	movups	16(%esi),%xmm3
-	leal	32(%esi),%esi
-	xorps	%xmm5,%xmm2
-	xorps	%xmm6,%xmm3
-	call	_aesni_decrypt3
-	xorps	%xmm5,%xmm2
-	xorps	%xmm6,%xmm3
-	movups	%xmm2,(%edi)
-	movups	%xmm3,16(%edi)
-	leal	32(%edi),%edi
-	movdqa	%xmm6,%xmm1
-	jmp	.L063xts_dec_done
-.align	16
-.L061xts_dec_three:
-	movaps	%xmm1,%xmm7
-	movups	(%esi),%xmm2
-	movups	16(%esi),%xmm3
-	movups	32(%esi),%xmm4
-	leal	48(%esi),%esi
-	xorps	%xmm5,%xmm2
-	xorps	%xmm6,%xmm3
-	xorps	%xmm7,%xmm4
-	call	_aesni_decrypt3
-	xorps	%xmm5,%xmm2
-	xorps	%xmm6,%xmm3
-	xorps	%xmm7,%xmm4
-	movups	%xmm2,(%edi)
-	movups	%xmm3,16(%edi)
-	movups	%xmm4,32(%edi)
-	leal	48(%edi),%edi
-	movdqa	%xmm7,%xmm1
-	jmp	.L063xts_dec_done
-.align	16
-.L062xts_dec_four:
-	movaps	%xmm1,%xmm6
-	movups	(%esi),%xmm2
-	movups	16(%esi),%xmm3
-	movups	32(%esi),%xmm4
-	xorps	(%esp),%xmm2
-	movups	48(%esi),%xmm5
-	leal	64(%esi),%esi
-	xorps	16(%esp),%xmm3
-	xorps	%xmm7,%xmm4
-	xorps	%xmm6,%xmm5
-	call	_aesni_decrypt4
-	xorps	(%esp),%xmm2
-	xorps	16(%esp),%xmm3
-	xorps	%xmm7,%xmm4
-	movups	%xmm2,(%edi)
-	xorps	%xmm6,%xmm5
-	movups	%xmm3,16(%edi)
-	movups	%xmm4,32(%edi)
-	movups	%xmm5,48(%edi)
-	leal	64(%edi),%edi
-	movdqa	%xmm6,%xmm1
-	jmp	.L063xts_dec_done
-.align	16
-.L058xts_dec_done6x:
-	movl	112(%esp),%eax
-	andl	$15,%eax
-	jz	.L065xts_dec_ret
-	movl	%eax,112(%esp)
-	jmp	.L066xts_dec_only_one_more
-.align	16
-.L063xts_dec_done:
-	movl	112(%esp),%eax
-	pxor	%xmm0,%xmm0
-	andl	$15,%eax
-	jz	.L065xts_dec_ret
-	pcmpgtd	%xmm1,%xmm0
-	movl	%eax,112(%esp)
-	pshufd	$19,%xmm0,%xmm2
-	pxor	%xmm0,%xmm0
-	movdqa	96(%esp),%xmm3
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm2
-	pcmpgtd	%xmm1,%xmm0
-	pxor	%xmm2,%xmm1
-.L066xts_dec_only_one_more:
-	pshufd	$19,%xmm0,%xmm5
-	movdqa	%xmm1,%xmm6
-	paddq	%xmm1,%xmm1
-	pand	%xmm3,%xmm5
-	pxor	%xmm1,%xmm5
-	movl	%ebp,%edx
-	movl	%ebx,%ecx
-	movups	(%esi),%xmm2
-	xorps	%xmm5,%xmm2
-	movups	(%edx),%xmm0
-	movups	16(%edx),%xmm1
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-.L067dec1_loop_13:
-.byte	102,15,56,222,209
-	decl	%ecx
-	movups	(%edx),%xmm1
-	leal	16(%edx),%edx
-	jnz	.L067dec1_loop_13
-.byte	102,15,56,223,209
-	xorps	%xmm5,%xmm2
-	movups	%xmm2,(%edi)
-.L068xts_dec_steal:
-	movzbl	16(%esi),%ecx
-	movzbl	(%edi),%edx
-	leal	1(%esi),%esi
-	movb	%cl,(%edi)
-	movb	%dl,16(%edi)
-	leal	1(%edi),%edi
-	subl	$1,%eax
-	jnz	.L068xts_dec_steal
-	subl	112(%esp),%edi
-	movl	%ebp,%edx
-	movl	%ebx,%ecx
-	movups	(%edi),%xmm2
-	xorps	%xmm6,%xmm2
-	movups	(%edx),%xmm0
-	movups	16(%edx),%xmm1
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-.L069dec1_loop_14:
-.byte	102,15,56,222,209
-	decl	%ecx
-	movups	(%edx),%xmm1
-	leal	16(%edx),%edx
-	jnz	.L069dec1_loop_14
-.byte	102,15,56,223,209
-	xorps	%xmm6,%xmm2
-	movups	%xmm2,(%edi)
-.L065xts_dec_ret:
-	movl	116(%esp),%esp
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	aesni_xts_decrypt,.-.L_aesni_xts_decrypt_begin
-.globl	aesni_cbc_encrypt
-.type	aesni_cbc_encrypt,@function
-.align	16
-aesni_cbc_encrypt:
-.L_aesni_cbc_encrypt_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%esi
-	movl	%esp,%ebx
-	movl	24(%esp),%edi
-	subl	$24,%ebx
-	movl	28(%esp),%eax
-	andl	$-16,%ebx
-	movl	32(%esp),%edx
-	movl	36(%esp),%ebp
-	testl	%eax,%eax
-	jz	.L070cbc_abort
-	cmpl	$0,40(%esp)
-	xchgl	%esp,%ebx
-	movups	(%ebp),%xmm7
-	movl	240(%edx),%ecx
-	movl	%edx,%ebp
-	movl	%ebx,16(%esp)
-	movl	%ecx,%ebx
-	je	.L071cbc_decrypt
-	movaps	%xmm7,%xmm2
-	cmpl	$16,%eax
-	jb	.L072cbc_enc_tail
-	subl	$16,%eax
-	jmp	.L073cbc_enc_loop
-.align	16
-.L073cbc_enc_loop:
-	movups	(%esi),%xmm7
-	leal	16(%esi),%esi
-	movups	(%edx),%xmm0
-	movups	16(%edx),%xmm1
-	xorps	%xmm0,%xmm7
-	leal	32(%edx),%edx
-	xorps	%xmm7,%xmm2
-.L074enc1_loop_15:
-.byte	102,15,56,220,209
-	decl	%ecx
-	movups	(%edx),%xmm1
-	leal	16(%edx),%edx
-	jnz	.L074enc1_loop_15
-.byte	102,15,56,221,209
-	movl	%ebx,%ecx
-	movl	%ebp,%edx
-	movups	%xmm2,(%edi)
-	leal	16(%edi),%edi
-	subl	$16,%eax
-	jnc	.L073cbc_enc_loop
-	addl	$16,%eax
-	jnz	.L072cbc_enc_tail
-	movaps	%xmm2,%xmm7
-	jmp	.L075cbc_ret
-.L072cbc_enc_tail:
-	movl	%eax,%ecx
-.long	2767451785
-	movl	$16,%ecx
-	subl	%eax,%ecx
-	xorl	%eax,%eax
-.long	2868115081
-	leal	-16(%edi),%edi
-	movl	%ebx,%ecx
-	movl	%edi,%esi
-	movl	%ebp,%edx
-	jmp	.L073cbc_enc_loop
-.align	16
-.L071cbc_decrypt:
-	cmpl	$80,%eax
-	jbe	.L076cbc_dec_tail
-	movaps	%xmm7,(%esp)
-	subl	$80,%eax
-	jmp	.L077cbc_dec_loop6_enter
-.align	16
-.L078cbc_dec_loop6:
-	movaps	%xmm0,(%esp)
-	movups	%xmm7,(%edi)
-	leal	16(%edi),%edi
-.L077cbc_dec_loop6_enter:
-	movdqu	(%esi),%xmm2
-	movdqu	16(%esi),%xmm3
-	movdqu	32(%esi),%xmm4
-	movdqu	48(%esi),%xmm5
-	movdqu	64(%esi),%xmm6
-	movdqu	80(%esi),%xmm7
-	call	_aesni_decrypt6
-	movups	(%esi),%xmm1
-	movups	16(%esi),%xmm0
-	xorps	(%esp),%xmm2
-	xorps	%xmm1,%xmm3
-	movups	32(%esi),%xmm1
-	xorps	%xmm0,%xmm4
-	movups	48(%esi),%xmm0
-	xorps	%xmm1,%xmm5
-	movups	64(%esi),%xmm1
-	xorps	%xmm0,%xmm6
-	movups	80(%esi),%xmm0
-	xorps	%xmm1,%xmm7
-	movups	%xmm2,(%edi)
-	movups	%xmm3,16(%edi)
-	leal	96(%esi),%esi
-	movups	%xmm4,32(%edi)
-	movl	%ebx,%ecx
-	movups	%xmm5,48(%edi)
-	movl	%ebp,%edx
-	movups	%xmm6,64(%edi)
-	leal	80(%edi),%edi
-	subl	$96,%eax
-	ja	.L078cbc_dec_loop6
-	movaps	%xmm7,%xmm2
-	movaps	%xmm0,%xmm7
-	addl	$80,%eax
-	jle	.L079cbc_dec_tail_collected
-	movups	%xmm2,(%edi)
-	leal	16(%edi),%edi
-.L076cbc_dec_tail:
-	movups	(%esi),%xmm2
-	movaps	%xmm2,%xmm6
-	cmpl	$16,%eax
-	jbe	.L080cbc_dec_one
-	movups	16(%esi),%xmm3
-	movaps	%xmm3,%xmm5
-	cmpl	$32,%eax
-	jbe	.L081cbc_dec_two
-	movups	32(%esi),%xmm4
-	cmpl	$48,%eax
-	jbe	.L082cbc_dec_three
-	movups	48(%esi),%xmm5
-	cmpl	$64,%eax
-	jbe	.L083cbc_dec_four
-	movups	64(%esi),%xmm6
-	movaps	%xmm7,(%esp)
-	movups	(%esi),%xmm2
-	xorps	%xmm7,%xmm7
-	call	_aesni_decrypt6
-	movups	(%esi),%xmm1
-	movups	16(%esi),%xmm0
-	xorps	(%esp),%xmm2
-	xorps	%xmm1,%xmm3
-	movups	32(%esi),%xmm1
-	xorps	%xmm0,%xmm4
-	movups	48(%esi),%xmm0
-	xorps	%xmm1,%xmm5
-	movups	64(%esi),%xmm7
-	xorps	%xmm0,%xmm6
-	movups	%xmm2,(%edi)
-	movups	%xmm3,16(%edi)
-	movups	%xmm4,32(%edi)
-	movups	%xmm5,48(%edi)
-	leal	64(%edi),%edi
-	movaps	%xmm6,%xmm2
-	subl	$80,%eax
-	jmp	.L079cbc_dec_tail_collected
-.align	16
-.L080cbc_dec_one:
-	movups	(%edx),%xmm0
-	movups	16(%edx),%xmm1
-	leal	32(%edx),%edx
-	xorps	%xmm0,%xmm2
-.L084dec1_loop_16:
-.byte	102,15,56,222,209
-	decl	%ecx
-	movups	(%edx),%xmm1
-	leal	16(%edx),%edx
-	jnz	.L084dec1_loop_16
-.byte	102,15,56,223,209
-	xorps	%xmm7,%xmm2
-	movaps	%xmm6,%xmm7
-	subl	$16,%eax
-	jmp	.L079cbc_dec_tail_collected
-.align	16
-.L081cbc_dec_two:
-	xorps	%xmm4,%xmm4
-	call	_aesni_decrypt3
-	xorps	%xmm7,%xmm2
-	xorps	%xmm6,%xmm3
-	movups	%xmm2,(%edi)
-	movaps	%xmm3,%xmm2
-	leal	16(%edi),%edi
-	movaps	%xmm5,%xmm7
-	subl	$32,%eax
-	jmp	.L079cbc_dec_tail_collected
-.align	16
-.L082cbc_dec_three:
-	call	_aesni_decrypt3
-	xorps	%xmm7,%xmm2
-	xorps	%xmm6,%xmm3
-	xorps	%xmm5,%xmm4
-	movups	%xmm2,(%edi)
-	movaps	%xmm4,%xmm2
-	movups	%xmm3,16(%edi)
-	leal	32(%edi),%edi
-	movups	32(%esi),%xmm7
-	subl	$48,%eax
-	jmp	.L079cbc_dec_tail_collected
-.align	16
-.L083cbc_dec_four:
-	call	_aesni_decrypt4
-	movups	16(%esi),%xmm1
-	movups	32(%esi),%xmm0
-	xorps	%xmm7,%xmm2
-	movups	48(%esi),%xmm7
-	xorps	%xmm6,%xmm3
-	movups	%xmm2,(%edi)
-	xorps	%xmm1,%xmm4
-	movups	%xmm3,16(%edi)
-	xorps	%xmm0,%xmm5
-	movups	%xmm4,32(%edi)
-	leal	48(%edi),%edi
-	movaps	%xmm5,%xmm2
-	subl	$64,%eax
-.L079cbc_dec_tail_collected:
-	andl	$15,%eax
-	jnz	.L085cbc_dec_tail_partial
-	movups	%xmm2,(%edi)
-	jmp	.L075cbc_ret
-.align	16
-.L085cbc_dec_tail_partial:
-	movaps	%xmm2,(%esp)
-	movl	$16,%ecx
-	movl	%esp,%esi
-	subl	%eax,%ecx
-.long	2767451785
-.L075cbc_ret:
-	movl	16(%esp),%esp
-	movl	36(%esp),%ebp
-	movups	%xmm7,(%ebp)
-.L070cbc_abort:
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	aesni_cbc_encrypt,.-.L_aesni_cbc_encrypt_begin
-.type	_aesni_set_encrypt_key,@function
-.align	16
-_aesni_set_encrypt_key:
-	testl	%eax,%eax
-	jz	.L086bad_pointer
-	testl	%edx,%edx
-	jz	.L086bad_pointer
-	movups	(%eax),%xmm0
-	xorps	%xmm4,%xmm4
-	leal	16(%edx),%edx
-	cmpl	$256,%ecx
-	je	.L08714rounds
-	cmpl	$192,%ecx
-	je	.L08812rounds
-	cmpl	$128,%ecx
-	jne	.L089bad_keybits
-.align	16
-.L09010rounds:
-	movl	$9,%ecx
-	movups	%xmm0,-16(%edx)
-.byte	102,15,58,223,200,1
-	call	.L091key_128_cold
-.byte	102,15,58,223,200,2
-	call	.L092key_128
-.byte	102,15,58,223,200,4
-	call	.L092key_128
-.byte	102,15,58,223,200,8
-	call	.L092key_128
-.byte	102,15,58,223,200,16
-	call	.L092key_128
-.byte	102,15,58,223,200,32
-	call	.L092key_128
-.byte	102,15,58,223,200,64
-	call	.L092key_128
-.byte	102,15,58,223,200,128
-	call	.L092key_128
-.byte	102,15,58,223,200,27
-	call	.L092key_128
-.byte	102,15,58,223,200,54
-	call	.L092key_128
-	movups	%xmm0,(%edx)
-	movl	%ecx,80(%edx)
-	xorl	%eax,%eax
-	ret
-.align	16
-.L092key_128:
-	movups	%xmm0,(%edx)
-	leal	16(%edx),%edx
-.L091key_128_cold:
-	shufps	$16,%xmm0,%xmm4
-	xorps	%xmm4,%xmm0
-	shufps	$140,%xmm0,%xmm4
-	xorps	%xmm4,%xmm0
-	shufps	$255,%xmm1,%xmm1
-	xorps	%xmm1,%xmm0
-	ret
-.align	16
-.L08812rounds:
-	movq	16(%eax),%xmm2
-	movl	$11,%ecx
-	movups	%xmm0,-16(%edx)
-.byte	102,15,58,223,202,1
-	call	.L093key_192a_cold
-.byte	102,15,58,223,202,2
-	call	.L094key_192b
-.byte	102,15,58,223,202,4
-	call	.L095key_192a
-.byte	102,15,58,223,202,8
-	call	.L094key_192b
-.byte	102,15,58,223,202,16
-	call	.L095key_192a
-.byte	102,15,58,223,202,32
-	call	.L094key_192b
-.byte	102,15,58,223,202,64
-	call	.L095key_192a
-.byte	102,15,58,223,202,128
-	call	.L094key_192b
-	movups	%xmm0,(%edx)
-	movl	%ecx,48(%edx)
-	xorl	%eax,%eax
-	ret
-.align	16
-.L095key_192a:
-	movups	%xmm0,(%edx)
-	leal	16(%edx),%edx
-.align	16
-.L093key_192a_cold:
-	movaps	%xmm2,%xmm5
-.L096key_192b_warm:
-	shufps	$16,%xmm0,%xmm4
-	movdqa	%xmm2,%xmm3
-	xorps	%xmm4,%xmm0
-	shufps	$140,%xmm0,%xmm4
-	pslldq	$4,%xmm3
-	xorps	%xmm4,%xmm0
-	pshufd	$85,%xmm1,%xmm1
-	pxor	%xmm3,%xmm2
-	pxor	%xmm1,%xmm0
-	pshufd	$255,%xmm0,%xmm3
-	pxor	%xmm3,%xmm2
-	ret
-.align	16
-.L094key_192b:
-	movaps	%xmm0,%xmm3
-	shufps	$68,%xmm0,%xmm5
-	movups	%xmm5,(%edx)
-	shufps	$78,%xmm2,%xmm3
-	movups	%xmm3,16(%edx)
-	leal	32(%edx),%edx
-	jmp	.L096key_192b_warm
-.align	16
-.L08714rounds:
-	movups	16(%eax),%xmm2
-	movl	$13,%ecx
-	leal	16(%edx),%edx
-	movups	%xmm0,-32(%edx)
-	movups	%xmm2,-16(%edx)
-.byte	102,15,58,223,202,1
-	call	.L097key_256a_cold
-.byte	102,15,58,223,200,1
-	call	.L098key_256b
-.byte	102,15,58,223,202,2
-	call	.L099key_256a
-.byte	102,15,58,223,200,2
-	call	.L098key_256b
-.byte	102,15,58,223,202,4
-	call	.L099key_256a
-.byte	102,15,58,223,200,4
-	call	.L098key_256b
-.byte	102,15,58,223,202,8
-	call	.L099key_256a
-.byte	102,15,58,223,200,8
-	call	.L098key_256b
-.byte	102,15,58,223,202,16
-	call	.L099key_256a
-.byte	102,15,58,223,200,16
-	call	.L098key_256b
-.byte	102,15,58,223,202,32
-	call	.L099key_256a
-.byte	102,15,58,223,200,32
-	call	.L098key_256b
-.byte	102,15,58,223,202,64
-	call	.L099key_256a
-	movups	%xmm0,(%edx)
-	movl	%ecx,16(%edx)
-	xorl	%eax,%eax
-	ret
-.align	16
-.L099key_256a:
-	movups	%xmm2,(%edx)
-	leal	16(%edx),%edx
-.L097key_256a_cold:
-	shufps	$16,%xmm0,%xmm4
-	xorps	%xmm4,%xmm0
-	shufps	$140,%xmm0,%xmm4
-	xorps	%xmm4,%xmm0
-	shufps	$255,%xmm1,%xmm1
-	xorps	%xmm1,%xmm0
-	ret
-.align	16
-.L098key_256b:
-	movups	%xmm0,(%edx)
-	leal	16(%edx),%edx
-	shufps	$16,%xmm2,%xmm4
-	xorps	%xmm4,%xmm2
-	shufps	$140,%xmm2,%xmm4
-	xorps	%xmm4,%xmm2
-	shufps	$170,%xmm1,%xmm1
-	xorps	%xmm1,%xmm2
-	ret
-.align	4
-.L086bad_pointer:
-	movl	$-1,%eax
-	ret
-.align	4
-.L089bad_keybits:
-	movl	$-2,%eax
-	ret
-.size	_aesni_set_encrypt_key,.-_aesni_set_encrypt_key
-.globl	aesni_set_encrypt_key
-.type	aesni_set_encrypt_key,@function
-.align	16
-aesni_set_encrypt_key:
-.L_aesni_set_encrypt_key_begin:
-	movl	4(%esp),%eax
-	movl	8(%esp),%ecx
-	movl	12(%esp),%edx
-	call	_aesni_set_encrypt_key
-	ret
-.size	aesni_set_encrypt_key,.-.L_aesni_set_encrypt_key_begin
-.globl	aesni_set_decrypt_key
-.type	aesni_set_decrypt_key,@function
-.align	16
-aesni_set_decrypt_key:
-.L_aesni_set_decrypt_key_begin:
-	movl	4(%esp),%eax
-	movl	8(%esp),%ecx
-	movl	12(%esp),%edx
-	call	_aesni_set_encrypt_key
-	movl	12(%esp),%edx
-	shll	$4,%ecx
-	testl	%eax,%eax
-	jnz	.L100dec_key_ret
-	leal	16(%edx,%ecx,1),%eax
-	movups	(%edx),%xmm0
-	movups	(%eax),%xmm1
-	movups	%xmm0,(%eax)
-	movups	%xmm1,(%edx)
-	leal	16(%edx),%edx
-	leal	-16(%eax),%eax
-.L101dec_key_inverse:
-	movups	(%edx),%xmm0
-	movups	(%eax),%xmm1
-.byte	102,15,56,219,192
-.byte	102,15,56,219,201
-	leal	16(%edx),%edx
-	leal	-16(%eax),%eax
-	movups	%xmm0,16(%eax)
-	movups	%xmm1,-16(%edx)
-	cmpl	%edx,%eax
-	ja	.L101dec_key_inverse
-	movups	(%edx),%xmm0
-.byte	102,15,56,219,192
-	movups	%xmm0,(%edx)
-	xorl	%eax,%eax
-.L100dec_key_ret:
-	ret
-.size	aesni_set_decrypt_key,.-.L_aesni_set_decrypt_key_begin
-.byte	65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69
-.byte	83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83
-.byte	32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
-.byte	115,108,46,111,114,103,62,0

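The pshufd/pcmpgtd/paddq/pand/pxor groups that recur through the XTS paths
above compute the per-block tweak update: the 128-bit tweak is doubled in
GF(2^128) and, when the top bit shifts out, folded back with the polynomial
x^128 + x^7 + x^2 + x + 1 (hence the $135, i.e. 0x87, kept at 96(%esp)).
The steal loops at the end of each path are standard XTS ciphertext
stealing for a trailing partial block. A minimal C sketch of the tweak
update, under a name that is illustrative rather than part of the
libcrypto API:

    #include <stdint.h>

    /* Double a 128-bit XTS tweak stored little-endian, as in the SSE
     * registers above; 0x87 matches the $135 constant at 96(%esp). */
    static void xts_tweak_double(uint8_t t[16])
    {
        uint8_t carry = t[15] >> 7;            /* bit leaving x^127 */
        for (int i = 15; i > 0; i--)
            t[i] = (uint8_t)((t[i] << 1) | (t[i - 1] >> 7));
        t[0] = (uint8_t)(t[0] << 1);
        if (carry)
            t[0] ^= 0x87;                      /* x^7 + x^2 + x + 1 */
    }

The SSE form is branch-free: pcmpgtd against zero turns the sign bits of
the tweak (the bits about to shift out) into an all-ones mask, pshufd $19
moves the mask into place, and pand with the constant block at 96(%esp)
({0x87,0,1,0}: 0x87 folds the carry out of the top bit, the 1 carries the
low qword into the high one) selects the correction applied by the final
pxor.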
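Two byte-encoding idioms in this file are worth decoding. The
.byte 102,15,58,223,... sequences are aeskeygenassist (66 0F 3A DF) with
its round constant, and the .byte 102,15,56,2xx,... lines are
aesenc/aesenclast/aesdec/aesdeclast/aesimc, spelled out numerically so the
file still assembles with toolchains that predate the AES-NI mnemonics;
likewise .long 2767451785 and .long 2868115081 in the CBC tail are
rep movsb and rep stosb behind a two-byte mov %esi,%esi pad. The 10-round
schedule built from those aeskeygenassist calls is the usual sliding-xor
expansion; a hedged intrinsics sketch of one step (the helper name is
illustrative):

    #include <emmintrin.h>
    #include <wmmintrin.h>   /* AES-NI intrinsics; build with -maes */

    /* One AES-128 expansion step, mirroring .L091key_128_cold: the
     * shufps pair in the assembly accumulates the running xor of the
     * previous round key's words; the keygenassist word is then
     * broadcast and folded in. */
    static __m128i aes128_expand_step(__m128i key, __m128i kga)
    {
        __m128i t = _mm_slli_si128(key, 4);
        key = _mm_xor_si128(key, t);          /* w ^ (w << 32)   */
        t = _mm_slli_si128(t, 4);
        key = _mm_xor_si128(key, t);          /* ... ^ (w << 64) */
        t = _mm_slli_si128(t, 4);
        key = _mm_xor_si128(key, t);          /* ... ^ (w << 96) */
        kga = _mm_shuffle_epi32(kga, 0xff);   /* SubWord(RotWord)^rcon */
        return _mm_xor_si128(key, kga);
    }

Used as, e.g., k1 = aes128_expand_step(k0,
_mm_aeskeygenassist_si128(k0, 1)), with the round constant doubling each
step and wrapping to 0x1b, 0x36 -- the same 1,2,4,...,128,27,54 sequence
as the .byte immediates above.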
Added: trunk/secure/lib/libcrypto/i386/bf-586.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/bf-586.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/bf-586.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,1798 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/bf-586.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from bf-586.pl.
+#ifdef PIC
+.file	"bf-586.S"
+.text
+.globl	BF_encrypt
+.type	BF_encrypt,@function
+.align	16
+BF_encrypt:
+.L_BF_encrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	movl	12(%esp),%ebx
+	movl	16(%esp),%ebp
+	pushl	%esi
+	pushl	%edi
+
+	movl	(%ebx),%edi
+	movl	4(%ebx),%esi
+	xorl	%eax,%eax
+	movl	(%ebp),%ebx
+	xorl	%ecx,%ecx
+	xorl	%ebx,%edi
+
+
+	movl	4(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	8(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	12(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	16(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	20(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	24(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	28(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	32(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	36(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	40(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	44(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	48(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	52(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	56(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	60(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	64(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+
+	movl	20(%esp),%eax
+	xorl	%ebx,%edi
+	movl	68(%ebp),%edx
+	xorl	%edx,%esi
+	movl	%edi,4(%eax)
+	movl	%esi,(%eax)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	BF_encrypt,.-.L_BF_encrypt_begin
+.globl	BF_decrypt
+.type	BF_decrypt,@function
+.align	16
+BF_decrypt:
+.L_BF_decrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	movl	12(%esp),%ebx
+	movl	16(%esp),%ebp
+	pushl	%esi
+	pushl	%edi
+
+	movl	(%ebx),%edi
+	movl	4(%ebx),%esi
+	xorl	%eax,%eax
+	movl	68(%ebp),%ebx
+	xorl	%ecx,%ecx
+	xorl	%ebx,%edi
+
+
+	movl	64(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	60(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	56(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	52(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	48(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	44(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	40(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	36(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	32(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	28(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	24(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	20(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	16(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	12(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	8(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	4(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+
+	movl	20(%esp),%eax
+	xorl	%ebx,%edi
+	movl	(%ebp),%edx
+	xorl	%edx,%esi
+	movl	%edi,4(%eax)
+	movl	%esi,(%eax)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	BF_decrypt,.-.L_BF_decrypt_begin
+.globl	BF_cbc_encrypt
+.type	BF_cbc_encrypt,@function
+.align	16
+BF_cbc_encrypt:
+.L_BF_cbc_encrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%ebp
+
+	movl	36(%esp),%ebx
+	movl	(%ebx),%esi
+	movl	4(%ebx),%edi
+	pushl	%edi
+	pushl	%esi
+	pushl	%edi
+	pushl	%esi
+	movl	%esp,%ebx
+	movl	36(%esp),%esi
+	movl	40(%esp),%edi
+
+	movl	56(%esp),%ecx
+
+	movl	48(%esp),%eax
+	pushl	%eax
+	pushl	%ebx
+	cmpl	$0,%ecx
+	jz	.L000decrypt
+	andl	$4294967288,%ebp
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	jz	.L001encrypt_finish
+.L002encrypt_loop:
+	movl	(%esi),%ecx
+	movl	4(%esi),%edx
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_BF_encrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L002encrypt_loop
+.L001encrypt_finish:
+	movl	52(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L003finish
+	call	.L004PIC_point
+.L004PIC_point:
+	popl	%edx
+	leal	.L005cbc_enc_jmp_table-.L004PIC_point(%edx),%ecx
+	movl	(%ecx,%ebp,4),%ebp
+	addl	%edx,%ebp
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+	jmp	*%ebp
+.L006ej7:
+	movb	6(%esi),%dh
+	shll	$8,%edx
+.L007ej6:
+	movb	5(%esi),%dh
+.L008ej5:
+	movb	4(%esi),%dl
+.L009ej4:
+	movl	(%esi),%ecx
+	jmp	.L010ejend
+.L011ej3:
+	movb	2(%esi),%ch
+	shll	$8,%ecx
+.L012ej2:
+	movb	1(%esi),%ch
+.L013ej1:
+	movb	(%esi),%cl
+.L010ejend:
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_BF_encrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	jmp	.L003finish
+.L000decrypt:
+	andl	$4294967288,%ebp
+	movl	16(%esp),%eax
+	movl	20(%esp),%ebx
+	jz	.L014decrypt_finish
+.L015decrypt_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_BF_decrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	16(%esp),%ecx
+	movl	20(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%ecx,(%edi)
+	movl	%edx,4(%edi)
+	movl	%eax,16(%esp)
+	movl	%ebx,20(%esp)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L015decrypt_loop
+.L014decrypt_finish:
+	movl	52(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L003finish
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_BF_decrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	16(%esp),%ecx
+	movl	20(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+.L016dj7:
+	rorl	$16,%edx
+	movb	%dl,6(%edi)
+	shrl	$16,%edx
+.L017dj6:
+	movb	%dh,5(%edi)
+.L018dj5:
+	movb	%dl,4(%edi)
+.L019dj4:
+	movl	%ecx,(%edi)
+	jmp	.L020djend
+.L021dj3:
+	rorl	$16,%ecx
+	movb	%cl,2(%edi)
+	shll	$16,%ecx
+.L022dj2:
+	movb	%ch,1(%esi)
+.L023dj1:
+	movb	%cl,(%esi)
+.L020djend:
+	jmp	.L003finish
+.L003finish:
+	movl	60(%esp),%ecx
+	addl	$24,%esp
+	movl	%eax,(%ecx)
+	movl	%ebx,4(%ecx)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	64
+.L005cbc_enc_jmp_table:
+.long	0
+.long	.L013ej1-.L004PIC_point
+.long	.L012ej2-.L004PIC_point
+.long	.L011ej3-.L004PIC_point
+.long	.L009ej4-.L004PIC_point
+.long	.L008ej5-.L004PIC_point
+.long	.L007ej6-.L004PIC_point
+.long	.L006ej7-.L004PIC_point
+.align	64
+.size	BF_cbc_encrypt,.-.L_BF_cbc_encrypt_begin
+#else
+.file	"bf-586.S"
+.text
+.globl	BF_encrypt
+.type	BF_encrypt,@function
+.align	16
+BF_encrypt:
+.L_BF_encrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	movl	12(%esp),%ebx
+	movl	16(%esp),%ebp
+	pushl	%esi
+	pushl	%edi
+
+	movl	(%ebx),%edi
+	movl	4(%ebx),%esi
+	xorl	%eax,%eax
+	movl	(%ebp),%ebx
+	xorl	%ecx,%ecx
+	xorl	%ebx,%edi
+
+
+	movl	4(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	8(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	12(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	16(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	20(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	24(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	28(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	32(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	36(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	40(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	44(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	48(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	52(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	56(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	60(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	64(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+
+	movl	20(%esp),%eax
+	xorl	%ebx,%edi
+	movl	68(%ebp),%edx
+	xorl	%edx,%esi
+	movl	%edi,4(%eax)
+	movl	%esi,(%eax)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	BF_encrypt,.-.L_BF_encrypt_begin
+.globl	BF_decrypt
+.type	BF_decrypt,@function
+.align	16
+BF_decrypt:
+.L_BF_decrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	movl	12(%esp),%ebx
+	movl	16(%esp),%ebp
+	pushl	%esi
+	pushl	%edi
+
+	movl	(%ebx),%edi
+	movl	4(%ebx),%esi
+	xorl	%eax,%eax
+	movl	68(%ebp),%ebx
+	xorl	%ecx,%ecx
+	xorl	%ebx,%edi
+
+
+	movl	64(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	60(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	56(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	52(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	48(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	44(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	40(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	36(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	32(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	28(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	24(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	20(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	16(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	12(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%edi
+
+
+	movl	8(%ebp),%edx
+	movl	%edi,%ebx
+	xorl	%edx,%esi
+	shrl	$16,%ebx
+	movl	%edi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+	xorl	%eax,%eax
+	xorl	%ebx,%esi
+
+
+	movl	4(%ebp),%edx
+	movl	%esi,%ebx
+	xorl	%edx,%edi
+	shrl	$16,%ebx
+	movl	%esi,%edx
+	movb	%bh,%al
+	andl	$255,%ebx
+	movb	%dh,%cl
+	andl	$255,%edx
+	movl	72(%ebp,%eax,4),%eax
+	movl	1096(%ebp,%ebx,4),%ebx
+	addl	%eax,%ebx
+	movl	2120(%ebp,%ecx,4),%eax
+	xorl	%eax,%ebx
+	movl	3144(%ebp,%edx,4),%edx
+	addl	%edx,%ebx
+
+	movl	20(%esp),%eax
+	xorl	%ebx,%edi
+	movl	(%ebp),%edx
+	xorl	%edx,%esi
+	movl	%edi,4(%eax)
+	movl	%esi,(%eax)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	BF_decrypt,.-.L_BF_decrypt_begin
+.globl	BF_cbc_encrypt
+.type	BF_cbc_encrypt,@function
+.align	16
+BF_cbc_encrypt:
+.L_BF_cbc_encrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%ebp
+
+	movl	36(%esp),%ebx
+	movl	(%ebx),%esi
+	movl	4(%ebx),%edi
+	pushl	%edi
+	pushl	%esi
+	pushl	%edi
+	pushl	%esi
+	movl	%esp,%ebx
+	movl	36(%esp),%esi
+	movl	40(%esp),%edi
+
+	movl	56(%esp),%ecx
+
+	movl	48(%esp),%eax
+	pushl	%eax
+	pushl	%ebx
+	cmpl	$0,%ecx
+	jz	.L000decrypt
+	andl	$4294967288,%ebp
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	jz	.L001encrypt_finish
+.L002encrypt_loop:
+	movl	(%esi),%ecx
+	movl	4(%esi),%edx
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_BF_encrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L002encrypt_loop
+.L001encrypt_finish:
+	movl	52(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L003finish
+	call	.L004PIC_point
+.L004PIC_point:
+	popl	%edx
+	leal	.L005cbc_enc_jmp_table-.L004PIC_point(%edx),%ecx
+	movl	(%ecx,%ebp,4),%ebp
+	addl	%edx,%ebp
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+	jmp	*%ebp
+.L006ej7:
+	movb	6(%esi),%dh
+	shll	$8,%edx
+.L007ej6:
+	movb	5(%esi),%dh
+.L008ej5:
+	movb	4(%esi),%dl
+.L009ej4:
+	movl	(%esi),%ecx
+	jmp	.L010ejend
+.L011ej3:
+	movb	2(%esi),%ch
+	shll	$8,%ecx
+.L012ej2:
+	movb	1(%esi),%ch
+.L013ej1:
+	movb	(%esi),%cl
+.L010ejend:
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_BF_encrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	jmp	.L003finish
+.L000decrypt:
+	andl	$4294967288,%ebp
+	movl	16(%esp),%eax
+	movl	20(%esp),%ebx
+	jz	.L014decrypt_finish
+.L015decrypt_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_BF_decrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	16(%esp),%ecx
+	movl	20(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%ecx,(%edi)
+	movl	%edx,4(%edi)
+	movl	%eax,16(%esp)
+	movl	%ebx,20(%esp)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L015decrypt_loop
+.L014decrypt_finish:
+	movl	52(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L003finish
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_BF_decrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	16(%esp),%ecx
+	movl	20(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+.L016dj7:
+	rorl	$16,%edx
+	movb	%dl,6(%edi)
+	shrl	$16,%edx
+.L017dj6:
+	movb	%dh,5(%edi)
+.L018dj5:
+	movb	%dl,4(%edi)
+.L019dj4:
+	movl	%ecx,(%edi)
+	jmp	.L020djend
+.L021dj3:
+	rorl	$16,%ecx
+	movb	%cl,2(%edi)
+	shll	$16,%ecx
+.L022dj2:
+	movb	%ch,1(%esi)
+.L023dj1:
+	movb	%cl,(%esi)
+.L020djend:
+	jmp	.L003finish
+.L003finish:
+	movl	60(%esp),%ecx
+	addl	$24,%esp
+	movl	%eax,(%ecx)
+	movl	%ebx,4(%ecx)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	64
+.L005cbc_enc_jmp_table:
+.long	0
+.long	.L013ej1-.L004PIC_point
+.long	.L012ej2-.L004PIC_point
+.long	.L011ej3-.L004PIC_point
+.long	.L009ej4-.L004PIC_point
+.long	.L008ej5-.L004PIC_point
+.long	.L007ej6-.L004PIC_point
+.long	.L006ej7-.L004PIC_point
+.align	64
+.size	BF_cbc_encrypt,.-.L_BF_cbc_encrypt_begin
+#endif

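Each unrolled block in the BF_encrypt/BF_decrypt bodies above is one
Feistel round, and the displacements give away the key-schedule layout:
the 18-entry P-array occupies the first 72 bytes, with the four 256-entry
S-boxes following at 1024-byte strides (offsets 72, 1096, 2120, 3144). A
compact C sketch of what the unrolled code computes (struct and names are
illustrative, standing in for libcrypto's BF_KEY):

    #include <stdint.h>

    struct bf_key_sketch {              /* layout inferred from offsets */
        uint32_t P[18];
        uint32_t S[4][256];
    };

    static uint32_t bf_F(const struct bf_key_sketch *k, uint32_t x)
    {
        /* additions are mod 2^32, as in the addl instructions */
        return ((k->S[0][x >> 24] + k->S[1][(x >> 16) & 0xff])
                ^ k->S[2][(x >> 8) & 0xff]) + k->S[3][x & 0xff];
    }

    static void bf_encrypt_sketch(const struct bf_key_sketch *k,
                                  uint32_t data[2])
    {
        uint32_t a = data[0] ^ k->P[0];
        uint32_t b = data[1];
        for (int i = 1; i <= 15; i += 2) {  /* 16 rounds, P[1]..P[16] */
            b ^= k->P[i]     ^ bf_F(k, a);
            a ^= k->P[i + 1] ^ bf_F(k, b);
        }
        data[0] = b ^ k->P[17];             /* final swap + whitening */
        data[1] = a;
    }

BF_decrypt is the same walk with the subkeys taken in reverse, which is
why its unrolled blocks index 68(%ebp) down to (%ebp).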

Property changes on: trunk/secure/lib/libcrypto/i386/bf-586.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
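The .L006ej7 .. .L013ej1 ladder in BF_cbc_encrypt handles a trailing 1-7
byte partial block: the call/pop at .L004PIC_point recovers the jump
table's load address so the indirect jmp stays position-independent, and
each entry falls through, packing the remaining input bytes into the two
32-bit halves before the final block is encrypted. A C sketch of that
gather, with switch fallthrough standing in for the jump table and an
illustrative helper name:

    #include <stdint.h>

    /* Pack the last n (1..7) input bytes into the two words xor-ed
     * into the previous ciphertext block; untouched bytes stay zero,
     * matching the xorl %ecx,%ecx / xorl %edx,%edx before the jmp. */
    static void cbc_enc_tail_gather(const uint8_t *in, unsigned n,
                                    uint32_t *lo, uint32_t *hi)
    {
        uint32_t c = 0, d = 0;
        switch (n) {
        case 7: d |= (uint32_t)in[6] << 16;   /* fallthrough */
        case 6: d |= (uint32_t)in[5] << 8;    /* fallthrough */
        case 5: d |= in[4];                   /* fallthrough */
        case 4: c = (uint32_t)in[0]       | (uint32_t)in[1] << 8
                  | (uint32_t)in[2] << 16 | (uint32_t)in[3] << 24;
                break;
        case 3: c |= (uint32_t)in[2] << 16;   /* fallthrough */
        case 2: c |= (uint32_t)in[1] << 8;    /* fallthrough */
        case 1: c |= in[0];
        }
        *lo = c;
        *hi = d;
    }

The matching decrypt ladder (.L016dj7 .. .L023dj1) scatters the final
bytes back out; note the dj2/dj1 stores go through %esi rather than %edi,
which looks like a long-standing quirk of the generated tail and only
comes into play for decrypt lengths that are not a multiple of 8.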
Deleted: trunk/secure/lib/libcrypto/i386/bf-586.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/bf-586.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/bf-586.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,897 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/bf-586.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"bf-586.s"
-.text
-.globl	BF_encrypt
-.type	BF_encrypt,@function
-.align	16
-BF_encrypt:
-.L_BF_encrypt_begin:
-
-	pushl	%ebp
-	pushl	%ebx
-	movl	12(%esp),%ebx
-	movl	16(%esp),%ebp
-	pushl	%esi
-	pushl	%edi
-
-	movl	(%ebx),%edi
-	movl	4(%ebx),%esi
-	xorl	%eax,%eax
-	movl	(%ebp),%ebx
-	xorl	%ecx,%ecx
-	xorl	%ebx,%edi
-
-
-	movl	4(%ebp),%edx
-	movl	%edi,%ebx
-	xorl	%edx,%esi
-	shrl	$16,%ebx
-	movl	%edi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%esi
-
-
-	movl	8(%ebp),%edx
-	movl	%esi,%ebx
-	xorl	%edx,%edi
-	shrl	$16,%ebx
-	movl	%esi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%edi
-
-
-	movl	12(%ebp),%edx
-	movl	%edi,%ebx
-	xorl	%edx,%esi
-	shrl	$16,%ebx
-	movl	%edi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%esi
-
-
-	movl	16(%ebp),%edx
-	movl	%esi,%ebx
-	xorl	%edx,%edi
-	shrl	$16,%ebx
-	movl	%esi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%edi
-
-
-	movl	20(%ebp),%edx
-	movl	%edi,%ebx
-	xorl	%edx,%esi
-	shrl	$16,%ebx
-	movl	%edi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%esi
-
-
-	movl	24(%ebp),%edx
-	movl	%esi,%ebx
-	xorl	%edx,%edi
-	shrl	$16,%ebx
-	movl	%esi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%edi
-
-
-	movl	28(%ebp),%edx
-	movl	%edi,%ebx
-	xorl	%edx,%esi
-	shrl	$16,%ebx
-	movl	%edi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%esi
-
-
-	movl	32(%ebp),%edx
-	movl	%esi,%ebx
-	xorl	%edx,%edi
-	shrl	$16,%ebx
-	movl	%esi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%edi
-
-
-	movl	36(%ebp),%edx
-	movl	%edi,%ebx
-	xorl	%edx,%esi
-	shrl	$16,%ebx
-	movl	%edi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%esi
-
-
-	movl	40(%ebp),%edx
-	movl	%esi,%ebx
-	xorl	%edx,%edi
-	shrl	$16,%ebx
-	movl	%esi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%edi
-
-
-	movl	44(%ebp),%edx
-	movl	%edi,%ebx
-	xorl	%edx,%esi
-	shrl	$16,%ebx
-	movl	%edi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%esi
-
-
-	movl	48(%ebp),%edx
-	movl	%esi,%ebx
-	xorl	%edx,%edi
-	shrl	$16,%ebx
-	movl	%esi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%edi
-
-
-	movl	52(%ebp),%edx
-	movl	%edi,%ebx
-	xorl	%edx,%esi
-	shrl	$16,%ebx
-	movl	%edi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%esi
-
-
-	movl	56(%ebp),%edx
-	movl	%esi,%ebx
-	xorl	%edx,%edi
-	shrl	$16,%ebx
-	movl	%esi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%edi
-
-
-	movl	60(%ebp),%edx
-	movl	%edi,%ebx
-	xorl	%edx,%esi
-	shrl	$16,%ebx
-	movl	%edi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%esi
-
-
-	movl	64(%ebp),%edx
-	movl	%esi,%ebx
-	xorl	%edx,%edi
-	shrl	$16,%ebx
-	movl	%esi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-
-	movl	20(%esp),%eax
-	xorl	%ebx,%edi
-	movl	68(%ebp),%edx
-	xorl	%edx,%esi
-	movl	%edi,4(%eax)
-	movl	%esi,(%eax)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	BF_encrypt,.-.L_BF_encrypt_begin
-.globl	BF_decrypt
-.type	BF_decrypt,@function
-.align	16
-BF_decrypt:
-.L_BF_decrypt_begin:
-
-	pushl	%ebp
-	pushl	%ebx
-	movl	12(%esp),%ebx
-	movl	16(%esp),%ebp
-	pushl	%esi
-	pushl	%edi
-
-	movl	(%ebx),%edi
-	movl	4(%ebx),%esi
-	xorl	%eax,%eax
-	movl	68(%ebp),%ebx
-	xorl	%ecx,%ecx
-	xorl	%ebx,%edi
-
-
-	movl	64(%ebp),%edx
-	movl	%edi,%ebx
-	xorl	%edx,%esi
-	shrl	$16,%ebx
-	movl	%edi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%esi
-
-
-	movl	60(%ebp),%edx
-	movl	%esi,%ebx
-	xorl	%edx,%edi
-	shrl	$16,%ebx
-	movl	%esi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%edi
-
-
-	movl	56(%ebp),%edx
-	movl	%edi,%ebx
-	xorl	%edx,%esi
-	shrl	$16,%ebx
-	movl	%edi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%esi
-
-
-	movl	52(%ebp),%edx
-	movl	%esi,%ebx
-	xorl	%edx,%edi
-	shrl	$16,%ebx
-	movl	%esi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%edi
-
-
-	movl	48(%ebp),%edx
-	movl	%edi,%ebx
-	xorl	%edx,%esi
-	shrl	$16,%ebx
-	movl	%edi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%esi
-
-
-	movl	44(%ebp),%edx
-	movl	%esi,%ebx
-	xorl	%edx,%edi
-	shrl	$16,%ebx
-	movl	%esi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%edi
-
-
-	movl	40(%ebp),%edx
-	movl	%edi,%ebx
-	xorl	%edx,%esi
-	shrl	$16,%ebx
-	movl	%edi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%esi
-
-
-	movl	36(%ebp),%edx
-	movl	%esi,%ebx
-	xorl	%edx,%edi
-	shrl	$16,%ebx
-	movl	%esi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%edi
-
-
-	movl	32(%ebp),%edx
-	movl	%edi,%ebx
-	xorl	%edx,%esi
-	shrl	$16,%ebx
-	movl	%edi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%esi
-
-
-	movl	28(%ebp),%edx
-	movl	%esi,%ebx
-	xorl	%edx,%edi
-	shrl	$16,%ebx
-	movl	%esi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%edi
-
-
-	movl	24(%ebp),%edx
-	movl	%edi,%ebx
-	xorl	%edx,%esi
-	shrl	$16,%ebx
-	movl	%edi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%esi
-
-
-	movl	20(%ebp),%edx
-	movl	%esi,%ebx
-	xorl	%edx,%edi
-	shrl	$16,%ebx
-	movl	%esi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%edi
-
-
-	movl	16(%ebp),%edx
-	movl	%edi,%ebx
-	xorl	%edx,%esi
-	shrl	$16,%ebx
-	movl	%edi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%esi
-
-
-	movl	12(%ebp),%edx
-	movl	%esi,%ebx
-	xorl	%edx,%edi
-	shrl	$16,%ebx
-	movl	%esi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%edi
-
-
-	movl	8(%ebp),%edx
-	movl	%edi,%ebx
-	xorl	%edx,%esi
-	shrl	$16,%ebx
-	movl	%edi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-	xorl	%eax,%eax
-	xorl	%ebx,%esi
-
-
-	movl	4(%ebp),%edx
-	movl	%esi,%ebx
-	xorl	%edx,%edi
-	shrl	$16,%ebx
-	movl	%esi,%edx
-	movb	%bh,%al
-	andl	$255,%ebx
-	movb	%dh,%cl
-	andl	$255,%edx
-	movl	72(%ebp,%eax,4),%eax
-	movl	1096(%ebp,%ebx,4),%ebx
-	addl	%eax,%ebx
-	movl	2120(%ebp,%ecx,4),%eax
-	xorl	%eax,%ebx
-	movl	3144(%ebp,%edx,4),%edx
-	addl	%edx,%ebx
-
-	movl	20(%esp),%eax
-	xorl	%ebx,%edi
-	movl	(%ebp),%edx
-	xorl	%edx,%esi
-	movl	%edi,4(%eax)
-	movl	%esi,(%eax)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	BF_decrypt,.-.L_BF_decrypt_begin
-.globl	BF_cbc_encrypt
-.type	BF_cbc_encrypt,@function
-.align	16
-BF_cbc_encrypt:
-.L_BF_cbc_encrypt_begin:
-
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	28(%esp),%ebp
-
-	movl	36(%esp),%ebx
-	movl	(%ebx),%esi
-	movl	4(%ebx),%edi
-	pushl	%edi
-	pushl	%esi
-	pushl	%edi
-	pushl	%esi
-	movl	%esp,%ebx
-	movl	36(%esp),%esi
-	movl	40(%esp),%edi
-
-	movl	56(%esp),%ecx
-
-	movl	48(%esp),%eax
-	pushl	%eax
-	pushl	%ebx
-	cmpl	$0,%ecx
-	jz	.L000decrypt
-	andl	$4294967288,%ebp
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	jz	.L001encrypt_finish
-.L002encrypt_loop:
-	movl	(%esi),%ecx
-	movl	4(%esi),%edx
-	xorl	%ecx,%eax
-	xorl	%edx,%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	%eax,8(%esp)
-	movl	%ebx,12(%esp)
-	call	.L_BF_encrypt_begin
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	addl	$8,%esi
-	addl	$8,%edi
-	subl	$8,%ebp
-	jnz	.L002encrypt_loop
-.L001encrypt_finish:
-	movl	52(%esp),%ebp
-	andl	$7,%ebp
-	jz	.L003finish
-	call	.L004PIC_point
-.L004PIC_point:
-	popl	%edx
-	leal	.L005cbc_enc_jmp_table-.L004PIC_point(%edx),%ecx
-	movl	(%ecx,%ebp,4),%ebp
-	addl	%edx,%ebp
-	xorl	%ecx,%ecx
-	xorl	%edx,%edx
-	jmp	*%ebp
-.L006ej7:
-	movb	6(%esi),%dh
-	shll	$8,%edx
-.L007ej6:
-	movb	5(%esi),%dh
-.L008ej5:
-	movb	4(%esi),%dl
-.L009ej4:
-	movl	(%esi),%ecx
-	jmp	.L010ejend
-.L011ej3:
-	movb	2(%esi),%ch
-	shll	$8,%ecx
-.L012ej2:
-	movb	1(%esi),%ch
-.L013ej1:
-	movb	(%esi),%cl
-.L010ejend:
-	xorl	%ecx,%eax
-	xorl	%edx,%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	%eax,8(%esp)
-	movl	%ebx,12(%esp)
-	call	.L_BF_encrypt_begin
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	jmp	.L003finish
-.L000decrypt:
-	andl	$4294967288,%ebp
-	movl	16(%esp),%eax
-	movl	20(%esp),%ebx
-	jz	.L014decrypt_finish
-.L015decrypt_loop:
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	%eax,8(%esp)
-	movl	%ebx,12(%esp)
-	call	.L_BF_decrypt_begin
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	16(%esp),%ecx
-	movl	20(%esp),%edx
-	xorl	%eax,%ecx
-	xorl	%ebx,%edx
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	%ecx,(%edi)
-	movl	%edx,4(%edi)
-	movl	%eax,16(%esp)
-	movl	%ebx,20(%esp)
-	addl	$8,%esi
-	addl	$8,%edi
-	subl	$8,%ebp
-	jnz	.L015decrypt_loop
-.L014decrypt_finish:
-	movl	52(%esp),%ebp
-	andl	$7,%ebp
-	jz	.L003finish
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	%eax,8(%esp)
-	movl	%ebx,12(%esp)
-	call	.L_BF_decrypt_begin
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	16(%esp),%ecx
-	movl	20(%esp),%edx
-	xorl	%eax,%ecx
-	xorl	%ebx,%edx
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-.L016dj7:
-	rorl	$16,%edx
-	movb	%dl,6(%edi)
-	shrl	$16,%edx
-.L017dj6:
-	movb	%dh,5(%edi)
-.L018dj5:
-	movb	%dl,4(%edi)
-.L019dj4:
-	movl	%ecx,(%edi)
-	jmp	.L020djend
-.L021dj3:
-	rorl	$16,%ecx
-	movb	%cl,2(%edi)
-	shll	$16,%ecx
-.L022dj2:
-	movb	%ch,1(%esi)
-.L023dj1:
-	movb	%cl,(%esi)
-.L020djend:
-	jmp	.L003finish
-.L003finish:
-	movl	60(%esp),%ecx
-	addl	$24,%esp
-	movl	%eax,(%ecx)
-	movl	%ebx,4(%ecx)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.align	64
-.L005cbc_enc_jmp_table:
-.long	0
-.long	.L013ej1-.L004PIC_point
-.long	.L012ej2-.L004PIC_point
-.long	.L011ej3-.L004PIC_point
-.long	.L009ej4-.L004PIC_point
-.long	.L008ej5-.L004PIC_point
-.long	.L007ej6-.L004PIC_point
-.long	.L006ej7-.L004PIC_point
-.align	64
-.size	BF_cbc_encrypt,.-.L_BF_cbc_encrypt_begin

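For orientation when reading these auto-generated files: both the removed
bf-586.s above and the bf-686.S added below implement the same 16-round
Blowfish Feistel network; they differ only in how each round extracts the
four lookup bytes (the 586 flavor juggles %bh/%dh, the 686 flavor uses
rorl plus the byte registers). The table offsets visible in every round
follow OpenSSL's BF_KEY layout: the 18-word P-array at byte offsets 0-68,
then four 256-entry S-boxes at byte offsets 72, 1096, 2120 and 3144. A
minimal C sketch of what each of these files computes (illustrative only,
with hypothetical names -- not OpenSSL's actual source):

	#include <stdint.h>

	/* F(x) = ((S0[x>>24] + S1[(x>>16) & 0xff]) ^ S2[(x>>8) & 0xff])
	 *        + S3[x & 0xff], matching the 72/1096/2120/3144 loads. */
	static uint32_t bf_F(const uint32_t *S, uint32_t x)
	{
		return ((S[(x >> 24) & 0xff] + S[256 + ((x >> 16) & 0xff)])
		        ^ S[512 + ((x >> 8) & 0xff)])
		       + S[768 + (x & 0xff)];
	}

	/* One-to-one with the unrolled rounds: the assembly folds each
	 * round's P-word XOR into the opposite half before the F lookup. */
	static void bf_encrypt_sketch(uint32_t block[2], const uint32_t *P)
	{
		const uint32_t *S = P + 18;	/* byte offset 72 */
		uint32_t L = block[0] ^ P[0], R = block[1];
		for (int i = 1; i <= 15; i += 2) {
			R ^= P[i] ^ bf_F(S, L);
			L ^= P[i + 1] ^ bf_F(S, R);
		}
		block[0] = R ^ P[17];	/* the final xorl 68(%edi),%edx */
		block[1] = L;
	}

BF_decrypt is the same walk with the P-array indices reversed (68 down to
0), exactly as the second function in each file shows.
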
Added: trunk/secure/lib/libcrypto/i386/bf-686.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/bf-686.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/bf-686.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,1734 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/bf-686.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from bf-686.pl.
+#ifdef PIC
+.file	"bf-686.S"
+.text
+.globl	BF_encrypt
+.type	BF_encrypt,@function
+.align	16
+BF_encrypt:
+.L_BF_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+
+
+	movl	20(%esp),%eax
+	movl	(%eax),%ecx
+	movl	4(%eax),%edx
+
+
+	movl	24(%esp),%edi
+	xorl	%eax,%eax
+	xorl	%ebx,%ebx
+	xorl	(%edi),%ecx
+
+
+	rorl	$16,%ecx
+	movl	4(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	8(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	12(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	16(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	20(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	24(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	28(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	32(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	36(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	40(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	44(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	48(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	52(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	56(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	60(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	64(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+	xorl	68(%edi),%edx
+	movl	20(%esp),%eax
+	movl	%edx,(%eax)
+	movl	%ecx,4(%eax)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	BF_encrypt,.-.L_BF_encrypt_begin
+.globl	BF_decrypt
+.type	BF_decrypt,@function
+.align	16
+BF_decrypt:
+.L_BF_decrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+
+
+	movl	20(%esp),%eax
+	movl	(%eax),%ecx
+	movl	4(%eax),%edx
+
+
+	movl	24(%esp),%edi
+	xorl	%eax,%eax
+	xorl	%ebx,%ebx
+	xorl	68(%edi),%ecx
+
+
+	rorl	$16,%ecx
+	movl	64(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	60(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	56(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	52(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	48(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	44(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	40(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	36(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	32(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	28(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	24(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	20(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	16(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	12(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	8(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	4(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+	xorl	(%edi),%edx
+	movl	20(%esp),%eax
+	movl	%edx,(%eax)
+	movl	%ecx,4(%eax)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	BF_decrypt,.-.L_BF_decrypt_begin
+.globl	BF_cbc_encrypt
+.type	BF_cbc_encrypt,@function
+.align	16
+BF_cbc_encrypt:
+.L_BF_cbc_encrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%ebp
+
+	movl	36(%esp),%ebx
+	movl	(%ebx),%esi
+	movl	4(%ebx),%edi
+	pushl	%edi
+	pushl	%esi
+	pushl	%edi
+	pushl	%esi
+	movl	%esp,%ebx
+	movl	36(%esp),%esi
+	movl	40(%esp),%edi
+
+	movl	56(%esp),%ecx
+
+	movl	48(%esp),%eax
+	pushl	%eax
+	pushl	%ebx
+	cmpl	$0,%ecx
+	jz	.L000decrypt
+	andl	$4294967288,%ebp
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	jz	.L001encrypt_finish
+.L002encrypt_loop:
+	movl	(%esi),%ecx
+	movl	4(%esi),%edx
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_BF_encrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L002encrypt_loop
+.L001encrypt_finish:
+	movl	52(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L003finish
+	call	.L004PIC_point
+.L004PIC_point:
+	popl	%edx
+	leal	.L005cbc_enc_jmp_table-.L004PIC_point(%edx),%ecx
+	movl	(%ecx,%ebp,4),%ebp
+	addl	%edx,%ebp
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+	jmp	*%ebp
+.L006ej7:
+	movb	6(%esi),%dh
+	shll	$8,%edx
+.L007ej6:
+	movb	5(%esi),%dh
+.L008ej5:
+	movb	4(%esi),%dl
+.L009ej4:
+	movl	(%esi),%ecx
+	jmp	.L010ejend
+.L011ej3:
+	movb	2(%esi),%ch
+	shll	$8,%ecx
+.L012ej2:
+	movb	1(%esi),%ch
+.L013ej1:
+	movb	(%esi),%cl
+.L010ejend:
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_BF_encrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	jmp	.L003finish
+.L000decrypt:
+	andl	$4294967288,%ebp
+	movl	16(%esp),%eax
+	movl	20(%esp),%ebx
+	jz	.L014decrypt_finish
+.L015decrypt_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_BF_decrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	16(%esp),%ecx
+	movl	20(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%ecx,(%edi)
+	movl	%edx,4(%edi)
+	movl	%eax,16(%esp)
+	movl	%ebx,20(%esp)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L015decrypt_loop
+.L014decrypt_finish:
+	movl	52(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L003finish
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_BF_decrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	16(%esp),%ecx
+	movl	20(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+.L016dj7:
+	rorl	$16,%edx
+	movb	%dl,6(%edi)
+	shrl	$16,%edx
+.L017dj6:
+	movb	%dh,5(%edi)
+.L018dj5:
+	movb	%dl,4(%edi)
+.L019dj4:
+	movl	%ecx,(%edi)
+	jmp	.L020djend
+.L021dj3:
+	rorl	$16,%ecx
+	movb	%cl,2(%edi)
+	shll	$16,%ecx
+.L022dj2:
+	movb	%ch,1(%esi)
+.L023dj1:
+	movb	%cl,(%esi)
+.L020djend:
+	jmp	.L003finish
+.L003finish:
+	movl	60(%esp),%ecx
+	addl	$24,%esp
+	movl	%eax,(%ecx)
+	movl	%ebx,4(%ecx)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	64
+.L005cbc_enc_jmp_table:
+.long	0
+.long	.L013ej1-.L004PIC_point
+.long	.L012ej2-.L004PIC_point
+.long	.L011ej3-.L004PIC_point
+.long	.L009ej4-.L004PIC_point
+.long	.L008ej5-.L004PIC_point
+.long	.L007ej6-.L004PIC_point
+.long	.L006ej7-.L004PIC_point
+.align	64
+.size	BF_cbc_encrypt,.-.L_BF_cbc_encrypt_begin
+#else
+.file	"bf-686.S"
+.text
+.globl	BF_encrypt
+.type	BF_encrypt,@function
+.align	16
+BF_encrypt:
+.L_BF_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+
+
+	movl	20(%esp),%eax
+	movl	(%eax),%ecx
+	movl	4(%eax),%edx
+
+
+	movl	24(%esp),%edi
+	xorl	%eax,%eax
+	xorl	%ebx,%ebx
+	xorl	(%edi),%ecx
+
+
+	rorl	$16,%ecx
+	movl	4(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	8(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	12(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	16(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	20(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	24(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	28(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	32(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	36(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	40(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	44(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	48(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	52(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	56(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	60(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	64(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+	xorl	68(%edi),%edx
+	movl	20(%esp),%eax
+	movl	%edx,(%eax)
+	movl	%ecx,4(%eax)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	BF_encrypt,.-.L_BF_encrypt_begin
+.globl	BF_decrypt
+.type	BF_decrypt,@function
+.align	16
+BF_decrypt:
+.L_BF_decrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+
+
+	movl	20(%esp),%eax
+	movl	(%eax),%ecx
+	movl	4(%eax),%edx
+
+
+	movl	24(%esp),%edi
+	xorl	%eax,%eax
+	xorl	%ebx,%ebx
+	xorl	68(%edi),%ecx
+
+
+	rorl	$16,%ecx
+	movl	64(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	60(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	56(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	52(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	48(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	44(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	40(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	36(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	32(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	28(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	24(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	20(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	16(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	12(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+
+
+	rorl	$16,%ecx
+	movl	8(%edi),%esi
+	movb	%ch,%al
+	movb	%cl,%bl
+	rorl	$16,%ecx
+	xorl	%esi,%edx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%ch,%al
+	movb	%cl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%edx
+
+
+	rorl	$16,%edx
+	movl	4(%edi),%esi
+	movb	%dh,%al
+	movb	%dl,%bl
+	rorl	$16,%edx
+	xorl	%esi,%ecx
+	movl	72(%edi,%eax,4),%esi
+	movl	1096(%edi,%ebx,4),%ebp
+	movb	%dh,%al
+	movb	%dl,%bl
+	addl	%ebp,%esi
+	movl	2120(%edi,%eax,4),%eax
+	xorl	%eax,%esi
+	movl	3144(%edi,%ebx,4),%ebp
+	addl	%ebp,%esi
+	xorl	%eax,%eax
+	xorl	%esi,%ecx
+	xorl	(%edi),%edx
+	movl	20(%esp),%eax
+	movl	%edx,(%eax)
+	movl	%ecx,4(%eax)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	BF_decrypt,.-.L_BF_decrypt_begin
+.globl	BF_cbc_encrypt
+.type	BF_cbc_encrypt,@function
+.align	16
+BF_cbc_encrypt:
+.L_BF_cbc_encrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%ebp
+
+	movl	36(%esp),%ebx
+	movl	(%ebx),%esi
+	movl	4(%ebx),%edi
+	pushl	%edi
+	pushl	%esi
+	pushl	%edi
+	pushl	%esi
+	movl	%esp,%ebx
+	movl	36(%esp),%esi
+	movl	40(%esp),%edi
+
+	movl	56(%esp),%ecx
+
+	movl	48(%esp),%eax
+	pushl	%eax
+	pushl	%ebx
+	cmpl	$0,%ecx
+	jz	.L000decrypt
+	andl	$4294967288,%ebp
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	jz	.L001encrypt_finish
+.L002encrypt_loop:
+	movl	(%esi),%ecx
+	movl	4(%esi),%edx
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_BF_encrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L002encrypt_loop
+.L001encrypt_finish:
+	movl	52(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L003finish
+	call	.L004PIC_point
+.L004PIC_point:
+	popl	%edx
+	leal	.L005cbc_enc_jmp_table-.L004PIC_point(%edx),%ecx
+	movl	(%ecx,%ebp,4),%ebp
+	addl	%edx,%ebp
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+	jmp	*%ebp
+.L006ej7:
+	movb	6(%esi),%dh
+	shll	$8,%edx
+.L007ej6:
+	movb	5(%esi),%dh
+.L008ej5:
+	movb	4(%esi),%dl
+.L009ej4:
+	movl	(%esi),%ecx
+	jmp	.L010ejend
+.L011ej3:
+	movb	2(%esi),%ch
+	shll	$8,%ecx
+.L012ej2:
+	movb	1(%esi),%ch
+.L013ej1:
+	movb	(%esi),%cl
+.L010ejend:
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_BF_encrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	jmp	.L003finish
+.L000decrypt:
+	andl	$4294967288,%ebp
+	movl	16(%esp),%eax
+	movl	20(%esp),%ebx
+	jz	.L014decrypt_finish
+.L015decrypt_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_BF_decrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	16(%esp),%ecx
+	movl	20(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%ecx,(%edi)
+	movl	%edx,4(%edi)
+	movl	%eax,16(%esp)
+	movl	%ebx,20(%esp)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L015decrypt_loop
+.L014decrypt_finish:
+	movl	52(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L003finish
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_BF_decrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	16(%esp),%ecx
+	movl	20(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+.L016dj7:
+	rorl	$16,%edx
+	movb	%dl,6(%edi)
+	shrl	$16,%edx
+.L017dj6:
+	movb	%dh,5(%edi)
+.L018dj5:
+	movb	%dl,4(%edi)
+.L019dj4:
+	movl	%ecx,(%edi)
+	jmp	.L020djend
+.L021dj3:
+	rorl	$16,%ecx
+	movb	%cl,2(%edi)
+	shll	$16,%ecx
+.L022dj2:
+	movb	%ch,1(%esi)
+.L023dj1:
+	movb	%cl,(%esi)
+.L020djend:
+	jmp	.L003finish
+.L003finish:
+	movl	60(%esp),%ecx
+	addl	$24,%esp
+	movl	%eax,(%ecx)
+	movl	%ebx,4(%ecx)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	64
+.L005cbc_enc_jmp_table:
+.long	0
+.long	.L013ej1-.L004PIC_point
+.long	.L012ej2-.L004PIC_point
+.long	.L011ej3-.L004PIC_point
+.long	.L009ej4-.L004PIC_point
+.long	.L008ej5-.L004PIC_point
+.long	.L007ej6-.L004PIC_point
+.long	.L006ej7-.L004PIC_point
+.align	64
+.size	BF_cbc_encrypt,.-.L_BF_cbc_encrypt_begin
+#endif


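The rename from lowercase .s to uppercase .S across this commit is what
makes the #ifdef PIC / #else / #endif bracketing above work: a .S suffix
tells the compiler driver to run the file through the C preprocessor
before assembling, so one file can carry both the PIC and non-PIC bodies
and the build keeps one per object. Roughly (illustrative invocations,
not the tree's actual Makefile rules):

	cc -fPIC -DPIC -c bf-686.S -o bf-686.pic.o	# keeps the PIC body
	cc -c bf-686.S -o bf-686.o			# keeps the #else body
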
Property changes on: trunk/secure/lib/libcrypto/i386/bf-686.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Deleted: trunk/secure/lib/libcrypto/i386/bf-686.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/bf-686.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/bf-686.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,865 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/bf-686.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"bf-686.s"
-.text
-.globl	BF_encrypt
-.type	BF_encrypt,@function
-.align	16
-BF_encrypt:
-.L_BF_encrypt_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-
-
-	movl	20(%esp),%eax
-	movl	(%eax),%ecx
-	movl	4(%eax),%edx
-
-
-	movl	24(%esp),%edi
-	xorl	%eax,%eax
-	xorl	%ebx,%ebx
-	xorl	(%edi),%ecx
-
-
-	rorl	$16,%ecx
-	movl	4(%edi),%esi
-	movb	%ch,%al
-	movb	%cl,%bl
-	rorl	$16,%ecx
-	xorl	%esi,%edx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%ch,%al
-	movb	%cl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%edx
-
-
-	rorl	$16,%edx
-	movl	8(%edi),%esi
-	movb	%dh,%al
-	movb	%dl,%bl
-	rorl	$16,%edx
-	xorl	%esi,%ecx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%dh,%al
-	movb	%dl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%ecx
-
-
-	rorl	$16,%ecx
-	movl	12(%edi),%esi
-	movb	%ch,%al
-	movb	%cl,%bl
-	rorl	$16,%ecx
-	xorl	%esi,%edx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%ch,%al
-	movb	%cl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%edx
-
-
-	rorl	$16,%edx
-	movl	16(%edi),%esi
-	movb	%dh,%al
-	movb	%dl,%bl
-	rorl	$16,%edx
-	xorl	%esi,%ecx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%dh,%al
-	movb	%dl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%ecx
-
-
-	rorl	$16,%ecx
-	movl	20(%edi),%esi
-	movb	%ch,%al
-	movb	%cl,%bl
-	rorl	$16,%ecx
-	xorl	%esi,%edx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%ch,%al
-	movb	%cl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%edx
-
-
-	rorl	$16,%edx
-	movl	24(%edi),%esi
-	movb	%dh,%al
-	movb	%dl,%bl
-	rorl	$16,%edx
-	xorl	%esi,%ecx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%dh,%al
-	movb	%dl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%ecx
-
-
-	rorl	$16,%ecx
-	movl	28(%edi),%esi
-	movb	%ch,%al
-	movb	%cl,%bl
-	rorl	$16,%ecx
-	xorl	%esi,%edx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%ch,%al
-	movb	%cl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%edx
-
-
-	rorl	$16,%edx
-	movl	32(%edi),%esi
-	movb	%dh,%al
-	movb	%dl,%bl
-	rorl	$16,%edx
-	xorl	%esi,%ecx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%dh,%al
-	movb	%dl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%ecx
-
-
-	rorl	$16,%ecx
-	movl	36(%edi),%esi
-	movb	%ch,%al
-	movb	%cl,%bl
-	rorl	$16,%ecx
-	xorl	%esi,%edx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%ch,%al
-	movb	%cl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%edx
-
-
-	rorl	$16,%edx
-	movl	40(%edi),%esi
-	movb	%dh,%al
-	movb	%dl,%bl
-	rorl	$16,%edx
-	xorl	%esi,%ecx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%dh,%al
-	movb	%dl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%ecx
-
-
-	rorl	$16,%ecx
-	movl	44(%edi),%esi
-	movb	%ch,%al
-	movb	%cl,%bl
-	rorl	$16,%ecx
-	xorl	%esi,%edx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%ch,%al
-	movb	%cl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%edx
-
-
-	rorl	$16,%edx
-	movl	48(%edi),%esi
-	movb	%dh,%al
-	movb	%dl,%bl
-	rorl	$16,%edx
-	xorl	%esi,%ecx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%dh,%al
-	movb	%dl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%ecx
-
-
-	rorl	$16,%ecx
-	movl	52(%edi),%esi
-	movb	%ch,%al
-	movb	%cl,%bl
-	rorl	$16,%ecx
-	xorl	%esi,%edx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%ch,%al
-	movb	%cl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%edx
-
-
-	rorl	$16,%edx
-	movl	56(%edi),%esi
-	movb	%dh,%al
-	movb	%dl,%bl
-	rorl	$16,%edx
-	xorl	%esi,%ecx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%dh,%al
-	movb	%dl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%ecx
-
-
-	rorl	$16,%ecx
-	movl	60(%edi),%esi
-	movb	%ch,%al
-	movb	%cl,%bl
-	rorl	$16,%ecx
-	xorl	%esi,%edx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%ch,%al
-	movb	%cl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%edx
-
-
-	rorl	$16,%edx
-	movl	64(%edi),%esi
-	movb	%dh,%al
-	movb	%dl,%bl
-	rorl	$16,%edx
-	xorl	%esi,%ecx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%dh,%al
-	movb	%dl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%ecx
-	xorl	68(%edi),%edx
-	movl	20(%esp),%eax
-	movl	%edx,(%eax)
-	movl	%ecx,4(%eax)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	BF_encrypt,.-.L_BF_encrypt_begin
-.globl	BF_decrypt
-.type	BF_decrypt,@function
-.align	16
-BF_decrypt:
-.L_BF_decrypt_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-
-
-	movl	20(%esp),%eax
-	movl	(%eax),%ecx
-	movl	4(%eax),%edx
-
-
-	movl	24(%esp),%edi
-	xorl	%eax,%eax
-	xorl	%ebx,%ebx
-	xorl	68(%edi),%ecx
-
-
-	rorl	$16,%ecx
-	movl	64(%edi),%esi
-	movb	%ch,%al
-	movb	%cl,%bl
-	rorl	$16,%ecx
-	xorl	%esi,%edx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%ch,%al
-	movb	%cl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%edx
-
-
-	rorl	$16,%edx
-	movl	60(%edi),%esi
-	movb	%dh,%al
-	movb	%dl,%bl
-	rorl	$16,%edx
-	xorl	%esi,%ecx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%dh,%al
-	movb	%dl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%ecx
-
-
-	rorl	$16,%ecx
-	movl	56(%edi),%esi
-	movb	%ch,%al
-	movb	%cl,%bl
-	rorl	$16,%ecx
-	xorl	%esi,%edx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%ch,%al
-	movb	%cl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%edx
-
-
-	rorl	$16,%edx
-	movl	52(%edi),%esi
-	movb	%dh,%al
-	movb	%dl,%bl
-	rorl	$16,%edx
-	xorl	%esi,%ecx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%dh,%al
-	movb	%dl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%ecx
-
-
-	rorl	$16,%ecx
-	movl	48(%edi),%esi
-	movb	%ch,%al
-	movb	%cl,%bl
-	rorl	$16,%ecx
-	xorl	%esi,%edx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%ch,%al
-	movb	%cl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%edx
-
-
-	rorl	$16,%edx
-	movl	44(%edi),%esi
-	movb	%dh,%al
-	movb	%dl,%bl
-	rorl	$16,%edx
-	xorl	%esi,%ecx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%dh,%al
-	movb	%dl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%ecx
-
-
-	rorl	$16,%ecx
-	movl	40(%edi),%esi
-	movb	%ch,%al
-	movb	%cl,%bl
-	rorl	$16,%ecx
-	xorl	%esi,%edx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%ch,%al
-	movb	%cl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%edx
-
-
-	rorl	$16,%edx
-	movl	36(%edi),%esi
-	movb	%dh,%al
-	movb	%dl,%bl
-	rorl	$16,%edx
-	xorl	%esi,%ecx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%dh,%al
-	movb	%dl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%ecx
-
-
-	rorl	$16,%ecx
-	movl	32(%edi),%esi
-	movb	%ch,%al
-	movb	%cl,%bl
-	rorl	$16,%ecx
-	xorl	%esi,%edx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%ch,%al
-	movb	%cl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%edx
-
-
-	rorl	$16,%edx
-	movl	28(%edi),%esi
-	movb	%dh,%al
-	movb	%dl,%bl
-	rorl	$16,%edx
-	xorl	%esi,%ecx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%dh,%al
-	movb	%dl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%ecx
-
-
-	rorl	$16,%ecx
-	movl	24(%edi),%esi
-	movb	%ch,%al
-	movb	%cl,%bl
-	rorl	$16,%ecx
-	xorl	%esi,%edx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%ch,%al
-	movb	%cl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%edx
-
-
-	rorl	$16,%edx
-	movl	20(%edi),%esi
-	movb	%dh,%al
-	movb	%dl,%bl
-	rorl	$16,%edx
-	xorl	%esi,%ecx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%dh,%al
-	movb	%dl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%ecx
-
-
-	rorl	$16,%ecx
-	movl	16(%edi),%esi
-	movb	%ch,%al
-	movb	%cl,%bl
-	rorl	$16,%ecx
-	xorl	%esi,%edx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%ch,%al
-	movb	%cl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%edx
-
-
-	rorl	$16,%edx
-	movl	12(%edi),%esi
-	movb	%dh,%al
-	movb	%dl,%bl
-	rorl	$16,%edx
-	xorl	%esi,%ecx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%dh,%al
-	movb	%dl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%ecx
-
-
-	rorl	$16,%ecx
-	movl	8(%edi),%esi
-	movb	%ch,%al
-	movb	%cl,%bl
-	rorl	$16,%ecx
-	xorl	%esi,%edx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%ch,%al
-	movb	%cl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%edx
-
-
-	rorl	$16,%edx
-	movl	4(%edi),%esi
-	movb	%dh,%al
-	movb	%dl,%bl
-	rorl	$16,%edx
-	xorl	%esi,%ecx
-	movl	72(%edi,%eax,4),%esi
-	movl	1096(%edi,%ebx,4),%ebp
-	movb	%dh,%al
-	movb	%dl,%bl
-	addl	%ebp,%esi
-	movl	2120(%edi,%eax,4),%eax
-	xorl	%eax,%esi
-	movl	3144(%edi,%ebx,4),%ebp
-	addl	%ebp,%esi
-	xorl	%eax,%eax
-	xorl	%esi,%ecx
-	xorl	(%edi),%edx
-	movl	20(%esp),%eax
-	movl	%edx,(%eax)
-	movl	%ecx,4(%eax)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	BF_decrypt,.-.L_BF_decrypt_begin
-.globl	BF_cbc_encrypt
-.type	BF_cbc_encrypt,@function
-.align	16
-BF_cbc_encrypt:
-.L_BF_cbc_encrypt_begin:
-
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	28(%esp),%ebp
-
-	movl	36(%esp),%ebx
-	movl	(%ebx),%esi
-	movl	4(%ebx),%edi
-	pushl	%edi
-	pushl	%esi
-	pushl	%edi
-	pushl	%esi
-	movl	%esp,%ebx
-	movl	36(%esp),%esi
-	movl	40(%esp),%edi
-
-	movl	56(%esp),%ecx
-
-	movl	48(%esp),%eax
-	pushl	%eax
-	pushl	%ebx
-	cmpl	$0,%ecx
-	jz	.L000decrypt
-	andl	$4294967288,%ebp
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	jz	.L001encrypt_finish
-.L002encrypt_loop:
-	movl	(%esi),%ecx
-	movl	4(%esi),%edx
-	xorl	%ecx,%eax
-	xorl	%edx,%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	%eax,8(%esp)
-	movl	%ebx,12(%esp)
-	call	.L_BF_encrypt_begin
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	addl	$8,%esi
-	addl	$8,%edi
-	subl	$8,%ebp
-	jnz	.L002encrypt_loop
-.L001encrypt_finish:
-	movl	52(%esp),%ebp
-	andl	$7,%ebp
-	jz	.L003finish
-	call	.L004PIC_point
-.L004PIC_point:
-	popl	%edx
-	leal	.L005cbc_enc_jmp_table-.L004PIC_point(%edx),%ecx
-	movl	(%ecx,%ebp,4),%ebp
-	addl	%edx,%ebp
-	xorl	%ecx,%ecx
-	xorl	%edx,%edx
-	jmp	*%ebp
-.L006ej7:
-	movb	6(%esi),%dh
-	shll	$8,%edx
-.L007ej6:
-	movb	5(%esi),%dh
-.L008ej5:
-	movb	4(%esi),%dl
-.L009ej4:
-	movl	(%esi),%ecx
-	jmp	.L010ejend
-.L011ej3:
-	movb	2(%esi),%ch
-	shll	$8,%ecx
-.L012ej2:
-	movb	1(%esi),%ch
-.L013ej1:
-	movb	(%esi),%cl
-.L010ejend:
-	xorl	%ecx,%eax
-	xorl	%edx,%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	%eax,8(%esp)
-	movl	%ebx,12(%esp)
-	call	.L_BF_encrypt_begin
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	jmp	.L003finish
-.L000decrypt:
-	andl	$4294967288,%ebp
-	movl	16(%esp),%eax
-	movl	20(%esp),%ebx
-	jz	.L014decrypt_finish
-.L015decrypt_loop:
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	%eax,8(%esp)
-	movl	%ebx,12(%esp)
-	call	.L_BF_decrypt_begin
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	16(%esp),%ecx
-	movl	20(%esp),%edx
-	xorl	%eax,%ecx
-	xorl	%ebx,%edx
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	%ecx,(%edi)
-	movl	%edx,4(%edi)
-	movl	%eax,16(%esp)
-	movl	%ebx,20(%esp)
-	addl	$8,%esi
-	addl	$8,%edi
-	subl	$8,%ebp
-	jnz	.L015decrypt_loop
-.L014decrypt_finish:
-	movl	52(%esp),%ebp
-	andl	$7,%ebp
-	jz	.L003finish
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	%eax,8(%esp)
-	movl	%ebx,12(%esp)
-	call	.L_BF_decrypt_begin
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	16(%esp),%ecx
-	movl	20(%esp),%edx
-	xorl	%eax,%ecx
-	xorl	%ebx,%edx
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-.L016dj7:
-	rorl	$16,%edx
-	movb	%dl,6(%edi)
-	shrl	$16,%edx
-.L017dj6:
-	movb	%dh,5(%edi)
-.L018dj5:
-	movb	%dl,4(%edi)
-.L019dj4:
-	movl	%ecx,(%edi)
-	jmp	.L020djend
-.L021dj3:
-	rorl	$16,%ecx
-	movb	%cl,2(%edi)
-	shll	$16,%ecx
-.L022dj2:
-	movb	%ch,1(%esi)
-.L023dj1:
-	movb	%cl,(%esi)
-.L020djend:
-	jmp	.L003finish
-.L003finish:
-	movl	60(%esp),%ecx
-	addl	$24,%esp
-	movl	%eax,(%ecx)
-	movl	%ebx,4(%ecx)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.align	64
-.L005cbc_enc_jmp_table:
-.long	0
-.long	.L013ej1-.L004PIC_point
-.long	.L012ej2-.L004PIC_point
-.long	.L011ej3-.L004PIC_point
-.long	.L009ej4-.L004PIC_point
-.long	.L008ej5-.L004PIC_point
-.long	.L007ej6-.L004PIC_point
-.long	.L006ej7-.L004PIC_point
-.align	64
-.size	BF_cbc_encrypt,.-.L_BF_cbc_encrypt_begin

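The BF_cbc_encrypt routine repeated in the files above is plain 8-byte
CBC over the two-word primitive, with the data handled as big-endian
32-bit words (hence the bswap pairs around each call) and a 1-7 byte
final block dispatched through the .L005cbc_enc_jmp_table computed jump.
In the same sketch style as before (illustrative, hypothetical helper
names), the encryption path amounts to:

	#include <stdint.h>
	#include <string.h>

	static uint32_t be32(const uint8_t *p)
	{
		return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16)
		     | ((uint32_t)p[2] << 8)  | (uint32_t)p[3];
	}

	static void put_be32(uint8_t *p, uint32_t v)
	{
		p[0] = v >> 24; p[1] = v >> 16; p[2] = v >> 8; p[3] = v;
	}

	/* Encrypt-side sketch of BF_cbc_encrypt; bf_encrypt_sketch is the
	 * function outlined earlier.  A short tail is zero-padded, which
	 * is what the ej7..ej1 byte loads into cleared %ecx/%edx do. */
	void bf_cbc_encrypt_sketch(const uint8_t *in, uint8_t *out,
	                           long len, const uint32_t *P,
	                           uint8_t ivec[8])
	{
		uint32_t b[2], iv0 = be32(ivec), iv1 = be32(ivec + 4);
		while (len > 0) {
			uint8_t buf[8] = {0};
			memcpy(buf, in, (size_t)(len >= 8 ? 8 : len));
			b[0] = be32(buf) ^ iv0;
			b[1] = be32(buf + 4) ^ iv1;
			bf_encrypt_sketch(b, P);
			put_be32(out, iv0 = b[0]);
			put_be32(out + 4, iv1 = b[1]);
			in += 8; out += 8; len -= 8;
		}
		put_be32(ivec, iv0); put_be32(ivec + 4, iv1);
	}

The decryption path mirrors this with the reversed P-array walk and, on a
short tail, writes back only the bytes it has (the dj7..dj1 cases).
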
Added: trunk/secure/lib/libcrypto/i386/bn-586.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/bn-586.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/bn-586.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,3060 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/bn-586.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from bn-586.pl.
+#ifdef PIC
+.file	"bn-586.S"
+.text
+.globl	bn_mul_add_words
+.type	bn_mul_add_words,@function
+.align	16
+bn_mul_add_words:
+.L_bn_mul_add_words_begin:
+	call	.L000PIC_me_up
+.L000PIC_me_up:
+	popl	%eax
+	leal	_GLOBAL_OFFSET_TABLE_+[.-.L000PIC_me_up](%eax),%eax
+	movl	OPENSSL_ia32cap_P@GOT(%eax),%eax
+	btl	$26,(%eax)
+	jnc	.L001maw_non_sse2
+	movl	4(%esp),%eax
+	movl	8(%esp),%edx
+	movl	12(%esp),%ecx
+	movd	16(%esp),%mm0
+	pxor	%mm1,%mm1
+	jmp	.L002maw_sse2_entry
+.align	16
+.L003maw_sse2_unrolled:
+	movd	(%eax),%mm3
+	paddq	%mm3,%mm1
+	movd	(%edx),%mm2
+	pmuludq	%mm0,%mm2
+	movd	4(%edx),%mm4
+	pmuludq	%mm0,%mm4
+	movd	8(%edx),%mm6
+	pmuludq	%mm0,%mm6
+	movd	12(%edx),%mm7
+	pmuludq	%mm0,%mm7
+	paddq	%mm2,%mm1
+	movd	4(%eax),%mm3
+	paddq	%mm4,%mm3
+	movd	8(%eax),%mm5
+	paddq	%mm6,%mm5
+	movd	12(%eax),%mm4
+	paddq	%mm4,%mm7
+	movd	%mm1,(%eax)
+	movd	16(%edx),%mm2
+	pmuludq	%mm0,%mm2
+	psrlq	$32,%mm1
+	movd	20(%edx),%mm4
+	pmuludq	%mm0,%mm4
+	paddq	%mm3,%mm1
+	movd	24(%edx),%mm6
+	pmuludq	%mm0,%mm6
+	movd	%mm1,4(%eax)
+	psrlq	$32,%mm1
+	movd	28(%edx),%mm3
+	addl	$32,%edx
+	pmuludq	%mm0,%mm3
+	paddq	%mm5,%mm1
+	movd	16(%eax),%mm5
+	paddq	%mm5,%mm2
+	movd	%mm1,8(%eax)
+	psrlq	$32,%mm1
+	paddq	%mm7,%mm1
+	movd	20(%eax),%mm5
+	paddq	%mm5,%mm4
+	movd	%mm1,12(%eax)
+	psrlq	$32,%mm1
+	paddq	%mm2,%mm1
+	movd	24(%eax),%mm5
+	paddq	%mm5,%mm6
+	movd	%mm1,16(%eax)
+	psrlq	$32,%mm1
+	paddq	%mm4,%mm1
+	movd	28(%eax),%mm5
+	paddq	%mm5,%mm3
+	movd	%mm1,20(%eax)
+	psrlq	$32,%mm1
+	paddq	%mm6,%mm1
+	movd	%mm1,24(%eax)
+	psrlq	$32,%mm1
+	paddq	%mm3,%mm1
+	movd	%mm1,28(%eax)
+	leal	32(%eax),%eax
+	psrlq	$32,%mm1
+	subl	$8,%ecx
+	jz	.L004maw_sse2_exit
+.L002maw_sse2_entry:
+	testl	$4294967288,%ecx
+	jnz	.L003maw_sse2_unrolled
+.align	4
+.L005maw_sse2_loop:
+	movd	(%edx),%mm2
+	movd	(%eax),%mm3
+	pmuludq	%mm0,%mm2
+	leal	4(%edx),%edx
+	paddq	%mm3,%mm1
+	paddq	%mm2,%mm1
+	movd	%mm1,(%eax)
+	subl	$1,%ecx
+	psrlq	$32,%mm1
+	leal	4(%eax),%eax
+	jnz	.L005maw_sse2_loop
+.L004maw_sse2_exit:
+	movd	%mm1,%eax
+	emms
+	ret
+.align	16
+.L001maw_non_sse2:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+
+	xorl	%esi,%esi
+	movl	20(%esp),%edi
+	movl	28(%esp),%ecx
+	movl	24(%esp),%ebx
+	andl	$4294967288,%ecx
+	movl	32(%esp),%ebp
+	pushl	%ecx
+	jz	.L006maw_finish
+.align	16
+.L007maw_loop:
+
+	movl	(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	(%edi),%eax
+	adcl	$0,%edx
+	movl	%eax,(%edi)
+	movl	%edx,%esi
+
+	movl	4(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	4(%edi),%eax
+	adcl	$0,%edx
+	movl	%eax,4(%edi)
+	movl	%edx,%esi
+
+	movl	8(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	8(%edi),%eax
+	adcl	$0,%edx
+	movl	%eax,8(%edi)
+	movl	%edx,%esi
+
+	movl	12(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	12(%edi),%eax
+	adcl	$0,%edx
+	movl	%eax,12(%edi)
+	movl	%edx,%esi
+
+	movl	16(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	16(%edi),%eax
+	adcl	$0,%edx
+	movl	%eax,16(%edi)
+	movl	%edx,%esi
+
+	movl	20(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	20(%edi),%eax
+	adcl	$0,%edx
+	movl	%eax,20(%edi)
+	movl	%edx,%esi
+
+	movl	24(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	24(%edi),%eax
+	adcl	$0,%edx
+	movl	%eax,24(%edi)
+	movl	%edx,%esi
+
+	movl	28(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	28(%edi),%eax
+	adcl	$0,%edx
+	movl	%eax,28(%edi)
+	movl	%edx,%esi
+
+	subl	$8,%ecx
+	leal	32(%ebx),%ebx
+	leal	32(%edi),%edi
+	jnz	.L007maw_loop
+.L006maw_finish:
+	movl	32(%esp),%ecx
+	andl	$7,%ecx
+	jnz	.L008maw_finish2
+	jmp	.L009maw_end
+.L008maw_finish2:
+
+	movl	(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	(%edi),%eax
+	adcl	$0,%edx
+	decl	%ecx
+	movl	%eax,(%edi)
+	movl	%edx,%esi
+	jz	.L009maw_end
+
+	movl	4(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	4(%edi),%eax
+	adcl	$0,%edx
+	decl	%ecx
+	movl	%eax,4(%edi)
+	movl	%edx,%esi
+	jz	.L009maw_end
+
+	movl	8(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	8(%edi),%eax
+	adcl	$0,%edx
+	decl	%ecx
+	movl	%eax,8(%edi)
+	movl	%edx,%esi
+	jz	.L009maw_end
+
+	movl	12(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	12(%edi),%eax
+	adcl	$0,%edx
+	decl	%ecx
+	movl	%eax,12(%edi)
+	movl	%edx,%esi
+	jz	.L009maw_end
+
+	movl	16(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	16(%edi),%eax
+	adcl	$0,%edx
+	decl	%ecx
+	movl	%eax,16(%edi)
+	movl	%edx,%esi
+	jz	.L009maw_end
+
+	movl	20(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	20(%edi),%eax
+	adcl	$0,%edx
+	decl	%ecx
+	movl	%eax,20(%edi)
+	movl	%edx,%esi
+	jz	.L009maw_end
+
+	movl	24(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	24(%edi),%eax
+	adcl	$0,%edx
+	movl	%eax,24(%edi)
+	movl	%edx,%esi
+.L009maw_end:
+	movl	%esi,%eax
+	popl	%ecx
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	bn_mul_add_words,.-.L_bn_mul_add_words_begin
+.globl	bn_mul_words
+.type	bn_mul_words,@function
+.align	16
+bn_mul_words:
+.L_bn_mul_words_begin:
+	call	.L010PIC_me_up
+.L010PIC_me_up:
+	popl	%eax
+	leal	_GLOBAL_OFFSET_TABLE_+[.-.L010PIC_me_up](%eax),%eax
+	movl	OPENSSL_ia32cap_P@GOT(%eax),%eax
+	btl	$26,(%eax)
+	jnc	.L011mw_non_sse2
+	movl	4(%esp),%eax
+	movl	8(%esp),%edx
+	movl	12(%esp),%ecx
+	movd	16(%esp),%mm0
+	pxor	%mm1,%mm1
+.align	16
+.L012mw_sse2_loop:
+	movd	(%edx),%mm2
+	pmuludq	%mm0,%mm2
+	leal	4(%edx),%edx
+	paddq	%mm2,%mm1
+	movd	%mm1,(%eax)
+	subl	$1,%ecx
+	psrlq	$32,%mm1
+	leal	4(%eax),%eax
+	jnz	.L012mw_sse2_loop
+	movd	%mm1,%eax
+	emms
+	ret
+.align	16
+.L011mw_non_sse2:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+
+	xorl	%esi,%esi
+	movl	20(%esp),%edi
+	movl	24(%esp),%ebx
+	movl	28(%esp),%ebp
+	movl	32(%esp),%ecx
+	andl	$4294967288,%ebp
+	jz	.L013mw_finish
+.L014mw_loop:
+
+	movl	(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,(%edi)
+	movl	%edx,%esi
+
+	movl	4(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,4(%edi)
+	movl	%edx,%esi
+
+	movl	8(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,8(%edi)
+	movl	%edx,%esi
+
+	movl	12(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,12(%edi)
+	movl	%edx,%esi
+
+	movl	16(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,16(%edi)
+	movl	%edx,%esi
+
+	movl	20(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,20(%edi)
+	movl	%edx,%esi
+
+	movl	24(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,24(%edi)
+	movl	%edx,%esi
+
+	movl	28(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,28(%edi)
+	movl	%edx,%esi
+
+	addl	$32,%ebx
+	addl	$32,%edi
+	subl	$8,%ebp
+	jz	.L013mw_finish
+	jmp	.L014mw_loop
+.L013mw_finish:
+	movl	28(%esp),%ebp
+	andl	$7,%ebp
+	jnz	.L015mw_finish2
+	jmp	.L016mw_end
+.L015mw_finish2:
+
+	movl	(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,(%edi)
+	movl	%edx,%esi
+	decl	%ebp
+	jz	.L016mw_end
+
+	movl	4(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,4(%edi)
+	movl	%edx,%esi
+	decl	%ebp
+	jz	.L016mw_end
+
+	movl	8(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,8(%edi)
+	movl	%edx,%esi
+	decl	%ebp
+	jz	.L016mw_end
+
+	movl	12(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,12(%edi)
+	movl	%edx,%esi
+	decl	%ebp
+	jz	.L016mw_end
+
+	movl	16(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,16(%edi)
+	movl	%edx,%esi
+	decl	%ebp
+	jz	.L016mw_end
+
+	movl	20(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,20(%edi)
+	movl	%edx,%esi
+	decl	%ebp
+	jz	.L016mw_end
+
+	movl	24(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,24(%edi)
+	movl	%edx,%esi
+.L016mw_end:
+	movl	%esi,%eax
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	bn_mul_words,.-.L_bn_mul_words_begin
+.globl	bn_sqr_words
+.type	bn_sqr_words,@function
+.align	16
+bn_sqr_words:
+.L_bn_sqr_words_begin:
+	call	.L017PIC_me_up
+.L017PIC_me_up:
+	popl	%eax
+	leal	_GLOBAL_OFFSET_TABLE_+[.-.L017PIC_me_up](%eax),%eax
+	movl	OPENSSL_ia32cap_P@GOT(%eax),%eax
+	btl	$26,(%eax)
+	jnc	.L018sqr_non_sse2
+	movl	4(%esp),%eax
+	movl	8(%esp),%edx
+	movl	12(%esp),%ecx
+.align	16
+.L019sqr_sse2_loop:
+	movd	(%edx),%mm0
+	pmuludq	%mm0,%mm0
+	leal	4(%edx),%edx
+	movq	%mm0,(%eax)
+	subl	$1,%ecx
+	leal	8(%eax),%eax
+	jnz	.L019sqr_sse2_loop
+	emms
+	ret
+.align	16
+.L018sqr_non_sse2:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%ebx
+	andl	$4294967288,%ebx
+	jz	.L020sw_finish
+.L021sw_loop:
+
+	movl	(%edi),%eax
+	mull	%eax
+	movl	%eax,(%esi)
+	movl	%edx,4(%esi)
+
+	movl	4(%edi),%eax
+	mull	%eax
+	movl	%eax,8(%esi)
+	movl	%edx,12(%esi)
+
+	movl	8(%edi),%eax
+	mull	%eax
+	movl	%eax,16(%esi)
+	movl	%edx,20(%esi)
+
+	movl	12(%edi),%eax
+	mull	%eax
+	movl	%eax,24(%esi)
+	movl	%edx,28(%esi)
+
+	movl	16(%edi),%eax
+	mull	%eax
+	movl	%eax,32(%esi)
+	movl	%edx,36(%esi)
+
+	movl	20(%edi),%eax
+	mull	%eax
+	movl	%eax,40(%esi)
+	movl	%edx,44(%esi)
+
+	movl	24(%edi),%eax
+	mull	%eax
+	movl	%eax,48(%esi)
+	movl	%edx,52(%esi)
+
+	movl	28(%edi),%eax
+	mull	%eax
+	movl	%eax,56(%esi)
+	movl	%edx,60(%esi)
+
+	addl	$32,%edi
+	addl	$64,%esi
+	subl	$8,%ebx
+	jnz	.L021sw_loop
+.L020sw_finish:
+	movl	28(%esp),%ebx
+	andl	$7,%ebx
+	jz	.L022sw_end
+
+	movl	(%edi),%eax
+	mull	%eax
+	movl	%eax,(%esi)
+	decl	%ebx
+	movl	%edx,4(%esi)
+	jz	.L022sw_end
+
+	movl	4(%edi),%eax
+	mull	%eax
+	movl	%eax,8(%esi)
+	decl	%ebx
+	movl	%edx,12(%esi)
+	jz	.L022sw_end
+
+	movl	8(%edi),%eax
+	mull	%eax
+	movl	%eax,16(%esi)
+	decl	%ebx
+	movl	%edx,20(%esi)
+	jz	.L022sw_end
+
+	movl	12(%edi),%eax
+	mull	%eax
+	movl	%eax,24(%esi)
+	decl	%ebx
+	movl	%edx,28(%esi)
+	jz	.L022sw_end
+
+	movl	16(%edi),%eax
+	mull	%eax
+	movl	%eax,32(%esi)
+	decl	%ebx
+	movl	%edx,36(%esi)
+	jz	.L022sw_end
+
+	movl	20(%edi),%eax
+	mull	%eax
+	movl	%eax,40(%esi)
+	decl	%ebx
+	movl	%edx,44(%esi)
+	jz	.L022sw_end
+
+	movl	24(%edi),%eax
+	mull	%eax
+	movl	%eax,48(%esi)
+	movl	%edx,52(%esi)
+.L022sw_end:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	bn_sqr_words,.-.L_bn_sqr_words_begin
+.globl	bn_div_words
+.type	bn_div_words,@function
+.align	16
+bn_div_words:
+.L_bn_div_words_begin:
+	movl	4(%esp),%edx
+	movl	8(%esp),%eax
+	movl	12(%esp),%ecx
+	divl	%ecx
+	ret
+.size	bn_div_words,.-.L_bn_div_words_begin
+.globl	bn_add_words
+.type	bn_add_words,@function
+.align	16
+bn_add_words:
+.L_bn_add_words_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+
+	movl	20(%esp),%ebx
+	movl	24(%esp),%esi
+	movl	28(%esp),%edi
+	movl	32(%esp),%ebp
+	xorl	%eax,%eax
+	andl	$4294967288,%ebp
+	jz	.L023aw_finish
+.L024aw_loop:
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+
+	movl	4(%esi),%ecx
+	movl	4(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,4(%ebx)
+
+	movl	8(%esi),%ecx
+	movl	8(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,8(%ebx)
+
+	movl	12(%esi),%ecx
+	movl	12(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,12(%ebx)
+
+	movl	16(%esi),%ecx
+	movl	16(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,16(%ebx)
+
+	movl	20(%esi),%ecx
+	movl	20(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,20(%ebx)
+
+	movl	24(%esi),%ecx
+	movl	24(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,24(%ebx)
+
+	movl	28(%esi),%ecx
+	movl	28(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,28(%ebx)
+
+	addl	$32,%esi
+	addl	$32,%edi
+	addl	$32,%ebx
+	subl	$8,%ebp
+	jnz	.L024aw_loop
+.L023aw_finish:
+	movl	32(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L025aw_end
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,(%ebx)
+	jz	.L025aw_end
+
+	movl	4(%esi),%ecx
+	movl	4(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,4(%ebx)
+	jz	.L025aw_end
+
+	movl	8(%esi),%ecx
+	movl	8(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,8(%ebx)
+	jz	.L025aw_end
+
+	movl	12(%esi),%ecx
+	movl	12(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,12(%ebx)
+	jz	.L025aw_end
+
+	movl	16(%esi),%ecx
+	movl	16(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,16(%ebx)
+	jz	.L025aw_end
+
+	movl	20(%esi),%ecx
+	movl	20(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,20(%ebx)
+	jz	.L025aw_end
+
+	movl	24(%esi),%ecx
+	movl	24(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,24(%ebx)
+.L025aw_end:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	bn_add_words,.-.L_bn_add_words_begin
+.globl	bn_sub_words
+.type	bn_sub_words,@function
+.align	16
+bn_sub_words:
+.L_bn_sub_words_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+
+	movl	20(%esp),%ebx
+	movl	24(%esp),%esi
+	movl	28(%esp),%edi
+	movl	32(%esp),%ebp
+	xorl	%eax,%eax
+	andl	$4294967288,%ebp
+	jz	.L026aw_finish
+.L027aw_loop:
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+
+	movl	4(%esi),%ecx
+	movl	4(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,4(%ebx)
+
+	movl	8(%esi),%ecx
+	movl	8(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,8(%ebx)
+
+	movl	12(%esi),%ecx
+	movl	12(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,12(%ebx)
+
+	movl	16(%esi),%ecx
+	movl	16(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,16(%ebx)
+
+	movl	20(%esi),%ecx
+	movl	20(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,20(%ebx)
+
+	movl	24(%esi),%ecx
+	movl	24(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,24(%ebx)
+
+	movl	28(%esi),%ecx
+	movl	28(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,28(%ebx)
+
+	addl	$32,%esi
+	addl	$32,%edi
+	addl	$32,%ebx
+	subl	$8,%ebp
+	jnz	.L027aw_loop
+.L026aw_finish:
+	movl	32(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L028aw_end
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,(%ebx)
+	jz	.L028aw_end
+
+	movl	4(%esi),%ecx
+	movl	4(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,4(%ebx)
+	jz	.L028aw_end
+
+	movl	8(%esi),%ecx
+	movl	8(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,8(%ebx)
+	jz	.L028aw_end
+
+	movl	12(%esi),%ecx
+	movl	12(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,12(%ebx)
+	jz	.L028aw_end
+
+	movl	16(%esi),%ecx
+	movl	16(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,16(%ebx)
+	jz	.L028aw_end
+
+	movl	20(%esi),%ecx
+	movl	20(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,20(%ebx)
+	jz	.L028aw_end
+
+	movl	24(%esi),%ecx
+	movl	24(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,24(%ebx)
+.L028aw_end:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	bn_sub_words,.-.L_bn_sub_words_begin
+.globl	bn_sub_part_words
+.type	bn_sub_part_words,@function
+.align	16
+bn_sub_part_words:
+.L_bn_sub_part_words_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+
+	movl	20(%esp),%ebx
+	movl	24(%esp),%esi
+	movl	28(%esp),%edi
+	movl	32(%esp),%ebp
+	xorl	%eax,%eax
+	andl	$4294967288,%ebp
+	jz	.L029aw_finish
+.L030aw_loop:
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+
+	movl	4(%esi),%ecx
+	movl	4(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,4(%ebx)
+
+	movl	8(%esi),%ecx
+	movl	8(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,8(%ebx)
+
+	movl	12(%esi),%ecx
+	movl	12(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,12(%ebx)
+
+	movl	16(%esi),%ecx
+	movl	16(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,16(%ebx)
+
+	movl	20(%esi),%ecx
+	movl	20(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,20(%ebx)
+
+	movl	24(%esi),%ecx
+	movl	24(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,24(%ebx)
+
+	movl	28(%esi),%ecx
+	movl	28(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,28(%ebx)
+
+	addl	$32,%esi
+	addl	$32,%edi
+	addl	$32,%ebx
+	subl	$8,%ebp
+	jnz	.L030aw_loop
+.L029aw_finish:
+	movl	32(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L031aw_end
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+	addl	$4,%esi
+	addl	$4,%edi
+	addl	$4,%ebx
+	decl	%ebp
+	jz	.L031aw_end
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+	addl	$4,%esi
+	addl	$4,%edi
+	addl	$4,%ebx
+	decl	%ebp
+	jz	.L031aw_end
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+	addl	$4,%esi
+	addl	$4,%edi
+	addl	$4,%ebx
+	decl	%ebp
+	jz	.L031aw_end
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+	addl	$4,%esi
+	addl	$4,%edi
+	addl	$4,%ebx
+	decl	%ebp
+	jz	.L031aw_end
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+	addl	$4,%esi
+	addl	$4,%edi
+	addl	$4,%ebx
+	decl	%ebp
+	jz	.L031aw_end
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+	addl	$4,%esi
+	addl	$4,%edi
+	addl	$4,%ebx
+	decl	%ebp
+	jz	.L031aw_end
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+	addl	$4,%esi
+	addl	$4,%edi
+	addl	$4,%ebx
+.L031aw_end:
+	cmpl	$0,36(%esp)
+	je	.L032pw_end
+	movl	36(%esp),%ebp
+	cmpl	$0,%ebp
+	je	.L032pw_end
+	jge	.L033pw_pos
+
+	movl	$0,%edx
+	subl	%ebp,%edx
+	movl	%edx,%ebp
+	andl	$4294967288,%ebp
+	jz	.L034pw_neg_finish
+.L035pw_neg_loop:
+
+	movl	$0,%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+
+	movl	$0,%ecx
+	movl	4(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,4(%ebx)
+
+	movl	$0,%ecx
+	movl	8(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,8(%ebx)
+
+	movl	$0,%ecx
+	movl	12(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,12(%ebx)
+
+	movl	$0,%ecx
+	movl	16(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,16(%ebx)
+
+	movl	$0,%ecx
+	movl	20(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,20(%ebx)
+
+	movl	$0,%ecx
+	movl	24(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,24(%ebx)
+
+	movl	$0,%ecx
+	movl	28(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,28(%ebx)
+
+	addl	$32,%edi
+	addl	$32,%ebx
+	subl	$8,%ebp
+	jnz	.L035pw_neg_loop
+.L034pw_neg_finish:
+	movl	36(%esp),%edx
+	movl	$0,%ebp
+	subl	%edx,%ebp
+	andl	$7,%ebp
+	jz	.L032pw_end
+
+	movl	$0,%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,(%ebx)
+	jz	.L032pw_end
+
+	movl	$0,%ecx
+	movl	4(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,4(%ebx)
+	jz	.L032pw_end
+
+	movl	$0,%ecx
+	movl	8(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,8(%ebx)
+	jz	.L032pw_end
+
+	movl	$0,%ecx
+	movl	12(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,12(%ebx)
+	jz	.L032pw_end
+
+	movl	$0,%ecx
+	movl	16(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,16(%ebx)
+	jz	.L032pw_end
+
+	movl	$0,%ecx
+	movl	20(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,20(%ebx)
+	jz	.L032pw_end
+
+	movl	$0,%ecx
+	movl	24(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,24(%ebx)
+	jmp	.L032pw_end
+.L033pw_pos:
+	andl	$4294967288,%ebp
+	jz	.L036pw_pos_finish
+.L037pw_pos_loop:
+
+	movl	(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,(%ebx)
+	jnc	.L038pw_nc0
+
+	movl	4(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,4(%ebx)
+	jnc	.L039pw_nc1
+
+	movl	8(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,8(%ebx)
+	jnc	.L040pw_nc2
+
+	movl	12(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,12(%ebx)
+	jnc	.L041pw_nc3
+
+	movl	16(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,16(%ebx)
+	jnc	.L042pw_nc4
+
+	movl	20(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,20(%ebx)
+	jnc	.L043pw_nc5
+
+	movl	24(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,24(%ebx)
+	jnc	.L044pw_nc6
+
+	movl	28(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,28(%ebx)
+	jnc	.L045pw_nc7
+
+	addl	$32,%esi
+	addl	$32,%ebx
+	subl	$8,%ebp
+	jnz	.L037pw_pos_loop
+.L036pw_pos_finish:
+	movl	36(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L032pw_end
+
+	movl	(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,(%ebx)
+	jnc	.L046pw_tail_nc0
+	decl	%ebp
+	jz	.L032pw_end
+
+	movl	4(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,4(%ebx)
+	jnc	.L047pw_tail_nc1
+	decl	%ebp
+	jz	.L032pw_end
+
+	movl	8(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,8(%ebx)
+	jnc	.L048pw_tail_nc2
+	decl	%ebp
+	jz	.L032pw_end
+
+	movl	12(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,12(%ebx)
+	jnc	.L049pw_tail_nc3
+	decl	%ebp
+	jz	.L032pw_end
+
+	movl	16(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,16(%ebx)
+	jnc	.L050pw_tail_nc4
+	decl	%ebp
+	jz	.L032pw_end
+
+	movl	20(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,20(%ebx)
+	jnc	.L051pw_tail_nc5
+	decl	%ebp
+	jz	.L032pw_end
+
+	movl	24(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,24(%ebx)
+	jnc	.L052pw_tail_nc6
+	movl	$1,%eax
+	jmp	.L032pw_end
+.L053pw_nc_loop:
+	movl	(%esi),%ecx
+	movl	%ecx,(%ebx)
+.L038pw_nc0:
+	movl	4(%esi),%ecx
+	movl	%ecx,4(%ebx)
+.L039pw_nc1:
+	movl	8(%esi),%ecx
+	movl	%ecx,8(%ebx)
+.L040pw_nc2:
+	movl	12(%esi),%ecx
+	movl	%ecx,12(%ebx)
+.L041pw_nc3:
+	movl	16(%esi),%ecx
+	movl	%ecx,16(%ebx)
+.L042pw_nc4:
+	movl	20(%esi),%ecx
+	movl	%ecx,20(%ebx)
+.L043pw_nc5:
+	movl	24(%esi),%ecx
+	movl	%ecx,24(%ebx)
+.L044pw_nc6:
+	movl	28(%esi),%ecx
+	movl	%ecx,28(%ebx)
+.L045pw_nc7:
+
+	addl	$32,%esi
+	addl	$32,%ebx
+	subl	$8,%ebp
+	jnz	.L053pw_nc_loop
+	movl	36(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L054pw_nc_end
+	movl	(%esi),%ecx
+	movl	%ecx,(%ebx)
+.L046pw_tail_nc0:
+	decl	%ebp
+	jz	.L054pw_nc_end
+	movl	4(%esi),%ecx
+	movl	%ecx,4(%ebx)
+.L047pw_tail_nc1:
+	decl	%ebp
+	jz	.L054pw_nc_end
+	movl	8(%esi),%ecx
+	movl	%ecx,8(%ebx)
+.L048pw_tail_nc2:
+	decl	%ebp
+	jz	.L054pw_nc_end
+	movl	12(%esi),%ecx
+	movl	%ecx,12(%ebx)
+.L049pw_tail_nc3:
+	decl	%ebp
+	jz	.L054pw_nc_end
+	movl	16(%esi),%ecx
+	movl	%ecx,16(%ebx)
+.L050pw_tail_nc4:
+	decl	%ebp
+	jz	.L054pw_nc_end
+	movl	20(%esi),%ecx
+	movl	%ecx,20(%ebx)
+.L051pw_tail_nc5:
+	decl	%ebp
+	jz	.L054pw_nc_end
+	movl	24(%esi),%ecx
+	movl	%ecx,24(%ebx)
+.L052pw_tail_nc6:
+.L054pw_nc_end:
+	movl	$0,%eax
+.L032pw_end:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	bn_sub_part_words,.-.L_bn_sub_part_words_begin
+.comm	OPENSSL_ia32cap_P,8,4
+#else
+.file	"bn-586.S"
+.text
+.globl	bn_mul_add_words
+.type	bn_mul_add_words,@function
+.align	16
+bn_mul_add_words:
+.L_bn_mul_add_words_begin:
+	leal	OPENSSL_ia32cap_P,%eax
+	btl	$26,(%eax)
+	jnc	.L000maw_non_sse2
+	movl	4(%esp),%eax
+	movl	8(%esp),%edx
+	movl	12(%esp),%ecx
+	movd	16(%esp),%mm0
+	pxor	%mm1,%mm1
+	jmp	.L001maw_sse2_entry
+.align	16
+.L002maw_sse2_unrolled:
+	movd	(%eax),%mm3
+	paddq	%mm3,%mm1
+	movd	(%edx),%mm2
+	pmuludq	%mm0,%mm2
+	movd	4(%edx),%mm4
+	pmuludq	%mm0,%mm4
+	movd	8(%edx),%mm6
+	pmuludq	%mm0,%mm6
+	movd	12(%edx),%mm7
+	pmuludq	%mm0,%mm7
+	paddq	%mm2,%mm1
+	movd	4(%eax),%mm3
+	paddq	%mm4,%mm3
+	movd	8(%eax),%mm5
+	paddq	%mm6,%mm5
+	movd	12(%eax),%mm4
+	paddq	%mm4,%mm7
+	movd	%mm1,(%eax)
+	movd	16(%edx),%mm2
+	pmuludq	%mm0,%mm2
+	psrlq	$32,%mm1
+	movd	20(%edx),%mm4
+	pmuludq	%mm0,%mm4
+	paddq	%mm3,%mm1
+	movd	24(%edx),%mm6
+	pmuludq	%mm0,%mm6
+	movd	%mm1,4(%eax)
+	psrlq	$32,%mm1
+	movd	28(%edx),%mm3
+	addl	$32,%edx
+	pmuludq	%mm0,%mm3
+	paddq	%mm5,%mm1
+	movd	16(%eax),%mm5
+	paddq	%mm5,%mm2
+	movd	%mm1,8(%eax)
+	psrlq	$32,%mm1
+	paddq	%mm7,%mm1
+	movd	20(%eax),%mm5
+	paddq	%mm5,%mm4
+	movd	%mm1,12(%eax)
+	psrlq	$32,%mm1
+	paddq	%mm2,%mm1
+	movd	24(%eax),%mm5
+	paddq	%mm5,%mm6
+	movd	%mm1,16(%eax)
+	psrlq	$32,%mm1
+	paddq	%mm4,%mm1
+	movd	28(%eax),%mm5
+	paddq	%mm5,%mm3
+	movd	%mm1,20(%eax)
+	psrlq	$32,%mm1
+	paddq	%mm6,%mm1
+	movd	%mm1,24(%eax)
+	psrlq	$32,%mm1
+	paddq	%mm3,%mm1
+	movd	%mm1,28(%eax)
+	leal	32(%eax),%eax
+	psrlq	$32,%mm1
+	subl	$8,%ecx
+	jz	.L003maw_sse2_exit
+.L001maw_sse2_entry:
+	testl	$4294967288,%ecx
+	jnz	.L002maw_sse2_unrolled
+.align	4
+.L004maw_sse2_loop:
+	movd	(%edx),%mm2
+	movd	(%eax),%mm3
+	pmuludq	%mm0,%mm2
+	leal	4(%edx),%edx
+	paddq	%mm3,%mm1
+	paddq	%mm2,%mm1
+	movd	%mm1,(%eax)
+	subl	$1,%ecx
+	psrlq	$32,%mm1
+	leal	4(%eax),%eax
+	jnz	.L004maw_sse2_loop
+.L003maw_sse2_exit:
+	movd	%mm1,%eax
+	emms
+	ret
+.align	16
+.L000maw_non_sse2:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+
+	xorl	%esi,%esi
+	movl	20(%esp),%edi
+	movl	28(%esp),%ecx
+	movl	24(%esp),%ebx
+	andl	$4294967288,%ecx
+	movl	32(%esp),%ebp
+	pushl	%ecx
+	jz	.L005maw_finish
+.align	16
+.L006maw_loop:
+
+	movl	(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	(%edi),%eax
+	adcl	$0,%edx
+	movl	%eax,(%edi)
+	movl	%edx,%esi
+
+	movl	4(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	4(%edi),%eax
+	adcl	$0,%edx
+	movl	%eax,4(%edi)
+	movl	%edx,%esi
+
+	movl	8(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	8(%edi),%eax
+	adcl	$0,%edx
+	movl	%eax,8(%edi)
+	movl	%edx,%esi
+
+	movl	12(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	12(%edi),%eax
+	adcl	$0,%edx
+	movl	%eax,12(%edi)
+	movl	%edx,%esi
+
+	movl	16(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	16(%edi),%eax
+	adcl	$0,%edx
+	movl	%eax,16(%edi)
+	movl	%edx,%esi
+
+	movl	20(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	20(%edi),%eax
+	adcl	$0,%edx
+	movl	%eax,20(%edi)
+	movl	%edx,%esi
+
+	movl	24(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	24(%edi),%eax
+	adcl	$0,%edx
+	movl	%eax,24(%edi)
+	movl	%edx,%esi
+
+	movl	28(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	28(%edi),%eax
+	adcl	$0,%edx
+	movl	%eax,28(%edi)
+	movl	%edx,%esi
+
+	subl	$8,%ecx
+	leal	32(%ebx),%ebx
+	leal	32(%edi),%edi
+	jnz	.L006maw_loop
+.L005maw_finish:
+	movl	32(%esp),%ecx
+	andl	$7,%ecx
+	jnz	.L007maw_finish2
+	jmp	.L008maw_end
+.L007maw_finish2:
+
+	movl	(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	(%edi),%eax
+	adcl	$0,%edx
+	decl	%ecx
+	movl	%eax,(%edi)
+	movl	%edx,%esi
+	jz	.L008maw_end
+
+	movl	4(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	4(%edi),%eax
+	adcl	$0,%edx
+	decl	%ecx
+	movl	%eax,4(%edi)
+	movl	%edx,%esi
+	jz	.L008maw_end
+
+	movl	8(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	8(%edi),%eax
+	adcl	$0,%edx
+	decl	%ecx
+	movl	%eax,8(%edi)
+	movl	%edx,%esi
+	jz	.L008maw_end
+
+	movl	12(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	12(%edi),%eax
+	adcl	$0,%edx
+	decl	%ecx
+	movl	%eax,12(%edi)
+	movl	%edx,%esi
+	jz	.L008maw_end
+
+	movl	16(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	16(%edi),%eax
+	adcl	$0,%edx
+	decl	%ecx
+	movl	%eax,16(%edi)
+	movl	%edx,%esi
+	jz	.L008maw_end
+
+	movl	20(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	20(%edi),%eax
+	adcl	$0,%edx
+	decl	%ecx
+	movl	%eax,20(%edi)
+	movl	%edx,%esi
+	jz	.L008maw_end
+
+	movl	24(%ebx),%eax
+	mull	%ebp
+	addl	%esi,%eax
+	adcl	$0,%edx
+	addl	24(%edi),%eax
+	adcl	$0,%edx
+	movl	%eax,24(%edi)
+	movl	%edx,%esi
+.L008maw_end:
+	movl	%esi,%eax
+	popl	%ecx
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	bn_mul_add_words,.-.L_bn_mul_add_words_begin
+.globl	bn_mul_words
+.type	bn_mul_words,@function
+.align	16
+bn_mul_words:
+.L_bn_mul_words_begin:
+	leal	OPENSSL_ia32cap_P,%eax
+	btl	$26,(%eax)
+	jnc	.L009mw_non_sse2
+	movl	4(%esp),%eax
+	movl	8(%esp),%edx
+	movl	12(%esp),%ecx
+	movd	16(%esp),%mm0
+	pxor	%mm1,%mm1
+.align	16
+.L010mw_sse2_loop:
+	movd	(%edx),%mm2
+	pmuludq	%mm0,%mm2
+	leal	4(%edx),%edx
+	paddq	%mm2,%mm1
+	movd	%mm1,(%eax)
+	subl	$1,%ecx
+	psrlq	$32,%mm1
+	leal	4(%eax),%eax
+	jnz	.L010mw_sse2_loop
+	movd	%mm1,%eax
+	emms
+	ret
+.align	16
+.L009mw_non_sse2:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+
+	xorl	%esi,%esi
+	movl	20(%esp),%edi
+	movl	24(%esp),%ebx
+	movl	28(%esp),%ebp
+	movl	32(%esp),%ecx
+	andl	$4294967288,%ebp
+	jz	.L011mw_finish
+.L012mw_loop:
+
+	movl	(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,(%edi)
+	movl	%edx,%esi
+
+	movl	4(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,4(%edi)
+	movl	%edx,%esi
+
+	movl	8(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,8(%edi)
+	movl	%edx,%esi
+
+	movl	12(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,12(%edi)
+	movl	%edx,%esi
+
+	movl	16(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,16(%edi)
+	movl	%edx,%esi
+
+	movl	20(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,20(%edi)
+	movl	%edx,%esi
+
+	movl	24(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,24(%edi)
+	movl	%edx,%esi
+
+	movl	28(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,28(%edi)
+	movl	%edx,%esi
+
+	addl	$32,%ebx
+	addl	$32,%edi
+	subl	$8,%ebp
+	jz	.L011mw_finish
+	jmp	.L012mw_loop
+.L011mw_finish:
+	movl	28(%esp),%ebp
+	andl	$7,%ebp
+	jnz	.L013mw_finish2
+	jmp	.L014mw_end
+.L013mw_finish2:
+
+	movl	(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,(%edi)
+	movl	%edx,%esi
+	decl	%ebp
+	jz	.L014mw_end
+
+	movl	4(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,4(%edi)
+	movl	%edx,%esi
+	decl	%ebp
+	jz	.L014mw_end
+
+	movl	8(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,8(%edi)
+	movl	%edx,%esi
+	decl	%ebp
+	jz	.L014mw_end
+
+	movl	12(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,12(%edi)
+	movl	%edx,%esi
+	decl	%ebp
+	jz	.L014mw_end
+
+	movl	16(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,16(%edi)
+	movl	%edx,%esi
+	decl	%ebp
+	jz	.L014mw_end
+
+	movl	20(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,20(%edi)
+	movl	%edx,%esi
+	decl	%ebp
+	jz	.L014mw_end
+
+	movl	24(%ebx),%eax
+	mull	%ecx
+	addl	%esi,%eax
+	adcl	$0,%edx
+	movl	%eax,24(%edi)
+	movl	%edx,%esi
+.L014mw_end:
+	movl	%esi,%eax
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	bn_mul_words,.-.L_bn_mul_words_begin
+.globl	bn_sqr_words
+.type	bn_sqr_words,@function
+.align	16
+bn_sqr_words:
+.L_bn_sqr_words_begin:
+	leal	OPENSSL_ia32cap_P,%eax
+	btl	$26,(%eax)
+	jnc	.L015sqr_non_sse2
+	movl	4(%esp),%eax
+	movl	8(%esp),%edx
+	movl	12(%esp),%ecx
+.align	16
+.L016sqr_sse2_loop:
+	movd	(%edx),%mm0
+	pmuludq	%mm0,%mm0
+	leal	4(%edx),%edx
+	movq	%mm0,(%eax)
+	subl	$1,%ecx
+	leal	8(%eax),%eax
+	jnz	.L016sqr_sse2_loop
+	emms
+	ret
+.align	16
+.L015sqr_non_sse2:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%ebx
+	andl	$4294967288,%ebx
+	jz	.L017sw_finish
+.L018sw_loop:
+
+	movl	(%edi),%eax
+	mull	%eax
+	movl	%eax,(%esi)
+	movl	%edx,4(%esi)
+
+	movl	4(%edi),%eax
+	mull	%eax
+	movl	%eax,8(%esi)
+	movl	%edx,12(%esi)
+
+	movl	8(%edi),%eax
+	mull	%eax
+	movl	%eax,16(%esi)
+	movl	%edx,20(%esi)
+
+	movl	12(%edi),%eax
+	mull	%eax
+	movl	%eax,24(%esi)
+	movl	%edx,28(%esi)
+
+	movl	16(%edi),%eax
+	mull	%eax
+	movl	%eax,32(%esi)
+	movl	%edx,36(%esi)
+
+	movl	20(%edi),%eax
+	mull	%eax
+	movl	%eax,40(%esi)
+	movl	%edx,44(%esi)
+
+	movl	24(%edi),%eax
+	mull	%eax
+	movl	%eax,48(%esi)
+	movl	%edx,52(%esi)
+
+	movl	28(%edi),%eax
+	mull	%eax
+	movl	%eax,56(%esi)
+	movl	%edx,60(%esi)
+
+	addl	$32,%edi
+	addl	$64,%esi
+	subl	$8,%ebx
+	jnz	.L018sw_loop
+.L017sw_finish:
+	movl	28(%esp),%ebx
+	andl	$7,%ebx
+	jz	.L019sw_end
+
+	movl	(%edi),%eax
+	mull	%eax
+	movl	%eax,(%esi)
+	decl	%ebx
+	movl	%edx,4(%esi)
+	jz	.L019sw_end
+
+	movl	4(%edi),%eax
+	mull	%eax
+	movl	%eax,8(%esi)
+	decl	%ebx
+	movl	%edx,12(%esi)
+	jz	.L019sw_end
+
+	movl	8(%edi),%eax
+	mull	%eax
+	movl	%eax,16(%esi)
+	decl	%ebx
+	movl	%edx,20(%esi)
+	jz	.L019sw_end
+
+	movl	12(%edi),%eax
+	mull	%eax
+	movl	%eax,24(%esi)
+	decl	%ebx
+	movl	%edx,28(%esi)
+	jz	.L019sw_end
+
+	movl	16(%edi),%eax
+	mull	%eax
+	movl	%eax,32(%esi)
+	decl	%ebx
+	movl	%edx,36(%esi)
+	jz	.L019sw_end
+
+	movl	20(%edi),%eax
+	mull	%eax
+	movl	%eax,40(%esi)
+	decl	%ebx
+	movl	%edx,44(%esi)
+	jz	.L019sw_end
+
+	movl	24(%edi),%eax
+	mull	%eax
+	movl	%eax,48(%esi)
+	movl	%edx,52(%esi)
+.L019sw_end:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	bn_sqr_words,.-.L_bn_sqr_words_begin
+.globl	bn_div_words
+.type	bn_div_words,@function
+.align	16
+bn_div_words:
+.L_bn_div_words_begin:
+	movl	4(%esp),%edx
+	movl	8(%esp),%eax
+	movl	12(%esp),%ecx
+	divl	%ecx
+	ret
+.size	bn_div_words,.-.L_bn_div_words_begin
+.globl	bn_add_words
+.type	bn_add_words,@function
+.align	16
+bn_add_words:
+.L_bn_add_words_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+
+	movl	20(%esp),%ebx
+	movl	24(%esp),%esi
+	movl	28(%esp),%edi
+	movl	32(%esp),%ebp
+	xorl	%eax,%eax
+	andl	$4294967288,%ebp
+	jz	.L020aw_finish
+.L021aw_loop:
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+
+	movl	4(%esi),%ecx
+	movl	4(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,4(%ebx)
+
+	movl	8(%esi),%ecx
+	movl	8(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,8(%ebx)
+
+	movl	12(%esi),%ecx
+	movl	12(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,12(%ebx)
+
+	movl	16(%esi),%ecx
+	movl	16(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,16(%ebx)
+
+	movl	20(%esi),%ecx
+	movl	20(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,20(%ebx)
+
+	movl	24(%esi),%ecx
+	movl	24(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,24(%ebx)
+
+	movl	28(%esi),%ecx
+	movl	28(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,28(%ebx)
+
+	addl	$32,%esi
+	addl	$32,%edi
+	addl	$32,%ebx
+	subl	$8,%ebp
+	jnz	.L021aw_loop
+.L020aw_finish:
+	movl	32(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L022aw_end
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,(%ebx)
+	jz	.L022aw_end
+
+	movl	4(%esi),%ecx
+	movl	4(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,4(%ebx)
+	jz	.L022aw_end
+
+	movl	8(%esi),%ecx
+	movl	8(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,8(%ebx)
+	jz	.L022aw_end
+
+	movl	12(%esi),%ecx
+	movl	12(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,12(%ebx)
+	jz	.L022aw_end
+
+	movl	16(%esi),%ecx
+	movl	16(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,16(%ebx)
+	jz	.L022aw_end
+
+	movl	20(%esi),%ecx
+	movl	20(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,20(%ebx)
+	jz	.L022aw_end
+
+	movl	24(%esi),%ecx
+	movl	24(%edi),%edx
+	addl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	addl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,24(%ebx)
+.L022aw_end:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	bn_add_words,.-.L_bn_add_words_begin
+.globl	bn_sub_words
+.type	bn_sub_words,@function
+.align	16
+bn_sub_words:
+.L_bn_sub_words_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+
+	movl	20(%esp),%ebx
+	movl	24(%esp),%esi
+	movl	28(%esp),%edi
+	movl	32(%esp),%ebp
+	xorl	%eax,%eax
+	andl	$4294967288,%ebp
+	jz	.L023aw_finish
+.L024aw_loop:
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+
+	movl	4(%esi),%ecx
+	movl	4(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,4(%ebx)
+
+	movl	8(%esi),%ecx
+	movl	8(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,8(%ebx)
+
+	movl	12(%esi),%ecx
+	movl	12(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,12(%ebx)
+
+	movl	16(%esi),%ecx
+	movl	16(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,16(%ebx)
+
+	movl	20(%esi),%ecx
+	movl	20(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,20(%ebx)
+
+	movl	24(%esi),%ecx
+	movl	24(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,24(%ebx)
+
+	movl	28(%esi),%ecx
+	movl	28(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,28(%ebx)
+
+	addl	$32,%esi
+	addl	$32,%edi
+	addl	$32,%ebx
+	subl	$8,%ebp
+	jnz	.L024aw_loop
+.L023aw_finish:
+	movl	32(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L025aw_end
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,(%ebx)
+	jz	.L025aw_end
+
+	movl	4(%esi),%ecx
+	movl	4(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,4(%ebx)
+	jz	.L025aw_end
+
+	movl	8(%esi),%ecx
+	movl	8(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,8(%ebx)
+	jz	.L025aw_end
+
+	movl	12(%esi),%ecx
+	movl	12(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,12(%ebx)
+	jz	.L025aw_end
+
+	movl	16(%esi),%ecx
+	movl	16(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,16(%ebx)
+	jz	.L025aw_end
+
+	movl	20(%esi),%ecx
+	movl	20(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,20(%ebx)
+	jz	.L025aw_end
+
+	movl	24(%esi),%ecx
+	movl	24(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,24(%ebx)
+.L025aw_end:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	bn_sub_words,.-.L_bn_sub_words_begin
+.globl	bn_sub_part_words
+.type	bn_sub_part_words,@function
+.align	16
+bn_sub_part_words:
+.L_bn_sub_part_words_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+
+	movl	20(%esp),%ebx
+	movl	24(%esp),%esi
+	movl	28(%esp),%edi
+	movl	32(%esp),%ebp
+	xorl	%eax,%eax
+	andl	$4294967288,%ebp
+	jz	.L026aw_finish
+.L027aw_loop:
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+
+	movl	4(%esi),%ecx
+	movl	4(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,4(%ebx)
+
+	movl	8(%esi),%ecx
+	movl	8(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,8(%ebx)
+
+	movl	12(%esi),%ecx
+	movl	12(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,12(%ebx)
+
+	movl	16(%esi),%ecx
+	movl	16(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,16(%ebx)
+
+	movl	20(%esi),%ecx
+	movl	20(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,20(%ebx)
+
+	movl	24(%esi),%ecx
+	movl	24(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,24(%ebx)
+
+	movl	28(%esi),%ecx
+	movl	28(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,28(%ebx)
+
+	addl	$32,%esi
+	addl	$32,%edi
+	addl	$32,%ebx
+	subl	$8,%ebp
+	jnz	.L027aw_loop
+.L026aw_finish:
+	movl	32(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L028aw_end
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+	addl	$4,%esi
+	addl	$4,%edi
+	addl	$4,%ebx
+	decl	%ebp
+	jz	.L028aw_end
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+	addl	$4,%esi
+	addl	$4,%edi
+	addl	$4,%ebx
+	decl	%ebp
+	jz	.L028aw_end
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+	addl	$4,%esi
+	addl	$4,%edi
+	addl	$4,%ebx
+	decl	%ebp
+	jz	.L028aw_end
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+	addl	$4,%esi
+	addl	$4,%edi
+	addl	$4,%ebx
+	decl	%ebp
+	jz	.L028aw_end
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+	addl	$4,%esi
+	addl	$4,%edi
+	addl	$4,%ebx
+	decl	%ebp
+	jz	.L028aw_end
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+	addl	$4,%esi
+	addl	$4,%edi
+	addl	$4,%ebx
+	decl	%ebp
+	jz	.L028aw_end
+
+	movl	(%esi),%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+	addl	$4,%esi
+	addl	$4,%edi
+	addl	$4,%ebx
+.L028aw_end:
+	cmpl	$0,36(%esp)
+	je	.L029pw_end
+	movl	36(%esp),%ebp
+	cmpl	$0,%ebp
+	je	.L029pw_end
+	jge	.L030pw_pos
+
+	movl	$0,%edx
+	subl	%ebp,%edx
+	movl	%edx,%ebp
+	andl	$4294967288,%ebp
+	jz	.L031pw_neg_finish
+.L032pw_neg_loop:
+
+	movl	$0,%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,(%ebx)
+
+	movl	$0,%ecx
+	movl	4(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,4(%ebx)
+
+	movl	$0,%ecx
+	movl	8(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,8(%ebx)
+
+	movl	$0,%ecx
+	movl	12(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,12(%ebx)
+
+	movl	$0,%ecx
+	movl	16(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,16(%ebx)
+
+	movl	$0,%ecx
+	movl	20(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,20(%ebx)
+
+	movl	$0,%ecx
+	movl	24(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,24(%ebx)
+
+	movl	$0,%ecx
+	movl	28(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,28(%ebx)
+
+	addl	$32,%edi
+	addl	$32,%ebx
+	subl	$8,%ebp
+	jnz	.L032pw_neg_loop
+.L031pw_neg_finish:
+	movl	36(%esp),%edx
+	movl	$0,%ebp
+	subl	%edx,%ebp
+	andl	$7,%ebp
+	jz	.L029pw_end
+
+	movl	$0,%ecx
+	movl	(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,(%ebx)
+	jz	.L029pw_end
+
+	movl	$0,%ecx
+	movl	4(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,4(%ebx)
+	jz	.L029pw_end
+
+	movl	$0,%ecx
+	movl	8(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,8(%ebx)
+	jz	.L029pw_end
+
+	movl	$0,%ecx
+	movl	12(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,12(%ebx)
+	jz	.L029pw_end
+
+	movl	$0,%ecx
+	movl	16(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,16(%ebx)
+	jz	.L029pw_end
+
+	movl	$0,%ecx
+	movl	20(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	decl	%ebp
+	movl	%ecx,20(%ebx)
+	jz	.L029pw_end
+
+	movl	$0,%ecx
+	movl	24(%edi),%edx
+	subl	%eax,%ecx
+	movl	$0,%eax
+	adcl	%eax,%eax
+	subl	%edx,%ecx
+	adcl	$0,%eax
+	movl	%ecx,24(%ebx)
+	jmp	.L029pw_end
+.L030pw_pos:
+	andl	$4294967288,%ebp
+	jz	.L033pw_pos_finish
+.L034pw_pos_loop:
+
+	movl	(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,(%ebx)
+	jnc	.L035pw_nc0
+
+	movl	4(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,4(%ebx)
+	jnc	.L036pw_nc1
+
+	movl	8(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,8(%ebx)
+	jnc	.L037pw_nc2
+
+	movl	12(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,12(%ebx)
+	jnc	.L038pw_nc3
+
+	movl	16(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,16(%ebx)
+	jnc	.L039pw_nc4
+
+	movl	20(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,20(%ebx)
+	jnc	.L040pw_nc5
+
+	movl	24(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,24(%ebx)
+	jnc	.L041pw_nc6
+
+	movl	28(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,28(%ebx)
+	jnc	.L042pw_nc7
+
+	addl	$32,%esi
+	addl	$32,%ebx
+	subl	$8,%ebp
+	jnz	.L034pw_pos_loop
+.L033pw_pos_finish:
+	movl	36(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L029pw_end
+
+	movl	(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,(%ebx)
+	jnc	.L043pw_tail_nc0
+	decl	%ebp
+	jz	.L029pw_end
+
+	movl	4(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,4(%ebx)
+	jnc	.L044pw_tail_nc1
+	decl	%ebp
+	jz	.L029pw_end
+
+	movl	8(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,8(%ebx)
+	jnc	.L045pw_tail_nc2
+	decl	%ebp
+	jz	.L029pw_end
+
+	movl	12(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,12(%ebx)
+	jnc	.L046pw_tail_nc3
+	decl	%ebp
+	jz	.L029pw_end
+
+	movl	16(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,16(%ebx)
+	jnc	.L047pw_tail_nc4
+	decl	%ebp
+	jz	.L029pw_end
+
+	movl	20(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,20(%ebx)
+	jnc	.L048pw_tail_nc5
+	decl	%ebp
+	jz	.L029pw_end
+
+	movl	24(%esi),%ecx
+	subl	%eax,%ecx
+	movl	%ecx,24(%ebx)
+	jnc	.L049pw_tail_nc6
+	movl	$1,%eax
+	jmp	.L029pw_end
+.L050pw_nc_loop:
+	movl	(%esi),%ecx
+	movl	%ecx,(%ebx)
+.L035pw_nc0:
+	movl	4(%esi),%ecx
+	movl	%ecx,4(%ebx)
+.L036pw_nc1:
+	movl	8(%esi),%ecx
+	movl	%ecx,8(%ebx)
+.L037pw_nc2:
+	movl	12(%esi),%ecx
+	movl	%ecx,12(%ebx)
+.L038pw_nc3:
+	movl	16(%esi),%ecx
+	movl	%ecx,16(%ebx)
+.L039pw_nc4:
+	movl	20(%esi),%ecx
+	movl	%ecx,20(%ebx)
+.L040pw_nc5:
+	movl	24(%esi),%ecx
+	movl	%ecx,24(%ebx)
+.L041pw_nc6:
+	movl	28(%esi),%ecx
+	movl	%ecx,28(%ebx)
+.L042pw_nc7:
+
+	addl	$32,%esi
+	addl	$32,%ebx
+	subl	$8,%ebp
+	jnz	.L050pw_nc_loop
+	movl	36(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L051pw_nc_end
+	movl	(%esi),%ecx
+	movl	%ecx,(%ebx)
+.L043pw_tail_nc0:
+	decl	%ebp
+	jz	.L051pw_nc_end
+	movl	4(%esi),%ecx
+	movl	%ecx,4(%ebx)
+.L044pw_tail_nc1:
+	decl	%ebp
+	jz	.L051pw_nc_end
+	movl	8(%esi),%ecx
+	movl	%ecx,8(%ebx)
+.L045pw_tail_nc2:
+	decl	%ebp
+	jz	.L051pw_nc_end
+	movl	12(%esi),%ecx
+	movl	%ecx,12(%ebx)
+.L046pw_tail_nc3:
+	decl	%ebp
+	jz	.L051pw_nc_end
+	movl	16(%esi),%ecx
+	movl	%ecx,16(%ebx)
+.L047pw_tail_nc4:
+	decl	%ebp
+	jz	.L051pw_nc_end
+	movl	20(%esi),%ecx
+	movl	%ecx,20(%ebx)
+.L048pw_tail_nc5:
+	decl	%ebp
+	jz	.L051pw_nc_end
+	movl	24(%esi),%ecx
+	movl	%ecx,24(%ebx)
+.L049pw_tail_nc6:
+.L051pw_nc_end:
+	movl	$0,%eax
+.L029pw_end:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	bn_sub_part_words,.-.L_bn_sub_part_words_begin
+.comm	OPENSSL_ia32cap_P,8,4
+#endif
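A note on the add/sub routines above: they keep the running carry (or
borrow) in %eax between words -- the "movl $0,%eax; adcl %eax,%eax"
pair captures the carry flag into %eax so it survives the next memory
load.  The same bn_add_words contract in minimal C, under the same
32-bit word assumption (bn_add_words_ref is a hypothetical name, not
the generated code):

#include <stdint.h>

/*
 * Hypothetical reference for bn_add_words: r = a + b over num 32-bit
 * words, returning the final carry.  bn_sub_words has the same shape
 * with subtraction and a borrow instead.
 */
static uint32_t
bn_add_words_ref(uint32_t *r, const uint32_t *a, const uint32_t *b, int num)
{
	uint32_t carry = 0;

	while (num-- > 0) {
		/* Word-wide add plus incoming carry; the top 32 bits of
		 * the intermediate are the outgoing carry. */
		uint64_t t = (uint64_t)*a++ + *b++ + carry;
		*r++ = (uint32_t)t;
		carry = (uint32_t)(t >> 32);
	}
	return (carry);
}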


Property changes on: trunk/secure/lib/libcrypto/i386/bn-586.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
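Both the PIC and non-PIC variants of bn-586.S dispatch at run time on
OPENSSL_ia32cap_P: "btl $26,(%eax)" tests CPUID.1:EDX bit 26 (SSE2) and
falls through to the plain integer paths when it is clear; the PIC
build first fetches the symbol's address through the GOT.  A sketch of
the equivalent test in C (have_sse2 is a hypothetical helper):

#include <stdint.h>

/* Matches ".comm OPENSSL_ia32cap_P,8,4" above; filled in by OpenSSL's
 * cpuid setup code at library initialization. */
extern uint32_t OPENSSL_ia32cap_P[2];

/* Hypothetical helper mirroring "btl $26,(%eax)": bit 26 of the first
 * capability word is the CPUID SSE2 feature flag. */
static int
have_sse2(void)
{
	return ((OPENSSL_ia32cap_P[0] >> 26) & 1);
}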
Deleted: trunk/secure/lib/libcrypto/i386/bn-586.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/bn-586.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/bn-586.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,1522 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/bn-586.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"bn-586.s"
-.text
-.globl	bn_mul_add_words
-.type	bn_mul_add_words,@function
-.align	16
-bn_mul_add_words:
-.L_bn_mul_add_words_begin:
-	leal	OPENSSL_ia32cap_P,%eax
-	btl	$26,(%eax)
-	jnc	.L000maw_non_sse2
-	movl	4(%esp),%eax
-	movl	8(%esp),%edx
-	movl	12(%esp),%ecx
-	movd	16(%esp),%mm0
-	pxor	%mm1,%mm1
-	jmp	.L001maw_sse2_entry
-.align	16
-.L002maw_sse2_unrolled:
-	movd	(%eax),%mm3
-	paddq	%mm3,%mm1
-	movd	(%edx),%mm2
-	pmuludq	%mm0,%mm2
-	movd	4(%edx),%mm4
-	pmuludq	%mm0,%mm4
-	movd	8(%edx),%mm6
-	pmuludq	%mm0,%mm6
-	movd	12(%edx),%mm7
-	pmuludq	%mm0,%mm7
-	paddq	%mm2,%mm1
-	movd	4(%eax),%mm3
-	paddq	%mm4,%mm3
-	movd	8(%eax),%mm5
-	paddq	%mm6,%mm5
-	movd	12(%eax),%mm4
-	paddq	%mm4,%mm7
-	movd	%mm1,(%eax)
-	movd	16(%edx),%mm2
-	pmuludq	%mm0,%mm2
-	psrlq	$32,%mm1
-	movd	20(%edx),%mm4
-	pmuludq	%mm0,%mm4
-	paddq	%mm3,%mm1
-	movd	24(%edx),%mm6
-	pmuludq	%mm0,%mm6
-	movd	%mm1,4(%eax)
-	psrlq	$32,%mm1
-	movd	28(%edx),%mm3
-	addl	$32,%edx
-	pmuludq	%mm0,%mm3
-	paddq	%mm5,%mm1
-	movd	16(%eax),%mm5
-	paddq	%mm5,%mm2
-	movd	%mm1,8(%eax)
-	psrlq	$32,%mm1
-	paddq	%mm7,%mm1
-	movd	20(%eax),%mm5
-	paddq	%mm5,%mm4
-	movd	%mm1,12(%eax)
-	psrlq	$32,%mm1
-	paddq	%mm2,%mm1
-	movd	24(%eax),%mm5
-	paddq	%mm5,%mm6
-	movd	%mm1,16(%eax)
-	psrlq	$32,%mm1
-	paddq	%mm4,%mm1
-	movd	28(%eax),%mm5
-	paddq	%mm5,%mm3
-	movd	%mm1,20(%eax)
-	psrlq	$32,%mm1
-	paddq	%mm6,%mm1
-	movd	%mm1,24(%eax)
-	psrlq	$32,%mm1
-	paddq	%mm3,%mm1
-	movd	%mm1,28(%eax)
-	leal	32(%eax),%eax
-	psrlq	$32,%mm1
-	subl	$8,%ecx
-	jz	.L003maw_sse2_exit
-.L001maw_sse2_entry:
-	testl	$4294967288,%ecx
-	jnz	.L002maw_sse2_unrolled
-.align	4
-.L004maw_sse2_loop:
-	movd	(%edx),%mm2
-	movd	(%eax),%mm3
-	pmuludq	%mm0,%mm2
-	leal	4(%edx),%edx
-	paddq	%mm3,%mm1
-	paddq	%mm2,%mm1
-	movd	%mm1,(%eax)
-	subl	$1,%ecx
-	psrlq	$32,%mm1
-	leal	4(%eax),%eax
-	jnz	.L004maw_sse2_loop
-.L003maw_sse2_exit:
-	movd	%mm1,%eax
-	emms
-	ret
-.align	16
-.L000maw_non_sse2:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-
-	xorl	%esi,%esi
-	movl	20(%esp),%edi
-	movl	28(%esp),%ecx
-	movl	24(%esp),%ebx
-	andl	$4294967288,%ecx
-	movl	32(%esp),%ebp
-	pushl	%ecx
-	jz	.L005maw_finish
-.align	16
-.L006maw_loop:
-
-	movl	(%ebx),%eax
-	mull	%ebp
-	addl	%esi,%eax
-	adcl	$0,%edx
-	addl	(%edi),%eax
-	adcl	$0,%edx
-	movl	%eax,(%edi)
-	movl	%edx,%esi
-
-	movl	4(%ebx),%eax
-	mull	%ebp
-	addl	%esi,%eax
-	adcl	$0,%edx
-	addl	4(%edi),%eax
-	adcl	$0,%edx
-	movl	%eax,4(%edi)
-	movl	%edx,%esi
-
-	movl	8(%ebx),%eax
-	mull	%ebp
-	addl	%esi,%eax
-	adcl	$0,%edx
-	addl	8(%edi),%eax
-	adcl	$0,%edx
-	movl	%eax,8(%edi)
-	movl	%edx,%esi
-
-	movl	12(%ebx),%eax
-	mull	%ebp
-	addl	%esi,%eax
-	adcl	$0,%edx
-	addl	12(%edi),%eax
-	adcl	$0,%edx
-	movl	%eax,12(%edi)
-	movl	%edx,%esi
-
-	movl	16(%ebx),%eax
-	mull	%ebp
-	addl	%esi,%eax
-	adcl	$0,%edx
-	addl	16(%edi),%eax
-	adcl	$0,%edx
-	movl	%eax,16(%edi)
-	movl	%edx,%esi
-
-	movl	20(%ebx),%eax
-	mull	%ebp
-	addl	%esi,%eax
-	adcl	$0,%edx
-	addl	20(%edi),%eax
-	adcl	$0,%edx
-	movl	%eax,20(%edi)
-	movl	%edx,%esi
-
-	movl	24(%ebx),%eax
-	mull	%ebp
-	addl	%esi,%eax
-	adcl	$0,%edx
-	addl	24(%edi),%eax
-	adcl	$0,%edx
-	movl	%eax,24(%edi)
-	movl	%edx,%esi
-
-	movl	28(%ebx),%eax
-	mull	%ebp
-	addl	%esi,%eax
-	adcl	$0,%edx
-	addl	28(%edi),%eax
-	adcl	$0,%edx
-	movl	%eax,28(%edi)
-	movl	%edx,%esi
-
-	subl	$8,%ecx
-	leal	32(%ebx),%ebx
-	leal	32(%edi),%edi
-	jnz	.L006maw_loop
-.L005maw_finish:
-	movl	32(%esp),%ecx
-	andl	$7,%ecx
-	jnz	.L007maw_finish2
-	jmp	.L008maw_end
-.L007maw_finish2:
-
-	movl	(%ebx),%eax
-	mull	%ebp
-	addl	%esi,%eax
-	adcl	$0,%edx
-	addl	(%edi),%eax
-	adcl	$0,%edx
-	decl	%ecx
-	movl	%eax,(%edi)
-	movl	%edx,%esi
-	jz	.L008maw_end
-
-	movl	4(%ebx),%eax
-	mull	%ebp
-	addl	%esi,%eax
-	adcl	$0,%edx
-	addl	4(%edi),%eax
-	adcl	$0,%edx
-	decl	%ecx
-	movl	%eax,4(%edi)
-	movl	%edx,%esi
-	jz	.L008maw_end
-
-	movl	8(%ebx),%eax
-	mull	%ebp
-	addl	%esi,%eax
-	adcl	$0,%edx
-	addl	8(%edi),%eax
-	adcl	$0,%edx
-	decl	%ecx
-	movl	%eax,8(%edi)
-	movl	%edx,%esi
-	jz	.L008maw_end
-
-	movl	12(%ebx),%eax
-	mull	%ebp
-	addl	%esi,%eax
-	adcl	$0,%edx
-	addl	12(%edi),%eax
-	adcl	$0,%edx
-	decl	%ecx
-	movl	%eax,12(%edi)
-	movl	%edx,%esi
-	jz	.L008maw_end
-
-	movl	16(%ebx),%eax
-	mull	%ebp
-	addl	%esi,%eax
-	adcl	$0,%edx
-	addl	16(%edi),%eax
-	adcl	$0,%edx
-	decl	%ecx
-	movl	%eax,16(%edi)
-	movl	%edx,%esi
-	jz	.L008maw_end
-
-	movl	20(%ebx),%eax
-	mull	%ebp
-	addl	%esi,%eax
-	adcl	$0,%edx
-	addl	20(%edi),%eax
-	adcl	$0,%edx
-	decl	%ecx
-	movl	%eax,20(%edi)
-	movl	%edx,%esi
-	jz	.L008maw_end
-
-	movl	24(%ebx),%eax
-	mull	%ebp
-	addl	%esi,%eax
-	adcl	$0,%edx
-	addl	24(%edi),%eax
-	adcl	$0,%edx
-	movl	%eax,24(%edi)
-	movl	%edx,%esi
-.L008maw_end:
-	movl	%esi,%eax
-	popl	%ecx
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	bn_mul_add_words,.-.L_bn_mul_add_words_begin
-.globl	bn_mul_words
-.type	bn_mul_words, at function
-.align	16
-bn_mul_words:
-.L_bn_mul_words_begin:
-	leal	OPENSSL_ia32cap_P,%eax
-	btl	$26,(%eax)
-	jnc	.L009mw_non_sse2
-	movl	4(%esp),%eax
-	movl	8(%esp),%edx
-	movl	12(%esp),%ecx
-	movd	16(%esp),%mm0
-	pxor	%mm1,%mm1
-.align	16
-.L010mw_sse2_loop:
-	movd	(%edx),%mm2
-	pmuludq	%mm0,%mm2
-	leal	4(%edx),%edx
-	paddq	%mm2,%mm1
-	movd	%mm1,(%eax)
-	subl	$1,%ecx
-	psrlq	$32,%mm1
-	leal	4(%eax),%eax
-	jnz	.L010mw_sse2_loop
-	movd	%mm1,%eax
-	emms
-	ret
-.align	16
-.L009mw_non_sse2:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-
-	xorl	%esi,%esi
-	movl	20(%esp),%edi
-	movl	24(%esp),%ebx
-	movl	28(%esp),%ebp
-	movl	32(%esp),%ecx
-	andl	$4294967288,%ebp
-	jz	.L011mw_finish
-.L012mw_loop:
-
-	movl	(%ebx),%eax
-	mull	%ecx
-	addl	%esi,%eax
-	adcl	$0,%edx
-	movl	%eax,(%edi)
-	movl	%edx,%esi
-
-	movl	4(%ebx),%eax
-	mull	%ecx
-	addl	%esi,%eax
-	adcl	$0,%edx
-	movl	%eax,4(%edi)
-	movl	%edx,%esi
-
-	movl	8(%ebx),%eax
-	mull	%ecx
-	addl	%esi,%eax
-	adcl	$0,%edx
-	movl	%eax,8(%edi)
-	movl	%edx,%esi
-
-	movl	12(%ebx),%eax
-	mull	%ecx
-	addl	%esi,%eax
-	adcl	$0,%edx
-	movl	%eax,12(%edi)
-	movl	%edx,%esi
-
-	movl	16(%ebx),%eax
-	mull	%ecx
-	addl	%esi,%eax
-	adcl	$0,%edx
-	movl	%eax,16(%edi)
-	movl	%edx,%esi
-
-	movl	20(%ebx),%eax
-	mull	%ecx
-	addl	%esi,%eax
-	adcl	$0,%edx
-	movl	%eax,20(%edi)
-	movl	%edx,%esi
-
-	movl	24(%ebx),%eax
-	mull	%ecx
-	addl	%esi,%eax
-	adcl	$0,%edx
-	movl	%eax,24(%edi)
-	movl	%edx,%esi
-
-	movl	28(%ebx),%eax
-	mull	%ecx
-	addl	%esi,%eax
-	adcl	$0,%edx
-	movl	%eax,28(%edi)
-	movl	%edx,%esi
-
-	addl	$32,%ebx
-	addl	$32,%edi
-	subl	$8,%ebp
-	jz	.L011mw_finish
-	jmp	.L012mw_loop
-.L011mw_finish:
-	movl	28(%esp),%ebp
-	andl	$7,%ebp
-	jnz	.L013mw_finish2
-	jmp	.L014mw_end
-.L013mw_finish2:
-
-	movl	(%ebx),%eax
-	mull	%ecx
-	addl	%esi,%eax
-	adcl	$0,%edx
-	movl	%eax,(%edi)
-	movl	%edx,%esi
-	decl	%ebp
-	jz	.L014mw_end
-
-	movl	4(%ebx),%eax
-	mull	%ecx
-	addl	%esi,%eax
-	adcl	$0,%edx
-	movl	%eax,4(%edi)
-	movl	%edx,%esi
-	decl	%ebp
-	jz	.L014mw_end
-
-	movl	8(%ebx),%eax
-	mull	%ecx
-	addl	%esi,%eax
-	adcl	$0,%edx
-	movl	%eax,8(%edi)
-	movl	%edx,%esi
-	decl	%ebp
-	jz	.L014mw_end
-
-	movl	12(%ebx),%eax
-	mull	%ecx
-	addl	%esi,%eax
-	adcl	$0,%edx
-	movl	%eax,12(%edi)
-	movl	%edx,%esi
-	decl	%ebp
-	jz	.L014mw_end
-
-	movl	16(%ebx),%eax
-	mull	%ecx
-	addl	%esi,%eax
-	adcl	$0,%edx
-	movl	%eax,16(%edi)
-	movl	%edx,%esi
-	decl	%ebp
-	jz	.L014mw_end
-
-	movl	20(%ebx),%eax
-	mull	%ecx
-	addl	%esi,%eax
-	adcl	$0,%edx
-	movl	%eax,20(%edi)
-	movl	%edx,%esi
-	decl	%ebp
-	jz	.L014mw_end
-
-	movl	24(%ebx),%eax
-	mull	%ecx
-	addl	%esi,%eax
-	adcl	$0,%edx
-	movl	%eax,24(%edi)
-	movl	%edx,%esi
-.L014mw_end:
-	movl	%esi,%eax
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	bn_mul_words,.-.L_bn_mul_words_begin
-.globl	bn_sqr_words
-.type	bn_sqr_words,@function
-.align	16
-bn_sqr_words:
-.L_bn_sqr_words_begin:
-	leal	OPENSSL_ia32cap_P,%eax
-	btl	$26,(%eax)
-	jnc	.L015sqr_non_sse2
-	movl	4(%esp),%eax
-	movl	8(%esp),%edx
-	movl	12(%esp),%ecx
-.align	16
-.L016sqr_sse2_loop:
-	movd	(%edx),%mm0
-	pmuludq	%mm0,%mm0
-	leal	4(%edx),%edx
-	movq	%mm0,(%eax)
-	subl	$1,%ecx
-	leal	8(%eax),%eax
-	jnz	.L016sqr_sse2_loop
-	emms
-	ret
-.align	16
-.L015sqr_non_sse2:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-
-	movl	20(%esp),%esi
-	movl	24(%esp),%edi
-	movl	28(%esp),%ebx
-	andl	$4294967288,%ebx
-	jz	.L017sw_finish
-.L018sw_loop:
-
-	movl	(%edi),%eax
-	mull	%eax
-	movl	%eax,(%esi)
-	movl	%edx,4(%esi)
-
-	movl	4(%edi),%eax
-	mull	%eax
-	movl	%eax,8(%esi)
-	movl	%edx,12(%esi)
-
-	movl	8(%edi),%eax
-	mull	%eax
-	movl	%eax,16(%esi)
-	movl	%edx,20(%esi)
-
-	movl	12(%edi),%eax
-	mull	%eax
-	movl	%eax,24(%esi)
-	movl	%edx,28(%esi)
-
-	movl	16(%edi),%eax
-	mull	%eax
-	movl	%eax,32(%esi)
-	movl	%edx,36(%esi)
-
-	movl	20(%edi),%eax
-	mull	%eax
-	movl	%eax,40(%esi)
-	movl	%edx,44(%esi)
-
-	movl	24(%edi),%eax
-	mull	%eax
-	movl	%eax,48(%esi)
-	movl	%edx,52(%esi)
-
-	movl	28(%edi),%eax
-	mull	%eax
-	movl	%eax,56(%esi)
-	movl	%edx,60(%esi)
-
-	addl	$32,%edi
-	addl	$64,%esi
-	subl	$8,%ebx
-	jnz	.L018sw_loop
-.L017sw_finish:
-	movl	28(%esp),%ebx
-	andl	$7,%ebx
-	jz	.L019sw_end
-
-	movl	(%edi),%eax
-	mull	%eax
-	movl	%eax,(%esi)
-	decl	%ebx
-	movl	%edx,4(%esi)
-	jz	.L019sw_end
-
-	movl	4(%edi),%eax
-	mull	%eax
-	movl	%eax,8(%esi)
-	decl	%ebx
-	movl	%edx,12(%esi)
-	jz	.L019sw_end
-
-	movl	8(%edi),%eax
-	mull	%eax
-	movl	%eax,16(%esi)
-	decl	%ebx
-	movl	%edx,20(%esi)
-	jz	.L019sw_end
-
-	movl	12(%edi),%eax
-	mull	%eax
-	movl	%eax,24(%esi)
-	decl	%ebx
-	movl	%edx,28(%esi)
-	jz	.L019sw_end
-
-	movl	16(%edi),%eax
-	mull	%eax
-	movl	%eax,32(%esi)
-	decl	%ebx
-	movl	%edx,36(%esi)
-	jz	.L019sw_end
-
-	movl	20(%edi),%eax
-	mull	%eax
-	movl	%eax,40(%esi)
-	decl	%ebx
-	movl	%edx,44(%esi)
-	jz	.L019sw_end
-
-	movl	24(%edi),%eax
-	mull	%eax
-	movl	%eax,48(%esi)
-	movl	%edx,52(%esi)
-.L019sw_end:
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	bn_sqr_words,.-.L_bn_sqr_words_begin
-.globl	bn_div_words
-.type	bn_div_words,@function
-.align	16
-bn_div_words:
-.L_bn_div_words_begin:
-	movl	4(%esp),%edx
-	movl	8(%esp),%eax
-	movl	12(%esp),%ecx
-	divl	%ecx
-	ret
-.size	bn_div_words,.-.L_bn_div_words_begin
-.globl	bn_add_words
-.type	bn_add_words,@function
-.align	16
-bn_add_words:
-.L_bn_add_words_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-
-	movl	20(%esp),%ebx
-	movl	24(%esp),%esi
-	movl	28(%esp),%edi
-	movl	32(%esp),%ebp
-	xorl	%eax,%eax
-	andl	$4294967288,%ebp
-	jz	.L020aw_finish
-.L021aw_loop:
-
-	movl	(%esi),%ecx
-	movl	(%edi),%edx
-	addl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	addl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,(%ebx)
-
-	movl	4(%esi),%ecx
-	movl	4(%edi),%edx
-	addl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	addl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,4(%ebx)
-
-	movl	8(%esi),%ecx
-	movl	8(%edi),%edx
-	addl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	addl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,8(%ebx)
-
-	movl	12(%esi),%ecx
-	movl	12(%edi),%edx
-	addl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	addl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,12(%ebx)
-
-	movl	16(%esi),%ecx
-	movl	16(%edi),%edx
-	addl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	addl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,16(%ebx)
-
-	movl	20(%esi),%ecx
-	movl	20(%edi),%edx
-	addl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	addl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,20(%ebx)
-
-	movl	24(%esi),%ecx
-	movl	24(%edi),%edx
-	addl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	addl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,24(%ebx)
-
-	movl	28(%esi),%ecx
-	movl	28(%edi),%edx
-	addl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	addl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,28(%ebx)
-
-	addl	$32,%esi
-	addl	$32,%edi
-	addl	$32,%ebx
-	subl	$8,%ebp
-	jnz	.L021aw_loop
-.L020aw_finish:
-	movl	32(%esp),%ebp
-	andl	$7,%ebp
-	jz	.L022aw_end
-
-	movl	(%esi),%ecx
-	movl	(%edi),%edx
-	addl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	addl	%edx,%ecx
-	adcl	$0,%eax
-	decl	%ebp
-	movl	%ecx,(%ebx)
-	jz	.L022aw_end
-
-	movl	4(%esi),%ecx
-	movl	4(%edi),%edx
-	addl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	addl	%edx,%ecx
-	adcl	$0,%eax
-	decl	%ebp
-	movl	%ecx,4(%ebx)
-	jz	.L022aw_end
-
-	movl	8(%esi),%ecx
-	movl	8(%edi),%edx
-	addl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	addl	%edx,%ecx
-	adcl	$0,%eax
-	decl	%ebp
-	movl	%ecx,8(%ebx)
-	jz	.L022aw_end
-
-	movl	12(%esi),%ecx
-	movl	12(%edi),%edx
-	addl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	addl	%edx,%ecx
-	adcl	$0,%eax
-	decl	%ebp
-	movl	%ecx,12(%ebx)
-	jz	.L022aw_end
-
-	movl	16(%esi),%ecx
-	movl	16(%edi),%edx
-	addl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	addl	%edx,%ecx
-	adcl	$0,%eax
-	decl	%ebp
-	movl	%ecx,16(%ebx)
-	jz	.L022aw_end
-
-	movl	20(%esi),%ecx
-	movl	20(%edi),%edx
-	addl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	addl	%edx,%ecx
-	adcl	$0,%eax
-	decl	%ebp
-	movl	%ecx,20(%ebx)
-	jz	.L022aw_end
-
-	movl	24(%esi),%ecx
-	movl	24(%edi),%edx
-	addl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	addl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,24(%ebx)
-.L022aw_end:
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	bn_add_words,.-.L_bn_add_words_begin
-.globl	bn_sub_words
-.type	bn_sub_words,@function
-.align	16
-bn_sub_words:
-.L_bn_sub_words_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-
-	movl	20(%esp),%ebx
-	movl	24(%esp),%esi
-	movl	28(%esp),%edi
-	movl	32(%esp),%ebp
-	xorl	%eax,%eax
-	andl	$4294967288,%ebp
-	jz	.L023aw_finish
-.L024aw_loop:
-
-	movl	(%esi),%ecx
-	movl	(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,(%ebx)
-
-	movl	4(%esi),%ecx
-	movl	4(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,4(%ebx)
-
-	movl	8(%esi),%ecx
-	movl	8(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,8(%ebx)
-
-	movl	12(%esi),%ecx
-	movl	12(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,12(%ebx)
-
-	movl	16(%esi),%ecx
-	movl	16(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,16(%ebx)
-
-	movl	20(%esi),%ecx
-	movl	20(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,20(%ebx)
-
-	movl	24(%esi),%ecx
-	movl	24(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,24(%ebx)
-
-	movl	28(%esi),%ecx
-	movl	28(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,28(%ebx)
-
-	addl	$32,%esi
-	addl	$32,%edi
-	addl	$32,%ebx
-	subl	$8,%ebp
-	jnz	.L024aw_loop
-.L023aw_finish:
-	movl	32(%esp),%ebp
-	andl	$7,%ebp
-	jz	.L025aw_end
-
-	movl	(%esi),%ecx
-	movl	(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	decl	%ebp
-	movl	%ecx,(%ebx)
-	jz	.L025aw_end
-
-	movl	4(%esi),%ecx
-	movl	4(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	decl	%ebp
-	movl	%ecx,4(%ebx)
-	jz	.L025aw_end
-
-	movl	8(%esi),%ecx
-	movl	8(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	decl	%ebp
-	movl	%ecx,8(%ebx)
-	jz	.L025aw_end
-
-	movl	12(%esi),%ecx
-	movl	12(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	decl	%ebp
-	movl	%ecx,12(%ebx)
-	jz	.L025aw_end
-
-	movl	16(%esi),%ecx
-	movl	16(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	decl	%ebp
-	movl	%ecx,16(%ebx)
-	jz	.L025aw_end
-
-	movl	20(%esi),%ecx
-	movl	20(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	decl	%ebp
-	movl	%ecx,20(%ebx)
-	jz	.L025aw_end
-
-	movl	24(%esi),%ecx
-	movl	24(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,24(%ebx)
-.L025aw_end:
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	bn_sub_words,.-.L_bn_sub_words_begin
-.globl	bn_sub_part_words
-.type	bn_sub_part_words,@function
-.align	16
-bn_sub_part_words:
-.L_bn_sub_part_words_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-
-	movl	20(%esp),%ebx
-	movl	24(%esp),%esi
-	movl	28(%esp),%edi
-	movl	32(%esp),%ebp
-	xorl	%eax,%eax
-	andl	$4294967288,%ebp
-	jz	.L026aw_finish
-.L027aw_loop:
-
-	movl	(%esi),%ecx
-	movl	(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,(%ebx)
-
-	movl	4(%esi),%ecx
-	movl	4(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,4(%ebx)
-
-	movl	8(%esi),%ecx
-	movl	8(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,8(%ebx)
-
-	movl	12(%esi),%ecx
-	movl	12(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,12(%ebx)
-
-	movl	16(%esi),%ecx
-	movl	16(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,16(%ebx)
-
-	movl	20(%esi),%ecx
-	movl	20(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,20(%ebx)
-
-	movl	24(%esi),%ecx
-	movl	24(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,24(%ebx)
-
-	movl	28(%esi),%ecx
-	movl	28(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,28(%ebx)
-
-	addl	$32,%esi
-	addl	$32,%edi
-	addl	$32,%ebx
-	subl	$8,%ebp
-	jnz	.L027aw_loop
-.L026aw_finish:
-	movl	32(%esp),%ebp
-	andl	$7,%ebp
-	jz	.L028aw_end
-
-	movl	(%esi),%ecx
-	movl	(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,(%ebx)
-	addl	$4,%esi
-	addl	$4,%edi
-	addl	$4,%ebx
-	decl	%ebp
-	jz	.L028aw_end
-
-	movl	(%esi),%ecx
-	movl	(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,(%ebx)
-	addl	$4,%esi
-	addl	$4,%edi
-	addl	$4,%ebx
-	decl	%ebp
-	jz	.L028aw_end
-
-	movl	(%esi),%ecx
-	movl	(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,(%ebx)
-	addl	$4,%esi
-	addl	$4,%edi
-	addl	$4,%ebx
-	decl	%ebp
-	jz	.L028aw_end
-
-	movl	(%esi),%ecx
-	movl	(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,(%ebx)
-	addl	$4,%esi
-	addl	$4,%edi
-	addl	$4,%ebx
-	decl	%ebp
-	jz	.L028aw_end
-
-	movl	(%esi),%ecx
-	movl	(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,(%ebx)
-	addl	$4,%esi
-	addl	$4,%edi
-	addl	$4,%ebx
-	decl	%ebp
-	jz	.L028aw_end
-
-	movl	(%esi),%ecx
-	movl	(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,(%ebx)
-	addl	$4,%esi
-	addl	$4,%edi
-	addl	$4,%ebx
-	decl	%ebp
-	jz	.L028aw_end
-
-	movl	(%esi),%ecx
-	movl	(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,(%ebx)
-	addl	$4,%esi
-	addl	$4,%edi
-	addl	$4,%ebx
-.L028aw_end:
-	cmpl	$0,36(%esp)
-	je	.L029pw_end
-	movl	36(%esp),%ebp
-	cmpl	$0,%ebp
-	je	.L029pw_end
-	jge	.L030pw_pos
-
-	movl	$0,%edx
-	subl	%ebp,%edx
-	movl	%edx,%ebp
-	andl	$4294967288,%ebp
-	jz	.L031pw_neg_finish
-.L032pw_neg_loop:
-
-	movl	$0,%ecx
-	movl	(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,(%ebx)
-
-	movl	$0,%ecx
-	movl	4(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,4(%ebx)
-
-	movl	$0,%ecx
-	movl	8(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,8(%ebx)
-
-	movl	$0,%ecx
-	movl	12(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,12(%ebx)
-
-	movl	$0,%ecx
-	movl	16(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,16(%ebx)
-
-	movl	$0,%ecx
-	movl	20(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,20(%ebx)
-
-	movl	$0,%ecx
-	movl	24(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,24(%ebx)
-
-	movl	$0,%ecx
-	movl	28(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,28(%ebx)
-
-	addl	$32,%edi
-	addl	$32,%ebx
-	subl	$8,%ebp
-	jnz	.L032pw_neg_loop
-.L031pw_neg_finish:
-	movl	36(%esp),%edx
-	movl	$0,%ebp
-	subl	%edx,%ebp
-	andl	$7,%ebp
-	jz	.L029pw_end
-
-	movl	$0,%ecx
-	movl	(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	decl	%ebp
-	movl	%ecx,(%ebx)
-	jz	.L029pw_end
-
-	movl	$0,%ecx
-	movl	4(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	decl	%ebp
-	movl	%ecx,4(%ebx)
-	jz	.L029pw_end
-
-	movl	$0,%ecx
-	movl	8(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	decl	%ebp
-	movl	%ecx,8(%ebx)
-	jz	.L029pw_end
-
-	movl	$0,%ecx
-	movl	12(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	decl	%ebp
-	movl	%ecx,12(%ebx)
-	jz	.L029pw_end
-
-	movl	$0,%ecx
-	movl	16(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	decl	%ebp
-	movl	%ecx,16(%ebx)
-	jz	.L029pw_end
-
-	movl	$0,%ecx
-	movl	20(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	decl	%ebp
-	movl	%ecx,20(%ebx)
-	jz	.L029pw_end
-
-	movl	$0,%ecx
-	movl	24(%edi),%edx
-	subl	%eax,%ecx
-	movl	$0,%eax
-	adcl	%eax,%eax
-	subl	%edx,%ecx
-	adcl	$0,%eax
-	movl	%ecx,24(%ebx)
-	jmp	.L029pw_end
-.L030pw_pos:
-	andl	$4294967288,%ebp
-	jz	.L033pw_pos_finish
-.L034pw_pos_loop:
-
-	movl	(%esi),%ecx
-	subl	%eax,%ecx
-	movl	%ecx,(%ebx)
-	jnc	.L035pw_nc0
-
-	movl	4(%esi),%ecx
-	subl	%eax,%ecx
-	movl	%ecx,4(%ebx)
-	jnc	.L036pw_nc1
-
-	movl	8(%esi),%ecx
-	subl	%eax,%ecx
-	movl	%ecx,8(%ebx)
-	jnc	.L037pw_nc2
-
-	movl	12(%esi),%ecx
-	subl	%eax,%ecx
-	movl	%ecx,12(%ebx)
-	jnc	.L038pw_nc3
-
-	movl	16(%esi),%ecx
-	subl	%eax,%ecx
-	movl	%ecx,16(%ebx)
-	jnc	.L039pw_nc4
-
-	movl	20(%esi),%ecx
-	subl	%eax,%ecx
-	movl	%ecx,20(%ebx)
-	jnc	.L040pw_nc5
-
-	movl	24(%esi),%ecx
-	subl	%eax,%ecx
-	movl	%ecx,24(%ebx)
-	jnc	.L041pw_nc6
-
-	movl	28(%esi),%ecx
-	subl	%eax,%ecx
-	movl	%ecx,28(%ebx)
-	jnc	.L042pw_nc7
-
-	addl	$32,%esi
-	addl	$32,%ebx
-	subl	$8,%ebp
-	jnz	.L034pw_pos_loop
-.L033pw_pos_finish:
-	movl	36(%esp),%ebp
-	andl	$7,%ebp
-	jz	.L029pw_end
-
-	movl	(%esi),%ecx
-	subl	%eax,%ecx
-	movl	%ecx,(%ebx)
-	jnc	.L043pw_tail_nc0
-	decl	%ebp
-	jz	.L029pw_end
-
-	movl	4(%esi),%ecx
-	subl	%eax,%ecx
-	movl	%ecx,4(%ebx)
-	jnc	.L044pw_tail_nc1
-	decl	%ebp
-	jz	.L029pw_end
-
-	movl	8(%esi),%ecx
-	subl	%eax,%ecx
-	movl	%ecx,8(%ebx)
-	jnc	.L045pw_tail_nc2
-	decl	%ebp
-	jz	.L029pw_end
-
-	movl	12(%esi),%ecx
-	subl	%eax,%ecx
-	movl	%ecx,12(%ebx)
-	jnc	.L046pw_tail_nc3
-	decl	%ebp
-	jz	.L029pw_end
-
-	movl	16(%esi),%ecx
-	subl	%eax,%ecx
-	movl	%ecx,16(%ebx)
-	jnc	.L047pw_tail_nc4
-	decl	%ebp
-	jz	.L029pw_end
-
-	movl	20(%esi),%ecx
-	subl	%eax,%ecx
-	movl	%ecx,20(%ebx)
-	jnc	.L048pw_tail_nc5
-	decl	%ebp
-	jz	.L029pw_end
-
-	movl	24(%esi),%ecx
-	subl	%eax,%ecx
-	movl	%ecx,24(%ebx)
-	jnc	.L049pw_tail_nc6
-	movl	$1,%eax
-	jmp	.L029pw_end
-.L050pw_nc_loop:
-	movl	(%esi),%ecx
-	movl	%ecx,(%ebx)
-.L035pw_nc0:
-	movl	4(%esi),%ecx
-	movl	%ecx,4(%ebx)
-.L036pw_nc1:
-	movl	8(%esi),%ecx
-	movl	%ecx,8(%ebx)
-.L037pw_nc2:
-	movl	12(%esi),%ecx
-	movl	%ecx,12(%ebx)
-.L038pw_nc3:
-	movl	16(%esi),%ecx
-	movl	%ecx,16(%ebx)
-.L039pw_nc4:
-	movl	20(%esi),%ecx
-	movl	%ecx,20(%ebx)
-.L040pw_nc5:
-	movl	24(%esi),%ecx
-	movl	%ecx,24(%ebx)
-.L041pw_nc6:
-	movl	28(%esi),%ecx
-	movl	%ecx,28(%ebx)
-.L042pw_nc7:
-
-	addl	$32,%esi
-	addl	$32,%ebx
-	subl	$8,%ebp
-	jnz	.L050pw_nc_loop
-	movl	36(%esp),%ebp
-	andl	$7,%ebp
-	jz	.L051pw_nc_end
-	movl	(%esi),%ecx
-	movl	%ecx,(%ebx)
-.L043pw_tail_nc0:
-	decl	%ebp
-	jz	.L051pw_nc_end
-	movl	4(%esi),%ecx
-	movl	%ecx,4(%ebx)
-.L044pw_tail_nc1:
-	decl	%ebp
-	jz	.L051pw_nc_end
-	movl	8(%esi),%ecx
-	movl	%ecx,8(%ebx)
-.L045pw_tail_nc2:
-	decl	%ebp
-	jz	.L051pw_nc_end
-	movl	12(%esi),%ecx
-	movl	%ecx,12(%ebx)
-.L046pw_tail_nc3:
-	decl	%ebp
-	jz	.L051pw_nc_end
-	movl	16(%esi),%ecx
-	movl	%ecx,16(%ebx)
-.L047pw_tail_nc4:
-	decl	%ebp
-	jz	.L051pw_nc_end
-	movl	20(%esi),%ecx
-	movl	%ecx,20(%ebx)
-.L048pw_tail_nc5:
-	decl	%ebp
-	jz	.L051pw_nc_end
-	movl	24(%esi),%ecx
-	movl	%ecx,24(%ebx)
-.L049pw_tail_nc6:
-.L051pw_nc_end:
-	movl	$0,%eax
-.L029pw_end:
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	bn_sub_part_words,.-.L_bn_sub_part_words_begin
-.comm	OPENSSL_ia32cap_P,8,4

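For reference, the bn_* routines in the hunk above are OpenSSL's BIGNUM
word primitives; the bn-586.S added by this commit carries the same
generated code, wrapped for the C preprocessor. A minimal C model of
bn_mul_words, assuming the 32-bit BN_ULONG used on i386 (a sketch of the
semantics, not the shipped implementation):

	#include <stdint.h>

	typedef uint32_t BN_ULONG;

	/* rp[i] = ap[i] * w plus the running carry; returns the final
	 * carry. The assembly unrolls this loop eight ways and takes an
	 * SSE2 (pmuludq) path when bit 26 of OPENSSL_ia32cap_P is set. */
	BN_ULONG bn_mul_words(BN_ULONG *rp, const BN_ULONG *ap,
	                      int num, BN_ULONG w)
	{
		BN_ULONG carry = 0;
		while (num-- > 0) {
			uint64_t t = (uint64_t)*ap++ * w + carry;
			*rp++ = (BN_ULONG)t;         /* low word:  movl %eax,(%edi) */
			carry = (BN_ULONG)(t >> 32); /* high word: movl %edx,%esi   */
		}
		return carry;
	}

bn_add_words and bn_sub_words follow the same shape with a one-bit
carry/borrow kept in %eax, and bn_sqr_words stores both 32-bit halves of
each 64-bit square.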
Added: trunk/secure/lib/libcrypto/i386/cast-586.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/cast-586.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/cast-586.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,1872 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/cast-586.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from cast-586.pl.
+#ifdef PIC
+.file	"cast-586.S"
+.text
+.globl	CAST_encrypt
+.type	CAST_encrypt,@function
+.align	16
+CAST_encrypt:
+.L_CAST_encrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	movl	12(%esp),%ebx
+	movl	16(%esp),%ebp
+	pushl	%esi
+	pushl	%edi
+
+	movl	(%ebx),%edi
+	movl	4(%ebx),%esi
+
+	movl	128(%ebp),%eax
+	pushl	%eax
+	xorl	%eax,%eax
+
+	movl	(%ebp),%edx
+	movl	4(%ebp),%ecx
+	addl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	8(%ebp),%edx
+	movl	12(%ebp),%ecx
+	xorl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	16(%ebp),%edx
+	movl	20(%ebp),%ecx
+	subl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	24(%ebp),%edx
+	movl	28(%ebp),%ecx
+	addl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	32(%ebp),%edx
+	movl	36(%ebp),%ecx
+	xorl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	40(%ebp),%edx
+	movl	44(%ebp),%ecx
+	subl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	48(%ebp),%edx
+	movl	52(%ebp),%ecx
+	addl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	56(%ebp),%edx
+	movl	60(%ebp),%ecx
+	xorl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	64(%ebp),%edx
+	movl	68(%ebp),%ecx
+	subl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	72(%ebp),%edx
+	movl	76(%ebp),%ecx
+	addl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	80(%ebp),%edx
+	movl	84(%ebp),%ecx
+	xorl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	88(%ebp),%edx
+	movl	92(%ebp),%ecx
+	subl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	popl	%edx
+	orl	%edx,%edx
+	jnz	.L000cast_enc_done
+
+	movl	96(%ebp),%edx
+	movl	100(%ebp),%ecx
+	addl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	104(%ebp),%edx
+	movl	108(%ebp),%ecx
+	xorl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	112(%ebp),%edx
+	movl	116(%ebp),%ecx
+	subl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	120(%ebp),%edx
+	movl	124(%ebp),%ecx
+	addl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%esi
+.L000cast_enc_done:
+	nop
+	movl	20(%esp),%eax
+	movl	%edi,4(%eax)
+	movl	%esi,(%eax)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	CAST_encrypt,.-.L_CAST_encrypt_begin
+.globl	CAST_decrypt
+.type	CAST_decrypt,@function
+.align	16
+CAST_decrypt:
+.L_CAST_decrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	movl	12(%esp),%ebx
+	movl	16(%esp),%ebp
+	pushl	%esi
+	pushl	%edi
+
+	movl	(%ebx),%edi
+	movl	4(%ebx),%esi
+
+	movl	128(%ebp),%eax
+	orl	%eax,%eax
+	jnz	.L001cast_dec_skip
+	xorl	%eax,%eax
+
+	movl	120(%ebp),%edx
+	movl	124(%ebp),%ecx
+	addl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	112(%ebp),%edx
+	movl	116(%ebp),%ecx
+	subl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	104(%ebp),%edx
+	movl	108(%ebp),%ecx
+	xorl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	96(%ebp),%edx
+	movl	100(%ebp),%ecx
+	addl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%esi
+.L001cast_dec_skip:
+
+	movl	88(%ebp),%edx
+	movl	92(%ebp),%ecx
+	subl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	80(%ebp),%edx
+	movl	84(%ebp),%ecx
+	xorl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	72(%ebp),%edx
+	movl	76(%ebp),%ecx
+	addl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	64(%ebp),%edx
+	movl	68(%ebp),%ecx
+	subl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	56(%ebp),%edx
+	movl	60(%ebp),%ecx
+	xorl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	48(%ebp),%edx
+	movl	52(%ebp),%ecx
+	addl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	40(%ebp),%edx
+	movl	44(%ebp),%ecx
+	subl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	32(%ebp),%edx
+	movl	36(%ebp),%ecx
+	xorl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	24(%ebp),%edx
+	movl	28(%ebp),%ecx
+	addl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	16(%ebp),%edx
+	movl	20(%ebp),%ecx
+	subl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	8(%ebp),%edx
+	movl	12(%ebp),%ecx
+	xorl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	(%ebp),%edx
+	movl	4(%ebp),%ecx
+	addl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%esi
+	nop
+	movl	20(%esp),%eax
+	movl	%edi,4(%eax)
+	movl	%esi,(%eax)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	CAST_decrypt,.-.L_CAST_decrypt_begin
+.globl	CAST_cbc_encrypt
+.type	CAST_cbc_encrypt,@function
+.align	16
+CAST_cbc_encrypt:
+.L_CAST_cbc_encrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%ebp
+
+	movl	36(%esp),%ebx
+	movl	(%ebx),%esi
+	movl	4(%ebx),%edi
+	pushl	%edi
+	pushl	%esi
+	pushl	%edi
+	pushl	%esi
+	movl	%esp,%ebx
+	movl	36(%esp),%esi
+	movl	40(%esp),%edi
+
+	movl	56(%esp),%ecx
+
+	movl	48(%esp),%eax
+	pushl	%eax
+	pushl	%ebx
+	cmpl	$0,%ecx
+	jz	.L002decrypt
+	andl	$4294967288,%ebp
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	jz	.L003encrypt_finish
+.L004encrypt_loop:
+	movl	(%esi),%ecx
+	movl	4(%esi),%edx
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_CAST_encrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L004encrypt_loop
+.L003encrypt_finish:
+	movl	52(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L005finish
+	call	.L006PIC_point
+.L006PIC_point:
+	popl	%edx
+	leal	.L007cbc_enc_jmp_table-.L006PIC_point(%edx),%ecx
+	movl	(%ecx,%ebp,4),%ebp
+	addl	%edx,%ebp
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+	jmp	*%ebp
+.L008ej7:
+	movb	6(%esi),%dh
+	shll	$8,%edx
+.L009ej6:
+	movb	5(%esi),%dh
+.L010ej5:
+	movb	4(%esi),%dl
+.L011ej4:
+	movl	(%esi),%ecx
+	jmp	.L012ejend
+.L013ej3:
+	movb	2(%esi),%ch
+	shll	$8,%ecx
+.L014ej2:
+	movb	1(%esi),%ch
+.L015ej1:
+	movb	(%esi),%cl
+.L012ejend:
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_CAST_encrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	jmp	.L005finish
+.L002decrypt:
+	andl	$4294967288,%ebp
+	movl	16(%esp),%eax
+	movl	20(%esp),%ebx
+	jz	.L016decrypt_finish
+.L017decrypt_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_CAST_decrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	16(%esp),%ecx
+	movl	20(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%ecx,(%edi)
+	movl	%edx,4(%edi)
+	movl	%eax,16(%esp)
+	movl	%ebx,20(%esp)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L017decrypt_loop
+.L016decrypt_finish:
+	movl	52(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L005finish
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_CAST_decrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	16(%esp),%ecx
+	movl	20(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+.L018dj7:
+	rorl	$16,%edx
+	movb	%dl,6(%edi)
+	shrl	$16,%edx
+.L019dj6:
+	movb	%dh,5(%edi)
+.L020dj5:
+	movb	%dl,4(%edi)
+.L021dj4:
+	movl	%ecx,(%edi)
+	jmp	.L022djend
+.L023dj3:
+	rorl	$16,%ecx
+	movb	%cl,2(%edi)
+	shll	$16,%ecx
+.L024dj2:
+	movb	%ch,1(%esi)
+.L025dj1:
+	movb	%cl,(%esi)
+.L022djend:
+	jmp	.L005finish
+.L005finish:
+	movl	60(%esp),%ecx
+	addl	$24,%esp
+	movl	%eax,(%ecx)
+	movl	%ebx,4(%ecx)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	64
+.L007cbc_enc_jmp_table:
+.long	0
+.long	.L015ej1-.L006PIC_point
+.long	.L014ej2-.L006PIC_point
+.long	.L013ej3-.L006PIC_point
+.long	.L011ej4-.L006PIC_point
+.long	.L010ej5-.L006PIC_point
+.long	.L009ej6-.L006PIC_point
+.long	.L008ej7-.L006PIC_point
+.align	64
+.size	CAST_cbc_encrypt,.-.L_CAST_cbc_encrypt_begin
+#else
+.file	"cast-586.S"
+.text
+.globl	CAST_encrypt
+.type	CAST_encrypt,@function
+.align	16
+CAST_encrypt:
+.L_CAST_encrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	movl	12(%esp),%ebx
+	movl	16(%esp),%ebp
+	pushl	%esi
+	pushl	%edi
+
+	movl	(%ebx),%edi
+	movl	4(%ebx),%esi
+
+	movl	128(%ebp),%eax
+	pushl	%eax
+	xorl	%eax,%eax
+
+	movl	(%ebp),%edx
+	movl	4(%ebp),%ecx
+	addl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	8(%ebp),%edx
+	movl	12(%ebp),%ecx
+	xorl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	16(%ebp),%edx
+	movl	20(%ebp),%ecx
+	subl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	24(%ebp),%edx
+	movl	28(%ebp),%ecx
+	addl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	32(%ebp),%edx
+	movl	36(%ebp),%ecx
+	xorl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	40(%ebp),%edx
+	movl	44(%ebp),%ecx
+	subl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	48(%ebp),%edx
+	movl	52(%ebp),%ecx
+	addl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	56(%ebp),%edx
+	movl	60(%ebp),%ecx
+	xorl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	64(%ebp),%edx
+	movl	68(%ebp),%ecx
+	subl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	72(%ebp),%edx
+	movl	76(%ebp),%ecx
+	addl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	80(%ebp),%edx
+	movl	84(%ebp),%ecx
+	xorl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	88(%ebp),%edx
+	movl	92(%ebp),%ecx
+	subl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	popl	%edx
+	orl	%edx,%edx
+	jnz	.L000cast_enc_done
+
+	movl	96(%ebp),%edx
+	movl	100(%ebp),%ecx
+	addl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	104(%ebp),%edx
+	movl	108(%ebp),%ecx
+	xorl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	112(%ebp),%edx
+	movl	116(%ebp),%ecx
+	subl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	120(%ebp),%edx
+	movl	124(%ebp),%ecx
+	addl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%esi
+.L000cast_enc_done:
+	nop
+	movl	20(%esp),%eax
+	movl	%edi,4(%eax)
+	movl	%esi,(%eax)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	CAST_encrypt,.-.L_CAST_encrypt_begin
+.globl	CAST_decrypt
+.type	CAST_decrypt,@function
+.align	16
+CAST_decrypt:
+.L_CAST_decrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	movl	12(%esp),%ebx
+	movl	16(%esp),%ebp
+	pushl	%esi
+	pushl	%edi
+
+	movl	(%ebx),%edi
+	movl	4(%ebx),%esi
+
+	movl	128(%ebp),%eax
+	orl	%eax,%eax
+	jnz	.L001cast_dec_skip
+	xorl	%eax,%eax
+
+	movl	120(%ebp),%edx
+	movl	124(%ebp),%ecx
+	addl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	112(%ebp),%edx
+	movl	116(%ebp),%ecx
+	subl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	104(%ebp),%edx
+	movl	108(%ebp),%ecx
+	xorl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	96(%ebp),%edx
+	movl	100(%ebp),%ecx
+	addl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%esi
+.L001cast_dec_skip:
+
+	movl	88(%ebp),%edx
+	movl	92(%ebp),%ecx
+	subl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	80(%ebp),%edx
+	movl	84(%ebp),%ecx
+	xorl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	72(%ebp),%edx
+	movl	76(%ebp),%ecx
+	addl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	64(%ebp),%edx
+	movl	68(%ebp),%ecx
+	subl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	56(%ebp),%edx
+	movl	60(%ebp),%ecx
+	xorl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	48(%ebp),%edx
+	movl	52(%ebp),%ecx
+	addl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	40(%ebp),%edx
+	movl	44(%ebp),%ecx
+	subl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	32(%ebp),%edx
+	movl	36(%ebp),%ecx
+	xorl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	24(%ebp),%edx
+	movl	28(%ebp),%ecx
+	addl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	16(%ebp),%edx
+	movl	20(%ebp),%ecx
+	subl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	subl	%ebx,%ecx
+	xorl	%ecx,%esi
+
+	movl	8(%ebp),%edx
+	movl	12(%ebp),%ecx
+	xorl	%esi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	addl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	xorl	%ebx,%ecx
+	xorl	%ecx,%edi
+
+	movl	(%ebp),%edx
+	movl	4(%ebp),%ecx
+	addl	%edi,%edx
+	roll	%cl,%edx
+	movl	%edx,%ebx
+	xorl	%ecx,%ecx
+	movb	%dh,%cl
+	andl	$255,%ebx
+	shrl	$16,%edx
+	xorl	%eax,%eax
+	movb	%dh,%al
+	andl	$255,%edx
+	movl	CAST_S_table0(,%ecx,4),%ecx
+	movl	CAST_S_table1(,%ebx,4),%ebx
+	xorl	%ebx,%ecx
+	movl	CAST_S_table2(,%eax,4),%ebx
+	subl	%ebx,%ecx
+	movl	CAST_S_table3(,%edx,4),%ebx
+	addl	%ebx,%ecx
+	xorl	%ecx,%esi
+	nop
+	movl	20(%esp),%eax
+	movl	%edi,4(%eax)
+	movl	%esi,(%eax)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	CAST_decrypt,.-.L_CAST_decrypt_begin
+.globl	CAST_cbc_encrypt
+.type	CAST_cbc_encrypt,@function
+.align	16
+CAST_cbc_encrypt:
+.L_CAST_cbc_encrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%ebp
+
+	movl	36(%esp),%ebx
+	movl	(%ebx),%esi
+	movl	4(%ebx),%edi
+	pushl	%edi
+	pushl	%esi
+	pushl	%edi
+	pushl	%esi
+	movl	%esp,%ebx
+	movl	36(%esp),%esi
+	movl	40(%esp),%edi
+
+	movl	56(%esp),%ecx
+
+	movl	48(%esp),%eax
+	pushl	%eax
+	pushl	%ebx
+	cmpl	$0,%ecx
+	jz	.L002decrypt
+	andl	$4294967288,%ebp
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	jz	.L003encrypt_finish
+.L004encrypt_loop:
+	movl	(%esi),%ecx
+	movl	4(%esi),%edx
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_CAST_encrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L004encrypt_loop
+.L003encrypt_finish:
+	movl	52(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L005finish
+	call	.L006PIC_point
+.L006PIC_point:
+	popl	%edx
+	leal	.L007cbc_enc_jmp_table-.L006PIC_point(%edx),%ecx
+	movl	(%ecx,%ebp,4),%ebp
+	addl	%edx,%ebp
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+	jmp	*%ebp
+.L008ej7:
+	movb	6(%esi),%dh
+	shll	$8,%edx
+.L009ej6:
+	movb	5(%esi),%dh
+.L010ej5:
+	movb	4(%esi),%dl
+.L011ej4:
+	movl	(%esi),%ecx
+	jmp	.L012ejend
+.L013ej3:
+	movb	2(%esi),%ch
+	shll	$8,%ecx
+.L014ej2:
+	movb	1(%esi),%ch
+.L015ej1:
+	movb	(%esi),%cl
+.L012ejend:
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_CAST_encrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	jmp	.L005finish
+.L002decrypt:
+	andl	$4294967288,%ebp
+	movl	16(%esp),%eax
+	movl	20(%esp),%ebx
+	jz	.L016decrypt_finish
+.L017decrypt_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_CAST_decrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	16(%esp),%ecx
+	movl	20(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%ecx,(%edi)
+	movl	%edx,4(%edi)
+	movl	%eax,16(%esp)
+	movl	%ebx,20(%esp)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L017decrypt_loop
+.L016decrypt_finish:
+	movl	52(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L005finish
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_CAST_decrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	bswap	%eax
+	bswap	%ebx
+	movl	16(%esp),%ecx
+	movl	20(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+.L018dj7:
+	rorl	$16,%edx
+	movb	%dl,6(%edi)
+	shrl	$16,%edx
+.L019dj6:
+	movb	%dh,5(%edi)
+.L020dj5:
+	movb	%dl,4(%edi)
+.L021dj4:
+	movl	%ecx,(%edi)
+	jmp	.L022djend
+.L023dj3:
+	rorl	$16,%ecx
+	movb	%cl,2(%edi)
+	shll	$16,%ecx
+.L024dj2:
+	movb	%ch,1(%esi)
+.L025dj1:
+	movb	%cl,(%esi)
+.L022djend:
+	jmp	.L005finish
+.L005finish:
+	movl	60(%esp),%ecx
+	addl	$24,%esp
+	movl	%eax,(%ecx)
+	movl	%ebx,4(%ecx)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	64
+.L007cbc_enc_jmp_table:
+.long	0
+.long	.L015ej1-.L006PIC_point
+.long	.L014ej2-.L006PIC_point
+.long	.L013ej3-.L006PIC_point
+.long	.L011ej4-.L006PIC_point
+.long	.L010ej5-.L006PIC_point
+.long	.L009ej6-.L006PIC_point
+.long	.L008ej7-.L006PIC_point
+.align	64
+.size	CAST_cbc_encrypt,.-.L_CAST_cbc_encrypt_begin
+#endif


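A note on the rename from cast-586.s to cast-586.S: the capital .S
suffix makes the compiler driver run the file through the C
preprocessor, which is what lets the regenerated source carry two
complete bodies under #ifdef PIC / #else / #endif and select one at
build time. For this file the two bodies come out essentially identical
(the .L007cbc_enc_jmp_table is already addressed PC-relatively through
the .L006PIC_point call/pop pair); the split presumably matters for
files such as bn-586.S, where a PIC build has to reach
OPENSSL_ia32cap_P PC-relatively instead of via an absolute leal.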
Property changes on: trunk/secure/lib/libcrypto/i386/cast-586.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
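The sixteen near-identical blocks inside CAST_encrypt above (and in the
superseded cast-586.s below) are the three CAST-128 round types, fully
unrolled. A C sketch of the type-1 round, assuming the same
CAST_S_table0..CAST_S_table3 lookup tables the assembly links against
(a model of the round structure, not OpenSSL's shipped C code):

	#include <stdint.h>

	typedef uint32_t CAST_LONG;
	extern const CAST_LONG CAST_S_table0[256], CAST_S_table1[256],
	                       CAST_S_table2[256], CAST_S_table3[256];

	/* rotate left; the "& 31" terms keep n == 0 well defined in C */
	#define ROTL(x,n) (((x) << ((n) & 31)) | ((x) >> ((32 - (n)) & 31)))

	/* Type-1 round (rounds 1, 4, 7, ...): addl/roll, four byte-indexed
	 * S-box loads, then xor/sub/add, i.e. one ~25-instruction block
	 * above with Km at 8n(%ebp) and Kr at 8n+4(%ebp). Round types 2
	 * and 3 permute the +, ^ and - operators, which is exactly how the
	 * add/xor/sub ordering varies from block to block. */
	static CAST_LONG cast_round1(CAST_LONG R, CAST_LONG Km, CAST_LONG Kr)
	{
		CAST_LONG t = ROTL(Km + R, Kr);
		CAST_LONG a = CAST_S_table0[(t >>  8) & 0xff];
		CAST_LONG b = CAST_S_table1[ t        & 0xff];
		CAST_LONG c = CAST_S_table2[(t >> 24) & 0xff];
		CAST_LONG d = CAST_S_table3[(t >> 16) & 0xff];
		return ((a ^ b) - c) + d;  /* caller XORs this into the other half */
	}

The popl %edx / orl %edx,%edx test after the twelfth block reads the
short_key flag stored at 128(%ebp) in the key schedule: for keys of 80
bits or less CAST-128 runs only twelve rounds, so the last four blocks
are skipped.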
Deleted: trunk/secure/lib/libcrypto/i386/cast-586.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/cast-586.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/cast-586.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,934 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/cast-586.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"cast-586.s"
-.text
-.globl	CAST_encrypt
-.type	CAST_encrypt,@function
-.align	16
-CAST_encrypt:
-.L_CAST_encrypt_begin:
-
-	pushl	%ebp
-	pushl	%ebx
-	movl	12(%esp),%ebx
-	movl	16(%esp),%ebp
-	pushl	%esi
-	pushl	%edi
-
-	movl	(%ebx),%edi
-	movl	4(%ebx),%esi
-
-	movl	128(%ebp),%eax
-	pushl	%eax
-	xorl	%eax,%eax
-
-	movl	(%ebp),%edx
-	movl	4(%ebp),%ecx
-	addl	%esi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	addl	%ebx,%ecx
-	xorl	%ecx,%edi
-
-	movl	8(%ebp),%edx
-	movl	12(%ebp),%ecx
-	xorl	%edi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	xorl	%ebx,%ecx
-	xorl	%ecx,%esi
-
-	movl	16(%ebp),%edx
-	movl	20(%ebp),%ecx
-	subl	%esi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	subl	%ebx,%ecx
-	xorl	%ecx,%edi
-
-	movl	24(%ebp),%edx
-	movl	28(%ebp),%ecx
-	addl	%edi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	addl	%ebx,%ecx
-	xorl	%ecx,%esi
-
-	movl	32(%ebp),%edx
-	movl	36(%ebp),%ecx
-	xorl	%esi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	xorl	%ebx,%ecx
-	xorl	%ecx,%edi
-
-	movl	40(%ebp),%edx
-	movl	44(%ebp),%ecx
-	subl	%edi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	subl	%ebx,%ecx
-	xorl	%ecx,%esi
-
-	movl	48(%ebp),%edx
-	movl	52(%ebp),%ecx
-	addl	%esi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	addl	%ebx,%ecx
-	xorl	%ecx,%edi
-
-	movl	56(%ebp),%edx
-	movl	60(%ebp),%ecx
-	xorl	%edi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	xorl	%ebx,%ecx
-	xorl	%ecx,%esi
-
-	movl	64(%ebp),%edx
-	movl	68(%ebp),%ecx
-	subl	%esi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	subl	%ebx,%ecx
-	xorl	%ecx,%edi
-
-	movl	72(%ebp),%edx
-	movl	76(%ebp),%ecx
-	addl	%edi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	addl	%ebx,%ecx
-	xorl	%ecx,%esi
-
-	movl	80(%ebp),%edx
-	movl	84(%ebp),%ecx
-	xorl	%esi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	xorl	%ebx,%ecx
-	xorl	%ecx,%edi
-
-	movl	88(%ebp),%edx
-	movl	92(%ebp),%ecx
-	subl	%edi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	subl	%ebx,%ecx
-	xorl	%ecx,%esi
-
-	popl	%edx
-	orl	%edx,%edx
-	jnz	.L000cast_enc_done
-
-	movl	96(%ebp),%edx
-	movl	100(%ebp),%ecx
-	addl	%esi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	addl	%ebx,%ecx
-	xorl	%ecx,%edi
-
-	movl	104(%ebp),%edx
-	movl	108(%ebp),%ecx
-	xorl	%edi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	xorl	%ebx,%ecx
-	xorl	%ecx,%esi
-
-	movl	112(%ebp),%edx
-	movl	116(%ebp),%ecx
-	subl	%esi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	subl	%ebx,%ecx
-	xorl	%ecx,%edi
-
-	movl	120(%ebp),%edx
-	movl	124(%ebp),%ecx
-	addl	%edi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	addl	%ebx,%ecx
-	xorl	%ecx,%esi
-.L000cast_enc_done:
-	nop
-	movl	20(%esp),%eax
-	movl	%edi,4(%eax)
-	movl	%esi,(%eax)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	CAST_encrypt,.-.L_CAST_encrypt_begin
-.globl	CAST_decrypt
-.type	CAST_decrypt,@function
-.align	16
-CAST_decrypt:
-.L_CAST_decrypt_begin:
-
-	pushl	%ebp
-	pushl	%ebx
-	movl	12(%esp),%ebx
-	movl	16(%esp),%ebp
-	pushl	%esi
-	pushl	%edi
-
-	movl	(%ebx),%edi
-	movl	4(%ebx),%esi
-
-	movl	128(%ebp),%eax
-	orl	%eax,%eax
-	jnz	.L001cast_dec_skip
-	xorl	%eax,%eax
-
-	movl	120(%ebp),%edx
-	movl	124(%ebp),%ecx
-	addl	%esi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	addl	%ebx,%ecx
-	xorl	%ecx,%edi
-
-	movl	112(%ebp),%edx
-	movl	116(%ebp),%ecx
-	subl	%edi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	subl	%ebx,%ecx
-	xorl	%ecx,%esi
-
-	movl	104(%ebp),%edx
-	movl	108(%ebp),%ecx
-	xorl	%esi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	xorl	%ebx,%ecx
-	xorl	%ecx,%edi
-
-	movl	96(%ebp),%edx
-	movl	100(%ebp),%ecx
-	addl	%edi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	addl	%ebx,%ecx
-	xorl	%ecx,%esi
-.L001cast_dec_skip:
-
-	movl	88(%ebp),%edx
-	movl	92(%ebp),%ecx
-	subl	%esi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	subl	%ebx,%ecx
-	xorl	%ecx,%edi
-
-	movl	80(%ebp),%edx
-	movl	84(%ebp),%ecx
-	xorl	%edi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	xorl	%ebx,%ecx
-	xorl	%ecx,%esi
-
-	movl	72(%ebp),%edx
-	movl	76(%ebp),%ecx
-	addl	%esi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	addl	%ebx,%ecx
-	xorl	%ecx,%edi
-
-	movl	64(%ebp),%edx
-	movl	68(%ebp),%ecx
-	subl	%edi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	subl	%ebx,%ecx
-	xorl	%ecx,%esi
-
-	movl	56(%ebp),%edx
-	movl	60(%ebp),%ecx
-	xorl	%esi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	xorl	%ebx,%ecx
-	xorl	%ecx,%edi
-
-	movl	48(%ebp),%edx
-	movl	52(%ebp),%ecx
-	addl	%edi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	addl	%ebx,%ecx
-	xorl	%ecx,%esi
-
-	movl	40(%ebp),%edx
-	movl	44(%ebp),%ecx
-	subl	%esi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	subl	%ebx,%ecx
-	xorl	%ecx,%edi
-
-	movl	32(%ebp),%edx
-	movl	36(%ebp),%ecx
-	xorl	%edi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	xorl	%ebx,%ecx
-	xorl	%ecx,%esi
-
-	movl	24(%ebp),%edx
-	movl	28(%ebp),%ecx
-	addl	%esi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	addl	%ebx,%ecx
-	xorl	%ecx,%edi
-
-	movl	16(%ebp),%edx
-	movl	20(%ebp),%ecx
-	subl	%edi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	subl	%ebx,%ecx
-	xorl	%ecx,%esi
-
-	movl	8(%ebp),%edx
-	movl	12(%ebp),%ecx
-	xorl	%esi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	addl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	xorl	%ebx,%ecx
-	xorl	%ecx,%edi
-
-	movl	(%ebp),%edx
-	movl	4(%ebp),%ecx
-	addl	%edi,%edx
-	roll	%cl,%edx
-	movl	%edx,%ebx
-	xorl	%ecx,%ecx
-	movb	%dh,%cl
-	andl	$255,%ebx
-	shrl	$16,%edx
-	xorl	%eax,%eax
-	movb	%dh,%al
-	andl	$255,%edx
-	movl	CAST_S_table0(,%ecx,4),%ecx
-	movl	CAST_S_table1(,%ebx,4),%ebx
-	xorl	%ebx,%ecx
-	movl	CAST_S_table2(,%eax,4),%ebx
-	subl	%ebx,%ecx
-	movl	CAST_S_table3(,%edx,4),%ebx
-	addl	%ebx,%ecx
-	xorl	%ecx,%esi
-	nop
-	movl	20(%esp),%eax
-	movl	%edi,4(%eax)
-	movl	%esi,(%eax)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	CAST_decrypt,.-.L_CAST_decrypt_begin
-.globl	CAST_cbc_encrypt
-.type	CAST_cbc_encrypt,@function
-.align	16
-CAST_cbc_encrypt:
-.L_CAST_cbc_encrypt_begin:
-
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	28(%esp),%ebp
-
-	movl	36(%esp),%ebx
-	movl	(%ebx),%esi
-	movl	4(%ebx),%edi
-	pushl	%edi
-	pushl	%esi
-	pushl	%edi
-	pushl	%esi
-	movl	%esp,%ebx
-	movl	36(%esp),%esi
-	movl	40(%esp),%edi
-
-	movl	56(%esp),%ecx
-
-	movl	48(%esp),%eax
-	pushl	%eax
-	pushl	%ebx
-	cmpl	$0,%ecx
-	jz	.L002decrypt
-	andl	$4294967288,%ebp
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	jz	.L003encrypt_finish
-.L004encrypt_loop:
-	movl	(%esi),%ecx
-	movl	4(%esi),%edx
-	xorl	%ecx,%eax
-	xorl	%edx,%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	%eax,8(%esp)
-	movl	%ebx,12(%esp)
-	call	.L_CAST_encrypt_begin
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	addl	$8,%esi
-	addl	$8,%edi
-	subl	$8,%ebp
-	jnz	.L004encrypt_loop
-.L003encrypt_finish:
-	movl	52(%esp),%ebp
-	andl	$7,%ebp
-	jz	.L005finish
-	call	.L006PIC_point
-.L006PIC_point:
-	popl	%edx
-	leal	.L007cbc_enc_jmp_table-.L006PIC_point(%edx),%ecx
-	movl	(%ecx,%ebp,4),%ebp
-	addl	%edx,%ebp
-	xorl	%ecx,%ecx
-	xorl	%edx,%edx
-	jmp	*%ebp
-.L008ej7:
-	movb	6(%esi),%dh
-	shll	$8,%edx
-.L009ej6:
-	movb	5(%esi),%dh
-.L010ej5:
-	movb	4(%esi),%dl
-.L011ej4:
-	movl	(%esi),%ecx
-	jmp	.L012ejend
-.L013ej3:
-	movb	2(%esi),%ch
-	shll	$8,%ecx
-.L014ej2:
-	movb	1(%esi),%ch
-.L015ej1:
-	movb	(%esi),%cl
-.L012ejend:
-	xorl	%ecx,%eax
-	xorl	%edx,%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	%eax,8(%esp)
-	movl	%ebx,12(%esp)
-	call	.L_CAST_encrypt_begin
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	jmp	.L005finish
-.L002decrypt:
-	andl	$4294967288,%ebp
-	movl	16(%esp),%eax
-	movl	20(%esp),%ebx
-	jz	.L016decrypt_finish
-.L017decrypt_loop:
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	%eax,8(%esp)
-	movl	%ebx,12(%esp)
-	call	.L_CAST_decrypt_begin
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	16(%esp),%ecx
-	movl	20(%esp),%edx
-	xorl	%eax,%ecx
-	xorl	%ebx,%edx
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	%ecx,(%edi)
-	movl	%edx,4(%edi)
-	movl	%eax,16(%esp)
-	movl	%ebx,20(%esp)
-	addl	$8,%esi
-	addl	$8,%edi
-	subl	$8,%ebp
-	jnz	.L017decrypt_loop
-.L016decrypt_finish:
-	movl	52(%esp),%ebp
-	andl	$7,%ebp
-	jz	.L005finish
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	%eax,8(%esp)
-	movl	%ebx,12(%esp)
-	call	.L_CAST_decrypt_begin
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	bswap	%eax
-	bswap	%ebx
-	movl	16(%esp),%ecx
-	movl	20(%esp),%edx
-	xorl	%eax,%ecx
-	xorl	%ebx,%edx
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-.L018dj7:
-	rorl	$16,%edx
-	movb	%dl,6(%edi)
-	shrl	$16,%edx
-.L019dj6:
-	movb	%dh,5(%edi)
-.L020dj5:
-	movb	%dl,4(%edi)
-.L021dj4:
-	movl	%ecx,(%edi)
-	jmp	.L022djend
-.L023dj3:
-	rorl	$16,%ecx
-	movb	%cl,2(%edi)
-	shll	$16,%ecx
-.L024dj2:
-	movb	%ch,1(%esi)
-.L025dj1:
-	movb	%cl,(%esi)
-.L022djend:
-	jmp	.L005finish
-.L005finish:
-	movl	60(%esp),%ecx
-	addl	$24,%esp
-	movl	%eax,(%ecx)
-	movl	%ebx,4(%ecx)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.align	64
-.L007cbc_enc_jmp_table:
-.long	0
-.long	.L015ej1-.L006PIC_point
-.long	.L014ej2-.L006PIC_point
-.long	.L013ej3-.L006PIC_point
-.long	.L011ej4-.L006PIC_point
-.long	.L010ej5-.L006PIC_point
-.long	.L009ej6-.L006PIC_point
-.long	.L008ej7-.L006PIC_point
-.align	64
-.size	CAST_cbc_encrypt,.-.L_CAST_cbc_encrypt_begin

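As an aside for readers tracing the diff: the unrolled loop removed above is CAST-128 as specified in RFC 2144, i.e. sixteen Feistel rounds cycling through three round functions (the popl %edx / jnz pair skips the last four rounds for keys of 80 bits or less, where the RFC prescribes twelve rounds). A minimal C sketch of the three round functions, with S1..S4 standing in for the four S-boxes the assembly reaches through the CAST_S_table0..3 symbols (the sketch follows the RFC's byte numbering rather than the generated code's register-level indexing, so it illustrates the algorithm, not a drop-in replacement):

	#include <stdint.h>

	/* The four CAST-128 S-boxes; declared extern because this is
	 * only a sketch of the round structure. */
	extern const uint32_t S1[256], S2[256], S3[256], S4[256];

	static uint32_t rol32(uint32_t x, uint32_t n)
	{
		n &= 31;
		return n ? (x << n) | (x >> (32 - n)) : x;
	}

	/* Type 1 rounds (1,4,7,...): subkey mixed in with +,
	 * S-box outputs combined with ^, -, +. */
	static uint32_t cast_f1(uint32_t d, uint32_t km, uint32_t kr)
	{
		uint32_t i = rol32(km + d, kr);
		return ((S1[i >> 24] ^ S2[(i >> 16) & 0xff])
		        - S3[(i >> 8) & 0xff]) + S4[i & 0xff];
	}

	/* Type 2 rounds (2,5,8,...): subkey mixed in with ^,
	 * S-box outputs combined with -, +, ^. */
	static uint32_t cast_f2(uint32_t d, uint32_t km, uint32_t kr)
	{
		uint32_t i = rol32(km ^ d, kr);
		return ((S1[i >> 24] - S2[(i >> 16) & 0xff])
		        + S3[(i >> 8) & 0xff]) ^ S4[i & 0xff];
	}

	/* Type 3 rounds (3,6,9,...): subkey mixed in with -,
	 * S-box outputs combined with +, ^, -. */
	static uint32_t cast_f3(uint32_t d, uint32_t km, uint32_t kr)
	{
		uint32_t i = rol32(km - d, kr);
		return ((S1[i >> 24] + S2[(i >> 16) & 0xff])
		        ^ S3[(i >> 8) & 0xff]) - S4[i & 0xff];
	}

In the assembly each round is one of these bodies fully inlined: the addl/xorl/subl that folds the subkey into the half-block before the roll, and the xor/sub/add pattern over the four table lookups after it, tell you which round type you are looking at.
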
Added: trunk/secure/lib/libcrypto/i386/cmll-x86.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/cmll-x86.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/cmll-x86.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,4756 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/cmll-x86.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from cmll-x86.pl.
+#ifdef PIC
+.file	"cmll-x86.S"
+.text
+.globl	Camellia_EncryptBlock_Rounds
+.type	Camellia_EncryptBlock_Rounds,@function
+.align	16
+Camellia_EncryptBlock_Rounds:
+.L_Camellia_EncryptBlock_Rounds_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%eax
+	movl	24(%esp),%esi
+	movl	28(%esp),%edi
+	movl	%esp,%ebx
+	subl	$28,%esp
+	andl	$-64,%esp
+	leal	-127(%edi),%ecx
+	subl	%esp,%ecx
+	negl	%ecx
+	andl	$960,%ecx
+	subl	%ecx,%esp
+	addl	$4,%esp
+	shll	$6,%eax
+	leal	(%edi,%eax,1),%eax
+	movl	%ebx,20(%esp)
+	movl	%eax,16(%esp)
+	call	.L000pic_point
+.L000pic_point:
+	popl	%ebp
+	leal	.LCamellia_SBOX-.L000pic_point(%ebp),%ebp
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	bswap	%eax
+	movl	12(%esi),%edx
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	call	_x86_Camellia_encrypt
+	movl	20(%esp),%esp
+	bswap	%eax
+	movl	32(%esp),%esi
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	Camellia_EncryptBlock_Rounds,.-.L_Camellia_EncryptBlock_Rounds_begin
+.globl	Camellia_EncryptBlock
+.type	Camellia_EncryptBlock,@function
+.align	16
+Camellia_EncryptBlock:
+.L_Camellia_EncryptBlock_begin:
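+# (The next four instructions derive the "grand round" count from the
+# key length argument: the subl sets CF exactly when the key is longer
+# than 128 bits, so movl $3 / adcl $0 leaves 3 for 128-bit keys and 4
+# for 192/256-bit keys.)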
+	movl	$128,%eax
+	subl	4(%esp),%eax
+	movl	$3,%eax
+	adcl	$0,%eax
+	movl	%eax,4(%esp)
+	jmp	.L_Camellia_EncryptBlock_Rounds_begin
+.size	Camellia_EncryptBlock,.-.L_Camellia_EncryptBlock_begin
+.globl	Camellia_encrypt
+.type	Camellia_encrypt,@function
+.align	16
+Camellia_encrypt:
+.L_Camellia_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	28(%esp),%edi
+	movl	%esp,%ebx
+	subl	$28,%esp
+	andl	$-64,%esp
+	movl	272(%edi),%eax
+	leal	-127(%edi),%ecx
+	subl	%esp,%ecx
+	negl	%ecx
+	andl	$960,%ecx
+	subl	%ecx,%esp
+	addl	$4,%esp
+	shll	$6,%eax
+	leal	(%edi,%eax,1),%eax
+	movl	%ebx,20(%esp)
+	movl	%eax,16(%esp)
+	call	.L001pic_point
+.L001pic_point:
+	popl	%ebp
+	leal	.LCamellia_SBOX-.L001pic_point(%ebp),%ebp
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	bswap	%eax
+	movl	12(%esi),%edx
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	call	_x86_Camellia_encrypt
+	movl	20(%esp),%esp
+	bswap	%eax
+	movl	24(%esp),%esi
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	Camellia_encrypt,.-.L_Camellia_encrypt_begin
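+# _x86_Camellia_encrypt below is the shared Feistel core.  Roughly:
+# each pass through .L002loop performs six rounds (three pairs of the
+# repeated S-box lookup/XOR block), after which the subkey pointer is
+# advanced by 64 bytes; if more subkeys remain, the FL/FL^-1 mixing
+# (the andl/roll/xorl and orl/xorl sequence before the jmp back to
+# .L002loop) is applied first, per the Camellia specification.  The
+# decrypt twin further down is the same core walking the key schedule
+# backwards.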
+.type	_x86_Camellia_encrypt,@function
+.align	16
+_x86_Camellia_encrypt:
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	16(%edi),%esi
+	movl	%eax,4(%esp)
+	movl	%ebx,8(%esp)
+	movl	%ecx,12(%esp)
+	movl	%edx,16(%esp)
+.align	16
+.L002loop:
+	xorl	%esi,%eax
+	xorl	20(%edi),%ebx
+	movzbl	%ah,%esi
+	movl	2052(%ebp,%esi,8),%edx
+	movzbl	%al,%esi
+	xorl	4(%ebp,%esi,8),%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movl	(%ebp,%esi,8),%ecx
+	movzbl	%ah,%esi
+	xorl	(%ebp,%esi,8),%edx
+	movzbl	%bh,%esi
+	xorl	4(%ebp,%esi,8),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%eax
+	xorl	2048(%ebp,%eax,8),%edx
+	movzbl	%bh,%esi
+	movl	16(%esp),%eax
+	xorl	%edx,%ecx
+	rorl	$8,%edx
+	xorl	2048(%ebp,%esi,8),%ecx
+	movzbl	%bl,%esi
+	movl	12(%esp),%ebx
+	xorl	%eax,%edx
+	xorl	2052(%ebp,%esi,8),%ecx
+	movl	24(%edi),%esi
+	xorl	%ecx,%edx
+	movl	%edx,16(%esp)
+	xorl	%ebx,%ecx
+	movl	%ecx,12(%esp)
+	xorl	%esi,%ecx
+	xorl	28(%edi),%edx
+	movzbl	%ch,%esi
+	movl	2052(%ebp,%esi,8),%ebx
+	movzbl	%cl,%esi
+	xorl	4(%ebp,%esi,8),%ebx
+	shrl	$16,%ecx
+	movzbl	%dl,%esi
+	movl	(%ebp,%esi,8),%eax
+	movzbl	%ch,%esi
+	xorl	(%ebp,%esi,8),%ebx
+	movzbl	%dh,%esi
+	xorl	4(%ebp,%esi,8),%eax
+	shrl	$16,%edx
+	movzbl	%cl,%ecx
+	xorl	2048(%ebp,%ecx,8),%ebx
+	movzbl	%dh,%esi
+	movl	8(%esp),%ecx
+	xorl	%ebx,%eax
+	rorl	$8,%ebx
+	xorl	2048(%ebp,%esi,8),%eax
+	movzbl	%dl,%esi
+	movl	4(%esp),%edx
+	xorl	%ecx,%ebx
+	xorl	2052(%ebp,%esi,8),%eax
+	movl	32(%edi),%esi
+	xorl	%eax,%ebx
+	movl	%ebx,8(%esp)
+	xorl	%edx,%eax
+	movl	%eax,4(%esp)
+	xorl	%esi,%eax
+	xorl	36(%edi),%ebx
+	movzbl	%ah,%esi
+	movl	2052(%ebp,%esi,8),%edx
+	movzbl	%al,%esi
+	xorl	4(%ebp,%esi,8),%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movl	(%ebp,%esi,8),%ecx
+	movzbl	%ah,%esi
+	xorl	(%ebp,%esi,8),%edx
+	movzbl	%bh,%esi
+	xorl	4(%ebp,%esi,8),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%eax
+	xorl	2048(%ebp,%eax,8),%edx
+	movzbl	%bh,%esi
+	movl	16(%esp),%eax
+	xorl	%edx,%ecx
+	rorl	$8,%edx
+	xorl	2048(%ebp,%esi,8),%ecx
+	movzbl	%bl,%esi
+	movl	12(%esp),%ebx
+	xorl	%eax,%edx
+	xorl	2052(%ebp,%esi,8),%ecx
+	movl	40(%edi),%esi
+	xorl	%ecx,%edx
+	movl	%edx,16(%esp)
+	xorl	%ebx,%ecx
+	movl	%ecx,12(%esp)
+	xorl	%esi,%ecx
+	xorl	44(%edi),%edx
+	movzbl	%ch,%esi
+	movl	2052(%ebp,%esi,8),%ebx
+	movzbl	%cl,%esi
+	xorl	4(%ebp,%esi,8),%ebx
+	shrl	$16,%ecx
+	movzbl	%dl,%esi
+	movl	(%ebp,%esi,8),%eax
+	movzbl	%ch,%esi
+	xorl	(%ebp,%esi,8),%ebx
+	movzbl	%dh,%esi
+	xorl	4(%ebp,%esi,8),%eax
+	shrl	$16,%edx
+	movzbl	%cl,%ecx
+	xorl	2048(%ebp,%ecx,8),%ebx
+	movzbl	%dh,%esi
+	movl	8(%esp),%ecx
+	xorl	%ebx,%eax
+	rorl	$8,%ebx
+	xorl	2048(%ebp,%esi,8),%eax
+	movzbl	%dl,%esi
+	movl	4(%esp),%edx
+	xorl	%ecx,%ebx
+	xorl	2052(%ebp,%esi,8),%eax
+	movl	48(%edi),%esi
+	xorl	%eax,%ebx
+	movl	%ebx,8(%esp)
+	xorl	%edx,%eax
+	movl	%eax,4(%esp)
+	xorl	%esi,%eax
+	xorl	52(%edi),%ebx
+	movzbl	%ah,%esi
+	movl	2052(%ebp,%esi,8),%edx
+	movzbl	%al,%esi
+	xorl	4(%ebp,%esi,8),%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movl	(%ebp,%esi,8),%ecx
+	movzbl	%ah,%esi
+	xorl	(%ebp,%esi,8),%edx
+	movzbl	%bh,%esi
+	xorl	4(%ebp,%esi,8),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%eax
+	xorl	2048(%ebp,%eax,8),%edx
+	movzbl	%bh,%esi
+	movl	16(%esp),%eax
+	xorl	%edx,%ecx
+	rorl	$8,%edx
+	xorl	2048(%ebp,%esi,8),%ecx
+	movzbl	%bl,%esi
+	movl	12(%esp),%ebx
+	xorl	%eax,%edx
+	xorl	2052(%ebp,%esi,8),%ecx
+	movl	56(%edi),%esi
+	xorl	%ecx,%edx
+	movl	%edx,16(%esp)
+	xorl	%ebx,%ecx
+	movl	%ecx,12(%esp)
+	xorl	%esi,%ecx
+	xorl	60(%edi),%edx
+	movzbl	%ch,%esi
+	movl	2052(%ebp,%esi,8),%ebx
+	movzbl	%cl,%esi
+	xorl	4(%ebp,%esi,8),%ebx
+	shrl	$16,%ecx
+	movzbl	%dl,%esi
+	movl	(%ebp,%esi,8),%eax
+	movzbl	%ch,%esi
+	xorl	(%ebp,%esi,8),%ebx
+	movzbl	%dh,%esi
+	xorl	4(%ebp,%esi,8),%eax
+	shrl	$16,%edx
+	movzbl	%cl,%ecx
+	xorl	2048(%ebp,%ecx,8),%ebx
+	movzbl	%dh,%esi
+	movl	8(%esp),%ecx
+	xorl	%ebx,%eax
+	rorl	$8,%ebx
+	xorl	2048(%ebp,%esi,8),%eax
+	movzbl	%dl,%esi
+	movl	4(%esp),%edx
+	xorl	%ecx,%ebx
+	xorl	2052(%ebp,%esi,8),%eax
+	movl	64(%edi),%esi
+	xorl	%eax,%ebx
+	movl	%ebx,8(%esp)
+	xorl	%edx,%eax
+	movl	%eax,4(%esp)
+	addl	$64,%edi
+	cmpl	20(%esp),%edi
+	je	.L003done
+	andl	%eax,%esi
+	movl	16(%esp),%edx
+	roll	$1,%esi
+	movl	%edx,%ecx
+	xorl	%esi,%ebx
+	orl	12(%edi),%ecx
+	movl	%ebx,8(%esp)
+	xorl	12(%esp),%ecx
+	movl	4(%edi),%esi
+	movl	%ecx,12(%esp)
+	orl	%ebx,%esi
+	andl	8(%edi),%ecx
+	xorl	%esi,%eax
+	roll	$1,%ecx
+	movl	%eax,4(%esp)
+	xorl	%ecx,%edx
+	movl	16(%edi),%esi
+	movl	%edx,16(%esp)
+	jmp	.L002loop
+.align	8
+.L003done:
+	movl	%eax,%ecx
+	movl	%ebx,%edx
+	movl	12(%esp),%eax
+	movl	16(%esp),%ebx
+	xorl	%esi,%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	ret
+.size	_x86_Camellia_encrypt,.-_x86_Camellia_encrypt
+.globl	Camellia_DecryptBlock_Rounds
+.type	Camellia_DecryptBlock_Rounds,@function
+.align	16
+Camellia_DecryptBlock_Rounds:
+.L_Camellia_DecryptBlock_Rounds_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%eax
+	movl	24(%esp),%esi
+	movl	28(%esp),%edi
+	movl	%esp,%ebx
+	subl	$28,%esp
+	andl	$-64,%esp
+	leal	-127(%edi),%ecx
+	subl	%esp,%ecx
+	negl	%ecx
+	andl	$960,%ecx
+	subl	%ecx,%esp
+	addl	$4,%esp
+	shll	$6,%eax
+	movl	%edi,16(%esp)
+	leal	(%edi,%eax,1),%edi
+	movl	%ebx,20(%esp)
+	call	.L004pic_point
+.L004pic_point:
+	popl	%ebp
+	leal	.LCamellia_SBOX-.L004pic_point(%ebp),%ebp
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	bswap	%eax
+	movl	12(%esi),%edx
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	call	_x86_Camellia_decrypt
+	movl	20(%esp),%esp
+	bswap	%eax
+	movl	32(%esp),%esi
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	Camellia_DecryptBlock_Rounds,.-.L_Camellia_DecryptBlock_Rounds_begin
+.globl	Camellia_DecryptBlock
+.type	Camellia_DecryptBlock,@function
+.align	16
+Camellia_DecryptBlock:
+.L_Camellia_DecryptBlock_begin:
+	movl	$128,%eax
+	subl	4(%esp),%eax
+	movl	$3,%eax
+	adcl	$0,%eax
+	movl	%eax,4(%esp)
+	jmp	.L_Camellia_DecryptBlock_Rounds_begin
+.size	Camellia_DecryptBlock,.-.L_Camellia_DecryptBlock_begin
+.globl	Camellia_decrypt
+.type	Camellia_decrypt,@function
+.align	16
+Camellia_decrypt:
+.L_Camellia_decrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	28(%esp),%edi
+	movl	%esp,%ebx
+	subl	$28,%esp
+	andl	$-64,%esp
+	movl	272(%edi),%eax
+	leal	-127(%edi),%ecx
+	subl	%esp,%ecx
+	negl	%ecx
+	andl	$960,%ecx
+	subl	%ecx,%esp
+	addl	$4,%esp
+	shll	$6,%eax
+	movl	%edi,16(%esp)
+	leal	(%edi,%eax,1),%edi
+	movl	%ebx,20(%esp)
+	call	.L005pic_point
+.L005pic_point:
+	popl	%ebp
+	leal	.LCamellia_SBOX-.L005pic_point(%ebp),%ebp
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	bswap	%eax
+	movl	12(%esi),%edx
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	call	_x86_Camellia_decrypt
+	movl	20(%esp),%esp
+	bswap	%eax
+	movl	24(%esp),%esi
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	Camellia_decrypt,.-.L_Camellia_decrypt_begin
+.type	_x86_Camellia_decrypt,@function
+.align	16
+_x86_Camellia_decrypt:
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	-8(%edi),%esi
+	movl	%eax,4(%esp)
+	movl	%ebx,8(%esp)
+	movl	%ecx,12(%esp)
+	movl	%edx,16(%esp)
+.align	16
+.L006loop:
+	xorl	%esi,%eax
+	xorl	-4(%edi),%ebx
+	movzbl	%ah,%esi
+	movl	2052(%ebp,%esi,8),%edx
+	movzbl	%al,%esi
+	xorl	4(%ebp,%esi,8),%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movl	(%ebp,%esi,8),%ecx
+	movzbl	%ah,%esi
+	xorl	(%ebp,%esi,8),%edx
+	movzbl	%bh,%esi
+	xorl	4(%ebp,%esi,8),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%eax
+	xorl	2048(%ebp,%eax,8),%edx
+	movzbl	%bh,%esi
+	movl	16(%esp),%eax
+	xorl	%edx,%ecx
+	rorl	$8,%edx
+	xorl	2048(%ebp,%esi,8),%ecx
+	movzbl	%bl,%esi
+	movl	12(%esp),%ebx
+	xorl	%eax,%edx
+	xorl	2052(%ebp,%esi,8),%ecx
+	movl	-16(%edi),%esi
+	xorl	%ecx,%edx
+	movl	%edx,16(%esp)
+	xorl	%ebx,%ecx
+	movl	%ecx,12(%esp)
+	xorl	%esi,%ecx
+	xorl	-12(%edi),%edx
+	movzbl	%ch,%esi
+	movl	2052(%ebp,%esi,8),%ebx
+	movzbl	%cl,%esi
+	xorl	4(%ebp,%esi,8),%ebx
+	shrl	$16,%ecx
+	movzbl	%dl,%esi
+	movl	(%ebp,%esi,8),%eax
+	movzbl	%ch,%esi
+	xorl	(%ebp,%esi,8),%ebx
+	movzbl	%dh,%esi
+	xorl	4(%ebp,%esi,8),%eax
+	shrl	$16,%edx
+	movzbl	%cl,%ecx
+	xorl	2048(%ebp,%ecx,8),%ebx
+	movzbl	%dh,%esi
+	movl	8(%esp),%ecx
+	xorl	%ebx,%eax
+	rorl	$8,%ebx
+	xorl	2048(%ebp,%esi,8),%eax
+	movzbl	%dl,%esi
+	movl	4(%esp),%edx
+	xorl	%ecx,%ebx
+	xorl	2052(%ebp,%esi,8),%eax
+	movl	-24(%edi),%esi
+	xorl	%eax,%ebx
+	movl	%ebx,8(%esp)
+	xorl	%edx,%eax
+	movl	%eax,4(%esp)
+	xorl	%esi,%eax
+	xorl	-20(%edi),%ebx
+	movzbl	%ah,%esi
+	movl	2052(%ebp,%esi,8),%edx
+	movzbl	%al,%esi
+	xorl	4(%ebp,%esi,8),%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movl	(%ebp,%esi,8),%ecx
+	movzbl	%ah,%esi
+	xorl	(%ebp,%esi,8),%edx
+	movzbl	%bh,%esi
+	xorl	4(%ebp,%esi,8),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%eax
+	xorl	2048(%ebp,%eax,8),%edx
+	movzbl	%bh,%esi
+	movl	16(%esp),%eax
+	xorl	%edx,%ecx
+	rorl	$8,%edx
+	xorl	2048(%ebp,%esi,8),%ecx
+	movzbl	%bl,%esi
+	movl	12(%esp),%ebx
+	xorl	%eax,%edx
+	xorl	2052(%ebp,%esi,8),%ecx
+	movl	-32(%edi),%esi
+	xorl	%ecx,%edx
+	movl	%edx,16(%esp)
+	xorl	%ebx,%ecx
+	movl	%ecx,12(%esp)
+	xorl	%esi,%ecx
+	xorl	-28(%edi),%edx
+	movzbl	%ch,%esi
+	movl	2052(%ebp,%esi,8),%ebx
+	movzbl	%cl,%esi
+	xorl	4(%ebp,%esi,8),%ebx
+	shrl	$16,%ecx
+	movzbl	%dl,%esi
+	movl	(%ebp,%esi,8),%eax
+	movzbl	%ch,%esi
+	xorl	(%ebp,%esi,8),%ebx
+	movzbl	%dh,%esi
+	xorl	4(%ebp,%esi,8),%eax
+	shrl	$16,%edx
+	movzbl	%cl,%ecx
+	xorl	2048(%ebp,%ecx,8),%ebx
+	movzbl	%dh,%esi
+	movl	8(%esp),%ecx
+	xorl	%ebx,%eax
+	rorl	$8,%ebx
+	xorl	2048(%ebp,%esi,8),%eax
+	movzbl	%dl,%esi
+	movl	4(%esp),%edx
+	xorl	%ecx,%ebx
+	xorl	2052(%ebp,%esi,8),%eax
+	movl	-40(%edi),%esi
+	xorl	%eax,%ebx
+	movl	%ebx,8(%esp)
+	xorl	%edx,%eax
+	movl	%eax,4(%esp)
+	xorl	%esi,%eax
+	xorl	-36(%edi),%ebx
+	movzbl	%ah,%esi
+	movl	2052(%ebp,%esi,8),%edx
+	movzbl	%al,%esi
+	xorl	4(%ebp,%esi,8),%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movl	(%ebp,%esi,8),%ecx
+	movzbl	%ah,%esi
+	xorl	(%ebp,%esi,8),%edx
+	movzbl	%bh,%esi
+	xorl	4(%ebp,%esi,8),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%eax
+	xorl	2048(%ebp,%eax,8),%edx
+	movzbl	%bh,%esi
+	movl	16(%esp),%eax
+	xorl	%edx,%ecx
+	rorl	$8,%edx
+	xorl	2048(%ebp,%esi,8),%ecx
+	movzbl	%bl,%esi
+	movl	12(%esp),%ebx
+	xorl	%eax,%edx
+	xorl	2052(%ebp,%esi,8),%ecx
+	movl	-48(%edi),%esi
+	xorl	%ecx,%edx
+	movl	%edx,16(%esp)
+	xorl	%ebx,%ecx
+	movl	%ecx,12(%esp)
+	xorl	%esi,%ecx
+	xorl	-44(%edi),%edx
+	movzbl	%ch,%esi
+	movl	2052(%ebp,%esi,8),%ebx
+	movzbl	%cl,%esi
+	xorl	4(%ebp,%esi,8),%ebx
+	shrl	$16,%ecx
+	movzbl	%dl,%esi
+	movl	(%ebp,%esi,8),%eax
+	movzbl	%ch,%esi
+	xorl	(%ebp,%esi,8),%ebx
+	movzbl	%dh,%esi
+	xorl	4(%ebp,%esi,8),%eax
+	shrl	$16,%edx
+	movzbl	%cl,%ecx
+	xorl	2048(%ebp,%ecx,8),%ebx
+	movzbl	%dh,%esi
+	movl	8(%esp),%ecx
+	xorl	%ebx,%eax
+	rorl	$8,%ebx
+	xorl	2048(%ebp,%esi,8),%eax
+	movzbl	%dl,%esi
+	movl	4(%esp),%edx
+	xorl	%ecx,%ebx
+	xorl	2052(%ebp,%esi,8),%eax
+	movl	-56(%edi),%esi
+	xorl	%eax,%ebx
+	movl	%ebx,8(%esp)
+	xorl	%edx,%eax
+	movl	%eax,4(%esp)
+	subl	$64,%edi
+	cmpl	20(%esp),%edi
+	je	.L007done
+	andl	%eax,%esi
+	movl	16(%esp),%edx
+	roll	$1,%esi
+	movl	%edx,%ecx
+	xorl	%esi,%ebx
+	orl	4(%edi),%ecx
+	movl	%ebx,8(%esp)
+	xorl	12(%esp),%ecx
+	movl	12(%edi),%esi
+	movl	%ecx,12(%esp)
+	orl	%ebx,%esi
+	andl	(%edi),%ecx
+	xorl	%esi,%eax
+	roll	$1,%ecx
+	movl	%eax,4(%esp)
+	xorl	%ecx,%edx
+	movl	-8(%edi),%esi
+	movl	%edx,16(%esp)
+	jmp	.L006loop
+.align	8
+.L007done:
+	movl	%eax,%ecx
+	movl	%ebx,%edx
+	movl	12(%esp),%eax
+	movl	16(%esp),%ebx
+	xorl	%esi,%ecx
+	xorl	12(%edi),%edx
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	ret
+.size	_x86_Camellia_decrypt,.-_x86_Camellia_decrypt
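+# Camellia_Ekeygen below expands the user key.  The long runs of
+# shll/shrl/orl pairs implement 128-bit rotations across the four
+# 32-bit words (e.g. shll $15 of each word OR'ed with the neighbour's
+# shrl $17 is a 128-bit rotate-left by 15): per the Camellia spec the
+# subkeys are word slices of KL and KA (plus KR/KB for 192/256-bit
+# keys) rotated by fixed amounts, and eax on return carries the grand
+# round count (3 or 4) that the caller stores in the key structure.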
+.globl	Camellia_Ekeygen
+.type	Camellia_Ekeygen,@function
+.align	16
+Camellia_Ekeygen:
+.L_Camellia_Ekeygen_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	subl	$16,%esp
+	movl	36(%esp),%ebp
+	movl	40(%esp),%esi
+	movl	44(%esp),%edi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	cmpl	$128,%ebp
+	je	.L0081st128
+	movl	16(%esi),%eax
+	movl	20(%esi),%ebx
+	cmpl	$192,%ebp
+	je	.L0091st192
+	movl	24(%esi),%ecx
+	movl	28(%esi),%edx
+	jmp	.L0101st256
+.align	4
+.L0091st192:
+	movl	%eax,%ecx
+	movl	%ebx,%edx
+	notl	%ecx
+	notl	%edx
+.align	4
+.L0101st256:
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,32(%edi)
+	movl	%ebx,36(%edi)
+	movl	%ecx,40(%edi)
+	movl	%edx,44(%edi)
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+.align	4
+.L0081st128:
+	call	.L011pic_point
+.L011pic_point:
+	popl	%ebp
+	leal	.LCamellia_SBOX-.L011pic_point(%ebp),%ebp
+	leal	.LCamellia_SIGMA-.LCamellia_SBOX(%ebp),%edi
+	movl	(%edi),%esi
+	movl	%eax,(%esp)
+	movl	%ebx,4(%esp)
+	movl	%ecx,8(%esp)
+	movl	%edx,12(%esp)
+	xorl	%esi,%eax
+	xorl	4(%edi),%ebx
+	movzbl	%ah,%esi
+	movl	2052(%ebp,%esi,8),%edx
+	movzbl	%al,%esi
+	xorl	4(%ebp,%esi,8),%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movl	(%ebp,%esi,8),%ecx
+	movzbl	%ah,%esi
+	xorl	(%ebp,%esi,8),%edx
+	movzbl	%bh,%esi
+	xorl	4(%ebp,%esi,8),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%eax
+	xorl	2048(%ebp,%eax,8),%edx
+	movzbl	%bh,%esi
+	movl	12(%esp),%eax
+	xorl	%edx,%ecx
+	rorl	$8,%edx
+	xorl	2048(%ebp,%esi,8),%ecx
+	movzbl	%bl,%esi
+	movl	8(%esp),%ebx
+	xorl	%eax,%edx
+	xorl	2052(%ebp,%esi,8),%ecx
+	movl	8(%edi),%esi
+	xorl	%ecx,%edx
+	movl	%edx,12(%esp)
+	xorl	%ebx,%ecx
+	movl	%ecx,8(%esp)
+	xorl	%esi,%ecx
+	xorl	12(%edi),%edx
+	movzbl	%ch,%esi
+	movl	2052(%ebp,%esi,8),%ebx
+	movzbl	%cl,%esi
+	xorl	4(%ebp,%esi,8),%ebx
+	shrl	$16,%ecx
+	movzbl	%dl,%esi
+	movl	(%ebp,%esi,8),%eax
+	movzbl	%ch,%esi
+	xorl	(%ebp,%esi,8),%ebx
+	movzbl	%dh,%esi
+	xorl	4(%ebp,%esi,8),%eax
+	shrl	$16,%edx
+	movzbl	%cl,%ecx
+	xorl	2048(%ebp,%ecx,8),%ebx
+	movzbl	%dh,%esi
+	movl	4(%esp),%ecx
+	xorl	%ebx,%eax
+	rorl	$8,%ebx
+	xorl	2048(%ebp,%esi,8),%eax
+	movzbl	%dl,%esi
+	movl	(%esp),%edx
+	xorl	%ecx,%ebx
+	xorl	2052(%ebp,%esi,8),%eax
+	movl	16(%edi),%esi
+	xorl	%eax,%ebx
+	movl	%ebx,4(%esp)
+	xorl	%edx,%eax
+	movl	%eax,(%esp)
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edx
+	movl	44(%esp),%esi
+	xorl	(%esi),%eax
+	xorl	4(%esi),%ebx
+	xorl	8(%esi),%ecx
+	xorl	12(%esi),%edx
+	movl	16(%edi),%esi
+	movl	%eax,(%esp)
+	movl	%ebx,4(%esp)
+	movl	%ecx,8(%esp)
+	movl	%edx,12(%esp)
+	xorl	%esi,%eax
+	xorl	20(%edi),%ebx
+	movzbl	%ah,%esi
+	movl	2052(%ebp,%esi,8),%edx
+	movzbl	%al,%esi
+	xorl	4(%ebp,%esi,8),%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movl	(%ebp,%esi,8),%ecx
+	movzbl	%ah,%esi
+	xorl	(%ebp,%esi,8),%edx
+	movzbl	%bh,%esi
+	xorl	4(%ebp,%esi,8),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%eax
+	xorl	2048(%ebp,%eax,8),%edx
+	movzbl	%bh,%esi
+	movl	12(%esp),%eax
+	xorl	%edx,%ecx
+	rorl	$8,%edx
+	xorl	2048(%ebp,%esi,8),%ecx
+	movzbl	%bl,%esi
+	movl	8(%esp),%ebx
+	xorl	%eax,%edx
+	xorl	2052(%ebp,%esi,8),%ecx
+	movl	24(%edi),%esi
+	xorl	%ecx,%edx
+	movl	%edx,12(%esp)
+	xorl	%ebx,%ecx
+	movl	%ecx,8(%esp)
+	xorl	%esi,%ecx
+	xorl	28(%edi),%edx
+	movzbl	%ch,%esi
+	movl	2052(%ebp,%esi,8),%ebx
+	movzbl	%cl,%esi
+	xorl	4(%ebp,%esi,8),%ebx
+	shrl	$16,%ecx
+	movzbl	%dl,%esi
+	movl	(%ebp,%esi,8),%eax
+	movzbl	%ch,%esi
+	xorl	(%ebp,%esi,8),%ebx
+	movzbl	%dh,%esi
+	xorl	4(%ebp,%esi,8),%eax
+	shrl	$16,%edx
+	movzbl	%cl,%ecx
+	xorl	2048(%ebp,%ecx,8),%ebx
+	movzbl	%dh,%esi
+	movl	4(%esp),%ecx
+	xorl	%ebx,%eax
+	rorl	$8,%ebx
+	xorl	2048(%ebp,%esi,8),%eax
+	movzbl	%dl,%esi
+	movl	(%esp),%edx
+	xorl	%ecx,%ebx
+	xorl	2052(%ebp,%esi,8),%eax
+	movl	32(%edi),%esi
+	xorl	%eax,%ebx
+	movl	%ebx,4(%esp)
+	xorl	%edx,%eax
+	movl	%eax,(%esp)
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edx
+	movl	36(%esp),%esi
+	cmpl	$128,%esi
+	jne	.L0122nd256
+	movl	44(%esp),%edi
+	leal	128(%edi),%edi
+	movl	%eax,-112(%edi)
+	movl	%ebx,-108(%edi)
+	movl	%ecx,-104(%edi)
+	movl	%edx,-100(%edi)
+	movl	%eax,%ebp
+	shll	$15,%eax
+	movl	%ebx,%esi
+	shrl	$17,%esi
+	shll	$15,%ebx
+	orl	%esi,%eax
+	movl	%ecx,%esi
+	shll	$15,%ecx
+	movl	%eax,-80(%edi)
+	shrl	$17,%esi
+	orl	%esi,%ebx
+	shrl	$17,%ebp
+	movl	%edx,%esi
+	shrl	$17,%esi
+	movl	%ebx,-76(%edi)
+	shll	$15,%edx
+	orl	%esi,%ecx
+	orl	%ebp,%edx
+	movl	%ecx,-72(%edi)
+	movl	%edx,-68(%edi)
+	movl	%eax,%ebp
+	shll	$15,%eax
+	movl	%ebx,%esi
+	shrl	$17,%esi
+	shll	$15,%ebx
+	orl	%esi,%eax
+	movl	%ecx,%esi
+	shll	$15,%ecx
+	movl	%eax,-64(%edi)
+	shrl	$17,%esi
+	orl	%esi,%ebx
+	shrl	$17,%ebp
+	movl	%edx,%esi
+	shrl	$17,%esi
+	movl	%ebx,-60(%edi)
+	shll	$15,%edx
+	orl	%esi,%ecx
+	orl	%ebp,%edx
+	movl	%ecx,-56(%edi)
+	movl	%edx,-52(%edi)
+	movl	%eax,%ebp
+	shll	$15,%eax
+	movl	%ebx,%esi
+	shrl	$17,%esi
+	shll	$15,%ebx
+	orl	%esi,%eax
+	movl	%ecx,%esi
+	shll	$15,%ecx
+	movl	%eax,-32(%edi)
+	shrl	$17,%esi
+	orl	%esi,%ebx
+	shrl	$17,%ebp
+	movl	%edx,%esi
+	shrl	$17,%esi
+	movl	%ebx,-28(%edi)
+	shll	$15,%edx
+	orl	%esi,%ecx
+	orl	%ebp,%edx
+	movl	%eax,%ebp
+	shll	$15,%eax
+	movl	%ebx,%esi
+	shrl	$17,%esi
+	shll	$15,%ebx
+	orl	%esi,%eax
+	movl	%ecx,%esi
+	shll	$15,%ecx
+	movl	%eax,-16(%edi)
+	shrl	$17,%esi
+	orl	%esi,%ebx
+	shrl	$17,%ebp
+	movl	%edx,%esi
+	shrl	$17,%esi
+	movl	%ebx,-12(%edi)
+	shll	$15,%edx
+	orl	%esi,%ecx
+	orl	%ebp,%edx
+	movl	%ecx,-8(%edi)
+	movl	%edx,-4(%edi)
+	movl	%ebx,%ebp
+	shll	$2,%ebx
+	movl	%ecx,%esi
+	shrl	$30,%esi
+	shll	$2,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$2,%edx
+	movl	%ebx,32(%edi)
+	shrl	$30,%esi
+	orl	%esi,%ecx
+	shrl	$30,%ebp
+	movl	%eax,%esi
+	shrl	$30,%esi
+	movl	%ecx,36(%edi)
+	shll	$2,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,40(%edi)
+	movl	%eax,44(%edi)
+	movl	%ebx,%ebp
+	shll	$17,%ebx
+	movl	%ecx,%esi
+	shrl	$15,%esi
+	shll	$17,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$17,%edx
+	movl	%ebx,64(%edi)
+	shrl	$15,%esi
+	orl	%esi,%ecx
+	shrl	$15,%ebp
+	movl	%eax,%esi
+	shrl	$15,%esi
+	movl	%ecx,68(%edi)
+	shll	$17,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,72(%edi)
+	movl	%eax,76(%edi)
+	movl	-128(%edi),%ebx
+	movl	-124(%edi),%ecx
+	movl	-120(%edi),%edx
+	movl	-116(%edi),%eax
+	movl	%ebx,%ebp
+	shll	$15,%ebx
+	movl	%ecx,%esi
+	shrl	$17,%esi
+	shll	$15,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$15,%edx
+	movl	%ebx,-96(%edi)
+	shrl	$17,%esi
+	orl	%esi,%ecx
+	shrl	$17,%ebp
+	movl	%eax,%esi
+	shrl	$17,%esi
+	movl	%ecx,-92(%edi)
+	shll	$15,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,-88(%edi)
+	movl	%eax,-84(%edi)
+	movl	%ebx,%ebp
+	shll	$30,%ebx
+	movl	%ecx,%esi
+	shrl	$2,%esi
+	shll	$30,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$30,%edx
+	movl	%ebx,-48(%edi)
+	shrl	$2,%esi
+	orl	%esi,%ecx
+	shrl	$2,%ebp
+	movl	%eax,%esi
+	shrl	$2,%esi
+	movl	%ecx,-44(%edi)
+	shll	$30,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,-40(%edi)
+	movl	%eax,-36(%edi)
+	movl	%ebx,%ebp
+	shll	$15,%ebx
+	movl	%ecx,%esi
+	shrl	$17,%esi
+	shll	$15,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$15,%edx
+	shrl	$17,%esi
+	orl	%esi,%ecx
+	shrl	$17,%ebp
+	movl	%eax,%esi
+	shrl	$17,%esi
+	shll	$15,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,-24(%edi)
+	movl	%eax,-20(%edi)
+	movl	%ebx,%ebp
+	shll	$17,%ebx
+	movl	%ecx,%esi
+	shrl	$15,%esi
+	shll	$17,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$17,%edx
+	movl	%ebx,(%edi)
+	shrl	$15,%esi
+	orl	%esi,%ecx
+	shrl	$15,%ebp
+	movl	%eax,%esi
+	shrl	$15,%esi
+	movl	%ecx,4(%edi)
+	shll	$17,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,8(%edi)
+	movl	%eax,12(%edi)
+	movl	%ebx,%ebp
+	shll	$17,%ebx
+	movl	%ecx,%esi
+	shrl	$15,%esi
+	shll	$17,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$17,%edx
+	movl	%ebx,16(%edi)
+	shrl	$15,%esi
+	orl	%esi,%ecx
+	shrl	$15,%ebp
+	movl	%eax,%esi
+	shrl	$15,%esi
+	movl	%ecx,20(%edi)
+	shll	$17,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,24(%edi)
+	movl	%eax,28(%edi)
+	movl	%ebx,%ebp
+	shll	$17,%ebx
+	movl	%ecx,%esi
+	shrl	$15,%esi
+	shll	$17,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$17,%edx
+	movl	%ebx,48(%edi)
+	shrl	$15,%esi
+	orl	%esi,%ecx
+	shrl	$15,%ebp
+	movl	%eax,%esi
+	shrl	$15,%esi
+	movl	%ecx,52(%edi)
+	shll	$17,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,56(%edi)
+	movl	%eax,60(%edi)
+	movl	$3,%eax
+	jmp	.L013done
+.align	16
+.L0122nd256:
+	movl	44(%esp),%esi
+	movl	%eax,48(%esi)
+	movl	%ebx,52(%esi)
+	movl	%ecx,56(%esi)
+	movl	%edx,60(%esi)
+	xorl	32(%esi),%eax
+	xorl	36(%esi),%ebx
+	xorl	40(%esi),%ecx
+	xorl	44(%esi),%edx
+	movl	32(%edi),%esi
+	movl	%eax,(%esp)
+	movl	%ebx,4(%esp)
+	movl	%ecx,8(%esp)
+	movl	%edx,12(%esp)
+	xorl	%esi,%eax
+	xorl	36(%edi),%ebx
+	movzbl	%ah,%esi
+	movl	2052(%ebp,%esi,8),%edx
+	movzbl	%al,%esi
+	xorl	4(%ebp,%esi,8),%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movl	(%ebp,%esi,8),%ecx
+	movzbl	%ah,%esi
+	xorl	(%ebp,%esi,8),%edx
+	movzbl	%bh,%esi
+	xorl	4(%ebp,%esi,8),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%eax
+	xorl	2048(%ebp,%eax,8),%edx
+	movzbl	%bh,%esi
+	movl	12(%esp),%eax
+	xorl	%edx,%ecx
+	rorl	$8,%edx
+	xorl	2048(%ebp,%esi,8),%ecx
+	movzbl	%bl,%esi
+	movl	8(%esp),%ebx
+	xorl	%eax,%edx
+	xorl	2052(%ebp,%esi,8),%ecx
+	movl	40(%edi),%esi
+	xorl	%ecx,%edx
+	movl	%edx,12(%esp)
+	xorl	%ebx,%ecx
+	movl	%ecx,8(%esp)
+	xorl	%esi,%ecx
+	xorl	44(%edi),%edx
+	movzbl	%ch,%esi
+	movl	2052(%ebp,%esi,8),%ebx
+	movzbl	%cl,%esi
+	xorl	4(%ebp,%esi,8),%ebx
+	shrl	$16,%ecx
+	movzbl	%dl,%esi
+	movl	(%ebp,%esi,8),%eax
+	movzbl	%ch,%esi
+	xorl	(%ebp,%esi,8),%ebx
+	movzbl	%dh,%esi
+	xorl	4(%ebp,%esi,8),%eax
+	shrl	$16,%edx
+	movzbl	%cl,%ecx
+	xorl	2048(%ebp,%ecx,8),%ebx
+	movzbl	%dh,%esi
+	movl	4(%esp),%ecx
+	xorl	%ebx,%eax
+	rorl	$8,%ebx
+	xorl	2048(%ebp,%esi,8),%eax
+	movzbl	%dl,%esi
+	movl	(%esp),%edx
+	xorl	%ecx,%ebx
+	xorl	2052(%ebp,%esi,8),%eax
+	movl	48(%edi),%esi
+	xorl	%eax,%ebx
+	movl	%ebx,4(%esp)
+	xorl	%edx,%eax
+	movl	%eax,(%esp)
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edx
+	movl	44(%esp),%edi
+	leal	128(%edi),%edi
+	movl	%eax,-112(%edi)
+	movl	%ebx,-108(%edi)
+	movl	%ecx,-104(%edi)
+	movl	%edx,-100(%edi)
+	movl	%eax,%ebp
+	shll	$30,%eax
+	movl	%ebx,%esi
+	shrl	$2,%esi
+	shll	$30,%ebx
+	orl	%esi,%eax
+	movl	%ecx,%esi
+	shll	$30,%ecx
+	movl	%eax,-48(%edi)
+	shrl	$2,%esi
+	orl	%esi,%ebx
+	shrl	$2,%ebp
+	movl	%edx,%esi
+	shrl	$2,%esi
+	movl	%ebx,-44(%edi)
+	shll	$30,%edx
+	orl	%esi,%ecx
+	orl	%ebp,%edx
+	movl	%ecx,-40(%edi)
+	movl	%edx,-36(%edi)
+	movl	%eax,%ebp
+	shll	$30,%eax
+	movl	%ebx,%esi
+	shrl	$2,%esi
+	shll	$30,%ebx
+	orl	%esi,%eax
+	movl	%ecx,%esi
+	shll	$30,%ecx
+	movl	%eax,32(%edi)
+	shrl	$2,%esi
+	orl	%esi,%ebx
+	shrl	$2,%ebp
+	movl	%edx,%esi
+	shrl	$2,%esi
+	movl	%ebx,36(%edi)
+	shll	$30,%edx
+	orl	%esi,%ecx
+	orl	%ebp,%edx
+	movl	%ecx,40(%edi)
+	movl	%edx,44(%edi)
+	movl	%ebx,%ebp
+	shll	$19,%ebx
+	movl	%ecx,%esi
+	shrl	$13,%esi
+	shll	$19,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$19,%edx
+	movl	%ebx,128(%edi)
+	shrl	$13,%esi
+	orl	%esi,%ecx
+	shrl	$13,%ebp
+	movl	%eax,%esi
+	shrl	$13,%esi
+	movl	%ecx,132(%edi)
+	shll	$19,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,136(%edi)
+	movl	%eax,140(%edi)
+	movl	-96(%edi),%ebx
+	movl	-92(%edi),%ecx
+	movl	-88(%edi),%edx
+	movl	-84(%edi),%eax
+	movl	%ebx,%ebp
+	shll	$15,%ebx
+	movl	%ecx,%esi
+	shrl	$17,%esi
+	shll	$15,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$15,%edx
+	movl	%ebx,-96(%edi)
+	shrl	$17,%esi
+	orl	%esi,%ecx
+	shrl	$17,%ebp
+	movl	%eax,%esi
+	shrl	$17,%esi
+	movl	%ecx,-92(%edi)
+	shll	$15,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,-88(%edi)
+	movl	%eax,-84(%edi)
+	movl	%ebx,%ebp
+	shll	$15,%ebx
+	movl	%ecx,%esi
+	shrl	$17,%esi
+	shll	$15,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$15,%edx
+	movl	%ebx,-64(%edi)
+	shrl	$17,%esi
+	orl	%esi,%ecx
+	shrl	$17,%ebp
+	movl	%eax,%esi
+	shrl	$17,%esi
+	movl	%ecx,-60(%edi)
+	shll	$15,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,-56(%edi)
+	movl	%eax,-52(%edi)
+	movl	%ebx,%ebp
+	shll	$30,%ebx
+	movl	%ecx,%esi
+	shrl	$2,%esi
+	shll	$30,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$30,%edx
+	movl	%ebx,16(%edi)
+	shrl	$2,%esi
+	orl	%esi,%ecx
+	shrl	$2,%ebp
+	movl	%eax,%esi
+	shrl	$2,%esi
+	movl	%ecx,20(%edi)
+	shll	$30,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,24(%edi)
+	movl	%eax,28(%edi)
+	movl	%ecx,%ebp
+	shll	$2,%ecx
+	movl	%edx,%esi
+	shrl	$30,%esi
+	shll	$2,%edx
+	orl	%esi,%ecx
+	movl	%eax,%esi
+	shll	$2,%eax
+	movl	%ecx,80(%edi)
+	shrl	$30,%esi
+	orl	%esi,%edx
+	shrl	$30,%ebp
+	movl	%ebx,%esi
+	shrl	$30,%esi
+	movl	%edx,84(%edi)
+	shll	$2,%ebx
+	orl	%esi,%eax
+	orl	%ebp,%ebx
+	movl	%eax,88(%edi)
+	movl	%ebx,92(%edi)
+	movl	-80(%edi),%ecx
+	movl	-76(%edi),%edx
+	movl	-72(%edi),%eax
+	movl	-68(%edi),%ebx
+	movl	%ecx,%ebp
+	shll	$15,%ecx
+	movl	%edx,%esi
+	shrl	$17,%esi
+	shll	$15,%edx
+	orl	%esi,%ecx
+	movl	%eax,%esi
+	shll	$15,%eax
+	movl	%ecx,-80(%edi)
+	shrl	$17,%esi
+	orl	%esi,%edx
+	shrl	$17,%ebp
+	movl	%ebx,%esi
+	shrl	$17,%esi
+	movl	%edx,-76(%edi)
+	shll	$15,%ebx
+	orl	%esi,%eax
+	orl	%ebp,%ebx
+	movl	%eax,-72(%edi)
+	movl	%ebx,-68(%edi)
+	movl	%ecx,%ebp
+	shll	$30,%ecx
+	movl	%edx,%esi
+	shrl	$2,%esi
+	shll	$30,%edx
+	orl	%esi,%ecx
+	movl	%eax,%esi
+	shll	$30,%eax
+	movl	%ecx,-16(%edi)
+	shrl	$2,%esi
+	orl	%esi,%edx
+	shrl	$2,%ebp
+	movl	%ebx,%esi
+	shrl	$2,%esi
+	movl	%edx,-12(%edi)
+	shll	$30,%ebx
+	orl	%esi,%eax
+	orl	%ebp,%ebx
+	movl	%eax,-8(%edi)
+	movl	%ebx,-4(%edi)
+	movl	%edx,64(%edi)
+	movl	%eax,68(%edi)
+	movl	%ebx,72(%edi)
+	movl	%ecx,76(%edi)
+	movl	%edx,%ebp
+	shll	$17,%edx
+	movl	%eax,%esi
+	shrl	$15,%esi
+	shll	$17,%eax
+	orl	%esi,%edx
+	movl	%ebx,%esi
+	shll	$17,%ebx
+	movl	%edx,96(%edi)
+	shrl	$15,%esi
+	orl	%esi,%eax
+	shrl	$15,%ebp
+	movl	%ecx,%esi
+	shrl	$15,%esi
+	movl	%eax,100(%edi)
+	shll	$17,%ecx
+	orl	%esi,%ebx
+	orl	%ebp,%ecx
+	movl	%ebx,104(%edi)
+	movl	%ecx,108(%edi)
+	movl	-128(%edi),%edx
+	movl	-124(%edi),%eax
+	movl	-120(%edi),%ebx
+	movl	-116(%edi),%ecx
+	movl	%eax,%ebp
+	shll	$13,%eax
+	movl	%ebx,%esi
+	shrl	$19,%esi
+	shll	$13,%ebx
+	orl	%esi,%eax
+	movl	%ecx,%esi
+	shll	$13,%ecx
+	movl	%eax,-32(%edi)
+	shrl	$19,%esi
+	orl	%esi,%ebx
+	shrl	$19,%ebp
+	movl	%edx,%esi
+	shrl	$19,%esi
+	movl	%ebx,-28(%edi)
+	shll	$13,%edx
+	orl	%esi,%ecx
+	orl	%ebp,%edx
+	movl	%ecx,-24(%edi)
+	movl	%edx,-20(%edi)
+	movl	%eax,%ebp
+	shll	$15,%eax
+	movl	%ebx,%esi
+	shrl	$17,%esi
+	shll	$15,%ebx
+	orl	%esi,%eax
+	movl	%ecx,%esi
+	shll	$15,%ecx
+	movl	%eax,(%edi)
+	shrl	$17,%esi
+	orl	%esi,%ebx
+	shrl	$17,%ebp
+	movl	%edx,%esi
+	shrl	$17,%esi
+	movl	%ebx,4(%edi)
+	shll	$15,%edx
+	orl	%esi,%ecx
+	orl	%ebp,%edx
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	%eax,%ebp
+	shll	$17,%eax
+	movl	%ebx,%esi
+	shrl	$15,%esi
+	shll	$17,%ebx
+	orl	%esi,%eax
+	movl	%ecx,%esi
+	shll	$17,%ecx
+	movl	%eax,48(%edi)
+	shrl	$15,%esi
+	orl	%esi,%ebx
+	shrl	$15,%ebp
+	movl	%edx,%esi
+	shrl	$15,%esi
+	movl	%ebx,52(%edi)
+	shll	$17,%edx
+	orl	%esi,%ecx
+	orl	%ebp,%edx
+	movl	%ecx,56(%edi)
+	movl	%edx,60(%edi)
+	movl	%ebx,%ebp
+	shll	$2,%ebx
+	movl	%ecx,%esi
+	shrl	$30,%esi
+	shll	$2,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$2,%edx
+	movl	%ebx,112(%edi)
+	shrl	$30,%esi
+	orl	%esi,%ecx
+	shrl	$30,%ebp
+	movl	%eax,%esi
+	shrl	$30,%esi
+	movl	%ecx,116(%edi)
+	shll	$2,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,120(%edi)
+	movl	%eax,124(%edi)
+	movl	$4,%eax
+.L013done:
+	leal	144(%edi),%edx
+	addl	$16,%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	Camellia_Ekeygen,.-.L_Camellia_Ekeygen_begin
+.globl	private_Camellia_set_key
+.type	private_Camellia_set_key,@function
+.align	16
+private_Camellia_set_key:
+.L_private_Camellia_set_key_begin:
+	pushl	%ebx
+	movl	8(%esp),%ecx
+	movl	12(%esp),%ebx
+	movl	16(%esp),%edx
+	movl	$-1,%eax
+	testl	%ecx,%ecx
+	jz	.L014done
+	testl	%edx,%edx
+	jz	.L014done
+	movl	$-2,%eax
+	cmpl	$256,%ebx
+	je	.L015arg_ok
+	cmpl	$192,%ebx
+	je	.L015arg_ok
+	cmpl	$128,%ebx
+	jne	.L014done
+.align	4
+.L015arg_ok:
+	pushl	%edx
+	pushl	%ecx
+	pushl	%ebx
+	call	.L_Camellia_Ekeygen_begin
+	addl	$12,%esp
+	movl	%eax,(%edx)
+	xorl	%eax,%eax
+.align	4
+.L014done:
+	popl	%ebx
+	ret
+.size	private_Camellia_set_key,.-.L_private_Camellia_set_key_begin
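+# The tables below are the Camellia SIGMA key-schedule constants and a
+# combined S-box: two interleaved 256-entry tables of eight bytes each,
+# which is why the round code above indexes with (%ebp,%esi,8) and
+# selects one of four 32-bit views via the 0/4/2048/2052 displacements.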
+.align	64
+.LCamellia_SIGMA:
+.long	2694735487,1003262091,3061508184,1286239154,3337565999,3914302142,1426019237,4057165596,283453434,3731369245,2958461122,3018244605,0,0,0,0
+.align	64
+.LCamellia_SBOX:
+.long	1886416896,1886388336
+.long	2189591040,741081132
+.long	741092352,3014852787
+.long	3974949888,3233808576
+.long	3014898432,3840147684
+.long	656877312,1465319511
+.long	3233857536,3941204202
+.long	3857048832,2930639022
+.long	3840205824,589496355
+.long	2240120064,1802174571
+.long	1465341696,1162149957
+.long	892679424,2779054245
+.long	3941263872,3991732461
+.long	202116096,1330577487
+.long	2930683392,488439837
+.long	1094795520,2459041938
+.long	589505280,2256928902
+.long	4025478912,2947481775
+.long	1802201856,2088501372
+.long	2475922176,522125343
+.long	1162167552,1044250686
+.long	421075200,3705405660
+.long	2779096320,1583218782
+.long	555819264,185270283
+.long	3991792896,2795896998
+.long	235802112,960036921
+.long	1330597632,3587506389
+.long	1313754624,1566376029
+.long	488447232,3654877401
+.long	1701143808,1515847770
+.long	2459079168,1364262993
+.long	3183328512,1819017324
+.long	2256963072,2341142667
+.long	3099113472,2593783962
+.long	2947526400,4227531003
+.long	2408550144,2964324528
+.long	2088532992,1953759348
+.long	3958106880,724238379
+.long	522133248,4042260720
+.long	3469659648,2223243396
+.long	1044266496,3755933919
+.long	808464384,3419078859
+.long	3705461760,875823156
+.long	1600085760,1987444854
+.long	1583242752,1835860077
+.long	3318072576,2846425257
+.long	185273088,3520135377
+.long	437918208,67371012
+.long	2795939328,336855060
+.long	3789676800,976879674
+.long	960051456,3739091166
+.long	3402287616,286326801
+.long	3587560704,842137650
+.long	1195853568,2627469468
+.long	1566399744,1397948499
+.long	1027423488,4075946226
+.long	3654932736,4278059262
+.long	16843008,3486449871
+.long	1515870720,3284336835
+.long	3604403712,2054815866
+.long	1364283648,606339108
+.long	1448498688,3907518696
+.long	1819044864,1616904288
+.long	1296911616,1768489065
+.long	2341178112,2863268010
+.long	218959104,2694840480
+.long	2593823232,2711683233
+.long	1717986816,1650589794
+.long	4227595008,1414791252
+.long	3435973632,505282590
+.long	2964369408,3772776672
+.long	757935360,1684275300
+.long	1953788928,269484048
+.long	303174144,0
+.long	724249344,2745368739
+.long	538976256,1970602101
+.long	4042321920,2324299914
+.long	2981212416,3873833190
+.long	2223277056,151584777
+.long	2576980224,3722248413
+.long	3755990784,2273771655
+.long	1280068608,2206400643
+.long	3419130624,3452764365
+.long	3267543552,2425356432
+.long	875836416,1936916595
+.long	2122219008,4143317238
+.long	1987474944,2644312221
+.long	84215040,3216965823
+.long	1835887872,1381105746
+.long	3082270464,3638034648
+.long	2846468352,3368550600
+.long	825307392,3334865094
+.long	3520188672,2172715137
+.long	387389184,1869545583
+.long	67372032,320012307
+.long	3621246720,1667432547
+.long	336860160,3924361449
+.long	1482184704,2812739751
+.long	976894464,2677997727
+.long	1633771776,3166437564
+.long	3739147776,690552873
+.long	454761216,4193845497
+.long	286331136,791609391
+.long	471604224,3031695540
+.long	842150400,2021130360
+.long	252645120,101056518
+.long	2627509248,3890675943
+.long	370546176,1903231089
+.long	1397969664,3570663636
+.long	404232192,2880110763
+.long	4076007936,2290614408
+.long	572662272,2374828173
+.long	4278124032,1920073842
+.long	1145324544,3115909305
+.long	3486502656,4177002744
+.long	2998055424,2896953516
+.long	3284386560,909508662
+.long	3048584448,707395626
+.long	2054846976,1010565180
+.long	2442236160,4059103473
+.long	606348288,1077936192
+.long	134744064,3553820883
+.long	3907577856,3149594811
+.long	2829625344,1128464451
+.long	1616928768,353697813
+.long	4244438016,2913796269
+.long	1768515840,2004287607
+.long	1347440640,2155872384
+.long	2863311360,2189557890
+.long	3503345664,3974889708
+.long	2694881280,656867367
+.long	2105376000,3856990437
+.long	2711724288,2240086149
+.long	2307492096,892665909
+.long	1650614784,202113036
+.long	2543294208,1094778945
+.long	1414812672,4025417967
+.long	1532713728,2475884691
+.long	505290240,421068825
+.long	2509608192,555810849
+.long	3772833792,235798542
+.long	4294967040,1313734734
+.long	1684300800,1701118053
+.long	3537031680,3183280317
+.long	269488128,3099066552
+.long	3301229568,2408513679
+.long	0,3958046955
+.long	1212696576,3469607118
+.long	2745410304,808452144
+.long	4160222976,1600061535
+.long	1970631936,3318022341
+.long	3688618752,437911578
+.long	2324335104,3789619425
+.long	50529024,3402236106
+.long	3873891840,1195835463
+.long	3671775744,1027407933
+.long	151587072,16842753
+.long	1061109504,3604349142
+.long	3722304768,1448476758
+.long	2492765184,1296891981
+.long	2273806080,218955789
+.long	1549556736,1717960806
+.long	2206434048,3435921612
+.long	33686016,757923885
+.long	3452816640,303169554
+.long	1246382592,538968096
+.long	2425393152,2981167281
+.long	858993408,2576941209
+.long	1936945920,1280049228
+.long	1734829824,3267494082
+.long	4143379968,2122186878
+.long	4092850944,84213765
+.long	2644352256,3082223799
+.long	2139062016,825294897
+.long	3217014528,387383319
+.long	3806519808,3621191895
+.long	1381126656,1482162264
+.long	2610666240,1633747041
+.long	3638089728,454754331
+.long	640034304,471597084
+.long	3368601600,252641295
+.long	926365440,370540566
+.long	3334915584,404226072
+.long	993737472,572653602
+.long	2172748032,1145307204
+.long	2526451200,2998010034
+.long	1869573888,3048538293
+.long	1263225600,2442199185
+.long	320017152,134742024
+.long	3200171520,2829582504
+.long	1667457792,4244373756
+.long	774778368,1347420240
+.long	3924420864,3503292624
+.long	2038003968,2105344125
+.long	2812782336,2307457161
+.long	2358021120,2543255703
+.long	2678038272,1532690523
+.long	1852730880,2509570197
+.long	3166485504,4294902015
+.long	2391707136,3536978130
+.long	690563328,3301179588
+.long	4126536960,1212678216
+.long	4193908992,4160159991
+.long	3065427456,3688562907
+.long	791621376,50528259
+.long	4261281024,3671720154
+.long	3031741440,1061093439
+.long	1499027712,2492727444
+.long	2021160960,1549533276
+.long	2560137216,33685506
+.long	101058048,1246363722
+.long	1785358848,858980403
+.long	3890734848,1734803559
+.long	1179010560,4092788979
+.long	1903259904,2139029631
+.long	3132799488,3806462178
+.long	3570717696,2610626715
+.long	623191296,640024614
+.long	2880154368,926351415
+.long	1111638528,993722427
+.long	2290649088,2526412950
+.long	2728567296,1263206475
+.long	2374864128,3200123070
+.long	4210752000,774766638
+.long	1920102912,2037973113
+.long	117901056,2357985420
+.long	3115956480,1852702830
+.long	1431655680,2391670926
+.long	4177065984,4126474485
+.long	4008635904,3065381046
+.long	2896997376,4261216509
+.long	168430080,1499005017
+.long	909522432,2560098456
+.long	1229539584,1785331818
+.long	707406336,1178992710
+.long	1751672832,3132752058
+.long	1010580480,623181861
+.long	943208448,1111621698
+.long	4059164928,2728525986
+.long	2762253312,4210688250
+.long	1077952512,117899271
+.long	673720320,1431634005
+.long	3553874688,4008575214
+.long	2071689984,168427530
+.long	3149642496,1229520969
+.long	3385444608,1751646312
+.long	1128481536,943194168
+.long	3250700544,2762211492
+.long	353703168,673710120
+.long	3823362816,2071658619
+.long	2913840384,3385393353
+.long	4109693952,3250651329
+.long	2004317952,3823304931
+.long	3351758592,4109631732
+.long	2155905024,3351707847
+.long	2661195264,2661154974
+.long	14737632,939538488
+.long	328965,1090535745
+.long	5789784,369104406
+.long	14277081,1979741814
+.long	6776679,3640711641
+.long	5131854,2466288531
+.long	8487297,1610637408
+.long	13355979,4060148466
+.long	13224393,1912631922
+.long	723723,3254829762
+.long	11447982,2868947883
+.long	6974058,2583730842
+.long	14013909,1962964341
+.long	1579032,100664838
+.long	6118749,1459640151
+.long	8553090,2684395680
+.long	4605510,2432733585
+.long	14671839,4144035831
+.long	14079702,3036722613
+.long	2565927,3372272073
+.long	9079434,2717950626
+.long	3289650,2348846220
+.long	4934475,3523269330
+.long	4342338,2415956112
+.long	14408667,4127258358
+.long	1842204,117442311
+.long	10395294,2801837991
+.long	10263708,654321447
+.long	3815994,2382401166
+.long	13290186,2986390194
+.long	2434341,1224755529
+.long	8092539,3724599006
+.long	855309,1124090691
+.long	7434609,1543527516
+.long	6250335,3607156695
+.long	2039583,3338717127
+.long	16316664,1040203326
+.long	14145495,4110480885
+.long	4079166,2399178639
+.long	10329501,1728079719
+.long	8158332,520101663
+.long	6316128,402659352
+.long	12171705,1845522030
+.long	12500670,2936057775
+.long	12369084,788541231
+.long	9145227,3791708898
+.long	1447446,2231403909
+.long	3421236,218107149
+.long	5066061,1392530259
+.long	12829635,4026593520
+.long	7500402,2617285788
+.long	9803157,1694524773
+.long	11250603,3925928682
+.long	9342606,2734728099
+.long	12237498,2919280302
+.long	8026746,2650840734
+.long	11776947,3959483628
+.long	131586,2147516544
+.long	11842740,754986285
+.long	11382189,1795189611
+.long	10658466,2818615464
+.long	11316396,721431339
+.long	14211288,905983542
+.long	10132122,2785060518
+.long	1513239,3305162181
+.long	1710618,2248181382
+.long	3487029,1291865421
+.long	13421772,855651123
+.long	16250871,4244700669
+.long	10066329,1711302246
+.long	6381921,1476417624
+.long	5921370,2516620950
+.long	15263976,973093434
+.long	2368548,150997257
+.long	5658198,2499843477
+.long	4210752,268439568
+.long	14803425,2013296760
+.long	6513507,3623934168
+.long	592137,1107313218
+.long	3355443,3422604492
+.long	12566463,4009816047
+.long	10000536,637543974
+.long	9934743,3842041317
+.long	8750469,1627414881
+.long	6842472,436214298
+.long	16579836,1056980799
+.long	15527148,989870907
+.long	657930,2181071490
+.long	14342874,3053500086
+.long	7303023,3674266587
+.long	5460819,3556824276
+.long	6447714,2550175896
+.long	10724259,3892373736
+.long	3026478,2332068747
+.long	526344,33554946
+.long	11513775,3942706155
+.long	2631720,167774730
+.long	11579568,738208812
+.long	7631988,486546717
+.long	12763842,2952835248
+.long	12434877,1862299503
+.long	3552822,2365623693
+.long	2236962,2281736328
+.long	3684408,234884622
+.long	6579300,419436825
+.long	1973790,2264958855
+.long	3750201,1308642894
+.long	2894892,184552203
+.long	10921638,2835392937
+.long	3158064,201329676
+.long	15066597,2030074233
+.long	4473924,285217041
+.long	16645629,2130739071
+.long	8947848,570434082
+.long	10461087,3875596263
+.long	6645093,1493195097
+.long	8882055,3774931425
+.long	7039851,3657489114
+.long	16053492,1023425853
+.long	2302755,3355494600
+.long	4737096,301994514
+.long	1052688,67109892
+.long	13750737,1946186868
+.long	5329233,1409307732
+.long	12632256,805318704
+.long	16382457,2113961598
+.long	13816530,3019945140
+.long	10526880,671098920
+.long	5592405,1426085205
+.long	10592673,1744857192
+.long	4276545,1342197840
+.long	16448250,3187719870
+.long	4408131,3489714384
+.long	1250067,3288384708
+.long	12895428,822096177
+.long	3092271,3405827019
+.long	11053224,704653866
+.long	11974326,2902502829
+.long	3947580,251662095
+.long	2829099,3389049546
+.long	12698049,1879076976
+.long	16777215,4278255615
+.long	13158600,838873650
+.long	10855845,1761634665
+.long	2105376,134219784
+.long	9013641,1644192354
+.long	0,0
+.long	9474192,603989028
+.long	4671303,3506491857
+.long	15724527,4211145723
+.long	15395562,3120609978
+.long	12040119,3976261101
+.long	1381653,1157645637
+.long	394758,2164294017
+.long	13487565,1929409395
+.long	11908533,1828744557
+.long	1184274,2214626436
+.long	8289918,2667618207
+.long	12303291,3993038574
+.long	2697513,1241533002
+.long	986895,3271607235
+.long	12105912,771763758
+.long	460551,3238052289
+.long	263172,16777473
+.long	10197915,3858818790
+.long	9737364,620766501
+.long	2171169,1207978056
+.long	6710886,2566953369
+.long	15132390,3103832505
+.long	13553358,3003167667
+.long	15592941,2063629179
+.long	15198183,4177590777
+.long	3881787,3456159438
+.long	16711422,3204497343
+.long	8355711,3741376479
+.long	12961221,1895854449
+.long	10790052,687876393
+.long	3618615,3439381965
+.long	11645361,1811967084
+.long	5000268,318771987
+.long	9539985,1677747300
+.long	7237230,2600508315
+.long	9276813,1660969827
+.long	7763574,2634063261
+.long	197379,3221274816
+.long	2960685,1258310475
+.long	14606046,3070277559
+.long	9868950,2768283045
+.long	2500134,2298513801
+.long	8224125,1593859935
+.long	13027014,2969612721
+.long	6052956,385881879
+.long	13882323,4093703412
+.long	15921906,3154164924
+.long	5197647,3540046803
+.long	1644825,1174423110
+.long	4144959,3472936911
+.long	14474460,922761015
+.long	7960953,1577082462
+.long	1907997,1191200583
+.long	5395026,2483066004
+.long	15461355,4194368250
+.long	15987699,4227923196
+.long	7171437,1526750043
+.long	6184542,2533398423
+.long	16514043,4261478142
+.long	6908265,1509972570
+.long	11711154,2885725356
+.long	15790320,1006648380
+.long	3223857,1275087948
+.long	789516,50332419
+.long	13948116,889206069
+.long	13619151,4076925939
+.long	9211020,587211555
+.long	14869218,3087055032
+.long	7697781,1560304989
+.long	11119017,1778412138
+.long	4868682,2449511058
+.long	5723991,3573601749
+.long	8684676,553656609
+.long	1118481,1140868164
+.long	4539717,1358975313
+.long	1776411,3321939654
+.long	16119285,2097184125
+.long	15000804,956315961
+.long	921102,2197848963
+.long	7566195,3691044060
+.long	11184810,2852170410
+.long	15856113,2080406652
+.long	14540253,1996519287
+.long	5855577,1442862678
+.long	1315860,83887365
+.long	7105644,452991771
+.long	9605778,2751505572
+.long	5526612,352326933
+.long	13684944,872428596
+.long	7895160,503324190
+.long	7368816,469769244
+.long	14935011,4160813304
+.long	4802889,1375752786
+.long	8421504,536879136
+.long	5263440,335549460
+.long	10987431,3909151209
+.long	16185078,3170942397
+.long	7829367,3707821533
+.long	9671571,3825263844
+.long	8816262,2701173153
+.long	8618883,3758153952
+.long	2763306,2315291274
+.long	13092807,4043370993
+.long	5987163,3590379222
+.long	15329769,2046851706
+.long	15658734,3137387451
+.long	9408399,3808486371
+.long	65793,1073758272
+.long	4013373,1325420367
+.globl	Camellia_cbc_encrypt
+.type	Camellia_cbc_encrypt,@function
+.align	16
+Camellia_cbc_encrypt:
+.L_Camellia_cbc_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%ecx
+	cmpl	$0,%ecx
+	je	.L016enc_out
+	pushfl
+	cld
+	movl	24(%esp),%eax
+	movl	28(%esp),%ebx
+	movl	36(%esp),%edx
+	movl	40(%esp),%ebp
+	leal	-64(%esp),%esi
+	andl	$-64,%esi
+	leal	-127(%edx),%edi
+	subl	%esi,%edi
+	negl	%edi
+	andl	$960,%edi
+	subl	%edi,%esi
+	movl	44(%esp),%edi
+	xchgl	%esi,%esp
+	addl	$4,%esp
+	movl	%esi,20(%esp)
+	movl	%eax,24(%esp)
+	movl	%ebx,28(%esp)
+	movl	%ecx,32(%esp)
+	movl	%edx,36(%esp)
+	movl	%ebp,40(%esp)
+	call	.L017pic_point
+.L017pic_point:
+	popl	%ebp
+	leal	.LCamellia_SBOX-.L017pic_point(%ebp),%ebp
+	movl	$32,%esi
+.align	4
+.L018prefetch_sbox:
+	movl	(%ebp),%eax
+	movl	32(%ebp),%ebx
+	movl	64(%ebp),%ecx
+	movl	96(%ebp),%edx
+	leal	128(%ebp),%ebp
+	decl	%esi
+	jnz	.L018prefetch_sbox
+	movl	36(%esp),%eax
+	subl	$4096,%ebp
+	movl	24(%esp),%esi
+	movl	272(%eax),%edx
+	cmpl	$0,%edi
+	je	.L019DECRYPT
+	movl	32(%esp),%ecx
+	movl	40(%esp),%edi
+	shll	$6,%edx
+	leal	(%eax,%edx,1),%edx
+	movl	%edx,16(%esp)
+	testl	$4294967280,%ecx
+	jz	.L020enc_tail
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+.align	4
+.L021enc_loop:
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	xorl	(%esi),%eax
+	xorl	4(%esi),%ebx
+	xorl	8(%esi),%ecx
+	bswap	%eax
+	xorl	12(%esi),%edx
+	bswap	%ebx
+	movl	36(%esp),%edi
+	bswap	%ecx
+	bswap	%edx
+	call	_x86_Camellia_encrypt
+	movl	24(%esp),%esi
+	movl	28(%esp),%edi
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	movl	%eax,(%edi)
+	bswap	%edx
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	32(%esp),%ecx
+	leal	16(%esi),%esi
+	movl	%esi,24(%esp)
+	leal	16(%edi),%edx
+	movl	%edx,28(%esp)
+	subl	$16,%ecx
+	testl	$4294967280,%ecx
+	movl	%ecx,32(%esp)
+	jnz	.L021enc_loop
+	testl	$15,%ecx
+	jnz	.L020enc_tail
+	movl	40(%esp),%esi
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	movl	20(%esp),%esp
+	popfl
+.L016enc_out:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	4
+.L020enc_tail:
+	movl	%edi,%eax
+	movl	28(%esp),%edi
+	pushl	%eax
+	movl	$16,%ebx
+	subl	%ecx,%ebx
+	cmpl	%esi,%edi
+	je	.L022enc_in_place
+.align	4
+.long	2767451785
+	jmp	.L023enc_skip_in_place
+.L022enc_in_place:
+	leal	(%edi,%ecx,1),%edi
+.L023enc_skip_in_place:
+	movl	%ebx,%ecx
+	xorl	%eax,%eax
+.align	4
+.long	2868115081
+	popl	%edi
+	movl	28(%esp),%esi
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	$16,32(%esp)
+	jmp	.L021enc_loop
+.align	16
+.L019DECRYPT:
+	shll	$6,%edx
+	leal	(%eax,%edx,1),%edx
+	movl	%eax,16(%esp)
+	movl	%edx,36(%esp)
+	cmpl	28(%esp),%esi
+	je	.L024dec_in_place
+	movl	40(%esp),%edi
+	movl	%edi,44(%esp)
+.align	4
+.L025dec_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	bswap	%eax
+	movl	12(%esi),%edx
+	bswap	%ebx
+	movl	36(%esp),%edi
+	bswap	%ecx
+	bswap	%edx
+	call	_x86_Camellia_decrypt
+	movl	44(%esp),%edi
+	movl	32(%esp),%esi
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	xorl	(%edi),%eax
+	bswap	%edx
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	subl	$16,%esi
+	jc	.L026dec_partial
+	movl	%esi,32(%esp)
+	movl	24(%esp),%esi
+	movl	28(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	%esi,44(%esp)
+	leal	16(%esi),%esi
+	movl	%esi,24(%esp)
+	leal	16(%edi),%edi
+	movl	%edi,28(%esp)
+	jnz	.L025dec_loop
+	movl	44(%esp),%edi
+.L027dec_end:
+	movl	40(%esp),%esi
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	jmp	.L028dec_out
+.align	4
+.L026dec_partial:
+	leal	44(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	leal	16(%esi),%ecx
+	movl	%edi,%esi
+	movl	28(%esp),%edi
+.long	2767451785
+	movl	24(%esp),%edi
+	jmp	.L027dec_end
+.align	4
+.L024dec_in_place:
+.L029dec_in_place_loop:
+	leal	44(%esp),%edi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	bswap	%eax
+	movl	%edx,12(%edi)
+	bswap	%ebx
+	movl	36(%esp),%edi
+	bswap	%ecx
+	bswap	%edx
+	call	_x86_Camellia_decrypt
+	movl	40(%esp),%edi
+	movl	28(%esp),%esi
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	xorl	(%edi),%eax
+	bswap	%edx
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	leal	16(%esi),%esi
+	movl	%esi,28(%esp)
+	leal	44(%esp),%esi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	24(%esp),%esi
+	leal	16(%esi),%esi
+	movl	%esi,24(%esp)
+	movl	32(%esp),%ecx
+	subl	$16,%ecx
+	jc	.L030dec_in_place_partial
+	movl	%ecx,32(%esp)
+	jnz	.L029dec_in_place_loop
+	jmp	.L028dec_out
+.align	4
+.L030dec_in_place_partial:
+	movl	28(%esp),%edi
+	leal	44(%esp),%esi
+	leal	(%edi,%ecx,1),%edi
+	leal	16(%esi,%ecx,1),%esi
+	negl	%ecx
+.long	2767451785
+.align	4
+.L028dec_out:
+	movl	20(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	Camellia_cbc_encrypt,.-.L_Camellia_cbc_encrypt_begin
+.byte	67,97,109,101,108,108,105,97,32,102,111,114,32,120,56,54
+.byte	32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
+.byte	115,108,46,111,114,103,62,0
+#else
+.file	"cmll-x86.S"
+.text
+.globl	Camellia_EncryptBlock_Rounds
+.type	Camellia_EncryptBlock_Rounds,@function
+.align	16
+Camellia_EncryptBlock_Rounds:
+.L_Camellia_EncryptBlock_Rounds_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%eax
+	movl	24(%esp),%esi
+	movl	28(%esp),%edi
+	movl	%esp,%ebx
+	subl	$28,%esp
+	andl	$-64,%esp
+	leal	-127(%edi),%ecx
+	subl	%esp,%ecx
+	negl	%ecx
+	andl	$960,%ecx
+	subl	%ecx,%esp
+	addl	$4,%esp
+	shll	$6,%eax
+	leal	(%edi,%eax,1),%eax
+	movl	%ebx,20(%esp)
+	movl	%eax,16(%esp)
+	call	.L000pic_point
+.L000pic_point:
+	popl	%ebp
+	leal	.LCamellia_SBOX-.L000pic_point(%ebp),%ebp
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	bswap	%eax
+	movl	12(%esi),%edx
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	call	_x86_Camellia_encrypt
+	movl	20(%esp),%esp
+	bswap	%eax
+	movl	32(%esp),%esi
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	Camellia_EncryptBlock_Rounds,.-.L_Camellia_EncryptBlock_Rounds_begin
+.globl	Camellia_EncryptBlock
+.type	Camellia_EncryptBlock,@function
+.align	16
+Camellia_EncryptBlock:
+.L_Camellia_EncryptBlock_begin:
+	movl	$128,%eax
+	subl	4(%esp),%eax
+	movl	$3,%eax
+	adcl	$0,%eax
+	movl	%eax,4(%esp)
+	jmp	.L_Camellia_EncryptBlock_Rounds_begin
+.size	Camellia_EncryptBlock,.-.L_Camellia_EncryptBlock_begin
+.globl	Camellia_encrypt
+.type	Camellia_encrypt,@function
+.align	16
+Camellia_encrypt:
+.L_Camellia_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	28(%esp),%edi
+	movl	%esp,%ebx
+	subl	$28,%esp
+	andl	$-64,%esp
+	movl	272(%edi),%eax
+	leal	-127(%edi),%ecx
+	subl	%esp,%ecx
+	negl	%ecx
+	andl	$960,%ecx
+	subl	%ecx,%esp
+	addl	$4,%esp
+	shll	$6,%eax
+	leal	(%edi,%eax,1),%eax
+	movl	%ebx,20(%esp)
+	movl	%eax,16(%esp)
+	call	.L001pic_point
+.L001pic_point:
+	popl	%ebp
+	leal	.LCamellia_SBOX-.L001pic_point(%ebp),%ebp
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	bswap	%eax
+	movl	12(%esi),%edx
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	call	_x86_Camellia_encrypt
+	movl	20(%esp),%esp
+	bswap	%eax
+	movl	24(%esp),%esi
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	Camellia_encrypt,.-.L_Camellia_encrypt_begin
+.type	_x86_Camellia_encrypt,@function
+.align	16
+_x86_Camellia_encrypt:
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	16(%edi),%esi
+	movl	%eax,4(%esp)
+	movl	%ebx,8(%esp)
+	movl	%ecx,12(%esp)
+	movl	%edx,16(%esp)
+.align	16
+.L002loop:
+	xorl	%esi,%eax
+	xorl	20(%edi),%ebx
+	movzbl	%ah,%esi
+	movl	2052(%ebp,%esi,8),%edx
+	movzbl	%al,%esi
+	xorl	4(%ebp,%esi,8),%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movl	(%ebp,%esi,8),%ecx
+	movzbl	%ah,%esi
+	xorl	(%ebp,%esi,8),%edx
+	movzbl	%bh,%esi
+	xorl	4(%ebp,%esi,8),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%eax
+	xorl	2048(%ebp,%eax,8),%edx
+	movzbl	%bh,%esi
+	movl	16(%esp),%eax
+	xorl	%edx,%ecx
+	rorl	$8,%edx
+	xorl	2048(%ebp,%esi,8),%ecx
+	movzbl	%bl,%esi
+	movl	12(%esp),%ebx
+	xorl	%eax,%edx
+	xorl	2052(%ebp,%esi,8),%ecx
+	movl	24(%edi),%esi
+	xorl	%ecx,%edx
+	movl	%edx,16(%esp)
+	xorl	%ebx,%ecx
+	movl	%ecx,12(%esp)
+	xorl	%esi,%ecx
+	xorl	28(%edi),%edx
+	movzbl	%ch,%esi
+	movl	2052(%ebp,%esi,8),%ebx
+	movzbl	%cl,%esi
+	xorl	4(%ebp,%esi,8),%ebx
+	shrl	$16,%ecx
+	movzbl	%dl,%esi
+	movl	(%ebp,%esi,8),%eax
+	movzbl	%ch,%esi
+	xorl	(%ebp,%esi,8),%ebx
+	movzbl	%dh,%esi
+	xorl	4(%ebp,%esi,8),%eax
+	shrl	$16,%edx
+	movzbl	%cl,%ecx
+	xorl	2048(%ebp,%ecx,8),%ebx
+	movzbl	%dh,%esi
+	movl	8(%esp),%ecx
+	xorl	%ebx,%eax
+	rorl	$8,%ebx
+	xorl	2048(%ebp,%esi,8),%eax
+	movzbl	%dl,%esi
+	movl	4(%esp),%edx
+	xorl	%ecx,%ebx
+	xorl	2052(%ebp,%esi,8),%eax
+	movl	32(%edi),%esi
+	xorl	%eax,%ebx
+	movl	%ebx,8(%esp)
+	xorl	%edx,%eax
+	movl	%eax,4(%esp)
+	xorl	%esi,%eax
+	xorl	36(%edi),%ebx
+	movzbl	%ah,%esi
+	movl	2052(%ebp,%esi,8),%edx
+	movzbl	%al,%esi
+	xorl	4(%ebp,%esi,8),%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movl	(%ebp,%esi,8),%ecx
+	movzbl	%ah,%esi
+	xorl	(%ebp,%esi,8),%edx
+	movzbl	%bh,%esi
+	xorl	4(%ebp,%esi,8),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%eax
+	xorl	2048(%ebp,%eax,8),%edx
+	movzbl	%bh,%esi
+	movl	16(%esp),%eax
+	xorl	%edx,%ecx
+	rorl	$8,%edx
+	xorl	2048(%ebp,%esi,8),%ecx
+	movzbl	%bl,%esi
+	movl	12(%esp),%ebx
+	xorl	%eax,%edx
+	xorl	2052(%ebp,%esi,8),%ecx
+	movl	40(%edi),%esi
+	xorl	%ecx,%edx
+	movl	%edx,16(%esp)
+	xorl	%ebx,%ecx
+	movl	%ecx,12(%esp)
+	xorl	%esi,%ecx
+	xorl	44(%edi),%edx
+	movzbl	%ch,%esi
+	movl	2052(%ebp,%esi,8),%ebx
+	movzbl	%cl,%esi
+	xorl	4(%ebp,%esi,8),%ebx
+	shrl	$16,%ecx
+	movzbl	%dl,%esi
+	movl	(%ebp,%esi,8),%eax
+	movzbl	%ch,%esi
+	xorl	(%ebp,%esi,8),%ebx
+	movzbl	%dh,%esi
+	xorl	4(%ebp,%esi,8),%eax
+	shrl	$16,%edx
+	movzbl	%cl,%ecx
+	xorl	2048(%ebp,%ecx,8),%ebx
+	movzbl	%dh,%esi
+	movl	8(%esp),%ecx
+	xorl	%ebx,%eax
+	rorl	$8,%ebx
+	xorl	2048(%ebp,%esi,8),%eax
+	movzbl	%dl,%esi
+	movl	4(%esp),%edx
+	xorl	%ecx,%ebx
+	xorl	2052(%ebp,%esi,8),%eax
+	movl	48(%edi),%esi
+	xorl	%eax,%ebx
+	movl	%ebx,8(%esp)
+	xorl	%edx,%eax
+	movl	%eax,4(%esp)
+	xorl	%esi,%eax
+	xorl	52(%edi),%ebx
+	movzbl	%ah,%esi
+	movl	2052(%ebp,%esi,8),%edx
+	movzbl	%al,%esi
+	xorl	4(%ebp,%esi,8),%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movl	(%ebp,%esi,8),%ecx
+	movzbl	%ah,%esi
+	xorl	(%ebp,%esi,8),%edx
+	movzbl	%bh,%esi
+	xorl	4(%ebp,%esi,8),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%eax
+	xorl	2048(%ebp,%eax,8),%edx
+	movzbl	%bh,%esi
+	movl	16(%esp),%eax
+	xorl	%edx,%ecx
+	rorl	$8,%edx
+	xorl	2048(%ebp,%esi,8),%ecx
+	movzbl	%bl,%esi
+	movl	12(%esp),%ebx
+	xorl	%eax,%edx
+	xorl	2052(%ebp,%esi,8),%ecx
+	movl	56(%edi),%esi
+	xorl	%ecx,%edx
+	movl	%edx,16(%esp)
+	xorl	%ebx,%ecx
+	movl	%ecx,12(%esp)
+	xorl	%esi,%ecx
+	xorl	60(%edi),%edx
+	movzbl	%ch,%esi
+	movl	2052(%ebp,%esi,8),%ebx
+	movzbl	%cl,%esi
+	xorl	4(%ebp,%esi,8),%ebx
+	shrl	$16,%ecx
+	movzbl	%dl,%esi
+	movl	(%ebp,%esi,8),%eax
+	movzbl	%ch,%esi
+	xorl	(%ebp,%esi,8),%ebx
+	movzbl	%dh,%esi
+	xorl	4(%ebp,%esi,8),%eax
+	shrl	$16,%edx
+	movzbl	%cl,%ecx
+	xorl	2048(%ebp,%ecx,8),%ebx
+	movzbl	%dh,%esi
+	movl	8(%esp),%ecx
+	xorl	%ebx,%eax
+	rorl	$8,%ebx
+	xorl	2048(%ebp,%esi,8),%eax
+	movzbl	%dl,%esi
+	movl	4(%esp),%edx
+	xorl	%ecx,%ebx
+	xorl	2052(%ebp,%esi,8),%eax
+	movl	64(%edi),%esi
+	xorl	%eax,%ebx
+	movl	%ebx,8(%esp)
+	xorl	%edx,%eax
+	movl	%eax,4(%esp)
+	addl	$64,%edi
+	cmpl	20(%esp),%edi
+	je	.L003done
+	andl	%eax,%esi
+	movl	16(%esp),%edx
+	roll	$1,%esi
+	movl	%edx,%ecx
+	xorl	%esi,%ebx
+	orl	12(%edi),%ecx
+	movl	%ebx,8(%esp)
+	xorl	12(%esp),%ecx
+	movl	4(%edi),%esi
+	movl	%ecx,12(%esp)
+	orl	%ebx,%esi
+	andl	8(%edi),%ecx
+	xorl	%esi,%eax
+	roll	$1,%ecx
+	movl	%eax,4(%esp)
+	xorl	%ecx,%edx
+	movl	16(%edi),%esi
+	movl	%edx,16(%esp)
+	jmp	.L002loop
+.align	8
+.L003done:
+	movl	%eax,%ecx
+	movl	%ebx,%edx
+	movl	12(%esp),%eax
+	movl	16(%esp),%ebx
+	xorl	%esi,%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	ret
+.size	_x86_Camellia_encrypt,.-_x86_Camellia_encrypt
+.globl	Camellia_DecryptBlock_Rounds
+.type	Camellia_DecryptBlock_Rounds,@function
+.align	16
+Camellia_DecryptBlock_Rounds:
+.L_Camellia_DecryptBlock_Rounds_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%eax
+	movl	24(%esp),%esi
+	movl	28(%esp),%edi
+	movl	%esp,%ebx
+	subl	$28,%esp
+	andl	$-64,%esp
+	leal	-127(%edi),%ecx
+	subl	%esp,%ecx
+	negl	%ecx
+	andl	$960,%ecx
+	subl	%ecx,%esp
+	addl	$4,%esp
+	shll	$6,%eax
+	movl	%edi,16(%esp)
+	leal	(%edi,%eax,1),%edi
+	movl	%ebx,20(%esp)
+	call	.L004pic_point
+.L004pic_point:
+	popl	%ebp
+	leal	.LCamellia_SBOX-.L004pic_point(%ebp),%ebp
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	bswap	%eax
+	movl	12(%esi),%edx
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	call	_x86_Camellia_decrypt
+	movl	20(%esp),%esp
+	bswap	%eax
+	movl	32(%esp),%esi
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	Camellia_DecryptBlock_Rounds,.-.L_Camellia_DecryptBlock_Rounds_begin
+.globl	Camellia_DecryptBlock
+.type	Camellia_DecryptBlock,@function
+.align	16
+Camellia_DecryptBlock:
+.L_Camellia_DecryptBlock_begin:
+	movl	$128,%eax
+	subl	4(%esp),%eax
+	movl	$3,%eax
+	adcl	$0,%eax
+	movl	%eax,4(%esp)
+	jmp	.L_Camellia_DecryptBlock_Rounds_begin
+.size	Camellia_DecryptBlock,.-.L_Camellia_DecryptBlock_begin
+.globl	Camellia_decrypt
+.type	Camellia_decrypt,@function
+.align	16
+Camellia_decrypt:
+.L_Camellia_decrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	28(%esp),%edi
+	movl	%esp,%ebx
+	subl	$28,%esp
+	andl	$-64,%esp
+	movl	272(%edi),%eax
+	leal	-127(%edi),%ecx
+	subl	%esp,%ecx
+	negl	%ecx
+	andl	$960,%ecx
+	subl	%ecx,%esp
+	addl	$4,%esp
+	shll	$6,%eax
+	movl	%edi,16(%esp)
+	leal	(%edi,%eax,1),%edi
+	movl	%ebx,20(%esp)
+	call	.L005pic_point
+.L005pic_point:
+	popl	%ebp
+	leal	.LCamellia_SBOX-.L005pic_point(%ebp),%ebp
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	bswap	%eax
+	movl	12(%esi),%edx
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	call	_x86_Camellia_decrypt
+	movl	20(%esp),%esp
+	bswap	%eax
+	movl	24(%esp),%esi
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	Camellia_decrypt,.-.L_Camellia_decrypt_begin
+.type	_x86_Camellia_decrypt,@function
+.align	16
+_x86_Camellia_decrypt:
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	-8(%edi),%esi
+	movl	%eax,4(%esp)
+	movl	%ebx,8(%esp)
+	movl	%ecx,12(%esp)
+	movl	%edx,16(%esp)
+.align	16
+.L006loop:
+	xorl	%esi,%eax
+	xorl	-4(%edi),%ebx
+	movzbl	%ah,%esi
+	movl	2052(%ebp,%esi,8),%edx
+	movzbl	%al,%esi
+	xorl	4(%ebp,%esi,8),%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movl	(%ebp,%esi,8),%ecx
+	movzbl	%ah,%esi
+	xorl	(%ebp,%esi,8),%edx
+	movzbl	%bh,%esi
+	xorl	4(%ebp,%esi,8),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%eax
+	xorl	2048(%ebp,%eax,8),%edx
+	movzbl	%bh,%esi
+	movl	16(%esp),%eax
+	xorl	%edx,%ecx
+	rorl	$8,%edx
+	xorl	2048(%ebp,%esi,8),%ecx
+	movzbl	%bl,%esi
+	movl	12(%esp),%ebx
+	xorl	%eax,%edx
+	xorl	2052(%ebp,%esi,8),%ecx
+	movl	-16(%edi),%esi
+	xorl	%ecx,%edx
+	movl	%edx,16(%esp)
+	xorl	%ebx,%ecx
+	movl	%ecx,12(%esp)
+	xorl	%esi,%ecx
+	xorl	-12(%edi),%edx
+	movzbl	%ch,%esi
+	movl	2052(%ebp,%esi,8),%ebx
+	movzbl	%cl,%esi
+	xorl	4(%ebp,%esi,8),%ebx
+	shrl	$16,%ecx
+	movzbl	%dl,%esi
+	movl	(%ebp,%esi,8),%eax
+	movzbl	%ch,%esi
+	xorl	(%ebp,%esi,8),%ebx
+	movzbl	%dh,%esi
+	xorl	4(%ebp,%esi,8),%eax
+	shrl	$16,%edx
+	movzbl	%cl,%ecx
+	xorl	2048(%ebp,%ecx,8),%ebx
+	movzbl	%dh,%esi
+	movl	8(%esp),%ecx
+	xorl	%ebx,%eax
+	rorl	$8,%ebx
+	xorl	2048(%ebp,%esi,8),%eax
+	movzbl	%dl,%esi
+	movl	4(%esp),%edx
+	xorl	%ecx,%ebx
+	xorl	2052(%ebp,%esi,8),%eax
+	movl	-24(%edi),%esi
+	xorl	%eax,%ebx
+	movl	%ebx,8(%esp)
+	xorl	%edx,%eax
+	movl	%eax,4(%esp)
+	xorl	%esi,%eax
+	xorl	-20(%edi),%ebx
+	movzbl	%ah,%esi
+	movl	2052(%ebp,%esi,8),%edx
+	movzbl	%al,%esi
+	xorl	4(%ebp,%esi,8),%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movl	(%ebp,%esi,8),%ecx
+	movzbl	%ah,%esi
+	xorl	(%ebp,%esi,8),%edx
+	movzbl	%bh,%esi
+	xorl	4(%ebp,%esi,8),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%eax
+	xorl	2048(%ebp,%eax,8),%edx
+	movzbl	%bh,%esi
+	movl	16(%esp),%eax
+	xorl	%edx,%ecx
+	rorl	$8,%edx
+	xorl	2048(%ebp,%esi,8),%ecx
+	movzbl	%bl,%esi
+	movl	12(%esp),%ebx
+	xorl	%eax,%edx
+	xorl	2052(%ebp,%esi,8),%ecx
+	movl	-32(%edi),%esi
+	xorl	%ecx,%edx
+	movl	%edx,16(%esp)
+	xorl	%ebx,%ecx
+	movl	%ecx,12(%esp)
+	xorl	%esi,%ecx
+	xorl	-28(%edi),%edx
+	movzbl	%ch,%esi
+	movl	2052(%ebp,%esi,8),%ebx
+	movzbl	%cl,%esi
+	xorl	4(%ebp,%esi,8),%ebx
+	shrl	$16,%ecx
+	movzbl	%dl,%esi
+	movl	(%ebp,%esi,8),%eax
+	movzbl	%ch,%esi
+	xorl	(%ebp,%esi,8),%ebx
+	movzbl	%dh,%esi
+	xorl	4(%ebp,%esi,8),%eax
+	shrl	$16,%edx
+	movzbl	%cl,%ecx
+	xorl	2048(%ebp,%ecx,8),%ebx
+	movzbl	%dh,%esi
+	movl	8(%esp),%ecx
+	xorl	%ebx,%eax
+	rorl	$8,%ebx
+	xorl	2048(%ebp,%esi,8),%eax
+	movzbl	%dl,%esi
+	movl	4(%esp),%edx
+	xorl	%ecx,%ebx
+	xorl	2052(%ebp,%esi,8),%eax
+	movl	-40(%edi),%esi
+	xorl	%eax,%ebx
+	movl	%ebx,8(%esp)
+	xorl	%edx,%eax
+	movl	%eax,4(%esp)
+	xorl	%esi,%eax
+	xorl	-36(%edi),%ebx
+	movzbl	%ah,%esi
+	movl	2052(%ebp,%esi,8),%edx
+	movzbl	%al,%esi
+	xorl	4(%ebp,%esi,8),%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movl	(%ebp,%esi,8),%ecx
+	movzbl	%ah,%esi
+	xorl	(%ebp,%esi,8),%edx
+	movzbl	%bh,%esi
+	xorl	4(%ebp,%esi,8),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%eax
+	xorl	2048(%ebp,%eax,8),%edx
+	movzbl	%bh,%esi
+	movl	16(%esp),%eax
+	xorl	%edx,%ecx
+	rorl	$8,%edx
+	xorl	2048(%ebp,%esi,8),%ecx
+	movzbl	%bl,%esi
+	movl	12(%esp),%ebx
+	xorl	%eax,%edx
+	xorl	2052(%ebp,%esi,8),%ecx
+	movl	-48(%edi),%esi
+	xorl	%ecx,%edx
+	movl	%edx,16(%esp)
+	xorl	%ebx,%ecx
+	movl	%ecx,12(%esp)
+	xorl	%esi,%ecx
+	xorl	-44(%edi),%edx
+	movzbl	%ch,%esi
+	movl	2052(%ebp,%esi,8),%ebx
+	movzbl	%cl,%esi
+	xorl	4(%ebp,%esi,8),%ebx
+	shrl	$16,%ecx
+	movzbl	%dl,%esi
+	movl	(%ebp,%esi,8),%eax
+	movzbl	%ch,%esi
+	xorl	(%ebp,%esi,8),%ebx
+	movzbl	%dh,%esi
+	xorl	4(%ebp,%esi,8),%eax
+	shrl	$16,%edx
+	movzbl	%cl,%ecx
+	xorl	2048(%ebp,%ecx,8),%ebx
+	movzbl	%dh,%esi
+	movl	8(%esp),%ecx
+	xorl	%ebx,%eax
+	rorl	$8,%ebx
+	xorl	2048(%ebp,%esi,8),%eax
+	movzbl	%dl,%esi
+	movl	4(%esp),%edx
+	xorl	%ecx,%ebx
+	xorl	2052(%ebp,%esi,8),%eax
+	movl	-56(%edi),%esi
+	xorl	%eax,%ebx
+	movl	%ebx,8(%esp)
+	xorl	%edx,%eax
+	movl	%eax,4(%esp)
+	subl	$64,%edi
+	cmpl	20(%esp),%edi
+	je	.L007done
+	andl	%eax,%esi
+	movl	16(%esp),%edx
+	roll	$1,%esi
+	movl	%edx,%ecx
+	xorl	%esi,%ebx
+	orl	4(%edi),%ecx
+	movl	%ebx,8(%esp)
+	xorl	12(%esp),%ecx
+	movl	12(%edi),%esi
+	movl	%ecx,12(%esp)
+	orl	%ebx,%esi
+	andl	(%edi),%ecx
+	xorl	%esi,%eax
+	roll	$1,%ecx
+	movl	%eax,4(%esp)
+	xorl	%ecx,%edx
+	movl	-8(%edi),%esi
+	movl	%edx,16(%esp)
+	jmp	.L006loop
+.align	8
+.L007done:
+	movl	%eax,%ecx
+	movl	%ebx,%edx
+	movl	12(%esp),%eax
+	movl	16(%esp),%ebx
+	xorl	%esi,%ecx
+	xorl	12(%edi),%edx
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	ret
+.size	_x86_Camellia_decrypt,.-_x86_Camellia_decrypt
+.globl	Camellia_Ekeygen
+.type	Camellia_Ekeygen,@function
+.align	16
+Camellia_Ekeygen:
+.L_Camellia_Ekeygen_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	subl	$16,%esp
+	movl	36(%esp),%ebp
+	movl	40(%esp),%esi
+	movl	44(%esp),%edi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	cmpl	$128,%ebp
+	je	.L0081st128
+	movl	16(%esi),%eax
+	movl	20(%esi),%ebx
+	cmpl	$192,%ebp
+	je	.L0091st192
+	movl	24(%esi),%ecx
+	movl	28(%esi),%edx
+	jmp	.L0101st256
+.align	4
+.L0091st192:
+	movl	%eax,%ecx
+	movl	%ebx,%edx
+	notl	%ecx
+	notl	%edx
+.align	4
+.L0101st256:
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,32(%edi)
+	movl	%ebx,36(%edi)
+	movl	%ecx,40(%edi)
+	movl	%edx,44(%edi)
+	xorl	(%edi),%eax
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+.align	4
+.L0081st128:
+	call	.L011pic_point
+.L011pic_point:
+	popl	%ebp
+	leal	.LCamellia_SBOX-.L011pic_point(%ebp),%ebp
+	leal	.LCamellia_SIGMA-.LCamellia_SBOX(%ebp),%edi
+	movl	(%edi),%esi
+	movl	%eax,(%esp)
+	movl	%ebx,4(%esp)
+	movl	%ecx,8(%esp)
+	movl	%edx,12(%esp)
+	xorl	%esi,%eax
+	xorl	4(%edi),%ebx
+	movzbl	%ah,%esi
+	movl	2052(%ebp,%esi,8),%edx
+	movzbl	%al,%esi
+	xorl	4(%ebp,%esi,8),%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movl	(%ebp,%esi,8),%ecx
+	movzbl	%ah,%esi
+	xorl	(%ebp,%esi,8),%edx
+	movzbl	%bh,%esi
+	xorl	4(%ebp,%esi,8),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%eax
+	xorl	2048(%ebp,%eax,8),%edx
+	movzbl	%bh,%esi
+	movl	12(%esp),%eax
+	xorl	%edx,%ecx
+	rorl	$8,%edx
+	xorl	2048(%ebp,%esi,8),%ecx
+	movzbl	%bl,%esi
+	movl	8(%esp),%ebx
+	xorl	%eax,%edx
+	xorl	2052(%ebp,%esi,8),%ecx
+	movl	8(%edi),%esi
+	xorl	%ecx,%edx
+	movl	%edx,12(%esp)
+	xorl	%ebx,%ecx
+	movl	%ecx,8(%esp)
+	xorl	%esi,%ecx
+	xorl	12(%edi),%edx
+	movzbl	%ch,%esi
+	movl	2052(%ebp,%esi,8),%ebx
+	movzbl	%cl,%esi
+	xorl	4(%ebp,%esi,8),%ebx
+	shrl	$16,%ecx
+	movzbl	%dl,%esi
+	movl	(%ebp,%esi,8),%eax
+	movzbl	%ch,%esi
+	xorl	(%ebp,%esi,8),%ebx
+	movzbl	%dh,%esi
+	xorl	4(%ebp,%esi,8),%eax
+	shrl	$16,%edx
+	movzbl	%cl,%ecx
+	xorl	2048(%ebp,%ecx,8),%ebx
+	movzbl	%dh,%esi
+	movl	4(%esp),%ecx
+	xorl	%ebx,%eax
+	rorl	$8,%ebx
+	xorl	2048(%ebp,%esi,8),%eax
+	movzbl	%dl,%esi
+	movl	(%esp),%edx
+	xorl	%ecx,%ebx
+	xorl	2052(%ebp,%esi,8),%eax
+	movl	16(%edi),%esi
+	xorl	%eax,%ebx
+	movl	%ebx,4(%esp)
+	xorl	%edx,%eax
+	movl	%eax,(%esp)
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edx
+	movl	44(%esp),%esi
+	xorl	(%esi),%eax
+	xorl	4(%esi),%ebx
+	xorl	8(%esi),%ecx
+	xorl	12(%esi),%edx
+	movl	16(%edi),%esi
+	movl	%eax,(%esp)
+	movl	%ebx,4(%esp)
+	movl	%ecx,8(%esp)
+	movl	%edx,12(%esp)
+	xorl	%esi,%eax
+	xorl	20(%edi),%ebx
+	movzbl	%ah,%esi
+	movl	2052(%ebp,%esi,8),%edx
+	movzbl	%al,%esi
+	xorl	4(%ebp,%esi,8),%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movl	(%ebp,%esi,8),%ecx
+	movzbl	%ah,%esi
+	xorl	(%ebp,%esi,8),%edx
+	movzbl	%bh,%esi
+	xorl	4(%ebp,%esi,8),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%eax
+	xorl	2048(%ebp,%eax,8),%edx
+	movzbl	%bh,%esi
+	movl	12(%esp),%eax
+	xorl	%edx,%ecx
+	rorl	$8,%edx
+	xorl	2048(%ebp,%esi,8),%ecx
+	movzbl	%bl,%esi
+	movl	8(%esp),%ebx
+	xorl	%eax,%edx
+	xorl	2052(%ebp,%esi,8),%ecx
+	movl	24(%edi),%esi
+	xorl	%ecx,%edx
+	movl	%edx,12(%esp)
+	xorl	%ebx,%ecx
+	movl	%ecx,8(%esp)
+	xorl	%esi,%ecx
+	xorl	28(%edi),%edx
+	movzbl	%ch,%esi
+	movl	2052(%ebp,%esi,8),%ebx
+	movzbl	%cl,%esi
+	xorl	4(%ebp,%esi,8),%ebx
+	shrl	$16,%ecx
+	movzbl	%dl,%esi
+	movl	(%ebp,%esi,8),%eax
+	movzbl	%ch,%esi
+	xorl	(%ebp,%esi,8),%ebx
+	movzbl	%dh,%esi
+	xorl	4(%ebp,%esi,8),%eax
+	shrl	$16,%edx
+	movzbl	%cl,%ecx
+	xorl	2048(%ebp,%ecx,8),%ebx
+	movzbl	%dh,%esi
+	movl	4(%esp),%ecx
+	xorl	%ebx,%eax
+	rorl	$8,%ebx
+	xorl	2048(%ebp,%esi,8),%eax
+	movzbl	%dl,%esi
+	movl	(%esp),%edx
+	xorl	%ecx,%ebx
+	xorl	2052(%ebp,%esi,8),%eax
+	movl	32(%edi),%esi
+	xorl	%eax,%ebx
+	movl	%ebx,4(%esp)
+	xorl	%edx,%eax
+	movl	%eax,(%esp)
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edx
+	movl	36(%esp),%esi
+	cmpl	$128,%esi
+	jne	.L0122nd256
+	movl	44(%esp),%edi
+	leal	128(%edi),%edi
+	movl	%eax,-112(%edi)
+	movl	%ebx,-108(%edi)
+	movl	%ecx,-104(%edi)
+	movl	%edx,-100(%edi)
+	movl	%eax,%ebp
+	shll	$15,%eax
+	movl	%ebx,%esi
+	shrl	$17,%esi
+	shll	$15,%ebx
+	orl	%esi,%eax
+	movl	%ecx,%esi
+	shll	$15,%ecx
+	movl	%eax,-80(%edi)
+	shrl	$17,%esi
+	orl	%esi,%ebx
+	shrl	$17,%ebp
+	movl	%edx,%esi
+	shrl	$17,%esi
+	movl	%ebx,-76(%edi)
+	shll	$15,%edx
+	orl	%esi,%ecx
+	orl	%ebp,%edx
+	movl	%ecx,-72(%edi)
+	movl	%edx,-68(%edi)
+	movl	%eax,%ebp
+	shll	$15,%eax
+	movl	%ebx,%esi
+	shrl	$17,%esi
+	shll	$15,%ebx
+	orl	%esi,%eax
+	movl	%ecx,%esi
+	shll	$15,%ecx
+	movl	%eax,-64(%edi)
+	shrl	$17,%esi
+	orl	%esi,%ebx
+	shrl	$17,%ebp
+	movl	%edx,%esi
+	shrl	$17,%esi
+	movl	%ebx,-60(%edi)
+	shll	$15,%edx
+	orl	%esi,%ecx
+	orl	%ebp,%edx
+	movl	%ecx,-56(%edi)
+	movl	%edx,-52(%edi)
+	movl	%eax,%ebp
+	shll	$15,%eax
+	movl	%ebx,%esi
+	shrl	$17,%esi
+	shll	$15,%ebx
+	orl	%esi,%eax
+	movl	%ecx,%esi
+	shll	$15,%ecx
+	movl	%eax,-32(%edi)
+	shrl	$17,%esi
+	orl	%esi,%ebx
+	shrl	$17,%ebp
+	movl	%edx,%esi
+	shrl	$17,%esi
+	movl	%ebx,-28(%edi)
+	shll	$15,%edx
+	orl	%esi,%ecx
+	orl	%ebp,%edx
+	movl	%eax,%ebp
+	shll	$15,%eax
+	movl	%ebx,%esi
+	shrl	$17,%esi
+	shll	$15,%ebx
+	orl	%esi,%eax
+	movl	%ecx,%esi
+	shll	$15,%ecx
+	movl	%eax,-16(%edi)
+	shrl	$17,%esi
+	orl	%esi,%ebx
+	shrl	$17,%ebp
+	movl	%edx,%esi
+	shrl	$17,%esi
+	movl	%ebx,-12(%edi)
+	shll	$15,%edx
+	orl	%esi,%ecx
+	orl	%ebp,%edx
+	movl	%ecx,-8(%edi)
+	movl	%edx,-4(%edi)
+	movl	%ebx,%ebp
+	shll	$2,%ebx
+	movl	%ecx,%esi
+	shrl	$30,%esi
+	shll	$2,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$2,%edx
+	movl	%ebx,32(%edi)
+	shrl	$30,%esi
+	orl	%esi,%ecx
+	shrl	$30,%ebp
+	movl	%eax,%esi
+	shrl	$30,%esi
+	movl	%ecx,36(%edi)
+	shll	$2,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,40(%edi)
+	movl	%eax,44(%edi)
+	movl	%ebx,%ebp
+	shll	$17,%ebx
+	movl	%ecx,%esi
+	shrl	$15,%esi
+	shll	$17,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$17,%edx
+	movl	%ebx,64(%edi)
+	shrl	$15,%esi
+	orl	%esi,%ecx
+	shrl	$15,%ebp
+	movl	%eax,%esi
+	shrl	$15,%esi
+	movl	%ecx,68(%edi)
+	shll	$17,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,72(%edi)
+	movl	%eax,76(%edi)
+	movl	-128(%edi),%ebx
+	movl	-124(%edi),%ecx
+	movl	-120(%edi),%edx
+	movl	-116(%edi),%eax
+	movl	%ebx,%ebp
+	shll	$15,%ebx
+	movl	%ecx,%esi
+	shrl	$17,%esi
+	shll	$15,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$15,%edx
+	movl	%ebx,-96(%edi)
+	shrl	$17,%esi
+	orl	%esi,%ecx
+	shrl	$17,%ebp
+	movl	%eax,%esi
+	shrl	$17,%esi
+	movl	%ecx,-92(%edi)
+	shll	$15,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,-88(%edi)
+	movl	%eax,-84(%edi)
+	movl	%ebx,%ebp
+	shll	$30,%ebx
+	movl	%ecx,%esi
+	shrl	$2,%esi
+	shll	$30,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$30,%edx
+	movl	%ebx,-48(%edi)
+	shrl	$2,%esi
+	orl	%esi,%ecx
+	shrl	$2,%ebp
+	movl	%eax,%esi
+	shrl	$2,%esi
+	movl	%ecx,-44(%edi)
+	shll	$30,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,-40(%edi)
+	movl	%eax,-36(%edi)
+	movl	%ebx,%ebp
+	shll	$15,%ebx
+	movl	%ecx,%esi
+	shrl	$17,%esi
+	shll	$15,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$15,%edx
+	shrl	$17,%esi
+	orl	%esi,%ecx
+	shrl	$17,%ebp
+	movl	%eax,%esi
+	shrl	$17,%esi
+	shll	$15,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,-24(%edi)
+	movl	%eax,-20(%edi)
+	movl	%ebx,%ebp
+	shll	$17,%ebx
+	movl	%ecx,%esi
+	shrl	$15,%esi
+	shll	$17,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$17,%edx
+	movl	%ebx,(%edi)
+	shrl	$15,%esi
+	orl	%esi,%ecx
+	shrl	$15,%ebp
+	movl	%eax,%esi
+	shrl	$15,%esi
+	movl	%ecx,4(%edi)
+	shll	$17,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,8(%edi)
+	movl	%eax,12(%edi)
+	movl	%ebx,%ebp
+	shll	$17,%ebx
+	movl	%ecx,%esi
+	shrl	$15,%esi
+	shll	$17,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$17,%edx
+	movl	%ebx,16(%edi)
+	shrl	$15,%esi
+	orl	%esi,%ecx
+	shrl	$15,%ebp
+	movl	%eax,%esi
+	shrl	$15,%esi
+	movl	%ecx,20(%edi)
+	shll	$17,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,24(%edi)
+	movl	%eax,28(%edi)
+	movl	%ebx,%ebp
+	shll	$17,%ebx
+	movl	%ecx,%esi
+	shrl	$15,%esi
+	shll	$17,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$17,%edx
+	movl	%ebx,48(%edi)
+	shrl	$15,%esi
+	orl	%esi,%ecx
+	shrl	$15,%ebp
+	movl	%eax,%esi
+	shrl	$15,%esi
+	movl	%ecx,52(%edi)
+	shll	$17,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,56(%edi)
+	movl	%eax,60(%edi)
+	movl	$3,%eax
+	jmp	.L013done
+.align	16
+.L0122nd256:
+	movl	44(%esp),%esi
+	movl	%eax,48(%esi)
+	movl	%ebx,52(%esi)
+	movl	%ecx,56(%esi)
+	movl	%edx,60(%esi)
+	xorl	32(%esi),%eax
+	xorl	36(%esi),%ebx
+	xorl	40(%esi),%ecx
+	xorl	44(%esi),%edx
+	movl	32(%edi),%esi
+	movl	%eax,(%esp)
+	movl	%ebx,4(%esp)
+	movl	%ecx,8(%esp)
+	movl	%edx,12(%esp)
+	xorl	%esi,%eax
+	xorl	36(%edi),%ebx
+	movzbl	%ah,%esi
+	movl	2052(%ebp,%esi,8),%edx
+	movzbl	%al,%esi
+	xorl	4(%ebp,%esi,8),%edx
+	shrl	$16,%eax
+	movzbl	%bl,%esi
+	movl	(%ebp,%esi,8),%ecx
+	movzbl	%ah,%esi
+	xorl	(%ebp,%esi,8),%edx
+	movzbl	%bh,%esi
+	xorl	4(%ebp,%esi,8),%ecx
+	shrl	$16,%ebx
+	movzbl	%al,%eax
+	xorl	2048(%ebp,%eax,8),%edx
+	movzbl	%bh,%esi
+	movl	12(%esp),%eax
+	xorl	%edx,%ecx
+	rorl	$8,%edx
+	xorl	2048(%ebp,%esi,8),%ecx
+	movzbl	%bl,%esi
+	movl	8(%esp),%ebx
+	xorl	%eax,%edx
+	xorl	2052(%ebp,%esi,8),%ecx
+	movl	40(%edi),%esi
+	xorl	%ecx,%edx
+	movl	%edx,12(%esp)
+	xorl	%ebx,%ecx
+	movl	%ecx,8(%esp)
+	xorl	%esi,%ecx
+	xorl	44(%edi),%edx
+	movzbl	%ch,%esi
+	movl	2052(%ebp,%esi,8),%ebx
+	movzbl	%cl,%esi
+	xorl	4(%ebp,%esi,8),%ebx
+	shrl	$16,%ecx
+	movzbl	%dl,%esi
+	movl	(%ebp,%esi,8),%eax
+	movzbl	%ch,%esi
+	xorl	(%ebp,%esi,8),%ebx
+	movzbl	%dh,%esi
+	xorl	4(%ebp,%esi,8),%eax
+	shrl	$16,%edx
+	movzbl	%cl,%ecx
+	xorl	2048(%ebp,%ecx,8),%ebx
+	movzbl	%dh,%esi
+	movl	4(%esp),%ecx
+	xorl	%ebx,%eax
+	rorl	$8,%ebx
+	xorl	2048(%ebp,%esi,8),%eax
+	movzbl	%dl,%esi
+	movl	(%esp),%edx
+	xorl	%ecx,%ebx
+	xorl	2052(%ebp,%esi,8),%eax
+	movl	48(%edi),%esi
+	xorl	%eax,%ebx
+	movl	%ebx,4(%esp)
+	xorl	%edx,%eax
+	movl	%eax,(%esp)
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edx
+	movl	44(%esp),%edi
+	leal	128(%edi),%edi
+	movl	%eax,-112(%edi)
+	movl	%ebx,-108(%edi)
+	movl	%ecx,-104(%edi)
+	movl	%edx,-100(%edi)
+	movl	%eax,%ebp
+	shll	$30,%eax
+	movl	%ebx,%esi
+	shrl	$2,%esi
+	shll	$30,%ebx
+	orl	%esi,%eax
+	movl	%ecx,%esi
+	shll	$30,%ecx
+	movl	%eax,-48(%edi)
+	shrl	$2,%esi
+	orl	%esi,%ebx
+	shrl	$2,%ebp
+	movl	%edx,%esi
+	shrl	$2,%esi
+	movl	%ebx,-44(%edi)
+	shll	$30,%edx
+	orl	%esi,%ecx
+	orl	%ebp,%edx
+	movl	%ecx,-40(%edi)
+	movl	%edx,-36(%edi)
+	movl	%eax,%ebp
+	shll	$30,%eax
+	movl	%ebx,%esi
+	shrl	$2,%esi
+	shll	$30,%ebx
+	orl	%esi,%eax
+	movl	%ecx,%esi
+	shll	$30,%ecx
+	movl	%eax,32(%edi)
+	shrl	$2,%esi
+	orl	%esi,%ebx
+	shrl	$2,%ebp
+	movl	%edx,%esi
+	shrl	$2,%esi
+	movl	%ebx,36(%edi)
+	shll	$30,%edx
+	orl	%esi,%ecx
+	orl	%ebp,%edx
+	movl	%ecx,40(%edi)
+	movl	%edx,44(%edi)
+	movl	%ebx,%ebp
+	shll	$19,%ebx
+	movl	%ecx,%esi
+	shrl	$13,%esi
+	shll	$19,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$19,%edx
+	movl	%ebx,128(%edi)
+	shrl	$13,%esi
+	orl	%esi,%ecx
+	shrl	$13,%ebp
+	movl	%eax,%esi
+	shrl	$13,%esi
+	movl	%ecx,132(%edi)
+	shll	$19,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,136(%edi)
+	movl	%eax,140(%edi)
+	movl	-96(%edi),%ebx
+	movl	-92(%edi),%ecx
+	movl	-88(%edi),%edx
+	movl	-84(%edi),%eax
+	movl	%ebx,%ebp
+	shll	$15,%ebx
+	movl	%ecx,%esi
+	shrl	$17,%esi
+	shll	$15,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$15,%edx
+	movl	%ebx,-96(%edi)
+	shrl	$17,%esi
+	orl	%esi,%ecx
+	shrl	$17,%ebp
+	movl	%eax,%esi
+	shrl	$17,%esi
+	movl	%ecx,-92(%edi)
+	shll	$15,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,-88(%edi)
+	movl	%eax,-84(%edi)
+	movl	%ebx,%ebp
+	shll	$15,%ebx
+	movl	%ecx,%esi
+	shrl	$17,%esi
+	shll	$15,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$15,%edx
+	movl	%ebx,-64(%edi)
+	shrl	$17,%esi
+	orl	%esi,%ecx
+	shrl	$17,%ebp
+	movl	%eax,%esi
+	shrl	$17,%esi
+	movl	%ecx,-60(%edi)
+	shll	$15,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,-56(%edi)
+	movl	%eax,-52(%edi)
+	movl	%ebx,%ebp
+	shll	$30,%ebx
+	movl	%ecx,%esi
+	shrl	$2,%esi
+	shll	$30,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$30,%edx
+	movl	%ebx,16(%edi)
+	shrl	$2,%esi
+	orl	%esi,%ecx
+	shrl	$2,%ebp
+	movl	%eax,%esi
+	shrl	$2,%esi
+	movl	%ecx,20(%edi)
+	shll	$30,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,24(%edi)
+	movl	%eax,28(%edi)
+	movl	%ecx,%ebp
+	shll	$2,%ecx
+	movl	%edx,%esi
+	shrl	$30,%esi
+	shll	$2,%edx
+	orl	%esi,%ecx
+	movl	%eax,%esi
+	shll	$2,%eax
+	movl	%ecx,80(%edi)
+	shrl	$30,%esi
+	orl	%esi,%edx
+	shrl	$30,%ebp
+	movl	%ebx,%esi
+	shrl	$30,%esi
+	movl	%edx,84(%edi)
+	shll	$2,%ebx
+	orl	%esi,%eax
+	orl	%ebp,%ebx
+	movl	%eax,88(%edi)
+	movl	%ebx,92(%edi)
+	movl	-80(%edi),%ecx
+	movl	-76(%edi),%edx
+	movl	-72(%edi),%eax
+	movl	-68(%edi),%ebx
+	movl	%ecx,%ebp
+	shll	$15,%ecx
+	movl	%edx,%esi
+	shrl	$17,%esi
+	shll	$15,%edx
+	orl	%esi,%ecx
+	movl	%eax,%esi
+	shll	$15,%eax
+	movl	%ecx,-80(%edi)
+	shrl	$17,%esi
+	orl	%esi,%edx
+	shrl	$17,%ebp
+	movl	%ebx,%esi
+	shrl	$17,%esi
+	movl	%edx,-76(%edi)
+	shll	$15,%ebx
+	orl	%esi,%eax
+	orl	%ebp,%ebx
+	movl	%eax,-72(%edi)
+	movl	%ebx,-68(%edi)
+	movl	%ecx,%ebp
+	shll	$30,%ecx
+	movl	%edx,%esi
+	shrl	$2,%esi
+	shll	$30,%edx
+	orl	%esi,%ecx
+	movl	%eax,%esi
+	shll	$30,%eax
+	movl	%ecx,-16(%edi)
+	shrl	$2,%esi
+	orl	%esi,%edx
+	shrl	$2,%ebp
+	movl	%ebx,%esi
+	shrl	$2,%esi
+	movl	%edx,-12(%edi)
+	shll	$30,%ebx
+	orl	%esi,%eax
+	orl	%ebp,%ebx
+	movl	%eax,-8(%edi)
+	movl	%ebx,-4(%edi)
+	movl	%edx,64(%edi)
+	movl	%eax,68(%edi)
+	movl	%ebx,72(%edi)
+	movl	%ecx,76(%edi)
+	movl	%edx,%ebp
+	shll	$17,%edx
+	movl	%eax,%esi
+	shrl	$15,%esi
+	shll	$17,%eax
+	orl	%esi,%edx
+	movl	%ebx,%esi
+	shll	$17,%ebx
+	movl	%edx,96(%edi)
+	shrl	$15,%esi
+	orl	%esi,%eax
+	shrl	$15,%ebp
+	movl	%ecx,%esi
+	shrl	$15,%esi
+	movl	%eax,100(%edi)
+	shll	$17,%ecx
+	orl	%esi,%ebx
+	orl	%ebp,%ecx
+	movl	%ebx,104(%edi)
+	movl	%ecx,108(%edi)
+	movl	-128(%edi),%edx
+	movl	-124(%edi),%eax
+	movl	-120(%edi),%ebx
+	movl	-116(%edi),%ecx
+	movl	%eax,%ebp
+	shll	$13,%eax
+	movl	%ebx,%esi
+	shrl	$19,%esi
+	shll	$13,%ebx
+	orl	%esi,%eax
+	movl	%ecx,%esi
+	shll	$13,%ecx
+	movl	%eax,-32(%edi)
+	shrl	$19,%esi
+	orl	%esi,%ebx
+	shrl	$19,%ebp
+	movl	%edx,%esi
+	shrl	$19,%esi
+	movl	%ebx,-28(%edi)
+	shll	$13,%edx
+	orl	%esi,%ecx
+	orl	%ebp,%edx
+	movl	%ecx,-24(%edi)
+	movl	%edx,-20(%edi)
+	movl	%eax,%ebp
+	shll	$15,%eax
+	movl	%ebx,%esi
+	shrl	$17,%esi
+	shll	$15,%ebx
+	orl	%esi,%eax
+	movl	%ecx,%esi
+	shll	$15,%ecx
+	movl	%eax,(%edi)
+	shrl	$17,%esi
+	orl	%esi,%ebx
+	shrl	$17,%ebp
+	movl	%edx,%esi
+	shrl	$17,%esi
+	movl	%ebx,4(%edi)
+	shll	$15,%edx
+	orl	%esi,%ecx
+	orl	%ebp,%edx
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	%eax,%ebp
+	shll	$17,%eax
+	movl	%ebx,%esi
+	shrl	$15,%esi
+	shll	$17,%ebx
+	orl	%esi,%eax
+	movl	%ecx,%esi
+	shll	$17,%ecx
+	movl	%eax,48(%edi)
+	shrl	$15,%esi
+	orl	%esi,%ebx
+	shrl	$15,%ebp
+	movl	%edx,%esi
+	shrl	$15,%esi
+	movl	%ebx,52(%edi)
+	shll	$17,%edx
+	orl	%esi,%ecx
+	orl	%ebp,%edx
+	movl	%ecx,56(%edi)
+	movl	%edx,60(%edi)
+	movl	%ebx,%ebp
+	shll	$2,%ebx
+	movl	%ecx,%esi
+	shrl	$30,%esi
+	shll	$2,%ecx
+	orl	%esi,%ebx
+	movl	%edx,%esi
+	shll	$2,%edx
+	movl	%ebx,112(%edi)
+	shrl	$30,%esi
+	orl	%esi,%ecx
+	shrl	$30,%ebp
+	movl	%eax,%esi
+	shrl	$30,%esi
+	movl	%ecx,116(%edi)
+	shll	$2,%eax
+	orl	%esi,%edx
+	orl	%ebp,%eax
+	movl	%edx,120(%edi)
+	movl	%eax,124(%edi)
+	movl	$4,%eax
+.L013done:
+	leal	144(%edi),%edx
+	addl	$16,%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	Camellia_Ekeygen,.-.L_Camellia_Ekeygen_begin
+.globl	private_Camellia_set_key
+.type	private_Camellia_set_key,@function
+.align	16
+private_Camellia_set_key:
+.L_private_Camellia_set_key_begin:
+	pushl	%ebx
+	movl	8(%esp),%ecx
+	movl	12(%esp),%ebx
+	movl	16(%esp),%edx
+	movl	$-1,%eax
+	testl	%ecx,%ecx
+	jz	.L014done
+	testl	%edx,%edx
+	jz	.L014done
+	movl	$-2,%eax
+	cmpl	$256,%ebx
+	je	.L015arg_ok
+	cmpl	$192,%ebx
+	je	.L015arg_ok
+	cmpl	$128,%ebx
+	jne	.L014done
+.align	4
+.L015arg_ok:
+	pushl	%edx
+	pushl	%ecx
+	pushl	%ebx
+	call	.L_Camellia_Ekeygen_begin
+	addl	$12,%esp
+	movl	%eax,(%edx)
+	xorl	%eax,%eax
+.align	4
+.L014done:
+	popl	%ebx
+	ret
+.size	private_Camellia_set_key,.-.L_private_Camellia_set_key_begin
+.align	64
+.LCamellia_SIGMA:
+.long	2694735487,1003262091,3061508184,1286239154,3337565999,3914302142,1426019237,4057165596,283453434,3731369245,2958461122,3018244605,0,0,0,0
+.align	64
+.LCamellia_SBOX:
+.long	1886416896,1886388336
+.long	2189591040,741081132
+.long	741092352,3014852787
+.long	3974949888,3233808576
+.long	3014898432,3840147684
+.long	656877312,1465319511
+.long	3233857536,3941204202
+.long	3857048832,2930639022
+.long	3840205824,589496355
+.long	2240120064,1802174571
+.long	1465341696,1162149957
+.long	892679424,2779054245
+.long	3941263872,3991732461
+.long	202116096,1330577487
+.long	2930683392,488439837
+.long	1094795520,2459041938
+.long	589505280,2256928902
+.long	4025478912,2947481775
+.long	1802201856,2088501372
+.long	2475922176,522125343
+.long	1162167552,1044250686
+.long	421075200,3705405660
+.long	2779096320,1583218782
+.long	555819264,185270283
+.long	3991792896,2795896998
+.long	235802112,960036921
+.long	1330597632,3587506389
+.long	1313754624,1566376029
+.long	488447232,3654877401
+.long	1701143808,1515847770
+.long	2459079168,1364262993
+.long	3183328512,1819017324
+.long	2256963072,2341142667
+.long	3099113472,2593783962
+.long	2947526400,4227531003
+.long	2408550144,2964324528
+.long	2088532992,1953759348
+.long	3958106880,724238379
+.long	522133248,4042260720
+.long	3469659648,2223243396
+.long	1044266496,3755933919
+.long	808464384,3419078859
+.long	3705461760,875823156
+.long	1600085760,1987444854
+.long	1583242752,1835860077
+.long	3318072576,2846425257
+.long	185273088,3520135377
+.long	437918208,67371012
+.long	2795939328,336855060
+.long	3789676800,976879674
+.long	960051456,3739091166
+.long	3402287616,286326801
+.long	3587560704,842137650
+.long	1195853568,2627469468
+.long	1566399744,1397948499
+.long	1027423488,4075946226
+.long	3654932736,4278059262
+.long	16843008,3486449871
+.long	1515870720,3284336835
+.long	3604403712,2054815866
+.long	1364283648,606339108
+.long	1448498688,3907518696
+.long	1819044864,1616904288
+.long	1296911616,1768489065
+.long	2341178112,2863268010
+.long	218959104,2694840480
+.long	2593823232,2711683233
+.long	1717986816,1650589794
+.long	4227595008,1414791252
+.long	3435973632,505282590
+.long	2964369408,3772776672
+.long	757935360,1684275300
+.long	1953788928,269484048
+.long	303174144,0
+.long	724249344,2745368739
+.long	538976256,1970602101
+.long	4042321920,2324299914
+.long	2981212416,3873833190
+.long	2223277056,151584777
+.long	2576980224,3722248413
+.long	3755990784,2273771655
+.long	1280068608,2206400643
+.long	3419130624,3452764365
+.long	3267543552,2425356432
+.long	875836416,1936916595
+.long	2122219008,4143317238
+.long	1987474944,2644312221
+.long	84215040,3216965823
+.long	1835887872,1381105746
+.long	3082270464,3638034648
+.long	2846468352,3368550600
+.long	825307392,3334865094
+.long	3520188672,2172715137
+.long	387389184,1869545583
+.long	67372032,320012307
+.long	3621246720,1667432547
+.long	336860160,3924361449
+.long	1482184704,2812739751
+.long	976894464,2677997727
+.long	1633771776,3166437564
+.long	3739147776,690552873
+.long	454761216,4193845497
+.long	286331136,791609391
+.long	471604224,3031695540
+.long	842150400,2021130360
+.long	252645120,101056518
+.long	2627509248,3890675943
+.long	370546176,1903231089
+.long	1397969664,3570663636
+.long	404232192,2880110763
+.long	4076007936,2290614408
+.long	572662272,2374828173
+.long	4278124032,1920073842
+.long	1145324544,3115909305
+.long	3486502656,4177002744
+.long	2998055424,2896953516
+.long	3284386560,909508662
+.long	3048584448,707395626
+.long	2054846976,1010565180
+.long	2442236160,4059103473
+.long	606348288,1077936192
+.long	134744064,3553820883
+.long	3907577856,3149594811
+.long	2829625344,1128464451
+.long	1616928768,353697813
+.long	4244438016,2913796269
+.long	1768515840,2004287607
+.long	1347440640,2155872384
+.long	2863311360,2189557890
+.long	3503345664,3974889708
+.long	2694881280,656867367
+.long	2105376000,3856990437
+.long	2711724288,2240086149
+.long	2307492096,892665909
+.long	1650614784,202113036
+.long	2543294208,1094778945
+.long	1414812672,4025417967
+.long	1532713728,2475884691
+.long	505290240,421068825
+.long	2509608192,555810849
+.long	3772833792,235798542
+.long	4294967040,1313734734
+.long	1684300800,1701118053
+.long	3537031680,3183280317
+.long	269488128,3099066552
+.long	3301229568,2408513679
+.long	0,3958046955
+.long	1212696576,3469607118
+.long	2745410304,808452144
+.long	4160222976,1600061535
+.long	1970631936,3318022341
+.long	3688618752,437911578
+.long	2324335104,3789619425
+.long	50529024,3402236106
+.long	3873891840,1195835463
+.long	3671775744,1027407933
+.long	151587072,16842753
+.long	1061109504,3604349142
+.long	3722304768,1448476758
+.long	2492765184,1296891981
+.long	2273806080,218955789
+.long	1549556736,1717960806
+.long	2206434048,3435921612
+.long	33686016,757923885
+.long	3452816640,303169554
+.long	1246382592,538968096
+.long	2425393152,2981167281
+.long	858993408,2576941209
+.long	1936945920,1280049228
+.long	1734829824,3267494082
+.long	4143379968,2122186878
+.long	4092850944,84213765
+.long	2644352256,3082223799
+.long	2139062016,825294897
+.long	3217014528,387383319
+.long	3806519808,3621191895
+.long	1381126656,1482162264
+.long	2610666240,1633747041
+.long	3638089728,454754331
+.long	640034304,471597084
+.long	3368601600,252641295
+.long	926365440,370540566
+.long	3334915584,404226072
+.long	993737472,572653602
+.long	2172748032,1145307204
+.long	2526451200,2998010034
+.long	1869573888,3048538293
+.long	1263225600,2442199185
+.long	320017152,134742024
+.long	3200171520,2829582504
+.long	1667457792,4244373756
+.long	774778368,1347420240
+.long	3924420864,3503292624
+.long	2038003968,2105344125
+.long	2812782336,2307457161
+.long	2358021120,2543255703
+.long	2678038272,1532690523
+.long	1852730880,2509570197
+.long	3166485504,4294902015
+.long	2391707136,3536978130
+.long	690563328,3301179588
+.long	4126536960,1212678216
+.long	4193908992,4160159991
+.long	3065427456,3688562907
+.long	791621376,50528259
+.long	4261281024,3671720154
+.long	3031741440,1061093439
+.long	1499027712,2492727444
+.long	2021160960,1549533276
+.long	2560137216,33685506
+.long	101058048,1246363722
+.long	1785358848,858980403
+.long	3890734848,1734803559
+.long	1179010560,4092788979
+.long	1903259904,2139029631
+.long	3132799488,3806462178
+.long	3570717696,2610626715
+.long	623191296,640024614
+.long	2880154368,926351415
+.long	1111638528,993722427
+.long	2290649088,2526412950
+.long	2728567296,1263206475
+.long	2374864128,3200123070
+.long	4210752000,774766638
+.long	1920102912,2037973113
+.long	117901056,2357985420
+.long	3115956480,1852702830
+.long	1431655680,2391670926
+.long	4177065984,4126474485
+.long	4008635904,3065381046
+.long	2896997376,4261216509
+.long	168430080,1499005017
+.long	909522432,2560098456
+.long	1229539584,1785331818
+.long	707406336,1178992710
+.long	1751672832,3132752058
+.long	1010580480,623181861
+.long	943208448,1111621698
+.long	4059164928,2728525986
+.long	2762253312,4210688250
+.long	1077952512,117899271
+.long	673720320,1431634005
+.long	3553874688,4008575214
+.long	2071689984,168427530
+.long	3149642496,1229520969
+.long	3385444608,1751646312
+.long	1128481536,943194168
+.long	3250700544,2762211492
+.long	353703168,673710120
+.long	3823362816,2071658619
+.long	2913840384,3385393353
+.long	4109693952,3250651329
+.long	2004317952,3823304931
+.long	3351758592,4109631732
+.long	2155905024,3351707847
+.long	2661195264,2661154974
+.long	14737632,939538488
+.long	328965,1090535745
+.long	5789784,369104406
+.long	14277081,1979741814
+.long	6776679,3640711641
+.long	5131854,2466288531
+.long	8487297,1610637408
+.long	13355979,4060148466
+.long	13224393,1912631922
+.long	723723,3254829762
+.long	11447982,2868947883
+.long	6974058,2583730842
+.long	14013909,1962964341
+.long	1579032,100664838
+.long	6118749,1459640151
+.long	8553090,2684395680
+.long	4605510,2432733585
+.long	14671839,4144035831
+.long	14079702,3036722613
+.long	2565927,3372272073
+.long	9079434,2717950626
+.long	3289650,2348846220
+.long	4934475,3523269330
+.long	4342338,2415956112
+.long	14408667,4127258358
+.long	1842204,117442311
+.long	10395294,2801837991
+.long	10263708,654321447
+.long	3815994,2382401166
+.long	13290186,2986390194
+.long	2434341,1224755529
+.long	8092539,3724599006
+.long	855309,1124090691
+.long	7434609,1543527516
+.long	6250335,3607156695
+.long	2039583,3338717127
+.long	16316664,1040203326
+.long	14145495,4110480885
+.long	4079166,2399178639
+.long	10329501,1728079719
+.long	8158332,520101663
+.long	6316128,402659352
+.long	12171705,1845522030
+.long	12500670,2936057775
+.long	12369084,788541231
+.long	9145227,3791708898
+.long	1447446,2231403909
+.long	3421236,218107149
+.long	5066061,1392530259
+.long	12829635,4026593520
+.long	7500402,2617285788
+.long	9803157,1694524773
+.long	11250603,3925928682
+.long	9342606,2734728099
+.long	12237498,2919280302
+.long	8026746,2650840734
+.long	11776947,3959483628
+.long	131586,2147516544
+.long	11842740,754986285
+.long	11382189,1795189611
+.long	10658466,2818615464
+.long	11316396,721431339
+.long	14211288,905983542
+.long	10132122,2785060518
+.long	1513239,3305162181
+.long	1710618,2248181382
+.long	3487029,1291865421
+.long	13421772,855651123
+.long	16250871,4244700669
+.long	10066329,1711302246
+.long	6381921,1476417624
+.long	5921370,2516620950
+.long	15263976,973093434
+.long	2368548,150997257
+.long	5658198,2499843477
+.long	4210752,268439568
+.long	14803425,2013296760
+.long	6513507,3623934168
+.long	592137,1107313218
+.long	3355443,3422604492
+.long	12566463,4009816047
+.long	10000536,637543974
+.long	9934743,3842041317
+.long	8750469,1627414881
+.long	6842472,436214298
+.long	16579836,1056980799
+.long	15527148,989870907
+.long	657930,2181071490
+.long	14342874,3053500086
+.long	7303023,3674266587
+.long	5460819,3556824276
+.long	6447714,2550175896
+.long	10724259,3892373736
+.long	3026478,2332068747
+.long	526344,33554946
+.long	11513775,3942706155
+.long	2631720,167774730
+.long	11579568,738208812
+.long	7631988,486546717
+.long	12763842,2952835248
+.long	12434877,1862299503
+.long	3552822,2365623693
+.long	2236962,2281736328
+.long	3684408,234884622
+.long	6579300,419436825
+.long	1973790,2264958855
+.long	3750201,1308642894
+.long	2894892,184552203
+.long	10921638,2835392937
+.long	3158064,201329676
+.long	15066597,2030074233
+.long	4473924,285217041
+.long	16645629,2130739071
+.long	8947848,570434082
+.long	10461087,3875596263
+.long	6645093,1493195097
+.long	8882055,3774931425
+.long	7039851,3657489114
+.long	16053492,1023425853
+.long	2302755,3355494600
+.long	4737096,301994514
+.long	1052688,67109892
+.long	13750737,1946186868
+.long	5329233,1409307732
+.long	12632256,805318704
+.long	16382457,2113961598
+.long	13816530,3019945140
+.long	10526880,671098920
+.long	5592405,1426085205
+.long	10592673,1744857192
+.long	4276545,1342197840
+.long	16448250,3187719870
+.long	4408131,3489714384
+.long	1250067,3288384708
+.long	12895428,822096177
+.long	3092271,3405827019
+.long	11053224,704653866
+.long	11974326,2902502829
+.long	3947580,251662095
+.long	2829099,3389049546
+.long	12698049,1879076976
+.long	16777215,4278255615
+.long	13158600,838873650
+.long	10855845,1761634665
+.long	2105376,134219784
+.long	9013641,1644192354
+.long	0,0
+.long	9474192,603989028
+.long	4671303,3506491857
+.long	15724527,4211145723
+.long	15395562,3120609978
+.long	12040119,3976261101
+.long	1381653,1157645637
+.long	394758,2164294017
+.long	13487565,1929409395
+.long	11908533,1828744557
+.long	1184274,2214626436
+.long	8289918,2667618207
+.long	12303291,3993038574
+.long	2697513,1241533002
+.long	986895,3271607235
+.long	12105912,771763758
+.long	460551,3238052289
+.long	263172,16777473
+.long	10197915,3858818790
+.long	9737364,620766501
+.long	2171169,1207978056
+.long	6710886,2566953369
+.long	15132390,3103832505
+.long	13553358,3003167667
+.long	15592941,2063629179
+.long	15198183,4177590777
+.long	3881787,3456159438
+.long	16711422,3204497343
+.long	8355711,3741376479
+.long	12961221,1895854449
+.long	10790052,687876393
+.long	3618615,3439381965
+.long	11645361,1811967084
+.long	5000268,318771987
+.long	9539985,1677747300
+.long	7237230,2600508315
+.long	9276813,1660969827
+.long	7763574,2634063261
+.long	197379,3221274816
+.long	2960685,1258310475
+.long	14606046,3070277559
+.long	9868950,2768283045
+.long	2500134,2298513801
+.long	8224125,1593859935
+.long	13027014,2969612721
+.long	6052956,385881879
+.long	13882323,4093703412
+.long	15921906,3154164924
+.long	5197647,3540046803
+.long	1644825,1174423110
+.long	4144959,3472936911
+.long	14474460,922761015
+.long	7960953,1577082462
+.long	1907997,1191200583
+.long	5395026,2483066004
+.long	15461355,4194368250
+.long	15987699,4227923196
+.long	7171437,1526750043
+.long	6184542,2533398423
+.long	16514043,4261478142
+.long	6908265,1509972570
+.long	11711154,2885725356
+.long	15790320,1006648380
+.long	3223857,1275087948
+.long	789516,50332419
+.long	13948116,889206069
+.long	13619151,4076925939
+.long	9211020,587211555
+.long	14869218,3087055032
+.long	7697781,1560304989
+.long	11119017,1778412138
+.long	4868682,2449511058
+.long	5723991,3573601749
+.long	8684676,553656609
+.long	1118481,1140868164
+.long	4539717,1358975313
+.long	1776411,3321939654
+.long	16119285,2097184125
+.long	15000804,956315961
+.long	921102,2197848963
+.long	7566195,3691044060
+.long	11184810,2852170410
+.long	15856113,2080406652
+.long	14540253,1996519287
+.long	5855577,1442862678
+.long	1315860,83887365
+.long	7105644,452991771
+.long	9605778,2751505572
+.long	5526612,352326933
+.long	13684944,872428596
+.long	7895160,503324190
+.long	7368816,469769244
+.long	14935011,4160813304
+.long	4802889,1375752786
+.long	8421504,536879136
+.long	5263440,335549460
+.long	10987431,3909151209
+.long	16185078,3170942397
+.long	7829367,3707821533
+.long	9671571,3825263844
+.long	8816262,2701173153
+.long	8618883,3758153952
+.long	2763306,2315291274
+.long	13092807,4043370993
+.long	5987163,3590379222
+.long	15329769,2046851706
+.long	15658734,3137387451
+.long	9408399,3808486371
+.long	65793,1073758272
+.long	4013373,1325420367
+.globl	Camellia_cbc_encrypt
+.type	Camellia_cbc_encrypt,@function
+.align	16
+Camellia_cbc_encrypt:
+.L_Camellia_cbc_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%ecx
+	cmpl	$0,%ecx
+	je	.L016enc_out
+	pushfl
+	cld
+	movl	24(%esp),%eax
+	movl	28(%esp),%ebx
+	movl	36(%esp),%edx
+	movl	40(%esp),%ebp
+	leal	-64(%esp),%esi
+	andl	$-64,%esi
+	leal	-127(%edx),%edi
+	subl	%esi,%edi
+	negl	%edi
+	andl	$960,%edi
+	subl	%edi,%esi
+	movl	44(%esp),%edi
+	xchgl	%esi,%esp
+	addl	$4,%esp
+	movl	%esi,20(%esp)
+	movl	%eax,24(%esp)
+	movl	%ebx,28(%esp)
+	movl	%ecx,32(%esp)
+	movl	%edx,36(%esp)
+	movl	%ebp,40(%esp)
+	call	.L017pic_point
+.L017pic_point:
+	popl	%ebp
+	leal	.LCamellia_SBOX-.L017pic_point(%ebp),%ebp
+	movl	$32,%esi
+.align	4
+.L018prefetch_sbox:
+	movl	(%ebp),%eax
+	movl	32(%ebp),%ebx
+	movl	64(%ebp),%ecx
+	movl	96(%ebp),%edx
+	leal	128(%ebp),%ebp
+	decl	%esi
+	jnz	.L018prefetch_sbox
+	movl	36(%esp),%eax
+	subl	$4096,%ebp
+	movl	24(%esp),%esi
+	movl	272(%eax),%edx
+	cmpl	$0,%edi
+	je	.L019DECRYPT
+	movl	32(%esp),%ecx
+	movl	40(%esp),%edi
+	shll	$6,%edx
+	leal	(%eax,%edx,1),%edx
+	movl	%edx,16(%esp)
+	testl	$4294967280,%ecx
+	jz	.L020enc_tail
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+.align	4
+.L021enc_loop:
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	xorl	(%esi),%eax
+	xorl	4(%esi),%ebx
+	xorl	8(%esi),%ecx
+	bswap	%eax
+	xorl	12(%esi),%edx
+	bswap	%ebx
+	movl	36(%esp),%edi
+	bswap	%ecx
+	bswap	%edx
+	call	_x86_Camellia_encrypt
+	movl	24(%esp),%esi
+	movl	28(%esp),%edi
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	movl	%eax,(%edi)
+	bswap	%edx
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	32(%esp),%ecx
+	leal	16(%esi),%esi
+	movl	%esi,24(%esp)
+	leal	16(%edi),%edx
+	movl	%edx,28(%esp)
+	subl	$16,%ecx
+	testl	$4294967280,%ecx
+	movl	%ecx,32(%esp)
+	jnz	.L021enc_loop
+	testl	$15,%ecx
+	jnz	.L020enc_tail
+	movl	40(%esp),%esi
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	movl	20(%esp),%esp
+	popfl
+.L016enc_out:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+	pushfl
+.align	4
+.L020enc_tail:
+	movl	%edi,%eax
+	movl	28(%esp),%edi
+	pushl	%eax
+	movl	$16,%ebx
+	subl	%ecx,%ebx
+	cmpl	%esi,%edi
+	je	.L022enc_in_place
+.align	4
+.long	2767451785
+	jmp	.L023enc_skip_in_place
+.L022enc_in_place:
+	leal	(%edi,%ecx,1),%edi
+.L023enc_skip_in_place:
+	movl	%ebx,%ecx
+	xorl	%eax,%eax
+.align	4
+.long	2868115081
+	popl	%edi
+	movl	28(%esp),%esi
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	$16,32(%esp)
+	jmp	.L021enc_loop
+.align	16
+.L019DECRYPT:
+	shll	$6,%edx
+	leal	(%eax,%edx,1),%edx
+	movl	%eax,16(%esp)
+	movl	%edx,36(%esp)
+	cmpl	28(%esp),%esi
+	je	.L024dec_in_place
+	movl	40(%esp),%edi
+	movl	%edi,44(%esp)
+.align	4
+.L025dec_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	bswap	%eax
+	movl	12(%esi),%edx
+	bswap	%ebx
+	movl	36(%esp),%edi
+	bswap	%ecx
+	bswap	%edx
+	call	_x86_Camellia_decrypt
+	movl	44(%esp),%edi
+	movl	32(%esp),%esi
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	xorl	(%edi),%eax
+	bswap	%edx
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	subl	$16,%esi
+	jc	.L026dec_partial
+	movl	%esi,32(%esp)
+	movl	24(%esp),%esi
+	movl	28(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	%esi,44(%esp)
+	leal	16(%esi),%esi
+	movl	%esi,24(%esp)
+	leal	16(%edi),%edi
+	movl	%edi,28(%esp)
+	jnz	.L025dec_loop
+	movl	44(%esp),%edi
+.L027dec_end:
+	movl	40(%esp),%esi
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	jmp	.L028dec_out
+.align	4
+.L026dec_partial:
+	leal	44(%esp),%edi
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	leal	16(%esi),%ecx
+	movl	%edi,%esi
+	movl	28(%esp),%edi
+.long	2767451785
+	movl	24(%esp),%edi
+	jmp	.L027dec_end
+.align	4
+.L024dec_in_place:
+.L029dec_in_place_loop:
+	leal	44(%esp),%edi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	bswap	%eax
+	movl	%edx,12(%edi)
+	bswap	%ebx
+	movl	36(%esp),%edi
+	bswap	%ecx
+	bswap	%edx
+	call	_x86_Camellia_decrypt
+	movl	40(%esp),%edi
+	movl	28(%esp),%esi
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	xorl	(%edi),%eax
+	bswap	%edx
+	xorl	4(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	12(%edi),%edx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	leal	16(%esi),%esi
+	movl	%esi,28(%esp)
+	leal	44(%esp),%esi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,12(%edi)
+	movl	24(%esp),%esi
+	leal	16(%esi),%esi
+	movl	%esi,24(%esp)
+	movl	32(%esp),%ecx
+	subl	$16,%ecx
+	jc	.L030dec_in_place_partial
+	movl	%ecx,32(%esp)
+	jnz	.L029dec_in_place_loop
+	jmp	.L028dec_out
+.align	4
+.L030dec_in_place_partial:
+	movl	28(%esp),%edi
+	leal	44(%esp),%esi
+	leal	(%edi,%ecx,1),%edi
+	leal	16(%esi,%ecx,1),%esi
+	negl	%ecx
+.long	2767451785
+.align	4
+.L028dec_out:
+	movl	20(%esp),%esp
+	popfl
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	Camellia_cbc_encrypt,.-.L_Camellia_cbc_encrypt_begin
+.byte	67,97,109,101,108,108,105,97,32,102,111,114,32,120,56,54
+.byte	32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
+.byte	115,108,46,111,114,103,62,0
+#endif


Property changes on: trunk/secure/lib/libcrypto/i386/cmll-x86.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
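
The Camellia_cbc_encrypt code above is the i386 backend for OpenSSL's
public Camellia interface. As a minimal sketch of how these entry points
are driven (assuming the OpenSSL 1.0.x <openssl/camellia.h> declarations
shipped in this tree; the helper name is illustrative, not code from this
commit):

#include <string.h>
#include <openssl/camellia.h>

int camellia_cbc_roundtrip(void)
{
	static const unsigned char key[16] = "0123456789abcdef";
	unsigned char iv[CAMELLIA_BLOCK_SIZE] = {0};
	unsigned char pt[32] = "two blocks of plaintext here...";
	unsigned char ct[32], out[32];
	CAMELLIA_KEY ks;

	/* Camellia_set_key() is a thin C wrapper around the
	 * private_Camellia_set_key/Camellia_Ekeygen pair in the
	 * assembly; for a 128-bit key the schedule ends with the
	 * movl $3,%eax grand-round count seen above. */
	if (Camellia_set_key(key, 128, &ks) != 0)
		return -1;

	Camellia_cbc_encrypt(pt, ct, sizeof(pt), &ks, iv, CAMELLIA_ENCRYPT);

	memset(iv, 0, sizeof(iv));	/* the IV is updated in place */
	Camellia_cbc_encrypt(ct, out, sizeof(ct), &ks, iv, CAMELLIA_DECRYPT);

	return memcmp(pt, out, sizeof(pt)) == 0 ? 0 : -1;
}
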
Deleted: trunk/secure/lib/libcrypto/i386/cmll-x86.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/cmll-x86.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/cmll-x86.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,2376 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/cmll-x86.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"cmll-586.s"
-.text
-.globl	Camellia_EncryptBlock_Rounds
-.type	Camellia_EncryptBlock_Rounds,@function
-.align	16
-Camellia_EncryptBlock_Rounds:
-.L_Camellia_EncryptBlock_Rounds_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%eax
-	movl	24(%esp),%esi
-	movl	28(%esp),%edi
-	movl	%esp,%ebx
-	subl	$28,%esp
-	andl	$-64,%esp
-	leal	-127(%edi),%ecx
-	subl	%esp,%ecx
-	negl	%ecx
-	andl	$960,%ecx
-	subl	%ecx,%esp
-	addl	$4,%esp
-	shll	$6,%eax
-	leal	(%edi,%eax,1),%eax
-	movl	%ebx,20(%esp)
-	movl	%eax,16(%esp)
-	call	.L000pic_point
-.L000pic_point:
-	popl	%ebp
-	leal	.LCamellia_SBOX-.L000pic_point(%ebp),%ebp
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	bswap	%eax
-	movl	12(%esi),%edx
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	call	_x86_Camellia_encrypt
-	movl	20(%esp),%esp
-	bswap	%eax
-	movl	32(%esp),%esi
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	movl	%eax,(%esi)
-	movl	%ebx,4(%esi)
-	movl	%ecx,8(%esi)
-	movl	%edx,12(%esi)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	Camellia_EncryptBlock_Rounds,.-.L_Camellia_EncryptBlock_Rounds_begin
-.globl	Camellia_EncryptBlock
-.type	Camellia_EncryptBlock,@function
-.align	16
-Camellia_EncryptBlock:
-.L_Camellia_EncryptBlock_begin:
-	movl	$128,%eax
-	subl	4(%esp),%eax
-	movl	$3,%eax
-	adcl	$0,%eax
-	movl	%eax,4(%esp)
-	jmp	.L_Camellia_EncryptBlock_Rounds_begin
-.size	Camellia_EncryptBlock,.-.L_Camellia_EncryptBlock_begin
-.globl	Camellia_encrypt
-.type	Camellia_encrypt,@function
-.align	16
-Camellia_encrypt:
-.L_Camellia_encrypt_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%esi
-	movl	28(%esp),%edi
-	movl	%esp,%ebx
-	subl	$28,%esp
-	andl	$-64,%esp
-	movl	272(%edi),%eax
-	leal	-127(%edi),%ecx
-	subl	%esp,%ecx
-	negl	%ecx
-	andl	$960,%ecx
-	subl	%ecx,%esp
-	addl	$4,%esp
-	shll	$6,%eax
-	leal	(%edi,%eax,1),%eax
-	movl	%ebx,20(%esp)
-	movl	%eax,16(%esp)
-	call	.L001pic_point
-.L001pic_point:
-	popl	%ebp
-	leal	.LCamellia_SBOX-.L001pic_point(%ebp),%ebp
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	bswap	%eax
-	movl	12(%esi),%edx
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	call	_x86_Camellia_encrypt
-	movl	20(%esp),%esp
-	bswap	%eax
-	movl	24(%esp),%esi
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	movl	%eax,(%esi)
-	movl	%ebx,4(%esi)
-	movl	%ecx,8(%esi)
-	movl	%edx,12(%esi)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	Camellia_encrypt,.-.L_Camellia_encrypt_begin
-.type	_x86_Camellia_encrypt,@function
-.align	16
-_x86_Camellia_encrypt:
-	xorl	(%edi),%eax
-	xorl	4(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	12(%edi),%edx
-	movl	16(%edi),%esi
-	movl	%eax,4(%esp)
-	movl	%ebx,8(%esp)
-	movl	%ecx,12(%esp)
-	movl	%edx,16(%esp)
-.align	16
-.L002loop:
-	xorl	%esi,%eax
-	xorl	20(%edi),%ebx
-	movzbl	%ah,%esi
-	movl	2052(%ebp,%esi,8),%edx
-	movzbl	%al,%esi
-	xorl	4(%ebp,%esi,8),%edx
-	shrl	$16,%eax
-	movzbl	%bl,%esi
-	movl	(%ebp,%esi,8),%ecx
-	movzbl	%ah,%esi
-	xorl	(%ebp,%esi,8),%edx
-	movzbl	%bh,%esi
-	xorl	4(%ebp,%esi,8),%ecx
-	shrl	$16,%ebx
-	movzbl	%al,%eax
-	xorl	2048(%ebp,%eax,8),%edx
-	movzbl	%bh,%esi
-	movl	16(%esp),%eax
-	xorl	%edx,%ecx
-	rorl	$8,%edx
-	xorl	2048(%ebp,%esi,8),%ecx
-	movzbl	%bl,%esi
-	movl	12(%esp),%ebx
-	xorl	%eax,%edx
-	xorl	2052(%ebp,%esi,8),%ecx
-	movl	24(%edi),%esi
-	xorl	%ecx,%edx
-	movl	%edx,16(%esp)
-	xorl	%ebx,%ecx
-	movl	%ecx,12(%esp)
-	xorl	%esi,%ecx
-	xorl	28(%edi),%edx
-	movzbl	%ch,%esi
-	movl	2052(%ebp,%esi,8),%ebx
-	movzbl	%cl,%esi
-	xorl	4(%ebp,%esi,8),%ebx
-	shrl	$16,%ecx
-	movzbl	%dl,%esi
-	movl	(%ebp,%esi,8),%eax
-	movzbl	%ch,%esi
-	xorl	(%ebp,%esi,8),%ebx
-	movzbl	%dh,%esi
-	xorl	4(%ebp,%esi,8),%eax
-	shrl	$16,%edx
-	movzbl	%cl,%ecx
-	xorl	2048(%ebp,%ecx,8),%ebx
-	movzbl	%dh,%esi
-	movl	8(%esp),%ecx
-	xorl	%ebx,%eax
-	rorl	$8,%ebx
-	xorl	2048(%ebp,%esi,8),%eax
-	movzbl	%dl,%esi
-	movl	4(%esp),%edx
-	xorl	%ecx,%ebx
-	xorl	2052(%ebp,%esi,8),%eax
-	movl	32(%edi),%esi
-	xorl	%eax,%ebx
-	movl	%ebx,8(%esp)
-	xorl	%edx,%eax
-	movl	%eax,4(%esp)
-	xorl	%esi,%eax
-	xorl	36(%edi),%ebx
-	movzbl	%ah,%esi
-	movl	2052(%ebp,%esi,8),%edx
-	movzbl	%al,%esi
-	xorl	4(%ebp,%esi,8),%edx
-	shrl	$16,%eax
-	movzbl	%bl,%esi
-	movl	(%ebp,%esi,8),%ecx
-	movzbl	%ah,%esi
-	xorl	(%ebp,%esi,8),%edx
-	movzbl	%bh,%esi
-	xorl	4(%ebp,%esi,8),%ecx
-	shrl	$16,%ebx
-	movzbl	%al,%eax
-	xorl	2048(%ebp,%eax,8),%edx
-	movzbl	%bh,%esi
-	movl	16(%esp),%eax
-	xorl	%edx,%ecx
-	rorl	$8,%edx
-	xorl	2048(%ebp,%esi,8),%ecx
-	movzbl	%bl,%esi
-	movl	12(%esp),%ebx
-	xorl	%eax,%edx
-	xorl	2052(%ebp,%esi,8),%ecx
-	movl	40(%edi),%esi
-	xorl	%ecx,%edx
-	movl	%edx,16(%esp)
-	xorl	%ebx,%ecx
-	movl	%ecx,12(%esp)
-	xorl	%esi,%ecx
-	xorl	44(%edi),%edx
-	movzbl	%ch,%esi
-	movl	2052(%ebp,%esi,8),%ebx
-	movzbl	%cl,%esi
-	xorl	4(%ebp,%esi,8),%ebx
-	shrl	$16,%ecx
-	movzbl	%dl,%esi
-	movl	(%ebp,%esi,8),%eax
-	movzbl	%ch,%esi
-	xorl	(%ebp,%esi,8),%ebx
-	movzbl	%dh,%esi
-	xorl	4(%ebp,%esi,8),%eax
-	shrl	$16,%edx
-	movzbl	%cl,%ecx
-	xorl	2048(%ebp,%ecx,8),%ebx
-	movzbl	%dh,%esi
-	movl	8(%esp),%ecx
-	xorl	%ebx,%eax
-	rorl	$8,%ebx
-	xorl	2048(%ebp,%esi,8),%eax
-	movzbl	%dl,%esi
-	movl	4(%esp),%edx
-	xorl	%ecx,%ebx
-	xorl	2052(%ebp,%esi,8),%eax
-	movl	48(%edi),%esi
-	xorl	%eax,%ebx
-	movl	%ebx,8(%esp)
-	xorl	%edx,%eax
-	movl	%eax,4(%esp)
-	xorl	%esi,%eax
-	xorl	52(%edi),%ebx
-	movzbl	%ah,%esi
-	movl	2052(%ebp,%esi,8),%edx
-	movzbl	%al,%esi
-	xorl	4(%ebp,%esi,8),%edx
-	shrl	$16,%eax
-	movzbl	%bl,%esi
-	movl	(%ebp,%esi,8),%ecx
-	movzbl	%ah,%esi
-	xorl	(%ebp,%esi,8),%edx
-	movzbl	%bh,%esi
-	xorl	4(%ebp,%esi,8),%ecx
-	shrl	$16,%ebx
-	movzbl	%al,%eax
-	xorl	2048(%ebp,%eax,8),%edx
-	movzbl	%bh,%esi
-	movl	16(%esp),%eax
-	xorl	%edx,%ecx
-	rorl	$8,%edx
-	xorl	2048(%ebp,%esi,8),%ecx
-	movzbl	%bl,%esi
-	movl	12(%esp),%ebx
-	xorl	%eax,%edx
-	xorl	2052(%ebp,%esi,8),%ecx
-	movl	56(%edi),%esi
-	xorl	%ecx,%edx
-	movl	%edx,16(%esp)
-	xorl	%ebx,%ecx
-	movl	%ecx,12(%esp)
-	xorl	%esi,%ecx
-	xorl	60(%edi),%edx
-	movzbl	%ch,%esi
-	movl	2052(%ebp,%esi,8),%ebx
-	movzbl	%cl,%esi
-	xorl	4(%ebp,%esi,8),%ebx
-	shrl	$16,%ecx
-	movzbl	%dl,%esi
-	movl	(%ebp,%esi,8),%eax
-	movzbl	%ch,%esi
-	xorl	(%ebp,%esi,8),%ebx
-	movzbl	%dh,%esi
-	xorl	4(%ebp,%esi,8),%eax
-	shrl	$16,%edx
-	movzbl	%cl,%ecx
-	xorl	2048(%ebp,%ecx,8),%ebx
-	movzbl	%dh,%esi
-	movl	8(%esp),%ecx
-	xorl	%ebx,%eax
-	rorl	$8,%ebx
-	xorl	2048(%ebp,%esi,8),%eax
-	movzbl	%dl,%esi
-	movl	4(%esp),%edx
-	xorl	%ecx,%ebx
-	xorl	2052(%ebp,%esi,8),%eax
-	movl	64(%edi),%esi
-	xorl	%eax,%ebx
-	movl	%ebx,8(%esp)
-	xorl	%edx,%eax
-	movl	%eax,4(%esp)
-	addl	$64,%edi
-	cmpl	20(%esp),%edi
-	je	.L003done
-	andl	%eax,%esi
-	movl	16(%esp),%edx
-	roll	$1,%esi
-	movl	%edx,%ecx
-	xorl	%esi,%ebx
-	orl	12(%edi),%ecx
-	movl	%ebx,8(%esp)
-	xorl	12(%esp),%ecx
-	movl	4(%edi),%esi
-	movl	%ecx,12(%esp)
-	orl	%ebx,%esi
-	andl	8(%edi),%ecx
-	xorl	%esi,%eax
-	roll	$1,%ecx
-	movl	%eax,4(%esp)
-	xorl	%ecx,%edx
-	movl	16(%edi),%esi
-	movl	%edx,16(%esp)
-	jmp	.L002loop
-.align	8
-.L003done:
-	movl	%eax,%ecx
-	movl	%ebx,%edx
-	movl	12(%esp),%eax
-	movl	16(%esp),%ebx
-	xorl	%esi,%eax
-	xorl	4(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	12(%edi),%edx
-	ret
-.size	_x86_Camellia_encrypt,.-_x86_Camellia_encrypt
-.globl	Camellia_DecryptBlock_Rounds
-.type	Camellia_DecryptBlock_Rounds,@function
-.align	16
-Camellia_DecryptBlock_Rounds:
-.L_Camellia_DecryptBlock_Rounds_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%eax
-	movl	24(%esp),%esi
-	movl	28(%esp),%edi
-	movl	%esp,%ebx
-	subl	$28,%esp
-	andl	$-64,%esp
-	leal	-127(%edi),%ecx
-	subl	%esp,%ecx
-	negl	%ecx
-	andl	$960,%ecx
-	subl	%ecx,%esp
-	addl	$4,%esp
-	shll	$6,%eax
-	movl	%edi,16(%esp)
-	leal	(%edi,%eax,1),%edi
-	movl	%ebx,20(%esp)
-	call	.L004pic_point
-.L004pic_point:
-	popl	%ebp
-	leal	.LCamellia_SBOX-.L004pic_point(%ebp),%ebp
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	bswap	%eax
-	movl	12(%esi),%edx
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	call	_x86_Camellia_decrypt
-	movl	20(%esp),%esp
-	bswap	%eax
-	movl	32(%esp),%esi
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	movl	%eax,(%esi)
-	movl	%ebx,4(%esi)
-	movl	%ecx,8(%esi)
-	movl	%edx,12(%esi)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	Camellia_DecryptBlock_Rounds,.-.L_Camellia_DecryptBlock_Rounds_begin
-.globl	Camellia_DecryptBlock
-.type	Camellia_DecryptBlock,@function
-.align	16
-Camellia_DecryptBlock:
-.L_Camellia_DecryptBlock_begin:
-	movl	$128,%eax
-	subl	4(%esp),%eax
-	movl	$3,%eax
-	adcl	$0,%eax
-	movl	%eax,4(%esp)
-	jmp	.L_Camellia_DecryptBlock_Rounds_begin
-.size	Camellia_DecryptBlock,.-.L_Camellia_DecryptBlock_begin
-.globl	Camellia_decrypt
-.type	Camellia_decrypt,@function
-.align	16
-Camellia_decrypt:
-.L_Camellia_decrypt_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%esi
-	movl	28(%esp),%edi
-	movl	%esp,%ebx
-	subl	$28,%esp
-	andl	$-64,%esp
-	movl	272(%edi),%eax
-	leal	-127(%edi),%ecx
-	subl	%esp,%ecx
-	negl	%ecx
-	andl	$960,%ecx
-	subl	%ecx,%esp
-	addl	$4,%esp
-	shll	$6,%eax
-	movl	%edi,16(%esp)
-	leal	(%edi,%eax,1),%edi
-	movl	%ebx,20(%esp)
-	call	.L005pic_point
-.L005pic_point:
-	popl	%ebp
-	leal	.LCamellia_SBOX-.L005pic_point(%ebp),%ebp
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	bswap	%eax
-	movl	12(%esi),%edx
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	call	_x86_Camellia_decrypt
-	movl	20(%esp),%esp
-	bswap	%eax
-	movl	24(%esp),%esi
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	movl	%eax,(%esi)
-	movl	%ebx,4(%esi)
-	movl	%ecx,8(%esi)
-	movl	%edx,12(%esi)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	Camellia_decrypt,.-.L_Camellia_decrypt_begin
-.type	_x86_Camellia_decrypt,@function
-.align	16
-_x86_Camellia_decrypt:
-	xorl	(%edi),%eax
-	xorl	4(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	12(%edi),%edx
-	movl	-8(%edi),%esi
-	movl	%eax,4(%esp)
-	movl	%ebx,8(%esp)
-	movl	%ecx,12(%esp)
-	movl	%edx,16(%esp)
-.align	16
-.L006loop:
-	xorl	%esi,%eax
-	xorl	-4(%edi),%ebx
-	movzbl	%ah,%esi
-	movl	2052(%ebp,%esi,8),%edx
-	movzbl	%al,%esi
-	xorl	4(%ebp,%esi,8),%edx
-	shrl	$16,%eax
-	movzbl	%bl,%esi
-	movl	(%ebp,%esi,8),%ecx
-	movzbl	%ah,%esi
-	xorl	(%ebp,%esi,8),%edx
-	movzbl	%bh,%esi
-	xorl	4(%ebp,%esi,8),%ecx
-	shrl	$16,%ebx
-	movzbl	%al,%eax
-	xorl	2048(%ebp,%eax,8),%edx
-	movzbl	%bh,%esi
-	movl	16(%esp),%eax
-	xorl	%edx,%ecx
-	rorl	$8,%edx
-	xorl	2048(%ebp,%esi,8),%ecx
-	movzbl	%bl,%esi
-	movl	12(%esp),%ebx
-	xorl	%eax,%edx
-	xorl	2052(%ebp,%esi,8),%ecx
-	movl	-16(%edi),%esi
-	xorl	%ecx,%edx
-	movl	%edx,16(%esp)
-	xorl	%ebx,%ecx
-	movl	%ecx,12(%esp)
-	xorl	%esi,%ecx
-	xorl	-12(%edi),%edx
-	movzbl	%ch,%esi
-	movl	2052(%ebp,%esi,8),%ebx
-	movzbl	%cl,%esi
-	xorl	4(%ebp,%esi,8),%ebx
-	shrl	$16,%ecx
-	movzbl	%dl,%esi
-	movl	(%ebp,%esi,8),%eax
-	movzbl	%ch,%esi
-	xorl	(%ebp,%esi,8),%ebx
-	movzbl	%dh,%esi
-	xorl	4(%ebp,%esi,8),%eax
-	shrl	$16,%edx
-	movzbl	%cl,%ecx
-	xorl	2048(%ebp,%ecx,8),%ebx
-	movzbl	%dh,%esi
-	movl	8(%esp),%ecx
-	xorl	%ebx,%eax
-	rorl	$8,%ebx
-	xorl	2048(%ebp,%esi,8),%eax
-	movzbl	%dl,%esi
-	movl	4(%esp),%edx
-	xorl	%ecx,%ebx
-	xorl	2052(%ebp,%esi,8),%eax
-	movl	-24(%edi),%esi
-	xorl	%eax,%ebx
-	movl	%ebx,8(%esp)
-	xorl	%edx,%eax
-	movl	%eax,4(%esp)
-	xorl	%esi,%eax
-	xorl	-20(%edi),%ebx
-	movzbl	%ah,%esi
-	movl	2052(%ebp,%esi,8),%edx
-	movzbl	%al,%esi
-	xorl	4(%ebp,%esi,8),%edx
-	shrl	$16,%eax
-	movzbl	%bl,%esi
-	movl	(%ebp,%esi,8),%ecx
-	movzbl	%ah,%esi
-	xorl	(%ebp,%esi,8),%edx
-	movzbl	%bh,%esi
-	xorl	4(%ebp,%esi,8),%ecx
-	shrl	$16,%ebx
-	movzbl	%al,%eax
-	xorl	2048(%ebp,%eax,8),%edx
-	movzbl	%bh,%esi
-	movl	16(%esp),%eax
-	xorl	%edx,%ecx
-	rorl	$8,%edx
-	xorl	2048(%ebp,%esi,8),%ecx
-	movzbl	%bl,%esi
-	movl	12(%esp),%ebx
-	xorl	%eax,%edx
-	xorl	2052(%ebp,%esi,8),%ecx
-	movl	-32(%edi),%esi
-	xorl	%ecx,%edx
-	movl	%edx,16(%esp)
-	xorl	%ebx,%ecx
-	movl	%ecx,12(%esp)
-	xorl	%esi,%ecx
-	xorl	-28(%edi),%edx
-	movzbl	%ch,%esi
-	movl	2052(%ebp,%esi,8),%ebx
-	movzbl	%cl,%esi
-	xorl	4(%ebp,%esi,8),%ebx
-	shrl	$16,%ecx
-	movzbl	%dl,%esi
-	movl	(%ebp,%esi,8),%eax
-	movzbl	%ch,%esi
-	xorl	(%ebp,%esi,8),%ebx
-	movzbl	%dh,%esi
-	xorl	4(%ebp,%esi,8),%eax
-	shrl	$16,%edx
-	movzbl	%cl,%ecx
-	xorl	2048(%ebp,%ecx,8),%ebx
-	movzbl	%dh,%esi
-	movl	8(%esp),%ecx
-	xorl	%ebx,%eax
-	rorl	$8,%ebx
-	xorl	2048(%ebp,%esi,8),%eax
-	movzbl	%dl,%esi
-	movl	4(%esp),%edx
-	xorl	%ecx,%ebx
-	xorl	2052(%ebp,%esi,8),%eax
-	movl	-40(%edi),%esi
-	xorl	%eax,%ebx
-	movl	%ebx,8(%esp)
-	xorl	%edx,%eax
-	movl	%eax,4(%esp)
-	xorl	%esi,%eax
-	xorl	-36(%edi),%ebx
-	movzbl	%ah,%esi
-	movl	2052(%ebp,%esi,8),%edx
-	movzbl	%al,%esi
-	xorl	4(%ebp,%esi,8),%edx
-	shrl	$16,%eax
-	movzbl	%bl,%esi
-	movl	(%ebp,%esi,8),%ecx
-	movzbl	%ah,%esi
-	xorl	(%ebp,%esi,8),%edx
-	movzbl	%bh,%esi
-	xorl	4(%ebp,%esi,8),%ecx
-	shrl	$16,%ebx
-	movzbl	%al,%eax
-	xorl	2048(%ebp,%eax,8),%edx
-	movzbl	%bh,%esi
-	movl	16(%esp),%eax
-	xorl	%edx,%ecx
-	rorl	$8,%edx
-	xorl	2048(%ebp,%esi,8),%ecx
-	movzbl	%bl,%esi
-	movl	12(%esp),%ebx
-	xorl	%eax,%edx
-	xorl	2052(%ebp,%esi,8),%ecx
-	movl	-48(%edi),%esi
-	xorl	%ecx,%edx
-	movl	%edx,16(%esp)
-	xorl	%ebx,%ecx
-	movl	%ecx,12(%esp)
-	xorl	%esi,%ecx
-	xorl	-44(%edi),%edx
-	movzbl	%ch,%esi
-	movl	2052(%ebp,%esi,8),%ebx
-	movzbl	%cl,%esi
-	xorl	4(%ebp,%esi,8),%ebx
-	shrl	$16,%ecx
-	movzbl	%dl,%esi
-	movl	(%ebp,%esi,8),%eax
-	movzbl	%ch,%esi
-	xorl	(%ebp,%esi,8),%ebx
-	movzbl	%dh,%esi
-	xorl	4(%ebp,%esi,8),%eax
-	shrl	$16,%edx
-	movzbl	%cl,%ecx
-	xorl	2048(%ebp,%ecx,8),%ebx
-	movzbl	%dh,%esi
-	movl	8(%esp),%ecx
-	xorl	%ebx,%eax
-	rorl	$8,%ebx
-	xorl	2048(%ebp,%esi,8),%eax
-	movzbl	%dl,%esi
-	movl	4(%esp),%edx
-	xorl	%ecx,%ebx
-	xorl	2052(%ebp,%esi,8),%eax
-	movl	-56(%edi),%esi
-	xorl	%eax,%ebx
-	movl	%ebx,8(%esp)
-	xorl	%edx,%eax
-	movl	%eax,4(%esp)
-	subl	$64,%edi
-	cmpl	20(%esp),%edi
-	je	.L007done
-	andl	%eax,%esi
-	movl	16(%esp),%edx
-	roll	$1,%esi
-	movl	%edx,%ecx
-	xorl	%esi,%ebx
-	orl	4(%edi),%ecx
-	movl	%ebx,8(%esp)
-	xorl	12(%esp),%ecx
-	movl	12(%edi),%esi
-	movl	%ecx,12(%esp)
-	orl	%ebx,%esi
-	andl	(%edi),%ecx
-	xorl	%esi,%eax
-	roll	$1,%ecx
-	movl	%eax,4(%esp)
-	xorl	%ecx,%edx
-	movl	-8(%edi),%esi
-	movl	%edx,16(%esp)
-	jmp	.L006loop
-.align	8
-.L007done:
-	movl	%eax,%ecx
-	movl	%ebx,%edx
-	movl	12(%esp),%eax
-	movl	16(%esp),%ebx
-	xorl	%esi,%ecx
-	xorl	12(%edi),%edx
-	xorl	(%edi),%eax
-	xorl	4(%edi),%ebx
-	ret
-.size	_x86_Camellia_decrypt,.-_x86_Camellia_decrypt
-.globl	Camellia_Ekeygen
-.type	Camellia_Ekeygen,@function
-.align	16
-Camellia_Ekeygen:
-.L_Camellia_Ekeygen_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	subl	$16,%esp
-	movl	36(%esp),%ebp
-	movl	40(%esp),%esi
-	movl	44(%esp),%edi
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	movl	12(%esi),%edx
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	movl	%ecx,8(%edi)
-	movl	%edx,12(%edi)
-	cmpl	$128,%ebp
-	je	.L0081st128
-	movl	16(%esi),%eax
-	movl	20(%esi),%ebx
-	cmpl	$192,%ebp
-	je	.L0091st192
-	movl	24(%esi),%ecx
-	movl	28(%esi),%edx
-	jmp	.L0101st256
-.align	4
-.L0091st192:
-	movl	%eax,%ecx
-	movl	%ebx,%edx
-	notl	%ecx
-	notl	%edx
-.align	4
-.L0101st256:
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	movl	%eax,32(%edi)
-	movl	%ebx,36(%edi)
-	movl	%ecx,40(%edi)
-	movl	%edx,44(%edi)
-	xorl	(%edi),%eax
-	xorl	4(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	12(%edi),%edx
-.align	4
-.L0081st128:
-	call	.L011pic_point
-.L011pic_point:
-	popl	%ebp
-	leal	.LCamellia_SBOX-.L011pic_point(%ebp),%ebp
-	leal	.LCamellia_SIGMA-.LCamellia_SBOX(%ebp),%edi
-	movl	(%edi),%esi
-	movl	%eax,(%esp)
-	movl	%ebx,4(%esp)
-	movl	%ecx,8(%esp)
-	movl	%edx,12(%esp)
-	xorl	%esi,%eax
-	xorl	4(%edi),%ebx
-	movzbl	%ah,%esi
-	movl	2052(%ebp,%esi,8),%edx
-	movzbl	%al,%esi
-	xorl	4(%ebp,%esi,8),%edx
-	shrl	$16,%eax
-	movzbl	%bl,%esi
-	movl	(%ebp,%esi,8),%ecx
-	movzbl	%ah,%esi
-	xorl	(%ebp,%esi,8),%edx
-	movzbl	%bh,%esi
-	xorl	4(%ebp,%esi,8),%ecx
-	shrl	$16,%ebx
-	movzbl	%al,%eax
-	xorl	2048(%ebp,%eax,8),%edx
-	movzbl	%bh,%esi
-	movl	12(%esp),%eax
-	xorl	%edx,%ecx
-	rorl	$8,%edx
-	xorl	2048(%ebp,%esi,8),%ecx
-	movzbl	%bl,%esi
-	movl	8(%esp),%ebx
-	xorl	%eax,%edx
-	xorl	2052(%ebp,%esi,8),%ecx
-	movl	8(%edi),%esi
-	xorl	%ecx,%edx
-	movl	%edx,12(%esp)
-	xorl	%ebx,%ecx
-	movl	%ecx,8(%esp)
-	xorl	%esi,%ecx
-	xorl	12(%edi),%edx
-	movzbl	%ch,%esi
-	movl	2052(%ebp,%esi,8),%ebx
-	movzbl	%cl,%esi
-	xorl	4(%ebp,%esi,8),%ebx
-	shrl	$16,%ecx
-	movzbl	%dl,%esi
-	movl	(%ebp,%esi,8),%eax
-	movzbl	%ch,%esi
-	xorl	(%ebp,%esi,8),%ebx
-	movzbl	%dh,%esi
-	xorl	4(%ebp,%esi,8),%eax
-	shrl	$16,%edx
-	movzbl	%cl,%ecx
-	xorl	2048(%ebp,%ecx,8),%ebx
-	movzbl	%dh,%esi
-	movl	4(%esp),%ecx
-	xorl	%ebx,%eax
-	rorl	$8,%ebx
-	xorl	2048(%ebp,%esi,8),%eax
-	movzbl	%dl,%esi
-	movl	(%esp),%edx
-	xorl	%ecx,%ebx
-	xorl	2052(%ebp,%esi,8),%eax
-	movl	16(%edi),%esi
-	xorl	%eax,%ebx
-	movl	%ebx,4(%esp)
-	xorl	%edx,%eax
-	movl	%eax,(%esp)
-	movl	8(%esp),%ecx
-	movl	12(%esp),%edx
-	movl	44(%esp),%esi
-	xorl	(%esi),%eax
-	xorl	4(%esi),%ebx
-	xorl	8(%esi),%ecx
-	xorl	12(%esi),%edx
-	movl	16(%edi),%esi
-	movl	%eax,(%esp)
-	movl	%ebx,4(%esp)
-	movl	%ecx,8(%esp)
-	movl	%edx,12(%esp)
-	xorl	%esi,%eax
-	xorl	20(%edi),%ebx
-	movzbl	%ah,%esi
-	movl	2052(%ebp,%esi,8),%edx
-	movzbl	%al,%esi
-	xorl	4(%ebp,%esi,8),%edx
-	shrl	$16,%eax
-	movzbl	%bl,%esi
-	movl	(%ebp,%esi,8),%ecx
-	movzbl	%ah,%esi
-	xorl	(%ebp,%esi,8),%edx
-	movzbl	%bh,%esi
-	xorl	4(%ebp,%esi,8),%ecx
-	shrl	$16,%ebx
-	movzbl	%al,%eax
-	xorl	2048(%ebp,%eax,8),%edx
-	movzbl	%bh,%esi
-	movl	12(%esp),%eax
-	xorl	%edx,%ecx
-	rorl	$8,%edx
-	xorl	2048(%ebp,%esi,8),%ecx
-	movzbl	%bl,%esi
-	movl	8(%esp),%ebx
-	xorl	%eax,%edx
-	xorl	2052(%ebp,%esi,8),%ecx
-	movl	24(%edi),%esi
-	xorl	%ecx,%edx
-	movl	%edx,12(%esp)
-	xorl	%ebx,%ecx
-	movl	%ecx,8(%esp)
-	xorl	%esi,%ecx
-	xorl	28(%edi),%edx
-	movzbl	%ch,%esi
-	movl	2052(%ebp,%esi,8),%ebx
-	movzbl	%cl,%esi
-	xorl	4(%ebp,%esi,8),%ebx
-	shrl	$16,%ecx
-	movzbl	%dl,%esi
-	movl	(%ebp,%esi,8),%eax
-	movzbl	%ch,%esi
-	xorl	(%ebp,%esi,8),%ebx
-	movzbl	%dh,%esi
-	xorl	4(%ebp,%esi,8),%eax
-	shrl	$16,%edx
-	movzbl	%cl,%ecx
-	xorl	2048(%ebp,%ecx,8),%ebx
-	movzbl	%dh,%esi
-	movl	4(%esp),%ecx
-	xorl	%ebx,%eax
-	rorl	$8,%ebx
-	xorl	2048(%ebp,%esi,8),%eax
-	movzbl	%dl,%esi
-	movl	(%esp),%edx
-	xorl	%ecx,%ebx
-	xorl	2052(%ebp,%esi,8),%eax
-	movl	32(%edi),%esi
-	xorl	%eax,%ebx
-	movl	%ebx,4(%esp)
-	xorl	%edx,%eax
-	movl	%eax,(%esp)
-	movl	8(%esp),%ecx
-	movl	12(%esp),%edx
-	movl	36(%esp),%esi
-	cmpl	$128,%esi
-	jne	.L0122nd256
-	movl	44(%esp),%edi
-	leal	128(%edi),%edi
-	movl	%eax,-112(%edi)
-	movl	%ebx,-108(%edi)
-	movl	%ecx,-104(%edi)
-	movl	%edx,-100(%edi)
-	movl	%eax,%ebp
-	shll	$15,%eax
-	movl	%ebx,%esi
-	shrl	$17,%esi
-	shll	$15,%ebx
-	orl	%esi,%eax
-	movl	%ecx,%esi
-	shll	$15,%ecx
-	movl	%eax,-80(%edi)
-	shrl	$17,%esi
-	orl	%esi,%ebx
-	shrl	$17,%ebp
-	movl	%edx,%esi
-	shrl	$17,%esi
-	movl	%ebx,-76(%edi)
-	shll	$15,%edx
-	orl	%esi,%ecx
-	orl	%ebp,%edx
-	movl	%ecx,-72(%edi)
-	movl	%edx,-68(%edi)
-	movl	%eax,%ebp
-	shll	$15,%eax
-	movl	%ebx,%esi
-	shrl	$17,%esi
-	shll	$15,%ebx
-	orl	%esi,%eax
-	movl	%ecx,%esi
-	shll	$15,%ecx
-	movl	%eax,-64(%edi)
-	shrl	$17,%esi
-	orl	%esi,%ebx
-	shrl	$17,%ebp
-	movl	%edx,%esi
-	shrl	$17,%esi
-	movl	%ebx,-60(%edi)
-	shll	$15,%edx
-	orl	%esi,%ecx
-	orl	%ebp,%edx
-	movl	%ecx,-56(%edi)
-	movl	%edx,-52(%edi)
-	movl	%eax,%ebp
-	shll	$15,%eax
-	movl	%ebx,%esi
-	shrl	$17,%esi
-	shll	$15,%ebx
-	orl	%esi,%eax
-	movl	%ecx,%esi
-	shll	$15,%ecx
-	movl	%eax,-32(%edi)
-	shrl	$17,%esi
-	orl	%esi,%ebx
-	shrl	$17,%ebp
-	movl	%edx,%esi
-	shrl	$17,%esi
-	movl	%ebx,-28(%edi)
-	shll	$15,%edx
-	orl	%esi,%ecx
-	orl	%ebp,%edx
-	movl	%eax,%ebp
-	shll	$15,%eax
-	movl	%ebx,%esi
-	shrl	$17,%esi
-	shll	$15,%ebx
-	orl	%esi,%eax
-	movl	%ecx,%esi
-	shll	$15,%ecx
-	movl	%eax,-16(%edi)
-	shrl	$17,%esi
-	orl	%esi,%ebx
-	shrl	$17,%ebp
-	movl	%edx,%esi
-	shrl	$17,%esi
-	movl	%ebx,-12(%edi)
-	shll	$15,%edx
-	orl	%esi,%ecx
-	orl	%ebp,%edx
-	movl	%ecx,-8(%edi)
-	movl	%edx,-4(%edi)
-	movl	%ebx,%ebp
-	shll	$2,%ebx
-	movl	%ecx,%esi
-	shrl	$30,%esi
-	shll	$2,%ecx
-	orl	%esi,%ebx
-	movl	%edx,%esi
-	shll	$2,%edx
-	movl	%ebx,32(%edi)
-	shrl	$30,%esi
-	orl	%esi,%ecx
-	shrl	$30,%ebp
-	movl	%eax,%esi
-	shrl	$30,%esi
-	movl	%ecx,36(%edi)
-	shll	$2,%eax
-	orl	%esi,%edx
-	orl	%ebp,%eax
-	movl	%edx,40(%edi)
-	movl	%eax,44(%edi)
-	movl	%ebx,%ebp
-	shll	$17,%ebx
-	movl	%ecx,%esi
-	shrl	$15,%esi
-	shll	$17,%ecx
-	orl	%esi,%ebx
-	movl	%edx,%esi
-	shll	$17,%edx
-	movl	%ebx,64(%edi)
-	shrl	$15,%esi
-	orl	%esi,%ecx
-	shrl	$15,%ebp
-	movl	%eax,%esi
-	shrl	$15,%esi
-	movl	%ecx,68(%edi)
-	shll	$17,%eax
-	orl	%esi,%edx
-	orl	%ebp,%eax
-	movl	%edx,72(%edi)
-	movl	%eax,76(%edi)
-	movl	-128(%edi),%ebx
-	movl	-124(%edi),%ecx
-	movl	-120(%edi),%edx
-	movl	-116(%edi),%eax
-	movl	%ebx,%ebp
-	shll	$15,%ebx
-	movl	%ecx,%esi
-	shrl	$17,%esi
-	shll	$15,%ecx
-	orl	%esi,%ebx
-	movl	%edx,%esi
-	shll	$15,%edx
-	movl	%ebx,-96(%edi)
-	shrl	$17,%esi
-	orl	%esi,%ecx
-	shrl	$17,%ebp
-	movl	%eax,%esi
-	shrl	$17,%esi
-	movl	%ecx,-92(%edi)
-	shll	$15,%eax
-	orl	%esi,%edx
-	orl	%ebp,%eax
-	movl	%edx,-88(%edi)
-	movl	%eax,-84(%edi)
-	movl	%ebx,%ebp
-	shll	$30,%ebx
-	movl	%ecx,%esi
-	shrl	$2,%esi
-	shll	$30,%ecx
-	orl	%esi,%ebx
-	movl	%edx,%esi
-	shll	$30,%edx
-	movl	%ebx,-48(%edi)
-	shrl	$2,%esi
-	orl	%esi,%ecx
-	shrl	$2,%ebp
-	movl	%eax,%esi
-	shrl	$2,%esi
-	movl	%ecx,-44(%edi)
-	shll	$30,%eax
-	orl	%esi,%edx
-	orl	%ebp,%eax
-	movl	%edx,-40(%edi)
-	movl	%eax,-36(%edi)
-	movl	%ebx,%ebp
-	shll	$15,%ebx
-	movl	%ecx,%esi
-	shrl	$17,%esi
-	shll	$15,%ecx
-	orl	%esi,%ebx
-	movl	%edx,%esi
-	shll	$15,%edx
-	shrl	$17,%esi
-	orl	%esi,%ecx
-	shrl	$17,%ebp
-	movl	%eax,%esi
-	shrl	$17,%esi
-	shll	$15,%eax
-	orl	%esi,%edx
-	orl	%ebp,%eax
-	movl	%edx,-24(%edi)
-	movl	%eax,-20(%edi)
-	movl	%ebx,%ebp
-	shll	$17,%ebx
-	movl	%ecx,%esi
-	shrl	$15,%esi
-	shll	$17,%ecx
-	orl	%esi,%ebx
-	movl	%edx,%esi
-	shll	$17,%edx
-	movl	%ebx,(%edi)
-	shrl	$15,%esi
-	orl	%esi,%ecx
-	shrl	$15,%ebp
-	movl	%eax,%esi
-	shrl	$15,%esi
-	movl	%ecx,4(%edi)
-	shll	$17,%eax
-	orl	%esi,%edx
-	orl	%ebp,%eax
-	movl	%edx,8(%edi)
-	movl	%eax,12(%edi)
-	movl	%ebx,%ebp
-	shll	$17,%ebx
-	movl	%ecx,%esi
-	shrl	$15,%esi
-	shll	$17,%ecx
-	orl	%esi,%ebx
-	movl	%edx,%esi
-	shll	$17,%edx
-	movl	%ebx,16(%edi)
-	shrl	$15,%esi
-	orl	%esi,%ecx
-	shrl	$15,%ebp
-	movl	%eax,%esi
-	shrl	$15,%esi
-	movl	%ecx,20(%edi)
-	shll	$17,%eax
-	orl	%esi,%edx
-	orl	%ebp,%eax
-	movl	%edx,24(%edi)
-	movl	%eax,28(%edi)
-	movl	%ebx,%ebp
-	shll	$17,%ebx
-	movl	%ecx,%esi
-	shrl	$15,%esi
-	shll	$17,%ecx
-	orl	%esi,%ebx
-	movl	%edx,%esi
-	shll	$17,%edx
-	movl	%ebx,48(%edi)
-	shrl	$15,%esi
-	orl	%esi,%ecx
-	shrl	$15,%ebp
-	movl	%eax,%esi
-	shrl	$15,%esi
-	movl	%ecx,52(%edi)
-	shll	$17,%eax
-	orl	%esi,%edx
-	orl	%ebp,%eax
-	movl	%edx,56(%edi)
-	movl	%eax,60(%edi)
-	movl	$3,%eax
-	jmp	.L013done
-.align	16
-.L0122nd256:
-	movl	44(%esp),%esi
-	movl	%eax,48(%esi)
-	movl	%ebx,52(%esi)
-	movl	%ecx,56(%esi)
-	movl	%edx,60(%esi)
-	xorl	32(%esi),%eax
-	xorl	36(%esi),%ebx
-	xorl	40(%esi),%ecx
-	xorl	44(%esi),%edx
-	movl	32(%edi),%esi
-	movl	%eax,(%esp)
-	movl	%ebx,4(%esp)
-	movl	%ecx,8(%esp)
-	movl	%edx,12(%esp)
-	xorl	%esi,%eax
-	xorl	36(%edi),%ebx
-	movzbl	%ah,%esi
-	movl	2052(%ebp,%esi,8),%edx
-	movzbl	%al,%esi
-	xorl	4(%ebp,%esi,8),%edx
-	shrl	$16,%eax
-	movzbl	%bl,%esi
-	movl	(%ebp,%esi,8),%ecx
-	movzbl	%ah,%esi
-	xorl	(%ebp,%esi,8),%edx
-	movzbl	%bh,%esi
-	xorl	4(%ebp,%esi,8),%ecx
-	shrl	$16,%ebx
-	movzbl	%al,%eax
-	xorl	2048(%ebp,%eax,8),%edx
-	movzbl	%bh,%esi
-	movl	12(%esp),%eax
-	xorl	%edx,%ecx
-	rorl	$8,%edx
-	xorl	2048(%ebp,%esi,8),%ecx
-	movzbl	%bl,%esi
-	movl	8(%esp),%ebx
-	xorl	%eax,%edx
-	xorl	2052(%ebp,%esi,8),%ecx
-	movl	40(%edi),%esi
-	xorl	%ecx,%edx
-	movl	%edx,12(%esp)
-	xorl	%ebx,%ecx
-	movl	%ecx,8(%esp)
-	xorl	%esi,%ecx
-	xorl	44(%edi),%edx
-	movzbl	%ch,%esi
-	movl	2052(%ebp,%esi,8),%ebx
-	movzbl	%cl,%esi
-	xorl	4(%ebp,%esi,8),%ebx
-	shrl	$16,%ecx
-	movzbl	%dl,%esi
-	movl	(%ebp,%esi,8),%eax
-	movzbl	%ch,%esi
-	xorl	(%ebp,%esi,8),%ebx
-	movzbl	%dh,%esi
-	xorl	4(%ebp,%esi,8),%eax
-	shrl	$16,%edx
-	movzbl	%cl,%ecx
-	xorl	2048(%ebp,%ecx,8),%ebx
-	movzbl	%dh,%esi
-	movl	4(%esp),%ecx
-	xorl	%ebx,%eax
-	rorl	$8,%ebx
-	xorl	2048(%ebp,%esi,8),%eax
-	movzbl	%dl,%esi
-	movl	(%esp),%edx
-	xorl	%ecx,%ebx
-	xorl	2052(%ebp,%esi,8),%eax
-	movl	48(%edi),%esi
-	xorl	%eax,%ebx
-	movl	%ebx,4(%esp)
-	xorl	%edx,%eax
-	movl	%eax,(%esp)
-	movl	8(%esp),%ecx
-	movl	12(%esp),%edx
-	movl	44(%esp),%edi
-	leal	128(%edi),%edi
-	movl	%eax,-112(%edi)
-	movl	%ebx,-108(%edi)
-	movl	%ecx,-104(%edi)
-	movl	%edx,-100(%edi)
-	movl	%eax,%ebp
-	shll	$30,%eax
-	movl	%ebx,%esi
-	shrl	$2,%esi
-	shll	$30,%ebx
-	orl	%esi,%eax
-	movl	%ecx,%esi
-	shll	$30,%ecx
-	movl	%eax,-48(%edi)
-	shrl	$2,%esi
-	orl	%esi,%ebx
-	shrl	$2,%ebp
-	movl	%edx,%esi
-	shrl	$2,%esi
-	movl	%ebx,-44(%edi)
-	shll	$30,%edx
-	orl	%esi,%ecx
-	orl	%ebp,%edx
-	movl	%ecx,-40(%edi)
-	movl	%edx,-36(%edi)
-	movl	%eax,%ebp
-	shll	$30,%eax
-	movl	%ebx,%esi
-	shrl	$2,%esi
-	shll	$30,%ebx
-	orl	%esi,%eax
-	movl	%ecx,%esi
-	shll	$30,%ecx
-	movl	%eax,32(%edi)
-	shrl	$2,%esi
-	orl	%esi,%ebx
-	shrl	$2,%ebp
-	movl	%edx,%esi
-	shrl	$2,%esi
-	movl	%ebx,36(%edi)
-	shll	$30,%edx
-	orl	%esi,%ecx
-	orl	%ebp,%edx
-	movl	%ecx,40(%edi)
-	movl	%edx,44(%edi)
-	movl	%ebx,%ebp
-	shll	$19,%ebx
-	movl	%ecx,%esi
-	shrl	$13,%esi
-	shll	$19,%ecx
-	orl	%esi,%ebx
-	movl	%edx,%esi
-	shll	$19,%edx
-	movl	%ebx,128(%edi)
-	shrl	$13,%esi
-	orl	%esi,%ecx
-	shrl	$13,%ebp
-	movl	%eax,%esi
-	shrl	$13,%esi
-	movl	%ecx,132(%edi)
-	shll	$19,%eax
-	orl	%esi,%edx
-	orl	%ebp,%eax
-	movl	%edx,136(%edi)
-	movl	%eax,140(%edi)
-	movl	-96(%edi),%ebx
-	movl	-92(%edi),%ecx
-	movl	-88(%edi),%edx
-	movl	-84(%edi),%eax
-	movl	%ebx,%ebp
-	shll	$15,%ebx
-	movl	%ecx,%esi
-	shrl	$17,%esi
-	shll	$15,%ecx
-	orl	%esi,%ebx
-	movl	%edx,%esi
-	shll	$15,%edx
-	movl	%ebx,-96(%edi)
-	shrl	$17,%esi
-	orl	%esi,%ecx
-	shrl	$17,%ebp
-	movl	%eax,%esi
-	shrl	$17,%esi
-	movl	%ecx,-92(%edi)
-	shll	$15,%eax
-	orl	%esi,%edx
-	orl	%ebp,%eax
-	movl	%edx,-88(%edi)
-	movl	%eax,-84(%edi)
-	movl	%ebx,%ebp
-	shll	$15,%ebx
-	movl	%ecx,%esi
-	shrl	$17,%esi
-	shll	$15,%ecx
-	orl	%esi,%ebx
-	movl	%edx,%esi
-	shll	$15,%edx
-	movl	%ebx,-64(%edi)
-	shrl	$17,%esi
-	orl	%esi,%ecx
-	shrl	$17,%ebp
-	movl	%eax,%esi
-	shrl	$17,%esi
-	movl	%ecx,-60(%edi)
-	shll	$15,%eax
-	orl	%esi,%edx
-	orl	%ebp,%eax
-	movl	%edx,-56(%edi)
-	movl	%eax,-52(%edi)
-	movl	%ebx,%ebp
-	shll	$30,%ebx
-	movl	%ecx,%esi
-	shrl	$2,%esi
-	shll	$30,%ecx
-	orl	%esi,%ebx
-	movl	%edx,%esi
-	shll	$30,%edx
-	movl	%ebx,16(%edi)
-	shrl	$2,%esi
-	orl	%esi,%ecx
-	shrl	$2,%ebp
-	movl	%eax,%esi
-	shrl	$2,%esi
-	movl	%ecx,20(%edi)
-	shll	$30,%eax
-	orl	%esi,%edx
-	orl	%ebp,%eax
-	movl	%edx,24(%edi)
-	movl	%eax,28(%edi)
-	movl	%ecx,%ebp
-	shll	$2,%ecx
-	movl	%edx,%esi
-	shrl	$30,%esi
-	shll	$2,%edx
-	orl	%esi,%ecx
-	movl	%eax,%esi
-	shll	$2,%eax
-	movl	%ecx,80(%edi)
-	shrl	$30,%esi
-	orl	%esi,%edx
-	shrl	$30,%ebp
-	movl	%ebx,%esi
-	shrl	$30,%esi
-	movl	%edx,84(%edi)
-	shll	$2,%ebx
-	orl	%esi,%eax
-	orl	%ebp,%ebx
-	movl	%eax,88(%edi)
-	movl	%ebx,92(%edi)
-	movl	-80(%edi),%ecx
-	movl	-76(%edi),%edx
-	movl	-72(%edi),%eax
-	movl	-68(%edi),%ebx
-	movl	%ecx,%ebp
-	shll	$15,%ecx
-	movl	%edx,%esi
-	shrl	$17,%esi
-	shll	$15,%edx
-	orl	%esi,%ecx
-	movl	%eax,%esi
-	shll	$15,%eax
-	movl	%ecx,-80(%edi)
-	shrl	$17,%esi
-	orl	%esi,%edx
-	shrl	$17,%ebp
-	movl	%ebx,%esi
-	shrl	$17,%esi
-	movl	%edx,-76(%edi)
-	shll	$15,%ebx
-	orl	%esi,%eax
-	orl	%ebp,%ebx
-	movl	%eax,-72(%edi)
-	movl	%ebx,-68(%edi)
-	movl	%ecx,%ebp
-	shll	$30,%ecx
-	movl	%edx,%esi
-	shrl	$2,%esi
-	shll	$30,%edx
-	orl	%esi,%ecx
-	movl	%eax,%esi
-	shll	$30,%eax
-	movl	%ecx,-16(%edi)
-	shrl	$2,%esi
-	orl	%esi,%edx
-	shrl	$2,%ebp
-	movl	%ebx,%esi
-	shrl	$2,%esi
-	movl	%edx,-12(%edi)
-	shll	$30,%ebx
-	orl	%esi,%eax
-	orl	%ebp,%ebx
-	movl	%eax,-8(%edi)
-	movl	%ebx,-4(%edi)
-	movl	%edx,64(%edi)
-	movl	%eax,68(%edi)
-	movl	%ebx,72(%edi)
-	movl	%ecx,76(%edi)
-	movl	%edx,%ebp
-	shll	$17,%edx
-	movl	%eax,%esi
-	shrl	$15,%esi
-	shll	$17,%eax
-	orl	%esi,%edx
-	movl	%ebx,%esi
-	shll	$17,%ebx
-	movl	%edx,96(%edi)
-	shrl	$15,%esi
-	orl	%esi,%eax
-	shrl	$15,%ebp
-	movl	%ecx,%esi
-	shrl	$15,%esi
-	movl	%eax,100(%edi)
-	shll	$17,%ecx
-	orl	%esi,%ebx
-	orl	%ebp,%ecx
-	movl	%ebx,104(%edi)
-	movl	%ecx,108(%edi)
-	movl	-128(%edi),%edx
-	movl	-124(%edi),%eax
-	movl	-120(%edi),%ebx
-	movl	-116(%edi),%ecx
-	movl	%eax,%ebp
-	shll	$13,%eax
-	movl	%ebx,%esi
-	shrl	$19,%esi
-	shll	$13,%ebx
-	orl	%esi,%eax
-	movl	%ecx,%esi
-	shll	$13,%ecx
-	movl	%eax,-32(%edi)
-	shrl	$19,%esi
-	orl	%esi,%ebx
-	shrl	$19,%ebp
-	movl	%edx,%esi
-	shrl	$19,%esi
-	movl	%ebx,-28(%edi)
-	shll	$13,%edx
-	orl	%esi,%ecx
-	orl	%ebp,%edx
-	movl	%ecx,-24(%edi)
-	movl	%edx,-20(%edi)
-	movl	%eax,%ebp
-	shll	$15,%eax
-	movl	%ebx,%esi
-	shrl	$17,%esi
-	shll	$15,%ebx
-	orl	%esi,%eax
-	movl	%ecx,%esi
-	shll	$15,%ecx
-	movl	%eax,(%edi)
-	shrl	$17,%esi
-	orl	%esi,%ebx
-	shrl	$17,%ebp
-	movl	%edx,%esi
-	shrl	$17,%esi
-	movl	%ebx,4(%edi)
-	shll	$15,%edx
-	orl	%esi,%ecx
-	orl	%ebp,%edx
-	movl	%ecx,8(%edi)
-	movl	%edx,12(%edi)
-	movl	%eax,%ebp
-	shll	$17,%eax
-	movl	%ebx,%esi
-	shrl	$15,%esi
-	shll	$17,%ebx
-	orl	%esi,%eax
-	movl	%ecx,%esi
-	shll	$17,%ecx
-	movl	%eax,48(%edi)
-	shrl	$15,%esi
-	orl	%esi,%ebx
-	shrl	$15,%ebp
-	movl	%edx,%esi
-	shrl	$15,%esi
-	movl	%ebx,52(%edi)
-	shll	$17,%edx
-	orl	%esi,%ecx
-	orl	%ebp,%edx
-	movl	%ecx,56(%edi)
-	movl	%edx,60(%edi)
-	movl	%ebx,%ebp
-	shll	$2,%ebx
-	movl	%ecx,%esi
-	shrl	$30,%esi
-	shll	$2,%ecx
-	orl	%esi,%ebx
-	movl	%edx,%esi
-	shll	$2,%edx
-	movl	%ebx,112(%edi)
-	shrl	$30,%esi
-	orl	%esi,%ecx
-	shrl	$30,%ebp
-	movl	%eax,%esi
-	shrl	$30,%esi
-	movl	%ecx,116(%edi)
-	shll	$2,%eax
-	orl	%esi,%edx
-	orl	%ebp,%eax
-	movl	%edx,120(%edi)
-	movl	%eax,124(%edi)
-	movl	$4,%eax
-.L013done:
-	leal	144(%edi),%edx
-	addl	$16,%esp
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	Camellia_Ekeygen,.-.L_Camellia_Ekeygen_begin
-.globl	private_Camellia_set_key
-.type	private_Camellia_set_key,@function
-.align	16
-private_Camellia_set_key:
-.L_private_Camellia_set_key_begin:
-	pushl	%ebx
-	movl	8(%esp),%ecx
-	movl	12(%esp),%ebx
-	movl	16(%esp),%edx
-	movl	$-1,%eax
-	testl	%ecx,%ecx
-	jz	.L014done
-	testl	%edx,%edx
-	jz	.L014done
-	movl	$-2,%eax
-	cmpl	$256,%ebx
-	je	.L015arg_ok
-	cmpl	$192,%ebx
-	je	.L015arg_ok
-	cmpl	$128,%ebx
-	jne	.L014done
-.align	4
-.L015arg_ok:
-	pushl	%edx
-	pushl	%ecx
-	pushl	%ebx
-	call	.L_Camellia_Ekeygen_begin
-	addl	$12,%esp
-	movl	%eax,(%edx)
-	xorl	%eax,%eax
-.align	4
-.L014done:
-	popl	%ebx
-	ret
-.size	private_Camellia_set_key,.-.L_private_Camellia_set_key_begin
-.align	64
-.LCamellia_SIGMA:
-.long	2694735487,1003262091,3061508184,1286239154,3337565999,3914302142,1426019237,4057165596,283453434,3731369245,2958461122,3018244605,0,0,0,0
-.align	64
-.LCamellia_SBOX:
-.long	1886416896,1886388336
-.long	2189591040,741081132
-.long	741092352,3014852787
-.long	3974949888,3233808576
-.long	3014898432,3840147684
-.long	656877312,1465319511
-.long	3233857536,3941204202
-.long	3857048832,2930639022
-.long	3840205824,589496355
-.long	2240120064,1802174571
-.long	1465341696,1162149957
-.long	892679424,2779054245
-.long	3941263872,3991732461
-.long	202116096,1330577487
-.long	2930683392,488439837
-.long	1094795520,2459041938
-.long	589505280,2256928902
-.long	4025478912,2947481775
-.long	1802201856,2088501372
-.long	2475922176,522125343
-.long	1162167552,1044250686
-.long	421075200,3705405660
-.long	2779096320,1583218782
-.long	555819264,185270283
-.long	3991792896,2795896998
-.long	235802112,960036921
-.long	1330597632,3587506389
-.long	1313754624,1566376029
-.long	488447232,3654877401
-.long	1701143808,1515847770
-.long	2459079168,1364262993
-.long	3183328512,1819017324
-.long	2256963072,2341142667
-.long	3099113472,2593783962
-.long	2947526400,4227531003
-.long	2408550144,2964324528
-.long	2088532992,1953759348
-.long	3958106880,724238379
-.long	522133248,4042260720
-.long	3469659648,2223243396
-.long	1044266496,3755933919
-.long	808464384,3419078859
-.long	3705461760,875823156
-.long	1600085760,1987444854
-.long	1583242752,1835860077
-.long	3318072576,2846425257
-.long	185273088,3520135377
-.long	437918208,67371012
-.long	2795939328,336855060
-.long	3789676800,976879674
-.long	960051456,3739091166
-.long	3402287616,286326801
-.long	3587560704,842137650
-.long	1195853568,2627469468
-.long	1566399744,1397948499
-.long	1027423488,4075946226
-.long	3654932736,4278059262
-.long	16843008,3486449871
-.long	1515870720,3284336835
-.long	3604403712,2054815866
-.long	1364283648,606339108
-.long	1448498688,3907518696
-.long	1819044864,1616904288
-.long	1296911616,1768489065
-.long	2341178112,2863268010
-.long	218959104,2694840480
-.long	2593823232,2711683233
-.long	1717986816,1650589794
-.long	4227595008,1414791252
-.long	3435973632,505282590
-.long	2964369408,3772776672
-.long	757935360,1684275300
-.long	1953788928,269484048
-.long	303174144,0
-.long	724249344,2745368739
-.long	538976256,1970602101
-.long	4042321920,2324299914
-.long	2981212416,3873833190
-.long	2223277056,151584777
-.long	2576980224,3722248413
-.long	3755990784,2273771655
-.long	1280068608,2206400643
-.long	3419130624,3452764365
-.long	3267543552,2425356432
-.long	875836416,1936916595
-.long	2122219008,4143317238
-.long	1987474944,2644312221
-.long	84215040,3216965823
-.long	1835887872,1381105746
-.long	3082270464,3638034648
-.long	2846468352,3368550600
-.long	825307392,3334865094
-.long	3520188672,2172715137
-.long	387389184,1869545583
-.long	67372032,320012307
-.long	3621246720,1667432547
-.long	336860160,3924361449
-.long	1482184704,2812739751
-.long	976894464,2677997727
-.long	1633771776,3166437564
-.long	3739147776,690552873
-.long	454761216,4193845497
-.long	286331136,791609391
-.long	471604224,3031695540
-.long	842150400,2021130360
-.long	252645120,101056518
-.long	2627509248,3890675943
-.long	370546176,1903231089
-.long	1397969664,3570663636
-.long	404232192,2880110763
-.long	4076007936,2290614408
-.long	572662272,2374828173
-.long	4278124032,1920073842
-.long	1145324544,3115909305
-.long	3486502656,4177002744
-.long	2998055424,2896953516
-.long	3284386560,909508662
-.long	3048584448,707395626
-.long	2054846976,1010565180
-.long	2442236160,4059103473
-.long	606348288,1077936192
-.long	134744064,3553820883
-.long	3907577856,3149594811
-.long	2829625344,1128464451
-.long	1616928768,353697813
-.long	4244438016,2913796269
-.long	1768515840,2004287607
-.long	1347440640,2155872384
-.long	2863311360,2189557890
-.long	3503345664,3974889708
-.long	2694881280,656867367
-.long	2105376000,3856990437
-.long	2711724288,2240086149
-.long	2307492096,892665909
-.long	1650614784,202113036
-.long	2543294208,1094778945
-.long	1414812672,4025417967
-.long	1532713728,2475884691
-.long	505290240,421068825
-.long	2509608192,555810849
-.long	3772833792,235798542
-.long	4294967040,1313734734
-.long	1684300800,1701118053
-.long	3537031680,3183280317
-.long	269488128,3099066552
-.long	3301229568,2408513679
-.long	0,3958046955
-.long	1212696576,3469607118
-.long	2745410304,808452144
-.long	4160222976,1600061535
-.long	1970631936,3318022341
-.long	3688618752,437911578
-.long	2324335104,3789619425
-.long	50529024,3402236106
-.long	3873891840,1195835463
-.long	3671775744,1027407933
-.long	151587072,16842753
-.long	1061109504,3604349142
-.long	3722304768,1448476758
-.long	2492765184,1296891981
-.long	2273806080,218955789
-.long	1549556736,1717960806
-.long	2206434048,3435921612
-.long	33686016,757923885
-.long	3452816640,303169554
-.long	1246382592,538968096
-.long	2425393152,2981167281
-.long	858993408,2576941209
-.long	1936945920,1280049228
-.long	1734829824,3267494082
-.long	4143379968,2122186878
-.long	4092850944,84213765
-.long	2644352256,3082223799
-.long	2139062016,825294897
-.long	3217014528,387383319
-.long	3806519808,3621191895
-.long	1381126656,1482162264
-.long	2610666240,1633747041
-.long	3638089728,454754331
-.long	640034304,471597084
-.long	3368601600,252641295
-.long	926365440,370540566
-.long	3334915584,404226072
-.long	993737472,572653602
-.long	2172748032,1145307204
-.long	2526451200,2998010034
-.long	1869573888,3048538293
-.long	1263225600,2442199185
-.long	320017152,134742024
-.long	3200171520,2829582504
-.long	1667457792,4244373756
-.long	774778368,1347420240
-.long	3924420864,3503292624
-.long	2038003968,2105344125
-.long	2812782336,2307457161
-.long	2358021120,2543255703
-.long	2678038272,1532690523
-.long	1852730880,2509570197
-.long	3166485504,4294902015
-.long	2391707136,3536978130
-.long	690563328,3301179588
-.long	4126536960,1212678216
-.long	4193908992,4160159991
-.long	3065427456,3688562907
-.long	791621376,50528259
-.long	4261281024,3671720154
-.long	3031741440,1061093439
-.long	1499027712,2492727444
-.long	2021160960,1549533276
-.long	2560137216,33685506
-.long	101058048,1246363722
-.long	1785358848,858980403
-.long	3890734848,1734803559
-.long	1179010560,4092788979
-.long	1903259904,2139029631
-.long	3132799488,3806462178
-.long	3570717696,2610626715
-.long	623191296,640024614
-.long	2880154368,926351415
-.long	1111638528,993722427
-.long	2290649088,2526412950
-.long	2728567296,1263206475
-.long	2374864128,3200123070
-.long	4210752000,774766638
-.long	1920102912,2037973113
-.long	117901056,2357985420
-.long	3115956480,1852702830
-.long	1431655680,2391670926
-.long	4177065984,4126474485
-.long	4008635904,3065381046
-.long	2896997376,4261216509
-.long	168430080,1499005017
-.long	909522432,2560098456
-.long	1229539584,1785331818
-.long	707406336,1178992710
-.long	1751672832,3132752058
-.long	1010580480,623181861
-.long	943208448,1111621698
-.long	4059164928,2728525986
-.long	2762253312,4210688250
-.long	1077952512,117899271
-.long	673720320,1431634005
-.long	3553874688,4008575214
-.long	2071689984,168427530
-.long	3149642496,1229520969
-.long	3385444608,1751646312
-.long	1128481536,943194168
-.long	3250700544,2762211492
-.long	353703168,673710120
-.long	3823362816,2071658619
-.long	2913840384,3385393353
-.long	4109693952,3250651329
-.long	2004317952,3823304931
-.long	3351758592,4109631732
-.long	2155905024,3351707847
-.long	2661195264,2661154974
-.long	14737632,939538488
-.long	328965,1090535745
-.long	5789784,369104406
-.long	14277081,1979741814
-.long	6776679,3640711641
-.long	5131854,2466288531
-.long	8487297,1610637408
-.long	13355979,4060148466
-.long	13224393,1912631922
-.long	723723,3254829762
-.long	11447982,2868947883
-.long	6974058,2583730842
-.long	14013909,1962964341
-.long	1579032,100664838
-.long	6118749,1459640151
-.long	8553090,2684395680
-.long	4605510,2432733585
-.long	14671839,4144035831
-.long	14079702,3036722613
-.long	2565927,3372272073
-.long	9079434,2717950626
-.long	3289650,2348846220
-.long	4934475,3523269330
-.long	4342338,2415956112
-.long	14408667,4127258358
-.long	1842204,117442311
-.long	10395294,2801837991
-.long	10263708,654321447
-.long	3815994,2382401166
-.long	13290186,2986390194
-.long	2434341,1224755529
-.long	8092539,3724599006
-.long	855309,1124090691
-.long	7434609,1543527516
-.long	6250335,3607156695
-.long	2039583,3338717127
-.long	16316664,1040203326
-.long	14145495,4110480885
-.long	4079166,2399178639
-.long	10329501,1728079719
-.long	8158332,520101663
-.long	6316128,402659352
-.long	12171705,1845522030
-.long	12500670,2936057775
-.long	12369084,788541231
-.long	9145227,3791708898
-.long	1447446,2231403909
-.long	3421236,218107149
-.long	5066061,1392530259
-.long	12829635,4026593520
-.long	7500402,2617285788
-.long	9803157,1694524773
-.long	11250603,3925928682
-.long	9342606,2734728099
-.long	12237498,2919280302
-.long	8026746,2650840734
-.long	11776947,3959483628
-.long	131586,2147516544
-.long	11842740,754986285
-.long	11382189,1795189611
-.long	10658466,2818615464
-.long	11316396,721431339
-.long	14211288,905983542
-.long	10132122,2785060518
-.long	1513239,3305162181
-.long	1710618,2248181382
-.long	3487029,1291865421
-.long	13421772,855651123
-.long	16250871,4244700669
-.long	10066329,1711302246
-.long	6381921,1476417624
-.long	5921370,2516620950
-.long	15263976,973093434
-.long	2368548,150997257
-.long	5658198,2499843477
-.long	4210752,268439568
-.long	14803425,2013296760
-.long	6513507,3623934168
-.long	592137,1107313218
-.long	3355443,3422604492
-.long	12566463,4009816047
-.long	10000536,637543974
-.long	9934743,3842041317
-.long	8750469,1627414881
-.long	6842472,436214298
-.long	16579836,1056980799
-.long	15527148,989870907
-.long	657930,2181071490
-.long	14342874,3053500086
-.long	7303023,3674266587
-.long	5460819,3556824276
-.long	6447714,2550175896
-.long	10724259,3892373736
-.long	3026478,2332068747
-.long	526344,33554946
-.long	11513775,3942706155
-.long	2631720,167774730
-.long	11579568,738208812
-.long	7631988,486546717
-.long	12763842,2952835248
-.long	12434877,1862299503
-.long	3552822,2365623693
-.long	2236962,2281736328
-.long	3684408,234884622
-.long	6579300,419436825
-.long	1973790,2264958855
-.long	3750201,1308642894
-.long	2894892,184552203
-.long	10921638,2835392937
-.long	3158064,201329676
-.long	15066597,2030074233
-.long	4473924,285217041
-.long	16645629,2130739071
-.long	8947848,570434082
-.long	10461087,3875596263
-.long	6645093,1493195097
-.long	8882055,3774931425
-.long	7039851,3657489114
-.long	16053492,1023425853
-.long	2302755,3355494600
-.long	4737096,301994514
-.long	1052688,67109892
-.long	13750737,1946186868
-.long	5329233,1409307732
-.long	12632256,805318704
-.long	16382457,2113961598
-.long	13816530,3019945140
-.long	10526880,671098920
-.long	5592405,1426085205
-.long	10592673,1744857192
-.long	4276545,1342197840
-.long	16448250,3187719870
-.long	4408131,3489714384
-.long	1250067,3288384708
-.long	12895428,822096177
-.long	3092271,3405827019
-.long	11053224,704653866
-.long	11974326,2902502829
-.long	3947580,251662095
-.long	2829099,3389049546
-.long	12698049,1879076976
-.long	16777215,4278255615
-.long	13158600,838873650
-.long	10855845,1761634665
-.long	2105376,134219784
-.long	9013641,1644192354
-.long	0,0
-.long	9474192,603989028
-.long	4671303,3506491857
-.long	15724527,4211145723
-.long	15395562,3120609978
-.long	12040119,3976261101
-.long	1381653,1157645637
-.long	394758,2164294017
-.long	13487565,1929409395
-.long	11908533,1828744557
-.long	1184274,2214626436
-.long	8289918,2667618207
-.long	12303291,3993038574
-.long	2697513,1241533002
-.long	986895,3271607235
-.long	12105912,771763758
-.long	460551,3238052289
-.long	263172,16777473
-.long	10197915,3858818790
-.long	9737364,620766501
-.long	2171169,1207978056
-.long	6710886,2566953369
-.long	15132390,3103832505
-.long	13553358,3003167667
-.long	15592941,2063629179
-.long	15198183,4177590777
-.long	3881787,3456159438
-.long	16711422,3204497343
-.long	8355711,3741376479
-.long	12961221,1895854449
-.long	10790052,687876393
-.long	3618615,3439381965
-.long	11645361,1811967084
-.long	5000268,318771987
-.long	9539985,1677747300
-.long	7237230,2600508315
-.long	9276813,1660969827
-.long	7763574,2634063261
-.long	197379,3221274816
-.long	2960685,1258310475
-.long	14606046,3070277559
-.long	9868950,2768283045
-.long	2500134,2298513801
-.long	8224125,1593859935
-.long	13027014,2969612721
-.long	6052956,385881879
-.long	13882323,4093703412
-.long	15921906,3154164924
-.long	5197647,3540046803
-.long	1644825,1174423110
-.long	4144959,3472936911
-.long	14474460,922761015
-.long	7960953,1577082462
-.long	1907997,1191200583
-.long	5395026,2483066004
-.long	15461355,4194368250
-.long	15987699,4227923196
-.long	7171437,1526750043
-.long	6184542,2533398423
-.long	16514043,4261478142
-.long	6908265,1509972570
-.long	11711154,2885725356
-.long	15790320,1006648380
-.long	3223857,1275087948
-.long	789516,50332419
-.long	13948116,889206069
-.long	13619151,4076925939
-.long	9211020,587211555
-.long	14869218,3087055032
-.long	7697781,1560304989
-.long	11119017,1778412138
-.long	4868682,2449511058
-.long	5723991,3573601749
-.long	8684676,553656609
-.long	1118481,1140868164
-.long	4539717,1358975313
-.long	1776411,3321939654
-.long	16119285,2097184125
-.long	15000804,956315961
-.long	921102,2197848963
-.long	7566195,3691044060
-.long	11184810,2852170410
-.long	15856113,2080406652
-.long	14540253,1996519287
-.long	5855577,1442862678
-.long	1315860,83887365
-.long	7105644,452991771
-.long	9605778,2751505572
-.long	5526612,352326933
-.long	13684944,872428596
-.long	7895160,503324190
-.long	7368816,469769244
-.long	14935011,4160813304
-.long	4802889,1375752786
-.long	8421504,536879136
-.long	5263440,335549460
-.long	10987431,3909151209
-.long	16185078,3170942397
-.long	7829367,3707821533
-.long	9671571,3825263844
-.long	8816262,2701173153
-.long	8618883,3758153952
-.long	2763306,2315291274
-.long	13092807,4043370993
-.long	5987163,3590379222
-.long	15329769,2046851706
-.long	15658734,3137387451
-.long	9408399,3808486371
-.long	65793,1073758272
-.long	4013373,1325420367
-.globl	Camellia_cbc_encrypt
-.type	Camellia_cbc_encrypt,@function
-.align	16
-Camellia_cbc_encrypt:
-.L_Camellia_cbc_encrypt_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	28(%esp),%ecx
-	cmpl	$0,%ecx
-	je	.L016enc_out
-	pushfl
-	cld
-	movl	24(%esp),%eax
-	movl	28(%esp),%ebx
-	movl	36(%esp),%edx
-	movl	40(%esp),%ebp
-	leal	-64(%esp),%esi
-	andl	$-64,%esi
-	leal	-127(%edx),%edi
-	subl	%esi,%edi
-	negl	%edi
-	andl	$960,%edi
-	subl	%edi,%esi
-	movl	44(%esp),%edi
-	xchgl	%esi,%esp
-	addl	$4,%esp
-	movl	%esi,20(%esp)
-	movl	%eax,24(%esp)
-	movl	%ebx,28(%esp)
-	movl	%ecx,32(%esp)
-	movl	%edx,36(%esp)
-	movl	%ebp,40(%esp)
-	call	.L017pic_point
-.L017pic_point:
-	popl	%ebp
-	leal	.LCamellia_SBOX-.L017pic_point(%ebp),%ebp
-	movl	$32,%esi
-.align	4
-.L018prefetch_sbox:
-	movl	(%ebp),%eax
-	movl	32(%ebp),%ebx
-	movl	64(%ebp),%ecx
-	movl	96(%ebp),%edx
-	leal	128(%ebp),%ebp
-	decl	%esi
-	jnz	.L018prefetch_sbox
-	movl	36(%esp),%eax
-	subl	$4096,%ebp
-	movl	24(%esp),%esi
-	movl	272(%eax),%edx
-	cmpl	$0,%edi
-	je	.L019DECRYPT
-	movl	32(%esp),%ecx
-	movl	40(%esp),%edi
-	shll	$6,%edx
-	leal	(%eax,%edx,1),%edx
-	movl	%edx,16(%esp)
-	testl	$4294967280,%ecx
-	jz	.L020enc_tail
-	movl	(%edi),%eax
-	movl	4(%edi),%ebx
-.align	4
-.L021enc_loop:
-	movl	8(%edi),%ecx
-	movl	12(%edi),%edx
-	xorl	(%esi),%eax
-	xorl	4(%esi),%ebx
-	xorl	8(%esi),%ecx
-	bswap	%eax
-	xorl	12(%esi),%edx
-	bswap	%ebx
-	movl	36(%esp),%edi
-	bswap	%ecx
-	bswap	%edx
-	call	_x86_Camellia_encrypt
-	movl	24(%esp),%esi
-	movl	28(%esp),%edi
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	movl	%eax,(%edi)
-	bswap	%edx
-	movl	%ebx,4(%edi)
-	movl	%ecx,8(%edi)
-	movl	%edx,12(%edi)
-	movl	32(%esp),%ecx
-	leal	16(%esi),%esi
-	movl	%esi,24(%esp)
-	leal	16(%edi),%edx
-	movl	%edx,28(%esp)
-	subl	$16,%ecx
-	testl	$4294967280,%ecx
-	movl	%ecx,32(%esp)
-	jnz	.L021enc_loop
-	testl	$15,%ecx
-	jnz	.L020enc_tail
-	movl	40(%esp),%esi
-	movl	8(%edi),%ecx
-	movl	12(%edi),%edx
-	movl	%eax,(%esi)
-	movl	%ebx,4(%esi)
-	movl	%ecx,8(%esi)
-	movl	%edx,12(%esi)
-	movl	20(%esp),%esp
-	popfl
-.L016enc_out:
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-	pushfl
-.align	4
-.L020enc_tail:
-	movl	%edi,%eax
-	movl	28(%esp),%edi
-	pushl	%eax
-	movl	$16,%ebx
-	subl	%ecx,%ebx
-	cmpl	%esi,%edi
-	je	.L022enc_in_place
-.align	4
-.long	2767451785
-	jmp	.L023enc_skip_in_place
-.L022enc_in_place:
-	leal	(%edi,%ecx,1),%edi
-.L023enc_skip_in_place:
-	movl	%ebx,%ecx
-	xorl	%eax,%eax
-.align	4
-.long	2868115081
-	popl	%edi
-	movl	28(%esp),%esi
-	movl	(%edi),%eax
-	movl	4(%edi),%ebx
-	movl	$16,32(%esp)
-	jmp	.L021enc_loop
-.align	16
-.L019DECRYPT:
-	shll	$6,%edx
-	leal	(%eax,%edx,1),%edx
-	movl	%eax,16(%esp)
-	movl	%edx,36(%esp)
-	cmpl	28(%esp),%esi
-	je	.L024dec_in_place
-	movl	40(%esp),%edi
-	movl	%edi,44(%esp)
-.align	4
-.L025dec_loop:
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	bswap	%eax
-	movl	12(%esi),%edx
-	bswap	%ebx
-	movl	36(%esp),%edi
-	bswap	%ecx
-	bswap	%edx
-	call	_x86_Camellia_decrypt
-	movl	44(%esp),%edi
-	movl	32(%esp),%esi
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	xorl	(%edi),%eax
-	bswap	%edx
-	xorl	4(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	12(%edi),%edx
-	subl	$16,%esi
-	jc	.L026dec_partial
-	movl	%esi,32(%esp)
-	movl	24(%esp),%esi
-	movl	28(%esp),%edi
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	movl	%ecx,8(%edi)
-	movl	%edx,12(%edi)
-	movl	%esi,44(%esp)
-	leal	16(%esi),%esi
-	movl	%esi,24(%esp)
-	leal	16(%edi),%edi
-	movl	%edi,28(%esp)
-	jnz	.L025dec_loop
-	movl	44(%esp),%edi
-.L027dec_end:
-	movl	40(%esp),%esi
-	movl	(%edi),%eax
-	movl	4(%edi),%ebx
-	movl	8(%edi),%ecx
-	movl	12(%edi),%edx
-	movl	%eax,(%esi)
-	movl	%ebx,4(%esi)
-	movl	%ecx,8(%esi)
-	movl	%edx,12(%esi)
-	jmp	.L028dec_out
-.align	4
-.L026dec_partial:
-	leal	44(%esp),%edi
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	movl	%ecx,8(%edi)
-	movl	%edx,12(%edi)
-	leal	16(%esi),%ecx
-	movl	%edi,%esi
-	movl	28(%esp),%edi
-.long	2767451785
-	movl	24(%esp),%edi
-	jmp	.L027dec_end
-.align	4
-.L024dec_in_place:
-.L029dec_in_place_loop:
-	leal	44(%esp),%edi
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	movl	12(%esi),%edx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	movl	%ecx,8(%edi)
-	bswap	%eax
-	movl	%edx,12(%edi)
-	bswap	%ebx
-	movl	36(%esp),%edi
-	bswap	%ecx
-	bswap	%edx
-	call	_x86_Camellia_decrypt
-	movl	40(%esp),%edi
-	movl	28(%esp),%esi
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	xorl	(%edi),%eax
-	bswap	%edx
-	xorl	4(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	12(%edi),%edx
-	movl	%eax,(%esi)
-	movl	%ebx,4(%esi)
-	movl	%ecx,8(%esi)
-	movl	%edx,12(%esi)
-	leal	16(%esi),%esi
-	movl	%esi,28(%esp)
-	leal	44(%esp),%esi
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	movl	12(%esi),%edx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	movl	%ecx,8(%edi)
-	movl	%edx,12(%edi)
-	movl	24(%esp),%esi
-	leal	16(%esi),%esi
-	movl	%esi,24(%esp)
-	movl	32(%esp),%ecx
-	subl	$16,%ecx
-	jc	.L030dec_in_place_partial
-	movl	%ecx,32(%esp)
-	jnz	.L029dec_in_place_loop
-	jmp	.L028dec_out
-.align	4
-.L030dec_in_place_partial:
-	movl	28(%esp),%edi
-	leal	44(%esp),%esi
-	leal	(%edi,%ecx,1),%edi
-	leal	16(%esi,%ecx,1),%esi
-	negl	%ecx
-.long	2767451785
-.align	4
-.L028dec_out:
-	movl	20(%esp),%esp
-	popfl
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	Camellia_cbc_encrypt,.-.L_Camellia_cbc_encrypt_begin
-.byte	67,97,109,101,108,108,105,97,32,102,111,114,32,120,56,54
-.byte	32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
-.byte	115,108,46,111,114,103,62,0

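The co-586.S file added next carries OpenSSL's unrolled Comba multipliers
(bn_mul_comba8 first). Each mull/addl/adcl/adcl group there folds one
32x32->64-bit partial product into a three-register column accumulator
rotating through %ebx, %ecx and %ebp, emitting one result word per column.
A rough C rendering of the same technique, a sketch assuming 32-bit limbs
rather than code from this commit:

#include <stdint.h>

/* Hypothetical portable equivalent of an 8x8-limb Comba multiply:
 * r[0..15] = a[0..7] * b[0..7], computed column by column. */
void mul_comba8(uint32_t r[16], const uint32_t a[8], const uint32_t b[8])
{
	uint32_t c0 = 0, c1 = 0, c2 = 0;	/* the %ebx/%ecx/%ebp trio */
	int i, k;

	for (k = 0; k < 15; k++) {
		int lo = k < 8 ? 0 : k - 7;
		int hi = k < 8 ? k : 7;
		for (i = lo; i <= hi; i++) {	/* one mull/addl/adcl group */
			uint64_t t = (uint64_t)a[i] * b[k - i];
			uint32_t tl = (uint32_t)t;
			uint32_t th = (uint32_t)(t >> 32);
			c0 += tl;
			th += (c0 < tl);	/* cannot wrap: th <= 0xfffffffe */
			c1 += th;
			c2 += (c1 < th);
		}
		r[k] = c0;			/* emit the finished column */
		c0 = c1; c1 = c2; c2 = 0;
	}
	r[15] = c0;
}
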
Added: trunk/secure/lib/libcrypto/i386/co-586.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/co-586.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/co-586.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,2514 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/co-586.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from co-586.pl.
+#ifdef PIC
+.file	"co-586.S"
+.text
+.globl	bn_mul_comba8
+.type	bn_mul_comba8,@function
+.align	16
+bn_mul_comba8:
+.L_bn_mul_comba8_begin:
+	pushl	%esi
+	movl	12(%esp),%esi
+	pushl	%edi
+	movl	20(%esp),%edi
+	pushl	%ebp
+	pushl	%ebx
+	xorl	%ebx,%ebx
+	movl	(%esi),%eax
+	xorl	%ecx,%ecx
+	movl	(%edi),%edx
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esp),%eax
+	adcl	%edx,%ecx
+	movl	(%edi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,(%eax)
+	movl	4(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	(%esi),%eax
+	adcl	%edx,%ebp
+	movl	4(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	20(%esp),%eax
+	adcl	%edx,%ebp
+	movl	(%edi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,4(%eax)
+	movl	8(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	4(%esi),%eax
+	adcl	%edx,%ebx
+	movl	4(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	(%esi),%eax
+	adcl	%edx,%ebx
+	movl	8(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	20(%esp),%eax
+	adcl	%edx,%ebx
+	movl	(%edi),%edx
+	adcl	$0,%ecx
+	movl	%ebp,8(%eax)
+	movl	12(%esi),%eax
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	8(%esi),%eax
+	adcl	%edx,%ecx
+	movl	4(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	4(%esi),%eax
+	adcl	%edx,%ecx
+	movl	8(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	(%esi),%eax
+	adcl	%edx,%ecx
+	movl	12(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esp),%eax
+	adcl	%edx,%ecx
+	movl	(%edi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,12(%eax)
+	movl	16(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	12(%esi),%eax
+	adcl	%edx,%ebp
+	movl	4(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	8(%esi),%eax
+	adcl	%edx,%ebp
+	movl	8(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	4(%esi),%eax
+	adcl	%edx,%ebp
+	movl	12(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	(%esi),%eax
+	adcl	%edx,%ebp
+	movl	16(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	20(%esp),%eax
+	adcl	%edx,%ebp
+	movl	(%edi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,16(%eax)
+	movl	20(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	16(%esi),%eax
+	adcl	%edx,%ebx
+	movl	4(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	12(%esi),%eax
+	adcl	%edx,%ebx
+	movl	8(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	8(%esi),%eax
+	adcl	%edx,%ebx
+	movl	12(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	4(%esi),%eax
+	adcl	%edx,%ebx
+	movl	16(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	(%esi),%eax
+	adcl	%edx,%ebx
+	movl	20(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	20(%esp),%eax
+	adcl	%edx,%ebx
+	movl	(%edi),%edx
+	adcl	$0,%ecx
+	movl	%ebp,20(%eax)
+	movl	24(%esi),%eax
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esi),%eax
+	adcl	%edx,%ecx
+	movl	4(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	16(%esi),%eax
+	adcl	%edx,%ecx
+	movl	8(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	12(%esi),%eax
+	adcl	%edx,%ecx
+	movl	12(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	8(%esi),%eax
+	adcl	%edx,%ecx
+	movl	16(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	4(%esi),%eax
+	adcl	%edx,%ecx
+	movl	20(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	(%esi),%eax
+	adcl	%edx,%ecx
+	movl	24(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esp),%eax
+	adcl	%edx,%ecx
+	movl	(%edi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,24(%eax)
+	movl	28(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	24(%esi),%eax
+	adcl	%edx,%ebp
+	movl	4(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	20(%esi),%eax
+	adcl	%edx,%ebp
+	movl	8(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	16(%esi),%eax
+	adcl	%edx,%ebp
+	movl	12(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	12(%esi),%eax
+	adcl	%edx,%ebp
+	movl	16(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	8(%esi),%eax
+	adcl	%edx,%ebp
+	movl	20(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	4(%esi),%eax
+	adcl	%edx,%ebp
+	movl	24(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	(%esi),%eax
+	adcl	%edx,%ebp
+	movl	28(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	20(%esp),%eax
+	adcl	%edx,%ebp
+	movl	4(%edi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,28(%eax)
+	movl	28(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	24(%esi),%eax
+	adcl	%edx,%ebx
+	movl	8(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	20(%esi),%eax
+	adcl	%edx,%ebx
+	movl	12(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	16(%esi),%eax
+	adcl	%edx,%ebx
+	movl	16(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	12(%esi),%eax
+	adcl	%edx,%ebx
+	movl	20(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	8(%esi),%eax
+	adcl	%edx,%ebx
+	movl	24(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	4(%esi),%eax
+	adcl	%edx,%ebx
+	movl	28(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	20(%esp),%eax
+	adcl	%edx,%ebx
+	movl	8(%edi),%edx
+	adcl	$0,%ecx
+	movl	%ebp,32(%eax)
+	movl	28(%esi),%eax
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	24(%esi),%eax
+	adcl	%edx,%ecx
+	movl	12(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esi),%eax
+	adcl	%edx,%ecx
+	movl	16(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	16(%esi),%eax
+	adcl	%edx,%ecx
+	movl	20(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	12(%esi),%eax
+	adcl	%edx,%ecx
+	movl	24(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	8(%esi),%eax
+	adcl	%edx,%ecx
+	movl	28(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esp),%eax
+	adcl	%edx,%ecx
+	movl	12(%edi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,36(%eax)
+	movl	28(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	24(%esi),%eax
+	adcl	%edx,%ebp
+	movl	16(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	20(%esi),%eax
+	adcl	%edx,%ebp
+	movl	20(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	16(%esi),%eax
+	adcl	%edx,%ebp
+	movl	24(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	12(%esi),%eax
+	adcl	%edx,%ebp
+	movl	28(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	20(%esp),%eax
+	adcl	%edx,%ebp
+	movl	16(%edi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,40(%eax)
+	movl	28(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	24(%esi),%eax
+	adcl	%edx,%ebx
+	movl	20(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	20(%esi),%eax
+	adcl	%edx,%ebx
+	movl	24(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	16(%esi),%eax
+	adcl	%edx,%ebx
+	movl	28(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	20(%esp),%eax
+	adcl	%edx,%ebx
+	movl	20(%edi),%edx
+	adcl	$0,%ecx
+	movl	%ebp,44(%eax)
+	movl	28(%esi),%eax
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	24(%esi),%eax
+	adcl	%edx,%ecx
+	movl	24(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esi),%eax
+	adcl	%edx,%ecx
+	movl	28(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esp),%eax
+	adcl	%edx,%ecx
+	movl	24(%edi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,48(%eax)
+	movl	28(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	24(%esi),%eax
+	adcl	%edx,%ebp
+	movl	28(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	20(%esp),%eax
+	adcl	%edx,%ebp
+	movl	28(%edi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,52(%eax)
+	movl	28(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	20(%esp),%eax
+	adcl	%edx,%ebx
+	adcl	$0,%ecx
+	movl	%ebp,56(%eax)
+
+
+	movl	%ebx,60(%eax)
+	popl	%ebx
+	popl	%ebp
+	popl	%edi
+	popl	%esi
+	ret
+.size	bn_mul_comba8,.-.L_bn_mul_comba8_begin
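
  [Annotation: every five-instruction group above (mull / addl /
  adcl / adcl $0) folds one 32x32->64-bit partial product into a
  rotating three-word column accumulator; that is the comba,
  column-by-column, multiplication scheme that co-586.pl unrolls.
  A rough C rendering of a single step, in the spirit of the
  mul_add_c pattern in OpenSSL's portable bn code; the helper name
  and typedefs are illustrative, not the shipped macro:

      typedef unsigned int u32;
      typedef unsigned long long u64;

      /* Add a*b into the running column accumulator (c0,c1,c2).
         c0 is the current column, c1/c2 catch the carries, like
         the rotating ebp/ebx/ecx triple in the assembly above. */
      static void mul_add_c(u32 a, u32 b, u32 *c0, u32 *c1, u32 *c2)
      {
          u64 t = (u64)a * b + *c0;   /* mull %edx; addl %eax,c0 */
          *c0 = (u32)t;
          t = (t >> 32) + *c1;        /* adcl %edx,c1            */
          *c1 = (u32)t;
          *c2 += (u32)(t >> 32);      /* adcl $0,c2              */
      }
  ]
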
+.globl	bn_mul_comba4
+.type	bn_mul_comba4,@function
+.align	16
+bn_mul_comba4:
+.L_bn_mul_comba4_begin:
+	pushl	%esi
+	movl	12(%esp),%esi
+	pushl	%edi
+	movl	20(%esp),%edi
+	pushl	%ebp
+	pushl	%ebx
+	xorl	%ebx,%ebx
+	movl	(%esi),%eax
+	xorl	%ecx,%ecx
+	movl	(%edi),%edx
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esp),%eax
+	adcl	%edx,%ecx
+	movl	(%edi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,(%eax)
+	movl	4(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	(%esi),%eax
+	adcl	%edx,%ebp
+	movl	4(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	20(%esp),%eax
+	adcl	%edx,%ebp
+	movl	(%edi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,4(%eax)
+	movl	8(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	4(%esi),%eax
+	adcl	%edx,%ebx
+	movl	4(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	(%esi),%eax
+	adcl	%edx,%ebx
+	movl	8(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	20(%esp),%eax
+	adcl	%edx,%ebx
+	movl	(%edi),%edx
+	adcl	$0,%ecx
+	movl	%ebp,8(%eax)
+	movl	12(%esi),%eax
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	8(%esi),%eax
+	adcl	%edx,%ecx
+	movl	4(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	4(%esi),%eax
+	adcl	%edx,%ecx
+	movl	8(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	(%esi),%eax
+	adcl	%edx,%ecx
+	movl	12(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esp),%eax
+	adcl	%edx,%ecx
+	movl	4(%edi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,12(%eax)
+	movl	12(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	8(%esi),%eax
+	adcl	%edx,%ebp
+	movl	8(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	4(%esi),%eax
+	adcl	%edx,%ebp
+	movl	12(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	20(%esp),%eax
+	adcl	%edx,%ebp
+	movl	8(%edi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,16(%eax)
+	movl	12(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	8(%esi),%eax
+	adcl	%edx,%ebx
+	movl	12(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	20(%esp),%eax
+	adcl	%edx,%ebx
+	movl	12(%edi),%edx
+	adcl	$0,%ecx
+	movl	%ebp,20(%eax)
+	movl	12(%esi),%eax
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esp),%eax
+	adcl	%edx,%ecx
+	adcl	$0,%ebp
+	movl	%ebx,24(%eax)
+
+
+	movl	%ecx,28(%eax)
+	popl	%ebx
+	popl	%ebp
+	popl	%edi
+	popl	%esi
+	ret
+.size	bn_mul_comba4,.-.L_bn_mul_comba4_begin
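
  [Annotation: per the prologue above, a comes from 12(%esp) after
  the first push, b from 20(%esp) after the second, and the result
  pointer is refetched as 20(%esp) once all four registers are
  saved, so the routine has the shape bn_mul_comba4(r, a, b).  Its
  observable behavior, written with the mul_add_c sketch from the
  previous note (a reference sketch, not the shipped C path):

      /* Reference semantics: r[0..7] = a[0..3] * b[0..3], computed
         one output column at a time, which is what the unrolled
         assembly above does without any loop overhead. */
      void bn_mul_comba4_ref(u32 *r, const u32 *a, const u32 *b)
      {
          u32 c0 = 0, c1 = 0, c2 = 0;
          for (int k = 0; k < 7; k++) {
              int lo = k > 3 ? k - 3 : 0;
              int hi = k < 3 ? k : 3;
              for (int i = lo; i <= hi; i++)       /* column k   */
                  mul_add_c(a[i], b[k - i], &c0, &c1, &c2);
              r[k] = c0;                           /* emit, then  */
              c0 = c1; c1 = c2; c2 = 0;            /* rotate      */
          }
          r[7] = c0;
      }
  ]
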
+.globl	bn_sqr_comba8
+.type	bn_sqr_comba8,@function
+.align	16
+bn_sqr_comba8:
+.L_bn_sqr_comba8_begin:
+	pushl	%esi
+	pushl	%edi
+	pushl	%ebp
+	pushl	%ebx
+	movl	20(%esp),%edi
+	movl	24(%esp),%esi
+	xorl	%ebx,%ebx
+	xorl	%ecx,%ecx
+	movl	(%esi),%eax
+
+	xorl	%ebp,%ebp
+
+	mull	%eax
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	(%esi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,(%edi)
+	movl	4(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	8(%esi),%eax
+	adcl	$0,%ebx
+	movl	%ecx,4(%edi)
+	movl	(%esi),%edx
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	4(%esi),%eax
+	adcl	$0,%ecx
+
+	mull	%eax
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	(%esi),%edx
+	adcl	$0,%ecx
+	movl	%ebp,8(%edi)
+	movl	12(%esi),%eax
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	8(%esi),%eax
+	adcl	$0,%ebp
+	movl	4(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	16(%esi),%eax
+	adcl	$0,%ebp
+	movl	%ebx,12(%edi)
+	movl	(%esi),%edx
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	12(%esi),%eax
+	adcl	$0,%ebx
+	movl	4(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	8(%esi),%eax
+	adcl	$0,%ebx
+
+	mull	%eax
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	(%esi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,16(%edi)
+	movl	20(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	16(%esi),%eax
+	adcl	$0,%ecx
+	movl	4(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	12(%esi),%eax
+	adcl	$0,%ecx
+	movl	8(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	24(%esi),%eax
+	adcl	$0,%ecx
+	movl	%ebp,20(%edi)
+	movl	(%esi),%edx
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	20(%esi),%eax
+	adcl	$0,%ebp
+	movl	4(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	16(%esi),%eax
+	adcl	$0,%ebp
+	movl	8(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	12(%esi),%eax
+	adcl	$0,%ebp
+
+	mull	%eax
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	(%esi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,24(%edi)
+	movl	28(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	24(%esi),%eax
+	adcl	$0,%ebx
+	movl	4(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	20(%esi),%eax
+	adcl	$0,%ebx
+	movl	8(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	16(%esi),%eax
+	adcl	$0,%ebx
+	movl	12(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	28(%esi),%eax
+	adcl	$0,%ebx
+	movl	%ecx,28(%edi)
+	movl	4(%esi),%edx
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	24(%esi),%eax
+	adcl	$0,%ecx
+	movl	8(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	20(%esi),%eax
+	adcl	$0,%ecx
+	movl	12(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	16(%esi),%eax
+	adcl	$0,%ecx
+
+	mull	%eax
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	8(%esi),%edx
+	adcl	$0,%ecx
+	movl	%ebp,32(%edi)
+	movl	28(%esi),%eax
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	24(%esi),%eax
+	adcl	$0,%ebp
+	movl	12(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	20(%esi),%eax
+	adcl	$0,%ebp
+	movl	16(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	28(%esi),%eax
+	adcl	$0,%ebp
+	movl	%ebx,36(%edi)
+	movl	12(%esi),%edx
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	24(%esi),%eax
+	adcl	$0,%ebx
+	movl	16(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	20(%esi),%eax
+	adcl	$0,%ebx
+
+	mull	%eax
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	16(%esi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,40(%edi)
+	movl	28(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	24(%esi),%eax
+	adcl	$0,%ecx
+	movl	20(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	28(%esi),%eax
+	adcl	$0,%ecx
+	movl	%ebp,44(%edi)
+	movl	20(%esi),%edx
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	24(%esi),%eax
+	adcl	$0,%ebp
+
+	mull	%eax
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	24(%esi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,48(%edi)
+	movl	28(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	28(%esi),%eax
+	adcl	$0,%ebx
+	movl	%ecx,52(%edi)
+
+
+	xorl	%ecx,%ecx
+
+	mull	%eax
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	adcl	$0,%ecx
+	movl	%ebp,56(%edi)
+
+	movl	%ebx,60(%edi)
+	popl	%ebx
+	popl	%ebp
+	popl	%edi
+	popl	%esi
+	ret
+.size	bn_sqr_comba8,.-.L_bn_sqr_comba8_begin
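
  [Annotation: the squaring variant reuses the column scheme but
  exploits symmetry.  Each cross product a[i]*a[j] with i != j
  contributes twice, so the generated code doubles it in 64 bits
  first (the addl %eax,%eax / adcl %edx,%edx pairs above, with an
  extra adcl $0 because the doubling itself can carry out), while
  the diagonal a[i]^2 terms (the mull %eax groups) go through the
  plain single step.  A hedged C equivalent of the doubled step,
  same illustrative conventions as before:

      /* Add 2*a*b into the column accumulator; the doubling can
         push a carry into c2 on its own, hence the second
         adcl $0 path in the generated code. */
      static void sqr_add_c2(u32 a, u32 b, u32 *c0, u32 *c1, u32 *c2)
      {
          u64 p = (u64)a * b;
          *c2 += (u32)(p >> 63);      /* bit lost by the doubling */
          p <<= 1;                    /* addl %eax,%eax / adcl %edx,%edx */
          u64 t = (u64)(u32)p + *c0;
          *c0 = (u32)t;
          t = (t >> 32) + (p >> 32) + *c1;
          *c1 = (u32)t;
          *c2 += (u32)(t >> 32);
      }
  ]
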
+.globl	bn_sqr_comba4
+.type	bn_sqr_comba4,@function
+.align	16
+bn_sqr_comba4:
+.L_bn_sqr_comba4_begin:
+	pushl	%esi
+	pushl	%edi
+	pushl	%ebp
+	pushl	%ebx
+	movl	20(%esp),%edi
+	movl	24(%esp),%esi
+	xorl	%ebx,%ebx
+	xorl	%ecx,%ecx
+	movl	(%esi),%eax
+
+	xorl	%ebp,%ebp
+
+	mull	%eax
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	(%esi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,(%edi)
+	movl	4(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	8(%esi),%eax
+	adcl	$0,%ebx
+	movl	%ecx,4(%edi)
+	movl	(%esi),%edx
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	4(%esi),%eax
+	adcl	$0,%ecx
+
+	mull	%eax
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	(%esi),%edx
+	adcl	$0,%ecx
+	movl	%ebp,8(%edi)
+	movl	12(%esi),%eax
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	8(%esi),%eax
+	adcl	$0,%ebp
+	movl	4(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	12(%esi),%eax
+	adcl	$0,%ebp
+	movl	%ebx,12(%edi)
+	movl	4(%esi),%edx
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	8(%esi),%eax
+	adcl	$0,%ebx
+
+	mull	%eax
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	8(%esi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,16(%edi)
+	movl	12(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	12(%esi),%eax
+	adcl	$0,%ecx
+	movl	%ebp,20(%edi)
+
+
+	xorl	%ebp,%ebp
+
+	mull	%eax
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	adcl	$0,%ebp
+	movl	%ebx,24(%edi)
+
+	movl	%ecx,28(%edi)
+	popl	%ebx
+	popl	%ebp
+	popl	%edi
+	popl	%esi
+	ret
+.size	bn_sqr_comba4,.-.L_bn_sqr_comba4_begin
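
  [Annotation: putting the two step types together gives the
  observable behavior of bn_sqr_comba4 just above (r and a loaded
  from 20(%esp) and 24(%esp)); a reference sketch reusing the two
  helpers from the earlier notes, again illustrative only:

      /* Reference semantics: r[0..7] = a[0..3] squared. */
      void bn_sqr_comba4_ref(u32 *r, const u32 *a)
      {
          u32 c0 = 0, c1 = 0, c2 = 0;
          for (int k = 0; k < 7; k++) {
              int lo = k > 3 ? k - 3 : 0;
              for (int i = lo; 2 * i < k; i++)     /* cross terms */
                  sqr_add_c2(a[i], a[k - i], &c0, &c1, &c2);
              if ((k & 1) == 0)                    /* diagonal    */
                  mul_add_c(a[k / 2], a[k / 2], &c0, &c1, &c2);
              r[k] = c0;
              c0 = c1; c1 = c2; c2 = 0;
          }
          r[7] = c0;
      }
  ]
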
+#else
+.file	"co-586.S"
+.text
+.globl	bn_mul_comba8
+.type	bn_mul_comba8,@function
+.align	16
+bn_mul_comba8:
+.L_bn_mul_comba8_begin:
+	pushl	%esi
+	movl	12(%esp),%esi
+	pushl	%edi
+	movl	20(%esp),%edi
+	pushl	%ebp
+	pushl	%ebx
+	xorl	%ebx,%ebx
+	movl	(%esi),%eax
+	xorl	%ecx,%ecx
+	movl	(%edi),%edx
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esp),%eax
+	adcl	%edx,%ecx
+	movl	(%edi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,(%eax)
+	movl	4(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	(%esi),%eax
+	adcl	%edx,%ebp
+	movl	4(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	20(%esp),%eax
+	adcl	%edx,%ebp
+	movl	(%edi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,4(%eax)
+	movl	8(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	4(%esi),%eax
+	adcl	%edx,%ebx
+	movl	4(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	(%esi),%eax
+	adcl	%edx,%ebx
+	movl	8(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	20(%esp),%eax
+	adcl	%edx,%ebx
+	movl	(%edi),%edx
+	adcl	$0,%ecx
+	movl	%ebp,8(%eax)
+	movl	12(%esi),%eax
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	8(%esi),%eax
+	adcl	%edx,%ecx
+	movl	4(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	4(%esi),%eax
+	adcl	%edx,%ecx
+	movl	8(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	(%esi),%eax
+	adcl	%edx,%ecx
+	movl	12(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esp),%eax
+	adcl	%edx,%ecx
+	movl	(%edi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,12(%eax)
+	movl	16(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	12(%esi),%eax
+	adcl	%edx,%ebp
+	movl	4(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	8(%esi),%eax
+	adcl	%edx,%ebp
+	movl	8(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	4(%esi),%eax
+	adcl	%edx,%ebp
+	movl	12(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	(%esi),%eax
+	adcl	%edx,%ebp
+	movl	16(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	20(%esp),%eax
+	adcl	%edx,%ebp
+	movl	(%edi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,16(%eax)
+	movl	20(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	16(%esi),%eax
+	adcl	%edx,%ebx
+	movl	4(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	12(%esi),%eax
+	adcl	%edx,%ebx
+	movl	8(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	8(%esi),%eax
+	adcl	%edx,%ebx
+	movl	12(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	4(%esi),%eax
+	adcl	%edx,%ebx
+	movl	16(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	(%esi),%eax
+	adcl	%edx,%ebx
+	movl	20(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	20(%esp),%eax
+	adcl	%edx,%ebx
+	movl	(%edi),%edx
+	adcl	$0,%ecx
+	movl	%ebp,20(%eax)
+	movl	24(%esi),%eax
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esi),%eax
+	adcl	%edx,%ecx
+	movl	4(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	16(%esi),%eax
+	adcl	%edx,%ecx
+	movl	8(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	12(%esi),%eax
+	adcl	%edx,%ecx
+	movl	12(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	8(%esi),%eax
+	adcl	%edx,%ecx
+	movl	16(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	4(%esi),%eax
+	adcl	%edx,%ecx
+	movl	20(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	(%esi),%eax
+	adcl	%edx,%ecx
+	movl	24(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esp),%eax
+	adcl	%edx,%ecx
+	movl	(%edi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,24(%eax)
+	movl	28(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	24(%esi),%eax
+	adcl	%edx,%ebp
+	movl	4(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	20(%esi),%eax
+	adcl	%edx,%ebp
+	movl	8(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	16(%esi),%eax
+	adcl	%edx,%ebp
+	movl	12(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	12(%esi),%eax
+	adcl	%edx,%ebp
+	movl	16(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	8(%esi),%eax
+	adcl	%edx,%ebp
+	movl	20(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	4(%esi),%eax
+	adcl	%edx,%ebp
+	movl	24(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	(%esi),%eax
+	adcl	%edx,%ebp
+	movl	28(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	20(%esp),%eax
+	adcl	%edx,%ebp
+	movl	4(%edi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,28(%eax)
+	movl	28(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	24(%esi),%eax
+	adcl	%edx,%ebx
+	movl	8(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	20(%esi),%eax
+	adcl	%edx,%ebx
+	movl	12(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	16(%esi),%eax
+	adcl	%edx,%ebx
+	movl	16(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	12(%esi),%eax
+	adcl	%edx,%ebx
+	movl	20(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	8(%esi),%eax
+	adcl	%edx,%ebx
+	movl	24(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	4(%esi),%eax
+	adcl	%edx,%ebx
+	movl	28(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	20(%esp),%eax
+	adcl	%edx,%ebx
+	movl	8(%edi),%edx
+	adcl	$0,%ecx
+	movl	%ebp,32(%eax)
+	movl	28(%esi),%eax
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	24(%esi),%eax
+	adcl	%edx,%ecx
+	movl	12(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esi),%eax
+	adcl	%edx,%ecx
+	movl	16(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	16(%esi),%eax
+	adcl	%edx,%ecx
+	movl	20(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	12(%esi),%eax
+	adcl	%edx,%ecx
+	movl	24(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	8(%esi),%eax
+	adcl	%edx,%ecx
+	movl	28(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esp),%eax
+	adcl	%edx,%ecx
+	movl	12(%edi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,36(%eax)
+	movl	28(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	24(%esi),%eax
+	adcl	%edx,%ebp
+	movl	16(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	20(%esi),%eax
+	adcl	%edx,%ebp
+	movl	20(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	16(%esi),%eax
+	adcl	%edx,%ebp
+	movl	24(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	12(%esi),%eax
+	adcl	%edx,%ebp
+	movl	28(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	20(%esp),%eax
+	adcl	%edx,%ebp
+	movl	16(%edi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,40(%eax)
+	movl	28(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	24(%esi),%eax
+	adcl	%edx,%ebx
+	movl	20(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	20(%esi),%eax
+	adcl	%edx,%ebx
+	movl	24(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	16(%esi),%eax
+	adcl	%edx,%ebx
+	movl	28(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	20(%esp),%eax
+	adcl	%edx,%ebx
+	movl	20(%edi),%edx
+	adcl	$0,%ecx
+	movl	%ebp,44(%eax)
+	movl	28(%esi),%eax
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	24(%esi),%eax
+	adcl	%edx,%ecx
+	movl	24(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esi),%eax
+	adcl	%edx,%ecx
+	movl	28(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esp),%eax
+	adcl	%edx,%ecx
+	movl	24(%edi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,48(%eax)
+	movl	28(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	24(%esi),%eax
+	adcl	%edx,%ebp
+	movl	28(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	20(%esp),%eax
+	adcl	%edx,%ebp
+	movl	28(%edi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,52(%eax)
+	movl	28(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	20(%esp),%eax
+	adcl	%edx,%ebx
+	adcl	$0,%ecx
+	movl	%ebp,56(%eax)
+
+
+	movl	%ebx,60(%eax)
+	popl	%ebx
+	popl	%ebp
+	popl	%edi
+	popl	%esi
+	ret
+.size	bn_mul_comba8,.-.L_bn_mul_comba8_begin
+.globl	bn_mul_comba4
+.type	bn_mul_comba4,@function
+.align	16
+bn_mul_comba4:
+.L_bn_mul_comba4_begin:
+	pushl	%esi
+	movl	12(%esp),%esi
+	pushl	%edi
+	movl	20(%esp),%edi
+	pushl	%ebp
+	pushl	%ebx
+	xorl	%ebx,%ebx
+	movl	(%esi),%eax
+	xorl	%ecx,%ecx
+	movl	(%edi),%edx
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esp),%eax
+	adcl	%edx,%ecx
+	movl	(%edi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,(%eax)
+	movl	4(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	(%esi),%eax
+	adcl	%edx,%ebp
+	movl	4(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	20(%esp),%eax
+	adcl	%edx,%ebp
+	movl	(%edi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,4(%eax)
+	movl	8(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	4(%esi),%eax
+	adcl	%edx,%ebx
+	movl	4(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	(%esi),%eax
+	adcl	%edx,%ebx
+	movl	8(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	20(%esp),%eax
+	adcl	%edx,%ebx
+	movl	(%edi),%edx
+	adcl	$0,%ecx
+	movl	%ebp,8(%eax)
+	movl	12(%esi),%eax
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	8(%esi),%eax
+	adcl	%edx,%ecx
+	movl	4(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	4(%esi),%eax
+	adcl	%edx,%ecx
+	movl	8(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	(%esi),%eax
+	adcl	%edx,%ecx
+	movl	12(%edi),%edx
+	adcl	$0,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esp),%eax
+	adcl	%edx,%ecx
+	movl	4(%edi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,12(%eax)
+	movl	12(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	8(%esi),%eax
+	adcl	%edx,%ebp
+	movl	8(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	4(%esi),%eax
+	adcl	%edx,%ebp
+	movl	12(%edi),%edx
+	adcl	$0,%ebx
+
+	mull	%edx
+	addl	%eax,%ecx
+	movl	20(%esp),%eax
+	adcl	%edx,%ebp
+	movl	8(%edi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,16(%eax)
+	movl	12(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	8(%esi),%eax
+	adcl	%edx,%ebx
+	movl	12(%edi),%edx
+	adcl	$0,%ecx
+
+	mull	%edx
+	addl	%eax,%ebp
+	movl	20(%esp),%eax
+	adcl	%edx,%ebx
+	movl	12(%edi),%edx
+	adcl	$0,%ecx
+	movl	%ebp,20(%eax)
+	movl	12(%esi),%eax
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%ebx
+	movl	20(%esp),%eax
+	adcl	%edx,%ecx
+	adcl	$0,%ebp
+	movl	%ebx,24(%eax)
+
+
+	movl	%ecx,28(%eax)
+	popl	%ebx
+	popl	%ebp
+	popl	%edi
+	popl	%esi
+	ret
+.size	bn_mul_comba4,.-.L_bn_mul_comba4_begin
+.globl	bn_sqr_comba8
+.type	bn_sqr_comba8,@function
+.align	16
+bn_sqr_comba8:
+.L_bn_sqr_comba8_begin:
+	pushl	%esi
+	pushl	%edi
+	pushl	%ebp
+	pushl	%ebx
+	movl	20(%esp),%edi
+	movl	24(%esp),%esi
+	xorl	%ebx,%ebx
+	xorl	%ecx,%ecx
+	movl	(%esi),%eax
+
+	xorl	%ebp,%ebp
+
+	mull	%eax
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	(%esi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,(%edi)
+	movl	4(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	8(%esi),%eax
+	adcl	$0,%ebx
+	movl	%ecx,4(%edi)
+	movl	(%esi),%edx
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	4(%esi),%eax
+	adcl	$0,%ecx
+
+	mull	%eax
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	(%esi),%edx
+	adcl	$0,%ecx
+	movl	%ebp,8(%edi)
+	movl	12(%esi),%eax
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	8(%esi),%eax
+	adcl	$0,%ebp
+	movl	4(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	16(%esi),%eax
+	adcl	$0,%ebp
+	movl	%ebx,12(%edi)
+	movl	(%esi),%edx
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	12(%esi),%eax
+	adcl	$0,%ebx
+	movl	4(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	8(%esi),%eax
+	adcl	$0,%ebx
+
+	mull	%eax
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	(%esi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,16(%edi)
+	movl	20(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	16(%esi),%eax
+	adcl	$0,%ecx
+	movl	4(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	12(%esi),%eax
+	adcl	$0,%ecx
+	movl	8(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	24(%esi),%eax
+	adcl	$0,%ecx
+	movl	%ebp,20(%edi)
+	movl	(%esi),%edx
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	20(%esi),%eax
+	adcl	$0,%ebp
+	movl	4(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	16(%esi),%eax
+	adcl	$0,%ebp
+	movl	8(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	12(%esi),%eax
+	adcl	$0,%ebp
+
+	mull	%eax
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	(%esi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,24(%edi)
+	movl	28(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	24(%esi),%eax
+	adcl	$0,%ebx
+	movl	4(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	20(%esi),%eax
+	adcl	$0,%ebx
+	movl	8(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	16(%esi),%eax
+	adcl	$0,%ebx
+	movl	12(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	28(%esi),%eax
+	adcl	$0,%ebx
+	movl	%ecx,28(%edi)
+	movl	4(%esi),%edx
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	24(%esi),%eax
+	adcl	$0,%ecx
+	movl	8(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	20(%esi),%eax
+	adcl	$0,%ecx
+	movl	12(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	16(%esi),%eax
+	adcl	$0,%ecx
+
+	mull	%eax
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	8(%esi),%edx
+	adcl	$0,%ecx
+	movl	%ebp,32(%edi)
+	movl	28(%esi),%eax
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	24(%esi),%eax
+	adcl	$0,%ebp
+	movl	12(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	20(%esi),%eax
+	adcl	$0,%ebp
+	movl	16(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	28(%esi),%eax
+	adcl	$0,%ebp
+	movl	%ebx,36(%edi)
+	movl	12(%esi),%edx
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	24(%esi),%eax
+	adcl	$0,%ebx
+	movl	16(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	20(%esi),%eax
+	adcl	$0,%ebx
+
+	mull	%eax
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	16(%esi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,40(%edi)
+	movl	28(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	24(%esi),%eax
+	adcl	$0,%ecx
+	movl	20(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	28(%esi),%eax
+	adcl	$0,%ecx
+	movl	%ebp,44(%edi)
+	movl	20(%esi),%edx
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	24(%esi),%eax
+	adcl	$0,%ebp
+
+	mull	%eax
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	24(%esi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,48(%edi)
+	movl	28(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	28(%esi),%eax
+	adcl	$0,%ebx
+	movl	%ecx,52(%edi)
+
+
+	xorl	%ecx,%ecx
+
+	mull	%eax
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	adcl	$0,%ecx
+	movl	%ebp,56(%edi)
+
+	movl	%ebx,60(%edi)
+	popl	%ebx
+	popl	%ebp
+	popl	%edi
+	popl	%esi
+	ret
+.size	bn_sqr_comba8,.-.L_bn_sqr_comba8_begin
+.globl	bn_sqr_comba4
+.type	bn_sqr_comba4,@function
+.align	16
+bn_sqr_comba4:
+.L_bn_sqr_comba4_begin:
+	pushl	%esi
+	pushl	%edi
+	pushl	%ebp
+	pushl	%ebx
+	movl	20(%esp),%edi
+	movl	24(%esp),%esi
+	xorl	%ebx,%ebx
+	xorl	%ecx,%ecx
+	movl	(%esi),%eax
+
+	xorl	%ebp,%ebp
+
+	mull	%eax
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	(%esi),%edx
+	adcl	$0,%ebp
+	movl	%ebx,(%edi)
+	movl	4(%esi),%eax
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	8(%esi),%eax
+	adcl	$0,%ebx
+	movl	%ecx,4(%edi)
+	movl	(%esi),%edx
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	4(%esi),%eax
+	adcl	$0,%ecx
+
+	mull	%eax
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	(%esi),%edx
+	adcl	$0,%ecx
+	movl	%ebp,8(%edi)
+	movl	12(%esi),%eax
+
+
+	xorl	%ebp,%ebp
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	8(%esi),%eax
+	adcl	$0,%ebp
+	movl	4(%esi),%edx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebp
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	movl	12(%esi),%eax
+	adcl	$0,%ebp
+	movl	%ebx,12(%edi)
+	movl	4(%esi),%edx
+
+
+	xorl	%ebx,%ebx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ebx
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	8(%esi),%eax
+	adcl	$0,%ebx
+
+	mull	%eax
+	addl	%eax,%ecx
+	adcl	%edx,%ebp
+	movl	8(%esi),%edx
+	adcl	$0,%ebx
+	movl	%ecx,16(%edi)
+	movl	12(%esi),%eax
+
+
+	xorl	%ecx,%ecx
+
+	mull	%edx
+	addl	%eax,%eax
+	adcl	%edx,%edx
+	adcl	$0,%ecx
+	addl	%eax,%ebp
+	adcl	%edx,%ebx
+	movl	12(%esi),%eax
+	adcl	$0,%ecx
+	movl	%ebp,20(%edi)
+
+
+	xorl	%ebp,%ebp
+
+	mull	%eax
+	addl	%eax,%ebx
+	adcl	%edx,%ecx
+	adcl	$0,%ebp
+	movl	%ebx,24(%edi)
+
+	movl	%ecx,28(%edi)
+	popl	%ebx
+	popl	%ebp
+	popl	%edi
+	popl	%esi
+	ret
+.size	bn_sqr_comba4,.-.L_bn_sqr_comba4_begin
+#endif
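
  [Annotation: one structural point worth flagging for review.
  Each regenerated .S carries two complete copies of the code,
  selected at assembly time by the PIC macro; the #else body that
  just ended appears to repeat the #ifdef PIC body verbatim, which
  is expected here because co-586 never touches global data.
  Files that do, such as crypt586.S below, differ exactly in how
  they materialize a symbol's address.  Seen from C, with the
  table name borrowed from crypt586.S and everything else
  illustrative:

      /* Non-PIC: the linker patches an absolute address in place.
         PIC:     the same access compiles to a load through the
                  GOT, which is the DES_SPtrans@GOT(%edx)
                  indirection in crypt586.S below. */
      extern const unsigned int DES_SPtrans[8][64];

      const void *sptrans_addr(void)
      {
          return DES_SPtrans;
      }
  ]
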


Property changes on: trunk/secure/lib/libcrypto/i386/co-586.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Deleted: trunk/secure/lib/libcrypto/i386/co-586.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/co-586.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/co-586.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,1255 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/co-586.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"co-586.s"
-.text
-.globl	bn_mul_comba8
-.type	bn_mul_comba8,@function
-.align	16
-bn_mul_comba8:
-.L_bn_mul_comba8_begin:
-	pushl	%esi
-	movl	12(%esp),%esi
-	pushl	%edi
-	movl	20(%esp),%edi
-	pushl	%ebp
-	pushl	%ebx
-	xorl	%ebx,%ebx
-	movl	(%esi),%eax
-	xorl	%ecx,%ecx
-	movl	(%edi),%edx
-
-	xorl	%ebp,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	20(%esp),%eax
-	adcl	%edx,%ecx
-	movl	(%edi),%edx
-	adcl	$0,%ebp
-	movl	%ebx,(%eax)
-	movl	4(%esi),%eax
-
-
-	xorl	%ebx,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	(%esi),%eax
-	adcl	%edx,%ebp
-	movl	4(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	20(%esp),%eax
-	adcl	%edx,%ebp
-	movl	(%edi),%edx
-	adcl	$0,%ebx
-	movl	%ecx,4(%eax)
-	movl	8(%esi),%eax
-
-
-	xorl	%ecx,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	4(%esi),%eax
-	adcl	%edx,%ebx
-	movl	4(%edi),%edx
-	adcl	$0,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	(%esi),%eax
-	adcl	%edx,%ebx
-	movl	8(%edi),%edx
-	adcl	$0,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	20(%esp),%eax
-	adcl	%edx,%ebx
-	movl	(%edi),%edx
-	adcl	$0,%ecx
-	movl	%ebp,8(%eax)
-	movl	12(%esi),%eax
-
-
-	xorl	%ebp,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	8(%esi),%eax
-	adcl	%edx,%ecx
-	movl	4(%edi),%edx
-	adcl	$0,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	4(%esi),%eax
-	adcl	%edx,%ecx
-	movl	8(%edi),%edx
-	adcl	$0,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	(%esi),%eax
-	adcl	%edx,%ecx
-	movl	12(%edi),%edx
-	adcl	$0,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	20(%esp),%eax
-	adcl	%edx,%ecx
-	movl	(%edi),%edx
-	adcl	$0,%ebp
-	movl	%ebx,12(%eax)
-	movl	16(%esi),%eax
-
-
-	xorl	%ebx,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	12(%esi),%eax
-	adcl	%edx,%ebp
-	movl	4(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	8(%esi),%eax
-	adcl	%edx,%ebp
-	movl	8(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	4(%esi),%eax
-	adcl	%edx,%ebp
-	movl	12(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	(%esi),%eax
-	adcl	%edx,%ebp
-	movl	16(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	20(%esp),%eax
-	adcl	%edx,%ebp
-	movl	(%edi),%edx
-	adcl	$0,%ebx
-	movl	%ecx,16(%eax)
-	movl	20(%esi),%eax
-
-
-	xorl	%ecx,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	16(%esi),%eax
-	adcl	%edx,%ebx
-	movl	4(%edi),%edx
-	adcl	$0,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	12(%esi),%eax
-	adcl	%edx,%ebx
-	movl	8(%edi),%edx
-	adcl	$0,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	8(%esi),%eax
-	adcl	%edx,%ebx
-	movl	12(%edi),%edx
-	adcl	$0,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	4(%esi),%eax
-	adcl	%edx,%ebx
-	movl	16(%edi),%edx
-	adcl	$0,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	(%esi),%eax
-	adcl	%edx,%ebx
-	movl	20(%edi),%edx
-	adcl	$0,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	20(%esp),%eax
-	adcl	%edx,%ebx
-	movl	(%edi),%edx
-	adcl	$0,%ecx
-	movl	%ebp,20(%eax)
-	movl	24(%esi),%eax
-
-
-	xorl	%ebp,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	20(%esi),%eax
-	adcl	%edx,%ecx
-	movl	4(%edi),%edx
-	adcl	$0,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	16(%esi),%eax
-	adcl	%edx,%ecx
-	movl	8(%edi),%edx
-	adcl	$0,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	12(%esi),%eax
-	adcl	%edx,%ecx
-	movl	12(%edi),%edx
-	adcl	$0,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	8(%esi),%eax
-	adcl	%edx,%ecx
-	movl	16(%edi),%edx
-	adcl	$0,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	4(%esi),%eax
-	adcl	%edx,%ecx
-	movl	20(%edi),%edx
-	adcl	$0,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	(%esi),%eax
-	adcl	%edx,%ecx
-	movl	24(%edi),%edx
-	adcl	$0,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	20(%esp),%eax
-	adcl	%edx,%ecx
-	movl	(%edi),%edx
-	adcl	$0,%ebp
-	movl	%ebx,24(%eax)
-	movl	28(%esi),%eax
-
-
-	xorl	%ebx,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	24(%esi),%eax
-	adcl	%edx,%ebp
-	movl	4(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	20(%esi),%eax
-	adcl	%edx,%ebp
-	movl	8(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	16(%esi),%eax
-	adcl	%edx,%ebp
-	movl	12(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	12(%esi),%eax
-	adcl	%edx,%ebp
-	movl	16(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	8(%esi),%eax
-	adcl	%edx,%ebp
-	movl	20(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	4(%esi),%eax
-	adcl	%edx,%ebp
-	movl	24(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	(%esi),%eax
-	adcl	%edx,%ebp
-	movl	28(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	20(%esp),%eax
-	adcl	%edx,%ebp
-	movl	4(%edi),%edx
-	adcl	$0,%ebx
-	movl	%ecx,28(%eax)
-	movl	28(%esi),%eax
-
-
-	xorl	%ecx,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	24(%esi),%eax
-	adcl	%edx,%ebx
-	movl	8(%edi),%edx
-	adcl	$0,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	20(%esi),%eax
-	adcl	%edx,%ebx
-	movl	12(%edi),%edx
-	adcl	$0,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	16(%esi),%eax
-	adcl	%edx,%ebx
-	movl	16(%edi),%edx
-	adcl	$0,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	12(%esi),%eax
-	adcl	%edx,%ebx
-	movl	20(%edi),%edx
-	adcl	$0,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	8(%esi),%eax
-	adcl	%edx,%ebx
-	movl	24(%edi),%edx
-	adcl	$0,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	4(%esi),%eax
-	adcl	%edx,%ebx
-	movl	28(%edi),%edx
-	adcl	$0,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	20(%esp),%eax
-	adcl	%edx,%ebx
-	movl	8(%edi),%edx
-	adcl	$0,%ecx
-	movl	%ebp,32(%eax)
-	movl	28(%esi),%eax
-
-
-	xorl	%ebp,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	24(%esi),%eax
-	adcl	%edx,%ecx
-	movl	12(%edi),%edx
-	adcl	$0,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	20(%esi),%eax
-	adcl	%edx,%ecx
-	movl	16(%edi),%edx
-	adcl	$0,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	16(%esi),%eax
-	adcl	%edx,%ecx
-	movl	20(%edi),%edx
-	adcl	$0,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	12(%esi),%eax
-	adcl	%edx,%ecx
-	movl	24(%edi),%edx
-	adcl	$0,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	8(%esi),%eax
-	adcl	%edx,%ecx
-	movl	28(%edi),%edx
-	adcl	$0,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	20(%esp),%eax
-	adcl	%edx,%ecx
-	movl	12(%edi),%edx
-	adcl	$0,%ebp
-	movl	%ebx,36(%eax)
-	movl	28(%esi),%eax
-
-
-	xorl	%ebx,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	24(%esi),%eax
-	adcl	%edx,%ebp
-	movl	16(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	20(%esi),%eax
-	adcl	%edx,%ebp
-	movl	20(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	16(%esi),%eax
-	adcl	%edx,%ebp
-	movl	24(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	12(%esi),%eax
-	adcl	%edx,%ebp
-	movl	28(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	20(%esp),%eax
-	adcl	%edx,%ebp
-	movl	16(%edi),%edx
-	adcl	$0,%ebx
-	movl	%ecx,40(%eax)
-	movl	28(%esi),%eax
-
-
-	xorl	%ecx,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	24(%esi),%eax
-	adcl	%edx,%ebx
-	movl	20(%edi),%edx
-	adcl	$0,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	20(%esi),%eax
-	adcl	%edx,%ebx
-	movl	24(%edi),%edx
-	adcl	$0,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	16(%esi),%eax
-	adcl	%edx,%ebx
-	movl	28(%edi),%edx
-	adcl	$0,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	20(%esp),%eax
-	adcl	%edx,%ebx
-	movl	20(%edi),%edx
-	adcl	$0,%ecx
-	movl	%ebp,44(%eax)
-	movl	28(%esi),%eax
-
-
-	xorl	%ebp,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	24(%esi),%eax
-	adcl	%edx,%ecx
-	movl	24(%edi),%edx
-	adcl	$0,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	20(%esi),%eax
-	adcl	%edx,%ecx
-	movl	28(%edi),%edx
-	adcl	$0,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	20(%esp),%eax
-	adcl	%edx,%ecx
-	movl	24(%edi),%edx
-	adcl	$0,%ebp
-	movl	%ebx,48(%eax)
-	movl	28(%esi),%eax
-
-
-	xorl	%ebx,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	24(%esi),%eax
-	adcl	%edx,%ebp
-	movl	28(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	20(%esp),%eax
-	adcl	%edx,%ebp
-	movl	28(%edi),%edx
-	adcl	$0,%ebx
-	movl	%ecx,52(%eax)
-	movl	28(%esi),%eax
-
-
-	xorl	%ecx,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	20(%esp),%eax
-	adcl	%edx,%ebx
-	adcl	$0,%ecx
-	movl	%ebp,56(%eax)
-
-
-	movl	%ebx,60(%eax)
-	popl	%ebx
-	popl	%ebp
-	popl	%edi
-	popl	%esi
-	ret
-.size	bn_mul_comba8,.-.L_bn_mul_comba8_begin
-.globl	bn_mul_comba4
-.type	bn_mul_comba4,@function
-.align	16
-bn_mul_comba4:
-.L_bn_mul_comba4_begin:
-	pushl	%esi
-	movl	12(%esp),%esi
-	pushl	%edi
-	movl	20(%esp),%edi
-	pushl	%ebp
-	pushl	%ebx
-	xorl	%ebx,%ebx
-	movl	(%esi),%eax
-	xorl	%ecx,%ecx
-	movl	(%edi),%edx
-
-	xorl	%ebp,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	20(%esp),%eax
-	adcl	%edx,%ecx
-	movl	(%edi),%edx
-	adcl	$0,%ebp
-	movl	%ebx,(%eax)
-	movl	4(%esi),%eax
-
-
-	xorl	%ebx,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	(%esi),%eax
-	adcl	%edx,%ebp
-	movl	4(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	20(%esp),%eax
-	adcl	%edx,%ebp
-	movl	(%edi),%edx
-	adcl	$0,%ebx
-	movl	%ecx,4(%eax)
-	movl	8(%esi),%eax
-
-
-	xorl	%ecx,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	4(%esi),%eax
-	adcl	%edx,%ebx
-	movl	4(%edi),%edx
-	adcl	$0,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	(%esi),%eax
-	adcl	%edx,%ebx
-	movl	8(%edi),%edx
-	adcl	$0,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	20(%esp),%eax
-	adcl	%edx,%ebx
-	movl	(%edi),%edx
-	adcl	$0,%ecx
-	movl	%ebp,8(%eax)
-	movl	12(%esi),%eax
-
-
-	xorl	%ebp,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	8(%esi),%eax
-	adcl	%edx,%ecx
-	movl	4(%edi),%edx
-	adcl	$0,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	4(%esi),%eax
-	adcl	%edx,%ecx
-	movl	8(%edi),%edx
-	adcl	$0,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	(%esi),%eax
-	adcl	%edx,%ecx
-	movl	12(%edi),%edx
-	adcl	$0,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	20(%esp),%eax
-	adcl	%edx,%ecx
-	movl	4(%edi),%edx
-	adcl	$0,%ebp
-	movl	%ebx,12(%eax)
-	movl	12(%esi),%eax
-
-
-	xorl	%ebx,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	8(%esi),%eax
-	adcl	%edx,%ebp
-	movl	8(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	4(%esi),%eax
-	adcl	%edx,%ebp
-	movl	12(%edi),%edx
-	adcl	$0,%ebx
-
-	mull	%edx
-	addl	%eax,%ecx
-	movl	20(%esp),%eax
-	adcl	%edx,%ebp
-	movl	8(%edi),%edx
-	adcl	$0,%ebx
-	movl	%ecx,16(%eax)
-	movl	12(%esi),%eax
-
-
-	xorl	%ecx,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	8(%esi),%eax
-	adcl	%edx,%ebx
-	movl	12(%edi),%edx
-	adcl	$0,%ecx
-
-	mull	%edx
-	addl	%eax,%ebp
-	movl	20(%esp),%eax
-	adcl	%edx,%ebx
-	movl	12(%edi),%edx
-	adcl	$0,%ecx
-	movl	%ebp,20(%eax)
-	movl	12(%esi),%eax
-
-
-	xorl	%ebp,%ebp
-
-	mull	%edx
-	addl	%eax,%ebx
-	movl	20(%esp),%eax
-	adcl	%edx,%ecx
-	adcl	$0,%ebp
-	movl	%ebx,24(%eax)
-
-
-	movl	%ecx,28(%eax)
-	popl	%ebx
-	popl	%ebp
-	popl	%edi
-	popl	%esi
-	ret
-.size	bn_mul_comba4,.-.L_bn_mul_comba4_begin
-.globl	bn_sqr_comba8
-.type	bn_sqr_comba8,@function
-.align	16
-bn_sqr_comba8:
-.L_bn_sqr_comba8_begin:
-	pushl	%esi
-	pushl	%edi
-	pushl	%ebp
-	pushl	%ebx
-	movl	20(%esp),%edi
-	movl	24(%esp),%esi
-	xorl	%ebx,%ebx
-	xorl	%ecx,%ecx
-	movl	(%esi),%eax
-
-	xorl	%ebp,%ebp
-
-	mull	%eax
-	addl	%eax,%ebx
-	adcl	%edx,%ecx
-	movl	(%esi),%edx
-	adcl	$0,%ebp
-	movl	%ebx,(%edi)
-	movl	4(%esi),%eax
-
-
-	xorl	%ebx,%ebx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebx
-	addl	%eax,%ecx
-	adcl	%edx,%ebp
-	movl	8(%esi),%eax
-	adcl	$0,%ebx
-	movl	%ecx,4(%edi)
-	movl	(%esi),%edx
-
-
-	xorl	%ecx,%ecx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ecx
-	addl	%eax,%ebp
-	adcl	%edx,%ebx
-	movl	4(%esi),%eax
-	adcl	$0,%ecx
-
-	mull	%eax
-	addl	%eax,%ebp
-	adcl	%edx,%ebx
-	movl	(%esi),%edx
-	adcl	$0,%ecx
-	movl	%ebp,8(%edi)
-	movl	12(%esi),%eax
-
-
-	xorl	%ebp,%ebp
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebp
-	addl	%eax,%ebx
-	adcl	%edx,%ecx
-	movl	8(%esi),%eax
-	adcl	$0,%ebp
-	movl	4(%esi),%edx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebp
-	addl	%eax,%ebx
-	adcl	%edx,%ecx
-	movl	16(%esi),%eax
-	adcl	$0,%ebp
-	movl	%ebx,12(%edi)
-	movl	(%esi),%edx
-
-
-	xorl	%ebx,%ebx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebx
-	addl	%eax,%ecx
-	adcl	%edx,%ebp
-	movl	12(%esi),%eax
-	adcl	$0,%ebx
-	movl	4(%esi),%edx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebx
-	addl	%eax,%ecx
-	adcl	%edx,%ebp
-	movl	8(%esi),%eax
-	adcl	$0,%ebx
-
-	mull	%eax
-	addl	%eax,%ecx
-	adcl	%edx,%ebp
-	movl	(%esi),%edx
-	adcl	$0,%ebx
-	movl	%ecx,16(%edi)
-	movl	20(%esi),%eax
-
-
-	xorl	%ecx,%ecx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ecx
-	addl	%eax,%ebp
-	adcl	%edx,%ebx
-	movl	16(%esi),%eax
-	adcl	$0,%ecx
-	movl	4(%esi),%edx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ecx
-	addl	%eax,%ebp
-	adcl	%edx,%ebx
-	movl	12(%esi),%eax
-	adcl	$0,%ecx
-	movl	8(%esi),%edx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ecx
-	addl	%eax,%ebp
-	adcl	%edx,%ebx
-	movl	24(%esi),%eax
-	adcl	$0,%ecx
-	movl	%ebp,20(%edi)
-	movl	(%esi),%edx
-
-
-	xorl	%ebp,%ebp
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebp
-	addl	%eax,%ebx
-	adcl	%edx,%ecx
-	movl	20(%esi),%eax
-	adcl	$0,%ebp
-	movl	4(%esi),%edx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebp
-	addl	%eax,%ebx
-	adcl	%edx,%ecx
-	movl	16(%esi),%eax
-	adcl	$0,%ebp
-	movl	8(%esi),%edx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebp
-	addl	%eax,%ebx
-	adcl	%edx,%ecx
-	movl	12(%esi),%eax
-	adcl	$0,%ebp
-
-	mull	%eax
-	addl	%eax,%ebx
-	adcl	%edx,%ecx
-	movl	(%esi),%edx
-	adcl	$0,%ebp
-	movl	%ebx,24(%edi)
-	movl	28(%esi),%eax
-
-
-	xorl	%ebx,%ebx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebx
-	addl	%eax,%ecx
-	adcl	%edx,%ebp
-	movl	24(%esi),%eax
-	adcl	$0,%ebx
-	movl	4(%esi),%edx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebx
-	addl	%eax,%ecx
-	adcl	%edx,%ebp
-	movl	20(%esi),%eax
-	adcl	$0,%ebx
-	movl	8(%esi),%edx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebx
-	addl	%eax,%ecx
-	adcl	%edx,%ebp
-	movl	16(%esi),%eax
-	adcl	$0,%ebx
-	movl	12(%esi),%edx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebx
-	addl	%eax,%ecx
-	adcl	%edx,%ebp
-	movl	28(%esi),%eax
-	adcl	$0,%ebx
-	movl	%ecx,28(%edi)
-	movl	4(%esi),%edx
-
-
-	xorl	%ecx,%ecx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ecx
-	addl	%eax,%ebp
-	adcl	%edx,%ebx
-	movl	24(%esi),%eax
-	adcl	$0,%ecx
-	movl	8(%esi),%edx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ecx
-	addl	%eax,%ebp
-	adcl	%edx,%ebx
-	movl	20(%esi),%eax
-	adcl	$0,%ecx
-	movl	12(%esi),%edx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ecx
-	addl	%eax,%ebp
-	adcl	%edx,%ebx
-	movl	16(%esi),%eax
-	adcl	$0,%ecx
-
-	mull	%eax
-	addl	%eax,%ebp
-	adcl	%edx,%ebx
-	movl	8(%esi),%edx
-	adcl	$0,%ecx
-	movl	%ebp,32(%edi)
-	movl	28(%esi),%eax
-
-
-	xorl	%ebp,%ebp
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebp
-	addl	%eax,%ebx
-	adcl	%edx,%ecx
-	movl	24(%esi),%eax
-	adcl	$0,%ebp
-	movl	12(%esi),%edx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebp
-	addl	%eax,%ebx
-	adcl	%edx,%ecx
-	movl	20(%esi),%eax
-	adcl	$0,%ebp
-	movl	16(%esi),%edx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebp
-	addl	%eax,%ebx
-	adcl	%edx,%ecx
-	movl	28(%esi),%eax
-	adcl	$0,%ebp
-	movl	%ebx,36(%edi)
-	movl	12(%esi),%edx
-
-
-	xorl	%ebx,%ebx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebx
-	addl	%eax,%ecx
-	adcl	%edx,%ebp
-	movl	24(%esi),%eax
-	adcl	$0,%ebx
-	movl	16(%esi),%edx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebx
-	addl	%eax,%ecx
-	adcl	%edx,%ebp
-	movl	20(%esi),%eax
-	adcl	$0,%ebx
-
-	mull	%eax
-	addl	%eax,%ecx
-	adcl	%edx,%ebp
-	movl	16(%esi),%edx
-	adcl	$0,%ebx
-	movl	%ecx,40(%edi)
-	movl	28(%esi),%eax
-
-
-	xorl	%ecx,%ecx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ecx
-	addl	%eax,%ebp
-	adcl	%edx,%ebx
-	movl	24(%esi),%eax
-	adcl	$0,%ecx
-	movl	20(%esi),%edx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ecx
-	addl	%eax,%ebp
-	adcl	%edx,%ebx
-	movl	28(%esi),%eax
-	adcl	$0,%ecx
-	movl	%ebp,44(%edi)
-	movl	20(%esi),%edx
-
-
-	xorl	%ebp,%ebp
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebp
-	addl	%eax,%ebx
-	adcl	%edx,%ecx
-	movl	24(%esi),%eax
-	adcl	$0,%ebp
-
-	mull	%eax
-	addl	%eax,%ebx
-	adcl	%edx,%ecx
-	movl	24(%esi),%edx
-	adcl	$0,%ebp
-	movl	%ebx,48(%edi)
-	movl	28(%esi),%eax
-
-
-	xorl	%ebx,%ebx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebx
-	addl	%eax,%ecx
-	adcl	%edx,%ebp
-	movl	28(%esi),%eax
-	adcl	$0,%ebx
-	movl	%ecx,52(%edi)
-
-
-	xorl	%ecx,%ecx
-
-	mull	%eax
-	addl	%eax,%ebp
-	adcl	%edx,%ebx
-	adcl	$0,%ecx
-	movl	%ebp,56(%edi)
-
-	movl	%ebx,60(%edi)
-	popl	%ebx
-	popl	%ebp
-	popl	%edi
-	popl	%esi
-	ret
-.size	bn_sqr_comba8,.-.L_bn_sqr_comba8_begin
-.globl	bn_sqr_comba4
-.type	bn_sqr_comba4,@function
-.align	16
-bn_sqr_comba4:
-.L_bn_sqr_comba4_begin:
-	pushl	%esi
-	pushl	%edi
-	pushl	%ebp
-	pushl	%ebx
-	movl	20(%esp),%edi
-	movl	24(%esp),%esi
-	xorl	%ebx,%ebx
-	xorl	%ecx,%ecx
-	movl	(%esi),%eax
-
-	xorl	%ebp,%ebp
-
-	mull	%eax
-	addl	%eax,%ebx
-	adcl	%edx,%ecx
-	movl	(%esi),%edx
-	adcl	$0,%ebp
-	movl	%ebx,(%edi)
-	movl	4(%esi),%eax
-
-
-	xorl	%ebx,%ebx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebx
-	addl	%eax,%ecx
-	adcl	%edx,%ebp
-	movl	8(%esi),%eax
-	adcl	$0,%ebx
-	movl	%ecx,4(%edi)
-	movl	(%esi),%edx
-
-
-	xorl	%ecx,%ecx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ecx
-	addl	%eax,%ebp
-	adcl	%edx,%ebx
-	movl	4(%esi),%eax
-	adcl	$0,%ecx
-
-	mull	%eax
-	addl	%eax,%ebp
-	adcl	%edx,%ebx
-	movl	(%esi),%edx
-	adcl	$0,%ecx
-	movl	%ebp,8(%edi)
-	movl	12(%esi),%eax
-
-
-	xorl	%ebp,%ebp
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebp
-	addl	%eax,%ebx
-	adcl	%edx,%ecx
-	movl	8(%esi),%eax
-	adcl	$0,%ebp
-	movl	4(%esi),%edx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebp
-	addl	%eax,%ebx
-	adcl	%edx,%ecx
-	movl	12(%esi),%eax
-	adcl	$0,%ebp
-	movl	%ebx,12(%edi)
-	movl	4(%esi),%edx
-
-
-	xorl	%ebx,%ebx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ebx
-	addl	%eax,%ecx
-	adcl	%edx,%ebp
-	movl	8(%esi),%eax
-	adcl	$0,%ebx
-
-	mull	%eax
-	addl	%eax,%ecx
-	adcl	%edx,%ebp
-	movl	8(%esi),%edx
-	adcl	$0,%ebx
-	movl	%ecx,16(%edi)
-	movl	12(%esi),%eax
-
-
-	xorl	%ecx,%ecx
-
-	mull	%edx
-	addl	%eax,%eax
-	adcl	%edx,%edx
-	adcl	$0,%ecx
-	addl	%eax,%ebp
-	adcl	%edx,%ebx
-	movl	12(%esi),%eax
-	adcl	$0,%ecx
-	movl	%ebp,20(%edi)
-
-
-	xorl	%ebp,%ebp
-
-	mull	%eax
-	addl	%eax,%ebx
-	adcl	%edx,%ecx
-	adcl	$0,%ebp
-	movl	%ebx,24(%edi)
-
-	movl	%ecx,28(%edi)
-	popl	%ebx
-	popl	%ebp
-	popl	%edi
-	popl	%esi
-	ret
-.size	bn_sqr_comba4,.-.L_bn_sqr_comba4_begin
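
  [Annotation: the file added next, crypt586.S, carries
  fcrypt_body, the inner loop of the DES-based Unix crypt(): the
  pushl $25 below sets up 25 iterations of the 16-round cipher,
  and the two extra stack operands fetched in every round
  (36(%esp) and 40(%esp)) are salt-derived masks.  Each round
  opens with the same mask/shift/xor sequence, a delta swap that
  exchanges salt-selected expansion bits between the 16-bit halves
  before the subkeys are applied.  Schematically, reusing the u32
  typedef from the earlier note, with a hypothetical helper name
  and without the exact OpenSSL argument layout:

      /* Swap the bits selected by the salt mask between the two
         16-bit halves of r; this mirrors the shrl/xorl/andl/shll/
         xorl prologue of every round in fcrypt_body. */
      static u32 salt_swap(u32 r, u32 mask)
      {
          u32 t = (r ^ (r >> 16)) & mask; /* differing, selected bits */
          return r ^ t ^ (t << 16);       /* flip in both halves      */
      }
  ]
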

Added: trunk/secure/lib/libcrypto/i386/crypt586.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/crypt586.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/crypt586.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,1760 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/crypt586.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from crypt586.pl.
+#ifdef PIC
+.file	"crypt586.S"
+.text
+.globl	fcrypt_body
+.type	fcrypt_body,@function
+.align	16
+fcrypt_body:
+.L_fcrypt_body_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+
+
+	xorl	%edi,%edi
+	xorl	%esi,%esi
+	call	.L000PIC_me_up
+.L000PIC_me_up:
+	popl	%edx
+	leal	_GLOBAL_OFFSET_TABLE_+[.-.L000PIC_me_up](%edx),%edx
+	movl	DES_SPtrans@GOT(%edx),%edx
+	pushl	%edx
+	movl	28(%esp),%ebp
+	pushl	$25
+.L001start:
+
+
+	movl	36(%esp),%eax
+	movl	%esi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%esi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	4(%ebp),%ecx
+	xorl	%esi,%eax
+	xorl	%esi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%edi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%edi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%edi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	8(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	12(%ebp),%ecx
+	xorl	%edi,%eax
+	xorl	%edi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%esi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%esi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%esi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	16(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	20(%ebp),%ecx
+	xorl	%esi,%eax
+	xorl	%esi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%edi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%edi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%edi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	24(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	28(%ebp),%ecx
+	xorl	%edi,%eax
+	xorl	%edi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%esi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%esi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%esi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	32(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	36(%ebp),%ecx
+	xorl	%esi,%eax
+	xorl	%esi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%edi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%edi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%edi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	40(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	44(%ebp),%ecx
+	xorl	%edi,%eax
+	xorl	%edi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%esi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%esi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%esi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	48(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	52(%ebp),%ecx
+	xorl	%esi,%eax
+	xorl	%esi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%edi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%edi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%edi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	56(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	60(%ebp),%ecx
+	xorl	%edi,%eax
+	xorl	%edi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%esi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%esi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%esi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	64(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	68(%ebp),%ecx
+	xorl	%esi,%eax
+	xorl	%esi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%edi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%edi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%edi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	72(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	76(%ebp),%ecx
+	xorl	%edi,%eax
+	xorl	%edi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%esi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%esi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%esi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	80(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	84(%ebp),%ecx
+	xorl	%esi,%eax
+	xorl	%esi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%edi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%edi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%edi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	88(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	92(%ebp),%ecx
+	xorl	%edi,%eax
+	xorl	%edi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%esi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%esi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%esi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	96(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	100(%ebp),%ecx
+	xorl	%esi,%eax
+	xorl	%esi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%edi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%edi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%edi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	104(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	108(%ebp),%ecx
+	xorl	%edi,%eax
+	xorl	%edi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%esi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%esi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%esi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	112(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	116(%ebp),%ecx
+	xorl	%esi,%eax
+	xorl	%esi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%edi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%edi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%edi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	120(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	124(%ebp),%ecx
+	xorl	%edi,%eax
+	xorl	%edi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%esi
+	movl	32(%esp),%ebp
+	movl	(%esp),%ebx
+	movl	%edi,%eax
+	decl	%ebx
+	movl	%esi,%edi
+	movl	%eax,%esi
+	movl	%ebx,(%esp)
+	jnz	.L001start
+
+
+	movl	28(%esp),%edx
+	rorl	$1,%edi
+	movl	%esi,%eax
+	xorl	%edi,%esi
+	andl	$0xaaaaaaaa,%esi
+	xorl	%esi,%eax
+	xorl	%esi,%edi
+
+	roll	$23,%eax
+	movl	%eax,%esi
+	xorl	%edi,%eax
+	andl	$0x03fc03fc,%eax
+	xorl	%eax,%esi
+	xorl	%eax,%edi
+
+	roll	$10,%esi
+	movl	%esi,%eax
+	xorl	%edi,%esi
+	andl	$0x33333333,%esi
+	xorl	%esi,%eax
+	xorl	%esi,%edi
+
+	roll	$18,%edi
+	movl	%edi,%esi
+	xorl	%eax,%edi
+	andl	$0xfff0000f,%edi
+	xorl	%edi,%esi
+	xorl	%edi,%eax
+
+	roll	$12,%esi
+	movl	%esi,%edi
+	xorl	%eax,%esi
+	andl	$0xf0f0f0f0,%esi
+	xorl	%esi,%edi
+	xorl	%esi,%eax
+
+	rorl	$4,%eax
+	movl	%eax,(%edx)
+	movl	%edi,4(%edx)
+	addl	$8,%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	fcrypt_body,.-.L_fcrypt_body_begin
+#else
+.file	"crypt586.S"
+.text
+.globl	fcrypt_body
+.type	fcrypt_body,@function
+.align	16
+fcrypt_body:
+.L_fcrypt_body_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+
+
+	xorl	%edi,%edi
+	xorl	%esi,%esi
+	leal	DES_SPtrans,%edx
+	pushl	%edx
+	movl	28(%esp),%ebp
+	pushl	$25
+.L000start:
+
+
+	movl	36(%esp),%eax
+	movl	%esi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%esi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	4(%ebp),%ecx
+	xorl	%esi,%eax
+	xorl	%esi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%edi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%edi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%edi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	8(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	12(%ebp),%ecx
+	xorl	%edi,%eax
+	xorl	%edi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%esi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%esi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%esi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	16(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	20(%ebp),%ecx
+	xorl	%esi,%eax
+	xorl	%esi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%edi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%edi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%edi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	24(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	28(%ebp),%ecx
+	xorl	%edi,%eax
+	xorl	%edi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%esi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%esi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%esi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	32(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	36(%ebp),%ecx
+	xorl	%esi,%eax
+	xorl	%esi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%edi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%edi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%edi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	40(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	44(%ebp),%ecx
+	xorl	%edi,%eax
+	xorl	%edi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%esi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%esi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%esi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	48(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	52(%ebp),%ecx
+	xorl	%esi,%eax
+	xorl	%esi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%edi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%edi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%edi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	56(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	60(%ebp),%ecx
+	xorl	%edi,%eax
+	xorl	%edi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%esi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%esi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%esi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	64(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	68(%ebp),%ecx
+	xorl	%esi,%eax
+	xorl	%esi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%edi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%edi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%edi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	72(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	76(%ebp),%ecx
+	xorl	%edi,%eax
+	xorl	%edi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%esi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%esi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%esi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	80(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	84(%ebp),%ecx
+	xorl	%esi,%eax
+	xorl	%esi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%edi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%edi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%edi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	88(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	92(%ebp),%ecx
+	xorl	%edi,%eax
+	xorl	%edi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%esi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%esi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%esi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	96(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	100(%ebp),%ecx
+	xorl	%esi,%eax
+	xorl	%esi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%edi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%edi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%edi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	104(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	108(%ebp),%ecx
+	xorl	%edi,%eax
+	xorl	%edi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%esi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%esi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%esi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	112(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	116(%ebp),%ecx
+	xorl	%esi,%eax
+	xorl	%esi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%edi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%edi
+	movl	32(%esp),%ebp
+
+
+	movl	36(%esp),%eax
+	movl	%edi,%edx
+	shrl	$16,%edx
+	movl	40(%esp),%ecx
+	xorl	%edi,%edx
+	andl	%edx,%eax
+	andl	%ecx,%edx
+	movl	%eax,%ebx
+	shll	$16,%ebx
+	movl	%edx,%ecx
+	shll	$16,%ecx
+	xorl	%ebx,%eax
+	xorl	%ecx,%edx
+	movl	120(%ebp),%ebx
+	xorl	%ebx,%eax
+	movl	124(%ebp),%ecx
+	xorl	%edi,%eax
+	xorl	%edi,%edx
+	xorl	%ecx,%edx
+	andl	$0xfcfcfcfc,%eax
+	xorl	%ebx,%ebx
+	andl	$0xcfcfcfcf,%edx
+	xorl	%ecx,%ecx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	movl	4(%esp),%ebp
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	movl	0x600(%ebp,%ebx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x700(%ebp,%ecx,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x400(%ebp,%eax,1),%ebx
+	xorl	%ebx,%esi
+	movl	0x500(%ebp,%edx,1),%ebx
+	xorl	%ebx,%esi
+	movl	32(%esp),%ebp
+	movl	(%esp),%ebx
+	movl	%edi,%eax
+	decl	%ebx
+	movl	%esi,%edi
+	movl	%eax,%esi
+	movl	%ebx,(%esp)
+	jnz	.L000start
+
+
+	movl	28(%esp),%edx
+	rorl	$1,%edi
+	movl	%esi,%eax
+	xorl	%edi,%esi
+	andl	$0xaaaaaaaa,%esi
+	xorl	%esi,%eax
+	xorl	%esi,%edi
+
+	roll	$23,%eax
+	movl	%eax,%esi
+	xorl	%edi,%eax
+	andl	$0x03fc03fc,%eax
+	xorl	%eax,%esi
+	xorl	%eax,%edi
+
+	roll	$10,%esi
+	movl	%esi,%eax
+	xorl	%edi,%esi
+	andl	$0x33333333,%esi
+	xorl	%esi,%eax
+	xorl	%esi,%edi
+
+	roll	$18,%edi
+	movl	%edi,%esi
+	xorl	%eax,%edi
+	andl	$0xfff0000f,%edi
+	xorl	%edi,%esi
+	xorl	%edi,%eax
+
+	roll	$12,%esi
+	movl	%esi,%edi
+	xorl	%eax,%esi
+	andl	$0xf0f0f0f0,%esi
+	xorl	%esi,%edi
+	xorl	%esi,%eax
+
+	rorl	$4,%eax
+	movl	%eax,(%edx)
+	movl	%edi,4(%edx)
+	addl	$8,%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	fcrypt_body,.-.L_fcrypt_body_begin
+#endif
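
A note for readers of the unrolled loop above: each repeated block is one DES round of fcrypt_body. The opening arithmetic against 36(%esp) and 40(%esp) folds the crypt(3) salt into the expanded right half, and the eight lookups at offsets 0x000-0x700 off %ebp index the DES_SPtrans S-box tables (0x100 bytes apart; the 0xfcfcfcfc/0xcfcfcfcf masks leave pre-scaled byte offsets behind). A minimal C sketch of the lookup step follows, modelled on OpenSSL's D_ENCRYPT idiom but with illustrative names and the salt step omitted; it is a sketch, not the generated code itself.

	#include <stdint.h>

	static uint32_t SP[8][64];	/* DES_SPtrans-style tables; contents omitted */

	/* One table-driven DES round: *L ^= f(R, subkeys).  Assumed
	 * layout only -- each 6-bit slice of the two mixed words picks
	 * one entry from one of the eight tables. */
	static void des_round(uint32_t *L, uint32_t R,
	                      uint32_t k_even, uint32_t k_odd)
	{
		uint32_t u = R ^ k_even;
		uint32_t t = R ^ k_odd;
		t = (t >> 4) | (t << 28);		/* the asm's rorl $4,%edx */
		*L ^= SP[0][(u >>  2) & 0x3f] ^ SP[2][(u >> 10) & 0x3f] ^
		      SP[4][(u >> 18) & 0x3f] ^ SP[6][(u >> 26) & 0x3f] ^
		      SP[1][(t >>  2) & 0x3f] ^ SP[3][(t >> 10) & 0x3f] ^
		      SP[5][(t >> 18) & 0x3f] ^ SP[7][(t >> 26) & 0x3f];
	}

The asm keeps the iteration counter at (%esp) (pushed as $25) and the DES_SPtrans pointer at 4(%esp), so all seven general-purpose registers stay free for the round itself.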


Property changes on: trunk/secure/lib/libcrypto/i386/crypt586.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
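
The straight-line tail after the jnz (the rorl/roll runs with masks 0xaaaaaaaa, 0x03fc03fc, 0x33333333, 0xfff0000f and 0xf0f0f0f0) computes DES's final permutation as a chain of masked bit exchanges rather than per-bit moves, before the result is stored through %edx. The underlying idiom, in the spirit of OpenSSL's PERM_OP macro (a sketch; names are illustrative, and the asm uses rotates where the shift n appears here):

	#include <stdint.h>

	/* Delta swap: exchange the bits of *b selected by m with the
	 * bits of *a at positions (m << n). */
	static void swap_masked(uint32_t *a, uint32_t *b, int n, uint32_t m)
	{
		uint32_t t = ((*a >> n) ^ *b) & m;	/* differing bits, aligned */
		*b ^= t;
		*a ^= t << n;
	}

Five such steps, with rotations in between to line up the next bit group, realize the whole 64-bit output permutation.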
Deleted: trunk/secure/lib/libcrypto/i386/crypt586.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/crypt586.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/crypt586.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,876 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/crypt586.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"crypt586.s"
-.text
-.globl	fcrypt_body
-.type	fcrypt_body,@function
-.align	16
-fcrypt_body:
-.L_fcrypt_body_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-
-
-	xorl	%edi,%edi
-	xorl	%esi,%esi
-	leal	DES_SPtrans,%edx
-	pushl	%edx
-	movl	28(%esp),%ebp
-	pushl	$25
-.L000start:
-
-
-	movl	36(%esp),%eax
-	movl	%esi,%edx
-	shrl	$16,%edx
-	movl	40(%esp),%ecx
-	xorl	%esi,%edx
-	andl	%edx,%eax
-	andl	%ecx,%edx
-	movl	%eax,%ebx
-	shll	$16,%ebx
-	movl	%edx,%ecx
-	shll	$16,%ecx
-	xorl	%ebx,%eax
-	xorl	%ecx,%edx
-	movl	(%ebp),%ebx
-	xorl	%ebx,%eax
-	movl	4(%ebp),%ecx
-	xorl	%esi,%eax
-	xorl	%esi,%edx
-	xorl	%ecx,%edx
-	andl	$0xfcfcfcfc,%eax
-	xorl	%ebx,%ebx
-	andl	$0xcfcfcfcf,%edx
-	xorl	%ecx,%ecx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	movl	4(%esp),%ebp
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	movl	0x600(%ebp,%ebx,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x700(%ebp,%ecx,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x400(%ebp,%eax,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x500(%ebp,%edx,1),%ebx
-	xorl	%ebx,%edi
-	movl	32(%esp),%ebp
-
-
-	movl	36(%esp),%eax
-	movl	%edi,%edx
-	shrl	$16,%edx
-	movl	40(%esp),%ecx
-	xorl	%edi,%edx
-	andl	%edx,%eax
-	andl	%ecx,%edx
-	movl	%eax,%ebx
-	shll	$16,%ebx
-	movl	%edx,%ecx
-	shll	$16,%ecx
-	xorl	%ebx,%eax
-	xorl	%ecx,%edx
-	movl	8(%ebp),%ebx
-	xorl	%ebx,%eax
-	movl	12(%ebp),%ecx
-	xorl	%edi,%eax
-	xorl	%edi,%edx
-	xorl	%ecx,%edx
-	andl	$0xfcfcfcfc,%eax
-	xorl	%ebx,%ebx
-	andl	$0xcfcfcfcf,%edx
-	xorl	%ecx,%ecx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	movl	4(%esp),%ebp
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	movl	0x600(%ebp,%ebx,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x700(%ebp,%ecx,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x400(%ebp,%eax,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x500(%ebp,%edx,1),%ebx
-	xorl	%ebx,%esi
-	movl	32(%esp),%ebp
-
-
-	movl	36(%esp),%eax
-	movl	%esi,%edx
-	shrl	$16,%edx
-	movl	40(%esp),%ecx
-	xorl	%esi,%edx
-	andl	%edx,%eax
-	andl	%ecx,%edx
-	movl	%eax,%ebx
-	shll	$16,%ebx
-	movl	%edx,%ecx
-	shll	$16,%ecx
-	xorl	%ebx,%eax
-	xorl	%ecx,%edx
-	movl	16(%ebp),%ebx
-	xorl	%ebx,%eax
-	movl	20(%ebp),%ecx
-	xorl	%esi,%eax
-	xorl	%esi,%edx
-	xorl	%ecx,%edx
-	andl	$0xfcfcfcfc,%eax
-	xorl	%ebx,%ebx
-	andl	$0xcfcfcfcf,%edx
-	xorl	%ecx,%ecx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	movl	4(%esp),%ebp
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	movl	0x600(%ebp,%ebx,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x700(%ebp,%ecx,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x400(%ebp,%eax,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x500(%ebp,%edx,1),%ebx
-	xorl	%ebx,%edi
-	movl	32(%esp),%ebp
-
-
-	movl	36(%esp),%eax
-	movl	%edi,%edx
-	shrl	$16,%edx
-	movl	40(%esp),%ecx
-	xorl	%edi,%edx
-	andl	%edx,%eax
-	andl	%ecx,%edx
-	movl	%eax,%ebx
-	shll	$16,%ebx
-	movl	%edx,%ecx
-	shll	$16,%ecx
-	xorl	%ebx,%eax
-	xorl	%ecx,%edx
-	movl	24(%ebp),%ebx
-	xorl	%ebx,%eax
-	movl	28(%ebp),%ecx
-	xorl	%edi,%eax
-	xorl	%edi,%edx
-	xorl	%ecx,%edx
-	andl	$0xfcfcfcfc,%eax
-	xorl	%ebx,%ebx
-	andl	$0xcfcfcfcf,%edx
-	xorl	%ecx,%ecx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	movl	4(%esp),%ebp
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	movl	0x600(%ebp,%ebx,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x700(%ebp,%ecx,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x400(%ebp,%eax,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x500(%ebp,%edx,1),%ebx
-	xorl	%ebx,%esi
-	movl	32(%esp),%ebp
-
-
-	movl	36(%esp),%eax
-	movl	%esi,%edx
-	shrl	$16,%edx
-	movl	40(%esp),%ecx
-	xorl	%esi,%edx
-	andl	%edx,%eax
-	andl	%ecx,%edx
-	movl	%eax,%ebx
-	shll	$16,%ebx
-	movl	%edx,%ecx
-	shll	$16,%ecx
-	xorl	%ebx,%eax
-	xorl	%ecx,%edx
-	movl	32(%ebp),%ebx
-	xorl	%ebx,%eax
-	movl	36(%ebp),%ecx
-	xorl	%esi,%eax
-	xorl	%esi,%edx
-	xorl	%ecx,%edx
-	andl	$0xfcfcfcfc,%eax
-	xorl	%ebx,%ebx
-	andl	$0xcfcfcfcf,%edx
-	xorl	%ecx,%ecx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	movl	4(%esp),%ebp
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	movl	0x600(%ebp,%ebx,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x700(%ebp,%ecx,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x400(%ebp,%eax,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x500(%ebp,%edx,1),%ebx
-	xorl	%ebx,%edi
-	movl	32(%esp),%ebp
-
-
-	movl	36(%esp),%eax
-	movl	%edi,%edx
-	shrl	$16,%edx
-	movl	40(%esp),%ecx
-	xorl	%edi,%edx
-	andl	%edx,%eax
-	andl	%ecx,%edx
-	movl	%eax,%ebx
-	shll	$16,%ebx
-	movl	%edx,%ecx
-	shll	$16,%ecx
-	xorl	%ebx,%eax
-	xorl	%ecx,%edx
-	movl	40(%ebp),%ebx
-	xorl	%ebx,%eax
-	movl	44(%ebp),%ecx
-	xorl	%edi,%eax
-	xorl	%edi,%edx
-	xorl	%ecx,%edx
-	andl	$0xfcfcfcfc,%eax
-	xorl	%ebx,%ebx
-	andl	$0xcfcfcfcf,%edx
-	xorl	%ecx,%ecx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	movl	4(%esp),%ebp
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	movl	0x600(%ebp,%ebx,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x700(%ebp,%ecx,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x400(%ebp,%eax,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x500(%ebp,%edx,1),%ebx
-	xorl	%ebx,%esi
-	movl	32(%esp),%ebp
-
-
-	movl	36(%esp),%eax
-	movl	%esi,%edx
-	shrl	$16,%edx
-	movl	40(%esp),%ecx
-	xorl	%esi,%edx
-	andl	%edx,%eax
-	andl	%ecx,%edx
-	movl	%eax,%ebx
-	shll	$16,%ebx
-	movl	%edx,%ecx
-	shll	$16,%ecx
-	xorl	%ebx,%eax
-	xorl	%ecx,%edx
-	movl	48(%ebp),%ebx
-	xorl	%ebx,%eax
-	movl	52(%ebp),%ecx
-	xorl	%esi,%eax
-	xorl	%esi,%edx
-	xorl	%ecx,%edx
-	andl	$0xfcfcfcfc,%eax
-	xorl	%ebx,%ebx
-	andl	$0xcfcfcfcf,%edx
-	xorl	%ecx,%ecx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	movl	4(%esp),%ebp
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	movl	0x600(%ebp,%ebx,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x700(%ebp,%ecx,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x400(%ebp,%eax,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x500(%ebp,%edx,1),%ebx
-	xorl	%ebx,%edi
-	movl	32(%esp),%ebp
-
-
-	movl	36(%esp),%eax
-	movl	%edi,%edx
-	shrl	$16,%edx
-	movl	40(%esp),%ecx
-	xorl	%edi,%edx
-	andl	%edx,%eax
-	andl	%ecx,%edx
-	movl	%eax,%ebx
-	shll	$16,%ebx
-	movl	%edx,%ecx
-	shll	$16,%ecx
-	xorl	%ebx,%eax
-	xorl	%ecx,%edx
-	movl	56(%ebp),%ebx
-	xorl	%ebx,%eax
-	movl	60(%ebp),%ecx
-	xorl	%edi,%eax
-	xorl	%edi,%edx
-	xorl	%ecx,%edx
-	andl	$0xfcfcfcfc,%eax
-	xorl	%ebx,%ebx
-	andl	$0xcfcfcfcf,%edx
-	xorl	%ecx,%ecx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	movl	4(%esp),%ebp
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	movl	0x600(%ebp,%ebx,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x700(%ebp,%ecx,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x400(%ebp,%eax,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x500(%ebp,%edx,1),%ebx
-	xorl	%ebx,%esi
-	movl	32(%esp),%ebp
-
-
-	movl	36(%esp),%eax
-	movl	%esi,%edx
-	shrl	$16,%edx
-	movl	40(%esp),%ecx
-	xorl	%esi,%edx
-	andl	%edx,%eax
-	andl	%ecx,%edx
-	movl	%eax,%ebx
-	shll	$16,%ebx
-	movl	%edx,%ecx
-	shll	$16,%ecx
-	xorl	%ebx,%eax
-	xorl	%ecx,%edx
-	movl	64(%ebp),%ebx
-	xorl	%ebx,%eax
-	movl	68(%ebp),%ecx
-	xorl	%esi,%eax
-	xorl	%esi,%edx
-	xorl	%ecx,%edx
-	andl	$0xfcfcfcfc,%eax
-	xorl	%ebx,%ebx
-	andl	$0xcfcfcfcf,%edx
-	xorl	%ecx,%ecx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	movl	4(%esp),%ebp
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	movl	0x600(%ebp,%ebx,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x700(%ebp,%ecx,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x400(%ebp,%eax,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x500(%ebp,%edx,1),%ebx
-	xorl	%ebx,%edi
-	movl	32(%esp),%ebp
-
-
-	movl	36(%esp),%eax
-	movl	%edi,%edx
-	shrl	$16,%edx
-	movl	40(%esp),%ecx
-	xorl	%edi,%edx
-	andl	%edx,%eax
-	andl	%ecx,%edx
-	movl	%eax,%ebx
-	shll	$16,%ebx
-	movl	%edx,%ecx
-	shll	$16,%ecx
-	xorl	%ebx,%eax
-	xorl	%ecx,%edx
-	movl	72(%ebp),%ebx
-	xorl	%ebx,%eax
-	movl	76(%ebp),%ecx
-	xorl	%edi,%eax
-	xorl	%edi,%edx
-	xorl	%ecx,%edx
-	andl	$0xfcfcfcfc,%eax
-	xorl	%ebx,%ebx
-	andl	$0xcfcfcfcf,%edx
-	xorl	%ecx,%ecx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	movl	4(%esp),%ebp
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	movl	0x600(%ebp,%ebx,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x700(%ebp,%ecx,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x400(%ebp,%eax,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x500(%ebp,%edx,1),%ebx
-	xorl	%ebx,%esi
-	movl	32(%esp),%ebp
-
-
-	movl	36(%esp),%eax
-	movl	%esi,%edx
-	shrl	$16,%edx
-	movl	40(%esp),%ecx
-	xorl	%esi,%edx
-	andl	%edx,%eax
-	andl	%ecx,%edx
-	movl	%eax,%ebx
-	shll	$16,%ebx
-	movl	%edx,%ecx
-	shll	$16,%ecx
-	xorl	%ebx,%eax
-	xorl	%ecx,%edx
-	movl	80(%ebp),%ebx
-	xorl	%ebx,%eax
-	movl	84(%ebp),%ecx
-	xorl	%esi,%eax
-	xorl	%esi,%edx
-	xorl	%ecx,%edx
-	andl	$0xfcfcfcfc,%eax
-	xorl	%ebx,%ebx
-	andl	$0xcfcfcfcf,%edx
-	xorl	%ecx,%ecx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	movl	4(%esp),%ebp
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	movl	0x600(%ebp,%ebx,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x700(%ebp,%ecx,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x400(%ebp,%eax,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x500(%ebp,%edx,1),%ebx
-	xorl	%ebx,%edi
-	movl	32(%esp),%ebp
-
-
-	movl	36(%esp),%eax
-	movl	%edi,%edx
-	shrl	$16,%edx
-	movl	40(%esp),%ecx
-	xorl	%edi,%edx
-	andl	%edx,%eax
-	andl	%ecx,%edx
-	movl	%eax,%ebx
-	shll	$16,%ebx
-	movl	%edx,%ecx
-	shll	$16,%ecx
-	xorl	%ebx,%eax
-	xorl	%ecx,%edx
-	movl	88(%ebp),%ebx
-	xorl	%ebx,%eax
-	movl	92(%ebp),%ecx
-	xorl	%edi,%eax
-	xorl	%edi,%edx
-	xorl	%ecx,%edx
-	andl	$0xfcfcfcfc,%eax
-	xorl	%ebx,%ebx
-	andl	$0xcfcfcfcf,%edx
-	xorl	%ecx,%ecx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	movl	4(%esp),%ebp
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	movl	0x600(%ebp,%ebx,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x700(%ebp,%ecx,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x400(%ebp,%eax,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x500(%ebp,%edx,1),%ebx
-	xorl	%ebx,%esi
-	movl	32(%esp),%ebp
-
-
-	movl	36(%esp),%eax
-	movl	%esi,%edx
-	shrl	$16,%edx
-	movl	40(%esp),%ecx
-	xorl	%esi,%edx
-	andl	%edx,%eax
-	andl	%ecx,%edx
-	movl	%eax,%ebx
-	shll	$16,%ebx
-	movl	%edx,%ecx
-	shll	$16,%ecx
-	xorl	%ebx,%eax
-	xorl	%ecx,%edx
-	movl	96(%ebp),%ebx
-	xorl	%ebx,%eax
-	movl	100(%ebp),%ecx
-	xorl	%esi,%eax
-	xorl	%esi,%edx
-	xorl	%ecx,%edx
-	andl	$0xfcfcfcfc,%eax
-	xorl	%ebx,%ebx
-	andl	$0xcfcfcfcf,%edx
-	xorl	%ecx,%ecx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	movl	4(%esp),%ebp
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	movl	0x600(%ebp,%ebx,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x700(%ebp,%ecx,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x400(%ebp,%eax,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x500(%ebp,%edx,1),%ebx
-	xorl	%ebx,%edi
-	movl	32(%esp),%ebp
-
-
-	movl	36(%esp),%eax
-	movl	%edi,%edx
-	shrl	$16,%edx
-	movl	40(%esp),%ecx
-	xorl	%edi,%edx
-	andl	%edx,%eax
-	andl	%ecx,%edx
-	movl	%eax,%ebx
-	shll	$16,%ebx
-	movl	%edx,%ecx
-	shll	$16,%ecx
-	xorl	%ebx,%eax
-	xorl	%ecx,%edx
-	movl	104(%ebp),%ebx
-	xorl	%ebx,%eax
-	movl	108(%ebp),%ecx
-	xorl	%edi,%eax
-	xorl	%edi,%edx
-	xorl	%ecx,%edx
-	andl	$0xfcfcfcfc,%eax
-	xorl	%ebx,%ebx
-	andl	$0xcfcfcfcf,%edx
-	xorl	%ecx,%ecx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	movl	4(%esp),%ebp
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	movl	0x600(%ebp,%ebx,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x700(%ebp,%ecx,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x400(%ebp,%eax,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x500(%ebp,%edx,1),%ebx
-	xorl	%ebx,%esi
-	movl	32(%esp),%ebp
-
-
-	movl	36(%esp),%eax
-	movl	%esi,%edx
-	shrl	$16,%edx
-	movl	40(%esp),%ecx
-	xorl	%esi,%edx
-	andl	%edx,%eax
-	andl	%ecx,%edx
-	movl	%eax,%ebx
-	shll	$16,%ebx
-	movl	%edx,%ecx
-	shll	$16,%ecx
-	xorl	%ebx,%eax
-	xorl	%ecx,%edx
-	movl	112(%ebp),%ebx
-	xorl	%ebx,%eax
-	movl	116(%ebp),%ecx
-	xorl	%esi,%eax
-	xorl	%esi,%edx
-	xorl	%ecx,%edx
-	andl	$0xfcfcfcfc,%eax
-	xorl	%ebx,%ebx
-	andl	$0xcfcfcfcf,%edx
-	xorl	%ecx,%ecx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	movl	4(%esp),%ebp
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	movl	0x600(%ebp,%ebx,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x700(%ebp,%ecx,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x400(%ebp,%eax,1),%ebx
-	xorl	%ebx,%edi
-	movl	0x500(%ebp,%edx,1),%ebx
-	xorl	%ebx,%edi
-	movl	32(%esp),%ebp
-
-
-	movl	36(%esp),%eax
-	movl	%edi,%edx
-	shrl	$16,%edx
-	movl	40(%esp),%ecx
-	xorl	%edi,%edx
-	andl	%edx,%eax
-	andl	%ecx,%edx
-	movl	%eax,%ebx
-	shll	$16,%ebx
-	movl	%edx,%ecx
-	shll	$16,%ecx
-	xorl	%ebx,%eax
-	xorl	%ecx,%edx
-	movl	120(%ebp),%ebx
-	xorl	%ebx,%eax
-	movl	124(%ebp),%ecx
-	xorl	%edi,%eax
-	xorl	%edi,%edx
-	xorl	%ecx,%edx
-	andl	$0xfcfcfcfc,%eax
-	xorl	%ebx,%ebx
-	andl	$0xcfcfcfcf,%edx
-	xorl	%ecx,%ecx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	movl	4(%esp),%ebp
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	movl	0x600(%ebp,%ebx,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x700(%ebp,%ecx,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x400(%ebp,%eax,1),%ebx
-	xorl	%ebx,%esi
-	movl	0x500(%ebp,%edx,1),%ebx
-	xorl	%ebx,%esi
-	movl	32(%esp),%ebp
-	movl	(%esp),%ebx
-	movl	%edi,%eax
-	decl	%ebx
-	movl	%esi,%edi
-	movl	%eax,%esi
-	movl	%ebx,(%esp)
-	jnz	.L000start
-
-
-	movl	28(%esp),%edx
-	rorl	$1,%edi
-	movl	%esi,%eax
-	xorl	%edi,%esi
-	andl	$0xaaaaaaaa,%esi
-	xorl	%esi,%eax
-	xorl	%esi,%edi
-
-	roll	$23,%eax
-	movl	%eax,%esi
-	xorl	%edi,%eax
-	andl	$0x03fc03fc,%eax
-	xorl	%eax,%esi
-	xorl	%eax,%edi
-
-	roll	$10,%esi
-	movl	%esi,%eax
-	xorl	%edi,%esi
-	andl	$0x33333333,%esi
-	xorl	%esi,%eax
-	xorl	%esi,%edi
-
-	roll	$18,%edi
-	movl	%edi,%esi
-	xorl	%eax,%edi
-	andl	$0xfff0000f,%edi
-	xorl	%edi,%esi
-	xorl	%edi,%eax
-
-	roll	$12,%esi
-	movl	%esi,%edi
-	xorl	%eax,%esi
-	andl	$0xf0f0f0f0,%esi
-	xorl	%esi,%edi
-	xorl	%esi,%eax
-
-	rorl	$4,%eax
-	movl	%eax,(%edx)
-	movl	%edi,4(%edx)
-	addl	$8,%esp
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	fcrypt_body,.-.L_fcrypt_body_begin
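
The des-586.S addition below reuses the same eight-table round kernel, now reading the key schedule through %ecx (saved at (%esp)) with %ebp pointing at the tables, and like crypt586.S it is wrapped in #ifdef PIC so the preprocessor selects the position-independent or plain variant at build time. _x86_DES_encrypt consumes the 16 subkey pairs at byte offsets 0..124 in ascending order, while _x86_DES_decrypt is the identical body with the offsets walked from 124 down to 0. In C terms, on top of a des_round() helper like the sketch above (the final half-swap conventions are elided):

	/* ks: 16 rounds x 2 subkey words, matching the asm's 0..124
	 * byte offsets; dec selects reverse key order for decryption. */
	void des_rounds(uint32_t *L, uint32_t *R,
	                const uint32_t ks[32], int dec)
	{
		for (int r = 0; r < 16; r++) {
			int k = dec ? (15 - r) * 2 : r * 2;
			des_round(L, *R, ks[k], ks[k + 1]);	/* *L ^= f(*R, key) */
			uint32_t t = *L; *L = *R; *R = t;	/* alternate halves */
		}
	}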

Added: trunk/secure/lib/libcrypto/i386/des-586.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/des-586.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/des-586.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,3680 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/des-586.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from des-586.pl.
+#ifdef PIC
+.file	"des-586.S"
+.text
+.globl	DES_SPtrans
+.type	_x86_DES_encrypt,@function
+.align	16
+_x86_DES_encrypt:
+	pushl	%ecx
+
+	movl	(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	4(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	8(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	12(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	16(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	20(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	24(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	28(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	32(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	36(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	40(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	44(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	48(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	52(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	56(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	60(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	64(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	68(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	72(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	76(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	80(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	84(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	88(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	92(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	96(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	100(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	104(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	108(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	112(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	116(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	120(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	124(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+	addl	$4,%esp
+	ret
+.size	_x86_DES_encrypt,.-_x86_DES_encrypt
+.type	_x86_DES_decrypt,@function
+.align	16
+_x86_DES_decrypt:
+	pushl	%ecx
+
+	movl	120(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	124(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	112(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	116(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	104(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	108(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	96(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	100(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	88(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	92(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	80(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	84(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	72(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	76(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	64(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	68(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	56(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	60(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	48(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	52(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	40(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	44(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	32(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	36(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	24(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	28(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	16(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	20(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	8(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	12(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	4(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+	addl	$4,%esp
+	ret
+.size	_x86_DES_decrypt,.-_x86_DES_decrypt
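+
+# DES_encrypt1(DES_LONG *data, DES_key_schedule *ks, int enc): one DES
+# block.  The rol/xor/and sequences before and after the round-function
+# call compute the initial and final permutations (IP and IP^-1) with
+# rotate-and-mask swaps instead of table lookups; the call/pop at
+# .L000pic_point locates DES_SPtrans position-independently.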
+.globl	DES_encrypt1
+.type	DES_encrypt1,@function
+.align	16
+DES_encrypt1:
+.L_DES_encrypt1_begin:
+	pushl	%esi
+	pushl	%edi
+
+
+	movl	12(%esp),%esi
+	xorl	%ecx,%ecx
+	pushl	%ebx
+	pushl	%ebp
+	movl	(%esi),%eax
+	movl	28(%esp),%ebx
+	movl	4(%esi),%edi
+
+
+	roll	$4,%eax
+	movl	%eax,%esi
+	xorl	%edi,%eax
+	andl	$0xf0f0f0f0,%eax
+	xorl	%eax,%esi
+	xorl	%eax,%edi
+
+	roll	$20,%edi
+	movl	%edi,%eax
+	xorl	%esi,%edi
+	andl	$0xfff0000f,%edi
+	xorl	%edi,%eax
+	xorl	%edi,%esi
+
+	roll	$14,%eax
+	movl	%eax,%edi
+	xorl	%esi,%eax
+	andl	$0x33333333,%eax
+	xorl	%eax,%edi
+	xorl	%eax,%esi
+
+	roll	$22,%esi
+	movl	%esi,%eax
+	xorl	%edi,%esi
+	andl	$0x03fc03fc,%esi
+	xorl	%esi,%eax
+	xorl	%esi,%edi
+
+	roll	$9,%eax
+	movl	%eax,%esi
+	xorl	%edi,%eax
+	andl	$0xaaaaaaaa,%eax
+	xorl	%eax,%esi
+	xorl	%eax,%edi
+
+	roll	$1,%edi
+	call	.L000pic_point
+.L000pic_point:
+	popl	%ebp
+	leal	DES_SPtrans-.L000pic_point(%ebp),%ebp
+	movl	24(%esp),%ecx
+	cmpl	$0,%ebx
+	je	.L001decrypt
+	call	_x86_DES_encrypt
+	jmp	.L002done
+.L001decrypt:
+	call	_x86_DES_decrypt
+.L002done:
+
+
+	movl	20(%esp),%edx
+	rorl	$1,%esi
+	movl	%edi,%eax
+	xorl	%esi,%edi
+	andl	$0xaaaaaaaa,%edi
+	xorl	%edi,%eax
+	xorl	%edi,%esi
+
+	roll	$23,%eax
+	movl	%eax,%edi
+	xorl	%esi,%eax
+	andl	$0x03fc03fc,%eax
+	xorl	%eax,%edi
+	xorl	%eax,%esi
+
+	roll	$10,%edi
+	movl	%edi,%eax
+	xorl	%esi,%edi
+	andl	$0x33333333,%edi
+	xorl	%edi,%eax
+	xorl	%edi,%esi
+
+	roll	$18,%esi
+	movl	%esi,%edi
+	xorl	%eax,%esi
+	andl	$0xfff0000f,%esi
+	xorl	%esi,%edi
+	xorl	%esi,%eax
+
+	roll	$12,%edi
+	movl	%edi,%esi
+	xorl	%eax,%edi
+	andl	$0xf0f0f0f0,%edi
+	xorl	%edi,%esi
+	xorl	%edi,%eax
+
+	rorl	$4,%eax
+	movl	%eax,(%edx)
+	movl	%esi,4(%edx)
+	popl	%ebp
+	popl	%ebx
+	popl	%edi
+	popl	%esi
+	ret
+.size	DES_encrypt1,.-.L_DES_encrypt1_begin
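+
+# DES_encrypt2: like DES_encrypt1 but without IP/IP^-1 (the halves are
+# only rotated by 3); it is the building block the triple-DES entry
+# points below chain three times.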
+.globl	DES_encrypt2
+.type	DES_encrypt2,@function
+.align	16
+DES_encrypt2:
+.L_DES_encrypt2_begin:
+	pushl	%esi
+	pushl	%edi
+
+
+	movl	12(%esp),%eax
+	xorl	%ecx,%ecx
+	pushl	%ebx
+	pushl	%ebp
+	movl	(%eax),%esi
+	movl	28(%esp),%ebx
+	roll	$3,%esi
+	movl	4(%eax),%edi
+	roll	$3,%edi
+	call	.L003pic_point
+.L003pic_point:
+	popl	%ebp
+	leal	DES_SPtrans-.L003pic_point(%ebp),%ebp
+	movl	24(%esp),%ecx
+	cmpl	$0,%ebx
+	je	.L004decrypt
+	call	_x86_DES_encrypt
+	jmp	.L005done
+.L004decrypt:
+	call	_x86_DES_decrypt
+.L005done:
+
+
+	rorl	$3,%edi
+	movl	20(%esp),%eax
+	rorl	$3,%esi
+	movl	%edi,(%eax)
+	movl	%esi,4(%eax)
+	popl	%ebp
+	popl	%ebx
+	popl	%edi
+	popl	%esi
+	ret
+.size	DES_encrypt2,.-.L_DES_encrypt2_begin
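+
+# DES_encrypt3(DES_LONG *data, ks1, ks2, ks3): EDE triple-DES on one
+# block.  IP is applied once, DES_encrypt2 runs encrypt(ks1),
+# decrypt(ks2), encrypt(ks3), then IP^-1 is applied.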
+.globl	DES_encrypt3
+.type	DES_encrypt3,@function
+.align	16
+DES_encrypt3:
+.L_DES_encrypt3_begin:
+	pushl	%ebx
+	movl	8(%esp),%ebx
+	pushl	%ebp
+	pushl	%esi
+	pushl	%edi
+
+
+	movl	(%ebx),%edi
+	movl	4(%ebx),%esi
+	subl	$12,%esp
+
+
+	roll	$4,%edi
+	movl	%edi,%edx
+	xorl	%esi,%edi
+	andl	$0xf0f0f0f0,%edi
+	xorl	%edi,%edx
+	xorl	%edi,%esi
+
+	roll	$20,%esi
+	movl	%esi,%edi
+	xorl	%edx,%esi
+	andl	$0xfff0000f,%esi
+	xorl	%esi,%edi
+	xorl	%esi,%edx
+
+	roll	$14,%edi
+	movl	%edi,%esi
+	xorl	%edx,%edi
+	andl	$0x33333333,%edi
+	xorl	%edi,%esi
+	xorl	%edi,%edx
+
+	roll	$22,%edx
+	movl	%edx,%edi
+	xorl	%esi,%edx
+	andl	$0x03fc03fc,%edx
+	xorl	%edx,%edi
+	xorl	%edx,%esi
+
+	roll	$9,%edi
+	movl	%edi,%edx
+	xorl	%esi,%edi
+	andl	$0xaaaaaaaa,%edi
+	xorl	%edi,%edx
+	xorl	%edi,%esi
+
+	rorl	$3,%edx
+	rorl	$2,%esi
+	movl	%esi,4(%ebx)
+	movl	36(%esp),%eax
+	movl	%edx,(%ebx)
+	movl	40(%esp),%edi
+	movl	44(%esp),%esi
+	movl	$1,8(%esp)
+	movl	%eax,4(%esp)
+	movl	%ebx,(%esp)
+	call	.L_DES_encrypt2_begin
+	movl	$0,8(%esp)
+	movl	%edi,4(%esp)
+	movl	%ebx,(%esp)
+	call	.L_DES_encrypt2_begin
+	movl	$1,8(%esp)
+	movl	%esi,4(%esp)
+	movl	%ebx,(%esp)
+	call	.L_DES_encrypt2_begin
+	addl	$12,%esp
+	movl	(%ebx),%edi
+	movl	4(%ebx),%esi
+
+
+	roll	$2,%esi
+	roll	$3,%edi
+	movl	%edi,%eax
+	xorl	%esi,%edi
+	andl	$0xaaaaaaaa,%edi
+	xorl	%edi,%eax
+	xorl	%edi,%esi
+
+	roll	$23,%eax
+	movl	%eax,%edi
+	xorl	%esi,%eax
+	andl	$0x03fc03fc,%eax
+	xorl	%eax,%edi
+	xorl	%eax,%esi
+
+	roll	$10,%edi
+	movl	%edi,%eax
+	xorl	%esi,%edi
+	andl	$0x33333333,%edi
+	xorl	%edi,%eax
+	xorl	%edi,%esi
+
+	roll	$18,%esi
+	movl	%esi,%edi
+	xorl	%eax,%esi
+	andl	$0xfff0000f,%esi
+	xorl	%esi,%edi
+	xorl	%esi,%eax
+
+	roll	$12,%edi
+	movl	%edi,%esi
+	xorl	%eax,%edi
+	andl	$0xf0f0f0f0,%edi
+	xorl	%edi,%esi
+	xorl	%edi,%eax
+
+	rorl	$4,%eax
+	movl	%eax,(%ebx)
+	movl	%esi,4(%ebx)
+	popl	%edi
+	popl	%esi
+	popl	%ebp
+	popl	%ebx
+	ret
+.size	DES_encrypt3,.-.L_DES_encrypt3_begin
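+
+# DES_decrypt3: mirror of DES_encrypt3, running DES_encrypt2 as
+# decrypt(ks3), encrypt(ks2), decrypt(ks1).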
+.globl	DES_decrypt3
+.type	DES_decrypt3,@function
+.align	16
+DES_decrypt3:
+.L_DES_decrypt3_begin:
+	pushl	%ebx
+	movl	8(%esp),%ebx
+	pushl	%ebp
+	pushl	%esi
+	pushl	%edi
+
+
+	movl	(%ebx),%edi
+	movl	4(%ebx),%esi
+	subl	$12,%esp
+
+
+	roll	$4,%edi
+	movl	%edi,%edx
+	xorl	%esi,%edi
+	andl	$0xf0f0f0f0,%edi
+	xorl	%edi,%edx
+	xorl	%edi,%esi
+
+	roll	$20,%esi
+	movl	%esi,%edi
+	xorl	%edx,%esi
+	andl	$0xfff0000f,%esi
+	xorl	%esi,%edi
+	xorl	%esi,%edx
+
+	roll	$14,%edi
+	movl	%edi,%esi
+	xorl	%edx,%edi
+	andl	$0x33333333,%edi
+	xorl	%edi,%esi
+	xorl	%edi,%edx
+
+	roll	$22,%edx
+	movl	%edx,%edi
+	xorl	%esi,%edx
+	andl	$0x03fc03fc,%edx
+	xorl	%edx,%edi
+	xorl	%edx,%esi
+
+	roll	$9,%edi
+	movl	%edi,%edx
+	xorl	%esi,%edi
+	andl	$0xaaaaaaaa,%edi
+	xorl	%edi,%edx
+	xorl	%edi,%esi
+
+	rorl	$3,%edx
+	rorl	$2,%esi
+	movl	%esi,4(%ebx)
+	movl	36(%esp),%esi
+	movl	%edx,(%ebx)
+	movl	40(%esp),%edi
+	movl	44(%esp),%eax
+	movl	$0,8(%esp)
+	movl	%eax,4(%esp)
+	movl	%ebx,(%esp)
+	call	.L_DES_encrypt2_begin
+	movl	$1,8(%esp)
+	movl	%edi,4(%esp)
+	movl	%ebx,(%esp)
+	call	.L_DES_encrypt2_begin
+	movl	$0,8(%esp)
+	movl	%esi,4(%esp)
+	movl	%ebx,(%esp)
+	call	.L_DES_encrypt2_begin
+	addl	$12,%esp
+	movl	(%ebx),%edi
+	movl	4(%ebx),%esi
+
+
+	roll	$2,%esi
+	roll	$3,%edi
+	movl	%edi,%eax
+	xorl	%esi,%edi
+	andl	$0xaaaaaaaa,%edi
+	xorl	%edi,%eax
+	xorl	%edi,%esi
+
+	roll	$23,%eax
+	movl	%eax,%edi
+	xorl	%esi,%eax
+	andl	$0x03fc03fc,%eax
+	xorl	%eax,%edi
+	xorl	%eax,%esi
+
+	roll	$10,%edi
+	movl	%edi,%eax
+	xorl	%esi,%edi
+	andl	$0x33333333,%edi
+	xorl	%edi,%eax
+	xorl	%edi,%esi
+
+	roll	$18,%esi
+	movl	%esi,%edi
+	xorl	%eax,%esi
+	andl	$0xfff0000f,%esi
+	xorl	%esi,%edi
+	xorl	%esi,%eax
+
+	roll	$12,%edi
+	movl	%edi,%esi
+	xorl	%eax,%edi
+	andl	$0xf0f0f0f0,%edi
+	xorl	%edi,%esi
+	xorl	%edi,%eax
+
+	rorl	$4,%eax
+	movl	%eax,(%ebx)
+	movl	%esi,4(%ebx)
+	popl	%edi
+	popl	%esi
+	popl	%ebp
+	popl	%ebx
+	ret
+.size	DES_decrypt3,.-.L_DES_decrypt3_begin
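+
+# DES_ncbc_encrypt(in, out, length, ks, ivec, enc): CBC mode around
+# DES_encrypt1.  The IV is copied onto the stack, whole 8-byte blocks go
+# through the main loops, and a 1-7 byte tail is dispatched through
+# .L011cbc_enc_jmp_table; the updated IV is written back at .L009finish.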
+.globl	DES_ncbc_encrypt
+.type	DES_ncbc_encrypt,@function
+.align	16
+DES_ncbc_encrypt:
+.L_DES_ncbc_encrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%ebp
+
+	movl	36(%esp),%ebx
+	movl	(%ebx),%esi
+	movl	4(%ebx),%edi
+	pushl	%edi
+	pushl	%esi
+	pushl	%edi
+	pushl	%esi
+	movl	%esp,%ebx
+	movl	36(%esp),%esi
+	movl	40(%esp),%edi
+
+	movl	56(%esp),%ecx
+
+	pushl	%ecx
+
+	movl	52(%esp),%eax
+	pushl	%eax
+	pushl	%ebx
+	cmpl	$0,%ecx
+	jz	.L006decrypt
+	andl	$4294967288,%ebp
+	movl	12(%esp),%eax
+	movl	16(%esp),%ebx
+	jz	.L007encrypt_finish
+.L008encrypt_loop:
+	movl	(%esi),%ecx
+	movl	4(%esi),%edx
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	movl	%eax,12(%esp)
+	movl	%ebx,16(%esp)
+	call	.L_DES_encrypt1_begin
+	movl	12(%esp),%eax
+	movl	16(%esp),%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L008encrypt_loop
+.L007encrypt_finish:
+	movl	56(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L009finish
+	call	.L010PIC_point
+.L010PIC_point:
+	popl	%edx
+	leal	.L011cbc_enc_jmp_table-.L010PIC_point(%edx),%ecx
+	movl	(%ecx,%ebp,4),%ebp
+	addl	%edx,%ebp
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+	jmp	*%ebp
+.L012ej7:
+	movb	6(%esi),%dh
+	shll	$8,%edx
+.L013ej6:
+	movb	5(%esi),%dh
+.L014ej5:
+	movb	4(%esi),%dl
+.L015ej4:
+	movl	(%esi),%ecx
+	jmp	.L016ejend
+.L017ej3:
+	movb	2(%esi),%ch
+	shll	$8,%ecx
+.L018ej2:
+	movb	1(%esi),%ch
+.L019ej1:
+	movb	(%esi),%cl
+.L016ejend:
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	movl	%eax,12(%esp)
+	movl	%ebx,16(%esp)
+	call	.L_DES_encrypt1_begin
+	movl	12(%esp),%eax
+	movl	16(%esp),%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	jmp	.L009finish
+.L006decrypt:
+	andl	$4294967288,%ebp
+	movl	20(%esp),%eax
+	movl	24(%esp),%ebx
+	jz	.L020decrypt_finish
+.L021decrypt_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%eax,12(%esp)
+	movl	%ebx,16(%esp)
+	call	.L_DES_encrypt1_begin
+	movl	12(%esp),%eax
+	movl	16(%esp),%ebx
+	movl	20(%esp),%ecx
+	movl	24(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%ecx,(%edi)
+	movl	%edx,4(%edi)
+	movl	%eax,20(%esp)
+	movl	%ebx,24(%esp)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L021decrypt_loop
+.L020decrypt_finish:
+	movl	56(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L009finish
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%eax,12(%esp)
+	movl	%ebx,16(%esp)
+	call	.L_DES_encrypt1_begin
+	movl	12(%esp),%eax
+	movl	16(%esp),%ebx
+	movl	20(%esp),%ecx
+	movl	24(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+.L022dj7:
+	rorl	$16,%edx
+	movb	%dl,6(%edi)
+	shrl	$16,%edx
+.L023dj6:
+	movb	%dh,5(%edi)
+.L024dj5:
+	movb	%dl,4(%edi)
+.L025dj4:
+	movl	%ecx,(%edi)
+	jmp	.L026djend
+.L027dj3:
+	rorl	$16,%ecx
+	movb	%cl,2(%edi)
+	shll	$16,%ecx
+# the last tail bytes are stored to the output buffer, as in dj7-dj3
+.L028dj2:
+	movb	%ch,1(%edi)
+.L029dj1:
+	movb	%cl,(%edi)
+.L026djend:
+	jmp	.L009finish
+.L009finish:
+	movl	64(%esp),%ecx
+	addl	$28,%esp
+	movl	%eax,(%ecx)
+	movl	%ebx,4(%ecx)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	64
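+# Per-length entry points for the partial final block; entry N copies N
+# input bytes (offsets are relative to .L010PIC_point so the table stays
+# position-independent).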
+.L011cbc_enc_jmp_table:
+.long	0
+.long	.L019ej1-.L010PIC_point
+.long	.L018ej2-.L010PIC_point
+.long	.L017ej3-.L010PIC_point
+.long	.L015ej4-.L010PIC_point
+.long	.L014ej5-.L010PIC_point
+.long	.L013ej6-.L010PIC_point
+.long	.L012ej7-.L010PIC_point
+.align	64
+.size	DES_ncbc_encrypt,.-.L_DES_ncbc_encrypt_begin
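+
+# DES_ede3_cbc_encrypt(in, out, length, ks1, ks2, ks3, ivec, enc): CBC
+# around DES_encrypt3/DES_decrypt3; same stack layout and tail handling
+# as DES_ncbc_encrypt, with all three key schedules pushed per call.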
+.globl	DES_ede3_cbc_encrypt
+.type	DES_ede3_cbc_encrypt,@function
+.align	16
+DES_ede3_cbc_encrypt:
+.L_DES_ede3_cbc_encrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%ebp
+
+	movl	44(%esp),%ebx
+	movl	(%ebx),%esi
+	movl	4(%ebx),%edi
+	pushl	%edi
+	pushl	%esi
+	pushl	%edi
+	pushl	%esi
+	movl	%esp,%ebx
+	movl	36(%esp),%esi
+	movl	40(%esp),%edi
+
+	movl	64(%esp),%ecx
+
+	movl	56(%esp),%eax
+	pushl	%eax
+
+	movl	56(%esp),%eax
+	pushl	%eax
+
+	movl	56(%esp),%eax
+	pushl	%eax
+	pushl	%ebx
+	cmpl	$0,%ecx
+	jz	.L030decrypt
+	andl	$4294967288,%ebp
+	movl	16(%esp),%eax
+	movl	20(%esp),%ebx
+	jz	.L031encrypt_finish
+.L032encrypt_loop:
+	movl	(%esi),%ecx
+	movl	4(%esi),%edx
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	movl	%eax,16(%esp)
+	movl	%ebx,20(%esp)
+	call	.L_DES_encrypt3_begin
+	movl	16(%esp),%eax
+	movl	20(%esp),%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L032encrypt_loop
+.L031encrypt_finish:
+	movl	60(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L033finish
+	call	.L034PIC_point
+.L034PIC_point:
+	popl	%edx
+	leal	.L035cbc_enc_jmp_table-.L034PIC_point(%edx),%ecx
+	movl	(%ecx,%ebp,4),%ebp
+	addl	%edx,%ebp
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+	jmp	*%ebp
+.L036ej7:
+	movb	6(%esi),%dh
+	shll	$8,%edx
+.L037ej6:
+	movb	5(%esi),%dh
+.L038ej5:
+	movb	4(%esi),%dl
+.L039ej4:
+	movl	(%esi),%ecx
+	jmp	.L040ejend
+.L041ej3:
+	movb	2(%esi),%ch
+	shll	$8,%ecx
+.L042ej2:
+	movb	1(%esi),%ch
+.L043ej1:
+	movb	(%esi),%cl
+.L040ejend:
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	movl	%eax,16(%esp)
+	movl	%ebx,20(%esp)
+	call	.L_DES_encrypt3_begin
+	movl	16(%esp),%eax
+	movl	20(%esp),%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	jmp	.L033finish
+.L030decrypt:
+	andl	$4294967288,%ebp
+	movl	24(%esp),%eax
+	movl	28(%esp),%ebx
+	jz	.L044decrypt_finish
+.L045decrypt_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%eax,16(%esp)
+	movl	%ebx,20(%esp)
+	call	.L_DES_decrypt3_begin
+	movl	16(%esp),%eax
+	movl	20(%esp),%ebx
+	movl	24(%esp),%ecx
+	movl	28(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%ecx,(%edi)
+	movl	%edx,4(%edi)
+	movl	%eax,24(%esp)
+	movl	%ebx,28(%esp)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L045decrypt_loop
+.L044decrypt_finish:
+	movl	60(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L033finish
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%eax,16(%esp)
+	movl	%ebx,20(%esp)
+	call	.L_DES_decrypt3_begin
+	movl	16(%esp),%eax
+	movl	20(%esp),%ebx
+	movl	24(%esp),%ecx
+	movl	28(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+.L046dj7:
+	rorl	$16,%edx
+	movb	%dl,6(%edi)
+	shrl	$16,%edx
+.L047dj6:
+	movb	%dh,5(%edi)
+.L048dj5:
+	movb	%dl,4(%edi)
+.L049dj4:
+	movl	%ecx,(%edi)
+	jmp	.L050djend
+.L051dj3:
+	rorl	$16,%ecx
+	movb	%cl,2(%edi)
+	shll	$16,%ecx
+.L052dj2:
+	movb	%ch,1(%edi)
+.L053dj1:
+	movb	%cl,(%edi)
+.L050djend:
+	jmp	.L033finish
+.L033finish:
+	movl	76(%esp),%ecx
+	addl	$32,%esp
+	movl	%eax,(%ecx)
+	movl	%ebx,4(%ecx)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	64
+.L035cbc_enc_jmp_table:
+.long	0
+.long	.L043ej1-.L034PIC_point
+.long	.L042ej2-.L034PIC_point
+.long	.L041ej3-.L034PIC_point
+.long	.L039ej4-.L034PIC_point
+.long	.L038ej5-.L034PIC_point
+.long	.L037ej6-.L034PIC_point
+.long	.L036ej7-.L034PIC_point
+.align	64
+.size	DES_ede3_cbc_encrypt,.-.L_DES_ede3_cbc_encrypt_begin
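+
+# DES_SPtrans: the eight combined S-box/P-box tables, 64 32-bit entries
+# (0x100 bytes) each, matching the 0x000-0x700 offsets used by the round
+# functions.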
+.align	64
+DES_SPtrans:
+.long	34080768,524288,33554434,34080770
+.long	33554432,526338,524290,33554434
+.long	526338,34080768,34078720,2050
+.long	33556482,33554432,0,524290
+.long	524288,2,33556480,526336
+.long	34080770,34078720,2050,33556480
+.long	2,2048,526336,34078722
+.long	2048,33556482,34078722,0
+.long	0,34080770,33556480,524290
+.long	34080768,524288,2050,33556480
+.long	34078722,2048,526336,33554434
+.long	526338,2,33554434,34078720
+.long	34080770,526336,34078720,33556482
+.long	33554432,2050,524290,0
+.long	524288,33554432,33556482,34080768
+.long	2,34078722,2048,526338
+.long	1074823184,0,1081344,1074790400
+.long	1073741840,32784,1073774592,1081344
+.long	32768,1074790416,16,1073774592
+.long	1048592,1074823168,1074790400,16
+.long	1048576,1073774608,1074790416,32768
+.long	1081360,1073741824,0,1048592
+.long	1073774608,1081360,1074823168,1073741840
+.long	1073741824,1048576,32784,1074823184
+.long	1048592,1074823168,1073774592,1081360
+.long	1074823184,1048592,1073741840,0
+.long	1073741824,32784,1048576,1074790416
+.long	32768,1073741824,1081360,1073774608
+.long	1074823168,32768,0,1073741840
+.long	16,1074823184,1081344,1074790400
+.long	1074790416,1048576,32784,1073774592
+.long	1073774608,16,1074790400,1081344
+.long	67108865,67371264,256,67109121
+.long	262145,67108864,67109121,262400
+.long	67109120,262144,67371008,1
+.long	67371265,257,1,67371009
+.long	0,262145,67371264,256
+.long	257,67371265,262144,67108865
+.long	67371009,67109120,262401,67371008
+.long	262400,0,67108864,262401
+.long	67371264,256,1,262144
+.long	257,262145,67371008,67109121
+.long	0,67371264,262400,67371009
+.long	262145,67108864,67371265,1
+.long	262401,67108865,67108864,67371265
+.long	262144,67109120,67109121,262400
+.long	67109120,0,67371009,257
+.long	67108865,262401,256,67371008
+.long	4198408,268439552,8,272633864
+.long	0,272629760,268439560,4194312
+.long	272633856,268435464,268435456,4104
+.long	268435464,4198408,4194304,268435456
+.long	272629768,4198400,4096,8
+.long	4198400,268439560,272629760,4096
+.long	4104,0,4194312,272633856
+.long	268439552,272629768,272633864,4194304
+.long	272629768,4104,4194304,268435464
+.long	4198400,268439552,8,272629760
+.long	268439560,0,4096,4194312
+.long	0,272629768,272633856,4096
+.long	268435456,272633864,4198408,4194304
+.long	272633864,8,268439552,4198408
+.long	4194312,4198400,272629760,268439560
+.long	4104,268435456,268435464,272633856
+.long	134217728,65536,1024,134284320
+.long	134283296,134218752,66592,134283264
+.long	65536,32,134217760,66560
+.long	134218784,134283296,134284288,0
+.long	66560,134217728,65568,1056
+.long	134218752,66592,0,134217760
+.long	32,134218784,134284320,65568
+.long	134283264,1024,1056,134284288
+.long	134284288,134218784,65568,134283264
+.long	65536,32,134217760,134218752
+.long	134217728,66560,134284320,0
+.long	66592,134217728,1024,65568
+.long	134218784,1024,0,134284320
+.long	134283296,134284288,1056,65536
+.long	66560,134283296,134218752,1056
+.long	32,66592,134283264,134217760
+.long	2147483712,2097216,0,2149588992
+.long	2097216,8192,2147491904,2097152
+.long	8256,2149589056,2105344,2147483648
+.long	2147491840,2147483712,2149580800,2105408
+.long	2097152,2147491904,2149580864,0
+.long	8192,64,2149588992,2149580864
+.long	2149589056,2149580800,2147483648,8256
+.long	64,2105344,2105408,2147491840
+.long	8256,2147483648,2147491840,2105408
+.long	2149588992,2097216,0,2147491840
+.long	2147483648,8192,2149580864,2097152
+.long	2097216,2149589056,2105344,64
+.long	2149589056,2105344,2097152,2147491904
+.long	2147483712,2149580800,2105408,0
+.long	8192,2147483712,2147491904,2149588992
+.long	2149580800,8256,64,2149580864
+.long	16384,512,16777728,16777220
+.long	16794116,16388,16896,0
+.long	16777216,16777732,516,16793600
+.long	4,16794112,16793600,516
+.long	16777732,16384,16388,16794116
+.long	0,16777728,16777220,16896
+.long	16793604,16900,16794112,4
+.long	16900,16793604,512,16777216
+.long	16900,16793600,16793604,516
+.long	16384,512,16777216,16793604
+.long	16777732,16900,16896,0
+.long	512,16777220,4,16777728
+.long	0,16777732,16777728,16896
+.long	516,16384,16794116,16777216
+.long	16794112,4,16388,16794116
+.long	16777220,16794112,16793600,16388
+.long	545259648,545390592,131200,0
+.long	537001984,8388736,545259520,545390720
+.long	128,536870912,8519680,131200
+.long	8519808,537002112,536871040,545259520
+.long	131072,8519808,8388736,537001984
+.long	545390720,536871040,0,8519680
+.long	536870912,8388608,537002112,545259648
+.long	8388608,131072,545390592,128
+.long	8388608,131072,536871040,545390720
+.long	131200,536870912,0,8519680
+.long	545259648,537002112,537001984,8388736
+.long	545390592,128,8388736,537001984
+.long	545390720,8388608,545259520,536871040
+.long	8519680,131200,537002112,545259520
+.long	128,545390592,8519808,0
+.long	536870912,545259648,131072,8519808
+#else
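+# Non-PIC variant: the code is identical, since these DES routines locate
+# DES_SPtrans with a call/pop sequence either way.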
+.file	"des-586.S"
+.text
+.globl	DES_SPtrans
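+
+# _x86_DES_encrypt: the 16 DES rounds, fully unrolled.  Each round XORs a
+# round-key pair into the active half, masks out eight byte indices, and
+# XORs the matching DES_SPtrans entries into the other half; %ecx is
+# reloaded from the stack every round because %cl/%ch serve as index
+# bytes.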
+.type	_x86_DES_encrypt,@function
+.align	16
+_x86_DES_encrypt:
+	pushl	%ecx
+
+	movl	(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	4(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	8(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	12(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	16(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	20(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	24(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	28(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	32(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	36(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	40(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	44(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	48(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	52(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	56(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	60(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	64(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	68(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	72(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	76(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	80(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	84(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	88(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	92(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	96(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	100(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	104(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	108(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	112(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	116(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	120(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	124(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+	addl	$4,%esp
+	ret
+.size	_x86_DES_encrypt,.-_x86_DES_encrypt
+.type	_x86_DES_decrypt,@function
+.align	16
+_x86_DES_decrypt:
+	pushl	%ecx
+
+	movl	120(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	124(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	112(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	116(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	104(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	108(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	96(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	100(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	88(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	92(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	80(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	84(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	72(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	76(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	64(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	68(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	56(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	60(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	48(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	52(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	40(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	44(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	32(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	36(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	24(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	28(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	16(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	20(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+
+	movl	8(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	12(%ecx),%edx
+	xorl	%esi,%eax
+	xorl	%ecx,%ecx
+	xorl	%esi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%edi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%edi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%edi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%edi
+	xorl	0x700(%ebp,%ecx,1),%edi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%edi
+	xorl	0x500(%ebp,%edx,1),%edi
+
+	movl	(%ecx),%eax
+	xorl	%ebx,%ebx
+	movl	4(%ecx),%edx
+	xorl	%edi,%eax
+	xorl	%ecx,%ecx
+	xorl	%edi,%edx
+	andl	$0xfcfcfcfc,%eax
+	andl	$0xcfcfcfcf,%edx
+	movb	%al,%bl
+	movb	%ah,%cl
+	rorl	$4,%edx
+	xorl	(%ebp,%ebx,1),%esi
+	movb	%dl,%bl
+	xorl	0x200(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	shrl	$16,%eax
+	xorl	0x100(%ebp,%ebx,1),%esi
+	movb	%ah,%bl
+	shrl	$16,%edx
+	xorl	0x300(%ebp,%ecx,1),%esi
+	movb	%dh,%cl
+	andl	$0xff,%eax
+	andl	$0xff,%edx
+	xorl	0x600(%ebp,%ebx,1),%esi
+	xorl	0x700(%ebp,%ecx,1),%esi
+	movl	(%esp),%ecx
+	xorl	0x400(%ebp,%eax,1),%esi
+	xorl	0x500(%ebp,%edx,1),%esi
+	addl	$4,%esp
+	ret
+.size	_x86_DES_decrypt,.-_x86_DES_decrypt
+.globl	DES_encrypt1
+.type	DES_encrypt1,@function
+.align	16
+DES_encrypt1:
+.L_DES_encrypt1_begin:
+	pushl	%esi
+	pushl	%edi
+
+
+	movl	12(%esp),%esi
+	xorl	%ecx,%ecx
+	pushl	%ebx
+	pushl	%ebp
+	movl	(%esi),%eax
+	movl	28(%esp),%ebx
+	movl	4(%esi),%edi
+
+
+	roll	$4,%eax
+	movl	%eax,%esi
+	xorl	%edi,%eax
+	andl	$0xf0f0f0f0,%eax
+	xorl	%eax,%esi
+	xorl	%eax,%edi
+
+	roll	$20,%edi
+	movl	%edi,%eax
+	xorl	%esi,%edi
+	andl	$0xfff0000f,%edi
+	xorl	%edi,%eax
+	xorl	%edi,%esi
+
+	roll	$14,%eax
+	movl	%eax,%edi
+	xorl	%esi,%eax
+	andl	$0x33333333,%eax
+	xorl	%eax,%edi
+	xorl	%eax,%esi
+
+	roll	$22,%esi
+	movl	%esi,%eax
+	xorl	%edi,%esi
+	andl	$0x03fc03fc,%esi
+	xorl	%esi,%eax
+	xorl	%esi,%edi
+
+	roll	$9,%eax
+	movl	%eax,%esi
+	xorl	%edi,%eax
+	andl	$0xaaaaaaaa,%eax
+	xorl	%eax,%esi
+	xorl	%eax,%edi
+
+	roll	$1,%edi
+	call	.L000pic_point
+.L000pic_point:
+	popl	%ebp
+	leal	DES_SPtrans-.L000pic_point(%ebp),%ebp
+	movl	24(%esp),%ecx
+	cmpl	$0,%ebx
+	je	.L001decrypt
+	call	_x86_DES_encrypt
+	jmp	.L002done
+.L001decrypt:
+	call	_x86_DES_decrypt
+.L002done:
+
+
+	movl	20(%esp),%edx
+	rorl	$1,%esi
+	movl	%edi,%eax
+	xorl	%esi,%edi
+	andl	$0xaaaaaaaa,%edi
+	xorl	%edi,%eax
+	xorl	%edi,%esi
+
+	roll	$23,%eax
+	movl	%eax,%edi
+	xorl	%esi,%eax
+	andl	$0x03fc03fc,%eax
+	xorl	%eax,%edi
+	xorl	%eax,%esi
+
+	roll	$10,%edi
+	movl	%edi,%eax
+	xorl	%esi,%edi
+	andl	$0x33333333,%edi
+	xorl	%edi,%eax
+	xorl	%edi,%esi
+
+	roll	$18,%esi
+	movl	%esi,%edi
+	xorl	%eax,%esi
+	andl	$0xfff0000f,%esi
+	xorl	%esi,%edi
+	xorl	%esi,%eax
+
+	roll	$12,%edi
+	movl	%edi,%esi
+	xorl	%eax,%edi
+	andl	$0xf0f0f0f0,%edi
+	xorl	%edi,%esi
+	xorl	%edi,%eax
+
+	rorl	$4,%eax
+	movl	%eax,(%edx)
+	movl	%esi,4(%edx)
+	popl	%ebp
+	popl	%ebx
+	popl	%edi
+	popl	%esi
+	ret
+.size	DES_encrypt1,.-.L_DES_encrypt1_begin
+.globl	DES_encrypt2
+.type	DES_encrypt2,@function
+.align	16
+DES_encrypt2:
+.L_DES_encrypt2_begin:
+	pushl	%esi
+	pushl	%edi
+
+
+	movl	12(%esp),%eax
+	xorl	%ecx,%ecx
+	pushl	%ebx
+	pushl	%ebp
+	movl	(%eax),%esi
+	movl	28(%esp),%ebx
+	roll	$3,%esi
+	movl	4(%eax),%edi
+	roll	$3,%edi
+	call	.L003pic_point
+.L003pic_point:
+	popl	%ebp
+	leal	DES_SPtrans-.L003pic_point(%ebp),%ebp
+	movl	24(%esp),%ecx
+	cmpl	$0,%ebx
+	je	.L004decrypt
+	call	_x86_DES_encrypt
+	jmp	.L005done
+.L004decrypt:
+	call	_x86_DES_decrypt
+.L005done:
+
+
+	rorl	$3,%edi
+	movl	20(%esp),%eax
+	rorl	$3,%esi
+	movl	%edi,(%eax)
+	movl	%esi,4(%eax)
+	popl	%ebp
+	popl	%ebx
+	popl	%edi
+	popl	%esi
+	ret
+.size	DES_encrypt2,.-.L_DES_encrypt2_begin
+.globl	DES_encrypt3
+.type	DES_encrypt3,@function
+.align	16
+DES_encrypt3:
+.L_DES_encrypt3_begin:
+	pushl	%ebx
+	movl	8(%esp),%ebx
+	pushl	%ebp
+	pushl	%esi
+	pushl	%edi
+
+
+	movl	(%ebx),%edi
+	movl	4(%ebx),%esi
+	subl	$12,%esp
+
+
+	roll	$4,%edi
+	movl	%edi,%edx
+	xorl	%esi,%edi
+	andl	$0xf0f0f0f0,%edi
+	xorl	%edi,%edx
+	xorl	%edi,%esi
+
+	roll	$20,%esi
+	movl	%esi,%edi
+	xorl	%edx,%esi
+	andl	$0xfff0000f,%esi
+	xorl	%esi,%edi
+	xorl	%esi,%edx
+
+	roll	$14,%edi
+	movl	%edi,%esi
+	xorl	%edx,%edi
+	andl	$0x33333333,%edi
+	xorl	%edi,%esi
+	xorl	%edi,%edx
+
+	roll	$22,%edx
+	movl	%edx,%edi
+	xorl	%esi,%edx
+	andl	$0x03fc03fc,%edx
+	xorl	%edx,%edi
+	xorl	%edx,%esi
+
+	roll	$9,%edi
+	movl	%edi,%edx
+	xorl	%esi,%edi
+	andl	$0xaaaaaaaa,%edi
+	xorl	%edi,%edx
+	xorl	%edi,%esi
+
+	rorl	$3,%edx
+	rorl	$2,%esi
+	movl	%esi,4(%ebx)
+	movl	36(%esp),%eax
+	movl	%edx,(%ebx)
+	movl	40(%esp),%edi
+	movl	44(%esp),%esi
+	movl	$1,8(%esp)
+	movl	%eax,4(%esp)
+	movl	%ebx,(%esp)
+	call	.L_DES_encrypt2_begin
+	movl	$0,8(%esp)
+	movl	%edi,4(%esp)
+	movl	%ebx,(%esp)
+	call	.L_DES_encrypt2_begin
+	movl	$1,8(%esp)
+	movl	%esi,4(%esp)
+	movl	%ebx,(%esp)
+	call	.L_DES_encrypt2_begin
+	addl	$12,%esp
+	movl	(%ebx),%edi
+	movl	4(%ebx),%esi
+
+
+	roll	$2,%esi
+	roll	$3,%edi
+	movl	%edi,%eax
+	xorl	%esi,%edi
+	andl	$0xaaaaaaaa,%edi
+	xorl	%edi,%eax
+	xorl	%edi,%esi
+
+	roll	$23,%eax
+	movl	%eax,%edi
+	xorl	%esi,%eax
+	andl	$0x03fc03fc,%eax
+	xorl	%eax,%edi
+	xorl	%eax,%esi
+
+	roll	$10,%edi
+	movl	%edi,%eax
+	xorl	%esi,%edi
+	andl	$0x33333333,%edi
+	xorl	%edi,%eax
+	xorl	%edi,%esi
+
+	roll	$18,%esi
+	movl	%esi,%edi
+	xorl	%eax,%esi
+	andl	$0xfff0000f,%esi
+	xorl	%esi,%edi
+	xorl	%esi,%eax
+
+	roll	$12,%edi
+	movl	%edi,%esi
+	xorl	%eax,%edi
+	andl	$0xf0f0f0f0,%edi
+	xorl	%edi,%esi
+	xorl	%edi,%eax
+
+	rorl	$4,%eax
+	movl	%eax,(%ebx)
+	movl	%esi,4(%ebx)
+	popl	%edi
+	popl	%esi
+	popl	%ebp
+	popl	%ebx
+	ret
+.size	DES_encrypt3,.-.L_DES_encrypt3_begin
+.globl	DES_decrypt3
+.type	DES_decrypt3,@function
+.align	16
+DES_decrypt3:
+.L_DES_decrypt3_begin:
+	pushl	%ebx
+	movl	8(%esp),%ebx
+	pushl	%ebp
+	pushl	%esi
+	pushl	%edi
+
+
+	movl	(%ebx),%edi
+	movl	4(%ebx),%esi
+	subl	$12,%esp
+
+
+	roll	$4,%edi
+	movl	%edi,%edx
+	xorl	%esi,%edi
+	andl	$0xf0f0f0f0,%edi
+	xorl	%edi,%edx
+	xorl	%edi,%esi
+
+	roll	$20,%esi
+	movl	%esi,%edi
+	xorl	%edx,%esi
+	andl	$0xfff0000f,%esi
+	xorl	%esi,%edi
+	xorl	%esi,%edx
+
+	roll	$14,%edi
+	movl	%edi,%esi
+	xorl	%edx,%edi
+	andl	$0x33333333,%edi
+	xorl	%edi,%esi
+	xorl	%edi,%edx
+
+	roll	$22,%edx
+	movl	%edx,%edi
+	xorl	%esi,%edx
+	andl	$0x03fc03fc,%edx
+	xorl	%edx,%edi
+	xorl	%edx,%esi
+
+	roll	$9,%edi
+	movl	%edi,%edx
+	xorl	%esi,%edi
+	andl	$0xaaaaaaaa,%edi
+	xorl	%edi,%edx
+	xorl	%edi,%esi
+
+	rorl	$3,%edx
+	rorl	$2,%esi
+	movl	%esi,4(%ebx)
+	movl	36(%esp),%esi
+	movl	%edx,(%ebx)
+	movl	40(%esp),%edi
+	movl	44(%esp),%eax
+	movl	$0,8(%esp)
+	movl	%eax,4(%esp)
+	movl	%ebx,(%esp)
+	call	.L_DES_encrypt2_begin
+	movl	$1,8(%esp)
+	movl	%edi,4(%esp)
+	movl	%ebx,(%esp)
+	call	.L_DES_encrypt2_begin
+	movl	$0,8(%esp)
+	movl	%esi,4(%esp)
+	movl	%ebx,(%esp)
+	call	.L_DES_encrypt2_begin
+	addl	$12,%esp
+	movl	(%ebx),%edi
+	movl	4(%ebx),%esi
+
+
+	roll	$2,%esi
+	roll	$3,%edi
+	movl	%edi,%eax
+	xorl	%esi,%edi
+	andl	$0xaaaaaaaa,%edi
+	xorl	%edi,%eax
+	xorl	%edi,%esi
+
+	roll	$23,%eax
+	movl	%eax,%edi
+	xorl	%esi,%eax
+	andl	$0x03fc03fc,%eax
+	xorl	%eax,%edi
+	xorl	%eax,%esi
+
+	roll	$10,%edi
+	movl	%edi,%eax
+	xorl	%esi,%edi
+	andl	$0x33333333,%edi
+	xorl	%edi,%eax
+	xorl	%edi,%esi
+
+	roll	$18,%esi
+	movl	%esi,%edi
+	xorl	%eax,%esi
+	andl	$0xfff0000f,%esi
+	xorl	%esi,%edi
+	xorl	%esi,%eax
+
+	roll	$12,%edi
+	movl	%edi,%esi
+	xorl	%eax,%edi
+	andl	$0xf0f0f0f0,%edi
+	xorl	%edi,%esi
+	xorl	%edi,%eax
+
+	rorl	$4,%eax
+	movl	%eax,(%ebx)
+	movl	%esi,4(%ebx)
+	popl	%edi
+	popl	%esi
+	popl	%ebp
+	popl	%ebx
+	ret
+.size	DES_decrypt3,.-.L_DES_decrypt3_begin
+.globl	DES_ncbc_encrypt
+.type	DES_ncbc_encrypt,@function
+.align	16
+DES_ncbc_encrypt:
+.L_DES_ncbc_encrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%ebp
+
+	movl	36(%esp),%ebx
+	movl	(%ebx),%esi
+	movl	4(%ebx),%edi
+	pushl	%edi
+	pushl	%esi
+	pushl	%edi
+	pushl	%esi
+	movl	%esp,%ebx
+	movl	36(%esp),%esi
+	movl	40(%esp),%edi
+
+	movl	56(%esp),%ecx
+
+	pushl	%ecx
+
+	movl	52(%esp),%eax
+	pushl	%eax
+	pushl	%ebx
+	cmpl	$0,%ecx
+	jz	.L006decrypt
+	andl	$4294967288,%ebp
+	movl	12(%esp),%eax
+	movl	16(%esp),%ebx
+	jz	.L007encrypt_finish
+.L008encrypt_loop:
+	movl	(%esi),%ecx
+	movl	4(%esi),%edx
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	movl	%eax,12(%esp)
+	movl	%ebx,16(%esp)
+	call	.L_DES_encrypt1_begin
+	movl	12(%esp),%eax
+	movl	16(%esp),%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L008encrypt_loop
+.L007encrypt_finish:
+	movl	56(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L009finish
+	call	.L010PIC_point
+.L010PIC_point:
+	popl	%edx
+	leal	.L011cbc_enc_jmp_table-.L010PIC_point(%edx),%ecx
+	movl	(%ecx,%ebp,4),%ebp
+	addl	%edx,%ebp
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+	jmp	*%ebp
+.L012ej7:
+	movb	6(%esi),%dh
+	shll	$8,%edx
+.L013ej6:
+	movb	5(%esi),%dh
+.L014ej5:
+	movb	4(%esi),%dl
+.L015ej4:
+	movl	(%esi),%ecx
+	jmp	.L016ejend
+.L017ej3:
+	movb	2(%esi),%ch
+	shll	$8,%ecx
+.L018ej2:
+	movb	1(%esi),%ch
+.L019ej1:
+	movb	(%esi),%cl
+.L016ejend:
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	movl	%eax,12(%esp)
+	movl	%ebx,16(%esp)
+	call	.L_DES_encrypt1_begin
+	movl	12(%esp),%eax
+	movl	16(%esp),%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	jmp	.L009finish
+.L006decrypt:
+	andl	$4294967288,%ebp
+	movl	20(%esp),%eax
+	movl	24(%esp),%ebx
+	jz	.L020decrypt_finish
+.L021decrypt_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%eax,12(%esp)
+	movl	%ebx,16(%esp)
+	call	.L_DES_encrypt1_begin
+	movl	12(%esp),%eax
+	movl	16(%esp),%ebx
+	movl	20(%esp),%ecx
+	movl	24(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%ecx,(%edi)
+	movl	%edx,4(%edi)
+	movl	%eax,20(%esp)
+	movl	%ebx,24(%esp)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L021decrypt_loop
+.L020decrypt_finish:
+	movl	56(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L009finish
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%eax,12(%esp)
+	movl	%ebx,16(%esp)
+	call	.L_DES_encrypt1_begin
+	movl	12(%esp),%eax
+	movl	16(%esp),%ebx
+	movl	20(%esp),%ecx
+	movl	24(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+.L022dj7:
+	rorl	$16,%edx
+	movb	%dl,6(%edi)
+	shrl	$16,%edx
+.L023dj6:
+	movb	%dh,5(%edi)
+.L024dj5:
+	movb	%dl,4(%edi)
+.L025dj4:
+	movl	%ecx,(%edi)
+	jmp	.L026djend
+.L027dj3:
+	rorl	$16,%ecx
+	movb	%cl,2(%edi)
+	shll	$16,%ecx
+.L028dj2:
+	movb	%ch,1(%edi)
+.L029dj1:
+	movb	%cl,(%edi)
+.L026djend:
+	jmp	.L009finish
+.L009finish:
+	movl	64(%esp),%ecx
+	addl	$28,%esp
+	movl	%eax,(%ecx)
+	movl	%ebx,4(%ecx)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	64
+.L011cbc_enc_jmp_table:
+.long	0
+.long	.L019ej1-.L010PIC_point
+.long	.L018ej2-.L010PIC_point
+.long	.L017ej3-.L010PIC_point
+.long	.L015ej4-.L010PIC_point
+.long	.L014ej5-.L010PIC_point
+.long	.L013ej6-.L010PIC_point
+.long	.L012ej7-.L010PIC_point
+.align	64
+.size	DES_ncbc_encrypt,.-.L_DES_ncbc_encrypt_begin
+.globl	DES_ede3_cbc_encrypt
+.type	DES_ede3_cbc_encrypt,@function
+.align	16
+DES_ede3_cbc_encrypt:
+.L_DES_ede3_cbc_encrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%ebp
+
+	movl	44(%esp),%ebx
+	movl	(%ebx),%esi
+	movl	4(%ebx),%edi
+	pushl	%edi
+	pushl	%esi
+	pushl	%edi
+	pushl	%esi
+	movl	%esp,%ebx
+	movl	36(%esp),%esi
+	movl	40(%esp),%edi
+
+	movl	64(%esp),%ecx
+
+	movl	56(%esp),%eax
+	pushl	%eax
+
+	movl	56(%esp),%eax
+	pushl	%eax
+
+	movl	56(%esp),%eax
+	pushl	%eax
+	pushl	%ebx
+	cmpl	$0,%ecx
+	jz	.L030decrypt
+	andl	$4294967288,%ebp
+	movl	16(%esp),%eax
+	movl	20(%esp),%ebx
+	jz	.L031encrypt_finish
+.L032encrypt_loop:
+	movl	(%esi),%ecx
+	movl	4(%esi),%edx
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	movl	%eax,16(%esp)
+	movl	%ebx,20(%esp)
+	call	.L_DES_encrypt3_begin
+	movl	16(%esp),%eax
+	movl	20(%esp),%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L032encrypt_loop
+.L031encrypt_finish:
+	movl	60(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L033finish
+	call	.L034PIC_point
+.L034PIC_point:
+	popl	%edx
+	leal	.L035cbc_enc_jmp_table-.L034PIC_point(%edx),%ecx
+	movl	(%ecx,%ebp,4),%ebp
+	addl	%edx,%ebp
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+	jmp	*%ebp
+.L036ej7:
+	movb	6(%esi),%dh
+	shll	$8,%edx
+.L037ej6:
+	movb	5(%esi),%dh
+.L038ej5:
+	movb	4(%esi),%dl
+.L039ej4:
+	movl	(%esi),%ecx
+	jmp	.L040ejend
+.L041ej3:
+	movb	2(%esi),%ch
+	shll	$8,%ecx
+.L042ej2:
+	movb	1(%esi),%ch
+.L043ej1:
+	movb	(%esi),%cl
+.L040ejend:
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	movl	%eax,16(%esp)
+	movl	%ebx,20(%esp)
+	call	.L_DES_encrypt3_begin
+	movl	16(%esp),%eax
+	movl	20(%esp),%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	jmp	.L033finish
+.L030decrypt:
+	andl	$4294967288,%ebp
+	movl	24(%esp),%eax
+	movl	28(%esp),%ebx
+	jz	.L044decrypt_finish
+.L045decrypt_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%eax,16(%esp)
+	movl	%ebx,20(%esp)
+	call	.L_DES_decrypt3_begin
+	movl	16(%esp),%eax
+	movl	20(%esp),%ebx
+	movl	24(%esp),%ecx
+	movl	28(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%ecx,(%edi)
+	movl	%edx,4(%edi)
+	movl	%eax,24(%esp)
+	movl	%ebx,28(%esp)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L045decrypt_loop
+.L044decrypt_finish:
+	movl	60(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L033finish
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%eax,16(%esp)
+	movl	%ebx,20(%esp)
+	call	.L_DES_decrypt3_begin
+	movl	16(%esp),%eax
+	movl	20(%esp),%ebx
+	movl	24(%esp),%ecx
+	movl	28(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+.L046dj7:
+	rorl	$16,%edx
+	movb	%dl,6(%edi)
+	shrl	$16,%edx
+.L047dj6:
+	movb	%dh,5(%edi)
+.L048dj5:
+	movb	%dl,4(%edi)
+.L049dj4:
+	movl	%ecx,(%edi)
+	jmp	.L050djend
+.L051dj3:
+	rorl	$16,%ecx
+	movb	%cl,2(%edi)
+	shll	$16,%ecx
+.L052dj2:
+	movb	%ch,1(%esi)
+.L053dj1:
+	movb	%cl,(%esi)
+.L050djend:
+	jmp	.L033finish
+.L033finish:
+	movl	76(%esp),%ecx
+	addl	$32,%esp
+	movl	%eax,(%ecx)
+	movl	%ebx,4(%ecx)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	64
+.L035cbc_enc_jmp_table:
+.long	0
+.long	.L043ej1-.L034PIC_point
+.long	.L042ej2-.L034PIC_point
+.long	.L041ej3-.L034PIC_point
+.long	.L039ej4-.L034PIC_point
+.long	.L038ej5-.L034PIC_point
+.long	.L037ej6-.L034PIC_point
+.long	.L036ej7-.L034PIC_point
+.align	64
+.size	DES_ede3_cbc_encrypt,.-.L_DES_ede3_cbc_encrypt_begin
+.align	64
+DES_SPtrans:
+.long	34080768,524288,33554434,34080770
+.long	33554432,526338,524290,33554434
+.long	526338,34080768,34078720,2050
+.long	33556482,33554432,0,524290
+.long	524288,2,33556480,526336
+.long	34080770,34078720,2050,33556480
+.long	2,2048,526336,34078722
+.long	2048,33556482,34078722,0
+.long	0,34080770,33556480,524290
+.long	34080768,524288,2050,33556480
+.long	34078722,2048,526336,33554434
+.long	526338,2,33554434,34078720
+.long	34080770,526336,34078720,33556482
+.long	33554432,2050,524290,0
+.long	524288,33554432,33556482,34080768
+.long	2,34078722,2048,526338
+.long	1074823184,0,1081344,1074790400
+.long	1073741840,32784,1073774592,1081344
+.long	32768,1074790416,16,1073774592
+.long	1048592,1074823168,1074790400,16
+.long	1048576,1073774608,1074790416,32768
+.long	1081360,1073741824,0,1048592
+.long	1073774608,1081360,1074823168,1073741840
+.long	1073741824,1048576,32784,1074823184
+.long	1048592,1074823168,1073774592,1081360
+.long	1074823184,1048592,1073741840,0
+.long	1073741824,32784,1048576,1074790416
+.long	32768,1073741824,1081360,1073774608
+.long	1074823168,32768,0,1073741840
+.long	16,1074823184,1081344,1074790400
+.long	1074790416,1048576,32784,1073774592
+.long	1073774608,16,1074790400,1081344
+.long	67108865,67371264,256,67109121
+.long	262145,67108864,67109121,262400
+.long	67109120,262144,67371008,1
+.long	67371265,257,1,67371009
+.long	0,262145,67371264,256
+.long	257,67371265,262144,67108865
+.long	67371009,67109120,262401,67371008
+.long	262400,0,67108864,262401
+.long	67371264,256,1,262144
+.long	257,262145,67371008,67109121
+.long	0,67371264,262400,67371009
+.long	262145,67108864,67371265,1
+.long	262401,67108865,67108864,67371265
+.long	262144,67109120,67109121,262400
+.long	67109120,0,67371009,257
+.long	67108865,262401,256,67371008
+.long	4198408,268439552,8,272633864
+.long	0,272629760,268439560,4194312
+.long	272633856,268435464,268435456,4104
+.long	268435464,4198408,4194304,268435456
+.long	272629768,4198400,4096,8
+.long	4198400,268439560,272629760,4096
+.long	4104,0,4194312,272633856
+.long	268439552,272629768,272633864,4194304
+.long	272629768,4104,4194304,268435464
+.long	4198400,268439552,8,272629760
+.long	268439560,0,4096,4194312
+.long	0,272629768,272633856,4096
+.long	268435456,272633864,4198408,4194304
+.long	272633864,8,268439552,4198408
+.long	4194312,4198400,272629760,268439560
+.long	4104,268435456,268435464,272633856
+.long	134217728,65536,1024,134284320
+.long	134283296,134218752,66592,134283264
+.long	65536,32,134217760,66560
+.long	134218784,134283296,134284288,0
+.long	66560,134217728,65568,1056
+.long	134218752,66592,0,134217760
+.long	32,134218784,134284320,65568
+.long	134283264,1024,1056,134284288
+.long	134284288,134218784,65568,134283264
+.long	65536,32,134217760,134218752
+.long	134217728,66560,134284320,0
+.long	66592,134217728,1024,65568
+.long	134218784,1024,0,134284320
+.long	134283296,134284288,1056,65536
+.long	66560,134283296,134218752,1056
+.long	32,66592,134283264,134217760
+.long	2147483712,2097216,0,2149588992
+.long	2097216,8192,2147491904,2097152
+.long	8256,2149589056,2105344,2147483648
+.long	2147491840,2147483712,2149580800,2105408
+.long	2097152,2147491904,2149580864,0
+.long	8192,64,2149588992,2149580864
+.long	2149589056,2149580800,2147483648,8256
+.long	64,2105344,2105408,2147491840
+.long	8256,2147483648,2147491840,2105408
+.long	2149588992,2097216,0,2147491840
+.long	2147483648,8192,2149580864,2097152
+.long	2097216,2149589056,2105344,64
+.long	2149589056,2105344,2097152,2147491904
+.long	2147483712,2149580800,2105408,0
+.long	8192,2147483712,2147491904,2149588992
+.long	2149580800,8256,64,2149580864
+.long	16384,512,16777728,16777220
+.long	16794116,16388,16896,0
+.long	16777216,16777732,516,16793600
+.long	4,16794112,16793600,516
+.long	16777732,16384,16388,16794116
+.long	0,16777728,16777220,16896
+.long	16793604,16900,16794112,4
+.long	16900,16793604,512,16777216
+.long	16900,16793600,16793604,516
+.long	16384,512,16777216,16793604
+.long	16777732,16900,16896,0
+.long	512,16777220,4,16777728
+.long	0,16777732,16777728,16896
+.long	516,16384,16794116,16777216
+.long	16794112,4,16388,16794116
+.long	16777220,16794112,16793600,16388
+.long	545259648,545390592,131200,0
+.long	537001984,8388736,545259520,545390720
+.long	128,536870912,8519680,131200
+.long	8519808,537002112,536871040,545259520
+.long	131072,8519808,8388736,537001984
+.long	545390720,536871040,0,8519680
+.long	536870912,8388608,537002112,545259648
+.long	8388608,131072,545390592,128
+.long	8388608,131072,536871040,545390720
+.long	131200,536870912,0,8519680
+.long	545259648,537002112,537001984,8388736
+.long	545390592,128,8388736,537001984
+.long	545390720,8388608,545259520,536871040
+.long	8519680,131200,537002112,545259520
+.long	128,545390592,8519808,0
+.long	536870912,545259648,131072,8519808
+#endif


Property changes on: trunk/secure/lib/libcrypto/i386/des-586.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Deleted: trunk/secure/lib/libcrypto/i386/des-586.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/des-586.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/des-586.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,1838 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/des-586.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"des-586.s"
-.text
-.globl	DES_SPtrans
-.type	_x86_DES_encrypt,@function
-.align	16
-_x86_DES_encrypt:
-	pushl	%ecx
-
-	movl	(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	4(%ecx),%edx
-	xorl	%esi,%eax
-	xorl	%ecx,%ecx
-	xorl	%esi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%edi
-	xorl	0x700(%ebp,%ecx,1),%edi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%edi
-	xorl	0x500(%ebp,%edx,1),%edi
-
-	movl	8(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	12(%ecx),%edx
-	xorl	%edi,%eax
-	xorl	%ecx,%ecx
-	xorl	%edi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%esi
-	xorl	0x700(%ebp,%ecx,1),%esi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%esi
-	xorl	0x500(%ebp,%edx,1),%esi
-
-	movl	16(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	20(%ecx),%edx
-	xorl	%esi,%eax
-	xorl	%ecx,%ecx
-	xorl	%esi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%edi
-	xorl	0x700(%ebp,%ecx,1),%edi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%edi
-	xorl	0x500(%ebp,%edx,1),%edi
-
-	movl	24(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	28(%ecx),%edx
-	xorl	%edi,%eax
-	xorl	%ecx,%ecx
-	xorl	%edi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%esi
-	xorl	0x700(%ebp,%ecx,1),%esi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%esi
-	xorl	0x500(%ebp,%edx,1),%esi
-
-	movl	32(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	36(%ecx),%edx
-	xorl	%esi,%eax
-	xorl	%ecx,%ecx
-	xorl	%esi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%edi
-	xorl	0x700(%ebp,%ecx,1),%edi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%edi
-	xorl	0x500(%ebp,%edx,1),%edi
-
-	movl	40(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	44(%ecx),%edx
-	xorl	%edi,%eax
-	xorl	%ecx,%ecx
-	xorl	%edi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%esi
-	xorl	0x700(%ebp,%ecx,1),%esi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%esi
-	xorl	0x500(%ebp,%edx,1),%esi
-
-	movl	48(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	52(%ecx),%edx
-	xorl	%esi,%eax
-	xorl	%ecx,%ecx
-	xorl	%esi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%edi
-	xorl	0x700(%ebp,%ecx,1),%edi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%edi
-	xorl	0x500(%ebp,%edx,1),%edi
-
-	movl	56(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	60(%ecx),%edx
-	xorl	%edi,%eax
-	xorl	%ecx,%ecx
-	xorl	%edi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%esi
-	xorl	0x700(%ebp,%ecx,1),%esi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%esi
-	xorl	0x500(%ebp,%edx,1),%esi
-
-	movl	64(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	68(%ecx),%edx
-	xorl	%esi,%eax
-	xorl	%ecx,%ecx
-	xorl	%esi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%edi
-	xorl	0x700(%ebp,%ecx,1),%edi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%edi
-	xorl	0x500(%ebp,%edx,1),%edi
-
-	movl	72(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	76(%ecx),%edx
-	xorl	%edi,%eax
-	xorl	%ecx,%ecx
-	xorl	%edi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%esi
-	xorl	0x700(%ebp,%ecx,1),%esi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%esi
-	xorl	0x500(%ebp,%edx,1),%esi
-
-	movl	80(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	84(%ecx),%edx
-	xorl	%esi,%eax
-	xorl	%ecx,%ecx
-	xorl	%esi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%edi
-	xorl	0x700(%ebp,%ecx,1),%edi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%edi
-	xorl	0x500(%ebp,%edx,1),%edi
-
-	movl	88(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	92(%ecx),%edx
-	xorl	%edi,%eax
-	xorl	%ecx,%ecx
-	xorl	%edi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%esi
-	xorl	0x700(%ebp,%ecx,1),%esi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%esi
-	xorl	0x500(%ebp,%edx,1),%esi
-
-	movl	96(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	100(%ecx),%edx
-	xorl	%esi,%eax
-	xorl	%ecx,%ecx
-	xorl	%esi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%edi
-	xorl	0x700(%ebp,%ecx,1),%edi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%edi
-	xorl	0x500(%ebp,%edx,1),%edi
-
-	movl	104(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	108(%ecx),%edx
-	xorl	%edi,%eax
-	xorl	%ecx,%ecx
-	xorl	%edi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%esi
-	xorl	0x700(%ebp,%ecx,1),%esi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%esi
-	xorl	0x500(%ebp,%edx,1),%esi
-
-	movl	112(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	116(%ecx),%edx
-	xorl	%esi,%eax
-	xorl	%ecx,%ecx
-	xorl	%esi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%edi
-	xorl	0x700(%ebp,%ecx,1),%edi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%edi
-	xorl	0x500(%ebp,%edx,1),%edi
-
-	movl	120(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	124(%ecx),%edx
-	xorl	%edi,%eax
-	xorl	%ecx,%ecx
-	xorl	%edi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%esi
-	xorl	0x700(%ebp,%ecx,1),%esi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%esi
-	xorl	0x500(%ebp,%edx,1),%esi
-	addl	$4,%esp
-	ret
-.size	_x86_DES_encrypt,.-_x86_DES_encrypt
-.type	_x86_DES_decrypt,@function
-.align	16
-_x86_DES_decrypt:
-	pushl	%ecx
-
-	movl	120(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	124(%ecx),%edx
-	xorl	%esi,%eax
-	xorl	%ecx,%ecx
-	xorl	%esi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%edi
-	xorl	0x700(%ebp,%ecx,1),%edi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%edi
-	xorl	0x500(%ebp,%edx,1),%edi
-
-	movl	112(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	116(%ecx),%edx
-	xorl	%edi,%eax
-	xorl	%ecx,%ecx
-	xorl	%edi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%esi
-	xorl	0x700(%ebp,%ecx,1),%esi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%esi
-	xorl	0x500(%ebp,%edx,1),%esi
-
-	movl	104(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	108(%ecx),%edx
-	xorl	%esi,%eax
-	xorl	%ecx,%ecx
-	xorl	%esi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%edi
-	xorl	0x700(%ebp,%ecx,1),%edi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%edi
-	xorl	0x500(%ebp,%edx,1),%edi
-
-	movl	96(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	100(%ecx),%edx
-	xorl	%edi,%eax
-	xorl	%ecx,%ecx
-	xorl	%edi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%esi
-	xorl	0x700(%ebp,%ecx,1),%esi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%esi
-	xorl	0x500(%ebp,%edx,1),%esi
-
-	movl	88(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	92(%ecx),%edx
-	xorl	%esi,%eax
-	xorl	%ecx,%ecx
-	xorl	%esi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%edi
-	xorl	0x700(%ebp,%ecx,1),%edi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%edi
-	xorl	0x500(%ebp,%edx,1),%edi
-
-	movl	80(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	84(%ecx),%edx
-	xorl	%edi,%eax
-	xorl	%ecx,%ecx
-	xorl	%edi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%esi
-	xorl	0x700(%ebp,%ecx,1),%esi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%esi
-	xorl	0x500(%ebp,%edx,1),%esi
-
-	movl	72(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	76(%ecx),%edx
-	xorl	%esi,%eax
-	xorl	%ecx,%ecx
-	xorl	%esi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%edi
-	xorl	0x700(%ebp,%ecx,1),%edi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%edi
-	xorl	0x500(%ebp,%edx,1),%edi
-
-	movl	64(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	68(%ecx),%edx
-	xorl	%edi,%eax
-	xorl	%ecx,%ecx
-	xorl	%edi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%esi
-	xorl	0x700(%ebp,%ecx,1),%esi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%esi
-	xorl	0x500(%ebp,%edx,1),%esi
-
-	movl	56(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	60(%ecx),%edx
-	xorl	%esi,%eax
-	xorl	%ecx,%ecx
-	xorl	%esi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%edi
-	xorl	0x700(%ebp,%ecx,1),%edi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%edi
-	xorl	0x500(%ebp,%edx,1),%edi
-
-	movl	48(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	52(%ecx),%edx
-	xorl	%edi,%eax
-	xorl	%ecx,%ecx
-	xorl	%edi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%esi
-	xorl	0x700(%ebp,%ecx,1),%esi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%esi
-	xorl	0x500(%ebp,%edx,1),%esi
-
-	movl	40(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	44(%ecx),%edx
-	xorl	%esi,%eax
-	xorl	%ecx,%ecx
-	xorl	%esi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%edi
-	xorl	0x700(%ebp,%ecx,1),%edi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%edi
-	xorl	0x500(%ebp,%edx,1),%edi
-
-	movl	32(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	36(%ecx),%edx
-	xorl	%edi,%eax
-	xorl	%ecx,%ecx
-	xorl	%edi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%esi
-	xorl	0x700(%ebp,%ecx,1),%esi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%esi
-	xorl	0x500(%ebp,%edx,1),%esi
-
-	movl	24(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	28(%ecx),%edx
-	xorl	%esi,%eax
-	xorl	%ecx,%ecx
-	xorl	%esi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%edi
-	xorl	0x700(%ebp,%ecx,1),%edi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%edi
-	xorl	0x500(%ebp,%edx,1),%edi
-
-	movl	16(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	20(%ecx),%edx
-	xorl	%edi,%eax
-	xorl	%ecx,%ecx
-	xorl	%edi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%esi
-	xorl	0x700(%ebp,%ecx,1),%esi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%esi
-	xorl	0x500(%ebp,%edx,1),%esi
-
-	movl	8(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	12(%ecx),%edx
-	xorl	%esi,%eax
-	xorl	%ecx,%ecx
-	xorl	%esi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%edi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%edi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%edi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%edi
-	xorl	0x700(%ebp,%ecx,1),%edi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%edi
-	xorl	0x500(%ebp,%edx,1),%edi
-
-	movl	(%ecx),%eax
-	xorl	%ebx,%ebx
-	movl	4(%ecx),%edx
-	xorl	%edi,%eax
-	xorl	%ecx,%ecx
-	xorl	%edi,%edx
-	andl	$0xfcfcfcfc,%eax
-	andl	$0xcfcfcfcf,%edx
-	movb	%al,%bl
-	movb	%ah,%cl
-	rorl	$4,%edx
-	xorl	(%ebp,%ebx,1),%esi
-	movb	%dl,%bl
-	xorl	0x200(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	shrl	$16,%eax
-	xorl	0x100(%ebp,%ebx,1),%esi
-	movb	%ah,%bl
-	shrl	$16,%edx
-	xorl	0x300(%ebp,%ecx,1),%esi
-	movb	%dh,%cl
-	andl	$0xff,%eax
-	andl	$0xff,%edx
-	xorl	0x600(%ebp,%ebx,1),%esi
-	xorl	0x700(%ebp,%ecx,1),%esi
-	movl	(%esp),%ecx
-	xorl	0x400(%ebp,%eax,1),%esi
-	xorl	0x500(%ebp,%edx,1),%esi
-	addl	$4,%esp
-	ret
-.size	_x86_DES_decrypt,.-_x86_DES_decrypt
-.globl	DES_encrypt1
-.type	DES_encrypt1,@function
-.align	16
-DES_encrypt1:
-.L_DES_encrypt1_begin:
-	pushl	%esi
-	pushl	%edi
-
-
-	movl	12(%esp),%esi
-	xorl	%ecx,%ecx
-	pushl	%ebx
-	pushl	%ebp
-	movl	(%esi),%eax
-	movl	28(%esp),%ebx
-	movl	4(%esi),%edi
-
-
-	roll	$4,%eax
-	movl	%eax,%esi
-	xorl	%edi,%eax
-	andl	$0xf0f0f0f0,%eax
-	xorl	%eax,%esi
-	xorl	%eax,%edi
-
-	roll	$20,%edi
-	movl	%edi,%eax
-	xorl	%esi,%edi
-	andl	$0xfff0000f,%edi
-	xorl	%edi,%eax
-	xorl	%edi,%esi
-
-	roll	$14,%eax
-	movl	%eax,%edi
-	xorl	%esi,%eax
-	andl	$0x33333333,%eax
-	xorl	%eax,%edi
-	xorl	%eax,%esi
-
-	roll	$22,%esi
-	movl	%esi,%eax
-	xorl	%edi,%esi
-	andl	$0x03fc03fc,%esi
-	xorl	%esi,%eax
-	xorl	%esi,%edi
-
-	roll	$9,%eax
-	movl	%eax,%esi
-	xorl	%edi,%eax
-	andl	$0xaaaaaaaa,%eax
-	xorl	%eax,%esi
-	xorl	%eax,%edi
-
-	roll	$1,%edi
-	call	.L000pic_point
-.L000pic_point:
-	popl	%ebp
-	leal	DES_SPtrans-.L000pic_point(%ebp),%ebp
-	movl	24(%esp),%ecx
-	cmpl	$0,%ebx
-	je	.L001decrypt
-	call	_x86_DES_encrypt
-	jmp	.L002done
-.L001decrypt:
-	call	_x86_DES_decrypt
-.L002done:
-
-
-	movl	20(%esp),%edx
-	rorl	$1,%esi
-	movl	%edi,%eax
-	xorl	%esi,%edi
-	andl	$0xaaaaaaaa,%edi
-	xorl	%edi,%eax
-	xorl	%edi,%esi
-
-	roll	$23,%eax
-	movl	%eax,%edi
-	xorl	%esi,%eax
-	andl	$0x03fc03fc,%eax
-	xorl	%eax,%edi
-	xorl	%eax,%esi
-
-	roll	$10,%edi
-	movl	%edi,%eax
-	xorl	%esi,%edi
-	andl	$0x33333333,%edi
-	xorl	%edi,%eax
-	xorl	%edi,%esi
-
-	roll	$18,%esi
-	movl	%esi,%edi
-	xorl	%eax,%esi
-	andl	$0xfff0000f,%esi
-	xorl	%esi,%edi
-	xorl	%esi,%eax
-
-	roll	$12,%edi
-	movl	%edi,%esi
-	xorl	%eax,%edi
-	andl	$0xf0f0f0f0,%edi
-	xorl	%edi,%esi
-	xorl	%edi,%eax
-
-	rorl	$4,%eax
-	movl	%eax,(%edx)
-	movl	%esi,4(%edx)
-	popl	%ebp
-	popl	%ebx
-	popl	%edi
-	popl	%esi
-	ret
-.size	DES_encrypt1,.-.L_DES_encrypt1_begin
-.globl	DES_encrypt2
-.type	DES_encrypt2,@function
-.align	16
-DES_encrypt2:
-.L_DES_encrypt2_begin:
-	pushl	%esi
-	pushl	%edi
-
-
-	movl	12(%esp),%eax
-	xorl	%ecx,%ecx
-	pushl	%ebx
-	pushl	%ebp
-	movl	(%eax),%esi
-	movl	28(%esp),%ebx
-	roll	$3,%esi
-	movl	4(%eax),%edi
-	roll	$3,%edi
-	call	.L003pic_point
-.L003pic_point:
-	popl	%ebp
-	leal	DES_SPtrans-.L003pic_point(%ebp),%ebp
-	movl	24(%esp),%ecx
-	cmpl	$0,%ebx
-	je	.L004decrypt
-	call	_x86_DES_encrypt
-	jmp	.L005done
-.L004decrypt:
-	call	_x86_DES_decrypt
-.L005done:
-
-
-	rorl	$3,%edi
-	movl	20(%esp),%eax
-	rorl	$3,%esi
-	movl	%edi,(%eax)
-	movl	%esi,4(%eax)
-	popl	%ebp
-	popl	%ebx
-	popl	%edi
-	popl	%esi
-	ret
-.size	DES_encrypt2,.-.L_DES_encrypt2_begin
-.globl	DES_encrypt3
-.type	DES_encrypt3,@function
-.align	16
-DES_encrypt3:
-.L_DES_encrypt3_begin:
-	pushl	%ebx
-	movl	8(%esp),%ebx
-	pushl	%ebp
-	pushl	%esi
-	pushl	%edi
-
-
-	movl	(%ebx),%edi
-	movl	4(%ebx),%esi
-	subl	$12,%esp
-
-
-	roll	$4,%edi
-	movl	%edi,%edx
-	xorl	%esi,%edi
-	andl	$0xf0f0f0f0,%edi
-	xorl	%edi,%edx
-	xorl	%edi,%esi
-
-	roll	$20,%esi
-	movl	%esi,%edi
-	xorl	%edx,%esi
-	andl	$0xfff0000f,%esi
-	xorl	%esi,%edi
-	xorl	%esi,%edx
-
-	roll	$14,%edi
-	movl	%edi,%esi
-	xorl	%edx,%edi
-	andl	$0x33333333,%edi
-	xorl	%edi,%esi
-	xorl	%edi,%edx
-
-	roll	$22,%edx
-	movl	%edx,%edi
-	xorl	%esi,%edx
-	andl	$0x03fc03fc,%edx
-	xorl	%edx,%edi
-	xorl	%edx,%esi
-
-	roll	$9,%edi
-	movl	%edi,%edx
-	xorl	%esi,%edi
-	andl	$0xaaaaaaaa,%edi
-	xorl	%edi,%edx
-	xorl	%edi,%esi
-
-	rorl	$3,%edx
-	rorl	$2,%esi
-	movl	%esi,4(%ebx)
-	movl	36(%esp),%eax
-	movl	%edx,(%ebx)
-	movl	40(%esp),%edi
-	movl	44(%esp),%esi
-	movl	$1,8(%esp)
-	movl	%eax,4(%esp)
-	movl	%ebx,(%esp)
-	call	.L_DES_encrypt2_begin
-	movl	$0,8(%esp)
-	movl	%edi,4(%esp)
-	movl	%ebx,(%esp)
-	call	.L_DES_encrypt2_begin
-	movl	$1,8(%esp)
-	movl	%esi,4(%esp)
-	movl	%ebx,(%esp)
-	call	.L_DES_encrypt2_begin
-	addl	$12,%esp
-	movl	(%ebx),%edi
-	movl	4(%ebx),%esi
-
-
-	roll	$2,%esi
-	roll	$3,%edi
-	movl	%edi,%eax
-	xorl	%esi,%edi
-	andl	$0xaaaaaaaa,%edi
-	xorl	%edi,%eax
-	xorl	%edi,%esi
-
-	roll	$23,%eax
-	movl	%eax,%edi
-	xorl	%esi,%eax
-	andl	$0x03fc03fc,%eax
-	xorl	%eax,%edi
-	xorl	%eax,%esi
-
-	roll	$10,%edi
-	movl	%edi,%eax
-	xorl	%esi,%edi
-	andl	$0x33333333,%edi
-	xorl	%edi,%eax
-	xorl	%edi,%esi
-
-	roll	$18,%esi
-	movl	%esi,%edi
-	xorl	%eax,%esi
-	andl	$0xfff0000f,%esi
-	xorl	%esi,%edi
-	xorl	%esi,%eax
-
-	roll	$12,%edi
-	movl	%edi,%esi
-	xorl	%eax,%edi
-	andl	$0xf0f0f0f0,%edi
-	xorl	%edi,%esi
-	xorl	%edi,%eax
-
-	rorl	$4,%eax
-	movl	%eax,(%ebx)
-	movl	%esi,4(%ebx)
-	popl	%edi
-	popl	%esi
-	popl	%ebp
-	popl	%ebx
-	ret
-.size	DES_encrypt3,.-.L_DES_encrypt3_begin
-.globl	DES_decrypt3
-.type	DES_decrypt3,@function
-.align	16
-DES_decrypt3:
-.L_DES_decrypt3_begin:
-	pushl	%ebx
-	movl	8(%esp),%ebx
-	pushl	%ebp
-	pushl	%esi
-	pushl	%edi
-
-
-	movl	(%ebx),%edi
-	movl	4(%ebx),%esi
-	subl	$12,%esp
-
-
-	roll	$4,%edi
-	movl	%edi,%edx
-	xorl	%esi,%edi
-	andl	$0xf0f0f0f0,%edi
-	xorl	%edi,%edx
-	xorl	%edi,%esi
-
-	roll	$20,%esi
-	movl	%esi,%edi
-	xorl	%edx,%esi
-	andl	$0xfff0000f,%esi
-	xorl	%esi,%edi
-	xorl	%esi,%edx
-
-	roll	$14,%edi
-	movl	%edi,%esi
-	xorl	%edx,%edi
-	andl	$0x33333333,%edi
-	xorl	%edi,%esi
-	xorl	%edi,%edx
-
-	roll	$22,%edx
-	movl	%edx,%edi
-	xorl	%esi,%edx
-	andl	$0x03fc03fc,%edx
-	xorl	%edx,%edi
-	xorl	%edx,%esi
-
-	roll	$9,%edi
-	movl	%edi,%edx
-	xorl	%esi,%edi
-	andl	$0xaaaaaaaa,%edi
-	xorl	%edi,%edx
-	xorl	%edi,%esi
-
-	rorl	$3,%edx
-	rorl	$2,%esi
-	movl	%esi,4(%ebx)
-	movl	36(%esp),%esi
-	movl	%edx,(%ebx)
-	movl	40(%esp),%edi
-	movl	44(%esp),%eax
-	movl	$0,8(%esp)
-	movl	%eax,4(%esp)
-	movl	%ebx,(%esp)
-	call	.L_DES_encrypt2_begin
-	movl	$1,8(%esp)
-	movl	%edi,4(%esp)
-	movl	%ebx,(%esp)
-	call	.L_DES_encrypt2_begin
-	movl	$0,8(%esp)
-	movl	%esi,4(%esp)
-	movl	%ebx,(%esp)
-	call	.L_DES_encrypt2_begin
-	addl	$12,%esp
-	movl	(%ebx),%edi
-	movl	4(%ebx),%esi
-
-
-	roll	$2,%esi
-	roll	$3,%edi
-	movl	%edi,%eax
-	xorl	%esi,%edi
-	andl	$0xaaaaaaaa,%edi
-	xorl	%edi,%eax
-	xorl	%edi,%esi
-
-	roll	$23,%eax
-	movl	%eax,%edi
-	xorl	%esi,%eax
-	andl	$0x03fc03fc,%eax
-	xorl	%eax,%edi
-	xorl	%eax,%esi
-
-	roll	$10,%edi
-	movl	%edi,%eax
-	xorl	%esi,%edi
-	andl	$0x33333333,%edi
-	xorl	%edi,%eax
-	xorl	%edi,%esi
-
-	roll	$18,%esi
-	movl	%esi,%edi
-	xorl	%eax,%esi
-	andl	$0xfff0000f,%esi
-	xorl	%esi,%edi
-	xorl	%esi,%eax
-
-	roll	$12,%edi
-	movl	%edi,%esi
-	xorl	%eax,%edi
-	andl	$0xf0f0f0f0,%edi
-	xorl	%edi,%esi
-	xorl	%edi,%eax
-
-	rorl	$4,%eax
-	movl	%eax,(%ebx)
-	movl	%esi,4(%ebx)
-	popl	%edi
-	popl	%esi
-	popl	%ebp
-	popl	%ebx
-	ret
-.size	DES_decrypt3,.-.L_DES_decrypt3_begin
-.globl	DES_ncbc_encrypt
-.type	DES_ncbc_encrypt,@function
-.align	16
-DES_ncbc_encrypt:
-.L_DES_ncbc_encrypt_begin:
-
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	28(%esp),%ebp
-
-	movl	36(%esp),%ebx
-	movl	(%ebx),%esi
-	movl	4(%ebx),%edi
-	pushl	%edi
-	pushl	%esi
-	pushl	%edi
-	pushl	%esi
-	movl	%esp,%ebx
-	movl	36(%esp),%esi
-	movl	40(%esp),%edi
-
-	movl	56(%esp),%ecx
-
-	pushl	%ecx
-
-	movl	52(%esp),%eax
-	pushl	%eax
-	pushl	%ebx
-	cmpl	$0,%ecx
-	jz	.L006decrypt
-	andl	$4294967288,%ebp
-	movl	12(%esp),%eax
-	movl	16(%esp),%ebx
-	jz	.L007encrypt_finish
-.L008encrypt_loop:
-	movl	(%esi),%ecx
-	movl	4(%esi),%edx
-	xorl	%ecx,%eax
-	xorl	%edx,%ebx
-	movl	%eax,12(%esp)
-	movl	%ebx,16(%esp)
-	call	.L_DES_encrypt1_begin
-	movl	12(%esp),%eax
-	movl	16(%esp),%ebx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	addl	$8,%esi
-	addl	$8,%edi
-	subl	$8,%ebp
-	jnz	.L008encrypt_loop
-.L007encrypt_finish:
-	movl	56(%esp),%ebp
-	andl	$7,%ebp
-	jz	.L009finish
-	call	.L010PIC_point
-.L010PIC_point:
-	popl	%edx
-	leal	.L011cbc_enc_jmp_table-.L010PIC_point(%edx),%ecx
-	movl	(%ecx,%ebp,4),%ebp
-	addl	%edx,%ebp
-	xorl	%ecx,%ecx
-	xorl	%edx,%edx
-	jmp	*%ebp
-.L012ej7:
-	movb	6(%esi),%dh
-	shll	$8,%edx
-.L013ej6:
-	movb	5(%esi),%dh
-.L014ej5:
-	movb	4(%esi),%dl
-.L015ej4:
-	movl	(%esi),%ecx
-	jmp	.L016ejend
-.L017ej3:
-	movb	2(%esi),%ch
-	shll	$8,%ecx
-.L018ej2:
-	movb	1(%esi),%ch
-.L019ej1:
-	movb	(%esi),%cl
-.L016ejend:
-	xorl	%ecx,%eax
-	xorl	%edx,%ebx
-	movl	%eax,12(%esp)
-	movl	%ebx,16(%esp)
-	call	.L_DES_encrypt1_begin
-	movl	12(%esp),%eax
-	movl	16(%esp),%ebx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	jmp	.L009finish
-.L006decrypt:
-	andl	$4294967288,%ebp
-	movl	20(%esp),%eax
-	movl	24(%esp),%ebx
-	jz	.L020decrypt_finish
-.L021decrypt_loop:
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	%eax,12(%esp)
-	movl	%ebx,16(%esp)
-	call	.L_DES_encrypt1_begin
-	movl	12(%esp),%eax
-	movl	16(%esp),%ebx
-	movl	20(%esp),%ecx
-	movl	24(%esp),%edx
-	xorl	%eax,%ecx
-	xorl	%ebx,%edx
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	%ecx,(%edi)
-	movl	%edx,4(%edi)
-	movl	%eax,20(%esp)
-	movl	%ebx,24(%esp)
-	addl	$8,%esi
-	addl	$8,%edi
-	subl	$8,%ebp
-	jnz	.L021decrypt_loop
-.L020decrypt_finish:
-	movl	56(%esp),%ebp
-	andl	$7,%ebp
-	jz	.L009finish
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	%eax,12(%esp)
-	movl	%ebx,16(%esp)
-	call	.L_DES_encrypt1_begin
-	movl	12(%esp),%eax
-	movl	16(%esp),%ebx
-	movl	20(%esp),%ecx
-	movl	24(%esp),%edx
-	xorl	%eax,%ecx
-	xorl	%ebx,%edx
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-.L022dj7:
-	rorl	$16,%edx
-	movb	%dl,6(%edi)
-	shrl	$16,%edx
-.L023dj6:
-	movb	%dh,5(%edi)
-.L024dj5:
-	movb	%dl,4(%edi)
-.L025dj4:
-	movl	%ecx,(%edi)
-	jmp	.L026djend
-.L027dj3:
-	rorl	$16,%ecx
-	movb	%cl,2(%edi)
-	shll	$16,%ecx
-.L028dj2:
-	movb	%ch,1(%esi)
-.L029dj1:
-	movb	%cl,(%esi)
-.L026djend:
-	jmp	.L009finish
-.L009finish:
-	movl	64(%esp),%ecx
-	addl	$28,%esp
-	movl	%eax,(%ecx)
-	movl	%ebx,4(%ecx)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.align	64
-.L011cbc_enc_jmp_table:
-.long	0
-.long	.L019ej1-.L010PIC_point
-.long	.L018ej2-.L010PIC_point
-.long	.L017ej3-.L010PIC_point
-.long	.L015ej4-.L010PIC_point
-.long	.L014ej5-.L010PIC_point
-.long	.L013ej6-.L010PIC_point
-.long	.L012ej7-.L010PIC_point
-.align	64
-.size	DES_ncbc_encrypt,.-.L_DES_ncbc_encrypt_begin
-.globl	DES_ede3_cbc_encrypt
-.type	DES_ede3_cbc_encrypt,@function
-.align	16
-DES_ede3_cbc_encrypt:
-.L_DES_ede3_cbc_encrypt_begin:
-
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	28(%esp),%ebp
-
-	movl	44(%esp),%ebx
-	movl	(%ebx),%esi
-	movl	4(%ebx),%edi
-	pushl	%edi
-	pushl	%esi
-	pushl	%edi
-	pushl	%esi
-	movl	%esp,%ebx
-	movl	36(%esp),%esi
-	movl	40(%esp),%edi
-
-	movl	64(%esp),%ecx
-
-	movl	56(%esp),%eax
-	pushl	%eax
-
-	movl	56(%esp),%eax
-	pushl	%eax
-
-	movl	56(%esp),%eax
-	pushl	%eax
-	pushl	%ebx
-	cmpl	$0,%ecx
-	jz	.L030decrypt
-	andl	$4294967288,%ebp
-	movl	16(%esp),%eax
-	movl	20(%esp),%ebx
-	jz	.L031encrypt_finish
-.L032encrypt_loop:
-	movl	(%esi),%ecx
-	movl	4(%esi),%edx
-	xorl	%ecx,%eax
-	xorl	%edx,%ebx
-	movl	%eax,16(%esp)
-	movl	%ebx,20(%esp)
-	call	.L_DES_encrypt3_begin
-	movl	16(%esp),%eax
-	movl	20(%esp),%ebx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	addl	$8,%esi
-	addl	$8,%edi
-	subl	$8,%ebp
-	jnz	.L032encrypt_loop
-.L031encrypt_finish:
-	movl	60(%esp),%ebp
-	andl	$7,%ebp
-	jz	.L033finish
-	call	.L034PIC_point
-.L034PIC_point:
-	popl	%edx
-	leal	.L035cbc_enc_jmp_table-.L034PIC_point(%edx),%ecx
-	movl	(%ecx,%ebp,4),%ebp
-	addl	%edx,%ebp
-	xorl	%ecx,%ecx
-	xorl	%edx,%edx
-	jmp	*%ebp
-.L036ej7:
-	movb	6(%esi),%dh
-	shll	$8,%edx
-.L037ej6:
-	movb	5(%esi),%dh
-.L038ej5:
-	movb	4(%esi),%dl
-.L039ej4:
-	movl	(%esi),%ecx
-	jmp	.L040ejend
-.L041ej3:
-	movb	2(%esi),%ch
-	shll	$8,%ecx
-.L042ej2:
-	movb	1(%esi),%ch
-.L043ej1:
-	movb	(%esi),%cl
-.L040ejend:
-	xorl	%ecx,%eax
-	xorl	%edx,%ebx
-	movl	%eax,16(%esp)
-	movl	%ebx,20(%esp)
-	call	.L_DES_encrypt3_begin
-	movl	16(%esp),%eax
-	movl	20(%esp),%ebx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	jmp	.L033finish
-.L030decrypt:
-	andl	$4294967288,%ebp
-	movl	24(%esp),%eax
-	movl	28(%esp),%ebx
-	jz	.L044decrypt_finish
-.L045decrypt_loop:
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	%eax,16(%esp)
-	movl	%ebx,20(%esp)
-	call	.L_DES_decrypt3_begin
-	movl	16(%esp),%eax
-	movl	20(%esp),%ebx
-	movl	24(%esp),%ecx
-	movl	28(%esp),%edx
-	xorl	%eax,%ecx
-	xorl	%ebx,%edx
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	%ecx,(%edi)
-	movl	%edx,4(%edi)
-	movl	%eax,24(%esp)
-	movl	%ebx,28(%esp)
-	addl	$8,%esi
-	addl	$8,%edi
-	subl	$8,%ebp
-	jnz	.L045decrypt_loop
-.L044decrypt_finish:
-	movl	60(%esp),%ebp
-	andl	$7,%ebp
-	jz	.L033finish
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	%eax,16(%esp)
-	movl	%ebx,20(%esp)
-	call	.L_DES_decrypt3_begin
-	movl	16(%esp),%eax
-	movl	20(%esp),%ebx
-	movl	24(%esp),%ecx
-	movl	28(%esp),%edx
-	xorl	%eax,%ecx
-	xorl	%ebx,%edx
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-.L046dj7:
-	rorl	$16,%edx
-	movb	%dl,6(%edi)
-	shrl	$16,%edx
-.L047dj6:
-	movb	%dh,5(%edi)
-.L048dj5:
-	movb	%dl,4(%edi)
-.L049dj4:
-	movl	%ecx,(%edi)
-	jmp	.L050djend
-.L051dj3:
-	rorl	$16,%ecx
-	movb	%cl,2(%edi)
-	shll	$16,%ecx
-.L052dj2:
-	movb	%ch,1(%esi)
-.L053dj1:
-	movb	%cl,(%esi)
-.L050djend:
-	jmp	.L033finish
-.L033finish:
-	movl	76(%esp),%ecx
-	addl	$32,%esp
-	movl	%eax,(%ecx)
-	movl	%ebx,4(%ecx)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.align	64
-.L035cbc_enc_jmp_table:
-.long	0
-.long	.L043ej1-.L034PIC_point
-.long	.L042ej2-.L034PIC_point
-.long	.L041ej3-.L034PIC_point
-.long	.L039ej4-.L034PIC_point
-.long	.L038ej5-.L034PIC_point
-.long	.L037ej6-.L034PIC_point
-.long	.L036ej7-.L034PIC_point
-.align	64
-.size	DES_ede3_cbc_encrypt,.-.L_DES_ede3_cbc_encrypt_begin
-.align	64
-DES_SPtrans:
-.long	34080768,524288,33554434,34080770
-.long	33554432,526338,524290,33554434
-.long	526338,34080768,34078720,2050
-.long	33556482,33554432,0,524290
-.long	524288,2,33556480,526336
-.long	34080770,34078720,2050,33556480
-.long	2,2048,526336,34078722
-.long	2048,33556482,34078722,0
-.long	0,34080770,33556480,524290
-.long	34080768,524288,2050,33556480
-.long	34078722,2048,526336,33554434
-.long	526338,2,33554434,34078720
-.long	34080770,526336,34078720,33556482
-.long	33554432,2050,524290,0
-.long	524288,33554432,33556482,34080768
-.long	2,34078722,2048,526338
-.long	1074823184,0,1081344,1074790400
-.long	1073741840,32784,1073774592,1081344
-.long	32768,1074790416,16,1073774592
-.long	1048592,1074823168,1074790400,16
-.long	1048576,1073774608,1074790416,32768
-.long	1081360,1073741824,0,1048592
-.long	1073774608,1081360,1074823168,1073741840
-.long	1073741824,1048576,32784,1074823184
-.long	1048592,1074823168,1073774592,1081360
-.long	1074823184,1048592,1073741840,0
-.long	1073741824,32784,1048576,1074790416
-.long	32768,1073741824,1081360,1073774608
-.long	1074823168,32768,0,1073741840
-.long	16,1074823184,1081344,1074790400
-.long	1074790416,1048576,32784,1073774592
-.long	1073774608,16,1074790400,1081344
-.long	67108865,67371264,256,67109121
-.long	262145,67108864,67109121,262400
-.long	67109120,262144,67371008,1
-.long	67371265,257,1,67371009
-.long	0,262145,67371264,256
-.long	257,67371265,262144,67108865
-.long	67371009,67109120,262401,67371008
-.long	262400,0,67108864,262401
-.long	67371264,256,1,262144
-.long	257,262145,67371008,67109121
-.long	0,67371264,262400,67371009
-.long	262145,67108864,67371265,1
-.long	262401,67108865,67108864,67371265
-.long	262144,67109120,67109121,262400
-.long	67109120,0,67371009,257
-.long	67108865,262401,256,67371008
-.long	4198408,268439552,8,272633864
-.long	0,272629760,268439560,4194312
-.long	272633856,268435464,268435456,4104
-.long	268435464,4198408,4194304,268435456
-.long	272629768,4198400,4096,8
-.long	4198400,268439560,272629760,4096
-.long	4104,0,4194312,272633856
-.long	268439552,272629768,272633864,4194304
-.long	272629768,4104,4194304,268435464
-.long	4198400,268439552,8,272629760
-.long	268439560,0,4096,4194312
-.long	0,272629768,272633856,4096
-.long	268435456,272633864,4198408,4194304
-.long	272633864,8,268439552,4198408
-.long	4194312,4198400,272629760,268439560
-.long	4104,268435456,268435464,272633856
-.long	134217728,65536,1024,134284320
-.long	134283296,134218752,66592,134283264
-.long	65536,32,134217760,66560
-.long	134218784,134283296,134284288,0
-.long	66560,134217728,65568,1056
-.long	134218752,66592,0,134217760
-.long	32,134218784,134284320,65568
-.long	134283264,1024,1056,134284288
-.long	134284288,134218784,65568,134283264
-.long	65536,32,134217760,134218752
-.long	134217728,66560,134284320,0
-.long	66592,134217728,1024,65568
-.long	134218784,1024,0,134284320
-.long	134283296,134284288,1056,65536
-.long	66560,134283296,134218752,1056
-.long	32,66592,134283264,134217760
-.long	2147483712,2097216,0,2149588992
-.long	2097216,8192,2147491904,2097152
-.long	8256,2149589056,2105344,2147483648
-.long	2147491840,2147483712,2149580800,2105408
-.long	2097152,2147491904,2149580864,0
-.long	8192,64,2149588992,2149580864
-.long	2149589056,2149580800,2147483648,8256
-.long	64,2105344,2105408,2147491840
-.long	8256,2147483648,2147491840,2105408
-.long	2149588992,2097216,0,2147491840
-.long	2147483648,8192,2149580864,2097152
-.long	2097216,2149589056,2105344,64
-.long	2149589056,2105344,2097152,2147491904
-.long	2147483712,2149580800,2105408,0
-.long	8192,2147483712,2147491904,2149588992
-.long	2149580800,8256,64,2149580864
-.long	16384,512,16777728,16777220
-.long	16794116,16388,16896,0
-.long	16777216,16777732,516,16793600
-.long	4,16794112,16793600,516
-.long	16777732,16384,16388,16794116
-.long	0,16777728,16777220,16896
-.long	16793604,16900,16794112,4
-.long	16900,16793604,512,16777216
-.long	16900,16793600,16793604,516
-.long	16384,512,16777216,16793604
-.long	16777732,16900,16896,0
-.long	512,16777220,4,16777728
-.long	0,16777732,16777728,16896
-.long	516,16384,16794116,16777216
-.long	16794112,4,16388,16794116
-.long	16777220,16794112,16793600,16388
-.long	545259648,545390592,131200,0
-.long	537001984,8388736,545259520,545390720
-.long	128,536870912,8519680,131200
-.long	8519808,537002112,536871040,545259520
-.long	131072,8519808,8388736,537001984
-.long	545390720,536871040,0,8519680
-.long	536870912,8388608,537002112,545259648
-.long	8388608,131072,545390592,128
-.long	8388608,131072,536871040,545390720
-.long	131200,536870912,0,8519680
-.long	545259648,537002112,537001984,8388736
-.long	545390592,128,8388736,537001984
-.long	545390720,8388608,545259520,536871040
-.long	8519680,131200,537002112,545259520
-.long	128,545390592,8519808,0
-.long	536870912,545259648,131072,8519808

Added: trunk/secure/lib/libcrypto/i386/ghash-x86.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/ghash-x86.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/ghash-x86.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,2544 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/ghash-x86.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from ghash-x86.pl.
+#ifdef PIC
+.file	"ghash-x86.S"
+.text
+.globl	gcm_gmult_4bit_x86
+.type	gcm_gmult_4bit_x86,@function
+.align	16
+gcm_gmult_4bit_x86:
+.L_gcm_gmult_4bit_x86_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	subl	$84,%esp
+	movl	104(%esp),%edi
+	movl	108(%esp),%esi
+	movl	(%edi),%ebp
+	movl	4(%edi),%edx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%ebx
+	movl	$0,16(%esp)
+	movl	$471859200,20(%esp)
+	movl	$943718400,24(%esp)
+	movl	$610271232,28(%esp)
+	movl	$1887436800,32(%esp)
+	movl	$1822425088,36(%esp)
+	movl	$1220542464,40(%esp)
+	movl	$1423966208,44(%esp)
+	movl	$3774873600,48(%esp)
+	movl	$4246732800,52(%esp)
+	movl	$3644850176,56(%esp)
+	movl	$3311403008,60(%esp)
+	movl	$2441084928,64(%esp)
+	movl	$2376073216,68(%esp)
+	movl	$2847932416,72(%esp)
+	movl	$3051356160,76(%esp)
+	movl	%ebp,(%esp)
+	movl	%edx,4(%esp)
+	movl	%ecx,8(%esp)
+	movl	%ebx,12(%esp)
+	shrl	$20,%ebx
+	andl	$240,%ebx
+	movl	4(%esi,%ebx,1),%ebp
+	movl	(%esi,%ebx,1),%edx
+	movl	12(%esi,%ebx,1),%ecx
+	movl	8(%esi,%ebx,1),%ebx
+	xorl	%eax,%eax
+	movl	$15,%edi
+	jmp	.L000x86_loop
+.align	16
+.L000x86_loop:
+	movb	%bl,%al
+	shrdl	$4,%ecx,%ebx
+	andb	$15,%al
+	shrdl	$4,%edx,%ecx
+	shrdl	$4,%ebp,%edx
+	shrl	$4,%ebp
+	xorl	16(%esp,%eax,4),%ebp
+	movb	(%esp,%edi,1),%al
+	andb	$240,%al
+	xorl	8(%esi,%eax,1),%ebx
+	xorl	12(%esi,%eax,1),%ecx
+	xorl	(%esi,%eax,1),%edx
+	xorl	4(%esi,%eax,1),%ebp
+	decl	%edi
+	js	.L001x86_break
+	movb	%bl,%al
+	shrdl	$4,%ecx,%ebx
+	andb	$15,%al
+	shrdl	$4,%edx,%ecx
+	shrdl	$4,%ebp,%edx
+	shrl	$4,%ebp
+	xorl	16(%esp,%eax,4),%ebp
+	movb	(%esp,%edi,1),%al
+	shlb	$4,%al
+	xorl	8(%esi,%eax,1),%ebx
+	xorl	12(%esi,%eax,1),%ecx
+	xorl	(%esi,%eax,1),%edx
+	xorl	4(%esi,%eax,1),%ebp
+	jmp	.L000x86_loop
+.align	16
+.L001x86_break:
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	bswap	%ebp
+	movl	104(%esp),%edi
+	movl	%ebx,12(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,4(%edi)
+	movl	%ebp,(%edi)
+	addl	$84,%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	gcm_gmult_4bit_x86,.-.L_gcm_gmult_4bit_x86_begin
+.globl	gcm_ghash_4bit_x86
+.type	gcm_ghash_4bit_x86,@function
+.align	16
+gcm_ghash_4bit_x86:
+.L_gcm_ghash_4bit_x86_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	subl	$84,%esp
+	movl	104(%esp),%ebx
+	movl	108(%esp),%esi
+	movl	112(%esp),%edi
+	movl	116(%esp),%ecx
+	addl	%edi,%ecx
+	movl	%ecx,116(%esp)
+	movl	(%ebx),%ebp
+	movl	4(%ebx),%edx
+	movl	8(%ebx),%ecx
+	movl	12(%ebx),%ebx
+	movl	$0,16(%esp)
+	movl	$471859200,20(%esp)
+	movl	$943718400,24(%esp)
+	movl	$610271232,28(%esp)
+	movl	$1887436800,32(%esp)
+	movl	$1822425088,36(%esp)
+	movl	$1220542464,40(%esp)
+	movl	$1423966208,44(%esp)
+	movl	$3774873600,48(%esp)
+	movl	$4246732800,52(%esp)
+	movl	$3644850176,56(%esp)
+	movl	$3311403008,60(%esp)
+	movl	$2441084928,64(%esp)
+	movl	$2376073216,68(%esp)
+	movl	$2847932416,72(%esp)
+	movl	$3051356160,76(%esp)
+.align	16
+.L002x86_outer_loop:
+	xorl	12(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	4(%edi),%edx
+	xorl	(%edi),%ebp
+	movl	%ebx,12(%esp)
+	movl	%ecx,8(%esp)
+	movl	%edx,4(%esp)
+	movl	%ebp,(%esp)
+	shrl	$20,%ebx
+	andl	$240,%ebx
+	movl	4(%esi,%ebx,1),%ebp
+	movl	(%esi,%ebx,1),%edx
+	movl	12(%esi,%ebx,1),%ecx
+	movl	8(%esi,%ebx,1),%ebx
+	xorl	%eax,%eax
+	movl	$15,%edi
+	jmp	.L003x86_loop
+.align	16
+.L003x86_loop:
+	movb	%bl,%al
+	shrdl	$4,%ecx,%ebx
+	andb	$15,%al
+	shrdl	$4,%edx,%ecx
+	shrdl	$4,%ebp,%edx
+	shrl	$4,%ebp
+	xorl	16(%esp,%eax,4),%ebp
+	movb	(%esp,%edi,1),%al
+	andb	$240,%al
+	xorl	8(%esi,%eax,1),%ebx
+	xorl	12(%esi,%eax,1),%ecx
+	xorl	(%esi,%eax,1),%edx
+	xorl	4(%esi,%eax,1),%ebp
+	decl	%edi
+	js	.L004x86_break
+	movb	%bl,%al
+	shrdl	$4,%ecx,%ebx
+	andb	$15,%al
+	shrdl	$4,%edx,%ecx
+	shrdl	$4,%ebp,%edx
+	shrl	$4,%ebp
+	xorl	16(%esp,%eax,4),%ebp
+	movb	(%esp,%edi,1),%al
+	shlb	$4,%al
+	xorl	8(%esi,%eax,1),%ebx
+	xorl	12(%esi,%eax,1),%ecx
+	xorl	(%esi,%eax,1),%edx
+	xorl	4(%esi,%eax,1),%ebp
+	jmp	.L003x86_loop
+.align	16
+.L004x86_break:
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	bswap	%ebp
+	movl	112(%esp),%edi
+	leal	16(%edi),%edi
+	cmpl	116(%esp),%edi
+	movl	%edi,112(%esp)
+	jb	.L002x86_outer_loop
+	movl	104(%esp),%edi
+	movl	%ebx,12(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,4(%edi)
+	movl	%ebp,(%edi)
+	addl	$84,%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	gcm_ghash_4bit_x86,.-.L_gcm_ghash_4bit_x86_begin
+.globl	gcm_gmult_4bit_mmx
+.type	gcm_gmult_4bit_mmx,@function
+.align	16
+gcm_gmult_4bit_mmx:
+.L_gcm_gmult_4bit_mmx_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%edi
+	movl	24(%esp),%esi
+	call	.L005pic_point
+.L005pic_point:
+	popl	%eax
+	leal	.Lrem_4bit-.L005pic_point(%eax),%eax
+	movzbl	15(%edi),%ebx
+	xorl	%ecx,%ecx
+	movl	%ebx,%edx
+	movb	%dl,%cl
+	movl	$14,%ebp
+	shlb	$4,%cl
+	andl	$240,%edx
+	movq	8(%esi,%ecx,1),%mm0
+	movq	(%esi,%ecx,1),%mm1
+	movd	%mm0,%ebx
+	jmp	.L006mmx_loop
+.align	16
+.L006mmx_loop:
+	psrlq	$4,%mm0
+	andl	$15,%ebx
+	movq	%mm1,%mm2
+	psrlq	$4,%mm1
+	pxor	8(%esi,%edx,1),%mm0
+	movb	(%edi,%ebp,1),%cl
+	psllq	$60,%mm2
+	pxor	(%eax,%ebx,8),%mm1
+	decl	%ebp
+	movd	%mm0,%ebx
+	pxor	(%esi,%edx,1),%mm1
+	movl	%ecx,%edx
+	pxor	%mm2,%mm0
+	js	.L007mmx_break
+	shlb	$4,%cl
+	andl	$15,%ebx
+	psrlq	$4,%mm0
+	andl	$240,%edx
+	movq	%mm1,%mm2
+	psrlq	$4,%mm1
+	pxor	8(%esi,%ecx,1),%mm0
+	psllq	$60,%mm2
+	pxor	(%eax,%ebx,8),%mm1
+	movd	%mm0,%ebx
+	pxor	(%esi,%ecx,1),%mm1
+	pxor	%mm2,%mm0
+	jmp	.L006mmx_loop
+.align	16
+.L007mmx_break:
+	shlb	$4,%cl
+	andl	$15,%ebx
+	psrlq	$4,%mm0
+	andl	$240,%edx
+	movq	%mm1,%mm2
+	psrlq	$4,%mm1
+	pxor	8(%esi,%ecx,1),%mm0
+	psllq	$60,%mm2
+	pxor	(%eax,%ebx,8),%mm1
+	movd	%mm0,%ebx
+	pxor	(%esi,%ecx,1),%mm1
+	pxor	%mm2,%mm0
+	psrlq	$4,%mm0
+	andl	$15,%ebx
+	movq	%mm1,%mm2
+	psrlq	$4,%mm1
+	pxor	8(%esi,%edx,1),%mm0
+	psllq	$60,%mm2
+	pxor	(%eax,%ebx,8),%mm1
+	movd	%mm0,%ebx
+	pxor	(%esi,%edx,1),%mm1
+	pxor	%mm2,%mm0
+	psrlq	$32,%mm0
+	movd	%mm1,%edx
+	psrlq	$32,%mm1
+	movd	%mm0,%ecx
+	movd	%mm1,%ebp
+	bswap	%ebx
+	bswap	%edx
+	bswap	%ecx
+	bswap	%ebp
+	emms
+	movl	%ebx,12(%edi)
+	movl	%edx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%ebp,(%edi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	gcm_gmult_4bit_mmx,.-.L_gcm_gmult_4bit_mmx_begin
+.globl	gcm_ghash_4bit_mmx
+.type	gcm_ghash_4bit_mmx,@function
+.align	16
+gcm_ghash_4bit_mmx:
+.L_gcm_ghash_4bit_mmx_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%eax
+	movl	24(%esp),%ebx
+	movl	28(%esp),%ecx
+	movl	32(%esp),%edx
+	movl	%esp,%ebp
+	call	.L008pic_point
+.L008pic_point:
+	popl	%esi
+	leal	.Lrem_8bit-.L008pic_point(%esi),%esi
+	subl	$544,%esp
+	andl	$-64,%esp
+	subl	$16,%esp
+	addl	%ecx,%edx
+	movl	%eax,544(%esp)
+	movl	%edx,552(%esp)
+	movl	%ebp,556(%esp)
+	addl	$128,%ebx
+	leal	144(%esp),%edi
+	leal	400(%esp),%ebp
+	movl	-120(%ebx),%edx
+	movq	-120(%ebx),%mm0
+	movq	-128(%ebx),%mm3
+	shll	$4,%edx
+	movb	%dl,(%esp)
+	movl	-104(%ebx),%edx
+	movq	-104(%ebx),%mm2
+	movq	-112(%ebx),%mm5
+	movq	%mm0,-128(%edi)
+	psrlq	$4,%mm0
+	movq	%mm3,(%edi)
+	movq	%mm3,%mm7
+	psrlq	$4,%mm3
+	shll	$4,%edx
+	movb	%dl,1(%esp)
+	movl	-88(%ebx),%edx
+	movq	-88(%ebx),%mm1
+	psllq	$60,%mm7
+	movq	-96(%ebx),%mm4
+	por	%mm7,%mm0
+	movq	%mm2,-120(%edi)
+	psrlq	$4,%mm2
+	movq	%mm5,8(%edi)
+	movq	%mm5,%mm6
+	movq	%mm0,-128(%ebp)
+	psrlq	$4,%mm5
+	movq	%mm3,(%ebp)
+	shll	$4,%edx
+	movb	%dl,2(%esp)
+	movl	-72(%ebx),%edx
+	movq	-72(%ebx),%mm0
+	psllq	$60,%mm6
+	movq	-80(%ebx),%mm3
+	por	%mm6,%mm2
+	movq	%mm1,-112(%edi)
+	psrlq	$4,%mm1
+	movq	%mm4,16(%edi)
+	movq	%mm4,%mm7
+	movq	%mm2,-120(%ebp)
+	psrlq	$4,%mm4
+	movq	%mm5,8(%ebp)
+	shll	$4,%edx
+	movb	%dl,3(%esp)
+	movl	-56(%ebx),%edx
+	movq	-56(%ebx),%mm2
+	psllq	$60,%mm7
+	movq	-64(%ebx),%mm5
+	por	%mm7,%mm1
+	movq	%mm0,-104(%edi)
+	psrlq	$4,%mm0
+	movq	%mm3,24(%edi)
+	movq	%mm3,%mm6
+	movq	%mm1,-112(%ebp)
+	psrlq	$4,%mm3
+	movq	%mm4,16(%ebp)
+	shll	$4,%edx
+	movb	%dl,4(%esp)
+	movl	-40(%ebx),%edx
+	movq	-40(%ebx),%mm1
+	psllq	$60,%mm6
+	movq	-48(%ebx),%mm4
+	por	%mm6,%mm0
+	movq	%mm2,-96(%edi)
+	psrlq	$4,%mm2
+	movq	%mm5,32(%edi)
+	movq	%mm5,%mm7
+	movq	%mm0,-104(%ebp)
+	psrlq	$4,%mm5
+	movq	%mm3,24(%ebp)
+	shll	$4,%edx
+	movb	%dl,5(%esp)
+	movl	-24(%ebx),%edx
+	movq	-24(%ebx),%mm0
+	psllq	$60,%mm7
+	movq	-32(%ebx),%mm3
+	por	%mm7,%mm2
+	movq	%mm1,-88(%edi)
+	psrlq	$4,%mm1
+	movq	%mm4,40(%edi)
+	movq	%mm4,%mm6
+	movq	%mm2,-96(%ebp)
+	psrlq	$4,%mm4
+	movq	%mm5,32(%ebp)
+	shll	$4,%edx
+	movb	%dl,6(%esp)
+	movl	-8(%ebx),%edx
+	movq	-8(%ebx),%mm2
+	psllq	$60,%mm6
+	movq	-16(%ebx),%mm5
+	por	%mm6,%mm1
+	movq	%mm0,-80(%edi)
+	psrlq	$4,%mm0
+	movq	%mm3,48(%edi)
+	movq	%mm3,%mm7
+	movq	%mm1,-88(%ebp)
+	psrlq	$4,%mm3
+	movq	%mm4,40(%ebp)
+	shll	$4,%edx
+	movb	%dl,7(%esp)
+	movl	8(%ebx),%edx
+	movq	8(%ebx),%mm1
+	psllq	$60,%mm7
+	movq	(%ebx),%mm4
+	por	%mm7,%mm0
+	movq	%mm2,-72(%edi)
+	psrlq	$4,%mm2
+	movq	%mm5,56(%edi)
+	movq	%mm5,%mm6
+	movq	%mm0,-80(%ebp)
+	psrlq	$4,%mm5
+	movq	%mm3,48(%ebp)
+	shll	$4,%edx
+	movb	%dl,8(%esp)
+	movl	24(%ebx),%edx
+	movq	24(%ebx),%mm0
+	psllq	$60,%mm6
+	movq	16(%ebx),%mm3
+	por	%mm6,%mm2
+	movq	%mm1,-64(%edi)
+	psrlq	$4,%mm1
+	movq	%mm4,64(%edi)
+	movq	%mm4,%mm7
+	movq	%mm2,-72(%ebp)
+	psrlq	$4,%mm4
+	movq	%mm5,56(%ebp)
+	shll	$4,%edx
+	movb	%dl,9(%esp)
+	movl	40(%ebx),%edx
+	movq	40(%ebx),%mm2
+	psllq	$60,%mm7
+	movq	32(%ebx),%mm5
+	por	%mm7,%mm1
+	movq	%mm0,-56(%edi)
+	psrlq	$4,%mm0
+	movq	%mm3,72(%edi)
+	movq	%mm3,%mm6
+	movq	%mm1,-64(%ebp)
+	psrlq	$4,%mm3
+	movq	%mm4,64(%ebp)
+	shll	$4,%edx
+	movb	%dl,10(%esp)
+	movl	56(%ebx),%edx
+	movq	56(%ebx),%mm1
+	psllq	$60,%mm6
+	movq	48(%ebx),%mm4
+	por	%mm6,%mm0
+	movq	%mm2,-48(%edi)
+	psrlq	$4,%mm2
+	movq	%mm5,80(%edi)
+	movq	%mm5,%mm7
+	movq	%mm0,-56(%ebp)
+	psrlq	$4,%mm5
+	movq	%mm3,72(%ebp)
+	shll	$4,%edx
+	movb	%dl,11(%esp)
+	movl	72(%ebx),%edx
+	movq	72(%ebx),%mm0
+	psllq	$60,%mm7
+	movq	64(%ebx),%mm3
+	por	%mm7,%mm2
+	movq	%mm1,-40(%edi)
+	psrlq	$4,%mm1
+	movq	%mm4,88(%edi)
+	movq	%mm4,%mm6
+	movq	%mm2,-48(%ebp)
+	psrlq	$4,%mm4
+	movq	%mm5,80(%ebp)
+	shll	$4,%edx
+	movb	%dl,12(%esp)
+	movl	88(%ebx),%edx
+	movq	88(%ebx),%mm2
+	psllq	$60,%mm6
+	movq	80(%ebx),%mm5
+	por	%mm6,%mm1
+	movq	%mm0,-32(%edi)
+	psrlq	$4,%mm0
+	movq	%mm3,96(%edi)
+	movq	%mm3,%mm7
+	movq	%mm1,-40(%ebp)
+	psrlq	$4,%mm3
+	movq	%mm4,88(%ebp)
+	shll	$4,%edx
+	movb	%dl,13(%esp)
+	movl	104(%ebx),%edx
+	movq	104(%ebx),%mm1
+	psllq	$60,%mm7
+	movq	96(%ebx),%mm4
+	por	%mm7,%mm0
+	movq	%mm2,-24(%edi)
+	psrlq	$4,%mm2
+	movq	%mm5,104(%edi)
+	movq	%mm5,%mm6
+	movq	%mm0,-32(%ebp)
+	psrlq	$4,%mm5
+	movq	%mm3,96(%ebp)
+	shll	$4,%edx
+	movb	%dl,14(%esp)
+	movl	120(%ebx),%edx
+	movq	120(%ebx),%mm0
+	psllq	$60,%mm6
+	movq	112(%ebx),%mm3
+	por	%mm6,%mm2
+	movq	%mm1,-16(%edi)
+	psrlq	$4,%mm1
+	movq	%mm4,112(%edi)
+	movq	%mm4,%mm7
+	movq	%mm2,-24(%ebp)
+	psrlq	$4,%mm4
+	movq	%mm5,104(%ebp)
+	shll	$4,%edx
+	movb	%dl,15(%esp)
+	psllq	$60,%mm7
+	por	%mm7,%mm1
+	movq	%mm0,-8(%edi)
+	psrlq	$4,%mm0
+	movq	%mm3,120(%edi)
+	movq	%mm3,%mm6
+	movq	%mm1,-16(%ebp)
+	psrlq	$4,%mm3
+	movq	%mm4,112(%ebp)
+	psllq	$60,%mm6
+	por	%mm6,%mm0
+	movq	%mm0,-8(%ebp)
+	movq	%mm3,120(%ebp)
+	movq	(%eax),%mm6
+	movl	8(%eax),%ebx
+	movl	12(%eax),%edx
+.align	16
+.L009outer:
+	xorl	12(%ecx),%edx
+	xorl	8(%ecx),%ebx
+	pxor	(%ecx),%mm6
+	leal	16(%ecx),%ecx
+	movl	%ebx,536(%esp)
+	movq	%mm6,528(%esp)
+	movl	%ecx,548(%esp)
+	xorl	%eax,%eax
+	roll	$8,%edx
+	movb	%dl,%al
+	movl	%eax,%ebp
+	andb	$15,%al
+	shrl	$4,%ebp
+	pxor	%mm0,%mm0
+	roll	$8,%edx
+	pxor	%mm1,%mm1
+	pxor	%mm2,%mm2
+	movq	16(%esp,%eax,8),%mm7
+	movq	144(%esp,%eax,8),%mm6
+	movb	%dl,%al
+	movd	%mm7,%ebx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%edi
+	psrlq	$8,%mm6
+	pxor	272(%esp,%ebp,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	shrl	$4,%edi
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%ebp,8),%mm6
+	xorb	(%esp,%ebp,1),%bl
+	movb	%dl,%al
+	movd	%mm7,%ecx
+	movzbl	%bl,%ebx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%ebp
+	psrlq	$8,%mm6
+	pxor	272(%esp,%edi,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	shrl	$4,%ebp
+	pinsrw	$2,(%esi,%ebx,2),%mm2
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%edi,8),%mm6
+	xorb	(%esp,%edi,1),%cl
+	movb	%dl,%al
+	movl	536(%esp),%edx
+	movd	%mm7,%ebx
+	movzbl	%cl,%ecx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%edi
+	psrlq	$8,%mm6
+	pxor	272(%esp,%ebp,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm2,%mm6
+	shrl	$4,%edi
+	pinsrw	$2,(%esi,%ecx,2),%mm1
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%ebp,8),%mm6
+	xorb	(%esp,%ebp,1),%bl
+	movb	%dl,%al
+	movd	%mm7,%ecx
+	movzbl	%bl,%ebx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%ebp
+	psrlq	$8,%mm6
+	pxor	272(%esp,%edi,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm1,%mm6
+	shrl	$4,%ebp
+	pinsrw	$2,(%esi,%ebx,2),%mm0
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%edi,8),%mm6
+	xorb	(%esp,%edi,1),%cl
+	movb	%dl,%al
+	movd	%mm7,%ebx
+	movzbl	%cl,%ecx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%edi
+	psrlq	$8,%mm6
+	pxor	272(%esp,%ebp,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm0,%mm6
+	shrl	$4,%edi
+	pinsrw	$2,(%esi,%ecx,2),%mm2
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%ebp,8),%mm6
+	xorb	(%esp,%ebp,1),%bl
+	movb	%dl,%al
+	movd	%mm7,%ecx
+	movzbl	%bl,%ebx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%ebp
+	psrlq	$8,%mm6
+	pxor	272(%esp,%edi,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm2,%mm6
+	shrl	$4,%ebp
+	pinsrw	$2,(%esi,%ebx,2),%mm1
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%edi,8),%mm6
+	xorb	(%esp,%edi,1),%cl
+	movb	%dl,%al
+	movl	532(%esp),%edx
+	movd	%mm7,%ebx
+	movzbl	%cl,%ecx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%edi
+	psrlq	$8,%mm6
+	pxor	272(%esp,%ebp,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm1,%mm6
+	shrl	$4,%edi
+	pinsrw	$2,(%esi,%ecx,2),%mm0
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%ebp,8),%mm6
+	xorb	(%esp,%ebp,1),%bl
+	movb	%dl,%al
+	movd	%mm7,%ecx
+	movzbl	%bl,%ebx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%ebp
+	psrlq	$8,%mm6
+	pxor	272(%esp,%edi,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm0,%mm6
+	shrl	$4,%ebp
+	pinsrw	$2,(%esi,%ebx,2),%mm2
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%edi,8),%mm6
+	xorb	(%esp,%edi,1),%cl
+	movb	%dl,%al
+	movd	%mm7,%ebx
+	movzbl	%cl,%ecx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%edi
+	psrlq	$8,%mm6
+	pxor	272(%esp,%ebp,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm2,%mm6
+	shrl	$4,%edi
+	pinsrw	$2,(%esi,%ecx,2),%mm1
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%ebp,8),%mm6
+	xorb	(%esp,%ebp,1),%bl
+	movb	%dl,%al
+	movd	%mm7,%ecx
+	movzbl	%bl,%ebx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%ebp
+	psrlq	$8,%mm6
+	pxor	272(%esp,%edi,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm1,%mm6
+	shrl	$4,%ebp
+	pinsrw	$2,(%esi,%ebx,2),%mm0
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%edi,8),%mm6
+	xorb	(%esp,%edi,1),%cl
+	movb	%dl,%al
+	movl	528(%esp),%edx
+	movd	%mm7,%ebx
+	movzbl	%cl,%ecx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%edi
+	psrlq	$8,%mm6
+	pxor	272(%esp,%ebp,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm0,%mm6
+	shrl	$4,%edi
+	pinsrw	$2,(%esi,%ecx,2),%mm2
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%ebp,8),%mm6
+	xorb	(%esp,%ebp,1),%bl
+	movb	%dl,%al
+	movd	%mm7,%ecx
+	movzbl	%bl,%ebx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%ebp
+	psrlq	$8,%mm6
+	pxor	272(%esp,%edi,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm2,%mm6
+	shrl	$4,%ebp
+	pinsrw	$2,(%esi,%ebx,2),%mm1
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%edi,8),%mm6
+	xorb	(%esp,%edi,1),%cl
+	movb	%dl,%al
+	movd	%mm7,%ebx
+	movzbl	%cl,%ecx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%edi
+	psrlq	$8,%mm6
+	pxor	272(%esp,%ebp,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm1,%mm6
+	shrl	$4,%edi
+	pinsrw	$2,(%esi,%ecx,2),%mm0
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%ebp,8),%mm6
+	xorb	(%esp,%ebp,1),%bl
+	movb	%dl,%al
+	movd	%mm7,%ecx
+	movzbl	%bl,%ebx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%ebp
+	psrlq	$8,%mm6
+	pxor	272(%esp,%edi,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm0,%mm6
+	shrl	$4,%ebp
+	pinsrw	$2,(%esi,%ebx,2),%mm2
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%edi,8),%mm6
+	xorb	(%esp,%edi,1),%cl
+	movb	%dl,%al
+	movl	524(%esp),%edx
+	movd	%mm7,%ebx
+	movzbl	%cl,%ecx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%edi
+	psrlq	$8,%mm6
+	pxor	272(%esp,%ebp,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm2,%mm6
+	shrl	$4,%edi
+	pinsrw	$2,(%esi,%ecx,2),%mm1
+	pxor	16(%esp,%eax,8),%mm7
+	pxor	144(%esp,%eax,8),%mm6
+	xorb	(%esp,%ebp,1),%bl
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%ebp,8),%mm6
+	movzbl	%bl,%ebx
+	pxor	%mm2,%mm2
+	psllq	$4,%mm1
+	movd	%mm7,%ecx
+	psrlq	$4,%mm7
+	movq	%mm6,%mm3
+	psrlq	$4,%mm6
+	shll	$4,%ecx
+	pxor	16(%esp,%edi,8),%mm7
+	psllq	$60,%mm3
+	movzbl	%cl,%ecx
+	pxor	%mm3,%mm7
+	pxor	144(%esp,%edi,8),%mm6
+	pinsrw	$2,(%esi,%ebx,2),%mm0
+	pxor	%mm1,%mm6
+	movd	%mm7,%edx
+	pinsrw	$3,(%esi,%ecx,2),%mm2
+	psllq	$12,%mm0
+	pxor	%mm0,%mm6
+	psrlq	$32,%mm7
+	pxor	%mm2,%mm6
+	movl	548(%esp),%ecx
+	movd	%mm7,%ebx
+	movq	%mm6,%mm3
+	psllw	$8,%mm6
+	psrlw	$8,%mm3
+	por	%mm3,%mm6
+	bswap	%edx
+	pshufw	$27,%mm6,%mm6
+	bswap	%ebx
+	cmpl	552(%esp),%ecx
+	jne	.L009outer
+	movl	544(%esp),%eax
+	movl	%edx,12(%eax)
+	movl	%ebx,8(%eax)
+	movq	%mm6,(%eax)
+	movl	556(%esp),%esp
+	emms
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	gcm_ghash_4bit_mmx,.-.L_gcm_ghash_4bit_mmx_begin
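+# gcm_init_clmul(Htable, Xi): PCLMULQDQ-based setup.  It loads H from the
+# second argument, multiplies it by x modulo the GHASH polynomial (the
+# 0xc2...01 constant kept 16 bytes past .Lbswap), and stores that value and
+# its carry-less square at Htable[0] and Htable[1] for the _clmul routines.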
+.globl	gcm_init_clmul
+.type	gcm_init_clmul, at function
+.align	16
+gcm_init_clmul:
+.L_gcm_init_clmul_begin:
+	movl	4(%esp),%edx
+	movl	8(%esp),%eax
+	call	.L010pic
+.L010pic:
+	popl	%ecx
+	leal	.Lbswap-.L010pic(%ecx),%ecx
+	movdqu	(%eax),%xmm2
+	pshufd	$78,%xmm2,%xmm2
+	pshufd	$255,%xmm2,%xmm4
+	movdqa	%xmm2,%xmm3
+	psllq	$1,%xmm2
+	pxor	%xmm5,%xmm5
+	psrlq	$63,%xmm3
+	pcmpgtd	%xmm4,%xmm5
+	pslldq	$8,%xmm3
+	por	%xmm3,%xmm2
+	pand	16(%ecx),%xmm5
+	pxor	%xmm5,%xmm2
+	movdqa	%xmm2,%xmm0
+	movdqa	%xmm0,%xmm1
+	pshufd	$78,%xmm0,%xmm3
+	pshufd	$78,%xmm2,%xmm4
+	pxor	%xmm0,%xmm3
+	pxor	%xmm2,%xmm4
+.byte	102,15,58,68,194,0
+.byte	102,15,58,68,202,17
+.byte	102,15,58,68,220,0
+	xorps	%xmm0,%xmm3
+	xorps	%xmm1,%xmm3
+	movdqa	%xmm3,%xmm4
+	psrldq	$8,%xmm3
+	pslldq	$8,%xmm4
+	pxor	%xmm3,%xmm1
+	pxor	%xmm4,%xmm0
+	movdqa	%xmm0,%xmm3
+	psllq	$1,%xmm0
+	pxor	%xmm3,%xmm0
+	psllq	$5,%xmm0
+	pxor	%xmm3,%xmm0
+	psllq	$57,%xmm0
+	movdqa	%xmm0,%xmm4
+	pslldq	$8,%xmm0
+	psrldq	$8,%xmm4
+	pxor	%xmm3,%xmm0
+	pxor	%xmm4,%xmm1
+	movdqa	%xmm0,%xmm4
+	psrlq	$5,%xmm0
+	pxor	%xmm4,%xmm0
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+	pxor	%xmm1,%xmm4
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+	movdqu	%xmm2,(%edx)
+	movdqu	%xmm0,16(%edx)
+	ret
+.size	gcm_init_clmul,.-.L_gcm_init_clmul_begin
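+# gcm_gmult_clmul(Xi, Htable): multiplies the 128-bit Xi by H in GF(2^128).
+# The .byte sequences are hand-encoded SSE instructions for old assemblers:
+# 102,15,56,0,... is pshufb (byte reversal through the .Lbswap mask) and
+# 102,15,58,68,... is pclmulqdq; three carry-less multiplies form a Karatsuba
+# 128x128 product, which is then reduced modulo the GHASH polynomial.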
+.globl	gcm_gmult_clmul
+.type	gcm_gmult_clmul, at function
+.align	16
+gcm_gmult_clmul:
+.L_gcm_gmult_clmul_begin:
+	movl	4(%esp),%eax
+	movl	8(%esp),%edx
+	call	.L011pic
+.L011pic:
+	popl	%ecx
+	leal	.Lbswap-.L011pic(%ecx),%ecx
+	movdqu	(%eax),%xmm0
+	movdqa	(%ecx),%xmm5
+	movups	(%edx),%xmm2
+.byte	102,15,56,0,197
+	movdqa	%xmm0,%xmm1
+	pshufd	$78,%xmm0,%xmm3
+	pshufd	$78,%xmm2,%xmm4
+	pxor	%xmm0,%xmm3
+	pxor	%xmm2,%xmm4
+.byte	102,15,58,68,194,0
+.byte	102,15,58,68,202,17
+.byte	102,15,58,68,220,0
+	xorps	%xmm0,%xmm3
+	xorps	%xmm1,%xmm3
+	movdqa	%xmm3,%xmm4
+	psrldq	$8,%xmm3
+	pslldq	$8,%xmm4
+	pxor	%xmm3,%xmm1
+	pxor	%xmm4,%xmm0
+	movdqa	%xmm0,%xmm3
+	psllq	$1,%xmm0
+	pxor	%xmm3,%xmm0
+	psllq	$5,%xmm0
+	pxor	%xmm3,%xmm0
+	psllq	$57,%xmm0
+	movdqa	%xmm0,%xmm4
+	pslldq	$8,%xmm0
+	psrldq	$8,%xmm4
+	pxor	%xmm3,%xmm0
+	pxor	%xmm4,%xmm1
+	movdqa	%xmm0,%xmm4
+	psrlq	$5,%xmm0
+	pxor	%xmm4,%xmm0
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+	pxor	%xmm1,%xmm4
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+.byte	102,15,56,0,197
+	movdqu	%xmm0,(%eax)
+	ret
+.size	gcm_gmult_clmul,.-.L_gcm_gmult_clmul_begin
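+# gcm_ghash_clmul(Xi, Htable, inp, len): bulk GHASH over len bytes of input.
+# Where possible, blocks are consumed in pairs, alternating between H and
+# H^2 from Htable so the reduction work is shared across each pair.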
+.globl	gcm_ghash_clmul
+.type	gcm_ghash_clmul, at function
+.align	16
+gcm_ghash_clmul:
+.L_gcm_ghash_clmul_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%eax
+	movl	24(%esp),%edx
+	movl	28(%esp),%esi
+	movl	32(%esp),%ebx
+	call	.L012pic
+.L012pic:
+	popl	%ecx
+	leal	.Lbswap-.L012pic(%ecx),%ecx
+	movdqu	(%eax),%xmm0
+	movdqa	(%ecx),%xmm5
+	movdqu	(%edx),%xmm2
+.byte	102,15,56,0,197
+	subl	$16,%ebx
+	jz	.L013odd_tail
+	movdqu	(%esi),%xmm3
+	movdqu	16(%esi),%xmm6
+.byte	102,15,56,0,221
+.byte	102,15,56,0,245
+	pxor	%xmm3,%xmm0
+	movdqa	%xmm6,%xmm7
+	pshufd	$78,%xmm6,%xmm3
+	pshufd	$78,%xmm2,%xmm4
+	pxor	%xmm6,%xmm3
+	pxor	%xmm2,%xmm4
+.byte	102,15,58,68,242,0
+.byte	102,15,58,68,250,17
+.byte	102,15,58,68,220,0
+	xorps	%xmm6,%xmm3
+	xorps	%xmm7,%xmm3
+	movdqa	%xmm3,%xmm4
+	psrldq	$8,%xmm3
+	pslldq	$8,%xmm4
+	pxor	%xmm3,%xmm7
+	pxor	%xmm4,%xmm6
+	movups	16(%edx),%xmm2
+	leal	32(%esi),%esi
+	subl	$32,%ebx
+	jbe	.L014even_tail
+.L015mod_loop:
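+# Main loop, two 16-byte blocks per iteration.  The reduction of the
+# previous product is interleaved with the carry-less multiplies for the
+# next pair to keep the multiplier and the shift/XOR units both busy.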
+	movdqa	%xmm0,%xmm1
+	pshufd	$78,%xmm0,%xmm3
+	pshufd	$78,%xmm2,%xmm4
+	pxor	%xmm0,%xmm3
+	pxor	%xmm2,%xmm4
+.byte	102,15,58,68,194,0
+.byte	102,15,58,68,202,17
+.byte	102,15,58,68,220,0
+	xorps	%xmm0,%xmm3
+	xorps	%xmm1,%xmm3
+	movdqa	%xmm3,%xmm4
+	psrldq	$8,%xmm3
+	pslldq	$8,%xmm4
+	pxor	%xmm3,%xmm1
+	pxor	%xmm4,%xmm0
+	movdqu	(%esi),%xmm3
+	movups	(%edx),%xmm2
+	pxor	%xmm6,%xmm0
+	pxor	%xmm7,%xmm1
+	movdqu	16(%esi),%xmm6
+.byte	102,15,56,0,221
+.byte	102,15,56,0,245
+	movdqa	%xmm6,%xmm5
+	movdqa	%xmm6,%xmm7
+	pxor	%xmm3,%xmm1
+	movdqa	%xmm0,%xmm3
+	psllq	$1,%xmm0
+	pxor	%xmm3,%xmm0
+	psllq	$5,%xmm0
+	pxor	%xmm3,%xmm0
+.byte	102,15,58,68,242,0
+	psllq	$57,%xmm0
+	movdqa	%xmm0,%xmm4
+	pslldq	$8,%xmm0
+	psrldq	$8,%xmm4
+	pxor	%xmm3,%xmm0
+	pshufd	$78,%xmm5,%xmm3
+	pxor	%xmm4,%xmm1
+	pxor	%xmm5,%xmm3
+	pshufd	$78,%xmm2,%xmm5
+	pxor	%xmm2,%xmm5
+.byte	102,15,58,68,250,17
+	movdqa	%xmm0,%xmm4
+	psrlq	$5,%xmm0
+	pxor	%xmm4,%xmm0
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+	pxor	%xmm1,%xmm4
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+.byte	102,15,58,68,221,0
+	movups	16(%edx),%xmm2
+	xorps	%xmm6,%xmm3
+	xorps	%xmm7,%xmm3
+	movdqa	%xmm3,%xmm5
+	psrldq	$8,%xmm3
+	pslldq	$8,%xmm5
+	pxor	%xmm3,%xmm7
+	pxor	%xmm5,%xmm6
+	movdqa	(%ecx),%xmm5
+	leal	32(%esi),%esi
+	subl	$32,%ebx
+	ja	.L015mod_loop
+.L014even_tail:
+	movdqa	%xmm0,%xmm1
+	pshufd	$78,%xmm0,%xmm3
+	pshufd	$78,%xmm2,%xmm4
+	pxor	%xmm0,%xmm3
+	pxor	%xmm2,%xmm4
+.byte	102,15,58,68,194,0
+.byte	102,15,58,68,202,17
+.byte	102,15,58,68,220,0
+	xorps	%xmm0,%xmm3
+	xorps	%xmm1,%xmm3
+	movdqa	%xmm3,%xmm4
+	psrldq	$8,%xmm3
+	pslldq	$8,%xmm4
+	pxor	%xmm3,%xmm1
+	pxor	%xmm4,%xmm0
+	pxor	%xmm6,%xmm0
+	pxor	%xmm7,%xmm1
+	movdqa	%xmm0,%xmm3
+	psllq	$1,%xmm0
+	pxor	%xmm3,%xmm0
+	psllq	$5,%xmm0
+	pxor	%xmm3,%xmm0
+	psllq	$57,%xmm0
+	movdqa	%xmm0,%xmm4
+	pslldq	$8,%xmm0
+	psrldq	$8,%xmm4
+	pxor	%xmm3,%xmm0
+	pxor	%xmm4,%xmm1
+	movdqa	%xmm0,%xmm4
+	psrlq	$5,%xmm0
+	pxor	%xmm4,%xmm0
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+	pxor	%xmm1,%xmm4
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+	testl	%ebx,%ebx
+	jnz	.L016done
+	movups	(%edx),%xmm2
+.L013odd_tail:
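+# Odd tail: a single trailing block remains, so do one more multiply by H.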
+	movdqu	(%esi),%xmm3
+.byte	102,15,56,0,221
+	pxor	%xmm3,%xmm0
+	movdqa	%xmm0,%xmm1
+	pshufd	$78,%xmm0,%xmm3
+	pshufd	$78,%xmm2,%xmm4
+	pxor	%xmm0,%xmm3
+	pxor	%xmm2,%xmm4
+.byte	102,15,58,68,194,0
+.byte	102,15,58,68,202,17
+.byte	102,15,58,68,220,0
+	xorps	%xmm0,%xmm3
+	xorps	%xmm1,%xmm3
+	movdqa	%xmm3,%xmm4
+	psrldq	$8,%xmm3
+	pslldq	$8,%xmm4
+	pxor	%xmm3,%xmm1
+	pxor	%xmm4,%xmm0
+	movdqa	%xmm0,%xmm3
+	psllq	$1,%xmm0
+	pxor	%xmm3,%xmm0
+	psllq	$5,%xmm0
+	pxor	%xmm3,%xmm0
+	psllq	$57,%xmm0
+	movdqa	%xmm0,%xmm4
+	pslldq	$8,%xmm0
+	psrldq	$8,%xmm4
+	pxor	%xmm3,%xmm0
+	pxor	%xmm4,%xmm1
+	movdqa	%xmm0,%xmm4
+	psrlq	$5,%xmm0
+	pxor	%xmm4,%xmm0
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+	pxor	%xmm1,%xmm4
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+.L016done:
+.byte	102,15,56,0,197
+	movdqu	%xmm0,(%eax)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	gcm_ghash_clmul,.-.L_gcm_ghash_clmul_begin
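+# Constant data: .Lbswap is the 16-byte pshufb byte-reversal mask followed
+# by the GHASH reduction constant (0xc2...01); .Lrem_4bit and .Lrem_8bit are
+# the remainder lookup tables used by the 4-bit x86/MMX code paths above.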
+.align	64
+.Lbswap:
+.byte	15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
+.byte	1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,194
+.align	64
+.Lrem_4bit:
+.long	0,0,0,471859200,0,943718400,0,610271232
+.long	0,1887436800,0,1822425088,0,1220542464,0,1423966208
+.long	0,3774873600,0,4246732800,0,3644850176,0,3311403008
+.long	0,2441084928,0,2376073216,0,2847932416,0,3051356160
+.align	64
+.Lrem_8bit:
+.value	0,450,900,582,1800,1738,1164,1358
+.value	3600,4050,3476,3158,2328,2266,2716,2910
+.value	7200,7650,8100,7782,6952,6890,6316,6510
+.value	4656,5106,4532,4214,5432,5370,5820,6014
+.value	14400,14722,15300,14854,16200,16010,15564,15630
+.value	13904,14226,13780,13334,12632,12442,13020,13086
+.value	9312,9634,10212,9766,9064,8874,8428,8494
+.value	10864,11186,10740,10294,11640,11450,12028,12094
+.value	28800,28994,29444,29382,30600,30282,29708,30158
+.value	32400,32594,32020,31958,31128,30810,31260,31710
+.value	27808,28002,28452,28390,27560,27242,26668,27118
+.value	25264,25458,24884,24822,26040,25722,26172,26622
+.value	18624,18690,19268,19078,20424,19978,19532,19854
+.value	18128,18194,17748,17558,16856,16410,16988,17310
+.value	21728,21794,22372,22182,21480,21034,20588,20910
+.value	23280,23346,22900,22710,24056,23610,24188,24510
+.value	57600,57538,57988,58182,58888,59338,58764,58446
+.value	61200,61138,60564,60758,59416,59866,60316,59998
+.value	64800,64738,65188,65382,64040,64490,63916,63598
+.value	62256,62194,61620,61814,62520,62970,63420,63102
+.value	55616,55426,56004,56070,56904,57226,56780,56334
+.value	55120,54930,54484,54550,53336,53658,54236,53790
+.value	50528,50338,50916,50982,49768,50090,49644,49198
+.value	52080,51890,51444,51510,52344,52666,53244,52798
+.value	37248,36930,37380,37830,38536,38730,38156,38094
+.value	40848,40530,39956,40406,39064,39258,39708,39646
+.value	36256,35938,36388,36838,35496,35690,35116,35054
+.value	33712,33394,32820,33270,33976,34170,34620,34558
+.value	43456,43010,43588,43910,44744,44810,44364,44174
+.value	42960,42514,42068,42390,41176,41242,41820,41630
+.value	46560,46114,46692,47014,45800,45866,45420,45230
+.value	48112,47666,47220,47542,48376,48442,49020,48830
+.byte	71,72,65,83,72,32,102,111,114,32,120,56,54,44,32,67
+.byte	82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112
+.byte	112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62
+.byte	0
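+# The .byte string above spells "GHASH for x86, CRYPTOGAMS by <appro@openssl.org>".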
+#else
+.file	"ghash-x86.S"
+.text
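+# Non-PIC build of the same CRYPTOGAMS ghash-x86 module.
+# gcm_gmult_4bit_x86(Xi, Htable): Xi = Xi * H using Shoup's 4-bit table
+# method in plain IA-32 integer code; the reduction constants (the same
+# values as .Lrem_4bit) are materialized on the stack at 16(%esp).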
+.globl	gcm_gmult_4bit_x86
+.type	gcm_gmult_4bit_x86, at function
+.align	16
+gcm_gmult_4bit_x86:
+.L_gcm_gmult_4bit_x86_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	subl	$84,%esp
+	movl	104(%esp),%edi
+	movl	108(%esp),%esi
+	movl	(%edi),%ebp
+	movl	4(%edi),%edx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%ebx
+	movl	$0,16(%esp)
+	movl	$471859200,20(%esp)
+	movl	$943718400,24(%esp)
+	movl	$610271232,28(%esp)
+	movl	$1887436800,32(%esp)
+	movl	$1822425088,36(%esp)
+	movl	$1220542464,40(%esp)
+	movl	$1423966208,44(%esp)
+	movl	$3774873600,48(%esp)
+	movl	$4246732800,52(%esp)
+	movl	$3644850176,56(%esp)
+	movl	$3311403008,60(%esp)
+	movl	$2441084928,64(%esp)
+	movl	$2376073216,68(%esp)
+	movl	$2847932416,72(%esp)
+	movl	$3051356160,76(%esp)
+	movl	%ebp,(%esp)
+	movl	%edx,4(%esp)
+	movl	%ecx,8(%esp)
+	movl	%ebx,12(%esp)
+	shrl	$20,%ebx
+	andl	$240,%ebx
+	movl	4(%esi,%ebx,1),%ebp
+	movl	(%esi,%ebx,1),%edx
+	movl	12(%esi,%ebx,1),%ecx
+	movl	8(%esi,%ebx,1),%ebx
+	xorl	%eax,%eax
+	movl	$15,%edi
+	jmp	.L000x86_loop
+.align	16
+.L000x86_loop:
+	movb	%bl,%al
+	shrdl	$4,%ecx,%ebx
+	andb	$15,%al
+	shrdl	$4,%edx,%ecx
+	shrdl	$4,%ebp,%edx
+	shrl	$4,%ebp
+	xorl	16(%esp,%eax,4),%ebp
+	movb	(%esp,%edi,1),%al
+	andb	$240,%al
+	xorl	8(%esi,%eax,1),%ebx
+	xorl	12(%esi,%eax,1),%ecx
+	xorl	(%esi,%eax,1),%edx
+	xorl	4(%esi,%eax,1),%ebp
+	decl	%edi
+	js	.L001x86_break
+	movb	%bl,%al
+	shrdl	$4,%ecx,%ebx
+	andb	$15,%al
+	shrdl	$4,%edx,%ecx
+	shrdl	$4,%ebp,%edx
+	shrl	$4,%ebp
+	xorl	16(%esp,%eax,4),%ebp
+	movb	(%esp,%edi,1),%al
+	shlb	$4,%al
+	xorl	8(%esi,%eax,1),%ebx
+	xorl	12(%esi,%eax,1),%ecx
+	xorl	(%esi,%eax,1),%edx
+	xorl	4(%esi,%eax,1),%ebp
+	jmp	.L000x86_loop
+.align	16
+.L001x86_break:
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	bswap	%ebp
+	movl	104(%esp),%edi
+	movl	%ebx,12(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,4(%edi)
+	movl	%ebp,(%edi)
+	addl	$84,%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	gcm_gmult_4bit_x86,.-.L_gcm_gmult_4bit_x86_begin
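+# gcm_ghash_4bit_x86(Xi, Htable, inp, len): the same 4-bit table multiply,
+# wrapped in an outer loop (.L002x86_outer_loop) that first XORs each
+# 16-byte input block into Xi.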
+.globl	gcm_ghash_4bit_x86
+.type	gcm_ghash_4bit_x86, at function
+.align	16
+gcm_ghash_4bit_x86:
+.L_gcm_ghash_4bit_x86_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	subl	$84,%esp
+	movl	104(%esp),%ebx
+	movl	108(%esp),%esi
+	movl	112(%esp),%edi
+	movl	116(%esp),%ecx
+	addl	%edi,%ecx
+	movl	%ecx,116(%esp)
+	movl	(%ebx),%ebp
+	movl	4(%ebx),%edx
+	movl	8(%ebx),%ecx
+	movl	12(%ebx),%ebx
+	movl	$0,16(%esp)
+	movl	$471859200,20(%esp)
+	movl	$943718400,24(%esp)
+	movl	$610271232,28(%esp)
+	movl	$1887436800,32(%esp)
+	movl	$1822425088,36(%esp)
+	movl	$1220542464,40(%esp)
+	movl	$1423966208,44(%esp)
+	movl	$3774873600,48(%esp)
+	movl	$4246732800,52(%esp)
+	movl	$3644850176,56(%esp)
+	movl	$3311403008,60(%esp)
+	movl	$2441084928,64(%esp)
+	movl	$2376073216,68(%esp)
+	movl	$2847932416,72(%esp)
+	movl	$3051356160,76(%esp)
+.align	16
+.L002x86_outer_loop:
+	xorl	12(%edi),%ebx
+	xorl	8(%edi),%ecx
+	xorl	4(%edi),%edx
+	xorl	(%edi),%ebp
+	movl	%ebx,12(%esp)
+	movl	%ecx,8(%esp)
+	movl	%edx,4(%esp)
+	movl	%ebp,(%esp)
+	shrl	$20,%ebx
+	andl	$240,%ebx
+	movl	4(%esi,%ebx,1),%ebp
+	movl	(%esi,%ebx,1),%edx
+	movl	12(%esi,%ebx,1),%ecx
+	movl	8(%esi,%ebx,1),%ebx
+	xorl	%eax,%eax
+	movl	$15,%edi
+	jmp	.L003x86_loop
+.align	16
+.L003x86_loop:
+	movb	%bl,%al
+	shrdl	$4,%ecx,%ebx
+	andb	$15,%al
+	shrdl	$4,%edx,%ecx
+	shrdl	$4,%ebp,%edx
+	shrl	$4,%ebp
+	xorl	16(%esp,%eax,4),%ebp
+	movb	(%esp,%edi,1),%al
+	andb	$240,%al
+	xorl	8(%esi,%eax,1),%ebx
+	xorl	12(%esi,%eax,1),%ecx
+	xorl	(%esi,%eax,1),%edx
+	xorl	4(%esi,%eax,1),%ebp
+	decl	%edi
+	js	.L004x86_break
+	movb	%bl,%al
+	shrdl	$4,%ecx,%ebx
+	andb	$15,%al
+	shrdl	$4,%edx,%ecx
+	shrdl	$4,%ebp,%edx
+	shrl	$4,%ebp
+	xorl	16(%esp,%eax,4),%ebp
+	movb	(%esp,%edi,1),%al
+	shlb	$4,%al
+	xorl	8(%esi,%eax,1),%ebx
+	xorl	12(%esi,%eax,1),%ecx
+	xorl	(%esi,%eax,1),%edx
+	xorl	4(%esi,%eax,1),%ebp
+	jmp	.L003x86_loop
+.align	16
+.L004x86_break:
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	bswap	%ebp
+	movl	112(%esp),%edi
+	leal	16(%edi),%edi
+	cmpl	116(%esp),%edi
+	movl	%edi,112(%esp)
+	jb	.L002x86_outer_loop
+	movl	104(%esp),%edi
+	movl	%ebx,12(%edi)
+	movl	%ecx,8(%edi)
+	movl	%edx,4(%edi)
+	movl	%ebp,(%edi)
+	addl	$84,%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	gcm_ghash_4bit_x86,.-.L_gcm_ghash_4bit_x86_begin
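+# gcm_gmult_4bit_mmx(Xi, Htable): the 4-bit table multiply using MMX
+# registers, with the reduction folded in through .Lrem_4bit, located
+# PC-relatively via the call/pop at .L005pic_point.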
+.globl	gcm_gmult_4bit_mmx
+.type	gcm_gmult_4bit_mmx, at function
+.align	16
+gcm_gmult_4bit_mmx:
+.L_gcm_gmult_4bit_mmx_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%edi
+	movl	24(%esp),%esi
+	call	.L005pic_point
+.L005pic_point:
+	popl	%eax
+	leal	.Lrem_4bit-.L005pic_point(%eax),%eax
+	movzbl	15(%edi),%ebx
+	xorl	%ecx,%ecx
+	movl	%ebx,%edx
+	movb	%dl,%cl
+	movl	$14,%ebp
+	shlb	$4,%cl
+	andl	$240,%edx
+	movq	8(%esi,%ecx,1),%mm0
+	movq	(%esi,%ecx,1),%mm1
+	movd	%mm0,%ebx
+	jmp	.L006mmx_loop
+.align	16
+.L006mmx_loop:
+	psrlq	$4,%mm0
+	andl	$15,%ebx
+	movq	%mm1,%mm2
+	psrlq	$4,%mm1
+	pxor	8(%esi,%edx,1),%mm0
+	movb	(%edi,%ebp,1),%cl
+	psllq	$60,%mm2
+	pxor	(%eax,%ebx,8),%mm1
+	decl	%ebp
+	movd	%mm0,%ebx
+	pxor	(%esi,%edx,1),%mm1
+	movl	%ecx,%edx
+	pxor	%mm2,%mm0
+	js	.L007mmx_break
+	shlb	$4,%cl
+	andl	$15,%ebx
+	psrlq	$4,%mm0
+	andl	$240,%edx
+	movq	%mm1,%mm2
+	psrlq	$4,%mm1
+	pxor	8(%esi,%ecx,1),%mm0
+	psllq	$60,%mm2
+	pxor	(%eax,%ebx,8),%mm1
+	movd	%mm0,%ebx
+	pxor	(%esi,%ecx,1),%mm1
+	pxor	%mm2,%mm0
+	jmp	.L006mmx_loop
+.align	16
+.L007mmx_break:
+	shlb	$4,%cl
+	andl	$15,%ebx
+	psrlq	$4,%mm0
+	andl	$240,%edx
+	movq	%mm1,%mm2
+	psrlq	$4,%mm1
+	pxor	8(%esi,%ecx,1),%mm0
+	psllq	$60,%mm2
+	pxor	(%eax,%ebx,8),%mm1
+	movd	%mm0,%ebx
+	pxor	(%esi,%ecx,1),%mm1
+	pxor	%mm2,%mm0
+	psrlq	$4,%mm0
+	andl	$15,%ebx
+	movq	%mm1,%mm2
+	psrlq	$4,%mm1
+	pxor	8(%esi,%edx,1),%mm0
+	psllq	$60,%mm2
+	pxor	(%eax,%ebx,8),%mm1
+	movd	%mm0,%ebx
+	pxor	(%esi,%edx,1),%mm1
+	pxor	%mm2,%mm0
+	psrlq	$32,%mm0
+	movd	%mm1,%edx
+	psrlq	$32,%mm1
+	movd	%mm0,%ecx
+	movd	%mm1,%ebp
+	bswap	%ebx
+	bswap	%edx
+	bswap	%ecx
+	bswap	%ebp
+	emms
+	movl	%ebx,12(%edi)
+	movl	%edx,4(%edi)
+	movl	%ecx,8(%edi)
+	movl	%ebp,(%edi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	gcm_gmult_4bit_mmx,.-.L_gcm_gmult_4bit_mmx_begin
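+# gcm_ghash_4bit_mmx(Xi, Htable, inp, len): the heaviest 4-bit path.  It
+# appears to carve a 64-byte-aligned scratch area (roughly 560 bytes) out of
+# the stack, precompute shifted copies of Htable there, and then hash one
+# block per .L009outer iteration using byte-wide lookups against .Lrem_8bit.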
+.globl	gcm_ghash_4bit_mmx
+.type	gcm_ghash_4bit_mmx, at function
+.align	16
+gcm_ghash_4bit_mmx:
+.L_gcm_ghash_4bit_mmx_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%eax
+	movl	24(%esp),%ebx
+	movl	28(%esp),%ecx
+	movl	32(%esp),%edx
+	movl	%esp,%ebp
+	call	.L008pic_point
+.L008pic_point:
+	popl	%esi
+	leal	.Lrem_8bit-.L008pic_point(%esi),%esi
+	subl	$544,%esp
+	andl	$-64,%esp
+	subl	$16,%esp
+	addl	%ecx,%edx
+	movl	%eax,544(%esp)
+	movl	%edx,552(%esp)
+	movl	%ebp,556(%esp)
+	addl	$128,%ebx
+	leal	144(%esp),%edi
+	leal	400(%esp),%ebp
+	movl	-120(%ebx),%edx
+	movq	-120(%ebx),%mm0
+	movq	-128(%ebx),%mm3
+	shll	$4,%edx
+	movb	%dl,(%esp)
+	movl	-104(%ebx),%edx
+	movq	-104(%ebx),%mm2
+	movq	-112(%ebx),%mm5
+	movq	%mm0,-128(%edi)
+	psrlq	$4,%mm0
+	movq	%mm3,(%edi)
+	movq	%mm3,%mm7
+	psrlq	$4,%mm3
+	shll	$4,%edx
+	movb	%dl,1(%esp)
+	movl	-88(%ebx),%edx
+	movq	-88(%ebx),%mm1
+	psllq	$60,%mm7
+	movq	-96(%ebx),%mm4
+	por	%mm7,%mm0
+	movq	%mm2,-120(%edi)
+	psrlq	$4,%mm2
+	movq	%mm5,8(%edi)
+	movq	%mm5,%mm6
+	movq	%mm0,-128(%ebp)
+	psrlq	$4,%mm5
+	movq	%mm3,(%ebp)
+	shll	$4,%edx
+	movb	%dl,2(%esp)
+	movl	-72(%ebx),%edx
+	movq	-72(%ebx),%mm0
+	psllq	$60,%mm6
+	movq	-80(%ebx),%mm3
+	por	%mm6,%mm2
+	movq	%mm1,-112(%edi)
+	psrlq	$4,%mm1
+	movq	%mm4,16(%edi)
+	movq	%mm4,%mm7
+	movq	%mm2,-120(%ebp)
+	psrlq	$4,%mm4
+	movq	%mm5,8(%ebp)
+	shll	$4,%edx
+	movb	%dl,3(%esp)
+	movl	-56(%ebx),%edx
+	movq	-56(%ebx),%mm2
+	psllq	$60,%mm7
+	movq	-64(%ebx),%mm5
+	por	%mm7,%mm1
+	movq	%mm0,-104(%edi)
+	psrlq	$4,%mm0
+	movq	%mm3,24(%edi)
+	movq	%mm3,%mm6
+	movq	%mm1,-112(%ebp)
+	psrlq	$4,%mm3
+	movq	%mm4,16(%ebp)
+	shll	$4,%edx
+	movb	%dl,4(%esp)
+	movl	-40(%ebx),%edx
+	movq	-40(%ebx),%mm1
+	psllq	$60,%mm6
+	movq	-48(%ebx),%mm4
+	por	%mm6,%mm0
+	movq	%mm2,-96(%edi)
+	psrlq	$4,%mm2
+	movq	%mm5,32(%edi)
+	movq	%mm5,%mm7
+	movq	%mm0,-104(%ebp)
+	psrlq	$4,%mm5
+	movq	%mm3,24(%ebp)
+	shll	$4,%edx
+	movb	%dl,5(%esp)
+	movl	-24(%ebx),%edx
+	movq	-24(%ebx),%mm0
+	psllq	$60,%mm7
+	movq	-32(%ebx),%mm3
+	por	%mm7,%mm2
+	movq	%mm1,-88(%edi)
+	psrlq	$4,%mm1
+	movq	%mm4,40(%edi)
+	movq	%mm4,%mm6
+	movq	%mm2,-96(%ebp)
+	psrlq	$4,%mm4
+	movq	%mm5,32(%ebp)
+	shll	$4,%edx
+	movb	%dl,6(%esp)
+	movl	-8(%ebx),%edx
+	movq	-8(%ebx),%mm2
+	psllq	$60,%mm6
+	movq	-16(%ebx),%mm5
+	por	%mm6,%mm1
+	movq	%mm0,-80(%edi)
+	psrlq	$4,%mm0
+	movq	%mm3,48(%edi)
+	movq	%mm3,%mm7
+	movq	%mm1,-88(%ebp)
+	psrlq	$4,%mm3
+	movq	%mm4,40(%ebp)
+	shll	$4,%edx
+	movb	%dl,7(%esp)
+	movl	8(%ebx),%edx
+	movq	8(%ebx),%mm1
+	psllq	$60,%mm7
+	movq	(%ebx),%mm4
+	por	%mm7,%mm0
+	movq	%mm2,-72(%edi)
+	psrlq	$4,%mm2
+	movq	%mm5,56(%edi)
+	movq	%mm5,%mm6
+	movq	%mm0,-80(%ebp)
+	psrlq	$4,%mm5
+	movq	%mm3,48(%ebp)
+	shll	$4,%edx
+	movb	%dl,8(%esp)
+	movl	24(%ebx),%edx
+	movq	24(%ebx),%mm0
+	psllq	$60,%mm6
+	movq	16(%ebx),%mm3
+	por	%mm6,%mm2
+	movq	%mm1,-64(%edi)
+	psrlq	$4,%mm1
+	movq	%mm4,64(%edi)
+	movq	%mm4,%mm7
+	movq	%mm2,-72(%ebp)
+	psrlq	$4,%mm4
+	movq	%mm5,56(%ebp)
+	shll	$4,%edx
+	movb	%dl,9(%esp)
+	movl	40(%ebx),%edx
+	movq	40(%ebx),%mm2
+	psllq	$60,%mm7
+	movq	32(%ebx),%mm5
+	por	%mm7,%mm1
+	movq	%mm0,-56(%edi)
+	psrlq	$4,%mm0
+	movq	%mm3,72(%edi)
+	movq	%mm3,%mm6
+	movq	%mm1,-64(%ebp)
+	psrlq	$4,%mm3
+	movq	%mm4,64(%ebp)
+	shll	$4,%edx
+	movb	%dl,10(%esp)
+	movl	56(%ebx),%edx
+	movq	56(%ebx),%mm1
+	psllq	$60,%mm6
+	movq	48(%ebx),%mm4
+	por	%mm6,%mm0
+	movq	%mm2,-48(%edi)
+	psrlq	$4,%mm2
+	movq	%mm5,80(%edi)
+	movq	%mm5,%mm7
+	movq	%mm0,-56(%ebp)
+	psrlq	$4,%mm5
+	movq	%mm3,72(%ebp)
+	shll	$4,%edx
+	movb	%dl,11(%esp)
+	movl	72(%ebx),%edx
+	movq	72(%ebx),%mm0
+	psllq	$60,%mm7
+	movq	64(%ebx),%mm3
+	por	%mm7,%mm2
+	movq	%mm1,-40(%edi)
+	psrlq	$4,%mm1
+	movq	%mm4,88(%edi)
+	movq	%mm4,%mm6
+	movq	%mm2,-48(%ebp)
+	psrlq	$4,%mm4
+	movq	%mm5,80(%ebp)
+	shll	$4,%edx
+	movb	%dl,12(%esp)
+	movl	88(%ebx),%edx
+	movq	88(%ebx),%mm2
+	psllq	$60,%mm6
+	movq	80(%ebx),%mm5
+	por	%mm6,%mm1
+	movq	%mm0,-32(%edi)
+	psrlq	$4,%mm0
+	movq	%mm3,96(%edi)
+	movq	%mm3,%mm7
+	movq	%mm1,-40(%ebp)
+	psrlq	$4,%mm3
+	movq	%mm4,88(%ebp)
+	shll	$4,%edx
+	movb	%dl,13(%esp)
+	movl	104(%ebx),%edx
+	movq	104(%ebx),%mm1
+	psllq	$60,%mm7
+	movq	96(%ebx),%mm4
+	por	%mm7,%mm0
+	movq	%mm2,-24(%edi)
+	psrlq	$4,%mm2
+	movq	%mm5,104(%edi)
+	movq	%mm5,%mm6
+	movq	%mm0,-32(%ebp)
+	psrlq	$4,%mm5
+	movq	%mm3,96(%ebp)
+	shll	$4,%edx
+	movb	%dl,14(%esp)
+	movl	120(%ebx),%edx
+	movq	120(%ebx),%mm0
+	psllq	$60,%mm6
+	movq	112(%ebx),%mm3
+	por	%mm6,%mm2
+	movq	%mm1,-16(%edi)
+	psrlq	$4,%mm1
+	movq	%mm4,112(%edi)
+	movq	%mm4,%mm7
+	movq	%mm2,-24(%ebp)
+	psrlq	$4,%mm4
+	movq	%mm5,104(%ebp)
+	shll	$4,%edx
+	movb	%dl,15(%esp)
+	psllq	$60,%mm7
+	por	%mm7,%mm1
+	movq	%mm0,-8(%edi)
+	psrlq	$4,%mm0
+	movq	%mm3,120(%edi)
+	movq	%mm3,%mm6
+	movq	%mm1,-16(%ebp)
+	psrlq	$4,%mm3
+	movq	%mm4,112(%ebp)
+	psllq	$60,%mm6
+	por	%mm6,%mm0
+	movq	%mm0,-8(%ebp)
+	movq	%mm3,120(%ebp)
+	movq	(%eax),%mm6
+	movl	8(%eax),%ebx
+	movl	12(%eax),%edx
+.align	16
+.L009outer:
+	xorl	12(%ecx),%edx
+	xorl	8(%ecx),%ebx
+	pxor	(%ecx),%mm6
+	leal	16(%ecx),%ecx
+	movl	%ebx,536(%esp)
+	movq	%mm6,528(%esp)
+	movl	%ecx,548(%esp)
+	xorl	%eax,%eax
+	roll	$8,%edx
+	movb	%dl,%al
+	movl	%eax,%ebp
+	andb	$15,%al
+	shrl	$4,%ebp
+	pxor	%mm0,%mm0
+	roll	$8,%edx
+	pxor	%mm1,%mm1
+	pxor	%mm2,%mm2
+	movq	16(%esp,%eax,8),%mm7
+	movq	144(%esp,%eax,8),%mm6
+	movb	%dl,%al
+	movd	%mm7,%ebx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%edi
+	psrlq	$8,%mm6
+	pxor	272(%esp,%ebp,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	shrl	$4,%edi
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%ebp,8),%mm6
+	xorb	(%esp,%ebp,1),%bl
+	movb	%dl,%al
+	movd	%mm7,%ecx
+	movzbl	%bl,%ebx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%ebp
+	psrlq	$8,%mm6
+	pxor	272(%esp,%edi,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	shrl	$4,%ebp
+	pinsrw	$2,(%esi,%ebx,2),%mm2
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%edi,8),%mm6
+	xorb	(%esp,%edi,1),%cl
+	movb	%dl,%al
+	movl	536(%esp),%edx
+	movd	%mm7,%ebx
+	movzbl	%cl,%ecx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%edi
+	psrlq	$8,%mm6
+	pxor	272(%esp,%ebp,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm2,%mm6
+	shrl	$4,%edi
+	pinsrw	$2,(%esi,%ecx,2),%mm1
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%ebp,8),%mm6
+	xorb	(%esp,%ebp,1),%bl
+	movb	%dl,%al
+	movd	%mm7,%ecx
+	movzbl	%bl,%ebx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%ebp
+	psrlq	$8,%mm6
+	pxor	272(%esp,%edi,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm1,%mm6
+	shrl	$4,%ebp
+	pinsrw	$2,(%esi,%ebx,2),%mm0
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%edi,8),%mm6
+	xorb	(%esp,%edi,1),%cl
+	movb	%dl,%al
+	movd	%mm7,%ebx
+	movzbl	%cl,%ecx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%edi
+	psrlq	$8,%mm6
+	pxor	272(%esp,%ebp,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm0,%mm6
+	shrl	$4,%edi
+	pinsrw	$2,(%esi,%ecx,2),%mm2
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%ebp,8),%mm6
+	xorb	(%esp,%ebp,1),%bl
+	movb	%dl,%al
+	movd	%mm7,%ecx
+	movzbl	%bl,%ebx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%ebp
+	psrlq	$8,%mm6
+	pxor	272(%esp,%edi,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm2,%mm6
+	shrl	$4,%ebp
+	pinsrw	$2,(%esi,%ebx,2),%mm1
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%edi,8),%mm6
+	xorb	(%esp,%edi,1),%cl
+	movb	%dl,%al
+	movl	532(%esp),%edx
+	movd	%mm7,%ebx
+	movzbl	%cl,%ecx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%edi
+	psrlq	$8,%mm6
+	pxor	272(%esp,%ebp,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm1,%mm6
+	shrl	$4,%edi
+	pinsrw	$2,(%esi,%ecx,2),%mm0
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%ebp,8),%mm6
+	xorb	(%esp,%ebp,1),%bl
+	movb	%dl,%al
+	movd	%mm7,%ecx
+	movzbl	%bl,%ebx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%ebp
+	psrlq	$8,%mm6
+	pxor	272(%esp,%edi,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm0,%mm6
+	shrl	$4,%ebp
+	pinsrw	$2,(%esi,%ebx,2),%mm2
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%edi,8),%mm6
+	xorb	(%esp,%edi,1),%cl
+	movb	%dl,%al
+	movd	%mm7,%ebx
+	movzbl	%cl,%ecx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%edi
+	psrlq	$8,%mm6
+	pxor	272(%esp,%ebp,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm2,%mm6
+	shrl	$4,%edi
+	pinsrw	$2,(%esi,%ecx,2),%mm1
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%ebp,8),%mm6
+	xorb	(%esp,%ebp,1),%bl
+	movb	%dl,%al
+	movd	%mm7,%ecx
+	movzbl	%bl,%ebx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%ebp
+	psrlq	$8,%mm6
+	pxor	272(%esp,%edi,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm1,%mm6
+	shrl	$4,%ebp
+	pinsrw	$2,(%esi,%ebx,2),%mm0
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%edi,8),%mm6
+	xorb	(%esp,%edi,1),%cl
+	movb	%dl,%al
+	movl	528(%esp),%edx
+	movd	%mm7,%ebx
+	movzbl	%cl,%ecx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%edi
+	psrlq	$8,%mm6
+	pxor	272(%esp,%ebp,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm0,%mm6
+	shrl	$4,%edi
+	pinsrw	$2,(%esi,%ecx,2),%mm2
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%ebp,8),%mm6
+	xorb	(%esp,%ebp,1),%bl
+	movb	%dl,%al
+	movd	%mm7,%ecx
+	movzbl	%bl,%ebx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%ebp
+	psrlq	$8,%mm6
+	pxor	272(%esp,%edi,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm2,%mm6
+	shrl	$4,%ebp
+	pinsrw	$2,(%esi,%ebx,2),%mm1
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%edi,8),%mm6
+	xorb	(%esp,%edi,1),%cl
+	movb	%dl,%al
+	movd	%mm7,%ebx
+	movzbl	%cl,%ecx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%edi
+	psrlq	$8,%mm6
+	pxor	272(%esp,%ebp,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm1,%mm6
+	shrl	$4,%edi
+	pinsrw	$2,(%esi,%ecx,2),%mm0
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%ebp,8),%mm6
+	xorb	(%esp,%ebp,1),%bl
+	movb	%dl,%al
+	movd	%mm7,%ecx
+	movzbl	%bl,%ebx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%ebp
+	psrlq	$8,%mm6
+	pxor	272(%esp,%edi,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm0,%mm6
+	shrl	$4,%ebp
+	pinsrw	$2,(%esi,%ebx,2),%mm2
+	pxor	16(%esp,%eax,8),%mm7
+	roll	$8,%edx
+	pxor	144(%esp,%eax,8),%mm6
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%edi,8),%mm6
+	xorb	(%esp,%edi,1),%cl
+	movb	%dl,%al
+	movl	524(%esp),%edx
+	movd	%mm7,%ebx
+	movzbl	%cl,%ecx
+	psrlq	$8,%mm7
+	movq	%mm6,%mm3
+	movl	%eax,%edi
+	psrlq	$8,%mm6
+	pxor	272(%esp,%ebp,8),%mm7
+	andb	$15,%al
+	psllq	$56,%mm3
+	pxor	%mm2,%mm6
+	shrl	$4,%edi
+	pinsrw	$2,(%esi,%ecx,2),%mm1
+	pxor	16(%esp,%eax,8),%mm7
+	pxor	144(%esp,%eax,8),%mm6
+	xorb	(%esp,%ebp,1),%bl
+	pxor	%mm3,%mm7
+	pxor	400(%esp,%ebp,8),%mm6
+	movzbl	%bl,%ebx
+	pxor	%mm2,%mm2
+	psllq	$4,%mm1
+	movd	%mm7,%ecx
+	psrlq	$4,%mm7
+	movq	%mm6,%mm3
+	psrlq	$4,%mm6
+	shll	$4,%ecx
+	pxor	16(%esp,%edi,8),%mm7
+	psllq	$60,%mm3
+	movzbl	%cl,%ecx
+	pxor	%mm3,%mm7
+	pxor	144(%esp,%edi,8),%mm6
+	pinsrw	$2,(%esi,%ebx,2),%mm0
+	pxor	%mm1,%mm6
+	movd	%mm7,%edx
+	pinsrw	$3,(%esi,%ecx,2),%mm2
+	psllq	$12,%mm0
+	pxor	%mm0,%mm6
+	psrlq	$32,%mm7
+	pxor	%mm2,%mm6
+	movl	548(%esp),%ecx
+	movd	%mm7,%ebx
+	movq	%mm6,%mm3
+	psllw	$8,%mm6
+	psrlw	$8,%mm3
+	por	%mm3,%mm6
+	bswap	%edx
+	pshufw	$27,%mm6,%mm6
+	bswap	%ebx
+	cmpl	552(%esp),%ecx
+	jne	.L009outer
+	movl	544(%esp),%eax
+	movl	%edx,12(%eax)
+	movl	%ebx,8(%eax)
+	movq	%mm6,(%eax)
+	movl	556(%esp),%esp
+	emms
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	gcm_ghash_4bit_mmx,.-.L_gcm_ghash_4bit_mmx_begin
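+# The CLMUL routines and constant tables below appear identical to those in
+# the PIC half of this file; see the comments there.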
+.globl	gcm_init_clmul
+.type	gcm_init_clmul, at function
+.align	16
+gcm_init_clmul:
+.L_gcm_init_clmul_begin:
+	movl	4(%esp),%edx
+	movl	8(%esp),%eax
+	call	.L010pic
+.L010pic:
+	popl	%ecx
+	leal	.Lbswap-.L010pic(%ecx),%ecx
+	movdqu	(%eax),%xmm2
+	pshufd	$78,%xmm2,%xmm2
+	pshufd	$255,%xmm2,%xmm4
+	movdqa	%xmm2,%xmm3
+	psllq	$1,%xmm2
+	pxor	%xmm5,%xmm5
+	psrlq	$63,%xmm3
+	pcmpgtd	%xmm4,%xmm5
+	pslldq	$8,%xmm3
+	por	%xmm3,%xmm2
+	pand	16(%ecx),%xmm5
+	pxor	%xmm5,%xmm2
+	movdqa	%xmm2,%xmm0
+	movdqa	%xmm0,%xmm1
+	pshufd	$78,%xmm0,%xmm3
+	pshufd	$78,%xmm2,%xmm4
+	pxor	%xmm0,%xmm3
+	pxor	%xmm2,%xmm4
+.byte	102,15,58,68,194,0
+.byte	102,15,58,68,202,17
+.byte	102,15,58,68,220,0
+	xorps	%xmm0,%xmm3
+	xorps	%xmm1,%xmm3
+	movdqa	%xmm3,%xmm4
+	psrldq	$8,%xmm3
+	pslldq	$8,%xmm4
+	pxor	%xmm3,%xmm1
+	pxor	%xmm4,%xmm0
+	movdqa	%xmm0,%xmm3
+	psllq	$1,%xmm0
+	pxor	%xmm3,%xmm0
+	psllq	$5,%xmm0
+	pxor	%xmm3,%xmm0
+	psllq	$57,%xmm0
+	movdqa	%xmm0,%xmm4
+	pslldq	$8,%xmm0
+	psrldq	$8,%xmm4
+	pxor	%xmm3,%xmm0
+	pxor	%xmm4,%xmm1
+	movdqa	%xmm0,%xmm4
+	psrlq	$5,%xmm0
+	pxor	%xmm4,%xmm0
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+	pxor	%xmm1,%xmm4
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+	movdqu	%xmm2,(%edx)
+	movdqu	%xmm0,16(%edx)
+	ret
+.size	gcm_init_clmul,.-.L_gcm_init_clmul_begin
+.globl	gcm_gmult_clmul
+.type	gcm_gmult_clmul, at function
+.align	16
+gcm_gmult_clmul:
+.L_gcm_gmult_clmul_begin:
+	movl	4(%esp),%eax
+	movl	8(%esp),%edx
+	call	.L011pic
+.L011pic:
+	popl	%ecx
+	leal	.Lbswap-.L011pic(%ecx),%ecx
+	movdqu	(%eax),%xmm0
+	movdqa	(%ecx),%xmm5
+	movups	(%edx),%xmm2
+.byte	102,15,56,0,197
+	movdqa	%xmm0,%xmm1
+	pshufd	$78,%xmm0,%xmm3
+	pshufd	$78,%xmm2,%xmm4
+	pxor	%xmm0,%xmm3
+	pxor	%xmm2,%xmm4
+.byte	102,15,58,68,194,0
+.byte	102,15,58,68,202,17
+.byte	102,15,58,68,220,0
+	xorps	%xmm0,%xmm3
+	xorps	%xmm1,%xmm3
+	movdqa	%xmm3,%xmm4
+	psrldq	$8,%xmm3
+	pslldq	$8,%xmm4
+	pxor	%xmm3,%xmm1
+	pxor	%xmm4,%xmm0
+	movdqa	%xmm0,%xmm3
+	psllq	$1,%xmm0
+	pxor	%xmm3,%xmm0
+	psllq	$5,%xmm0
+	pxor	%xmm3,%xmm0
+	psllq	$57,%xmm0
+	movdqa	%xmm0,%xmm4
+	pslldq	$8,%xmm0
+	psrldq	$8,%xmm4
+	pxor	%xmm3,%xmm0
+	pxor	%xmm4,%xmm1
+	movdqa	%xmm0,%xmm4
+	psrlq	$5,%xmm0
+	pxor	%xmm4,%xmm0
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+	pxor	%xmm1,%xmm4
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+.byte	102,15,56,0,197
+	movdqu	%xmm0,(%eax)
+	ret
+.size	gcm_gmult_clmul,.-.L_gcm_gmult_clmul_begin
+.globl	gcm_ghash_clmul
+.type	gcm_ghash_clmul, at function
+.align	16
+gcm_ghash_clmul:
+.L_gcm_ghash_clmul_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%eax
+	movl	24(%esp),%edx
+	movl	28(%esp),%esi
+	movl	32(%esp),%ebx
+	call	.L012pic
+.L012pic:
+	popl	%ecx
+	leal	.Lbswap-.L012pic(%ecx),%ecx
+	movdqu	(%eax),%xmm0
+	movdqa	(%ecx),%xmm5
+	movdqu	(%edx),%xmm2
+.byte	102,15,56,0,197
+	subl	$16,%ebx
+	jz	.L013odd_tail
+	movdqu	(%esi),%xmm3
+	movdqu	16(%esi),%xmm6
+.byte	102,15,56,0,221
+.byte	102,15,56,0,245
+	pxor	%xmm3,%xmm0
+	movdqa	%xmm6,%xmm7
+	pshufd	$78,%xmm6,%xmm3
+	pshufd	$78,%xmm2,%xmm4
+	pxor	%xmm6,%xmm3
+	pxor	%xmm2,%xmm4
+.byte	102,15,58,68,242,0
+.byte	102,15,58,68,250,17
+.byte	102,15,58,68,220,0
+	xorps	%xmm6,%xmm3
+	xorps	%xmm7,%xmm3
+	movdqa	%xmm3,%xmm4
+	psrldq	$8,%xmm3
+	pslldq	$8,%xmm4
+	pxor	%xmm3,%xmm7
+	pxor	%xmm4,%xmm6
+	movups	16(%edx),%xmm2
+	leal	32(%esi),%esi
+	subl	$32,%ebx
+	jbe	.L014even_tail
+.L015mod_loop:
+	movdqa	%xmm0,%xmm1
+	pshufd	$78,%xmm0,%xmm3
+	pshufd	$78,%xmm2,%xmm4
+	pxor	%xmm0,%xmm3
+	pxor	%xmm2,%xmm4
+.byte	102,15,58,68,194,0
+.byte	102,15,58,68,202,17
+.byte	102,15,58,68,220,0
+	xorps	%xmm0,%xmm3
+	xorps	%xmm1,%xmm3
+	movdqa	%xmm3,%xmm4
+	psrldq	$8,%xmm3
+	pslldq	$8,%xmm4
+	pxor	%xmm3,%xmm1
+	pxor	%xmm4,%xmm0
+	movdqu	(%esi),%xmm3
+	movups	(%edx),%xmm2
+	pxor	%xmm6,%xmm0
+	pxor	%xmm7,%xmm1
+	movdqu	16(%esi),%xmm6
+.byte	102,15,56,0,221
+.byte	102,15,56,0,245
+	movdqa	%xmm6,%xmm5
+	movdqa	%xmm6,%xmm7
+	pxor	%xmm3,%xmm1
+	movdqa	%xmm0,%xmm3
+	psllq	$1,%xmm0
+	pxor	%xmm3,%xmm0
+	psllq	$5,%xmm0
+	pxor	%xmm3,%xmm0
+.byte	102,15,58,68,242,0
+	psllq	$57,%xmm0
+	movdqa	%xmm0,%xmm4
+	pslldq	$8,%xmm0
+	psrldq	$8,%xmm4
+	pxor	%xmm3,%xmm0
+	pshufd	$78,%xmm5,%xmm3
+	pxor	%xmm4,%xmm1
+	pxor	%xmm5,%xmm3
+	pshufd	$78,%xmm2,%xmm5
+	pxor	%xmm2,%xmm5
+.byte	102,15,58,68,250,17
+	movdqa	%xmm0,%xmm4
+	psrlq	$5,%xmm0
+	pxor	%xmm4,%xmm0
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+	pxor	%xmm1,%xmm4
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+.byte	102,15,58,68,221,0
+	movups	16(%edx),%xmm2
+	xorps	%xmm6,%xmm3
+	xorps	%xmm7,%xmm3
+	movdqa	%xmm3,%xmm5
+	psrldq	$8,%xmm3
+	pslldq	$8,%xmm5
+	pxor	%xmm3,%xmm7
+	pxor	%xmm5,%xmm6
+	movdqa	(%ecx),%xmm5
+	leal	32(%esi),%esi
+	subl	$32,%ebx
+	ja	.L015mod_loop
+.L014even_tail:
+	movdqa	%xmm0,%xmm1
+	pshufd	$78,%xmm0,%xmm3
+	pshufd	$78,%xmm2,%xmm4
+	pxor	%xmm0,%xmm3
+	pxor	%xmm2,%xmm4
+.byte	102,15,58,68,194,0
+.byte	102,15,58,68,202,17
+.byte	102,15,58,68,220,0
+	xorps	%xmm0,%xmm3
+	xorps	%xmm1,%xmm3
+	movdqa	%xmm3,%xmm4
+	psrldq	$8,%xmm3
+	pslldq	$8,%xmm4
+	pxor	%xmm3,%xmm1
+	pxor	%xmm4,%xmm0
+	pxor	%xmm6,%xmm0
+	pxor	%xmm7,%xmm1
+	movdqa	%xmm0,%xmm3
+	psllq	$1,%xmm0
+	pxor	%xmm3,%xmm0
+	psllq	$5,%xmm0
+	pxor	%xmm3,%xmm0
+	psllq	$57,%xmm0
+	movdqa	%xmm0,%xmm4
+	pslldq	$8,%xmm0
+	psrldq	$8,%xmm4
+	pxor	%xmm3,%xmm0
+	pxor	%xmm4,%xmm1
+	movdqa	%xmm0,%xmm4
+	psrlq	$5,%xmm0
+	pxor	%xmm4,%xmm0
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+	pxor	%xmm1,%xmm4
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+	testl	%ebx,%ebx
+	jnz	.L016done
+	movups	(%edx),%xmm2
+.L013odd_tail:
+	movdqu	(%esi),%xmm3
+.byte	102,15,56,0,221
+	pxor	%xmm3,%xmm0
+	movdqa	%xmm0,%xmm1
+	pshufd	$78,%xmm0,%xmm3
+	pshufd	$78,%xmm2,%xmm4
+	pxor	%xmm0,%xmm3
+	pxor	%xmm2,%xmm4
+.byte	102,15,58,68,194,0
+.byte	102,15,58,68,202,17
+.byte	102,15,58,68,220,0
+	xorps	%xmm0,%xmm3
+	xorps	%xmm1,%xmm3
+	movdqa	%xmm3,%xmm4
+	psrldq	$8,%xmm3
+	pslldq	$8,%xmm4
+	pxor	%xmm3,%xmm1
+	pxor	%xmm4,%xmm0
+	movdqa	%xmm0,%xmm3
+	psllq	$1,%xmm0
+	pxor	%xmm3,%xmm0
+	psllq	$5,%xmm0
+	pxor	%xmm3,%xmm0
+	psllq	$57,%xmm0
+	movdqa	%xmm0,%xmm4
+	pslldq	$8,%xmm0
+	psrldq	$8,%xmm4
+	pxor	%xmm3,%xmm0
+	pxor	%xmm4,%xmm1
+	movdqa	%xmm0,%xmm4
+	psrlq	$5,%xmm0
+	pxor	%xmm4,%xmm0
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+	pxor	%xmm1,%xmm4
+	psrlq	$1,%xmm0
+	pxor	%xmm4,%xmm0
+.L016done:
+.byte	102,15,56,0,197
+	movdqu	%xmm0,(%eax)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	gcm_ghash_clmul,.-.L_gcm_ghash_clmul_begin
+.align	64
+.Lbswap:
+.byte	15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
+.byte	1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,194
+.align	64
+.Lrem_4bit:
+.long	0,0,0,471859200,0,943718400,0,610271232
+.long	0,1887436800,0,1822425088,0,1220542464,0,1423966208
+.long	0,3774873600,0,4246732800,0,3644850176,0,3311403008
+.long	0,2441084928,0,2376073216,0,2847932416,0,3051356160
+.align	64
+.Lrem_8bit:
+.value	0,450,900,582,1800,1738,1164,1358
+.value	3600,4050,3476,3158,2328,2266,2716,2910
+.value	7200,7650,8100,7782,6952,6890,6316,6510
+.value	4656,5106,4532,4214,5432,5370,5820,6014
+.value	14400,14722,15300,14854,16200,16010,15564,15630
+.value	13904,14226,13780,13334,12632,12442,13020,13086
+.value	9312,9634,10212,9766,9064,8874,8428,8494
+.value	10864,11186,10740,10294,11640,11450,12028,12094
+.value	28800,28994,29444,29382,30600,30282,29708,30158
+.value	32400,32594,32020,31958,31128,30810,31260,31710
+.value	27808,28002,28452,28390,27560,27242,26668,27118
+.value	25264,25458,24884,24822,26040,25722,26172,26622
+.value	18624,18690,19268,19078,20424,19978,19532,19854
+.value	18128,18194,17748,17558,16856,16410,16988,17310
+.value	21728,21794,22372,22182,21480,21034,20588,20910
+.value	23280,23346,22900,22710,24056,23610,24188,24510
+.value	57600,57538,57988,58182,58888,59338,58764,58446
+.value	61200,61138,60564,60758,59416,59866,60316,59998
+.value	64800,64738,65188,65382,64040,64490,63916,63598
+.value	62256,62194,61620,61814,62520,62970,63420,63102
+.value	55616,55426,56004,56070,56904,57226,56780,56334
+.value	55120,54930,54484,54550,53336,53658,54236,53790
+.value	50528,50338,50916,50982,49768,50090,49644,49198
+.value	52080,51890,51444,51510,52344,52666,53244,52798
+.value	37248,36930,37380,37830,38536,38730,38156,38094
+.value	40848,40530,39956,40406,39064,39258,39708,39646
+.value	36256,35938,36388,36838,35496,35690,35116,35054
+.value	33712,33394,32820,33270,33976,34170,34620,34558
+.value	43456,43010,43588,43910,44744,44810,44364,44174
+.value	42960,42514,42068,42390,41176,41242,41820,41630
+.value	46560,46114,46692,47014,45800,45866,45420,45230
+.value	48112,47666,47220,47542,48376,48442,49020,48830
+.byte	71,72,65,83,72,32,102,111,114,32,120,56,54,44,32,67
+.byte	82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112
+.byte	112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62
+.byte	0
+#endif


Property changes on: trunk/secure/lib/libcrypto/i386/ghash-x86.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Deleted: trunk/secure/lib/libcrypto/i386/ghash-x86.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/ghash-x86.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/ghash-x86.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,1270 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/ghash-x86.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"ghash-x86.s"
-.text
-.globl	gcm_gmult_4bit_x86
-.type	gcm_gmult_4bit_x86, at function
-.align	16
-gcm_gmult_4bit_x86:
-.L_gcm_gmult_4bit_x86_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	subl	$84,%esp
-	movl	104(%esp),%edi
-	movl	108(%esp),%esi
-	movl	(%edi),%ebp
-	movl	4(%edi),%edx
-	movl	8(%edi),%ecx
-	movl	12(%edi),%ebx
-	movl	$0,16(%esp)
-	movl	$471859200,20(%esp)
-	movl	$943718400,24(%esp)
-	movl	$610271232,28(%esp)
-	movl	$1887436800,32(%esp)
-	movl	$1822425088,36(%esp)
-	movl	$1220542464,40(%esp)
-	movl	$1423966208,44(%esp)
-	movl	$3774873600,48(%esp)
-	movl	$4246732800,52(%esp)
-	movl	$3644850176,56(%esp)
-	movl	$3311403008,60(%esp)
-	movl	$2441084928,64(%esp)
-	movl	$2376073216,68(%esp)
-	movl	$2847932416,72(%esp)
-	movl	$3051356160,76(%esp)
-	movl	%ebp,(%esp)
-	movl	%edx,4(%esp)
-	movl	%ecx,8(%esp)
-	movl	%ebx,12(%esp)
-	shrl	$20,%ebx
-	andl	$240,%ebx
-	movl	4(%esi,%ebx,1),%ebp
-	movl	(%esi,%ebx,1),%edx
-	movl	12(%esi,%ebx,1),%ecx
-	movl	8(%esi,%ebx,1),%ebx
-	xorl	%eax,%eax
-	movl	$15,%edi
-	jmp	.L000x86_loop
-.align	16
-.L000x86_loop:
-	movb	%bl,%al
-	shrdl	$4,%ecx,%ebx
-	andb	$15,%al
-	shrdl	$4,%edx,%ecx
-	shrdl	$4,%ebp,%edx
-	shrl	$4,%ebp
-	xorl	16(%esp,%eax,4),%ebp
-	movb	(%esp,%edi,1),%al
-	andb	$240,%al
-	xorl	8(%esi,%eax,1),%ebx
-	xorl	12(%esi,%eax,1),%ecx
-	xorl	(%esi,%eax,1),%edx
-	xorl	4(%esi,%eax,1),%ebp
-	decl	%edi
-	js	.L001x86_break
-	movb	%bl,%al
-	shrdl	$4,%ecx,%ebx
-	andb	$15,%al
-	shrdl	$4,%edx,%ecx
-	shrdl	$4,%ebp,%edx
-	shrl	$4,%ebp
-	xorl	16(%esp,%eax,4),%ebp
-	movb	(%esp,%edi,1),%al
-	shlb	$4,%al
-	xorl	8(%esi,%eax,1),%ebx
-	xorl	12(%esi,%eax,1),%ecx
-	xorl	(%esi,%eax,1),%edx
-	xorl	4(%esi,%eax,1),%ebp
-	jmp	.L000x86_loop
-.align	16
-.L001x86_break:
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	bswap	%ebp
-	movl	104(%esp),%edi
-	movl	%ebx,12(%edi)
-	movl	%ecx,8(%edi)
-	movl	%edx,4(%edi)
-	movl	%ebp,(%edi)
-	addl	$84,%esp
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	gcm_gmult_4bit_x86,.-.L_gcm_gmult_4bit_x86_begin
-.globl	gcm_ghash_4bit_x86
-.type	gcm_ghash_4bit_x86, at function
-.align	16
-gcm_ghash_4bit_x86:
-.L_gcm_ghash_4bit_x86_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	subl	$84,%esp
-	movl	104(%esp),%ebx
-	movl	108(%esp),%esi
-	movl	112(%esp),%edi
-	movl	116(%esp),%ecx
-	addl	%edi,%ecx
-	movl	%ecx,116(%esp)
-	movl	(%ebx),%ebp
-	movl	4(%ebx),%edx
-	movl	8(%ebx),%ecx
-	movl	12(%ebx),%ebx
-	movl	$0,16(%esp)
-	movl	$471859200,20(%esp)
-	movl	$943718400,24(%esp)
-	movl	$610271232,28(%esp)
-	movl	$1887436800,32(%esp)
-	movl	$1822425088,36(%esp)
-	movl	$1220542464,40(%esp)
-	movl	$1423966208,44(%esp)
-	movl	$3774873600,48(%esp)
-	movl	$4246732800,52(%esp)
-	movl	$3644850176,56(%esp)
-	movl	$3311403008,60(%esp)
-	movl	$2441084928,64(%esp)
-	movl	$2376073216,68(%esp)
-	movl	$2847932416,72(%esp)
-	movl	$3051356160,76(%esp)
-.align	16
-.L002x86_outer_loop:
-	xorl	12(%edi),%ebx
-	xorl	8(%edi),%ecx
-	xorl	4(%edi),%edx
-	xorl	(%edi),%ebp
-	movl	%ebx,12(%esp)
-	movl	%ecx,8(%esp)
-	movl	%edx,4(%esp)
-	movl	%ebp,(%esp)
-	shrl	$20,%ebx
-	andl	$240,%ebx
-	movl	4(%esi,%ebx,1),%ebp
-	movl	(%esi,%ebx,1),%edx
-	movl	12(%esi,%ebx,1),%ecx
-	movl	8(%esi,%ebx,1),%ebx
-	xorl	%eax,%eax
-	movl	$15,%edi
-	jmp	.L003x86_loop
-.align	16
-.L003x86_loop:
-	movb	%bl,%al
-	shrdl	$4,%ecx,%ebx
-	andb	$15,%al
-	shrdl	$4,%edx,%ecx
-	shrdl	$4,%ebp,%edx
-	shrl	$4,%ebp
-	xorl	16(%esp,%eax,4),%ebp
-	movb	(%esp,%edi,1),%al
-	andb	$240,%al
-	xorl	8(%esi,%eax,1),%ebx
-	xorl	12(%esi,%eax,1),%ecx
-	xorl	(%esi,%eax,1),%edx
-	xorl	4(%esi,%eax,1),%ebp
-	decl	%edi
-	js	.L004x86_break
-	movb	%bl,%al
-	shrdl	$4,%ecx,%ebx
-	andb	$15,%al
-	shrdl	$4,%edx,%ecx
-	shrdl	$4,%ebp,%edx
-	shrl	$4,%ebp
-	xorl	16(%esp,%eax,4),%ebp
-	movb	(%esp,%edi,1),%al
-	shlb	$4,%al
-	xorl	8(%esi,%eax,1),%ebx
-	xorl	12(%esi,%eax,1),%ecx
-	xorl	(%esi,%eax,1),%edx
-	xorl	4(%esi,%eax,1),%ebp
-	jmp	.L003x86_loop
-.align	16
-.L004x86_break:
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	bswap	%ebp
-	movl	112(%esp),%edi
-	leal	16(%edi),%edi
-	cmpl	116(%esp),%edi
-	movl	%edi,112(%esp)
-	jb	.L002x86_outer_loop
-	movl	104(%esp),%edi
-	movl	%ebx,12(%edi)
-	movl	%ecx,8(%edi)
-	movl	%edx,4(%edi)
-	movl	%ebp,(%edi)
-	addl	$84,%esp
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	gcm_ghash_4bit_x86,.-.L_gcm_ghash_4bit_x86_begin
-.globl	gcm_gmult_4bit_mmx
-.type	gcm_gmult_4bit_mmx, at function
-.align	16
-gcm_gmult_4bit_mmx:
-.L_gcm_gmult_4bit_mmx_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%edi
-	movl	24(%esp),%esi
-	call	.L005pic_point
-.L005pic_point:
-	popl	%eax
-	leal	.Lrem_4bit-.L005pic_point(%eax),%eax
-	movzbl	15(%edi),%ebx
-	xorl	%ecx,%ecx
-	movl	%ebx,%edx
-	movb	%dl,%cl
-	movl	$14,%ebp
-	shlb	$4,%cl
-	andl	$240,%edx
-	movq	8(%esi,%ecx,1),%mm0
-	movq	(%esi,%ecx,1),%mm1
-	movd	%mm0,%ebx
-	jmp	.L006mmx_loop
-.align	16
-.L006mmx_loop:
-	psrlq	$4,%mm0
-	andl	$15,%ebx
-	movq	%mm1,%mm2
-	psrlq	$4,%mm1
-	pxor	8(%esi,%edx,1),%mm0
-	movb	(%edi,%ebp,1),%cl
-	psllq	$60,%mm2
-	pxor	(%eax,%ebx,8),%mm1
-	decl	%ebp
-	movd	%mm0,%ebx
-	pxor	(%esi,%edx,1),%mm1
-	movl	%ecx,%edx
-	pxor	%mm2,%mm0
-	js	.L007mmx_break
-	shlb	$4,%cl
-	andl	$15,%ebx
-	psrlq	$4,%mm0
-	andl	$240,%edx
-	movq	%mm1,%mm2
-	psrlq	$4,%mm1
-	pxor	8(%esi,%ecx,1),%mm0
-	psllq	$60,%mm2
-	pxor	(%eax,%ebx,8),%mm1
-	movd	%mm0,%ebx
-	pxor	(%esi,%ecx,1),%mm1
-	pxor	%mm2,%mm0
-	jmp	.L006mmx_loop
-.align	16
-.L007mmx_break:
-	shlb	$4,%cl
-	andl	$15,%ebx
-	psrlq	$4,%mm0
-	andl	$240,%edx
-	movq	%mm1,%mm2
-	psrlq	$4,%mm1
-	pxor	8(%esi,%ecx,1),%mm0
-	psllq	$60,%mm2
-	pxor	(%eax,%ebx,8),%mm1
-	movd	%mm0,%ebx
-	pxor	(%esi,%ecx,1),%mm1
-	pxor	%mm2,%mm0
-	psrlq	$4,%mm0
-	andl	$15,%ebx
-	movq	%mm1,%mm2
-	psrlq	$4,%mm1
-	pxor	8(%esi,%edx,1),%mm0
-	psllq	$60,%mm2
-	pxor	(%eax,%ebx,8),%mm1
-	movd	%mm0,%ebx
-	pxor	(%esi,%edx,1),%mm1
-	pxor	%mm2,%mm0
-	psrlq	$32,%mm0
-	movd	%mm1,%edx
-	psrlq	$32,%mm1
-	movd	%mm0,%ecx
-	movd	%mm1,%ebp
-	bswap	%ebx
-	bswap	%edx
-	bswap	%ecx
-	bswap	%ebp
-	emms
-	movl	%ebx,12(%edi)
-	movl	%edx,4(%edi)
-	movl	%ecx,8(%edi)
-	movl	%ebp,(%edi)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	gcm_gmult_4bit_mmx,.-.L_gcm_gmult_4bit_mmx_begin
-.globl	gcm_ghash_4bit_mmx
-.type	gcm_ghash_4bit_mmx, at function
-.align	16
-gcm_ghash_4bit_mmx:
-.L_gcm_ghash_4bit_mmx_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%eax
-	movl	24(%esp),%ebx
-	movl	28(%esp),%ecx
-	movl	32(%esp),%edx
-	movl	%esp,%ebp
-	call	.L008pic_point
-.L008pic_point:
-	popl	%esi
-	leal	.Lrem_8bit-.L008pic_point(%esi),%esi
-	subl	$544,%esp
-	andl	$-64,%esp
-	subl	$16,%esp
-	addl	%ecx,%edx
-	movl	%eax,544(%esp)
-	movl	%edx,552(%esp)
-	movl	%ebp,556(%esp)
-	addl	$128,%ebx
-	leal	144(%esp),%edi
-	leal	400(%esp),%ebp
-	movl	-120(%ebx),%edx
-	movq	-120(%ebx),%mm0
-	movq	-128(%ebx),%mm3
-	shll	$4,%edx
-	movb	%dl,(%esp)
-	movl	-104(%ebx),%edx
-	movq	-104(%ebx),%mm2
-	movq	-112(%ebx),%mm5
-	movq	%mm0,-128(%edi)
-	psrlq	$4,%mm0
-	movq	%mm3,(%edi)
-	movq	%mm3,%mm7
-	psrlq	$4,%mm3
-	shll	$4,%edx
-	movb	%dl,1(%esp)
-	movl	-88(%ebx),%edx
-	movq	-88(%ebx),%mm1
-	psllq	$60,%mm7
-	movq	-96(%ebx),%mm4
-	por	%mm7,%mm0
-	movq	%mm2,-120(%edi)
-	psrlq	$4,%mm2
-	movq	%mm5,8(%edi)
-	movq	%mm5,%mm6
-	movq	%mm0,-128(%ebp)
-	psrlq	$4,%mm5
-	movq	%mm3,(%ebp)
-	shll	$4,%edx
-	movb	%dl,2(%esp)
-	movl	-72(%ebx),%edx
-	movq	-72(%ebx),%mm0
-	psllq	$60,%mm6
-	movq	-80(%ebx),%mm3
-	por	%mm6,%mm2
-	movq	%mm1,-112(%edi)
-	psrlq	$4,%mm1
-	movq	%mm4,16(%edi)
-	movq	%mm4,%mm7
-	movq	%mm2,-120(%ebp)
-	psrlq	$4,%mm4
-	movq	%mm5,8(%ebp)
-	shll	$4,%edx
-	movb	%dl,3(%esp)
-	movl	-56(%ebx),%edx
-	movq	-56(%ebx),%mm2
-	psllq	$60,%mm7
-	movq	-64(%ebx),%mm5
-	por	%mm7,%mm1
-	movq	%mm0,-104(%edi)
-	psrlq	$4,%mm0
-	movq	%mm3,24(%edi)
-	movq	%mm3,%mm6
-	movq	%mm1,-112(%ebp)
-	psrlq	$4,%mm3
-	movq	%mm4,16(%ebp)
-	shll	$4,%edx
-	movb	%dl,4(%esp)
-	movl	-40(%ebx),%edx
-	movq	-40(%ebx),%mm1
-	psllq	$60,%mm6
-	movq	-48(%ebx),%mm4
-	por	%mm6,%mm0
-	movq	%mm2,-96(%edi)
-	psrlq	$4,%mm2
-	movq	%mm5,32(%edi)
-	movq	%mm5,%mm7
-	movq	%mm0,-104(%ebp)
-	psrlq	$4,%mm5
-	movq	%mm3,24(%ebp)
-	shll	$4,%edx
-	movb	%dl,5(%esp)
-	movl	-24(%ebx),%edx
-	movq	-24(%ebx),%mm0
-	psllq	$60,%mm7
-	movq	-32(%ebx),%mm3
-	por	%mm7,%mm2
-	movq	%mm1,-88(%edi)
-	psrlq	$4,%mm1
-	movq	%mm4,40(%edi)
-	movq	%mm4,%mm6
-	movq	%mm2,-96(%ebp)
-	psrlq	$4,%mm4
-	movq	%mm5,32(%ebp)
-	shll	$4,%edx
-	movb	%dl,6(%esp)
-	movl	-8(%ebx),%edx
-	movq	-8(%ebx),%mm2
-	psllq	$60,%mm6
-	movq	-16(%ebx),%mm5
-	por	%mm6,%mm1
-	movq	%mm0,-80(%edi)
-	psrlq	$4,%mm0
-	movq	%mm3,48(%edi)
-	movq	%mm3,%mm7
-	movq	%mm1,-88(%ebp)
-	psrlq	$4,%mm3
-	movq	%mm4,40(%ebp)
-	shll	$4,%edx
-	movb	%dl,7(%esp)
-	movl	8(%ebx),%edx
-	movq	8(%ebx),%mm1
-	psllq	$60,%mm7
-	movq	(%ebx),%mm4
-	por	%mm7,%mm0
-	movq	%mm2,-72(%edi)
-	psrlq	$4,%mm2
-	movq	%mm5,56(%edi)
-	movq	%mm5,%mm6
-	movq	%mm0,-80(%ebp)
-	psrlq	$4,%mm5
-	movq	%mm3,48(%ebp)
-	shll	$4,%edx
-	movb	%dl,8(%esp)
-	movl	24(%ebx),%edx
-	movq	24(%ebx),%mm0
-	psllq	$60,%mm6
-	movq	16(%ebx),%mm3
-	por	%mm6,%mm2
-	movq	%mm1,-64(%edi)
-	psrlq	$4,%mm1
-	movq	%mm4,64(%edi)
-	movq	%mm4,%mm7
-	movq	%mm2,-72(%ebp)
-	psrlq	$4,%mm4
-	movq	%mm5,56(%ebp)
-	shll	$4,%edx
-	movb	%dl,9(%esp)
-	movl	40(%ebx),%edx
-	movq	40(%ebx),%mm2
-	psllq	$60,%mm7
-	movq	32(%ebx),%mm5
-	por	%mm7,%mm1
-	movq	%mm0,-56(%edi)
-	psrlq	$4,%mm0
-	movq	%mm3,72(%edi)
-	movq	%mm3,%mm6
-	movq	%mm1,-64(%ebp)
-	psrlq	$4,%mm3
-	movq	%mm4,64(%ebp)
-	shll	$4,%edx
-	movb	%dl,10(%esp)
-	movl	56(%ebx),%edx
-	movq	56(%ebx),%mm1
-	psllq	$60,%mm6
-	movq	48(%ebx),%mm4
-	por	%mm6,%mm0
-	movq	%mm2,-48(%edi)
-	psrlq	$4,%mm2
-	movq	%mm5,80(%edi)
-	movq	%mm5,%mm7
-	movq	%mm0,-56(%ebp)
-	psrlq	$4,%mm5
-	movq	%mm3,72(%ebp)
-	shll	$4,%edx
-	movb	%dl,11(%esp)
-	movl	72(%ebx),%edx
-	movq	72(%ebx),%mm0
-	psllq	$60,%mm7
-	movq	64(%ebx),%mm3
-	por	%mm7,%mm2
-	movq	%mm1,-40(%edi)
-	psrlq	$4,%mm1
-	movq	%mm4,88(%edi)
-	movq	%mm4,%mm6
-	movq	%mm2,-48(%ebp)
-	psrlq	$4,%mm4
-	movq	%mm5,80(%ebp)
-	shll	$4,%edx
-	movb	%dl,12(%esp)
-	movl	88(%ebx),%edx
-	movq	88(%ebx),%mm2
-	psllq	$60,%mm6
-	movq	80(%ebx),%mm5
-	por	%mm6,%mm1
-	movq	%mm0,-32(%edi)
-	psrlq	$4,%mm0
-	movq	%mm3,96(%edi)
-	movq	%mm3,%mm7
-	movq	%mm1,-40(%ebp)
-	psrlq	$4,%mm3
-	movq	%mm4,88(%ebp)
-	shll	$4,%edx
-	movb	%dl,13(%esp)
-	movl	104(%ebx),%edx
-	movq	104(%ebx),%mm1
-	psllq	$60,%mm7
-	movq	96(%ebx),%mm4
-	por	%mm7,%mm0
-	movq	%mm2,-24(%edi)
-	psrlq	$4,%mm2
-	movq	%mm5,104(%edi)
-	movq	%mm5,%mm6
-	movq	%mm0,-32(%ebp)
-	psrlq	$4,%mm5
-	movq	%mm3,96(%ebp)
-	shll	$4,%edx
-	movb	%dl,14(%esp)
-	movl	120(%ebx),%edx
-	movq	120(%ebx),%mm0
-	psllq	$60,%mm6
-	movq	112(%ebx),%mm3
-	por	%mm6,%mm2
-	movq	%mm1,-16(%edi)
-	psrlq	$4,%mm1
-	movq	%mm4,112(%edi)
-	movq	%mm4,%mm7
-	movq	%mm2,-24(%ebp)
-	psrlq	$4,%mm4
-	movq	%mm5,104(%ebp)
-	shll	$4,%edx
-	movb	%dl,15(%esp)
-	psllq	$60,%mm7
-	por	%mm7,%mm1
-	movq	%mm0,-8(%edi)
-	psrlq	$4,%mm0
-	movq	%mm3,120(%edi)
-	movq	%mm3,%mm6
-	movq	%mm1,-16(%ebp)
-	psrlq	$4,%mm3
-	movq	%mm4,112(%ebp)
-	psllq	$60,%mm6
-	por	%mm6,%mm0
-	movq	%mm0,-8(%ebp)
-	movq	%mm3,120(%ebp)
-	movq	(%eax),%mm6
-	movl	8(%eax),%ebx
-	movl	12(%eax),%edx
-.align	16
-.L009outer:
-	xorl	12(%ecx),%edx
-	xorl	8(%ecx),%ebx
-	pxor	(%ecx),%mm6
-	leal	16(%ecx),%ecx
-	movl	%ebx,536(%esp)
-	movq	%mm6,528(%esp)
-	movl	%ecx,548(%esp)
-	xorl	%eax,%eax
-	roll	$8,%edx
-	movb	%dl,%al
-	movl	%eax,%ebp
-	andb	$15,%al
-	shrl	$4,%ebp
-	pxor	%mm0,%mm0
-	roll	$8,%edx
-	pxor	%mm1,%mm1
-	pxor	%mm2,%mm2
-	movq	16(%esp,%eax,8),%mm7
-	movq	144(%esp,%eax,8),%mm6
-	movb	%dl,%al
-	movd	%mm7,%ebx
-	psrlq	$8,%mm7
-	movq	%mm6,%mm3
-	movl	%eax,%edi
-	psrlq	$8,%mm6
-	pxor	272(%esp,%ebp,8),%mm7
-	andb	$15,%al
-	psllq	$56,%mm3
-	shrl	$4,%edi
-	pxor	16(%esp,%eax,8),%mm7
-	roll	$8,%edx
-	pxor	144(%esp,%eax,8),%mm6
-	pxor	%mm3,%mm7
-	pxor	400(%esp,%ebp,8),%mm6
-	xorb	(%esp,%ebp,1),%bl
-	movb	%dl,%al
-	movd	%mm7,%ecx
-	movzbl	%bl,%ebx
-	psrlq	$8,%mm7
-	movq	%mm6,%mm3
-	movl	%eax,%ebp
-	psrlq	$8,%mm6
-	pxor	272(%esp,%edi,8),%mm7
-	andb	$15,%al
-	psllq	$56,%mm3
-	shrl	$4,%ebp
-	pinsrw	$2,(%esi,%ebx,2),%mm2
-	pxor	16(%esp,%eax,8),%mm7
-	roll	$8,%edx
-	pxor	144(%esp,%eax,8),%mm6
-	pxor	%mm3,%mm7
-	pxor	400(%esp,%edi,8),%mm6
-	xorb	(%esp,%edi,1),%cl
-	movb	%dl,%al
-	movl	536(%esp),%edx
-	movd	%mm7,%ebx
-	movzbl	%cl,%ecx
-	psrlq	$8,%mm7
-	movq	%mm6,%mm3
-	movl	%eax,%edi
-	psrlq	$8,%mm6
-	pxor	272(%esp,%ebp,8),%mm7
-	andb	$15,%al
-	psllq	$56,%mm3
-	pxor	%mm2,%mm6
-	shrl	$4,%edi
-	pinsrw	$2,(%esi,%ecx,2),%mm1
-	pxor	16(%esp,%eax,8),%mm7
-	roll	$8,%edx
-	pxor	144(%esp,%eax,8),%mm6
-	pxor	%mm3,%mm7
-	pxor	400(%esp,%ebp,8),%mm6
-	xorb	(%esp,%ebp,1),%bl
-	movb	%dl,%al
-	movd	%mm7,%ecx
-	movzbl	%bl,%ebx
-	psrlq	$8,%mm7
-	movq	%mm6,%mm3
-	movl	%eax,%ebp
-	psrlq	$8,%mm6
-	pxor	272(%esp,%edi,8),%mm7
-	andb	$15,%al
-	psllq	$56,%mm3
-	pxor	%mm1,%mm6
-	shrl	$4,%ebp
-	pinsrw	$2,(%esi,%ebx,2),%mm0
-	pxor	16(%esp,%eax,8),%mm7
-	roll	$8,%edx
-	pxor	144(%esp,%eax,8),%mm6
-	pxor	%mm3,%mm7
-	pxor	400(%esp,%edi,8),%mm6
-	xorb	(%esp,%edi,1),%cl
-	movb	%dl,%al
-	movd	%mm7,%ebx
-	movzbl	%cl,%ecx
-	psrlq	$8,%mm7
-	movq	%mm6,%mm3
-	movl	%eax,%edi
-	psrlq	$8,%mm6
-	pxor	272(%esp,%ebp,8),%mm7
-	andb	$15,%al
-	psllq	$56,%mm3
-	pxor	%mm0,%mm6
-	shrl	$4,%edi
-	pinsrw	$2,(%esi,%ecx,2),%mm2
-	pxor	16(%esp,%eax,8),%mm7
-	roll	$8,%edx
-	pxor	144(%esp,%eax,8),%mm6
-	pxor	%mm3,%mm7
-	pxor	400(%esp,%ebp,8),%mm6
-	xorb	(%esp,%ebp,1),%bl
-	movb	%dl,%al
-	movd	%mm7,%ecx
-	movzbl	%bl,%ebx
-	psrlq	$8,%mm7
-	movq	%mm6,%mm3
-	movl	%eax,%ebp
-	psrlq	$8,%mm6
-	pxor	272(%esp,%edi,8),%mm7
-	andb	$15,%al
-	psllq	$56,%mm3
-	pxor	%mm2,%mm6
-	shrl	$4,%ebp
-	pinsrw	$2,(%esi,%ebx,2),%mm1
-	pxor	16(%esp,%eax,8),%mm7
-	roll	$8,%edx
-	pxor	144(%esp,%eax,8),%mm6
-	pxor	%mm3,%mm7
-	pxor	400(%esp,%edi,8),%mm6
-	xorb	(%esp,%edi,1),%cl
-	movb	%dl,%al
-	movl	532(%esp),%edx
-	movd	%mm7,%ebx
-	movzbl	%cl,%ecx
-	psrlq	$8,%mm7
-	movq	%mm6,%mm3
-	movl	%eax,%edi
-	psrlq	$8,%mm6
-	pxor	272(%esp,%ebp,8),%mm7
-	andb	$15,%al
-	psllq	$56,%mm3
-	pxor	%mm1,%mm6
-	shrl	$4,%edi
-	pinsrw	$2,(%esi,%ecx,2),%mm0
-	pxor	16(%esp,%eax,8),%mm7
-	roll	$8,%edx
-	pxor	144(%esp,%eax,8),%mm6
-	pxor	%mm3,%mm7
-	pxor	400(%esp,%ebp,8),%mm6
-	xorb	(%esp,%ebp,1),%bl
-	movb	%dl,%al
-	movd	%mm7,%ecx
-	movzbl	%bl,%ebx
-	psrlq	$8,%mm7
-	movq	%mm6,%mm3
-	movl	%eax,%ebp
-	psrlq	$8,%mm6
-	pxor	272(%esp,%edi,8),%mm7
-	andb	$15,%al
-	psllq	$56,%mm3
-	pxor	%mm0,%mm6
-	shrl	$4,%ebp
-	pinsrw	$2,(%esi,%ebx,2),%mm2
-	pxor	16(%esp,%eax,8),%mm7
-	roll	$8,%edx
-	pxor	144(%esp,%eax,8),%mm6
-	pxor	%mm3,%mm7
-	pxor	400(%esp,%edi,8),%mm6
-	xorb	(%esp,%edi,1),%cl
-	movb	%dl,%al
-	movd	%mm7,%ebx
-	movzbl	%cl,%ecx
-	psrlq	$8,%mm7
-	movq	%mm6,%mm3
-	movl	%eax,%edi
-	psrlq	$8,%mm6
-	pxor	272(%esp,%ebp,8),%mm7
-	andb	$15,%al
-	psllq	$56,%mm3
-	pxor	%mm2,%mm6
-	shrl	$4,%edi
-	pinsrw	$2,(%esi,%ecx,2),%mm1
-	pxor	16(%esp,%eax,8),%mm7
-	roll	$8,%edx
-	pxor	144(%esp,%eax,8),%mm6
-	pxor	%mm3,%mm7
-	pxor	400(%esp,%ebp,8),%mm6
-	xorb	(%esp,%ebp,1),%bl
-	movb	%dl,%al
-	movd	%mm7,%ecx
-	movzbl	%bl,%ebx
-	psrlq	$8,%mm7
-	movq	%mm6,%mm3
-	movl	%eax,%ebp
-	psrlq	$8,%mm6
-	pxor	272(%esp,%edi,8),%mm7
-	andb	$15,%al
-	psllq	$56,%mm3
-	pxor	%mm1,%mm6
-	shrl	$4,%ebp
-	pinsrw	$2,(%esi,%ebx,2),%mm0
-	pxor	16(%esp,%eax,8),%mm7
-	roll	$8,%edx
-	pxor	144(%esp,%eax,8),%mm6
-	pxor	%mm3,%mm7
-	pxor	400(%esp,%edi,8),%mm6
-	xorb	(%esp,%edi,1),%cl
-	movb	%dl,%al
-	movl	528(%esp),%edx
-	movd	%mm7,%ebx
-	movzbl	%cl,%ecx
-	psrlq	$8,%mm7
-	movq	%mm6,%mm3
-	movl	%eax,%edi
-	psrlq	$8,%mm6
-	pxor	272(%esp,%ebp,8),%mm7
-	andb	$15,%al
-	psllq	$56,%mm3
-	pxor	%mm0,%mm6
-	shrl	$4,%edi
-	pinsrw	$2,(%esi,%ecx,2),%mm2
-	pxor	16(%esp,%eax,8),%mm7
-	roll	$8,%edx
-	pxor	144(%esp,%eax,8),%mm6
-	pxor	%mm3,%mm7
-	pxor	400(%esp,%ebp,8),%mm6
-	xorb	(%esp,%ebp,1),%bl
-	movb	%dl,%al
-	movd	%mm7,%ecx
-	movzbl	%bl,%ebx
-	psrlq	$8,%mm7
-	movq	%mm6,%mm3
-	movl	%eax,%ebp
-	psrlq	$8,%mm6
-	pxor	272(%esp,%edi,8),%mm7
-	andb	$15,%al
-	psllq	$56,%mm3
-	pxor	%mm2,%mm6
-	shrl	$4,%ebp
-	pinsrw	$2,(%esi,%ebx,2),%mm1
-	pxor	16(%esp,%eax,8),%mm7
-	roll	$8,%edx
-	pxor	144(%esp,%eax,8),%mm6
-	pxor	%mm3,%mm7
-	pxor	400(%esp,%edi,8),%mm6
-	xorb	(%esp,%edi,1),%cl
-	movb	%dl,%al
-	movd	%mm7,%ebx
-	movzbl	%cl,%ecx
-	psrlq	$8,%mm7
-	movq	%mm6,%mm3
-	movl	%eax,%edi
-	psrlq	$8,%mm6
-	pxor	272(%esp,%ebp,8),%mm7
-	andb	$15,%al
-	psllq	$56,%mm3
-	pxor	%mm1,%mm6
-	shrl	$4,%edi
-	pinsrw	$2,(%esi,%ecx,2),%mm0
-	pxor	16(%esp,%eax,8),%mm7
-	roll	$8,%edx
-	pxor	144(%esp,%eax,8),%mm6
-	pxor	%mm3,%mm7
-	pxor	400(%esp,%ebp,8),%mm6
-	xorb	(%esp,%ebp,1),%bl
-	movb	%dl,%al
-	movd	%mm7,%ecx
-	movzbl	%bl,%ebx
-	psrlq	$8,%mm7
-	movq	%mm6,%mm3
-	movl	%eax,%ebp
-	psrlq	$8,%mm6
-	pxor	272(%esp,%edi,8),%mm7
-	andb	$15,%al
-	psllq	$56,%mm3
-	pxor	%mm0,%mm6
-	shrl	$4,%ebp
-	pinsrw	$2,(%esi,%ebx,2),%mm2
-	pxor	16(%esp,%eax,8),%mm7
-	roll	$8,%edx
-	pxor	144(%esp,%eax,8),%mm6
-	pxor	%mm3,%mm7
-	pxor	400(%esp,%edi,8),%mm6
-	xorb	(%esp,%edi,1),%cl
-	movb	%dl,%al
-	movl	524(%esp),%edx
-	movd	%mm7,%ebx
-	movzbl	%cl,%ecx
-	psrlq	$8,%mm7
-	movq	%mm6,%mm3
-	movl	%eax,%edi
-	psrlq	$8,%mm6
-	pxor	272(%esp,%ebp,8),%mm7
-	andb	$15,%al
-	psllq	$56,%mm3
-	pxor	%mm2,%mm6
-	shrl	$4,%edi
-	pinsrw	$2,(%esi,%ecx,2),%mm1
-	pxor	16(%esp,%eax,8),%mm7
-	pxor	144(%esp,%eax,8),%mm6
-	xorb	(%esp,%ebp,1),%bl
-	pxor	%mm3,%mm7
-	pxor	400(%esp,%ebp,8),%mm6
-	movzbl	%bl,%ebx
-	pxor	%mm2,%mm2
-	psllq	$4,%mm1
-	movd	%mm7,%ecx
-	psrlq	$4,%mm7
-	movq	%mm6,%mm3
-	psrlq	$4,%mm6
-	shll	$4,%ecx
-	pxor	16(%esp,%edi,8),%mm7
-	psllq	$60,%mm3
-	movzbl	%cl,%ecx
-	pxor	%mm3,%mm7
-	pxor	144(%esp,%edi,8),%mm6
-	pinsrw	$2,(%esi,%ebx,2),%mm0
-	pxor	%mm1,%mm6
-	movd	%mm7,%edx
-	pinsrw	$3,(%esi,%ecx,2),%mm2
-	psllq	$12,%mm0
-	pxor	%mm0,%mm6
-	psrlq	$32,%mm7
-	pxor	%mm2,%mm6
-	movl	548(%esp),%ecx
-	movd	%mm7,%ebx
-	movq	%mm6,%mm3
-	psllw	$8,%mm6
-	psrlw	$8,%mm3
-	por	%mm3,%mm6
-	bswap	%edx
-	pshufw	$27,%mm6,%mm6
-	bswap	%ebx
-	cmpl	552(%esp),%ecx
-	jne	.L009outer
-	movl	544(%esp),%eax
-	movl	%edx,12(%eax)
-	movl	%ebx,8(%eax)
-	movq	%mm6,(%eax)
-	movl	556(%esp),%esp
-	emms
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	gcm_ghash_4bit_mmx,.-.L_gcm_ghash_4bit_mmx_begin
-.globl	gcm_init_clmul
-.type	gcm_init_clmul,@function
-.align	16
-gcm_init_clmul:
-.L_gcm_init_clmul_begin:
-	movl	4(%esp),%edx
-	movl	8(%esp),%eax
-	call	.L010pic
-.L010pic:
-	popl	%ecx
-	leal	.Lbswap-.L010pic(%ecx),%ecx
-	movdqu	(%eax),%xmm2
-	pshufd	$78,%xmm2,%xmm2
-	pshufd	$255,%xmm2,%xmm4
-	movdqa	%xmm2,%xmm3
-	psllq	$1,%xmm2
-	pxor	%xmm5,%xmm5
-	psrlq	$63,%xmm3
-	pcmpgtd	%xmm4,%xmm5
-	pslldq	$8,%xmm3
-	por	%xmm3,%xmm2
-	pand	16(%ecx),%xmm5
-	pxor	%xmm5,%xmm2
-	movdqa	%xmm2,%xmm0
-	movdqa	%xmm0,%xmm1
-	pshufd	$78,%xmm0,%xmm3
-	pshufd	$78,%xmm2,%xmm4
-	pxor	%xmm0,%xmm3
-	pxor	%xmm2,%xmm4
-.byte	102,15,58,68,194,0
-.byte	102,15,58,68,202,17
-.byte	102,15,58,68,220,0
-	xorps	%xmm0,%xmm3
-	xorps	%xmm1,%xmm3
-	movdqa	%xmm3,%xmm4
-	psrldq	$8,%xmm3
-	pslldq	$8,%xmm4
-	pxor	%xmm3,%xmm1
-	pxor	%xmm4,%xmm0
-	movdqa	%xmm0,%xmm3
-	psllq	$1,%xmm0
-	pxor	%xmm3,%xmm0
-	psllq	$5,%xmm0
-	pxor	%xmm3,%xmm0
-	psllq	$57,%xmm0
-	movdqa	%xmm0,%xmm4
-	pslldq	$8,%xmm0
-	psrldq	$8,%xmm4
-	pxor	%xmm3,%xmm0
-	pxor	%xmm4,%xmm1
-	movdqa	%xmm0,%xmm4
-	psrlq	$5,%xmm0
-	pxor	%xmm4,%xmm0
-	psrlq	$1,%xmm0
-	pxor	%xmm4,%xmm0
-	pxor	%xmm1,%xmm4
-	psrlq	$1,%xmm0
-	pxor	%xmm4,%xmm0
-	movdqu	%xmm2,(%edx)
-	movdqu	%xmm0,16(%edx)
-	ret
-.size	gcm_init_clmul,.-.L_gcm_init_clmul_begin
-.globl	gcm_gmult_clmul
-.type	gcm_gmult_clmul,@function
-.align	16
-gcm_gmult_clmul:
-.L_gcm_gmult_clmul_begin:
-	movl	4(%esp),%eax
-	movl	8(%esp),%edx
-	call	.L011pic
-.L011pic:
-	popl	%ecx
-	leal	.Lbswap-.L011pic(%ecx),%ecx
-	movdqu	(%eax),%xmm0
-	movdqa	(%ecx),%xmm5
-	movups	(%edx),%xmm2
-.byte	102,15,56,0,197
-	movdqa	%xmm0,%xmm1
-	pshufd	$78,%xmm0,%xmm3
-	pshufd	$78,%xmm2,%xmm4
-	pxor	%xmm0,%xmm3
-	pxor	%xmm2,%xmm4
-.byte	102,15,58,68,194,0
-.byte	102,15,58,68,202,17
-.byte	102,15,58,68,220,0
-	xorps	%xmm0,%xmm3
-	xorps	%xmm1,%xmm3
-	movdqa	%xmm3,%xmm4
-	psrldq	$8,%xmm3
-	pslldq	$8,%xmm4
-	pxor	%xmm3,%xmm1
-	pxor	%xmm4,%xmm0
-	movdqa	%xmm0,%xmm3
-	psllq	$1,%xmm0
-	pxor	%xmm3,%xmm0
-	psllq	$5,%xmm0
-	pxor	%xmm3,%xmm0
-	psllq	$57,%xmm0
-	movdqa	%xmm0,%xmm4
-	pslldq	$8,%xmm0
-	psrldq	$8,%xmm4
-	pxor	%xmm3,%xmm0
-	pxor	%xmm4,%xmm1
-	movdqa	%xmm0,%xmm4
-	psrlq	$5,%xmm0
-	pxor	%xmm4,%xmm0
-	psrlq	$1,%xmm0
-	pxor	%xmm4,%xmm0
-	pxor	%xmm1,%xmm4
-	psrlq	$1,%xmm0
-	pxor	%xmm4,%xmm0
-.byte	102,15,56,0,197
-	movdqu	%xmm0,(%eax)
-	ret
-.size	gcm_gmult_clmul,.-.L_gcm_gmult_clmul_begin
-.globl	gcm_ghash_clmul
-.type	gcm_ghash_clmul,@function
-.align	16
-gcm_ghash_clmul:
-.L_gcm_ghash_clmul_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%eax
-	movl	24(%esp),%edx
-	movl	28(%esp),%esi
-	movl	32(%esp),%ebx
-	call	.L012pic
-.L012pic:
-	popl	%ecx
-	leal	.Lbswap-.L012pic(%ecx),%ecx
-	movdqu	(%eax),%xmm0
-	movdqa	(%ecx),%xmm5
-	movdqu	(%edx),%xmm2
-.byte	102,15,56,0,197
-	subl	$16,%ebx
-	jz	.L013odd_tail
-	movdqu	(%esi),%xmm3
-	movdqu	16(%esi),%xmm6
-.byte	102,15,56,0,221
-.byte	102,15,56,0,245
-	pxor	%xmm3,%xmm0
-	movdqa	%xmm6,%xmm7
-	pshufd	$78,%xmm6,%xmm3
-	pshufd	$78,%xmm2,%xmm4
-	pxor	%xmm6,%xmm3
-	pxor	%xmm2,%xmm4
-.byte	102,15,58,68,242,0
-.byte	102,15,58,68,250,17
-.byte	102,15,58,68,220,0
-	xorps	%xmm6,%xmm3
-	xorps	%xmm7,%xmm3
-	movdqa	%xmm3,%xmm4
-	psrldq	$8,%xmm3
-	pslldq	$8,%xmm4
-	pxor	%xmm3,%xmm7
-	pxor	%xmm4,%xmm6
-	movups	16(%edx),%xmm2
-	leal	32(%esi),%esi
-	subl	$32,%ebx
-	jbe	.L014even_tail
-.L015mod_loop:
-	movdqa	%xmm0,%xmm1
-	pshufd	$78,%xmm0,%xmm3
-	pshufd	$78,%xmm2,%xmm4
-	pxor	%xmm0,%xmm3
-	pxor	%xmm2,%xmm4
-.byte	102,15,58,68,194,0
-.byte	102,15,58,68,202,17
-.byte	102,15,58,68,220,0
-	xorps	%xmm0,%xmm3
-	xorps	%xmm1,%xmm3
-	movdqa	%xmm3,%xmm4
-	psrldq	$8,%xmm3
-	pslldq	$8,%xmm4
-	pxor	%xmm3,%xmm1
-	pxor	%xmm4,%xmm0
-	movdqu	(%esi),%xmm3
-	movups	(%edx),%xmm2
-	pxor	%xmm6,%xmm0
-	pxor	%xmm7,%xmm1
-	movdqu	16(%esi),%xmm6
-.byte	102,15,56,0,221
-.byte	102,15,56,0,245
-	movdqa	%xmm6,%xmm5
-	movdqa	%xmm6,%xmm7
-	pxor	%xmm3,%xmm1
-	movdqa	%xmm0,%xmm3
-	psllq	$1,%xmm0
-	pxor	%xmm3,%xmm0
-	psllq	$5,%xmm0
-	pxor	%xmm3,%xmm0
-.byte	102,15,58,68,242,0
-	psllq	$57,%xmm0
-	movdqa	%xmm0,%xmm4
-	pslldq	$8,%xmm0
-	psrldq	$8,%xmm4
-	pxor	%xmm3,%xmm0
-	pshufd	$78,%xmm5,%xmm3
-	pxor	%xmm4,%xmm1
-	pxor	%xmm5,%xmm3
-	pshufd	$78,%xmm2,%xmm5
-	pxor	%xmm2,%xmm5
-.byte	102,15,58,68,250,17
-	movdqa	%xmm0,%xmm4
-	psrlq	$5,%xmm0
-	pxor	%xmm4,%xmm0
-	psrlq	$1,%xmm0
-	pxor	%xmm4,%xmm0
-	pxor	%xmm1,%xmm4
-	psrlq	$1,%xmm0
-	pxor	%xmm4,%xmm0
-.byte	102,15,58,68,221,0
-	movups	16(%edx),%xmm2
-	xorps	%xmm6,%xmm3
-	xorps	%xmm7,%xmm3
-	movdqa	%xmm3,%xmm5
-	psrldq	$8,%xmm3
-	pslldq	$8,%xmm5
-	pxor	%xmm3,%xmm7
-	pxor	%xmm5,%xmm6
-	movdqa	(%ecx),%xmm5
-	leal	32(%esi),%esi
-	subl	$32,%ebx
-	ja	.L015mod_loop
-.L014even_tail:
-	movdqa	%xmm0,%xmm1
-	pshufd	$78,%xmm0,%xmm3
-	pshufd	$78,%xmm2,%xmm4
-	pxor	%xmm0,%xmm3
-	pxor	%xmm2,%xmm4
-.byte	102,15,58,68,194,0
-.byte	102,15,58,68,202,17
-.byte	102,15,58,68,220,0
-	xorps	%xmm0,%xmm3
-	xorps	%xmm1,%xmm3
-	movdqa	%xmm3,%xmm4
-	psrldq	$8,%xmm3
-	pslldq	$8,%xmm4
-	pxor	%xmm3,%xmm1
-	pxor	%xmm4,%xmm0
-	pxor	%xmm6,%xmm0
-	pxor	%xmm7,%xmm1
-	movdqa	%xmm0,%xmm3
-	psllq	$1,%xmm0
-	pxor	%xmm3,%xmm0
-	psllq	$5,%xmm0
-	pxor	%xmm3,%xmm0
-	psllq	$57,%xmm0
-	movdqa	%xmm0,%xmm4
-	pslldq	$8,%xmm0
-	psrldq	$8,%xmm4
-	pxor	%xmm3,%xmm0
-	pxor	%xmm4,%xmm1
-	movdqa	%xmm0,%xmm4
-	psrlq	$5,%xmm0
-	pxor	%xmm4,%xmm0
-	psrlq	$1,%xmm0
-	pxor	%xmm4,%xmm0
-	pxor	%xmm1,%xmm4
-	psrlq	$1,%xmm0
-	pxor	%xmm4,%xmm0
-	testl	%ebx,%ebx
-	jnz	.L016done
-	movups	(%edx),%xmm2
-.L013odd_tail:
-	movdqu	(%esi),%xmm3
-.byte	102,15,56,0,221
-	pxor	%xmm3,%xmm0
-	movdqa	%xmm0,%xmm1
-	pshufd	$78,%xmm0,%xmm3
-	pshufd	$78,%xmm2,%xmm4
-	pxor	%xmm0,%xmm3
-	pxor	%xmm2,%xmm4
-.byte	102,15,58,68,194,0
-.byte	102,15,58,68,202,17
-.byte	102,15,58,68,220,0
-	xorps	%xmm0,%xmm3
-	xorps	%xmm1,%xmm3
-	movdqa	%xmm3,%xmm4
-	psrldq	$8,%xmm3
-	pslldq	$8,%xmm4
-	pxor	%xmm3,%xmm1
-	pxor	%xmm4,%xmm0
-	movdqa	%xmm0,%xmm3
-	psllq	$1,%xmm0
-	pxor	%xmm3,%xmm0
-	psllq	$5,%xmm0
-	pxor	%xmm3,%xmm0
-	psllq	$57,%xmm0
-	movdqa	%xmm0,%xmm4
-	pslldq	$8,%xmm0
-	psrldq	$8,%xmm4
-	pxor	%xmm3,%xmm0
-	pxor	%xmm4,%xmm1
-	movdqa	%xmm0,%xmm4
-	psrlq	$5,%xmm0
-	pxor	%xmm4,%xmm0
-	psrlq	$1,%xmm0
-	pxor	%xmm4,%xmm0
-	pxor	%xmm1,%xmm4
-	psrlq	$1,%xmm0
-	pxor	%xmm4,%xmm0
-.L016done:
-.byte	102,15,56,0,197
-	movdqu	%xmm0,(%eax)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	gcm_ghash_clmul,.-.L_gcm_ghash_clmul_begin
-.align	64
-.Lbswap:
-.byte	15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
-.byte	1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,194
-.align	64
-.Lrem_4bit:
-.long	0,0,0,471859200,0,943718400,0,610271232
-.long	0,1887436800,0,1822425088,0,1220542464,0,1423966208
-.long	0,3774873600,0,4246732800,0,3644850176,0,3311403008
-.long	0,2441084928,0,2376073216,0,2847932416,0,3051356160
-.align	64
-.Lrem_8bit:
-.value	0,450,900,582,1800,1738,1164,1358
-.value	3600,4050,3476,3158,2328,2266,2716,2910
-.value	7200,7650,8100,7782,6952,6890,6316,6510
-.value	4656,5106,4532,4214,5432,5370,5820,6014
-.value	14400,14722,15300,14854,16200,16010,15564,15630
-.value	13904,14226,13780,13334,12632,12442,13020,13086
-.value	9312,9634,10212,9766,9064,8874,8428,8494
-.value	10864,11186,10740,10294,11640,11450,12028,12094
-.value	28800,28994,29444,29382,30600,30282,29708,30158
-.value	32400,32594,32020,31958,31128,30810,31260,31710
-.value	27808,28002,28452,28390,27560,27242,26668,27118
-.value	25264,25458,24884,24822,26040,25722,26172,26622
-.value	18624,18690,19268,19078,20424,19978,19532,19854
-.value	18128,18194,17748,17558,16856,16410,16988,17310
-.value	21728,21794,22372,22182,21480,21034,20588,20910
-.value	23280,23346,22900,22710,24056,23610,24188,24510
-.value	57600,57538,57988,58182,58888,59338,58764,58446
-.value	61200,61138,60564,60758,59416,59866,60316,59998
-.value	64800,64738,65188,65382,64040,64490,63916,63598
-.value	62256,62194,61620,61814,62520,62970,63420,63102
-.value	55616,55426,56004,56070,56904,57226,56780,56334
-.value	55120,54930,54484,54550,53336,53658,54236,53790
-.value	50528,50338,50916,50982,49768,50090,49644,49198
-.value	52080,51890,51444,51510,52344,52666,53244,52798
-.value	37248,36930,37380,37830,38536,38730,38156,38094
-.value	40848,40530,39956,40406,39064,39258,39708,39646
-.value	36256,35938,36388,36838,35496,35690,35116,35054
-.value	33712,33394,32820,33270,33976,34170,34620,34558
-.value	43456,43010,43588,43910,44744,44810,44364,44174
-.value	42960,42514,42068,42390,41176,41242,41820,41630
-.value	46560,46114,46692,47014,45800,45866,45420,45230
-.value	48112,47666,47220,47542,48376,48442,49020,48830
-.byte	71,72,65,83,72,32,102,111,114,32,120,56,54,44,32,67
-.byte	82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112
-.byte	112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62
-.byte	0

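The ghash-x86.s removed above (and the ghash-x86.S that replaces it) carries both the table-driven 4-bit MMX GHASH, whose .Lrem_4bit/.Lrem_8bit reduction tables appear at the end of the hunk, and a PCLMULQDQ path; the .byte 102,15,58,68,... sequences hand-encode the pclmulqdq instruction (opcode 66 0F 3A 44) for assemblers that predate its mnemonic. A minimal C sketch of the 64x64->128-bit carry-less multiply that a single pclmulqdq lane computes follows; the name clmul64 is illustrative, not part of the source.

	#include <stdint.h>

	/* Carry-less (GF(2)) 64x64 -> 128-bit multiply: the primitive one
	 * pclmulqdq lane implements.  No carries propagate, so partial
	 * products are combined with XOR instead of addition. */
	static void clmul64(uint64_t a, uint64_t b, uint64_t *hi, uint64_t *lo)
	{
		uint64_t h = 0, l = 0;
		int i;

		for (i = 0; i < 64; i++)
			if ((b >> i) & 1) {
				l ^= a << i;		/* low half of a << i */
				if (i)
					h ^= a >> (64 - i);	/* bits shifted past 2^64 */
			}
		*hi = h;
		*lo = l;
	}

GHASH chains this primitive: each 16-byte block is XORed into the accumulator, the accumulator is carry-less-multiplied by the hash key H, and the 256-bit product is reduced modulo x^128 + x^7 + x^2 + x + 1, which is what the psllq/psrlq shift-and-XOR runs in the diff implement.
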
Added: trunk/secure/lib/libcrypto/i386/md5-586.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/md5-586.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/md5-586.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,1364 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/md5-586.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from md5-586.pl.
+#ifdef PIC
+.file	"md5-586.S"
+.text
+.globl	md5_block_asm_data_order
+.type	md5_block_asm_data_order,@function
+.align	16
+md5_block_asm_data_order:
+.L_md5_block_asm_data_order_begin:
+	pushl	%esi
+	pushl	%edi
+	movl	12(%esp),%edi
+	movl	16(%esp),%esi
+	movl	20(%esp),%ecx
+	pushl	%ebp
+	shll	$6,%ecx
+	pushl	%ebx
+	addl	%esi,%ecx
+	subl	$64,%ecx
+	movl	(%edi),%eax
+	pushl	%ecx
+	movl	4(%edi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+.L000start:
+
+
+	movl	%ecx,%edi
+	movl	(%esi),%ebp
+
+	xorl	%edx,%edi
+	andl	%ebx,%edi
+	leal	3614090360(%eax,%ebp,1),%eax
+	xorl	%edx,%edi
+	addl	%edi,%eax
+	movl	%ebx,%edi
+	roll	$7,%eax
+	movl	4(%esi),%ebp
+	addl	%ebx,%eax
+
+	xorl	%ecx,%edi
+	andl	%eax,%edi
+	leal	3905402710(%edx,%ebp,1),%edx
+	xorl	%ecx,%edi
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$12,%edx
+	movl	8(%esi),%ebp
+	addl	%eax,%edx
+
+	xorl	%ebx,%edi
+	andl	%edx,%edi
+	leal	606105819(%ecx,%ebp,1),%ecx
+	xorl	%ebx,%edi
+	addl	%edi,%ecx
+	movl	%edx,%edi
+	roll	$17,%ecx
+	movl	12(%esi),%ebp
+	addl	%edx,%ecx
+
+	xorl	%eax,%edi
+	andl	%ecx,%edi
+	leal	3250441966(%ebx,%ebp,1),%ebx
+	xorl	%eax,%edi
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$22,%ebx
+	movl	16(%esi),%ebp
+	addl	%ecx,%ebx
+
+	xorl	%edx,%edi
+	andl	%ebx,%edi
+	leal	4118548399(%eax,%ebp,1),%eax
+	xorl	%edx,%edi
+	addl	%edi,%eax
+	movl	%ebx,%edi
+	roll	$7,%eax
+	movl	20(%esi),%ebp
+	addl	%ebx,%eax
+
+	xorl	%ecx,%edi
+	andl	%eax,%edi
+	leal	1200080426(%edx,%ebp,1),%edx
+	xorl	%ecx,%edi
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$12,%edx
+	movl	24(%esi),%ebp
+	addl	%eax,%edx
+
+	xorl	%ebx,%edi
+	andl	%edx,%edi
+	leal	2821735955(%ecx,%ebp,1),%ecx
+	xorl	%ebx,%edi
+	addl	%edi,%ecx
+	movl	%edx,%edi
+	roll	$17,%ecx
+	movl	28(%esi),%ebp
+	addl	%edx,%ecx
+
+	xorl	%eax,%edi
+	andl	%ecx,%edi
+	leal	4249261313(%ebx,%ebp,1),%ebx
+	xorl	%eax,%edi
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$22,%ebx
+	movl	32(%esi),%ebp
+	addl	%ecx,%ebx
+
+	xorl	%edx,%edi
+	andl	%ebx,%edi
+	leal	1770035416(%eax,%ebp,1),%eax
+	xorl	%edx,%edi
+	addl	%edi,%eax
+	movl	%ebx,%edi
+	roll	$7,%eax
+	movl	36(%esi),%ebp
+	addl	%ebx,%eax
+
+	xorl	%ecx,%edi
+	andl	%eax,%edi
+	leal	2336552879(%edx,%ebp,1),%edx
+	xorl	%ecx,%edi
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$12,%edx
+	movl	40(%esi),%ebp
+	addl	%eax,%edx
+
+	xorl	%ebx,%edi
+	andl	%edx,%edi
+	leal	4294925233(%ecx,%ebp,1),%ecx
+	xorl	%ebx,%edi
+	addl	%edi,%ecx
+	movl	%edx,%edi
+	roll	$17,%ecx
+	movl	44(%esi),%ebp
+	addl	%edx,%ecx
+
+	xorl	%eax,%edi
+	andl	%ecx,%edi
+	leal	2304563134(%ebx,%ebp,1),%ebx
+	xorl	%eax,%edi
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$22,%ebx
+	movl	48(%esi),%ebp
+	addl	%ecx,%ebx
+
+	xorl	%edx,%edi
+	andl	%ebx,%edi
+	leal	1804603682(%eax,%ebp,1),%eax
+	xorl	%edx,%edi
+	addl	%edi,%eax
+	movl	%ebx,%edi
+	roll	$7,%eax
+	movl	52(%esi),%ebp
+	addl	%ebx,%eax
+
+	xorl	%ecx,%edi
+	andl	%eax,%edi
+	leal	4254626195(%edx,%ebp,1),%edx
+	xorl	%ecx,%edi
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$12,%edx
+	movl	56(%esi),%ebp
+	addl	%eax,%edx
+
+	xorl	%ebx,%edi
+	andl	%edx,%edi
+	leal	2792965006(%ecx,%ebp,1),%ecx
+	xorl	%ebx,%edi
+	addl	%edi,%ecx
+	movl	%edx,%edi
+	roll	$17,%ecx
+	movl	60(%esi),%ebp
+	addl	%edx,%ecx
+
+	xorl	%eax,%edi
+	andl	%ecx,%edi
+	leal	1236535329(%ebx,%ebp,1),%ebx
+	xorl	%eax,%edi
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$22,%ebx
+	movl	4(%esi),%ebp
+	addl	%ecx,%ebx
+
+
+
+	leal	4129170786(%eax,%ebp,1),%eax
+	xorl	%ebx,%edi
+	andl	%edx,%edi
+	movl	24(%esi),%ebp
+	xorl	%ecx,%edi
+	addl	%edi,%eax
+	movl	%ebx,%edi
+	roll	$5,%eax
+	addl	%ebx,%eax
+
+	leal	3225465664(%edx,%ebp,1),%edx
+	xorl	%eax,%edi
+	andl	%ecx,%edi
+	movl	44(%esi),%ebp
+	xorl	%ebx,%edi
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$9,%edx
+	addl	%eax,%edx
+
+	leal	643717713(%ecx,%ebp,1),%ecx
+	xorl	%edx,%edi
+	andl	%ebx,%edi
+	movl	(%esi),%ebp
+	xorl	%eax,%edi
+	addl	%edi,%ecx
+	movl	%edx,%edi
+	roll	$14,%ecx
+	addl	%edx,%ecx
+
+	leal	3921069994(%ebx,%ebp,1),%ebx
+	xorl	%ecx,%edi
+	andl	%eax,%edi
+	movl	20(%esi),%ebp
+	xorl	%edx,%edi
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$20,%ebx
+	addl	%ecx,%ebx
+
+	leal	3593408605(%eax,%ebp,1),%eax
+	xorl	%ebx,%edi
+	andl	%edx,%edi
+	movl	40(%esi),%ebp
+	xorl	%ecx,%edi
+	addl	%edi,%eax
+	movl	%ebx,%edi
+	roll	$5,%eax
+	addl	%ebx,%eax
+
+	leal	38016083(%edx,%ebp,1),%edx
+	xorl	%eax,%edi
+	andl	%ecx,%edi
+	movl	60(%esi),%ebp
+	xorl	%ebx,%edi
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$9,%edx
+	addl	%eax,%edx
+
+	leal	3634488961(%ecx,%ebp,1),%ecx
+	xorl	%edx,%edi
+	andl	%ebx,%edi
+	movl	16(%esi),%ebp
+	xorl	%eax,%edi
+	addl	%edi,%ecx
+	movl	%edx,%edi
+	roll	$14,%ecx
+	addl	%edx,%ecx
+
+	leal	3889429448(%ebx,%ebp,1),%ebx
+	xorl	%ecx,%edi
+	andl	%eax,%edi
+	movl	36(%esi),%ebp
+	xorl	%edx,%edi
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$20,%ebx
+	addl	%ecx,%ebx
+
+	leal	568446438(%eax,%ebp,1),%eax
+	xorl	%ebx,%edi
+	andl	%edx,%edi
+	movl	56(%esi),%ebp
+	xorl	%ecx,%edi
+	addl	%edi,%eax
+	movl	%ebx,%edi
+	roll	$5,%eax
+	addl	%ebx,%eax
+
+	leal	3275163606(%edx,%ebp,1),%edx
+	xorl	%eax,%edi
+	andl	%ecx,%edi
+	movl	12(%esi),%ebp
+	xorl	%ebx,%edi
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$9,%edx
+	addl	%eax,%edx
+
+	leal	4107603335(%ecx,%ebp,1),%ecx
+	xorl	%edx,%edi
+	andl	%ebx,%edi
+	movl	32(%esi),%ebp
+	xorl	%eax,%edi
+	addl	%edi,%ecx
+	movl	%edx,%edi
+	roll	$14,%ecx
+	addl	%edx,%ecx
+
+	leal	1163531501(%ebx,%ebp,1),%ebx
+	xorl	%ecx,%edi
+	andl	%eax,%edi
+	movl	52(%esi),%ebp
+	xorl	%edx,%edi
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$20,%ebx
+	addl	%ecx,%ebx
+
+	leal	2850285829(%eax,%ebp,1),%eax
+	xorl	%ebx,%edi
+	andl	%edx,%edi
+	movl	8(%esi),%ebp
+	xorl	%ecx,%edi
+	addl	%edi,%eax
+	movl	%ebx,%edi
+	roll	$5,%eax
+	addl	%ebx,%eax
+
+	leal	4243563512(%edx,%ebp,1),%edx
+	xorl	%eax,%edi
+	andl	%ecx,%edi
+	movl	28(%esi),%ebp
+	xorl	%ebx,%edi
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$9,%edx
+	addl	%eax,%edx
+
+	leal	1735328473(%ecx,%ebp,1),%ecx
+	xorl	%edx,%edi
+	andl	%ebx,%edi
+	movl	48(%esi),%ebp
+	xorl	%eax,%edi
+	addl	%edi,%ecx
+	movl	%edx,%edi
+	roll	$14,%ecx
+	addl	%edx,%ecx
+
+	leal	2368359562(%ebx,%ebp,1),%ebx
+	xorl	%ecx,%edi
+	andl	%eax,%edi
+	movl	20(%esi),%ebp
+	xorl	%edx,%edi
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$20,%ebx
+	addl	%ecx,%ebx
+
+
+
+	xorl	%edx,%edi
+	xorl	%ebx,%edi
+	leal	4294588738(%eax,%ebp,1),%eax
+	addl	%edi,%eax
+	roll	$4,%eax
+	movl	32(%esi),%ebp
+	movl	%ebx,%edi
+
+	leal	2272392833(%edx,%ebp,1),%edx
+	addl	%ebx,%eax
+	xorl	%ecx,%edi
+	xorl	%eax,%edi
+	movl	44(%esi),%ebp
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$11,%edx
+	addl	%eax,%edx
+
+	xorl	%ebx,%edi
+	xorl	%edx,%edi
+	leal	1839030562(%ecx,%ebp,1),%ecx
+	addl	%edi,%ecx
+	roll	$16,%ecx
+	movl	56(%esi),%ebp
+	movl	%edx,%edi
+
+	leal	4259657740(%ebx,%ebp,1),%ebx
+	addl	%edx,%ecx
+	xorl	%eax,%edi
+	xorl	%ecx,%edi
+	movl	4(%esi),%ebp
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$23,%ebx
+	addl	%ecx,%ebx
+
+	xorl	%edx,%edi
+	xorl	%ebx,%edi
+	leal	2763975236(%eax,%ebp,1),%eax
+	addl	%edi,%eax
+	roll	$4,%eax
+	movl	16(%esi),%ebp
+	movl	%ebx,%edi
+
+	leal	1272893353(%edx,%ebp,1),%edx
+	addl	%ebx,%eax
+	xorl	%ecx,%edi
+	xorl	%eax,%edi
+	movl	28(%esi),%ebp
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$11,%edx
+	addl	%eax,%edx
+
+	xorl	%ebx,%edi
+	xorl	%edx,%edi
+	leal	4139469664(%ecx,%ebp,1),%ecx
+	addl	%edi,%ecx
+	roll	$16,%ecx
+	movl	40(%esi),%ebp
+	movl	%edx,%edi
+
+	leal	3200236656(%ebx,%ebp,1),%ebx
+	addl	%edx,%ecx
+	xorl	%eax,%edi
+	xorl	%ecx,%edi
+	movl	52(%esi),%ebp
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$23,%ebx
+	addl	%ecx,%ebx
+
+	xorl	%edx,%edi
+	xorl	%ebx,%edi
+	leal	681279174(%eax,%ebp,1),%eax
+	addl	%edi,%eax
+	roll	$4,%eax
+	movl	(%esi),%ebp
+	movl	%ebx,%edi
+
+	leal	3936430074(%edx,%ebp,1),%edx
+	addl	%ebx,%eax
+	xorl	%ecx,%edi
+	xorl	%eax,%edi
+	movl	12(%esi),%ebp
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$11,%edx
+	addl	%eax,%edx
+
+	xorl	%ebx,%edi
+	xorl	%edx,%edi
+	leal	3572445317(%ecx,%ebp,1),%ecx
+	addl	%edi,%ecx
+	roll	$16,%ecx
+	movl	24(%esi),%ebp
+	movl	%edx,%edi
+
+	leal	76029189(%ebx,%ebp,1),%ebx
+	addl	%edx,%ecx
+	xorl	%eax,%edi
+	xorl	%ecx,%edi
+	movl	36(%esi),%ebp
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$23,%ebx
+	addl	%ecx,%ebx
+
+	xorl	%edx,%edi
+	xorl	%ebx,%edi
+	leal	3654602809(%eax,%ebp,1),%eax
+	addl	%edi,%eax
+	roll	$4,%eax
+	movl	48(%esi),%ebp
+	movl	%ebx,%edi
+
+	leal	3873151461(%edx,%ebp,1),%edx
+	addl	%ebx,%eax
+	xorl	%ecx,%edi
+	xorl	%eax,%edi
+	movl	60(%esi),%ebp
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$11,%edx
+	addl	%eax,%edx
+
+	xorl	%ebx,%edi
+	xorl	%edx,%edi
+	leal	530742520(%ecx,%ebp,1),%ecx
+	addl	%edi,%ecx
+	roll	$16,%ecx
+	movl	8(%esi),%ebp
+	movl	%edx,%edi
+
+	leal	3299628645(%ebx,%ebp,1),%ebx
+	addl	%edx,%ecx
+	xorl	%eax,%edi
+	xorl	%ecx,%edi
+	movl	(%esi),%ebp
+	addl	%edi,%ebx
+	movl	$-1,%edi
+	roll	$23,%ebx
+	addl	%ecx,%ebx
+
+
+
+	xorl	%edx,%edi
+	orl	%ebx,%edi
+	leal	4096336452(%eax,%ebp,1),%eax
+	xorl	%ecx,%edi
+	movl	28(%esi),%ebp
+	addl	%edi,%eax
+	movl	$-1,%edi
+	roll	$6,%eax
+	xorl	%ecx,%edi
+	addl	%ebx,%eax
+
+	orl	%eax,%edi
+	leal	1126891415(%edx,%ebp,1),%edx
+	xorl	%ebx,%edi
+	movl	56(%esi),%ebp
+	addl	%edi,%edx
+	movl	$-1,%edi
+	roll	$10,%edx
+	xorl	%ebx,%edi
+	addl	%eax,%edx
+
+	orl	%edx,%edi
+	leal	2878612391(%ecx,%ebp,1),%ecx
+	xorl	%eax,%edi
+	movl	20(%esi),%ebp
+	addl	%edi,%ecx
+	movl	$-1,%edi
+	roll	$15,%ecx
+	xorl	%eax,%edi
+	addl	%edx,%ecx
+
+	orl	%ecx,%edi
+	leal	4237533241(%ebx,%ebp,1),%ebx
+	xorl	%edx,%edi
+	movl	48(%esi),%ebp
+	addl	%edi,%ebx
+	movl	$-1,%edi
+	roll	$21,%ebx
+	xorl	%edx,%edi
+	addl	%ecx,%ebx
+
+	orl	%ebx,%edi
+	leal	1700485571(%eax,%ebp,1),%eax
+	xorl	%ecx,%edi
+	movl	12(%esi),%ebp
+	addl	%edi,%eax
+	movl	$-1,%edi
+	roll	$6,%eax
+	xorl	%ecx,%edi
+	addl	%ebx,%eax
+
+	orl	%eax,%edi
+	leal	2399980690(%edx,%ebp,1),%edx
+	xorl	%ebx,%edi
+	movl	40(%esi),%ebp
+	addl	%edi,%edx
+	movl	$-1,%edi
+	roll	$10,%edx
+	xorl	%ebx,%edi
+	addl	%eax,%edx
+
+	orl	%edx,%edi
+	leal	4293915773(%ecx,%ebp,1),%ecx
+	xorl	%eax,%edi
+	movl	4(%esi),%ebp
+	addl	%edi,%ecx
+	movl	$-1,%edi
+	roll	$15,%ecx
+	xorl	%eax,%edi
+	addl	%edx,%ecx
+
+	orl	%ecx,%edi
+	leal	2240044497(%ebx,%ebp,1),%ebx
+	xorl	%edx,%edi
+	movl	32(%esi),%ebp
+	addl	%edi,%ebx
+	movl	$-1,%edi
+	roll	$21,%ebx
+	xorl	%edx,%edi
+	addl	%ecx,%ebx
+
+	orl	%ebx,%edi
+	leal	1873313359(%eax,%ebp,1),%eax
+	xorl	%ecx,%edi
+	movl	60(%esi),%ebp
+	addl	%edi,%eax
+	movl	$-1,%edi
+	roll	$6,%eax
+	xorl	%ecx,%edi
+	addl	%ebx,%eax
+
+	orl	%eax,%edi
+	leal	4264355552(%edx,%ebp,1),%edx
+	xorl	%ebx,%edi
+	movl	24(%esi),%ebp
+	addl	%edi,%edx
+	movl	$-1,%edi
+	roll	$10,%edx
+	xorl	%ebx,%edi
+	addl	%eax,%edx
+
+	orl	%edx,%edi
+	leal	2734768916(%ecx,%ebp,1),%ecx
+	xorl	%eax,%edi
+	movl	52(%esi),%ebp
+	addl	%edi,%ecx
+	movl	$-1,%edi
+	roll	$15,%ecx
+	xorl	%eax,%edi
+	addl	%edx,%ecx
+
+	orl	%ecx,%edi
+	leal	1309151649(%ebx,%ebp,1),%ebx
+	xorl	%edx,%edi
+	movl	16(%esi),%ebp
+	addl	%edi,%ebx
+	movl	$-1,%edi
+	roll	$21,%ebx
+	xorl	%edx,%edi
+	addl	%ecx,%ebx
+
+	orl	%ebx,%edi
+	leal	4149444226(%eax,%ebp,1),%eax
+	xorl	%ecx,%edi
+	movl	44(%esi),%ebp
+	addl	%edi,%eax
+	movl	$-1,%edi
+	roll	$6,%eax
+	xorl	%ecx,%edi
+	addl	%ebx,%eax
+
+	orl	%eax,%edi
+	leal	3174756917(%edx,%ebp,1),%edx
+	xorl	%ebx,%edi
+	movl	8(%esi),%ebp
+	addl	%edi,%edx
+	movl	$-1,%edi
+	roll	$10,%edx
+	xorl	%ebx,%edi
+	addl	%eax,%edx
+
+	orl	%edx,%edi
+	leal	718787259(%ecx,%ebp,1),%ecx
+	xorl	%eax,%edi
+	movl	36(%esi),%ebp
+	addl	%edi,%ecx
+	movl	$-1,%edi
+	roll	$15,%ecx
+	xorl	%eax,%edi
+	addl	%edx,%ecx
+
+	orl	%ecx,%edi
+	leal	3951481745(%ebx,%ebp,1),%ebx
+	xorl	%edx,%edi
+	movl	24(%esp),%ebp
+	addl	%edi,%ebx
+	addl	$64,%esi
+	roll	$21,%ebx
+	movl	(%ebp),%edi
+	addl	%ecx,%ebx
+	addl	%edi,%eax
+	movl	4(%ebp),%edi
+	addl	%edi,%ebx
+	movl	8(%ebp),%edi
+	addl	%edi,%ecx
+	movl	12(%ebp),%edi
+	addl	%edi,%edx
+	movl	%eax,(%ebp)
+	movl	%ebx,4(%ebp)
+	movl	(%esp),%edi
+	movl	%ecx,8(%ebp)
+	movl	%edx,12(%ebp)
+	cmpl	%esi,%edi
+	jae	.L000start
+	popl	%eax
+	popl	%ebx
+	popl	%ebp
+	popl	%edi
+	popl	%esi
+	ret
+.size	md5_block_asm_data_order,.-.L_md5_block_asm_data_order_begin
+#else
+.file	"md5-586.S"
+.text
+.globl	md5_block_asm_data_order
+.type	md5_block_asm_data_order,@function
+.align	16
+md5_block_asm_data_order:
+.L_md5_block_asm_data_order_begin:
+	pushl	%esi
+	pushl	%edi
+	movl	12(%esp),%edi
+	movl	16(%esp),%esi
+	movl	20(%esp),%ecx
+	pushl	%ebp
+	shll	$6,%ecx
+	pushl	%ebx
+	addl	%esi,%ecx
+	subl	$64,%ecx
+	movl	(%edi),%eax
+	pushl	%ecx
+	movl	4(%edi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+.L000start:
+
+
+	movl	%ecx,%edi
+	movl	(%esi),%ebp
+
+	xorl	%edx,%edi
+	andl	%ebx,%edi
+	leal	3614090360(%eax,%ebp,1),%eax
+	xorl	%edx,%edi
+	addl	%edi,%eax
+	movl	%ebx,%edi
+	roll	$7,%eax
+	movl	4(%esi),%ebp
+	addl	%ebx,%eax
+
+	xorl	%ecx,%edi
+	andl	%eax,%edi
+	leal	3905402710(%edx,%ebp,1),%edx
+	xorl	%ecx,%edi
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$12,%edx
+	movl	8(%esi),%ebp
+	addl	%eax,%edx
+
+	xorl	%ebx,%edi
+	andl	%edx,%edi
+	leal	606105819(%ecx,%ebp,1),%ecx
+	xorl	%ebx,%edi
+	addl	%edi,%ecx
+	movl	%edx,%edi
+	roll	$17,%ecx
+	movl	12(%esi),%ebp
+	addl	%edx,%ecx
+
+	xorl	%eax,%edi
+	andl	%ecx,%edi
+	leal	3250441966(%ebx,%ebp,1),%ebx
+	xorl	%eax,%edi
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$22,%ebx
+	movl	16(%esi),%ebp
+	addl	%ecx,%ebx
+
+	xorl	%edx,%edi
+	andl	%ebx,%edi
+	leal	4118548399(%eax,%ebp,1),%eax
+	xorl	%edx,%edi
+	addl	%edi,%eax
+	movl	%ebx,%edi
+	roll	$7,%eax
+	movl	20(%esi),%ebp
+	addl	%ebx,%eax
+
+	xorl	%ecx,%edi
+	andl	%eax,%edi
+	leal	1200080426(%edx,%ebp,1),%edx
+	xorl	%ecx,%edi
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$12,%edx
+	movl	24(%esi),%ebp
+	addl	%eax,%edx
+
+	xorl	%ebx,%edi
+	andl	%edx,%edi
+	leal	2821735955(%ecx,%ebp,1),%ecx
+	xorl	%ebx,%edi
+	addl	%edi,%ecx
+	movl	%edx,%edi
+	roll	$17,%ecx
+	movl	28(%esi),%ebp
+	addl	%edx,%ecx
+
+	xorl	%eax,%edi
+	andl	%ecx,%edi
+	leal	4249261313(%ebx,%ebp,1),%ebx
+	xorl	%eax,%edi
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$22,%ebx
+	movl	32(%esi),%ebp
+	addl	%ecx,%ebx
+
+	xorl	%edx,%edi
+	andl	%ebx,%edi
+	leal	1770035416(%eax,%ebp,1),%eax
+	xorl	%edx,%edi
+	addl	%edi,%eax
+	movl	%ebx,%edi
+	roll	$7,%eax
+	movl	36(%esi),%ebp
+	addl	%ebx,%eax
+
+	xorl	%ecx,%edi
+	andl	%eax,%edi
+	leal	2336552879(%edx,%ebp,1),%edx
+	xorl	%ecx,%edi
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$12,%edx
+	movl	40(%esi),%ebp
+	addl	%eax,%edx
+
+	xorl	%ebx,%edi
+	andl	%edx,%edi
+	leal	4294925233(%ecx,%ebp,1),%ecx
+	xorl	%ebx,%edi
+	addl	%edi,%ecx
+	movl	%edx,%edi
+	roll	$17,%ecx
+	movl	44(%esi),%ebp
+	addl	%edx,%ecx
+
+	xorl	%eax,%edi
+	andl	%ecx,%edi
+	leal	2304563134(%ebx,%ebp,1),%ebx
+	xorl	%eax,%edi
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$22,%ebx
+	movl	48(%esi),%ebp
+	addl	%ecx,%ebx
+
+	xorl	%edx,%edi
+	andl	%ebx,%edi
+	leal	1804603682(%eax,%ebp,1),%eax
+	xorl	%edx,%edi
+	addl	%edi,%eax
+	movl	%ebx,%edi
+	roll	$7,%eax
+	movl	52(%esi),%ebp
+	addl	%ebx,%eax
+
+	xorl	%ecx,%edi
+	andl	%eax,%edi
+	leal	4254626195(%edx,%ebp,1),%edx
+	xorl	%ecx,%edi
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$12,%edx
+	movl	56(%esi),%ebp
+	addl	%eax,%edx
+
+	xorl	%ebx,%edi
+	andl	%edx,%edi
+	leal	2792965006(%ecx,%ebp,1),%ecx
+	xorl	%ebx,%edi
+	addl	%edi,%ecx
+	movl	%edx,%edi
+	roll	$17,%ecx
+	movl	60(%esi),%ebp
+	addl	%edx,%ecx
+
+	xorl	%eax,%edi
+	andl	%ecx,%edi
+	leal	1236535329(%ebx,%ebp,1),%ebx
+	xorl	%eax,%edi
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$22,%ebx
+	movl	4(%esi),%ebp
+	addl	%ecx,%ebx
+
+
+
+	leal	4129170786(%eax,%ebp,1),%eax
+	xorl	%ebx,%edi
+	andl	%edx,%edi
+	movl	24(%esi),%ebp
+	xorl	%ecx,%edi
+	addl	%edi,%eax
+	movl	%ebx,%edi
+	roll	$5,%eax
+	addl	%ebx,%eax
+
+	leal	3225465664(%edx,%ebp,1),%edx
+	xorl	%eax,%edi
+	andl	%ecx,%edi
+	movl	44(%esi),%ebp
+	xorl	%ebx,%edi
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$9,%edx
+	addl	%eax,%edx
+
+	leal	643717713(%ecx,%ebp,1),%ecx
+	xorl	%edx,%edi
+	andl	%ebx,%edi
+	movl	(%esi),%ebp
+	xorl	%eax,%edi
+	addl	%edi,%ecx
+	movl	%edx,%edi
+	roll	$14,%ecx
+	addl	%edx,%ecx
+
+	leal	3921069994(%ebx,%ebp,1),%ebx
+	xorl	%ecx,%edi
+	andl	%eax,%edi
+	movl	20(%esi),%ebp
+	xorl	%edx,%edi
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$20,%ebx
+	addl	%ecx,%ebx
+
+	leal	3593408605(%eax,%ebp,1),%eax
+	xorl	%ebx,%edi
+	andl	%edx,%edi
+	movl	40(%esi),%ebp
+	xorl	%ecx,%edi
+	addl	%edi,%eax
+	movl	%ebx,%edi
+	roll	$5,%eax
+	addl	%ebx,%eax
+
+	leal	38016083(%edx,%ebp,1),%edx
+	xorl	%eax,%edi
+	andl	%ecx,%edi
+	movl	60(%esi),%ebp
+	xorl	%ebx,%edi
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$9,%edx
+	addl	%eax,%edx
+
+	leal	3634488961(%ecx,%ebp,1),%ecx
+	xorl	%edx,%edi
+	andl	%ebx,%edi
+	movl	16(%esi),%ebp
+	xorl	%eax,%edi
+	addl	%edi,%ecx
+	movl	%edx,%edi
+	roll	$14,%ecx
+	addl	%edx,%ecx
+
+	leal	3889429448(%ebx,%ebp,1),%ebx
+	xorl	%ecx,%edi
+	andl	%eax,%edi
+	movl	36(%esi),%ebp
+	xorl	%edx,%edi
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$20,%ebx
+	addl	%ecx,%ebx
+
+	leal	568446438(%eax,%ebp,1),%eax
+	xorl	%ebx,%edi
+	andl	%edx,%edi
+	movl	56(%esi),%ebp
+	xorl	%ecx,%edi
+	addl	%edi,%eax
+	movl	%ebx,%edi
+	roll	$5,%eax
+	addl	%ebx,%eax
+
+	leal	3275163606(%edx,%ebp,1),%edx
+	xorl	%eax,%edi
+	andl	%ecx,%edi
+	movl	12(%esi),%ebp
+	xorl	%ebx,%edi
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$9,%edx
+	addl	%eax,%edx
+
+	leal	4107603335(%ecx,%ebp,1),%ecx
+	xorl	%edx,%edi
+	andl	%ebx,%edi
+	movl	32(%esi),%ebp
+	xorl	%eax,%edi
+	addl	%edi,%ecx
+	movl	%edx,%edi
+	roll	$14,%ecx
+	addl	%edx,%ecx
+
+	leal	1163531501(%ebx,%ebp,1),%ebx
+	xorl	%ecx,%edi
+	andl	%eax,%edi
+	movl	52(%esi),%ebp
+	xorl	%edx,%edi
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$20,%ebx
+	addl	%ecx,%ebx
+
+	leal	2850285829(%eax,%ebp,1),%eax
+	xorl	%ebx,%edi
+	andl	%edx,%edi
+	movl	8(%esi),%ebp
+	xorl	%ecx,%edi
+	addl	%edi,%eax
+	movl	%ebx,%edi
+	roll	$5,%eax
+	addl	%ebx,%eax
+
+	leal	4243563512(%edx,%ebp,1),%edx
+	xorl	%eax,%edi
+	andl	%ecx,%edi
+	movl	28(%esi),%ebp
+	xorl	%ebx,%edi
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$9,%edx
+	addl	%eax,%edx
+
+	leal	1735328473(%ecx,%ebp,1),%ecx
+	xorl	%edx,%edi
+	andl	%ebx,%edi
+	movl	48(%esi),%ebp
+	xorl	%eax,%edi
+	addl	%edi,%ecx
+	movl	%edx,%edi
+	roll	$14,%ecx
+	addl	%edx,%ecx
+
+	leal	2368359562(%ebx,%ebp,1),%ebx
+	xorl	%ecx,%edi
+	andl	%eax,%edi
+	movl	20(%esi),%ebp
+	xorl	%edx,%edi
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$20,%ebx
+	addl	%ecx,%ebx
+
+
+
+	xorl	%edx,%edi
+	xorl	%ebx,%edi
+	leal	4294588738(%eax,%ebp,1),%eax
+	addl	%edi,%eax
+	roll	$4,%eax
+	movl	32(%esi),%ebp
+	movl	%ebx,%edi
+
+	leal	2272392833(%edx,%ebp,1),%edx
+	addl	%ebx,%eax
+	xorl	%ecx,%edi
+	xorl	%eax,%edi
+	movl	44(%esi),%ebp
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$11,%edx
+	addl	%eax,%edx
+
+	xorl	%ebx,%edi
+	xorl	%edx,%edi
+	leal	1839030562(%ecx,%ebp,1),%ecx
+	addl	%edi,%ecx
+	roll	$16,%ecx
+	movl	56(%esi),%ebp
+	movl	%edx,%edi
+
+	leal	4259657740(%ebx,%ebp,1),%ebx
+	addl	%edx,%ecx
+	xorl	%eax,%edi
+	xorl	%ecx,%edi
+	movl	4(%esi),%ebp
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$23,%ebx
+	addl	%ecx,%ebx
+
+	xorl	%edx,%edi
+	xorl	%ebx,%edi
+	leal	2763975236(%eax,%ebp,1),%eax
+	addl	%edi,%eax
+	roll	$4,%eax
+	movl	16(%esi),%ebp
+	movl	%ebx,%edi
+
+	leal	1272893353(%edx,%ebp,1),%edx
+	addl	%ebx,%eax
+	xorl	%ecx,%edi
+	xorl	%eax,%edi
+	movl	28(%esi),%ebp
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$11,%edx
+	addl	%eax,%edx
+
+	xorl	%ebx,%edi
+	xorl	%edx,%edi
+	leal	4139469664(%ecx,%ebp,1),%ecx
+	addl	%edi,%ecx
+	roll	$16,%ecx
+	movl	40(%esi),%ebp
+	movl	%edx,%edi
+
+	leal	3200236656(%ebx,%ebp,1),%ebx
+	addl	%edx,%ecx
+	xorl	%eax,%edi
+	xorl	%ecx,%edi
+	movl	52(%esi),%ebp
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$23,%ebx
+	addl	%ecx,%ebx
+
+	xorl	%edx,%edi
+	xorl	%ebx,%edi
+	leal	681279174(%eax,%ebp,1),%eax
+	addl	%edi,%eax
+	roll	$4,%eax
+	movl	(%esi),%ebp
+	movl	%ebx,%edi
+
+	leal	3936430074(%edx,%ebp,1),%edx
+	addl	%ebx,%eax
+	xorl	%ecx,%edi
+	xorl	%eax,%edi
+	movl	12(%esi),%ebp
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$11,%edx
+	addl	%eax,%edx
+
+	xorl	%ebx,%edi
+	xorl	%edx,%edi
+	leal	3572445317(%ecx,%ebp,1),%ecx
+	addl	%edi,%ecx
+	roll	$16,%ecx
+	movl	24(%esi),%ebp
+	movl	%edx,%edi
+
+	leal	76029189(%ebx,%ebp,1),%ebx
+	addl	%edx,%ecx
+	xorl	%eax,%edi
+	xorl	%ecx,%edi
+	movl	36(%esi),%ebp
+	addl	%edi,%ebx
+	movl	%ecx,%edi
+	roll	$23,%ebx
+	addl	%ecx,%ebx
+
+	xorl	%edx,%edi
+	xorl	%ebx,%edi
+	leal	3654602809(%eax,%ebp,1),%eax
+	addl	%edi,%eax
+	roll	$4,%eax
+	movl	48(%esi),%ebp
+	movl	%ebx,%edi
+
+	leal	3873151461(%edx,%ebp,1),%edx
+	addl	%ebx,%eax
+	xorl	%ecx,%edi
+	xorl	%eax,%edi
+	movl	60(%esi),%ebp
+	addl	%edi,%edx
+	movl	%eax,%edi
+	roll	$11,%edx
+	addl	%eax,%edx
+
+	xorl	%ebx,%edi
+	xorl	%edx,%edi
+	leal	530742520(%ecx,%ebp,1),%ecx
+	addl	%edi,%ecx
+	roll	$16,%ecx
+	movl	8(%esi),%ebp
+	movl	%edx,%edi
+
+	leal	3299628645(%ebx,%ebp,1),%ebx
+	addl	%edx,%ecx
+	xorl	%eax,%edi
+	xorl	%ecx,%edi
+	movl	(%esi),%ebp
+	addl	%edi,%ebx
+	movl	$-1,%edi
+	roll	$23,%ebx
+	addl	%ecx,%ebx
+
+
+
+	xorl	%edx,%edi
+	orl	%ebx,%edi
+	leal	4096336452(%eax,%ebp,1),%eax
+	xorl	%ecx,%edi
+	movl	28(%esi),%ebp
+	addl	%edi,%eax
+	movl	$-1,%edi
+	roll	$6,%eax
+	xorl	%ecx,%edi
+	addl	%ebx,%eax
+
+	orl	%eax,%edi
+	leal	1126891415(%edx,%ebp,1),%edx
+	xorl	%ebx,%edi
+	movl	56(%esi),%ebp
+	addl	%edi,%edx
+	movl	$-1,%edi
+	roll	$10,%edx
+	xorl	%ebx,%edi
+	addl	%eax,%edx
+
+	orl	%edx,%edi
+	leal	2878612391(%ecx,%ebp,1),%ecx
+	xorl	%eax,%edi
+	movl	20(%esi),%ebp
+	addl	%edi,%ecx
+	movl	$-1,%edi
+	roll	$15,%ecx
+	xorl	%eax,%edi
+	addl	%edx,%ecx
+
+	orl	%ecx,%edi
+	leal	4237533241(%ebx,%ebp,1),%ebx
+	xorl	%edx,%edi
+	movl	48(%esi),%ebp
+	addl	%edi,%ebx
+	movl	$-1,%edi
+	roll	$21,%ebx
+	xorl	%edx,%edi
+	addl	%ecx,%ebx
+
+	orl	%ebx,%edi
+	leal	1700485571(%eax,%ebp,1),%eax
+	xorl	%ecx,%edi
+	movl	12(%esi),%ebp
+	addl	%edi,%eax
+	movl	$-1,%edi
+	roll	$6,%eax
+	xorl	%ecx,%edi
+	addl	%ebx,%eax
+
+	orl	%eax,%edi
+	leal	2399980690(%edx,%ebp,1),%edx
+	xorl	%ebx,%edi
+	movl	40(%esi),%ebp
+	addl	%edi,%edx
+	movl	$-1,%edi
+	roll	$10,%edx
+	xorl	%ebx,%edi
+	addl	%eax,%edx
+
+	orl	%edx,%edi
+	leal	4293915773(%ecx,%ebp,1),%ecx
+	xorl	%eax,%edi
+	movl	4(%esi),%ebp
+	addl	%edi,%ecx
+	movl	$-1,%edi
+	roll	$15,%ecx
+	xorl	%eax,%edi
+	addl	%edx,%ecx
+
+	orl	%ecx,%edi
+	leal	2240044497(%ebx,%ebp,1),%ebx
+	xorl	%edx,%edi
+	movl	32(%esi),%ebp
+	addl	%edi,%ebx
+	movl	$-1,%edi
+	roll	$21,%ebx
+	xorl	%edx,%edi
+	addl	%ecx,%ebx
+
+	orl	%ebx,%edi
+	leal	1873313359(%eax,%ebp,1),%eax
+	xorl	%ecx,%edi
+	movl	60(%esi),%ebp
+	addl	%edi,%eax
+	movl	$-1,%edi
+	roll	$6,%eax
+	xorl	%ecx,%edi
+	addl	%ebx,%eax
+
+	orl	%eax,%edi
+	leal	4264355552(%edx,%ebp,1),%edx
+	xorl	%ebx,%edi
+	movl	24(%esi),%ebp
+	addl	%edi,%edx
+	movl	$-1,%edi
+	roll	$10,%edx
+	xorl	%ebx,%edi
+	addl	%eax,%edx
+
+	orl	%edx,%edi
+	leal	2734768916(%ecx,%ebp,1),%ecx
+	xorl	%eax,%edi
+	movl	52(%esi),%ebp
+	addl	%edi,%ecx
+	movl	$-1,%edi
+	roll	$15,%ecx
+	xorl	%eax,%edi
+	addl	%edx,%ecx
+
+	orl	%ecx,%edi
+	leal	1309151649(%ebx,%ebp,1),%ebx
+	xorl	%edx,%edi
+	movl	16(%esi),%ebp
+	addl	%edi,%ebx
+	movl	$-1,%edi
+	roll	$21,%ebx
+	xorl	%edx,%edi
+	addl	%ecx,%ebx
+
+	orl	%ebx,%edi
+	leal	4149444226(%eax,%ebp,1),%eax
+	xorl	%ecx,%edi
+	movl	44(%esi),%ebp
+	addl	%edi,%eax
+	movl	$-1,%edi
+	roll	$6,%eax
+	xorl	%ecx,%edi
+	addl	%ebx,%eax
+
+	orl	%eax,%edi
+	leal	3174756917(%edx,%ebp,1),%edx
+	xorl	%ebx,%edi
+	movl	8(%esi),%ebp
+	addl	%edi,%edx
+	movl	$-1,%edi
+	roll	$10,%edx
+	xorl	%ebx,%edi
+	addl	%eax,%edx
+
+	orl	%edx,%edi
+	leal	718787259(%ecx,%ebp,1),%ecx
+	xorl	%eax,%edi
+	movl	36(%esi),%ebp
+	addl	%edi,%ecx
+	movl	$-1,%edi
+	roll	$15,%ecx
+	xorl	%eax,%edi
+	addl	%edx,%ecx
+
+	orl	%ecx,%edi
+	leal	3951481745(%ebx,%ebp,1),%ebx
+	xorl	%edx,%edi
+	movl	24(%esp),%ebp
+	addl	%edi,%ebx
+	addl	$64,%esi
+	roll	$21,%ebx
+	movl	(%ebp),%edi
+	addl	%ecx,%ebx
+	addl	%edi,%eax
+	movl	4(%ebp),%edi
+	addl	%edi,%ebx
+	movl	8(%ebp),%edi
+	addl	%edi,%ecx
+	movl	12(%ebp),%edi
+	addl	%edi,%edx
+	movl	%eax,(%ebp)
+	movl	%ebx,4(%ebp)
+	movl	(%esp),%edi
+	movl	%ecx,8(%ebp)
+	movl	%edx,12(%ebp)
+	cmpl	%esi,%edi
+	jae	.L000start
+	popl	%eax
+	popl	%ebx
+	popl	%ebp
+	popl	%edi
+	popl	%esi
+	ret
+.size	md5_block_asm_data_order,.-.L_md5_block_asm_data_order_begin
+#endif


Property changes on: trunk/secure/lib/libcrypto/i386/md5-586.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
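The md5-586.S added above fully unrolls the 64 MD5 steps. Each eight-instruction group in round 1 is one FF step: the leal folds the additive constant T[i] and the current message word X[k] into the accumulator in a single instruction (3614090360 is 0xd76aa478, the first MD5 sine constant), and the rotate amounts 7/12/17/22 match the round-1 schedule. A sketch of that step in C under the standard MD5 definitions; md5_ff and ROTL32 are illustrative names, not symbols from the source.

	#include <stdint.h>

	#define ROTL32(x, n) (((x) << (n)) | ((x) >> (32 - (n))))

	/* One MD5 round-1 step, mirroring the repeated
	 * xorl/andl/leal/xorl/addl/roll/addl pattern in the assembly:
	 *   a += F(b,c,d) + X[k] + T[i];  a = ROTL(a, s) + b; */
	static uint32_t md5_ff(uint32_t a, uint32_t b, uint32_t c, uint32_t d,
	                       uint32_t x, uint32_t t, int s)
	{
		a += (d ^ (b & (c ^ d))) + x + t;
		return ROTL32(a, s) + b;
	}

Note the selector F(b,c,d) = (b & c) | (~b & d) is computed as d ^ (b & (c ^ d)), the same algebraic identity the assembly uses to save an instruction and a temporary register.
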
Deleted: trunk/secure/lib/libcrypto/i386/md5-586.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/md5-586.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/md5-586.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,680 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/md5-586.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"md5-586.s"
-.text
-.globl	md5_block_asm_data_order
-.type	md5_block_asm_data_order,@function
-.align	16
-md5_block_asm_data_order:
-.L_md5_block_asm_data_order_begin:
-	pushl	%esi
-	pushl	%edi
-	movl	12(%esp),%edi
-	movl	16(%esp),%esi
-	movl	20(%esp),%ecx
-	pushl	%ebp
-	shll	$6,%ecx
-	pushl	%ebx
-	addl	%esi,%ecx
-	subl	$64,%ecx
-	movl	(%edi),%eax
-	pushl	%ecx
-	movl	4(%edi),%ebx
-	movl	8(%edi),%ecx
-	movl	12(%edi),%edx
-.L000start:
-
-
-	movl	%ecx,%edi
-	movl	(%esi),%ebp
-
-	xorl	%edx,%edi
-	andl	%ebx,%edi
-	leal	3614090360(%eax,%ebp,1),%eax
-	xorl	%edx,%edi
-	addl	%edi,%eax
-	movl	%ebx,%edi
-	roll	$7,%eax
-	movl	4(%esi),%ebp
-	addl	%ebx,%eax
-
-	xorl	%ecx,%edi
-	andl	%eax,%edi
-	leal	3905402710(%edx,%ebp,1),%edx
-	xorl	%ecx,%edi
-	addl	%edi,%edx
-	movl	%eax,%edi
-	roll	$12,%edx
-	movl	8(%esi),%ebp
-	addl	%eax,%edx
-
-	xorl	%ebx,%edi
-	andl	%edx,%edi
-	leal	606105819(%ecx,%ebp,1),%ecx
-	xorl	%ebx,%edi
-	addl	%edi,%ecx
-	movl	%edx,%edi
-	roll	$17,%ecx
-	movl	12(%esi),%ebp
-	addl	%edx,%ecx
-
-	xorl	%eax,%edi
-	andl	%ecx,%edi
-	leal	3250441966(%ebx,%ebp,1),%ebx
-	xorl	%eax,%edi
-	addl	%edi,%ebx
-	movl	%ecx,%edi
-	roll	$22,%ebx
-	movl	16(%esi),%ebp
-	addl	%ecx,%ebx
-
-	xorl	%edx,%edi
-	andl	%ebx,%edi
-	leal	4118548399(%eax,%ebp,1),%eax
-	xorl	%edx,%edi
-	addl	%edi,%eax
-	movl	%ebx,%edi
-	roll	$7,%eax
-	movl	20(%esi),%ebp
-	addl	%ebx,%eax
-
-	xorl	%ecx,%edi
-	andl	%eax,%edi
-	leal	1200080426(%edx,%ebp,1),%edx
-	xorl	%ecx,%edi
-	addl	%edi,%edx
-	movl	%eax,%edi
-	roll	$12,%edx
-	movl	24(%esi),%ebp
-	addl	%eax,%edx
-
-	xorl	%ebx,%edi
-	andl	%edx,%edi
-	leal	2821735955(%ecx,%ebp,1),%ecx
-	xorl	%ebx,%edi
-	addl	%edi,%ecx
-	movl	%edx,%edi
-	roll	$17,%ecx
-	movl	28(%esi),%ebp
-	addl	%edx,%ecx
-
-	xorl	%eax,%edi
-	andl	%ecx,%edi
-	leal	4249261313(%ebx,%ebp,1),%ebx
-	xorl	%eax,%edi
-	addl	%edi,%ebx
-	movl	%ecx,%edi
-	roll	$22,%ebx
-	movl	32(%esi),%ebp
-	addl	%ecx,%ebx
-
-	xorl	%edx,%edi
-	andl	%ebx,%edi
-	leal	1770035416(%eax,%ebp,1),%eax
-	xorl	%edx,%edi
-	addl	%edi,%eax
-	movl	%ebx,%edi
-	roll	$7,%eax
-	movl	36(%esi),%ebp
-	addl	%ebx,%eax
-
-	xorl	%ecx,%edi
-	andl	%eax,%edi
-	leal	2336552879(%edx,%ebp,1),%edx
-	xorl	%ecx,%edi
-	addl	%edi,%edx
-	movl	%eax,%edi
-	roll	$12,%edx
-	movl	40(%esi),%ebp
-	addl	%eax,%edx
-
-	xorl	%ebx,%edi
-	andl	%edx,%edi
-	leal	4294925233(%ecx,%ebp,1),%ecx
-	xorl	%ebx,%edi
-	addl	%edi,%ecx
-	movl	%edx,%edi
-	roll	$17,%ecx
-	movl	44(%esi),%ebp
-	addl	%edx,%ecx
-
-	xorl	%eax,%edi
-	andl	%ecx,%edi
-	leal	2304563134(%ebx,%ebp,1),%ebx
-	xorl	%eax,%edi
-	addl	%edi,%ebx
-	movl	%ecx,%edi
-	roll	$22,%ebx
-	movl	48(%esi),%ebp
-	addl	%ecx,%ebx
-
-	xorl	%edx,%edi
-	andl	%ebx,%edi
-	leal	1804603682(%eax,%ebp,1),%eax
-	xorl	%edx,%edi
-	addl	%edi,%eax
-	movl	%ebx,%edi
-	roll	$7,%eax
-	movl	52(%esi),%ebp
-	addl	%ebx,%eax
-
-	xorl	%ecx,%edi
-	andl	%eax,%edi
-	leal	4254626195(%edx,%ebp,1),%edx
-	xorl	%ecx,%edi
-	addl	%edi,%edx
-	movl	%eax,%edi
-	roll	$12,%edx
-	movl	56(%esi),%ebp
-	addl	%eax,%edx
-
-	xorl	%ebx,%edi
-	andl	%edx,%edi
-	leal	2792965006(%ecx,%ebp,1),%ecx
-	xorl	%ebx,%edi
-	addl	%edi,%ecx
-	movl	%edx,%edi
-	roll	$17,%ecx
-	movl	60(%esi),%ebp
-	addl	%edx,%ecx
-
-	xorl	%eax,%edi
-	andl	%ecx,%edi
-	leal	1236535329(%ebx,%ebp,1),%ebx
-	xorl	%eax,%edi
-	addl	%edi,%ebx
-	movl	%ecx,%edi
-	roll	$22,%ebx
-	movl	4(%esi),%ebp
-	addl	%ecx,%ebx
-
-
-
-	leal	4129170786(%eax,%ebp,1),%eax
-	xorl	%ebx,%edi
-	andl	%edx,%edi
-	movl	24(%esi),%ebp
-	xorl	%ecx,%edi
-	addl	%edi,%eax
-	movl	%ebx,%edi
-	roll	$5,%eax
-	addl	%ebx,%eax
-
-	leal	3225465664(%edx,%ebp,1),%edx
-	xorl	%eax,%edi
-	andl	%ecx,%edi
-	movl	44(%esi),%ebp
-	xorl	%ebx,%edi
-	addl	%edi,%edx
-	movl	%eax,%edi
-	roll	$9,%edx
-	addl	%eax,%edx
-
-	leal	643717713(%ecx,%ebp,1),%ecx
-	xorl	%edx,%edi
-	andl	%ebx,%edi
-	movl	(%esi),%ebp
-	xorl	%eax,%edi
-	addl	%edi,%ecx
-	movl	%edx,%edi
-	roll	$14,%ecx
-	addl	%edx,%ecx
-
-	leal	3921069994(%ebx,%ebp,1),%ebx
-	xorl	%ecx,%edi
-	andl	%eax,%edi
-	movl	20(%esi),%ebp
-	xorl	%edx,%edi
-	addl	%edi,%ebx
-	movl	%ecx,%edi
-	roll	$20,%ebx
-	addl	%ecx,%ebx
-
-	leal	3593408605(%eax,%ebp,1),%eax
-	xorl	%ebx,%edi
-	andl	%edx,%edi
-	movl	40(%esi),%ebp
-	xorl	%ecx,%edi
-	addl	%edi,%eax
-	movl	%ebx,%edi
-	roll	$5,%eax
-	addl	%ebx,%eax
-
-	leal	38016083(%edx,%ebp,1),%edx
-	xorl	%eax,%edi
-	andl	%ecx,%edi
-	movl	60(%esi),%ebp
-	xorl	%ebx,%edi
-	addl	%edi,%edx
-	movl	%eax,%edi
-	roll	$9,%edx
-	addl	%eax,%edx
-
-	leal	3634488961(%ecx,%ebp,1),%ecx
-	xorl	%edx,%edi
-	andl	%ebx,%edi
-	movl	16(%esi),%ebp
-	xorl	%eax,%edi
-	addl	%edi,%ecx
-	movl	%edx,%edi
-	roll	$14,%ecx
-	addl	%edx,%ecx
-
-	leal	3889429448(%ebx,%ebp,1),%ebx
-	xorl	%ecx,%edi
-	andl	%eax,%edi
-	movl	36(%esi),%ebp
-	xorl	%edx,%edi
-	addl	%edi,%ebx
-	movl	%ecx,%edi
-	roll	$20,%ebx
-	addl	%ecx,%ebx
-
-	leal	568446438(%eax,%ebp,1),%eax
-	xorl	%ebx,%edi
-	andl	%edx,%edi
-	movl	56(%esi),%ebp
-	xorl	%ecx,%edi
-	addl	%edi,%eax
-	movl	%ebx,%edi
-	roll	$5,%eax
-	addl	%ebx,%eax
-
-	leal	3275163606(%edx,%ebp,1),%edx
-	xorl	%eax,%edi
-	andl	%ecx,%edi
-	movl	12(%esi),%ebp
-	xorl	%ebx,%edi
-	addl	%edi,%edx
-	movl	%eax,%edi
-	roll	$9,%edx
-	addl	%eax,%edx
-
-	leal	4107603335(%ecx,%ebp,1),%ecx
-	xorl	%edx,%edi
-	andl	%ebx,%edi
-	movl	32(%esi),%ebp
-	xorl	%eax,%edi
-	addl	%edi,%ecx
-	movl	%edx,%edi
-	roll	$14,%ecx
-	addl	%edx,%ecx
-
-	leal	1163531501(%ebx,%ebp,1),%ebx
-	xorl	%ecx,%edi
-	andl	%eax,%edi
-	movl	52(%esi),%ebp
-	xorl	%edx,%edi
-	addl	%edi,%ebx
-	movl	%ecx,%edi
-	roll	$20,%ebx
-	addl	%ecx,%ebx
-
-	leal	2850285829(%eax,%ebp,1),%eax
-	xorl	%ebx,%edi
-	andl	%edx,%edi
-	movl	8(%esi),%ebp
-	xorl	%ecx,%edi
-	addl	%edi,%eax
-	movl	%ebx,%edi
-	roll	$5,%eax
-	addl	%ebx,%eax
-
-	leal	4243563512(%edx,%ebp,1),%edx
-	xorl	%eax,%edi
-	andl	%ecx,%edi
-	movl	28(%esi),%ebp
-	xorl	%ebx,%edi
-	addl	%edi,%edx
-	movl	%eax,%edi
-	roll	$9,%edx
-	addl	%eax,%edx
-
-	leal	1735328473(%ecx,%ebp,1),%ecx
-	xorl	%edx,%edi
-	andl	%ebx,%edi
-	movl	48(%esi),%ebp
-	xorl	%eax,%edi
-	addl	%edi,%ecx
-	movl	%edx,%edi
-	roll	$14,%ecx
-	addl	%edx,%ecx
-
-	leal	2368359562(%ebx,%ebp,1),%ebx
-	xorl	%ecx,%edi
-	andl	%eax,%edi
-	movl	20(%esi),%ebp
-	xorl	%edx,%edi
-	addl	%edi,%ebx
-	movl	%ecx,%edi
-	roll	$20,%ebx
-	addl	%ecx,%ebx
-
-
-
-	xorl	%edx,%edi
-	xorl	%ebx,%edi
-	leal	4294588738(%eax,%ebp,1),%eax
-	addl	%edi,%eax
-	roll	$4,%eax
-	movl	32(%esi),%ebp
-	movl	%ebx,%edi
-
-	leal	2272392833(%edx,%ebp,1),%edx
-	addl	%ebx,%eax
-	xorl	%ecx,%edi
-	xorl	%eax,%edi
-	movl	44(%esi),%ebp
-	addl	%edi,%edx
-	movl	%eax,%edi
-	roll	$11,%edx
-	addl	%eax,%edx
-
-	xorl	%ebx,%edi
-	xorl	%edx,%edi
-	leal	1839030562(%ecx,%ebp,1),%ecx
-	addl	%edi,%ecx
-	roll	$16,%ecx
-	movl	56(%esi),%ebp
-	movl	%edx,%edi
-
-	leal	4259657740(%ebx,%ebp,1),%ebx
-	addl	%edx,%ecx
-	xorl	%eax,%edi
-	xorl	%ecx,%edi
-	movl	4(%esi),%ebp
-	addl	%edi,%ebx
-	movl	%ecx,%edi
-	roll	$23,%ebx
-	addl	%ecx,%ebx
-
-	xorl	%edx,%edi
-	xorl	%ebx,%edi
-	leal	2763975236(%eax,%ebp,1),%eax
-	addl	%edi,%eax
-	roll	$4,%eax
-	movl	16(%esi),%ebp
-	movl	%ebx,%edi
-
-	leal	1272893353(%edx,%ebp,1),%edx
-	addl	%ebx,%eax
-	xorl	%ecx,%edi
-	xorl	%eax,%edi
-	movl	28(%esi),%ebp
-	addl	%edi,%edx
-	movl	%eax,%edi
-	roll	$11,%edx
-	addl	%eax,%edx
-
-	xorl	%ebx,%edi
-	xorl	%edx,%edi
-	leal	4139469664(%ecx,%ebp,1),%ecx
-	addl	%edi,%ecx
-	roll	$16,%ecx
-	movl	40(%esi),%ebp
-	movl	%edx,%edi
-
-	leal	3200236656(%ebx,%ebp,1),%ebx
-	addl	%edx,%ecx
-	xorl	%eax,%edi
-	xorl	%ecx,%edi
-	movl	52(%esi),%ebp
-	addl	%edi,%ebx
-	movl	%ecx,%edi
-	roll	$23,%ebx
-	addl	%ecx,%ebx
-
-	xorl	%edx,%edi
-	xorl	%ebx,%edi
-	leal	681279174(%eax,%ebp,1),%eax
-	addl	%edi,%eax
-	roll	$4,%eax
-	movl	(%esi),%ebp
-	movl	%ebx,%edi
-
-	leal	3936430074(%edx,%ebp,1),%edx
-	addl	%ebx,%eax
-	xorl	%ecx,%edi
-	xorl	%eax,%edi
-	movl	12(%esi),%ebp
-	addl	%edi,%edx
-	movl	%eax,%edi
-	roll	$11,%edx
-	addl	%eax,%edx
-
-	xorl	%ebx,%edi
-	xorl	%edx,%edi
-	leal	3572445317(%ecx,%ebp,1),%ecx
-	addl	%edi,%ecx
-	roll	$16,%ecx
-	movl	24(%esi),%ebp
-	movl	%edx,%edi
-
-	leal	76029189(%ebx,%ebp,1),%ebx
-	addl	%edx,%ecx
-	xorl	%eax,%edi
-	xorl	%ecx,%edi
-	movl	36(%esi),%ebp
-	addl	%edi,%ebx
-	movl	%ecx,%edi
-	roll	$23,%ebx
-	addl	%ecx,%ebx
-
-	xorl	%edx,%edi
-	xorl	%ebx,%edi
-	leal	3654602809(%eax,%ebp,1),%eax
-	addl	%edi,%eax
-	roll	$4,%eax
-	movl	48(%esi),%ebp
-	movl	%ebx,%edi
-
-	leal	3873151461(%edx,%ebp,1),%edx
-	addl	%ebx,%eax
-	xorl	%ecx,%edi
-	xorl	%eax,%edi
-	movl	60(%esi),%ebp
-	addl	%edi,%edx
-	movl	%eax,%edi
-	roll	$11,%edx
-	addl	%eax,%edx
-
-	xorl	%ebx,%edi
-	xorl	%edx,%edi
-	leal	530742520(%ecx,%ebp,1),%ecx
-	addl	%edi,%ecx
-	roll	$16,%ecx
-	movl	8(%esi),%ebp
-	movl	%edx,%edi
-
-	leal	3299628645(%ebx,%ebp,1),%ebx
-	addl	%edx,%ecx
-	xorl	%eax,%edi
-	xorl	%ecx,%edi
-	movl	(%esi),%ebp
-	addl	%edi,%ebx
-	movl	$-1,%edi
-	roll	$23,%ebx
-	addl	%ecx,%ebx
-
-
-
-	xorl	%edx,%edi
-	orl	%ebx,%edi
-	leal	4096336452(%eax,%ebp,1),%eax
-	xorl	%ecx,%edi
-	movl	28(%esi),%ebp
-	addl	%edi,%eax
-	movl	$-1,%edi
-	roll	$6,%eax
-	xorl	%ecx,%edi
-	addl	%ebx,%eax
-
-	orl	%eax,%edi
-	leal	1126891415(%edx,%ebp,1),%edx
-	xorl	%ebx,%edi
-	movl	56(%esi),%ebp
-	addl	%edi,%edx
-	movl	$-1,%edi
-	roll	$10,%edx
-	xorl	%ebx,%edi
-	addl	%eax,%edx
-
-	orl	%edx,%edi
-	leal	2878612391(%ecx,%ebp,1),%ecx
-	xorl	%eax,%edi
-	movl	20(%esi),%ebp
-	addl	%edi,%ecx
-	movl	$-1,%edi
-	roll	$15,%ecx
-	xorl	%eax,%edi
-	addl	%edx,%ecx
-
-	orl	%ecx,%edi
-	leal	4237533241(%ebx,%ebp,1),%ebx
-	xorl	%edx,%edi
-	movl	48(%esi),%ebp
-	addl	%edi,%ebx
-	movl	$-1,%edi
-	roll	$21,%ebx
-	xorl	%edx,%edi
-	addl	%ecx,%ebx
-
-	orl	%ebx,%edi
-	leal	1700485571(%eax,%ebp,1),%eax
-	xorl	%ecx,%edi
-	movl	12(%esi),%ebp
-	addl	%edi,%eax
-	movl	$-1,%edi
-	roll	$6,%eax
-	xorl	%ecx,%edi
-	addl	%ebx,%eax
-
-	orl	%eax,%edi
-	leal	2399980690(%edx,%ebp,1),%edx
-	xorl	%ebx,%edi
-	movl	40(%esi),%ebp
-	addl	%edi,%edx
-	movl	$-1,%edi
-	roll	$10,%edx
-	xorl	%ebx,%edi
-	addl	%eax,%edx
-
-	orl	%edx,%edi
-	leal	4293915773(%ecx,%ebp,1),%ecx
-	xorl	%eax,%edi
-	movl	4(%esi),%ebp
-	addl	%edi,%ecx
-	movl	$-1,%edi
-	roll	$15,%ecx
-	xorl	%eax,%edi
-	addl	%edx,%ecx
-
-	orl	%ecx,%edi
-	leal	2240044497(%ebx,%ebp,1),%ebx
-	xorl	%edx,%edi
-	movl	32(%esi),%ebp
-	addl	%edi,%ebx
-	movl	$-1,%edi
-	roll	$21,%ebx
-	xorl	%edx,%edi
-	addl	%ecx,%ebx
-
-	orl	%ebx,%edi
-	leal	1873313359(%eax,%ebp,1),%eax
-	xorl	%ecx,%edi
-	movl	60(%esi),%ebp
-	addl	%edi,%eax
-	movl	$-1,%edi
-	roll	$6,%eax
-	xorl	%ecx,%edi
-	addl	%ebx,%eax
-
-	orl	%eax,%edi
-	leal	4264355552(%edx,%ebp,1),%edx
-	xorl	%ebx,%edi
-	movl	24(%esi),%ebp
-	addl	%edi,%edx
-	movl	$-1,%edi
-	roll	$10,%edx
-	xorl	%ebx,%edi
-	addl	%eax,%edx
-
-	orl	%edx,%edi
-	leal	2734768916(%ecx,%ebp,1),%ecx
-	xorl	%eax,%edi
-	movl	52(%esi),%ebp
-	addl	%edi,%ecx
-	movl	$-1,%edi
-	roll	$15,%ecx
-	xorl	%eax,%edi
-	addl	%edx,%ecx
-
-	orl	%ecx,%edi
-	leal	1309151649(%ebx,%ebp,1),%ebx
-	xorl	%edx,%edi
-	movl	16(%esi),%ebp
-	addl	%edi,%ebx
-	movl	$-1,%edi
-	roll	$21,%ebx
-	xorl	%edx,%edi
-	addl	%ecx,%ebx
-
-	orl	%ebx,%edi
-	leal	4149444226(%eax,%ebp,1),%eax
-	xorl	%ecx,%edi
-	movl	44(%esi),%ebp
-	addl	%edi,%eax
-	movl	$-1,%edi
-	roll	$6,%eax
-	xorl	%ecx,%edi
-	addl	%ebx,%eax
-
-	orl	%eax,%edi
-	leal	3174756917(%edx,%ebp,1),%edx
-	xorl	%ebx,%edi
-	movl	8(%esi),%ebp
-	addl	%edi,%edx
-	movl	$-1,%edi
-	roll	$10,%edx
-	xorl	%ebx,%edi
-	addl	%eax,%edx
-
-	orl	%edx,%edi
-	leal	718787259(%ecx,%ebp,1),%ecx
-	xorl	%eax,%edi
-	movl	36(%esi),%ebp
-	addl	%edi,%ecx
-	movl	$-1,%edi
-	roll	$15,%ecx
-	xorl	%eax,%edi
-	addl	%edx,%ecx
-
-	orl	%ecx,%edi
-	leal	3951481745(%ebx,%ebp,1),%ebx
-	xorl	%edx,%edi
-	movl	24(%esp),%ebp
-	addl	%edi,%ebx
-	addl	$64,%esi
-	roll	$21,%ebx
-	movl	(%ebp),%edi
-	addl	%ecx,%ebx
-	addl	%edi,%eax
-	movl	4(%ebp),%edi
-	addl	%edi,%ebx
-	movl	8(%ebp),%edi
-	addl	%edi,%ecx
-	movl	12(%ebp),%edi
-	addl	%edi,%edx
-	movl	%eax,(%ebp)
-	movl	%ebx,4(%ebp)
-	movl	(%esp),%edi
-	movl	%ecx,8(%ebp)
-	movl	%edx,12(%ebp)
-	cmpl	%esi,%edi
-	jae	.L000start
-	popl	%eax
-	popl	%ebx
-	popl	%ebp
-	popl	%edi
-	popl	%esi
-	ret
-.size	md5_block_asm_data_order,.-.L_md5_block_asm_data_order_begin

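The rc4-586.S added below selects a code path at run time: a byte-table loop for CPUs that prefer it, a 4-byte integer loop, and an 8-byte MMX loop taken when bit 26 (SSE2) of the cached CPUID feature word OPENSSL_ia32cap_P is set. In the PIC build that word is reached through the classic call/pop idiom (call .L004PIC_me_up; popl %ebp; leal _GLOBAL_OFFSET_TABLE_+...), since i386 has no PC-relative data addressing. A standalone C sketch of the same feature test follows; <cpuid.h> and __get_cpuid are GCC/Clang helpers used here for illustration, an assumption on my part, whereas the real code reads the pre-computed OPENSSL_ia32cap_P instead of issuing CPUID itself.

	#include <stdio.h>
	#include <cpuid.h>

	int main(void)
	{
		unsigned int eax, ebx, ecx, edx;

		if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx))
			return 1;
		/* CPUID.1:EDX bit 26 = SSE2; the assembly tests the same bit
		 * (btl $26) before falling into its loop_mmx path. */
		printf("8-byte MMX RC4 loop: %s\n",
		       (edx >> 26) & 1 ? "yes" : "no");
		return 0;
	}
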
Added: trunk/secure/lib/libcrypto/i386/rc4-586.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/rc4-586.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/rc4-586.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,762 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/rc4-586.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from rc4-586.pl.
+#ifdef PIC
+.file	"rc4-586.S"
+.text
+.globl	RC4
+.type	RC4,@function
+.align	16
+RC4:
+.L_RC4_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%edi
+	movl	24(%esp),%edx
+	movl	28(%esp),%esi
+	movl	32(%esp),%ebp
+	xorl	%eax,%eax
+	xorl	%ebx,%ebx
+	cmpl	$0,%edx
+	je	.L000abort
+	movb	(%edi),%al
+	movb	4(%edi),%bl
+	addl	$8,%edi
+	leal	(%esi,%edx,1),%ecx
+	subl	%esi,%ebp
+	movl	%ecx,24(%esp)
+	incb	%al
+	cmpl	$-1,256(%edi)
+	je	.L001RC4_CHAR
+	movl	(%edi,%eax,4),%ecx
+	andl	$-4,%edx
+	jz	.L002loop1
+	testl	$-8,%edx
+	movl	%ebp,32(%esp)
+	jz	.L003go4loop4
+	call	.L004PIC_me_up
+.L004PIC_me_up:
+	popl	%ebp
+	leal	_GLOBAL_OFFSET_TABLE_+[.-.L004PIC_me_up](%ebp),%ebp
+	movl	OPENSSL_ia32cap_P@GOT(%ebp),%ebp
+	btl	$26,(%ebp)
+	jnc	.L003go4loop4
+	movl	32(%esp),%ebp
+	andl	$-8,%edx
+	leal	-8(%esi,%edx,1),%edx
+	movl	%edx,-4(%edi)
+	addb	%cl,%bl
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	incl	%eax
+	addl	%ecx,%edx
+	movzbl	%al,%eax
+	movzbl	%dl,%edx
+	movq	(%esi),%mm0
+	movl	(%edi,%eax,4),%ecx
+	movd	(%edi,%edx,4),%mm2
+	jmp	.L005loop_mmx_enter
+.align	16
+.L006loop_mmx:
+	addb	%cl,%bl
+	psllq	$56,%mm1
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	incl	%eax
+	addl	%ecx,%edx
+	movzbl	%al,%eax
+	movzbl	%dl,%edx
+	pxor	%mm1,%mm2
+	movq	(%esi),%mm0
+	movq	%mm2,-8(%ebp,%esi,1)
+	movl	(%edi,%eax,4),%ecx
+	movd	(%edi,%edx,4),%mm2
+.L005loop_mmx_enter:
+	addb	%cl,%bl
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	incl	%eax
+	addl	%ecx,%edx
+	movzbl	%al,%eax
+	movzbl	%dl,%edx
+	pxor	%mm0,%mm2
+	movl	(%edi,%eax,4),%ecx
+	movd	(%edi,%edx,4),%mm1
+	addb	%cl,%bl
+	psllq	$8,%mm1
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	incl	%eax
+	addl	%ecx,%edx
+	movzbl	%al,%eax
+	movzbl	%dl,%edx
+	pxor	%mm1,%mm2
+	movl	(%edi,%eax,4),%ecx
+	movd	(%edi,%edx,4),%mm1
+	addb	%cl,%bl
+	psllq	$16,%mm1
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	incl	%eax
+	addl	%ecx,%edx
+	movzbl	%al,%eax
+	movzbl	%dl,%edx
+	pxor	%mm1,%mm2
+	movl	(%edi,%eax,4),%ecx
+	movd	(%edi,%edx,4),%mm1
+	addb	%cl,%bl
+	psllq	$24,%mm1
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	incl	%eax
+	addl	%ecx,%edx
+	movzbl	%al,%eax
+	movzbl	%dl,%edx
+	pxor	%mm1,%mm2
+	movl	(%edi,%eax,4),%ecx
+	movd	(%edi,%edx,4),%mm1
+	addb	%cl,%bl
+	psllq	$32,%mm1
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	incl	%eax
+	addl	%ecx,%edx
+	movzbl	%al,%eax
+	movzbl	%dl,%edx
+	pxor	%mm1,%mm2
+	movl	(%edi,%eax,4),%ecx
+	movd	(%edi,%edx,4),%mm1
+	addb	%cl,%bl
+	psllq	$40,%mm1
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	incl	%eax
+	addl	%ecx,%edx
+	movzbl	%al,%eax
+	movzbl	%dl,%edx
+	pxor	%mm1,%mm2
+	movl	(%edi,%eax,4),%ecx
+	movd	(%edi,%edx,4),%mm1
+	addb	%cl,%bl
+	psllq	$48,%mm1
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	incl	%eax
+	addl	%ecx,%edx
+	movzbl	%al,%eax
+	movzbl	%dl,%edx
+	pxor	%mm1,%mm2
+	movl	(%edi,%eax,4),%ecx
+	movd	(%edi,%edx,4),%mm1
+	movl	%ebx,%edx
+	xorl	%ebx,%ebx
+	movb	%dl,%bl
+	cmpl	-4(%edi),%esi
+	leal	8(%esi),%esi
+	jb	.L006loop_mmx
+	psllq	$56,%mm1
+	pxor	%mm1,%mm2
+	movq	%mm2,-8(%ebp,%esi,1)
+	emms
+	cmpl	24(%esp),%esi
+	je	.L007done
+	jmp	.L002loop1
+.align	16
+.L003go4loop4:
+	leal	-4(%esi,%edx,1),%edx
+	movl	%edx,28(%esp)
+.L008loop4:
+	addb	%cl,%bl
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	addl	%ecx,%edx
+	incb	%al
+	andl	$255,%edx
+	movl	(%edi,%eax,4),%ecx
+	movl	(%edi,%edx,4),%ebp
+	addb	%cl,%bl
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	addl	%ecx,%edx
+	incb	%al
+	andl	$255,%edx
+	rorl	$8,%ebp
+	movl	(%edi,%eax,4),%ecx
+	orl	(%edi,%edx,4),%ebp
+	addb	%cl,%bl
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	addl	%ecx,%edx
+	incb	%al
+	andl	$255,%edx
+	rorl	$8,%ebp
+	movl	(%edi,%eax,4),%ecx
+	orl	(%edi,%edx,4),%ebp
+	addb	%cl,%bl
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	addl	%ecx,%edx
+	incb	%al
+	andl	$255,%edx
+	rorl	$8,%ebp
+	movl	32(%esp),%ecx
+	orl	(%edi,%edx,4),%ebp
+	rorl	$8,%ebp
+	xorl	(%esi),%ebp
+	cmpl	28(%esp),%esi
+	movl	%ebp,(%ecx,%esi,1)
+	leal	4(%esi),%esi
+	movl	(%edi,%eax,4),%ecx
+	jb	.L008loop4
+	cmpl	24(%esp),%esi
+	je	.L007done
+	movl	32(%esp),%ebp
+.align	16
+.L002loop1:
+	addb	%cl,%bl
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	addl	%ecx,%edx
+	incb	%al
+	andl	$255,%edx
+	movl	(%edi,%edx,4),%edx
+	xorb	(%esi),%dl
+	leal	1(%esi),%esi
+	movl	(%edi,%eax,4),%ecx
+	cmpl	24(%esp),%esi
+	movb	%dl,-1(%ebp,%esi,1)
+	jb	.L002loop1
+	jmp	.L007done
+.align	16
+.L001RC4_CHAR:
+	movzbl	(%edi,%eax,1),%ecx
+.L009cloop1:
+	addb	%cl,%bl
+	movzbl	(%edi,%ebx,1),%edx
+	movb	%cl,(%edi,%ebx,1)
+	movb	%dl,(%edi,%eax,1)
+	addb	%cl,%dl
+	movzbl	(%edi,%edx,1),%edx
+	addb	$1,%al
+	xorb	(%esi),%dl
+	leal	1(%esi),%esi
+	movzbl	(%edi,%eax,1),%ecx
+	cmpl	24(%esp),%esi
+	movb	%dl,-1(%ebp,%esi,1)
+	jb	.L009cloop1
+.L007done:
+	decb	%al
+	movl	%ebx,-4(%edi)
+	movb	%al,-8(%edi)
+.L000abort:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	RC4,.-.L_RC4_begin
+.globl	private_RC4_set_key
+.type	private_RC4_set_key,@function
+.align	16
+private_RC4_set_key:
+.L_private_RC4_set_key_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%edi
+	movl	24(%esp),%ebp
+	movl	28(%esp),%esi
+	call	.L010PIC_me_up
+.L010PIC_me_up:
+	popl	%edx
+	leal	_GLOBAL_OFFSET_TABLE_+[.-.L010PIC_me_up](%edx),%edx
+	movl	OPENSSL_ia32cap_P@GOT(%edx),%edx
+	leal	8(%edi),%edi
+	leal	(%esi,%ebp,1),%esi
+	negl	%ebp
+	xorl	%eax,%eax
+	movl	%ebp,-4(%edi)
+	btl	$20,(%edx)
+	jc	.L011c1stloop
+.align	16
+.L012w1stloop:
+	movl	%eax,(%edi,%eax,4)
+	addb	$1,%al
+	jnc	.L012w1stloop
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+.align	16
+.L013w2ndloop:
+	movl	(%edi,%ecx,4),%eax
+	addb	(%esi,%ebp,1),%dl
+	addb	%al,%dl
+	addl	$1,%ebp
+	movl	(%edi,%edx,4),%ebx
+	jnz	.L014wnowrap
+	movl	-4(%edi),%ebp
+.L014wnowrap:
+	movl	%eax,(%edi,%edx,4)
+	movl	%ebx,(%edi,%ecx,4)
+	addb	$1,%cl
+	jnc	.L013w2ndloop
+	jmp	.L015exit
+.align	16
+.L011c1stloop:
+	movb	%al,(%edi,%eax,1)
+	addb	$1,%al
+	jnc	.L011c1stloop
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+	xorl	%ebx,%ebx
+.align	16
+.L016c2ndloop:
+	movb	(%edi,%ecx,1),%al
+	addb	(%esi,%ebp,1),%dl
+	addb	%al,%dl
+	addl	$1,%ebp
+	movb	(%edi,%edx,1),%bl
+	jnz	.L017cnowrap
+	movl	-4(%edi),%ebp
+.L017cnowrap:
+	movb	%al,(%edi,%edx,1)
+	movb	%bl,(%edi,%ecx,1)
+	addb	$1,%cl
+	jnc	.L016c2ndloop
+	movl	$-1,256(%edi)
+.L015exit:
+	xorl	%eax,%eax
+	movl	%eax,-8(%edi)
+	movl	%eax,-4(%edi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	private_RC4_set_key,.-.L_private_RC4_set_key_begin
+.globl	RC4_options
+.type	RC4_options,@function
+.align	16
+RC4_options:
+.L_RC4_options_begin:
+	call	.L018pic_point
+.L018pic_point:
+	popl	%eax
+	leal	.L019opts-.L018pic_point(%eax),%eax
+	call	.L020PIC_me_up
+.L020PIC_me_up:
+	popl	%edx
+	leal	_GLOBAL_OFFSET_TABLE_+[.-.L020PIC_me_up](%edx),%edx
+	movl	OPENSSL_ia32cap_P@GOT(%edx),%edx
+	movl	(%edx),%edx
+	btl	$20,%edx
+	jc	.L0211xchar
+	btl	$26,%edx
+	jnc	.L022ret
+	addl	$25,%eax
+	ret
+.L0211xchar:
+	addl	$12,%eax
+.L022ret:
+	ret
+.align	64
+.L019opts:
+.byte	114,99,52,40,52,120,44,105,110,116,41,0
+.byte	114,99,52,40,49,120,44,99,104,97,114,41,0
+.byte	114,99,52,40,56,120,44,109,109,120,41,0
+.byte	82,67,52,32,102,111,114,32,120,56,54,44,32,67,82,89
+.byte	80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
+.byte	111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.align	64
+.size	RC4_options,.-.L_RC4_options_begin
+.comm	OPENSSL_ia32cap_P,8,4
+#else
+.file	"rc4-586.S"
+.text
+.globl	RC4
+.type	RC4,@function
+.align	16
+RC4:
+.L_RC4_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%edi
+	movl	24(%esp),%edx
+	movl	28(%esp),%esi
+	movl	32(%esp),%ebp
+	xorl	%eax,%eax
+	xorl	%ebx,%ebx
+	cmpl	$0,%edx
+	je	.L000abort
+	movb	(%edi),%al
+	movb	4(%edi),%bl
+	addl	$8,%edi
+	leal	(%esi,%edx,1),%ecx
+	subl	%esi,%ebp
+	movl	%ecx,24(%esp)
+	incb	%al
+	cmpl	$-1,256(%edi)
+	je	.L001RC4_CHAR
+	movl	(%edi,%eax,4),%ecx
+	andl	$-4,%edx
+	jz	.L002loop1
+	testl	$-8,%edx
+	movl	%ebp,32(%esp)
+	jz	.L003go4loop4
+	leal	OPENSSL_ia32cap_P,%ebp
+	btl	$26,(%ebp)
+	jnc	.L003go4loop4
+	movl	32(%esp),%ebp
+	andl	$-8,%edx
+	leal	-8(%esi,%edx,1),%edx
+	movl	%edx,-4(%edi)
+	addb	%cl,%bl
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	incl	%eax
+	addl	%ecx,%edx
+	movzbl	%al,%eax
+	movzbl	%dl,%edx
+	movq	(%esi),%mm0
+	movl	(%edi,%eax,4),%ecx
+	movd	(%edi,%edx,4),%mm2
+	jmp	.L004loop_mmx_enter
+.align	16
+.L005loop_mmx:
+	addb	%cl,%bl
+	psllq	$56,%mm1
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	incl	%eax
+	addl	%ecx,%edx
+	movzbl	%al,%eax
+	movzbl	%dl,%edx
+	pxor	%mm1,%mm2
+	movq	(%esi),%mm0
+	movq	%mm2,-8(%ebp,%esi,1)
+	movl	(%edi,%eax,4),%ecx
+	movd	(%edi,%edx,4),%mm2
+.L004loop_mmx_enter:
+	addb	%cl,%bl
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	incl	%eax
+	addl	%ecx,%edx
+	movzbl	%al,%eax
+	movzbl	%dl,%edx
+	pxor	%mm0,%mm2
+	movl	(%edi,%eax,4),%ecx
+	movd	(%edi,%edx,4),%mm1
+	addb	%cl,%bl
+	psllq	$8,%mm1
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	incl	%eax
+	addl	%ecx,%edx
+	movzbl	%al,%eax
+	movzbl	%dl,%edx
+	pxor	%mm1,%mm2
+	movl	(%edi,%eax,4),%ecx
+	movd	(%edi,%edx,4),%mm1
+	addb	%cl,%bl
+	psllq	$16,%mm1
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	incl	%eax
+	addl	%ecx,%edx
+	movzbl	%al,%eax
+	movzbl	%dl,%edx
+	pxor	%mm1,%mm2
+	movl	(%edi,%eax,4),%ecx
+	movd	(%edi,%edx,4),%mm1
+	addb	%cl,%bl
+	psllq	$24,%mm1
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	incl	%eax
+	addl	%ecx,%edx
+	movzbl	%al,%eax
+	movzbl	%dl,%edx
+	pxor	%mm1,%mm2
+	movl	(%edi,%eax,4),%ecx
+	movd	(%edi,%edx,4),%mm1
+	addb	%cl,%bl
+	psllq	$32,%mm1
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	incl	%eax
+	addl	%ecx,%edx
+	movzbl	%al,%eax
+	movzbl	%dl,%edx
+	pxor	%mm1,%mm2
+	movl	(%edi,%eax,4),%ecx
+	movd	(%edi,%edx,4),%mm1
+	addb	%cl,%bl
+	psllq	$40,%mm1
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	incl	%eax
+	addl	%ecx,%edx
+	movzbl	%al,%eax
+	movzbl	%dl,%edx
+	pxor	%mm1,%mm2
+	movl	(%edi,%eax,4),%ecx
+	movd	(%edi,%edx,4),%mm1
+	addb	%cl,%bl
+	psllq	$48,%mm1
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	incl	%eax
+	addl	%ecx,%edx
+	movzbl	%al,%eax
+	movzbl	%dl,%edx
+	pxor	%mm1,%mm2
+	movl	(%edi,%eax,4),%ecx
+	movd	(%edi,%edx,4),%mm1
+	movl	%ebx,%edx
+	xorl	%ebx,%ebx
+	movb	%dl,%bl
+	cmpl	-4(%edi),%esi
+	leal	8(%esi),%esi
+	jb	.L005loop_mmx
+	psllq	$56,%mm1
+	pxor	%mm1,%mm2
+	movq	%mm2,-8(%ebp,%esi,1)
+	emms
+	cmpl	24(%esp),%esi
+	je	.L006done
+	jmp	.L002loop1
+.align	16
+.L003go4loop4:
+	leal	-4(%esi,%edx,1),%edx
+	movl	%edx,28(%esp)
+.L007loop4:
+	addb	%cl,%bl
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	addl	%ecx,%edx
+	incb	%al
+	andl	$255,%edx
+	movl	(%edi,%eax,4),%ecx
+	movl	(%edi,%edx,4),%ebp
+	addb	%cl,%bl
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	addl	%ecx,%edx
+	incb	%al
+	andl	$255,%edx
+	rorl	$8,%ebp
+	movl	(%edi,%eax,4),%ecx
+	orl	(%edi,%edx,4),%ebp
+	addb	%cl,%bl
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	addl	%ecx,%edx
+	incb	%al
+	andl	$255,%edx
+	rorl	$8,%ebp
+	movl	(%edi,%eax,4),%ecx
+	orl	(%edi,%edx,4),%ebp
+	addb	%cl,%bl
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	addl	%ecx,%edx
+	incb	%al
+	andl	$255,%edx
+	rorl	$8,%ebp
+	movl	32(%esp),%ecx
+	orl	(%edi,%edx,4),%ebp
+	rorl	$8,%ebp
+	xorl	(%esi),%ebp
+	cmpl	28(%esp),%esi
+	movl	%ebp,(%ecx,%esi,1)
+	leal	4(%esi),%esi
+	movl	(%edi,%eax,4),%ecx
+	jb	.L007loop4
+	cmpl	24(%esp),%esi
+	je	.L006done
+	movl	32(%esp),%ebp
+.align	16
+.L002loop1:
+	addb	%cl,%bl
+	movl	(%edi,%ebx,4),%edx
+	movl	%ecx,(%edi,%ebx,4)
+	movl	%edx,(%edi,%eax,4)
+	addl	%ecx,%edx
+	incb	%al
+	andl	$255,%edx
+	movl	(%edi,%edx,4),%edx
+	xorb	(%esi),%dl
+	leal	1(%esi),%esi
+	movl	(%edi,%eax,4),%ecx
+	cmpl	24(%esp),%esi
+	movb	%dl,-1(%ebp,%esi,1)
+	jb	.L002loop1
+	jmp	.L006done
+.align	16
+.L001RC4_CHAR:
+	movzbl	(%edi,%eax,1),%ecx
+.L008cloop1:
+	addb	%cl,%bl
+	movzbl	(%edi,%ebx,1),%edx
+	movb	%cl,(%edi,%ebx,1)
+	movb	%dl,(%edi,%eax,1)
+	addb	%cl,%dl
+	movzbl	(%edi,%edx,1),%edx
+	addb	$1,%al
+	xorb	(%esi),%dl
+	leal	1(%esi),%esi
+	movzbl	(%edi,%eax,1),%ecx
+	cmpl	24(%esp),%esi
+	movb	%dl,-1(%ebp,%esi,1)
+	jb	.L008cloop1
+.L006done:
+	decb	%al
+	movl	%ebx,-4(%edi)
+	movb	%al,-8(%edi)
+.L000abort:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	RC4,.-.L_RC4_begin
+.globl	private_RC4_set_key
+.type	private_RC4_set_key,@function
+.align	16
+private_RC4_set_key:
+.L_private_RC4_set_key_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%edi
+	movl	24(%esp),%ebp
+	movl	28(%esp),%esi
+	leal	OPENSSL_ia32cap_P,%edx
+	leal	8(%edi),%edi
+	leal	(%esi,%ebp,1),%esi
+	negl	%ebp
+	xorl	%eax,%eax
+	movl	%ebp,-4(%edi)
+	btl	$20,(%edx)
+	jc	.L009c1stloop
+.align	16
+.L010w1stloop:
+	movl	%eax,(%edi,%eax,4)
+	addb	$1,%al
+	jnc	.L010w1stloop
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+.align	16
+.L011w2ndloop:
+	movl	(%edi,%ecx,4),%eax
+	addb	(%esi,%ebp,1),%dl
+	addb	%al,%dl
+	addl	$1,%ebp
+	movl	(%edi,%edx,4),%ebx
+	jnz	.L012wnowrap
+	movl	-4(%edi),%ebp
+.L012wnowrap:
+	movl	%eax,(%edi,%edx,4)
+	movl	%ebx,(%edi,%ecx,4)
+	addb	$1,%cl
+	jnc	.L011w2ndloop
+	jmp	.L013exit
+.align	16
+.L009c1stloop:
+	movb	%al,(%edi,%eax,1)
+	addb	$1,%al
+	jnc	.L009c1stloop
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+	xorl	%ebx,%ebx
+.align	16
+.L014c2ndloop:
+	movb	(%edi,%ecx,1),%al
+	addb	(%esi,%ebp,1),%dl
+	addb	%al,%dl
+	addl	$1,%ebp
+	movb	(%edi,%edx,1),%bl
+	jnz	.L015cnowrap
+	movl	-4(%edi),%ebp
+.L015cnowrap:
+	movb	%al,(%edi,%edx,1)
+	movb	%bl,(%edi,%ecx,1)
+	addb	$1,%cl
+	jnc	.L014c2ndloop
+	movl	$-1,256(%edi)
+.L013exit:
+	xorl	%eax,%eax
+	movl	%eax,-8(%edi)
+	movl	%eax,-4(%edi)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	private_RC4_set_key,.-.L_private_RC4_set_key_begin
+.globl	RC4_options
+.type	RC4_options,@function
+.align	16
+RC4_options:
+.L_RC4_options_begin:
+	call	.L016pic_point
+.L016pic_point:
+	popl	%eax
+	leal	.L017opts-.L016pic_point(%eax),%eax
+	leal	OPENSSL_ia32cap_P,%edx
+	movl	(%edx),%edx
+	btl	$20,%edx
+	jc	.L0181xchar
+	btl	$26,%edx
+	jnc	.L019ret
+	addl	$25,%eax
+	ret
+.L0181xchar:
+	addl	$12,%eax
+.L019ret:
+	ret
+.align	64
+.L017opts:
+.byte	114,99,52,40,52,120,44,105,110,116,41,0
+.byte	114,99,52,40,49,120,44,99,104,97,114,41,0
+.byte	114,99,52,40,56,120,44,109,109,120,41,0
+.byte	82,67,52,32,102,111,114,32,120,56,54,44,32,67,82,89
+.byte	80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
+.byte	111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.align	64
+.size	RC4_options,.-.L_RC4_options_begin
+.comm	OPENSSL_ia32cap_P,8,4
+#endif
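
For cross-checking the RC4 hunk above: the 4x-int, 1x-char and 8x-mmx paths named by the option strings at .L019opts/.L017opts ("rc4(4x,int)", "rc4(1x,char)", "rc4(8x,mmx)") all compute the standard RC4 PRGA; only the S[] element width and the output batching differ. A minimal C sketch of the byte loop the assembly unrolls (illustrative only, not from this tree; names are hypothetical, and the real entry point takes an OpenSSL RC4_KEY):

    #include <stddef.h>

    /* Illustrative state; OpenSSL's real type is RC4_KEY. */
    typedef struct {
            unsigned char x, y;
            unsigned char data[256];
    } rc4_sketch_key;

    static void
    rc4_sketch(rc4_sketch_key *key, size_t len,
        const unsigned char *in, unsigned char *out)
    {
            unsigned char x = key->x, y = key->y, tx, ty;

            while (len--) {
                    x = (unsigned char)(x + 1);     /* incb %al */
                    tx = key->data[x];
                    y = (unsigned char)(y + tx);    /* addb %cl,%bl */
                    ty = key->data[y];
                    key->data[y] = tx;              /* swap S[x] and S[y] */
                    key->data[x] = ty;
                    *out++ = *in++ ^ key->data[(unsigned char)(tx + ty)];
            }
            key->x = x;
            key->y = y;
    }

The #ifdef PIC half differs from the #else half only in how OPENSSL_ia32cap_P is reached: the call/popl pair at .L010PIC_me_up materializes the GOT base so the capability word can be loaded position-independently, while the non-PIC half uses a direct leal OPENSSL_ia32cap_P,%edx.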


Property changes on: trunk/secure/lib/libcrypto/i386/rc4-586.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Deleted: trunk/secure/lib/libcrypto/i386/rc4-586.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/rc4-586.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/rc4-586.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,373 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/rc4-586.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"rc4-586.s"
-.text
-.globl	RC4
-.type	RC4,@function
-.align	16
-RC4:
-.L_RC4_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%edi
-	movl	24(%esp),%edx
-	movl	28(%esp),%esi
-	movl	32(%esp),%ebp
-	xorl	%eax,%eax
-	xorl	%ebx,%ebx
-	cmpl	$0,%edx
-	je	.L000abort
-	movb	(%edi),%al
-	movb	4(%edi),%bl
-	addl	$8,%edi
-	leal	(%esi,%edx,1),%ecx
-	subl	%esi,%ebp
-	movl	%ecx,24(%esp)
-	incb	%al
-	cmpl	$-1,256(%edi)
-	je	.L001RC4_CHAR
-	movl	(%edi,%eax,4),%ecx
-	andl	$-4,%edx
-	jz	.L002loop1
-	testl	$-8,%edx
-	movl	%ebp,32(%esp)
-	jz	.L003go4loop4
-	leal	OPENSSL_ia32cap_P,%ebp
-	btl	$26,(%ebp)
-	jnc	.L003go4loop4
-	movl	32(%esp),%ebp
-	andl	$-8,%edx
-	leal	-8(%esi,%edx,1),%edx
-	movl	%edx,-4(%edi)
-	addb	%cl,%bl
-	movl	(%edi,%ebx,4),%edx
-	movl	%ecx,(%edi,%ebx,4)
-	movl	%edx,(%edi,%eax,4)
-	incl	%eax
-	addl	%ecx,%edx
-	movzbl	%al,%eax
-	movzbl	%dl,%edx
-	movq	(%esi),%mm0
-	movl	(%edi,%eax,4),%ecx
-	movd	(%edi,%edx,4),%mm2
-	jmp	.L004loop_mmx_enter
-.align	16
-.L005loop_mmx:
-	addb	%cl,%bl
-	psllq	$56,%mm1
-	movl	(%edi,%ebx,4),%edx
-	movl	%ecx,(%edi,%ebx,4)
-	movl	%edx,(%edi,%eax,4)
-	incl	%eax
-	addl	%ecx,%edx
-	movzbl	%al,%eax
-	movzbl	%dl,%edx
-	pxor	%mm1,%mm2
-	movq	(%esi),%mm0
-	movq	%mm2,-8(%ebp,%esi,1)
-	movl	(%edi,%eax,4),%ecx
-	movd	(%edi,%edx,4),%mm2
-.L004loop_mmx_enter:
-	addb	%cl,%bl
-	movl	(%edi,%ebx,4),%edx
-	movl	%ecx,(%edi,%ebx,4)
-	movl	%edx,(%edi,%eax,4)
-	incl	%eax
-	addl	%ecx,%edx
-	movzbl	%al,%eax
-	movzbl	%dl,%edx
-	pxor	%mm0,%mm2
-	movl	(%edi,%eax,4),%ecx
-	movd	(%edi,%edx,4),%mm1
-	addb	%cl,%bl
-	psllq	$8,%mm1
-	movl	(%edi,%ebx,4),%edx
-	movl	%ecx,(%edi,%ebx,4)
-	movl	%edx,(%edi,%eax,4)
-	incl	%eax
-	addl	%ecx,%edx
-	movzbl	%al,%eax
-	movzbl	%dl,%edx
-	pxor	%mm1,%mm2
-	movl	(%edi,%eax,4),%ecx
-	movd	(%edi,%edx,4),%mm1
-	addb	%cl,%bl
-	psllq	$16,%mm1
-	movl	(%edi,%ebx,4),%edx
-	movl	%ecx,(%edi,%ebx,4)
-	movl	%edx,(%edi,%eax,4)
-	incl	%eax
-	addl	%ecx,%edx
-	movzbl	%al,%eax
-	movzbl	%dl,%edx
-	pxor	%mm1,%mm2
-	movl	(%edi,%eax,4),%ecx
-	movd	(%edi,%edx,4),%mm1
-	addb	%cl,%bl
-	psllq	$24,%mm1
-	movl	(%edi,%ebx,4),%edx
-	movl	%ecx,(%edi,%ebx,4)
-	movl	%edx,(%edi,%eax,4)
-	incl	%eax
-	addl	%ecx,%edx
-	movzbl	%al,%eax
-	movzbl	%dl,%edx
-	pxor	%mm1,%mm2
-	movl	(%edi,%eax,4),%ecx
-	movd	(%edi,%edx,4),%mm1
-	addb	%cl,%bl
-	psllq	$32,%mm1
-	movl	(%edi,%ebx,4),%edx
-	movl	%ecx,(%edi,%ebx,4)
-	movl	%edx,(%edi,%eax,4)
-	incl	%eax
-	addl	%ecx,%edx
-	movzbl	%al,%eax
-	movzbl	%dl,%edx
-	pxor	%mm1,%mm2
-	movl	(%edi,%eax,4),%ecx
-	movd	(%edi,%edx,4),%mm1
-	addb	%cl,%bl
-	psllq	$40,%mm1
-	movl	(%edi,%ebx,4),%edx
-	movl	%ecx,(%edi,%ebx,4)
-	movl	%edx,(%edi,%eax,4)
-	incl	%eax
-	addl	%ecx,%edx
-	movzbl	%al,%eax
-	movzbl	%dl,%edx
-	pxor	%mm1,%mm2
-	movl	(%edi,%eax,4),%ecx
-	movd	(%edi,%edx,4),%mm1
-	addb	%cl,%bl
-	psllq	$48,%mm1
-	movl	(%edi,%ebx,4),%edx
-	movl	%ecx,(%edi,%ebx,4)
-	movl	%edx,(%edi,%eax,4)
-	incl	%eax
-	addl	%ecx,%edx
-	movzbl	%al,%eax
-	movzbl	%dl,%edx
-	pxor	%mm1,%mm2
-	movl	(%edi,%eax,4),%ecx
-	movd	(%edi,%edx,4),%mm1
-	movl	%ebx,%edx
-	xorl	%ebx,%ebx
-	movb	%dl,%bl
-	cmpl	-4(%edi),%esi
-	leal	8(%esi),%esi
-	jb	.L005loop_mmx
-	psllq	$56,%mm1
-	pxor	%mm1,%mm2
-	movq	%mm2,-8(%ebp,%esi,1)
-	emms
-	cmpl	24(%esp),%esi
-	je	.L006done
-	jmp	.L002loop1
-.align	16
-.L003go4loop4:
-	leal	-4(%esi,%edx,1),%edx
-	movl	%edx,28(%esp)
-.L007loop4:
-	addb	%cl,%bl
-	movl	(%edi,%ebx,4),%edx
-	movl	%ecx,(%edi,%ebx,4)
-	movl	%edx,(%edi,%eax,4)
-	addl	%ecx,%edx
-	incb	%al
-	andl	$255,%edx
-	movl	(%edi,%eax,4),%ecx
-	movl	(%edi,%edx,4),%ebp
-	addb	%cl,%bl
-	movl	(%edi,%ebx,4),%edx
-	movl	%ecx,(%edi,%ebx,4)
-	movl	%edx,(%edi,%eax,4)
-	addl	%ecx,%edx
-	incb	%al
-	andl	$255,%edx
-	rorl	$8,%ebp
-	movl	(%edi,%eax,4),%ecx
-	orl	(%edi,%edx,4),%ebp
-	addb	%cl,%bl
-	movl	(%edi,%ebx,4),%edx
-	movl	%ecx,(%edi,%ebx,4)
-	movl	%edx,(%edi,%eax,4)
-	addl	%ecx,%edx
-	incb	%al
-	andl	$255,%edx
-	rorl	$8,%ebp
-	movl	(%edi,%eax,4),%ecx
-	orl	(%edi,%edx,4),%ebp
-	addb	%cl,%bl
-	movl	(%edi,%ebx,4),%edx
-	movl	%ecx,(%edi,%ebx,4)
-	movl	%edx,(%edi,%eax,4)
-	addl	%ecx,%edx
-	incb	%al
-	andl	$255,%edx
-	rorl	$8,%ebp
-	movl	32(%esp),%ecx
-	orl	(%edi,%edx,4),%ebp
-	rorl	$8,%ebp
-	xorl	(%esi),%ebp
-	cmpl	28(%esp),%esi
-	movl	%ebp,(%ecx,%esi,1)
-	leal	4(%esi),%esi
-	movl	(%edi,%eax,4),%ecx
-	jb	.L007loop4
-	cmpl	24(%esp),%esi
-	je	.L006done
-	movl	32(%esp),%ebp
-.align	16
-.L002loop1:
-	addb	%cl,%bl
-	movl	(%edi,%ebx,4),%edx
-	movl	%ecx,(%edi,%ebx,4)
-	movl	%edx,(%edi,%eax,4)
-	addl	%ecx,%edx
-	incb	%al
-	andl	$255,%edx
-	movl	(%edi,%edx,4),%edx
-	xorb	(%esi),%dl
-	leal	1(%esi),%esi
-	movl	(%edi,%eax,4),%ecx
-	cmpl	24(%esp),%esi
-	movb	%dl,-1(%ebp,%esi,1)
-	jb	.L002loop1
-	jmp	.L006done
-.align	16
-.L001RC4_CHAR:
-	movzbl	(%edi,%eax,1),%ecx
-.L008cloop1:
-	addb	%cl,%bl
-	movzbl	(%edi,%ebx,1),%edx
-	movb	%cl,(%edi,%ebx,1)
-	movb	%dl,(%edi,%eax,1)
-	addb	%cl,%dl
-	movzbl	(%edi,%edx,1),%edx
-	addb	$1,%al
-	xorb	(%esi),%dl
-	leal	1(%esi),%esi
-	movzbl	(%edi,%eax,1),%ecx
-	cmpl	24(%esp),%esi
-	movb	%dl,-1(%ebp,%esi,1)
-	jb	.L008cloop1
-.L006done:
-	decb	%al
-	movl	%ebx,-4(%edi)
-	movb	%al,-8(%edi)
-.L000abort:
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	RC4,.-.L_RC4_begin
-.globl	private_RC4_set_key
-.type	private_RC4_set_key,@function
-.align	16
-private_RC4_set_key:
-.L_private_RC4_set_key_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%edi
-	movl	24(%esp),%ebp
-	movl	28(%esp),%esi
-	leal	OPENSSL_ia32cap_P,%edx
-	leal	8(%edi),%edi
-	leal	(%esi,%ebp,1),%esi
-	negl	%ebp
-	xorl	%eax,%eax
-	movl	%ebp,-4(%edi)
-	btl	$20,(%edx)
-	jc	.L009c1stloop
-.align	16
-.L010w1stloop:
-	movl	%eax,(%edi,%eax,4)
-	addb	$1,%al
-	jnc	.L010w1stloop
-	xorl	%ecx,%ecx
-	xorl	%edx,%edx
-.align	16
-.L011w2ndloop:
-	movl	(%edi,%ecx,4),%eax
-	addb	(%esi,%ebp,1),%dl
-	addb	%al,%dl
-	addl	$1,%ebp
-	movl	(%edi,%edx,4),%ebx
-	jnz	.L012wnowrap
-	movl	-4(%edi),%ebp
-.L012wnowrap:
-	movl	%eax,(%edi,%edx,4)
-	movl	%ebx,(%edi,%ecx,4)
-	addb	$1,%cl
-	jnc	.L011w2ndloop
-	jmp	.L013exit
-.align	16
-.L009c1stloop:
-	movb	%al,(%edi,%eax,1)
-	addb	$1,%al
-	jnc	.L009c1stloop
-	xorl	%ecx,%ecx
-	xorl	%edx,%edx
-	xorl	%ebx,%ebx
-.align	16
-.L014c2ndloop:
-	movb	(%edi,%ecx,1),%al
-	addb	(%esi,%ebp,1),%dl
-	addb	%al,%dl
-	addl	$1,%ebp
-	movb	(%edi,%edx,1),%bl
-	jnz	.L015cnowrap
-	movl	-4(%edi),%ebp
-.L015cnowrap:
-	movb	%al,(%edi,%edx,1)
-	movb	%bl,(%edi,%ecx,1)
-	addb	$1,%cl
-	jnc	.L014c2ndloop
-	movl	$-1,256(%edi)
-.L013exit:
-	xorl	%eax,%eax
-	movl	%eax,-8(%edi)
-	movl	%eax,-4(%edi)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	private_RC4_set_key,.-.L_private_RC4_set_key_begin
-.globl	RC4_options
-.type	RC4_options,@function
-.align	16
-RC4_options:
-.L_RC4_options_begin:
-	call	.L016pic_point
-.L016pic_point:
-	popl	%eax
-	leal	.L017opts-.L016pic_point(%eax),%eax
-	leal	OPENSSL_ia32cap_P,%edx
-	movl	(%edx),%edx
-	btl	$20,%edx
-	jc	.L0181xchar
-	btl	$26,%edx
-	jnc	.L019ret
-	addl	$25,%eax
-	ret
-.L0181xchar:
-	addl	$12,%eax
-.L019ret:
-	ret
-.align	64
-.L017opts:
-.byte	114,99,52,40,52,120,44,105,110,116,41,0
-.byte	114,99,52,40,49,120,44,99,104,97,114,41,0
-.byte	114,99,52,40,56,120,44,109,109,120,41,0
-.byte	82,67,52,32,102,111,114,32,120,56,54,44,32,67,82,89
-.byte	80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
-.byte	111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
-.align	64
-.size	RC4_options,.-.L_RC4_options_begin
-.comm	OPENSSL_ia32cap_P,8,4

Added: trunk/secure/lib/libcrypto/i386/rc5-586.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/rc5-586.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/rc5-586.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,1134 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/rc5-586.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from rc5-586.pl.
+#ifdef PIC
+.file	"rc5-586.S"
+.text
+.globl	RC5_32_encrypt
+.type	RC5_32_encrypt,@function
+.align	16
+RC5_32_encrypt:
+.L_RC5_32_encrypt_begin:
+
+	pushl	%ebp
+	pushl	%esi
+	pushl	%edi
+	movl	16(%esp),%edx
+	movl	20(%esp),%ebp
+
+	movl	(%edx),%edi
+	movl	4(%edx),%esi
+	pushl	%ebx
+	movl	(%ebp),%ebx
+	addl	4(%ebp),%edi
+	addl	8(%ebp),%esi
+	xorl	%esi,%edi
+	movl	12(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	16(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	20(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	24(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	28(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	32(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	36(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	40(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	44(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	48(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	52(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	56(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	60(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	64(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	68(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	72(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	cmpl	$8,%ebx
+	je	.L000rc5_exit
+	xorl	%esi,%edi
+	movl	76(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	80(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	84(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	88(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	92(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	96(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	100(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	104(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	cmpl	$12,%ebx
+	je	.L000rc5_exit
+	xorl	%esi,%edi
+	movl	108(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	112(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	116(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	120(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	124(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	128(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	132(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	136(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+.L000rc5_exit:
+	movl	%edi,(%edx)
+	movl	%esi,4(%edx)
+	popl	%ebx
+	popl	%edi
+	popl	%esi
+	popl	%ebp
+	ret
+.size	RC5_32_encrypt,.-.L_RC5_32_encrypt_begin
+.globl	RC5_32_decrypt
+.type	RC5_32_decrypt,@function
+.align	16
+RC5_32_decrypt:
+.L_RC5_32_decrypt_begin:
+
+	pushl	%ebp
+	pushl	%esi
+	pushl	%edi
+	movl	16(%esp),%edx
+	movl	20(%esp),%ebp
+
+	movl	(%edx),%edi
+	movl	4(%edx),%esi
+	pushl	%ebx
+	movl	(%ebp),%ebx
+	cmpl	$12,%ebx
+	je	.L001rc5_dec_12
+	cmpl	$8,%ebx
+	je	.L002rc5_dec_8
+	movl	136(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	132(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	128(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	124(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	120(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	116(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	112(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	108(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+.L001rc5_dec_12:
+	movl	104(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	100(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	96(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	92(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	88(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	84(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	80(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	76(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+.L002rc5_dec_8:
+	movl	72(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	68(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	64(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	60(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	56(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	52(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	48(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	44(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	40(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	36(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	32(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	28(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	24(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	20(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	16(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	12(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	subl	8(%ebp),%esi
+	subl	4(%ebp),%edi
+.L003rc5_exit:
+	movl	%edi,(%edx)
+	movl	%esi,4(%edx)
+	popl	%ebx
+	popl	%edi
+	popl	%esi
+	popl	%ebp
+	ret
+.size	RC5_32_decrypt,.-.L_RC5_32_decrypt_begin
+.globl	RC5_32_cbc_encrypt
+.type	RC5_32_cbc_encrypt,@function
+.align	16
+RC5_32_cbc_encrypt:
+.L_RC5_32_cbc_encrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%ebp
+
+	movl	36(%esp),%ebx
+	movl	(%ebx),%esi
+	movl	4(%ebx),%edi
+	pushl	%edi
+	pushl	%esi
+	pushl	%edi
+	pushl	%esi
+	movl	%esp,%ebx
+	movl	36(%esp),%esi
+	movl	40(%esp),%edi
+
+	movl	56(%esp),%ecx
+
+	movl	48(%esp),%eax
+	pushl	%eax
+	pushl	%ebx
+	cmpl	$0,%ecx
+	jz	.L004decrypt
+	andl	$4294967288,%ebp
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	jz	.L005encrypt_finish
+.L006encrypt_loop:
+	movl	(%esi),%ecx
+	movl	4(%esi),%edx
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_RC5_32_encrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L006encrypt_loop
+.L005encrypt_finish:
+	movl	52(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L007finish
+	call	.L008PIC_point
+.L008PIC_point:
+	popl	%edx
+	leal	.L009cbc_enc_jmp_table-.L008PIC_point(%edx),%ecx
+	movl	(%ecx,%ebp,4),%ebp
+	addl	%edx,%ebp
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+	jmp	*%ebp
+.L010ej7:
+	movb	6(%esi),%dh
+	shll	$8,%edx
+.L011ej6:
+	movb	5(%esi),%dh
+.L012ej5:
+	movb	4(%esi),%dl
+.L013ej4:
+	movl	(%esi),%ecx
+	jmp	.L014ejend
+.L015ej3:
+	movb	2(%esi),%ch
+	shll	$8,%ecx
+.L016ej2:
+	movb	1(%esi),%ch
+.L017ej1:
+	movb	(%esi),%cl
+.L014ejend:
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_RC5_32_encrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	jmp	.L007finish
+.L004decrypt:
+	andl	$4294967288,%ebp
+	movl	16(%esp),%eax
+	movl	20(%esp),%ebx
+	jz	.L018decrypt_finish
+.L019decrypt_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_RC5_32_decrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	movl	16(%esp),%ecx
+	movl	20(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%ecx,(%edi)
+	movl	%edx,4(%edi)
+	movl	%eax,16(%esp)
+	movl	%ebx,20(%esp)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L019decrypt_loop
+.L018decrypt_finish:
+	movl	52(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L007finish
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_RC5_32_decrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	movl	16(%esp),%ecx
+	movl	20(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+.L020dj7:
+	rorl	$16,%edx
+	movb	%dl,6(%edi)
+	shrl	$16,%edx
+.L021dj6:
+	movb	%dh,5(%edi)
+.L022dj5:
+	movb	%dl,4(%edi)
+.L023dj4:
+	movl	%ecx,(%edi)
+	jmp	.L024djend
+.L025dj3:
+	rorl	$16,%ecx
+	movb	%cl,2(%edi)
+	shll	$16,%ecx
+.L026dj2:
+	movb	%ch,1(%esi)
+.L027dj1:
+	movb	%cl,(%esi)
+.L024djend:
+	jmp	.L007finish
+.L007finish:
+	movl	60(%esp),%ecx
+	addl	$24,%esp
+	movl	%eax,(%ecx)
+	movl	%ebx,4(%ecx)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	64
+.L009cbc_enc_jmp_table:
+.long	0
+.long	.L017ej1-.L008PIC_point
+.long	.L016ej2-.L008PIC_point
+.long	.L015ej3-.L008PIC_point
+.long	.L013ej4-.L008PIC_point
+.long	.L012ej5-.L008PIC_point
+.long	.L011ej6-.L008PIC_point
+.long	.L010ej7-.L008PIC_point
+.align	64
+.size	RC5_32_cbc_encrypt,.-.L_RC5_32_cbc_encrypt_begin
+#else
+.file	"rc5-586.S"
+.text
+.globl	RC5_32_encrypt
+.type	RC5_32_encrypt,@function
+.align	16
+RC5_32_encrypt:
+.L_RC5_32_encrypt_begin:
+
+	pushl	%ebp
+	pushl	%esi
+	pushl	%edi
+	movl	16(%esp),%edx
+	movl	20(%esp),%ebp
+
+	movl	(%edx),%edi
+	movl	4(%edx),%esi
+	pushl	%ebx
+	movl	(%ebp),%ebx
+	addl	4(%ebp),%edi
+	addl	8(%ebp),%esi
+	xorl	%esi,%edi
+	movl	12(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	16(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	20(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	24(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	28(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	32(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	36(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	40(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	44(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	48(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	52(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	56(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	60(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	64(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	68(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	72(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	cmpl	$8,%ebx
+	je	.L000rc5_exit
+	xorl	%esi,%edi
+	movl	76(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	80(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	84(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	88(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	92(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	96(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	100(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	104(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	cmpl	$12,%ebx
+	je	.L000rc5_exit
+	xorl	%esi,%edi
+	movl	108(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	112(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	116(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	120(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	124(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	128(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+	xorl	%esi,%edi
+	movl	132(%ebp),%eax
+	movl	%esi,%ecx
+	roll	%cl,%edi
+	addl	%eax,%edi
+	xorl	%edi,%esi
+	movl	136(%ebp),%eax
+	movl	%edi,%ecx
+	roll	%cl,%esi
+	addl	%eax,%esi
+.L000rc5_exit:
+	movl	%edi,(%edx)
+	movl	%esi,4(%edx)
+	popl	%ebx
+	popl	%edi
+	popl	%esi
+	popl	%ebp
+	ret
+.size	RC5_32_encrypt,.-.L_RC5_32_encrypt_begin
+.globl	RC5_32_decrypt
+.type	RC5_32_decrypt,@function
+.align	16
+RC5_32_decrypt:
+.L_RC5_32_decrypt_begin:
+
+	pushl	%ebp
+	pushl	%esi
+	pushl	%edi
+	movl	16(%esp),%edx
+	movl	20(%esp),%ebp
+
+	movl	(%edx),%edi
+	movl	4(%edx),%esi
+	pushl	%ebx
+	movl	(%ebp),%ebx
+	cmpl	$12,%ebx
+	je	.L001rc5_dec_12
+	cmpl	$8,%ebx
+	je	.L002rc5_dec_8
+	movl	136(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	132(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	128(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	124(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	120(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	116(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	112(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	108(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+.L001rc5_dec_12:
+	movl	104(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	100(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	96(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	92(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	88(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	84(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	80(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	76(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+.L002rc5_dec_8:
+	movl	72(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	68(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	64(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	60(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	56(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	52(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	48(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	44(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	40(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	36(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	32(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	28(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	24(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	20(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	movl	16(%ebp),%eax
+	subl	%eax,%esi
+	movl	%edi,%ecx
+	rorl	%cl,%esi
+	xorl	%edi,%esi
+	movl	12(%ebp),%eax
+	subl	%eax,%edi
+	movl	%esi,%ecx
+	rorl	%cl,%edi
+	xorl	%esi,%edi
+	subl	8(%ebp),%esi
+	subl	4(%ebp),%edi
+.L003rc5_exit:
+	movl	%edi,(%edx)
+	movl	%esi,4(%edx)
+	popl	%ebx
+	popl	%edi
+	popl	%esi
+	popl	%ebp
+	ret
+.size	RC5_32_decrypt,.-.L_RC5_32_decrypt_begin
+.globl	RC5_32_cbc_encrypt
+.type	RC5_32_cbc_encrypt,@function
+.align	16
+RC5_32_cbc_encrypt:
+.L_RC5_32_cbc_encrypt_begin:
+
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	28(%esp),%ebp
+
+	movl	36(%esp),%ebx
+	movl	(%ebx),%esi
+	movl	4(%ebx),%edi
+	pushl	%edi
+	pushl	%esi
+	pushl	%edi
+	pushl	%esi
+	movl	%esp,%ebx
+	movl	36(%esp),%esi
+	movl	40(%esp),%edi
+
+	movl	56(%esp),%ecx
+
+	movl	48(%esp),%eax
+	pushl	%eax
+	pushl	%ebx
+	cmpl	$0,%ecx
+	jz	.L004decrypt
+	andl	$4294967288,%ebp
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	jz	.L005encrypt_finish
+.L006encrypt_loop:
+	movl	(%esi),%ecx
+	movl	4(%esi),%edx
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_RC5_32_encrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L006encrypt_loop
+.L005encrypt_finish:
+	movl	52(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L007finish
+	call	.L008PIC_point
+.L008PIC_point:
+	popl	%edx
+	leal	.L009cbc_enc_jmp_table-.L008PIC_point(%edx),%ecx
+	movl	(%ecx,%ebp,4),%ebp
+	addl	%edx,%ebp
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+	jmp	*%ebp
+.L010ej7:
+	movb	6(%esi),%dh
+	shll	$8,%edx
+.L011ej6:
+	movb	5(%esi),%dh
+.L012ej5:
+	movb	4(%esi),%dl
+.L013ej4:
+	movl	(%esi),%ecx
+	jmp	.L014ejend
+.L015ej3:
+	movb	2(%esi),%ch
+	shll	$8,%ecx
+.L016ej2:
+	movb	1(%esi),%ch
+.L017ej1:
+	movb	(%esi),%cl
+.L014ejend:
+	xorl	%ecx,%eax
+	xorl	%edx,%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_RC5_32_encrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	movl	%eax,(%edi)
+	movl	%ebx,4(%edi)
+	jmp	.L007finish
+.L004decrypt:
+	andl	$4294967288,%ebp
+	movl	16(%esp),%eax
+	movl	20(%esp),%ebx
+	jz	.L018decrypt_finish
+.L019decrypt_loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_RC5_32_decrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	movl	16(%esp),%ecx
+	movl	20(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%ecx,(%edi)
+	movl	%edx,4(%edi)
+	movl	%eax,16(%esp)
+	movl	%ebx,20(%esp)
+	addl	$8,%esi
+	addl	$8,%edi
+	subl	$8,%ebp
+	jnz	.L019decrypt_loop
+.L018decrypt_finish:
+	movl	52(%esp),%ebp
+	andl	$7,%ebp
+	jz	.L007finish
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	call	.L_RC5_32_decrypt_begin
+	movl	8(%esp),%eax
+	movl	12(%esp),%ebx
+	movl	16(%esp),%ecx
+	movl	20(%esp),%edx
+	xorl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+.L020dj7:
+	rorl	$16,%edx
+	movb	%dl,6(%edi)
+	shrl	$16,%edx
+.L021dj6:
+	movb	%dh,5(%edi)
+.L022dj5:
+	movb	%dl,4(%edi)
+.L023dj4:
+	movl	%ecx,(%edi)
+	jmp	.L024djend
+.L025dj3:
+	rorl	$16,%ecx
+	movb	%cl,2(%edi)
+	shll	$16,%ecx
+.L026dj2:
+	movb	%ch,1(%esi)
+.L027dj1:
+	movb	%cl,(%esi)
+.L024djend:
+	jmp	.L007finish
+.L007finish:
+	movl	60(%esp),%ecx
+	addl	$24,%esp
+	movl	%eax,(%ecx)
+	movl	%ebx,4(%ecx)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	64
+.L009cbc_enc_jmp_table:
+.long	0
+.long	.L017ej1-.L008PIC_point
+.long	.L016ej2-.L008PIC_point
+.long	.L015ej3-.L008PIC_point
+.long	.L013ej4-.L008PIC_point
+.long	.L012ej5-.L008PIC_point
+.long	.L011ej6-.L008PIC_point
+.long	.L010ej7-.L008PIC_point
+.align	64
+.size	RC5_32_cbc_encrypt,.-.L_RC5_32_cbc_encrypt_begin
+#endif
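
The RC5 hunk above unrolls the textbook RC5-32 round function: the cmpl $8 and cmpl $12 early exits cover the 8- and 12-round schedules, and since the word at (%ebp) holds the round count, S[0] of the key schedule sits at 4(%ebp). A hedged C sketch of the encrypt path (illustrative only, not from this tree; names are hypothetical):

    #include <stdint.h>

    /* Rotate left; the masks make any 32-bit count safe. */
    #define ROTL32(v, n) \
            (((v) << ((n) & 31)) | ((v) >> ((32 - (n)) & 31)))

    /* d[] holds the two data words, S the 2*(r+1)-word schedule,
     * r the round count (8, 12 or 16 in the code above). */
    static void
    rc5_32_encrypt_sketch(uint32_t d[2], const uint32_t *S, int r)
    {
            uint32_t a = d[0] + S[0];       /* addl 4(%ebp),%edi */
            uint32_t b = d[1] + S[1];       /* addl 8(%ebp),%esi */

            for (int i = 1; i <= r; i++) {
                    a = ROTL32(a ^ b, b) + S[2 * i];
                    b = ROTL32(b ^ a, a) + S[2 * i + 1];
            }
            d[0] = a;
            d[1] = b;
    }

RC5_32_decrypt walks the same schedule backwards with subl/rorl, which is why it jumps into the middle of the unrolled body via .L001rc5_dec_12 and .L002rc5_dec_8.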


Property changes on: trunk/secure/lib/libcrypto/i386/rc5-586.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Deleted: trunk/secure/lib/libcrypto/i386/rc5-586.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/rc5-586.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/rc5-586.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,565 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/rc5-586.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"rc5-586.s"
-.text
-.globl	RC5_32_encrypt
-.type	RC5_32_encrypt,@function
-.align	16
-RC5_32_encrypt:
-.L_RC5_32_encrypt_begin:
-
-	pushl	%ebp
-	pushl	%esi
-	pushl	%edi
-	movl	16(%esp),%edx
-	movl	20(%esp),%ebp
-
-	movl	(%edx),%edi
-	movl	4(%edx),%esi
-	pushl	%ebx
-	movl	(%ebp),%ebx
-	addl	4(%ebp),%edi
-	addl	8(%ebp),%esi
-	xorl	%esi,%edi
-	movl	12(%ebp),%eax
-	movl	%esi,%ecx
-	roll	%cl,%edi
-	addl	%eax,%edi
-	xorl	%edi,%esi
-	movl	16(%ebp),%eax
-	movl	%edi,%ecx
-	roll	%cl,%esi
-	addl	%eax,%esi
-	xorl	%esi,%edi
-	movl	20(%ebp),%eax
-	movl	%esi,%ecx
-	roll	%cl,%edi
-	addl	%eax,%edi
-	xorl	%edi,%esi
-	movl	24(%ebp),%eax
-	movl	%edi,%ecx
-	roll	%cl,%esi
-	addl	%eax,%esi
-	xorl	%esi,%edi
-	movl	28(%ebp),%eax
-	movl	%esi,%ecx
-	roll	%cl,%edi
-	addl	%eax,%edi
-	xorl	%edi,%esi
-	movl	32(%ebp),%eax
-	movl	%edi,%ecx
-	roll	%cl,%esi
-	addl	%eax,%esi
-	xorl	%esi,%edi
-	movl	36(%ebp),%eax
-	movl	%esi,%ecx
-	roll	%cl,%edi
-	addl	%eax,%edi
-	xorl	%edi,%esi
-	movl	40(%ebp),%eax
-	movl	%edi,%ecx
-	roll	%cl,%esi
-	addl	%eax,%esi
-	xorl	%esi,%edi
-	movl	44(%ebp),%eax
-	movl	%esi,%ecx
-	roll	%cl,%edi
-	addl	%eax,%edi
-	xorl	%edi,%esi
-	movl	48(%ebp),%eax
-	movl	%edi,%ecx
-	roll	%cl,%esi
-	addl	%eax,%esi
-	xorl	%esi,%edi
-	movl	52(%ebp),%eax
-	movl	%esi,%ecx
-	roll	%cl,%edi
-	addl	%eax,%edi
-	xorl	%edi,%esi
-	movl	56(%ebp),%eax
-	movl	%edi,%ecx
-	roll	%cl,%esi
-	addl	%eax,%esi
-	xorl	%esi,%edi
-	movl	60(%ebp),%eax
-	movl	%esi,%ecx
-	roll	%cl,%edi
-	addl	%eax,%edi
-	xorl	%edi,%esi
-	movl	64(%ebp),%eax
-	movl	%edi,%ecx
-	roll	%cl,%esi
-	addl	%eax,%esi
-	xorl	%esi,%edi
-	movl	68(%ebp),%eax
-	movl	%esi,%ecx
-	roll	%cl,%edi
-	addl	%eax,%edi
-	xorl	%edi,%esi
-	movl	72(%ebp),%eax
-	movl	%edi,%ecx
-	roll	%cl,%esi
-	addl	%eax,%esi
-	cmpl	$8,%ebx
-	je	.L000rc5_exit
-	xorl	%esi,%edi
-	movl	76(%ebp),%eax
-	movl	%esi,%ecx
-	roll	%cl,%edi
-	addl	%eax,%edi
-	xorl	%edi,%esi
-	movl	80(%ebp),%eax
-	movl	%edi,%ecx
-	roll	%cl,%esi
-	addl	%eax,%esi
-	xorl	%esi,%edi
-	movl	84(%ebp),%eax
-	movl	%esi,%ecx
-	roll	%cl,%edi
-	addl	%eax,%edi
-	xorl	%edi,%esi
-	movl	88(%ebp),%eax
-	movl	%edi,%ecx
-	roll	%cl,%esi
-	addl	%eax,%esi
-	xorl	%esi,%edi
-	movl	92(%ebp),%eax
-	movl	%esi,%ecx
-	roll	%cl,%edi
-	addl	%eax,%edi
-	xorl	%edi,%esi
-	movl	96(%ebp),%eax
-	movl	%edi,%ecx
-	roll	%cl,%esi
-	addl	%eax,%esi
-	xorl	%esi,%edi
-	movl	100(%ebp),%eax
-	movl	%esi,%ecx
-	roll	%cl,%edi
-	addl	%eax,%edi
-	xorl	%edi,%esi
-	movl	104(%ebp),%eax
-	movl	%edi,%ecx
-	roll	%cl,%esi
-	addl	%eax,%esi
-	cmpl	$12,%ebx
-	je	.L000rc5_exit
-	xorl	%esi,%edi
-	movl	108(%ebp),%eax
-	movl	%esi,%ecx
-	roll	%cl,%edi
-	addl	%eax,%edi
-	xorl	%edi,%esi
-	movl	112(%ebp),%eax
-	movl	%edi,%ecx
-	roll	%cl,%esi
-	addl	%eax,%esi
-	xorl	%esi,%edi
-	movl	116(%ebp),%eax
-	movl	%esi,%ecx
-	roll	%cl,%edi
-	addl	%eax,%edi
-	xorl	%edi,%esi
-	movl	120(%ebp),%eax
-	movl	%edi,%ecx
-	roll	%cl,%esi
-	addl	%eax,%esi
-	xorl	%esi,%edi
-	movl	124(%ebp),%eax
-	movl	%esi,%ecx
-	roll	%cl,%edi
-	addl	%eax,%edi
-	xorl	%edi,%esi
-	movl	128(%ebp),%eax
-	movl	%edi,%ecx
-	roll	%cl,%esi
-	addl	%eax,%esi
-	xorl	%esi,%edi
-	movl	132(%ebp),%eax
-	movl	%esi,%ecx
-	roll	%cl,%edi
-	addl	%eax,%edi
-	xorl	%edi,%esi
-	movl	136(%ebp),%eax
-	movl	%edi,%ecx
-	roll	%cl,%esi
-	addl	%eax,%esi
-.L000rc5_exit:
-	movl	%edi,(%edx)
-	movl	%esi,4(%edx)
-	popl	%ebx
-	popl	%edi
-	popl	%esi
-	popl	%ebp
-	ret
-.size	RC5_32_encrypt,.-.L_RC5_32_encrypt_begin
-.globl	RC5_32_decrypt
-.type	RC5_32_decrypt,@function
-.align	16
-RC5_32_decrypt:
-.L_RC5_32_decrypt_begin:
-
-	pushl	%ebp
-	pushl	%esi
-	pushl	%edi
-	movl	16(%esp),%edx
-	movl	20(%esp),%ebp
-
-	movl	(%edx),%edi
-	movl	4(%edx),%esi
-	pushl	%ebx
-	movl	(%ebp),%ebx
-	cmpl	$12,%ebx
-	je	.L001rc5_dec_12
-	cmpl	$8,%ebx
-	je	.L002rc5_dec_8
-	movl	136(%ebp),%eax
-	subl	%eax,%esi
-	movl	%edi,%ecx
-	rorl	%cl,%esi
-	xorl	%edi,%esi
-	movl	132(%ebp),%eax
-	subl	%eax,%edi
-	movl	%esi,%ecx
-	rorl	%cl,%edi
-	xorl	%esi,%edi
-	movl	128(%ebp),%eax
-	subl	%eax,%esi
-	movl	%edi,%ecx
-	rorl	%cl,%esi
-	xorl	%edi,%esi
-	movl	124(%ebp),%eax
-	subl	%eax,%edi
-	movl	%esi,%ecx
-	rorl	%cl,%edi
-	xorl	%esi,%edi
-	movl	120(%ebp),%eax
-	subl	%eax,%esi
-	movl	%edi,%ecx
-	rorl	%cl,%esi
-	xorl	%edi,%esi
-	movl	116(%ebp),%eax
-	subl	%eax,%edi
-	movl	%esi,%ecx
-	rorl	%cl,%edi
-	xorl	%esi,%edi
-	movl	112(%ebp),%eax
-	subl	%eax,%esi
-	movl	%edi,%ecx
-	rorl	%cl,%esi
-	xorl	%edi,%esi
-	movl	108(%ebp),%eax
-	subl	%eax,%edi
-	movl	%esi,%ecx
-	rorl	%cl,%edi
-	xorl	%esi,%edi
-.L001rc5_dec_12:
-	movl	104(%ebp),%eax
-	subl	%eax,%esi
-	movl	%edi,%ecx
-	rorl	%cl,%esi
-	xorl	%edi,%esi
-	movl	100(%ebp),%eax
-	subl	%eax,%edi
-	movl	%esi,%ecx
-	rorl	%cl,%edi
-	xorl	%esi,%edi
-	movl	96(%ebp),%eax
-	subl	%eax,%esi
-	movl	%edi,%ecx
-	rorl	%cl,%esi
-	xorl	%edi,%esi
-	movl	92(%ebp),%eax
-	subl	%eax,%edi
-	movl	%esi,%ecx
-	rorl	%cl,%edi
-	xorl	%esi,%edi
-	movl	88(%ebp),%eax
-	subl	%eax,%esi
-	movl	%edi,%ecx
-	rorl	%cl,%esi
-	xorl	%edi,%esi
-	movl	84(%ebp),%eax
-	subl	%eax,%edi
-	movl	%esi,%ecx
-	rorl	%cl,%edi
-	xorl	%esi,%edi
-	movl	80(%ebp),%eax
-	subl	%eax,%esi
-	movl	%edi,%ecx
-	rorl	%cl,%esi
-	xorl	%edi,%esi
-	movl	76(%ebp),%eax
-	subl	%eax,%edi
-	movl	%esi,%ecx
-	rorl	%cl,%edi
-	xorl	%esi,%edi
-.L002rc5_dec_8:
-	movl	72(%ebp),%eax
-	subl	%eax,%esi
-	movl	%edi,%ecx
-	rorl	%cl,%esi
-	xorl	%edi,%esi
-	movl	68(%ebp),%eax
-	subl	%eax,%edi
-	movl	%esi,%ecx
-	rorl	%cl,%edi
-	xorl	%esi,%edi
-	movl	64(%ebp),%eax
-	subl	%eax,%esi
-	movl	%edi,%ecx
-	rorl	%cl,%esi
-	xorl	%edi,%esi
-	movl	60(%ebp),%eax
-	subl	%eax,%edi
-	movl	%esi,%ecx
-	rorl	%cl,%edi
-	xorl	%esi,%edi
-	movl	56(%ebp),%eax
-	subl	%eax,%esi
-	movl	%edi,%ecx
-	rorl	%cl,%esi
-	xorl	%edi,%esi
-	movl	52(%ebp),%eax
-	subl	%eax,%edi
-	movl	%esi,%ecx
-	rorl	%cl,%edi
-	xorl	%esi,%edi
-	movl	48(%ebp),%eax
-	subl	%eax,%esi
-	movl	%edi,%ecx
-	rorl	%cl,%esi
-	xorl	%edi,%esi
-	movl	44(%ebp),%eax
-	subl	%eax,%edi
-	movl	%esi,%ecx
-	rorl	%cl,%edi
-	xorl	%esi,%edi
-	movl	40(%ebp),%eax
-	subl	%eax,%esi
-	movl	%edi,%ecx
-	rorl	%cl,%esi
-	xorl	%edi,%esi
-	movl	36(%ebp),%eax
-	subl	%eax,%edi
-	movl	%esi,%ecx
-	rorl	%cl,%edi
-	xorl	%esi,%edi
-	movl	32(%ebp),%eax
-	subl	%eax,%esi
-	movl	%edi,%ecx
-	rorl	%cl,%esi
-	xorl	%edi,%esi
-	movl	28(%ebp),%eax
-	subl	%eax,%edi
-	movl	%esi,%ecx
-	rorl	%cl,%edi
-	xorl	%esi,%edi
-	movl	24(%ebp),%eax
-	subl	%eax,%esi
-	movl	%edi,%ecx
-	rorl	%cl,%esi
-	xorl	%edi,%esi
-	movl	20(%ebp),%eax
-	subl	%eax,%edi
-	movl	%esi,%ecx
-	rorl	%cl,%edi
-	xorl	%esi,%edi
-	movl	16(%ebp),%eax
-	subl	%eax,%esi
-	movl	%edi,%ecx
-	rorl	%cl,%esi
-	xorl	%edi,%esi
-	movl	12(%ebp),%eax
-	subl	%eax,%edi
-	movl	%esi,%ecx
-	rorl	%cl,%edi
-	xorl	%esi,%edi
-	subl	8(%ebp),%esi
-	subl	4(%ebp),%edi
-.L003rc5_exit:
-	movl	%edi,(%edx)
-	movl	%esi,4(%edx)
-	popl	%ebx
-	popl	%edi
-	popl	%esi
-	popl	%ebp
-	ret
-.size	RC5_32_decrypt,.-.L_RC5_32_decrypt_begin
-.globl	RC5_32_cbc_encrypt
-.type	RC5_32_cbc_encrypt,@function
-.align	16
-RC5_32_cbc_encrypt:
-.L_RC5_32_cbc_encrypt_begin:
-
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	28(%esp),%ebp
-
-	movl	36(%esp),%ebx
-	movl	(%ebx),%esi
-	movl	4(%ebx),%edi
-	pushl	%edi
-	pushl	%esi
-	pushl	%edi
-	pushl	%esi
-	movl	%esp,%ebx
-	movl	36(%esp),%esi
-	movl	40(%esp),%edi
-
-	movl	56(%esp),%ecx
-
-	movl	48(%esp),%eax
-	pushl	%eax
-	pushl	%ebx
-	cmpl	$0,%ecx
-	jz	.L004decrypt
-	andl	$4294967288,%ebp
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	jz	.L005encrypt_finish
-.L006encrypt_loop:
-	movl	(%esi),%ecx
-	movl	4(%esi),%edx
-	xorl	%ecx,%eax
-	xorl	%edx,%ebx
-	movl	%eax,8(%esp)
-	movl	%ebx,12(%esp)
-	call	.L_RC5_32_encrypt_begin
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	addl	$8,%esi
-	addl	$8,%edi
-	subl	$8,%ebp
-	jnz	.L006encrypt_loop
-.L005encrypt_finish:
-	movl	52(%esp),%ebp
-	andl	$7,%ebp
-	jz	.L007finish
-	call	.L008PIC_point
-.L008PIC_point:
-	popl	%edx
-	leal	.L009cbc_enc_jmp_table-.L008PIC_point(%edx),%ecx
-	movl	(%ecx,%ebp,4),%ebp
-	addl	%edx,%ebp
-	xorl	%ecx,%ecx
-	xorl	%edx,%edx
-	jmp	*%ebp
-.L010ej7:
-	movb	6(%esi),%dh
-	shll	$8,%edx
-.L011ej6:
-	movb	5(%esi),%dh
-.L012ej5:
-	movb	4(%esi),%dl
-.L013ej4:
-	movl	(%esi),%ecx
-	jmp	.L014ejend
-.L015ej3:
-	movb	2(%esi),%ch
-	shll	$8,%ecx
-.L016ej2:
-	movb	1(%esi),%ch
-.L017ej1:
-	movb	(%esi),%cl
-.L014ejend:
-	xorl	%ecx,%eax
-	xorl	%edx,%ebx
-	movl	%eax,8(%esp)
-	movl	%ebx,12(%esp)
-	call	.L_RC5_32_encrypt_begin
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	movl	%eax,(%edi)
-	movl	%ebx,4(%edi)
-	jmp	.L007finish
-.L004decrypt:
-	andl	$4294967288,%ebp
-	movl	16(%esp),%eax
-	movl	20(%esp),%ebx
-	jz	.L018decrypt_finish
-.L019decrypt_loop:
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	%eax,8(%esp)
-	movl	%ebx,12(%esp)
-	call	.L_RC5_32_decrypt_begin
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	movl	16(%esp),%ecx
-	movl	20(%esp),%edx
-	xorl	%eax,%ecx
-	xorl	%ebx,%edx
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	%ecx,(%edi)
-	movl	%edx,4(%edi)
-	movl	%eax,16(%esp)
-	movl	%ebx,20(%esp)
-	addl	$8,%esi
-	addl	$8,%edi
-	subl	$8,%ebp
-	jnz	.L019decrypt_loop
-.L018decrypt_finish:
-	movl	52(%esp),%ebp
-	andl	$7,%ebp
-	jz	.L007finish
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	%eax,8(%esp)
-	movl	%ebx,12(%esp)
-	call	.L_RC5_32_decrypt_begin
-	movl	8(%esp),%eax
-	movl	12(%esp),%ebx
-	movl	16(%esp),%ecx
-	movl	20(%esp),%edx
-	xorl	%eax,%ecx
-	xorl	%ebx,%edx
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-.L020dj7:
-	rorl	$16,%edx
-	movb	%dl,6(%edi)
-	shrl	$16,%edx
-.L021dj6:
-	movb	%dh,5(%edi)
-.L022dj5:
-	movb	%dl,4(%edi)
-.L023dj4:
-	movl	%ecx,(%edi)
-	jmp	.L024djend
-.L025dj3:
-	rorl	$16,%ecx
-	movb	%cl,2(%edi)
-	shll	$16,%ecx
-.L026dj2:
-	movb	%ch,1(%esi)
-.L027dj1:
-	movb	%cl,(%esi)
-.L024djend:
-	jmp	.L007finish
-.L007finish:
-	movl	60(%esp),%ecx
-	addl	$24,%esp
-	movl	%eax,(%ecx)
-	movl	%ebx,4(%ecx)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.align	64
-.L009cbc_enc_jmp_table:
-.long	0
-.long	.L017ej1-.L008PIC_point
-.long	.L016ej2-.L008PIC_point
-.long	.L015ej3-.L008PIC_point
-.long	.L013ej4-.L008PIC_point
-.long	.L012ej5-.L008PIC_point
-.long	.L011ej6-.L008PIC_point
-.long	.L010ej7-.L008PIC_point
-.align	64
-.size	RC5_32_cbc_encrypt,.-.L_RC5_32_cbc_encrypt_begin

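The rmd-586.S hunk that follows unrolls the RIPEMD-160 compression function, 80 steps on each of the two parallel lines; the leal constants visible in it (1518500249, 1859775393, 2400959708) are the round keys 0x5A827999, 0x6ED9EBA1 and 0x8F1BBCDC. Each unrolled group is an instance of the step sketched below, where f is the per-round boolean function already evaluated on the (b,c,d) chaining words (illustrative C only, not from this tree; names are hypothetical):

    #include <stdint.h>

    /* Rotate left by a constant 0 < n < 32. */
    #define ROL(v, n) (((v) << (n)) | ((v) >> (32 - (n))))

    /* One RIPEMD-160 step: x is the message word, K the round
     * constant, s the per-step rotate amount from the schedule. */
    static void
    rmd160_step(uint32_t *a, uint32_t *c, uint32_t e,
        uint32_t f, uint32_t x, uint32_t K, int s)
    {
            *a = ROL(*a + f + x + K, s) + e;  /* leal K(...); roll $s; addl */
            *c = ROL(*c, 10);                 /* the roll $10 in every step */
    }

The movl $-1 / subl sequences in the hunk build the bitwise complements that the round functions (x&y)|(~x&z) and (x|~y)^z need without a notl, and the register renaming across steps replaces the a/b/c/d/e rotation a C loop would do.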
Added: trunk/secure/lib/libcrypto/i386/rmd-586.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/rmd-586.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/rmd-586.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,3936 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/rmd-586.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from rmd-586.pl.
+#ifdef PIC
+.file	"rmd-586.S"
+.text
+.globl	ripemd160_block_asm_data_order
+.type	ripemd160_block_asm_data_order,@function
+.align	16
+ripemd160_block_asm_data_order:
+.L_ripemd160_block_asm_data_order_begin:
+	movl	4(%esp),%edx
+	movl	8(%esp),%eax
+	pushl	%esi
+	movl	(%edx),%ecx
+	pushl	%edi
+	movl	4(%edx),%esi
+	pushl	%ebp
+	movl	8(%edx),%edi
+	pushl	%ebx
+	subl	$108,%esp
+.L000start:
+
+	movl	(%eax),%ebx
+	movl	4(%eax),%ebp
+	movl	%ebx,(%esp)
+	movl	%ebp,4(%esp)
+	movl	8(%eax),%ebx
+	movl	12(%eax),%ebp
+	movl	%ebx,8(%esp)
+	movl	%ebp,12(%esp)
+	movl	16(%eax),%ebx
+	movl	20(%eax),%ebp
+	movl	%ebx,16(%esp)
+	movl	%ebp,20(%esp)
+	movl	24(%eax),%ebx
+	movl	28(%eax),%ebp
+	movl	%ebx,24(%esp)
+	movl	%ebp,28(%esp)
+	movl	32(%eax),%ebx
+	movl	36(%eax),%ebp
+	movl	%ebx,32(%esp)
+	movl	%ebp,36(%esp)
+	movl	40(%eax),%ebx
+	movl	44(%eax),%ebp
+	movl	%ebx,40(%esp)
+	movl	%ebp,44(%esp)
+	movl	48(%eax),%ebx
+	movl	52(%eax),%ebp
+	movl	%ebx,48(%esp)
+	movl	%ebp,52(%esp)
+	movl	56(%eax),%ebx
+	movl	60(%eax),%ebp
+	movl	%ebx,56(%esp)
+	movl	%ebp,60(%esp)
+	movl	%edi,%eax
+	movl	12(%edx),%ebx
+	movl	16(%edx),%ebp
+
+	xorl	%ebx,%eax
+	movl	(%esp),%edx
+	xorl	%esi,%eax
+	addl	%edx,%ecx
+	roll	$10,%edi
+	addl	%eax,%ecx
+	movl	%esi,%eax
+	roll	$11,%ecx
+	addl	%ebp,%ecx
+
+	xorl	%edi,%eax
+	movl	4(%esp),%edx
+	xorl	%ecx,%eax
+	addl	%eax,%ebp
+	movl	%ecx,%eax
+	roll	$10,%esi
+	addl	%edx,%ebp
+	xorl	%esi,%eax
+	roll	$14,%ebp
+	addl	%ebx,%ebp
+
+	movl	8(%esp),%edx
+	xorl	%ebp,%eax
+	addl	%edx,%ebx
+	roll	$10,%ecx
+	addl	%eax,%ebx
+	movl	%ebp,%eax
+	roll	$15,%ebx
+	addl	%edi,%ebx
+
+	xorl	%ecx,%eax
+	movl	12(%esp),%edx
+	xorl	%ebx,%eax
+	addl	%eax,%edi
+	movl	%ebx,%eax
+	roll	$10,%ebp
+	addl	%edx,%edi
+	xorl	%ebp,%eax
+	roll	$12,%edi
+	addl	%esi,%edi
+
+	movl	16(%esp),%edx
+	xorl	%edi,%eax
+	addl	%edx,%esi
+	roll	$10,%ebx
+	addl	%eax,%esi
+	movl	%edi,%eax
+	roll	$5,%esi
+	addl	%ecx,%esi
+
+	xorl	%ebx,%eax
+	movl	20(%esp),%edx
+	xorl	%esi,%eax
+	addl	%eax,%ecx
+	movl	%esi,%eax
+	roll	$10,%edi
+	addl	%edx,%ecx
+	xorl	%edi,%eax
+	roll	$8,%ecx
+	addl	%ebp,%ecx
+
+	movl	24(%esp),%edx
+	xorl	%ecx,%eax
+	addl	%edx,%ebp
+	roll	$10,%esi
+	addl	%eax,%ebp
+	movl	%ecx,%eax
+	roll	$7,%ebp
+	addl	%ebx,%ebp
+
+	xorl	%esi,%eax
+	movl	28(%esp),%edx
+	xorl	%ebp,%eax
+	addl	%eax,%ebx
+	movl	%ebp,%eax
+	roll	$10,%ecx
+	addl	%edx,%ebx
+	xorl	%ecx,%eax
+	roll	$9,%ebx
+	addl	%edi,%ebx
+
+	movl	32(%esp),%edx
+	xorl	%ebx,%eax
+	addl	%edx,%edi
+	roll	$10,%ebp
+	addl	%eax,%edi
+	movl	%ebx,%eax
+	roll	$11,%edi
+	addl	%esi,%edi
+
+	xorl	%ebp,%eax
+	movl	36(%esp),%edx
+	xorl	%edi,%eax
+	addl	%eax,%esi
+	movl	%edi,%eax
+	roll	$10,%ebx
+	addl	%edx,%esi
+	xorl	%ebx,%eax
+	roll	$13,%esi
+	addl	%ecx,%esi
+
+	movl	40(%esp),%edx
+	xorl	%esi,%eax
+	addl	%edx,%ecx
+	roll	$10,%edi
+	addl	%eax,%ecx
+	movl	%esi,%eax
+	roll	$14,%ecx
+	addl	%ebp,%ecx
+
+	xorl	%edi,%eax
+	movl	44(%esp),%edx
+	xorl	%ecx,%eax
+	addl	%eax,%ebp
+	movl	%ecx,%eax
+	roll	$10,%esi
+	addl	%edx,%ebp
+	xorl	%esi,%eax
+	roll	$15,%ebp
+	addl	%ebx,%ebp
+
+	movl	48(%esp),%edx
+	xorl	%ebp,%eax
+	addl	%edx,%ebx
+	roll	$10,%ecx
+	addl	%eax,%ebx
+	movl	%ebp,%eax
+	roll	$6,%ebx
+	addl	%edi,%ebx
+
+	xorl	%ecx,%eax
+	movl	52(%esp),%edx
+	xorl	%ebx,%eax
+	addl	%eax,%edi
+	movl	%ebx,%eax
+	roll	$10,%ebp
+	addl	%edx,%edi
+	xorl	%ebp,%eax
+	roll	$7,%edi
+	addl	%esi,%edi
+
+	movl	56(%esp),%edx
+	xorl	%edi,%eax
+	addl	%edx,%esi
+	roll	$10,%ebx
+	addl	%eax,%esi
+	movl	%edi,%eax
+	roll	$9,%esi
+	addl	%ecx,%esi
+
+	xorl	%ebx,%eax
+	movl	60(%esp),%edx
+	xorl	%esi,%eax
+	addl	%eax,%ecx
+	movl	$-1,%eax
+	roll	$10,%edi
+	addl	%edx,%ecx
+	movl	28(%esp),%edx
+	roll	$8,%ecx
+	addl	%ebp,%ecx
+
+	addl	%edx,%ebp
+	movl	%esi,%edx
+	subl	%ecx,%eax
+	andl	%ecx,%edx
+	andl	%edi,%eax
+	orl	%eax,%edx
+	movl	16(%esp),%eax
+	roll	$10,%esi
+	leal	1518500249(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	roll	$7,%ebp
+	addl	%ebx,%ebp
+
+	addl	%eax,%ebx
+	movl	%ecx,%eax
+	subl	%ebp,%edx
+	andl	%ebp,%eax
+	andl	%esi,%edx
+	orl	%edx,%eax
+	movl	52(%esp),%edx
+	roll	$10,%ecx
+	leal	1518500249(%ebx,%eax,1),%ebx
+	movl	$-1,%eax
+	roll	$6,%ebx
+	addl	%edi,%ebx
+
+	addl	%edx,%edi
+	movl	%ebp,%edx
+	subl	%ebx,%eax
+	andl	%ebx,%edx
+	andl	%ecx,%eax
+	orl	%eax,%edx
+	movl	4(%esp),%eax
+	roll	$10,%ebp
+	leal	1518500249(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	roll	$8,%edi
+	addl	%esi,%edi
+
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	subl	%edi,%edx
+	andl	%edi,%eax
+	andl	%ebp,%edx
+	orl	%edx,%eax
+	movl	40(%esp),%edx
+	roll	$10,%ebx
+	leal	1518500249(%esi,%eax,1),%esi
+	movl	$-1,%eax
+	roll	$13,%esi
+	addl	%ecx,%esi
+
+	addl	%edx,%ecx
+	movl	%edi,%edx
+	subl	%esi,%eax
+	andl	%esi,%edx
+	andl	%ebx,%eax
+	orl	%eax,%edx
+	movl	24(%esp),%eax
+	roll	$10,%edi
+	leal	1518500249(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	roll	$11,%ecx
+	addl	%ebp,%ecx
+
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	subl	%ecx,%edx
+	andl	%ecx,%eax
+	andl	%edi,%edx
+	orl	%edx,%eax
+	movl	60(%esp),%edx
+	roll	$10,%esi
+	leal	1518500249(%ebp,%eax,1),%ebp
+	movl	$-1,%eax
+	roll	$9,%ebp
+	addl	%ebx,%ebp
+
+	addl	%edx,%ebx
+	movl	%ecx,%edx
+	subl	%ebp,%eax
+	andl	%ebp,%edx
+	andl	%esi,%eax
+	orl	%eax,%edx
+	movl	12(%esp),%eax
+	roll	$10,%ecx
+	leal	1518500249(%ebx,%edx,1),%ebx
+	movl	$-1,%edx
+	roll	$7,%ebx
+	addl	%edi,%ebx
+
+	addl	%eax,%edi
+	movl	%ebp,%eax
+	subl	%ebx,%edx
+	andl	%ebx,%eax
+	andl	%ecx,%edx
+	orl	%edx,%eax
+	movl	48(%esp),%edx
+	roll	$10,%ebp
+	leal	1518500249(%edi,%eax,1),%edi
+	movl	$-1,%eax
+	roll	$15,%edi
+	addl	%esi,%edi
+
+	addl	%edx,%esi
+	movl	%ebx,%edx
+	subl	%edi,%eax
+	andl	%edi,%edx
+	andl	%ebp,%eax
+	orl	%eax,%edx
+	movl	(%esp),%eax
+	roll	$10,%ebx
+	leal	1518500249(%esi,%edx,1),%esi
+	movl	$-1,%edx
+	roll	$7,%esi
+	addl	%ecx,%esi
+
+	addl	%eax,%ecx
+	movl	%edi,%eax
+	subl	%esi,%edx
+	andl	%esi,%eax
+	andl	%ebx,%edx
+	orl	%edx,%eax
+	movl	36(%esp),%edx
+	roll	$10,%edi
+	leal	1518500249(%ecx,%eax,1),%ecx
+	movl	$-1,%eax
+	roll	$12,%ecx
+	addl	%ebp,%ecx
+
+	addl	%edx,%ebp
+	movl	%esi,%edx
+	subl	%ecx,%eax
+	andl	%ecx,%edx
+	andl	%edi,%eax
+	orl	%eax,%edx
+	movl	20(%esp),%eax
+	roll	$10,%esi
+	leal	1518500249(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	roll	$15,%ebp
+	addl	%ebx,%ebp
+
+	addl	%eax,%ebx
+	movl	%ecx,%eax
+	subl	%ebp,%edx
+	andl	%ebp,%eax
+	andl	%esi,%edx
+	orl	%edx,%eax
+	movl	8(%esp),%edx
+	roll	$10,%ecx
+	leal	1518500249(%ebx,%eax,1),%ebx
+	movl	$-1,%eax
+	roll	$9,%ebx
+	addl	%edi,%ebx
+
+	addl	%edx,%edi
+	movl	%ebp,%edx
+	subl	%ebx,%eax
+	andl	%ebx,%edx
+	andl	%ecx,%eax
+	orl	%eax,%edx
+	movl	56(%esp),%eax
+	roll	$10,%ebp
+	leal	1518500249(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	roll	$11,%edi
+	addl	%esi,%edi
+
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	subl	%edi,%edx
+	andl	%edi,%eax
+	andl	%ebp,%edx
+	orl	%edx,%eax
+	movl	44(%esp),%edx
+	roll	$10,%ebx
+	leal	1518500249(%esi,%eax,1),%esi
+	movl	$-1,%eax
+	roll	$7,%esi
+	addl	%ecx,%esi
+
+	addl	%edx,%ecx
+	movl	%edi,%edx
+	subl	%esi,%eax
+	andl	%esi,%edx
+	andl	%ebx,%eax
+	orl	%eax,%edx
+	movl	32(%esp),%eax
+	roll	$10,%edi
+	leal	1518500249(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	roll	$13,%ecx
+	addl	%ebp,%ecx
+
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	subl	%ecx,%edx
+	andl	%ecx,%eax
+	andl	%edi,%edx
+	orl	%edx,%eax
+	movl	$-1,%edx
+	roll	$10,%esi
+	leal	1518500249(%ebp,%eax,1),%ebp
+	subl	%ecx,%edx
+	roll	$12,%ebp
+	addl	%ebx,%ebp
+
+	movl	12(%esp),%eax
+	orl	%ebp,%edx
+	addl	%eax,%ebx
+	xorl	%esi,%edx
+	movl	$-1,%eax
+	roll	$10,%ecx
+	leal	1859775393(%ebx,%edx,1),%ebx
+	subl	%ebp,%eax
+	roll	$11,%ebx
+	addl	%edi,%ebx
+
+	movl	40(%esp),%edx
+	orl	%ebx,%eax
+	addl	%edx,%edi
+	xorl	%ecx,%eax
+	movl	$-1,%edx
+	roll	$10,%ebp
+	leal	1859775393(%edi,%eax,1),%edi
+	subl	%ebx,%edx
+	roll	$13,%edi
+	addl	%esi,%edi
+
+	movl	56(%esp),%eax
+	orl	%edi,%edx
+	addl	%eax,%esi
+	xorl	%ebp,%edx
+	movl	$-1,%eax
+	roll	$10,%ebx
+	leal	1859775393(%esi,%edx,1),%esi
+	subl	%edi,%eax
+	roll	$6,%esi
+	addl	%ecx,%esi
+
+	movl	16(%esp),%edx
+	orl	%esi,%eax
+	addl	%edx,%ecx
+	xorl	%ebx,%eax
+	movl	$-1,%edx
+	roll	$10,%edi
+	leal	1859775393(%ecx,%eax,1),%ecx
+	subl	%esi,%edx
+	roll	$7,%ecx
+	addl	%ebp,%ecx
+
+	movl	36(%esp),%eax
+	orl	%ecx,%edx
+	addl	%eax,%ebp
+	xorl	%edi,%edx
+	movl	$-1,%eax
+	roll	$10,%esi
+	leal	1859775393(%ebp,%edx,1),%ebp
+	subl	%ecx,%eax
+	roll	$14,%ebp
+	addl	%ebx,%ebp
+
+	movl	60(%esp),%edx
+	orl	%ebp,%eax
+	addl	%edx,%ebx
+	xorl	%esi,%eax
+	movl	$-1,%edx
+	roll	$10,%ecx
+	leal	1859775393(%ebx,%eax,1),%ebx
+	subl	%ebp,%edx
+	roll	$9,%ebx
+	addl	%edi,%ebx
+
+	movl	32(%esp),%eax
+	orl	%ebx,%edx
+	addl	%eax,%edi
+	xorl	%ecx,%edx
+	movl	$-1,%eax
+	roll	$10,%ebp
+	leal	1859775393(%edi,%edx,1),%edi
+	subl	%ebx,%eax
+	roll	$13,%edi
+	addl	%esi,%edi
+
+	movl	4(%esp),%edx
+	orl	%edi,%eax
+	addl	%edx,%esi
+	xorl	%ebp,%eax
+	movl	$-1,%edx
+	roll	$10,%ebx
+	leal	1859775393(%esi,%eax,1),%esi
+	subl	%edi,%edx
+	roll	$15,%esi
+	addl	%ecx,%esi
+
+	movl	8(%esp),%eax
+	orl	%esi,%edx
+	addl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	$-1,%eax
+	roll	$10,%edi
+	leal	1859775393(%ecx,%edx,1),%ecx
+	subl	%esi,%eax
+	roll	$14,%ecx
+	addl	%ebp,%ecx
+
+	movl	28(%esp),%edx
+	orl	%ecx,%eax
+	addl	%edx,%ebp
+	xorl	%edi,%eax
+	movl	$-1,%edx
+	roll	$10,%esi
+	leal	1859775393(%ebp,%eax,1),%ebp
+	subl	%ecx,%edx
+	roll	$8,%ebp
+	addl	%ebx,%ebp
+
+	movl	(%esp),%eax
+	orl	%ebp,%edx
+	addl	%eax,%ebx
+	xorl	%esi,%edx
+	movl	$-1,%eax
+	roll	$10,%ecx
+	leal	1859775393(%ebx,%edx,1),%ebx
+	subl	%ebp,%eax
+	roll	$13,%ebx
+	addl	%edi,%ebx
+
+	movl	24(%esp),%edx
+	orl	%ebx,%eax
+	addl	%edx,%edi
+	xorl	%ecx,%eax
+	movl	$-1,%edx
+	roll	$10,%ebp
+	leal	1859775393(%edi,%eax,1),%edi
+	subl	%ebx,%edx
+	roll	$6,%edi
+	addl	%esi,%edi
+
+	movl	52(%esp),%eax
+	orl	%edi,%edx
+	addl	%eax,%esi
+	xorl	%ebp,%edx
+	movl	$-1,%eax
+	roll	$10,%ebx
+	leal	1859775393(%esi,%edx,1),%esi
+	subl	%edi,%eax
+	roll	$5,%esi
+	addl	%ecx,%esi
+
+	movl	44(%esp),%edx
+	orl	%esi,%eax
+	addl	%edx,%ecx
+	xorl	%ebx,%eax
+	movl	$-1,%edx
+	roll	$10,%edi
+	leal	1859775393(%ecx,%eax,1),%ecx
+	subl	%esi,%edx
+	roll	$12,%ecx
+	addl	%ebp,%ecx
+
+	movl	20(%esp),%eax
+	orl	%ecx,%edx
+	addl	%eax,%ebp
+	xorl	%edi,%edx
+	movl	$-1,%eax
+	roll	$10,%esi
+	leal	1859775393(%ebp,%edx,1),%ebp
+	subl	%ecx,%eax
+	roll	$7,%ebp
+	addl	%ebx,%ebp
+
+	movl	48(%esp),%edx
+	orl	%ebp,%eax
+	addl	%edx,%ebx
+	xorl	%esi,%eax
+	movl	$-1,%edx
+	roll	$10,%ecx
+	leal	1859775393(%ebx,%eax,1),%ebx
+	movl	%ecx,%eax
+	roll	$5,%ebx
+	addl	%edi,%ebx
+
+	subl	%ecx,%edx
+	andl	%ebx,%eax
+	andl	%ebp,%edx
+	orl	%eax,%edx
+	movl	4(%esp),%eax
+	roll	$10,%ebp
+	leal	2400959708(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	addl	%eax,%edi
+	movl	%ebp,%eax
+	roll	$11,%edi
+	addl	%esi,%edi
+
+	subl	%ebp,%edx
+	andl	%edi,%eax
+	andl	%ebx,%edx
+	orl	%eax,%edx
+	movl	36(%esp),%eax
+	roll	$10,%ebx
+	leal	2400959708(%esi,%edx,1),%esi
+	movl	$-1,%edx
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	roll	$12,%esi
+	addl	%ecx,%esi
+
+	subl	%ebx,%edx
+	andl	%esi,%eax
+	andl	%edi,%edx
+	orl	%eax,%edx
+	movl	44(%esp),%eax
+	roll	$10,%edi
+	leal	2400959708(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	addl	%eax,%ecx
+	movl	%edi,%eax
+	roll	$14,%ecx
+	addl	%ebp,%ecx
+
+	subl	%edi,%edx
+	andl	%ecx,%eax
+	andl	%esi,%edx
+	orl	%eax,%edx
+	movl	40(%esp),%eax
+	roll	$10,%esi
+	leal	2400959708(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	roll	$15,%ebp
+	addl	%ebx,%ebp
+
+	subl	%esi,%edx
+	andl	%ebp,%eax
+	andl	%ecx,%edx
+	orl	%eax,%edx
+	movl	(%esp),%eax
+	roll	$10,%ecx
+	leal	2400959708(%ebx,%edx,1),%ebx
+	movl	$-1,%edx
+	addl	%eax,%ebx
+	movl	%ecx,%eax
+	roll	$14,%ebx
+	addl	%edi,%ebx
+
+	subl	%ecx,%edx
+	andl	%ebx,%eax
+	andl	%ebp,%edx
+	orl	%eax,%edx
+	movl	32(%esp),%eax
+	roll	$10,%ebp
+	leal	2400959708(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	addl	%eax,%edi
+	movl	%ebp,%eax
+	roll	$15,%edi
+	addl	%esi,%edi
+
+	subl	%ebp,%edx
+	andl	%edi,%eax
+	andl	%ebx,%edx
+	orl	%eax,%edx
+	movl	48(%esp),%eax
+	roll	$10,%ebx
+	leal	2400959708(%esi,%edx,1),%esi
+	movl	$-1,%edx
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	roll	$9,%esi
+	addl	%ecx,%esi
+
+	subl	%ebx,%edx
+	andl	%esi,%eax
+	andl	%edi,%edx
+	orl	%eax,%edx
+	movl	16(%esp),%eax
+	roll	$10,%edi
+	leal	2400959708(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	addl	%eax,%ecx
+	movl	%edi,%eax
+	roll	$8,%ecx
+	addl	%ebp,%ecx
+
+	subl	%edi,%edx
+	andl	%ecx,%eax
+	andl	%esi,%edx
+	orl	%eax,%edx
+	movl	52(%esp),%eax
+	roll	$10,%esi
+	leal	2400959708(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	roll	$9,%ebp
+	addl	%ebx,%ebp
+
+	subl	%esi,%edx
+	andl	%ebp,%eax
+	andl	%ecx,%edx
+	orl	%eax,%edx
+	movl	12(%esp),%eax
+	roll	$10,%ecx
+	leal	2400959708(%ebx,%edx,1),%ebx
+	movl	$-1,%edx
+	addl	%eax,%ebx
+	movl	%ecx,%eax
+	roll	$14,%ebx
+	addl	%edi,%ebx
+
+	subl	%ecx,%edx
+	andl	%ebx,%eax
+	andl	%ebp,%edx
+	orl	%eax,%edx
+	movl	28(%esp),%eax
+	roll	$10,%ebp
+	leal	2400959708(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	addl	%eax,%edi
+	movl	%ebp,%eax
+	roll	$5,%edi
+	addl	%esi,%edi
+
+	subl	%ebp,%edx
+	andl	%edi,%eax
+	andl	%ebx,%edx
+	orl	%eax,%edx
+	movl	60(%esp),%eax
+	roll	$10,%ebx
+	leal	2400959708(%esi,%edx,1),%esi
+	movl	$-1,%edx
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	roll	$6,%esi
+	addl	%ecx,%esi
+
+	subl	%ebx,%edx
+	andl	%esi,%eax
+	andl	%edi,%edx
+	orl	%eax,%edx
+	movl	56(%esp),%eax
+	roll	$10,%edi
+	leal	2400959708(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	addl	%eax,%ecx
+	movl	%edi,%eax
+	roll	$8,%ecx
+	addl	%ebp,%ecx
+
+	subl	%edi,%edx
+	andl	%ecx,%eax
+	andl	%esi,%edx
+	orl	%eax,%edx
+	movl	20(%esp),%eax
+	roll	$10,%esi
+	leal	2400959708(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	roll	$6,%ebp
+	addl	%ebx,%ebp
+
+	subl	%esi,%edx
+	andl	%ebp,%eax
+	andl	%ecx,%edx
+	orl	%eax,%edx
+	movl	24(%esp),%eax
+	roll	$10,%ecx
+	leal	2400959708(%ebx,%edx,1),%ebx
+	movl	$-1,%edx
+	addl	%eax,%ebx
+	movl	%ecx,%eax
+	roll	$5,%ebx
+	addl	%edi,%ebx
+
+	subl	%ecx,%edx
+	andl	%ebx,%eax
+	andl	%ebp,%edx
+	orl	%eax,%edx
+	movl	8(%esp),%eax
+	roll	$10,%ebp
+	leal	2400959708(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	addl	%eax,%edi
+	subl	%ebp,%edx
+	roll	$12,%edi
+	addl	%esi,%edi
+
+	movl	16(%esp),%eax
+	orl	%ebx,%edx
+	addl	%eax,%esi
+	xorl	%edi,%edx
+	movl	$-1,%eax
+	roll	$10,%ebx
+	leal	2840853838(%esi,%edx,1),%esi
+	subl	%ebx,%eax
+	roll	$9,%esi
+	addl	%ecx,%esi
+
+	movl	(%esp),%edx
+	orl	%edi,%eax
+	addl	%edx,%ecx
+	xorl	%esi,%eax
+	movl	$-1,%edx
+	roll	$10,%edi
+	leal	2840853838(%ecx,%eax,1),%ecx
+	subl	%edi,%edx
+	roll	$15,%ecx
+	addl	%ebp,%ecx
+
+	movl	20(%esp),%eax
+	orl	%esi,%edx
+	addl	%eax,%ebp
+	xorl	%ecx,%edx
+	movl	$-1,%eax
+	roll	$10,%esi
+	leal	2840853838(%ebp,%edx,1),%ebp
+	subl	%esi,%eax
+	roll	$5,%ebp
+	addl	%ebx,%ebp
+
+	movl	36(%esp),%edx
+	orl	%ecx,%eax
+	addl	%edx,%ebx
+	xorl	%ebp,%eax
+	movl	$-1,%edx
+	roll	$10,%ecx
+	leal	2840853838(%ebx,%eax,1),%ebx
+	subl	%ecx,%edx
+	roll	$11,%ebx
+	addl	%edi,%ebx
+
+	movl	28(%esp),%eax
+	orl	%ebp,%edx
+	addl	%eax,%edi
+	xorl	%ebx,%edx
+	movl	$-1,%eax
+	roll	$10,%ebp
+	leal	2840853838(%edi,%edx,1),%edi
+	subl	%ebp,%eax
+	roll	$6,%edi
+	addl	%esi,%edi
+
+	movl	48(%esp),%edx
+	orl	%ebx,%eax
+	addl	%edx,%esi
+	xorl	%edi,%eax
+	movl	$-1,%edx
+	roll	$10,%ebx
+	leal	2840853838(%esi,%eax,1),%esi
+	subl	%ebx,%edx
+	roll	$8,%esi
+	addl	%ecx,%esi
+
+	movl	8(%esp),%eax
+	orl	%edi,%edx
+	addl	%eax,%ecx
+	xorl	%esi,%edx
+	movl	$-1,%eax
+	roll	$10,%edi
+	leal	2840853838(%ecx,%edx,1),%ecx
+	subl	%edi,%eax
+	roll	$13,%ecx
+	addl	%ebp,%ecx
+
+	movl	40(%esp),%edx
+	orl	%esi,%eax
+	addl	%edx,%ebp
+	xorl	%ecx,%eax
+	movl	$-1,%edx
+	roll	$10,%esi
+	leal	2840853838(%ebp,%eax,1),%ebp
+	subl	%esi,%edx
+	roll	$12,%ebp
+	addl	%ebx,%ebp
+
+	movl	56(%esp),%eax
+	orl	%ecx,%edx
+	addl	%eax,%ebx
+	xorl	%ebp,%edx
+	movl	$-1,%eax
+	roll	$10,%ecx
+	leal	2840853838(%ebx,%edx,1),%ebx
+	subl	%ecx,%eax
+	roll	$5,%ebx
+	addl	%edi,%ebx
+
+	movl	4(%esp),%edx
+	orl	%ebp,%eax
+	addl	%edx,%edi
+	xorl	%ebx,%eax
+	movl	$-1,%edx
+	roll	$10,%ebp
+	leal	2840853838(%edi,%eax,1),%edi
+	subl	%ebp,%edx
+	roll	$12,%edi
+	addl	%esi,%edi
+
+	movl	12(%esp),%eax
+	orl	%ebx,%edx
+	addl	%eax,%esi
+	xorl	%edi,%edx
+	movl	$-1,%eax
+	roll	$10,%ebx
+	leal	2840853838(%esi,%edx,1),%esi
+	subl	%ebx,%eax
+	roll	$13,%esi
+	addl	%ecx,%esi
+
+	movl	32(%esp),%edx
+	orl	%edi,%eax
+	addl	%edx,%ecx
+	xorl	%esi,%eax
+	movl	$-1,%edx
+	roll	$10,%edi
+	leal	2840853838(%ecx,%eax,1),%ecx
+	subl	%edi,%edx
+	roll	$14,%ecx
+	addl	%ebp,%ecx
+
+	movl	44(%esp),%eax
+	orl	%esi,%edx
+	addl	%eax,%ebp
+	xorl	%ecx,%edx
+	movl	$-1,%eax
+	roll	$10,%esi
+	leal	2840853838(%ebp,%edx,1),%ebp
+	subl	%esi,%eax
+	roll	$11,%ebp
+	addl	%ebx,%ebp
+
+	movl	24(%esp),%edx
+	orl	%ecx,%eax
+	addl	%edx,%ebx
+	xorl	%ebp,%eax
+	movl	$-1,%edx
+	roll	$10,%ecx
+	leal	2840853838(%ebx,%eax,1),%ebx
+	subl	%ecx,%edx
+	roll	$8,%ebx
+	addl	%edi,%ebx
+
+	movl	60(%esp),%eax
+	orl	%ebp,%edx
+	addl	%eax,%edi
+	xorl	%ebx,%edx
+	movl	$-1,%eax
+	roll	$10,%ebp
+	leal	2840853838(%edi,%edx,1),%edi
+	subl	%ebp,%eax
+	roll	$5,%edi
+	addl	%esi,%edi
+
+	movl	52(%esp),%edx
+	orl	%ebx,%eax
+	addl	%edx,%esi
+	xorl	%edi,%eax
+	movl	128(%esp),%edx
+	roll	$10,%ebx
+	leal	2840853838(%esi,%eax,1),%esi
+	movl	%ecx,64(%esp)
+	roll	$6,%esi
+	addl	%ecx,%esi
+	movl	(%edx),%ecx
+	movl	%esi,68(%esp)
+	movl	%edi,72(%esp)
+	movl	4(%edx),%esi
+	movl	%ebx,76(%esp)
+	movl	8(%edx),%edi
+	movl	%ebp,80(%esp)
+	movl	12(%edx),%ebx
+	movl	16(%edx),%ebp
+
+	movl	$-1,%edx
+	subl	%ebx,%edx
+	movl	20(%esp),%eax
+	orl	%edi,%edx
+	addl	%eax,%ecx
+	xorl	%esi,%edx
+	movl	$-1,%eax
+	roll	$10,%edi
+	leal	1352829926(%ecx,%edx,1),%ecx
+	subl	%edi,%eax
+	roll	$8,%ecx
+	addl	%ebp,%ecx
+
+	movl	56(%esp),%edx
+	orl	%esi,%eax
+	addl	%edx,%ebp
+	xorl	%ecx,%eax
+	movl	$-1,%edx
+	roll	$10,%esi
+	leal	1352829926(%ebp,%eax,1),%ebp
+	subl	%esi,%edx
+	roll	$9,%ebp
+	addl	%ebx,%ebp
+
+	movl	28(%esp),%eax
+	orl	%ecx,%edx
+	addl	%eax,%ebx
+	xorl	%ebp,%edx
+	movl	$-1,%eax
+	roll	$10,%ecx
+	leal	1352829926(%ebx,%edx,1),%ebx
+	subl	%ecx,%eax
+	roll	$9,%ebx
+	addl	%edi,%ebx
+
+	movl	(%esp),%edx
+	orl	%ebp,%eax
+	addl	%edx,%edi
+	xorl	%ebx,%eax
+	movl	$-1,%edx
+	roll	$10,%ebp
+	leal	1352829926(%edi,%eax,1),%edi
+	subl	%ebp,%edx
+	roll	$11,%edi
+	addl	%esi,%edi
+
+	movl	36(%esp),%eax
+	orl	%ebx,%edx
+	addl	%eax,%esi
+	xorl	%edi,%edx
+	movl	$-1,%eax
+	roll	$10,%ebx
+	leal	1352829926(%esi,%edx,1),%esi
+	subl	%ebx,%eax
+	roll	$13,%esi
+	addl	%ecx,%esi
+
+	movl	8(%esp),%edx
+	orl	%edi,%eax
+	addl	%edx,%ecx
+	xorl	%esi,%eax
+	movl	$-1,%edx
+	roll	$10,%edi
+	leal	1352829926(%ecx,%eax,1),%ecx
+	subl	%edi,%edx
+	roll	$15,%ecx
+	addl	%ebp,%ecx
+
+	movl	44(%esp),%eax
+	orl	%esi,%edx
+	addl	%eax,%ebp
+	xorl	%ecx,%edx
+	movl	$-1,%eax
+	roll	$10,%esi
+	leal	1352829926(%ebp,%edx,1),%ebp
+	subl	%esi,%eax
+	roll	$15,%ebp
+	addl	%ebx,%ebp
+
+	movl	16(%esp),%edx
+	orl	%ecx,%eax
+	addl	%edx,%ebx
+	xorl	%ebp,%eax
+	movl	$-1,%edx
+	roll	$10,%ecx
+	leal	1352829926(%ebx,%eax,1),%ebx
+	subl	%ecx,%edx
+	roll	$5,%ebx
+	addl	%edi,%ebx
+
+	movl	52(%esp),%eax
+	orl	%ebp,%edx
+	addl	%eax,%edi
+	xorl	%ebx,%edx
+	movl	$-1,%eax
+	roll	$10,%ebp
+	leal	1352829926(%edi,%edx,1),%edi
+	subl	%ebp,%eax
+	roll	$7,%edi
+	addl	%esi,%edi
+
+	movl	24(%esp),%edx
+	orl	%ebx,%eax
+	addl	%edx,%esi
+	xorl	%edi,%eax
+	movl	$-1,%edx
+	roll	$10,%ebx
+	leal	1352829926(%esi,%eax,1),%esi
+	subl	%ebx,%edx
+	roll	$7,%esi
+	addl	%ecx,%esi
+
+	movl	60(%esp),%eax
+	orl	%edi,%edx
+	addl	%eax,%ecx
+	xorl	%esi,%edx
+	movl	$-1,%eax
+	roll	$10,%edi
+	leal	1352829926(%ecx,%edx,1),%ecx
+	subl	%edi,%eax
+	roll	$8,%ecx
+	addl	%ebp,%ecx
+
+	movl	32(%esp),%edx
+	orl	%esi,%eax
+	addl	%edx,%ebp
+	xorl	%ecx,%eax
+	movl	$-1,%edx
+	roll	$10,%esi
+	leal	1352829926(%ebp,%eax,1),%ebp
+	subl	%esi,%edx
+	roll	$11,%ebp
+	addl	%ebx,%ebp
+
+	movl	4(%esp),%eax
+	orl	%ecx,%edx
+	addl	%eax,%ebx
+	xorl	%ebp,%edx
+	movl	$-1,%eax
+	roll	$10,%ecx
+	leal	1352829926(%ebx,%edx,1),%ebx
+	subl	%ecx,%eax
+	roll	$14,%ebx
+	addl	%edi,%ebx
+
+	movl	40(%esp),%edx
+	orl	%ebp,%eax
+	addl	%edx,%edi
+	xorl	%ebx,%eax
+	movl	$-1,%edx
+	roll	$10,%ebp
+	leal	1352829926(%edi,%eax,1),%edi
+	subl	%ebp,%edx
+	roll	$14,%edi
+	addl	%esi,%edi
+
+	movl	12(%esp),%eax
+	orl	%ebx,%edx
+	addl	%eax,%esi
+	xorl	%edi,%edx
+	movl	$-1,%eax
+	roll	$10,%ebx
+	leal	1352829926(%esi,%edx,1),%esi
+	subl	%ebx,%eax
+	roll	$12,%esi
+	addl	%ecx,%esi
+
+	movl	48(%esp),%edx
+	orl	%edi,%eax
+	addl	%edx,%ecx
+	xorl	%esi,%eax
+	movl	$-1,%edx
+	roll	$10,%edi
+	leal	1352829926(%ecx,%eax,1),%ecx
+	movl	%edi,%eax
+	roll	$6,%ecx
+	addl	%ebp,%ecx
+
+	subl	%edi,%edx
+	andl	%ecx,%eax
+	andl	%esi,%edx
+	orl	%eax,%edx
+	movl	24(%esp),%eax
+	roll	$10,%esi
+	leal	1548603684(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	roll	$9,%ebp
+	addl	%ebx,%ebp
+
+	subl	%esi,%edx
+	andl	%ebp,%eax
+	andl	%ecx,%edx
+	orl	%eax,%edx
+	movl	44(%esp),%eax
+	roll	$10,%ecx
+	leal	1548603684(%ebx,%edx,1),%ebx
+	movl	$-1,%edx
+	addl	%eax,%ebx
+	movl	%ecx,%eax
+	roll	$13,%ebx
+	addl	%edi,%ebx
+
+	subl	%ecx,%edx
+	andl	%ebx,%eax
+	andl	%ebp,%edx
+	orl	%eax,%edx
+	movl	12(%esp),%eax
+	roll	$10,%ebp
+	leal	1548603684(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	addl	%eax,%edi
+	movl	%ebp,%eax
+	roll	$15,%edi
+	addl	%esi,%edi
+
+	subl	%ebp,%edx
+	andl	%edi,%eax
+	andl	%ebx,%edx
+	orl	%eax,%edx
+	movl	28(%esp),%eax
+	roll	$10,%ebx
+	leal	1548603684(%esi,%edx,1),%esi
+	movl	$-1,%edx
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	roll	$7,%esi
+	addl	%ecx,%esi
+
+	subl	%ebx,%edx
+	andl	%esi,%eax
+	andl	%edi,%edx
+	orl	%eax,%edx
+	movl	(%esp),%eax
+	roll	$10,%edi
+	leal	1548603684(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	addl	%eax,%ecx
+	movl	%edi,%eax
+	roll	$12,%ecx
+	addl	%ebp,%ecx
+
+	subl	%edi,%edx
+	andl	%ecx,%eax
+	andl	%esi,%edx
+	orl	%eax,%edx
+	movl	52(%esp),%eax
+	roll	$10,%esi
+	leal	1548603684(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	roll	$8,%ebp
+	addl	%ebx,%ebp
+
+	subl	%esi,%edx
+	andl	%ebp,%eax
+	andl	%ecx,%edx
+	orl	%eax,%edx
+	movl	20(%esp),%eax
+	roll	$10,%ecx
+	leal	1548603684(%ebx,%edx,1),%ebx
+	movl	$-1,%edx
+	addl	%eax,%ebx
+	movl	%ecx,%eax
+	roll	$9,%ebx
+	addl	%edi,%ebx
+
+	subl	%ecx,%edx
+	andl	%ebx,%eax
+	andl	%ebp,%edx
+	orl	%eax,%edx
+	movl	40(%esp),%eax
+	roll	$10,%ebp
+	leal	1548603684(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	addl	%eax,%edi
+	movl	%ebp,%eax
+	roll	$11,%edi
+	addl	%esi,%edi
+
+	subl	%ebp,%edx
+	andl	%edi,%eax
+	andl	%ebx,%edx
+	orl	%eax,%edx
+	movl	56(%esp),%eax
+	roll	$10,%ebx
+	leal	1548603684(%esi,%edx,1),%esi
+	movl	$-1,%edx
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	roll	$7,%esi
+	addl	%ecx,%esi
+
+	subl	%ebx,%edx
+	andl	%esi,%eax
+	andl	%edi,%edx
+	orl	%eax,%edx
+	movl	60(%esp),%eax
+	roll	$10,%edi
+	leal	1548603684(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	addl	%eax,%ecx
+	movl	%edi,%eax
+	roll	$7,%ecx
+	addl	%ebp,%ecx
+
+	subl	%edi,%edx
+	andl	%ecx,%eax
+	andl	%esi,%edx
+	orl	%eax,%edx
+	movl	32(%esp),%eax
+	roll	$10,%esi
+	leal	1548603684(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	roll	$12,%ebp
+	addl	%ebx,%ebp
+
+	subl	%esi,%edx
+	andl	%ebp,%eax
+	andl	%ecx,%edx
+	orl	%eax,%edx
+	movl	48(%esp),%eax
+	roll	$10,%ecx
+	leal	1548603684(%ebx,%edx,1),%ebx
+	movl	$-1,%edx
+	addl	%eax,%ebx
+	movl	%ecx,%eax
+	roll	$7,%ebx
+	addl	%edi,%ebx
+
+	subl	%ecx,%edx
+	andl	%ebx,%eax
+	andl	%ebp,%edx
+	orl	%eax,%edx
+	movl	16(%esp),%eax
+	roll	$10,%ebp
+	leal	1548603684(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	addl	%eax,%edi
+	movl	%ebp,%eax
+	roll	$6,%edi
+	addl	%esi,%edi
+
+	subl	%ebp,%edx
+	andl	%edi,%eax
+	andl	%ebx,%edx
+	orl	%eax,%edx
+	movl	36(%esp),%eax
+	roll	$10,%ebx
+	leal	1548603684(%esi,%edx,1),%esi
+	movl	$-1,%edx
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	roll	$15,%esi
+	addl	%ecx,%esi
+
+	subl	%ebx,%edx
+	andl	%esi,%eax
+	andl	%edi,%edx
+	orl	%eax,%edx
+	movl	4(%esp),%eax
+	roll	$10,%edi
+	leal	1548603684(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	addl	%eax,%ecx
+	movl	%edi,%eax
+	roll	$13,%ecx
+	addl	%ebp,%ecx
+
+	subl	%edi,%edx
+	andl	%ecx,%eax
+	andl	%esi,%edx
+	orl	%eax,%edx
+	movl	8(%esp),%eax
+	roll	$10,%esi
+	leal	1548603684(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	addl	%eax,%ebp
+	subl	%ecx,%edx
+	roll	$11,%ebp
+	addl	%ebx,%ebp
+
+	movl	60(%esp),%eax
+	orl	%ebp,%edx
+	addl	%eax,%ebx
+	xorl	%esi,%edx
+	movl	$-1,%eax
+	roll	$10,%ecx
+	leal	1836072691(%ebx,%edx,1),%ebx
+	subl	%ebp,%eax
+	roll	$9,%ebx
+	addl	%edi,%ebx
+
+	movl	20(%esp),%edx
+	orl	%ebx,%eax
+	addl	%edx,%edi
+	xorl	%ecx,%eax
+	movl	$-1,%edx
+	roll	$10,%ebp
+	leal	1836072691(%edi,%eax,1),%edi
+	subl	%ebx,%edx
+	roll	$7,%edi
+	addl	%esi,%edi
+
+	movl	4(%esp),%eax
+	orl	%edi,%edx
+	addl	%eax,%esi
+	xorl	%ebp,%edx
+	movl	$-1,%eax
+	roll	$10,%ebx
+	leal	1836072691(%esi,%edx,1),%esi
+	subl	%edi,%eax
+	roll	$15,%esi
+	addl	%ecx,%esi
+
+	movl	12(%esp),%edx
+	orl	%esi,%eax
+	addl	%edx,%ecx
+	xorl	%ebx,%eax
+	movl	$-1,%edx
+	roll	$10,%edi
+	leal	1836072691(%ecx,%eax,1),%ecx
+	subl	%esi,%edx
+	roll	$11,%ecx
+	addl	%ebp,%ecx
+
+	movl	28(%esp),%eax
+	orl	%ecx,%edx
+	addl	%eax,%ebp
+	xorl	%edi,%edx
+	movl	$-1,%eax
+	roll	$10,%esi
+	leal	1836072691(%ebp,%edx,1),%ebp
+	subl	%ecx,%eax
+	roll	$8,%ebp
+	addl	%ebx,%ebp
+
+	movl	56(%esp),%edx
+	orl	%ebp,%eax
+	addl	%edx,%ebx
+	xorl	%esi,%eax
+	movl	$-1,%edx
+	roll	$10,%ecx
+	leal	1836072691(%ebx,%eax,1),%ebx
+	subl	%ebp,%edx
+	roll	$6,%ebx
+	addl	%edi,%ebx
+
+	movl	24(%esp),%eax
+	orl	%ebx,%edx
+	addl	%eax,%edi
+	xorl	%ecx,%edx
+	movl	$-1,%eax
+	roll	$10,%ebp
+	leal	1836072691(%edi,%edx,1),%edi
+	subl	%ebx,%eax
+	roll	$6,%edi
+	addl	%esi,%edi
+
+	movl	36(%esp),%edx
+	orl	%edi,%eax
+	addl	%edx,%esi
+	xorl	%ebp,%eax
+	movl	$-1,%edx
+	roll	$10,%ebx
+	leal	1836072691(%esi,%eax,1),%esi
+	subl	%edi,%edx
+	roll	$14,%esi
+	addl	%ecx,%esi
+
+	movl	44(%esp),%eax
+	orl	%esi,%edx
+	addl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	$-1,%eax
+	roll	$10,%edi
+	leal	1836072691(%ecx,%edx,1),%ecx
+	subl	%esi,%eax
+	roll	$12,%ecx
+	addl	%ebp,%ecx
+
+	movl	32(%esp),%edx
+	orl	%ecx,%eax
+	addl	%edx,%ebp
+	xorl	%edi,%eax
+	movl	$-1,%edx
+	roll	$10,%esi
+	leal	1836072691(%ebp,%eax,1),%ebp
+	subl	%ecx,%edx
+	roll	$13,%ebp
+	addl	%ebx,%ebp
+
+	movl	48(%esp),%eax
+	orl	%ebp,%edx
+	addl	%eax,%ebx
+	xorl	%esi,%edx
+	movl	$-1,%eax
+	roll	$10,%ecx
+	leal	1836072691(%ebx,%edx,1),%ebx
+	subl	%ebp,%eax
+	roll	$5,%ebx
+	addl	%edi,%ebx
+
+	movl	8(%esp),%edx
+	orl	%ebx,%eax
+	addl	%edx,%edi
+	xorl	%ecx,%eax
+	movl	$-1,%edx
+	roll	$10,%ebp
+	leal	1836072691(%edi,%eax,1),%edi
+	subl	%ebx,%edx
+	roll	$14,%edi
+	addl	%esi,%edi
+
+	movl	40(%esp),%eax
+	orl	%edi,%edx
+	addl	%eax,%esi
+	xorl	%ebp,%edx
+	movl	$-1,%eax
+	roll	$10,%ebx
+	leal	1836072691(%esi,%edx,1),%esi
+	subl	%edi,%eax
+	roll	$13,%esi
+	addl	%ecx,%esi
+
+	movl	(%esp),%edx
+	orl	%esi,%eax
+	addl	%edx,%ecx
+	xorl	%ebx,%eax
+	movl	$-1,%edx
+	roll	$10,%edi
+	leal	1836072691(%ecx,%eax,1),%ecx
+	subl	%esi,%edx
+	roll	$13,%ecx
+	addl	%ebp,%ecx
+
+	movl	16(%esp),%eax
+	orl	%ecx,%edx
+	addl	%eax,%ebp
+	xorl	%edi,%edx
+	movl	$-1,%eax
+	roll	$10,%esi
+	leal	1836072691(%ebp,%edx,1),%ebp
+	subl	%ecx,%eax
+	roll	$7,%ebp
+	addl	%ebx,%ebp
+
+	movl	52(%esp),%edx
+	orl	%ebp,%eax
+	addl	%edx,%ebx
+	xorl	%esi,%eax
+	movl	32(%esp),%edx
+	roll	$10,%ecx
+	leal	1836072691(%ebx,%eax,1),%ebx
+	movl	$-1,%eax
+	roll	$5,%ebx
+	addl	%edi,%ebx
+
+	addl	%edx,%edi
+	movl	%ebp,%edx
+	subl	%ebx,%eax
+	andl	%ebx,%edx
+	andl	%ecx,%eax
+	orl	%eax,%edx
+	movl	24(%esp),%eax
+	roll	$10,%ebp
+	leal	2053994217(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	roll	$15,%edi
+	addl	%esi,%edi
+
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	subl	%edi,%edx
+	andl	%edi,%eax
+	andl	%ebp,%edx
+	orl	%edx,%eax
+	movl	16(%esp),%edx
+	roll	$10,%ebx
+	leal	2053994217(%esi,%eax,1),%esi
+	movl	$-1,%eax
+	roll	$5,%esi
+	addl	%ecx,%esi
+
+	addl	%edx,%ecx
+	movl	%edi,%edx
+	subl	%esi,%eax
+	andl	%esi,%edx
+	andl	%ebx,%eax
+	orl	%eax,%edx
+	movl	4(%esp),%eax
+	roll	$10,%edi
+	leal	2053994217(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	roll	$8,%ecx
+	addl	%ebp,%ecx
+
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	subl	%ecx,%edx
+	andl	%ecx,%eax
+	andl	%edi,%edx
+	orl	%edx,%eax
+	movl	12(%esp),%edx
+	roll	$10,%esi
+	leal	2053994217(%ebp,%eax,1),%ebp
+	movl	$-1,%eax
+	roll	$11,%ebp
+	addl	%ebx,%ebp
+
+	addl	%edx,%ebx
+	movl	%ecx,%edx
+	subl	%ebp,%eax
+	andl	%ebp,%edx
+	andl	%esi,%eax
+	orl	%eax,%edx
+	movl	44(%esp),%eax
+	roll	$10,%ecx
+	leal	2053994217(%ebx,%edx,1),%ebx
+	movl	$-1,%edx
+	roll	$14,%ebx
+	addl	%edi,%ebx
+
+	addl	%eax,%edi
+	movl	%ebp,%eax
+	subl	%ebx,%edx
+	andl	%ebx,%eax
+	andl	%ecx,%edx
+	orl	%edx,%eax
+	movl	60(%esp),%edx
+	roll	$10,%ebp
+	leal	2053994217(%edi,%eax,1),%edi
+	movl	$-1,%eax
+	roll	$14,%edi
+	addl	%esi,%edi
+
+	addl	%edx,%esi
+	movl	%ebx,%edx
+	subl	%edi,%eax
+	andl	%edi,%edx
+	andl	%ebp,%eax
+	orl	%eax,%edx
+	movl	(%esp),%eax
+	roll	$10,%ebx
+	leal	2053994217(%esi,%edx,1),%esi
+	movl	$-1,%edx
+	roll	$6,%esi
+	addl	%ecx,%esi
+
+	addl	%eax,%ecx
+	movl	%edi,%eax
+	subl	%esi,%edx
+	andl	%esi,%eax
+	andl	%ebx,%edx
+	orl	%edx,%eax
+	movl	20(%esp),%edx
+	roll	$10,%edi
+	leal	2053994217(%ecx,%eax,1),%ecx
+	movl	$-1,%eax
+	roll	$14,%ecx
+	addl	%ebp,%ecx
+
+	addl	%edx,%ebp
+	movl	%esi,%edx
+	subl	%ecx,%eax
+	andl	%ecx,%edx
+	andl	%edi,%eax
+	orl	%eax,%edx
+	movl	48(%esp),%eax
+	roll	$10,%esi
+	leal	2053994217(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	roll	$6,%ebp
+	addl	%ebx,%ebp
+
+	addl	%eax,%ebx
+	movl	%ecx,%eax
+	subl	%ebp,%edx
+	andl	%ebp,%eax
+	andl	%esi,%edx
+	orl	%edx,%eax
+	movl	8(%esp),%edx
+	roll	$10,%ecx
+	leal	2053994217(%ebx,%eax,1),%ebx
+	movl	$-1,%eax
+	roll	$9,%ebx
+	addl	%edi,%ebx
+
+	addl	%edx,%edi
+	movl	%ebp,%edx
+	subl	%ebx,%eax
+	andl	%ebx,%edx
+	andl	%ecx,%eax
+	orl	%eax,%edx
+	movl	52(%esp),%eax
+	roll	$10,%ebp
+	leal	2053994217(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	roll	$12,%edi
+	addl	%esi,%edi
+
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	subl	%edi,%edx
+	andl	%edi,%eax
+	andl	%ebp,%edx
+	orl	%edx,%eax
+	movl	36(%esp),%edx
+	roll	$10,%ebx
+	leal	2053994217(%esi,%eax,1),%esi
+	movl	$-1,%eax
+	roll	$9,%esi
+	addl	%ecx,%esi
+
+	addl	%edx,%ecx
+	movl	%edi,%edx
+	subl	%esi,%eax
+	andl	%esi,%edx
+	andl	%ebx,%eax
+	orl	%eax,%edx
+	movl	28(%esp),%eax
+	roll	$10,%edi
+	leal	2053994217(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	roll	$12,%ecx
+	addl	%ebp,%ecx
+
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	subl	%ecx,%edx
+	andl	%ecx,%eax
+	andl	%edi,%edx
+	orl	%edx,%eax
+	movl	40(%esp),%edx
+	roll	$10,%esi
+	leal	2053994217(%ebp,%eax,1),%ebp
+	movl	$-1,%eax
+	roll	$5,%ebp
+	addl	%ebx,%ebp
+
+	addl	%edx,%ebx
+	movl	%ecx,%edx
+	subl	%ebp,%eax
+	andl	%ebp,%edx
+	andl	%esi,%eax
+	orl	%eax,%edx
+	movl	56(%esp),%eax
+	roll	$10,%ecx
+	leal	2053994217(%ebx,%edx,1),%ebx
+	movl	$-1,%edx
+	roll	$15,%ebx
+	addl	%edi,%ebx
+
+	addl	%eax,%edi
+	movl	%ebp,%eax
+	subl	%ebx,%edx
+	andl	%ebx,%eax
+	andl	%ecx,%edx
+	orl	%eax,%edx
+	movl	%ebx,%eax
+	roll	$10,%ebp
+	leal	2053994217(%edi,%edx,1),%edi
+	xorl	%ebp,%eax
+	roll	$8,%edi
+	addl	%esi,%edi
+
+	movl	48(%esp),%edx
+	xorl	%edi,%eax
+	addl	%edx,%esi
+	roll	$10,%ebx
+	addl	%eax,%esi
+	movl	%edi,%eax
+	roll	$8,%esi
+	addl	%ecx,%esi
+
+	xorl	%ebx,%eax
+	movl	60(%esp),%edx
+	xorl	%esi,%eax
+	addl	%eax,%ecx
+	movl	%esi,%eax
+	roll	$10,%edi
+	addl	%edx,%ecx
+	xorl	%edi,%eax
+	roll	$5,%ecx
+	addl	%ebp,%ecx
+
+	movl	40(%esp),%edx
+	xorl	%ecx,%eax
+	addl	%edx,%ebp
+	roll	$10,%esi
+	addl	%eax,%ebp
+	movl	%ecx,%eax
+	roll	$12,%ebp
+	addl	%ebx,%ebp
+
+	xorl	%esi,%eax
+	movl	16(%esp),%edx
+	xorl	%ebp,%eax
+	addl	%eax,%ebx
+	movl	%ebp,%eax
+	roll	$10,%ecx
+	addl	%edx,%ebx
+	xorl	%ecx,%eax
+	roll	$9,%ebx
+	addl	%edi,%ebx
+
+	movl	4(%esp),%edx
+	xorl	%ebx,%eax
+	addl	%edx,%edi
+	roll	$10,%ebp
+	addl	%eax,%edi
+	movl	%ebx,%eax
+	roll	$12,%edi
+	addl	%esi,%edi
+
+	xorl	%ebp,%eax
+	movl	20(%esp),%edx
+	xorl	%edi,%eax
+	addl	%eax,%esi
+	movl	%edi,%eax
+	roll	$10,%ebx
+	addl	%edx,%esi
+	xorl	%ebx,%eax
+	roll	$5,%esi
+	addl	%ecx,%esi
+
+	movl	32(%esp),%edx
+	xorl	%esi,%eax
+	addl	%edx,%ecx
+	roll	$10,%edi
+	addl	%eax,%ecx
+	movl	%esi,%eax
+	roll	$14,%ecx
+	addl	%ebp,%ecx
+
+	xorl	%edi,%eax
+	movl	28(%esp),%edx
+	xorl	%ecx,%eax
+	addl	%eax,%ebp
+	movl	%ecx,%eax
+	roll	$10,%esi
+	addl	%edx,%ebp
+	xorl	%esi,%eax
+	roll	$6,%ebp
+	addl	%ebx,%ebp
+
+	movl	24(%esp),%edx
+	xorl	%ebp,%eax
+	addl	%edx,%ebx
+	roll	$10,%ecx
+	addl	%eax,%ebx
+	movl	%ebp,%eax
+	roll	$8,%ebx
+	addl	%edi,%ebx
+
+	xorl	%ecx,%eax
+	movl	8(%esp),%edx
+	xorl	%ebx,%eax
+	addl	%eax,%edi
+	movl	%ebx,%eax
+	roll	$10,%ebp
+	addl	%edx,%edi
+	xorl	%ebp,%eax
+	roll	$13,%edi
+	addl	%esi,%edi
+
+	movl	52(%esp),%edx
+	xorl	%edi,%eax
+	addl	%edx,%esi
+	roll	$10,%ebx
+	addl	%eax,%esi
+	movl	%edi,%eax
+	roll	$6,%esi
+	addl	%ecx,%esi
+
+	xorl	%ebx,%eax
+	movl	56(%esp),%edx
+	xorl	%esi,%eax
+	addl	%eax,%ecx
+	movl	%esi,%eax
+	roll	$10,%edi
+	addl	%edx,%ecx
+	xorl	%edi,%eax
+	roll	$5,%ecx
+	addl	%ebp,%ecx
+
+	movl	(%esp),%edx
+	xorl	%ecx,%eax
+	addl	%edx,%ebp
+	roll	$10,%esi
+	addl	%eax,%ebp
+	movl	%ecx,%eax
+	roll	$15,%ebp
+	addl	%ebx,%ebp
+
+	xorl	%esi,%eax
+	movl	12(%esp),%edx
+	xorl	%ebp,%eax
+	addl	%eax,%ebx
+	movl	%ebp,%eax
+	roll	$10,%ecx
+	addl	%edx,%ebx
+	xorl	%ecx,%eax
+	roll	$13,%ebx
+	addl	%edi,%ebx
+
+	movl	36(%esp),%edx
+	xorl	%ebx,%eax
+	addl	%edx,%edi
+	roll	$10,%ebp
+	addl	%eax,%edi
+	movl	%ebx,%eax
+	roll	$11,%edi
+	addl	%esi,%edi
+
+	xorl	%ebp,%eax
+	movl	44(%esp),%edx
+	xorl	%edi,%eax
+	addl	%eax,%esi
+	roll	$10,%ebx
+	addl	%edx,%esi
+	movl	128(%esp),%edx
+	roll	$11,%esi
+	addl	%ecx,%esi
+	movl	4(%edx),%eax
+	addl	%eax,%ebx
+	movl	72(%esp),%eax
+	addl	%eax,%ebx
+	movl	8(%edx),%eax
+	addl	%eax,%ebp
+	movl	76(%esp),%eax
+	addl	%eax,%ebp
+	movl	12(%edx),%eax
+	addl	%eax,%ecx
+	movl	80(%esp),%eax
+	addl	%eax,%ecx
+	movl	16(%edx),%eax
+	addl	%eax,%esi
+	movl	64(%esp),%eax
+	addl	%eax,%esi
+	movl	(%edx),%eax
+	addl	%eax,%edi
+	movl	68(%esp),%eax
+	addl	%eax,%edi
+	movl	136(%esp),%eax
+	movl	%ebx,(%edx)
+	movl	%ebp,4(%edx)
+	movl	%ecx,8(%edx)
+	subl	$1,%eax
+	movl	%esi,12(%edx)
+	movl	%edi,16(%edx)
+	jle	.L001get_out
+	movl	%eax,136(%esp)
+	movl	%ecx,%edi
+	movl	132(%esp),%eax
+	movl	%ebx,%ecx
+	addl	$64,%eax
+	movl	%ebp,%esi
+	movl	%eax,132(%esp)
+	jmp	.L000start
+.L001get_out:
+	addl	$108,%esp
+	popl	%ebx
+	popl	%ebp
+	popl	%edi
+	popl	%esi
+	ret
+.size	ripemd160_block_asm_data_order,.-.L_ripemd160_block_asm_data_order_begin
+#else
+.file	"rmd-586.S"
+.text
+.globl	ripemd160_block_asm_data_order
+.type	ripemd160_block_asm_data_order,@function
+.align	16
+ripemd160_block_asm_data_order:
+.L_ripemd160_block_asm_data_order_begin:
+	movl	4(%esp),%edx
+	movl	8(%esp),%eax
+	pushl	%esi
+	movl	(%edx),%ecx
+	pushl	%edi
+	movl	4(%edx),%esi
+	pushl	%ebp
+	movl	8(%edx),%edi
+	pushl	%ebx
+	subl	$108,%esp
+.L000start:
+
+	movl	(%eax),%ebx
+	movl	4(%eax),%ebp
+	movl	%ebx,(%esp)
+	movl	%ebp,4(%esp)
+	movl	8(%eax),%ebx
+	movl	12(%eax),%ebp
+	movl	%ebx,8(%esp)
+	movl	%ebp,12(%esp)
+	movl	16(%eax),%ebx
+	movl	20(%eax),%ebp
+	movl	%ebx,16(%esp)
+	movl	%ebp,20(%esp)
+	movl	24(%eax),%ebx
+	movl	28(%eax),%ebp
+	movl	%ebx,24(%esp)
+	movl	%ebp,28(%esp)
+	movl	32(%eax),%ebx
+	movl	36(%eax),%ebp
+	movl	%ebx,32(%esp)
+	movl	%ebp,36(%esp)
+	movl	40(%eax),%ebx
+	movl	44(%eax),%ebp
+	movl	%ebx,40(%esp)
+	movl	%ebp,44(%esp)
+	movl	48(%eax),%ebx
+	movl	52(%eax),%ebp
+	movl	%ebx,48(%esp)
+	movl	%ebp,52(%esp)
+	movl	56(%eax),%ebx
+	movl	60(%eax),%ebp
+	movl	%ebx,56(%esp)
+	movl	%ebp,60(%esp)
+	movl	%edi,%eax
+	movl	12(%edx),%ebx
+	movl	16(%edx),%ebp
+
+	xorl	%ebx,%eax
+	movl	(%esp),%edx
+	xorl	%esi,%eax
+	addl	%edx,%ecx
+	roll	$10,%edi
+	addl	%eax,%ecx
+	movl	%esi,%eax
+	roll	$11,%ecx
+	addl	%ebp,%ecx
+
+	xorl	%edi,%eax
+	movl	4(%esp),%edx
+	xorl	%ecx,%eax
+	addl	%eax,%ebp
+	movl	%ecx,%eax
+	roll	$10,%esi
+	addl	%edx,%ebp
+	xorl	%esi,%eax
+	roll	$14,%ebp
+	addl	%ebx,%ebp
+
+	movl	8(%esp),%edx
+	xorl	%ebp,%eax
+	addl	%edx,%ebx
+	roll	$10,%ecx
+	addl	%eax,%ebx
+	movl	%ebp,%eax
+	roll	$15,%ebx
+	addl	%edi,%ebx
+
+	xorl	%ecx,%eax
+	movl	12(%esp),%edx
+	xorl	%ebx,%eax
+	addl	%eax,%edi
+	movl	%ebx,%eax
+	roll	$10,%ebp
+	addl	%edx,%edi
+	xorl	%ebp,%eax
+	roll	$12,%edi
+	addl	%esi,%edi
+
+	movl	16(%esp),%edx
+	xorl	%edi,%eax
+	addl	%edx,%esi
+	roll	$10,%ebx
+	addl	%eax,%esi
+	movl	%edi,%eax
+	roll	$5,%esi
+	addl	%ecx,%esi
+
+	xorl	%ebx,%eax
+	movl	20(%esp),%edx
+	xorl	%esi,%eax
+	addl	%eax,%ecx
+	movl	%esi,%eax
+	roll	$10,%edi
+	addl	%edx,%ecx
+	xorl	%edi,%eax
+	roll	$8,%ecx
+	addl	%ebp,%ecx
+
+	movl	24(%esp),%edx
+	xorl	%ecx,%eax
+	addl	%edx,%ebp
+	roll	$10,%esi
+	addl	%eax,%ebp
+	movl	%ecx,%eax
+	roll	$7,%ebp
+	addl	%ebx,%ebp
+
+	xorl	%esi,%eax
+	movl	28(%esp),%edx
+	xorl	%ebp,%eax
+	addl	%eax,%ebx
+	movl	%ebp,%eax
+	roll	$10,%ecx
+	addl	%edx,%ebx
+	xorl	%ecx,%eax
+	roll	$9,%ebx
+	addl	%edi,%ebx
+
+	movl	32(%esp),%edx
+	xorl	%ebx,%eax
+	addl	%edx,%edi
+	roll	$10,%ebp
+	addl	%eax,%edi
+	movl	%ebx,%eax
+	roll	$11,%edi
+	addl	%esi,%edi
+
+	xorl	%ebp,%eax
+	movl	36(%esp),%edx
+	xorl	%edi,%eax
+	addl	%eax,%esi
+	movl	%edi,%eax
+	roll	$10,%ebx
+	addl	%edx,%esi
+	xorl	%ebx,%eax
+	roll	$13,%esi
+	addl	%ecx,%esi
+
+	movl	40(%esp),%edx
+	xorl	%esi,%eax
+	addl	%edx,%ecx
+	roll	$10,%edi
+	addl	%eax,%ecx
+	movl	%esi,%eax
+	roll	$14,%ecx
+	addl	%ebp,%ecx
+
+	xorl	%edi,%eax
+	movl	44(%esp),%edx
+	xorl	%ecx,%eax
+	addl	%eax,%ebp
+	movl	%ecx,%eax
+	roll	$10,%esi
+	addl	%edx,%ebp
+	xorl	%esi,%eax
+	roll	$15,%ebp
+	addl	%ebx,%ebp
+
+	movl	48(%esp),%edx
+	xorl	%ebp,%eax
+	addl	%edx,%ebx
+	roll	$10,%ecx
+	addl	%eax,%ebx
+	movl	%ebp,%eax
+	roll	$6,%ebx
+	addl	%edi,%ebx
+
+	xorl	%ecx,%eax
+	movl	52(%esp),%edx
+	xorl	%ebx,%eax
+	addl	%eax,%edi
+	movl	%ebx,%eax
+	roll	$10,%ebp
+	addl	%edx,%edi
+	xorl	%ebp,%eax
+	roll	$7,%edi
+	addl	%esi,%edi
+
+	movl	56(%esp),%edx
+	xorl	%edi,%eax
+	addl	%edx,%esi
+	roll	$10,%ebx
+	addl	%eax,%esi
+	movl	%edi,%eax
+	roll	$9,%esi
+	addl	%ecx,%esi
+
+	xorl	%ebx,%eax
+	movl	60(%esp),%edx
+	xorl	%esi,%eax
+	addl	%eax,%ecx
+	movl	$-1,%eax
+	roll	$10,%edi
+	addl	%edx,%ecx
+	movl	28(%esp),%edx
+	roll	$8,%ecx
+	addl	%ebp,%ecx
+
+	addl	%edx,%ebp
+	movl	%esi,%edx
+	subl	%ecx,%eax
+	andl	%ecx,%edx
+	andl	%edi,%eax
+	orl	%eax,%edx
+	movl	16(%esp),%eax
+	roll	$10,%esi
+	leal	1518500249(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	roll	$7,%ebp
+	addl	%ebx,%ebp
+
+	addl	%eax,%ebx
+	movl	%ecx,%eax
+	subl	%ebp,%edx
+	andl	%ebp,%eax
+	andl	%esi,%edx
+	orl	%edx,%eax
+	movl	52(%esp),%edx
+	roll	$10,%ecx
+	leal	1518500249(%ebx,%eax,1),%ebx
+	movl	$-1,%eax
+	roll	$6,%ebx
+	addl	%edi,%ebx
+
+	addl	%edx,%edi
+	movl	%ebp,%edx
+	subl	%ebx,%eax
+	andl	%ebx,%edx
+	andl	%ecx,%eax
+	orl	%eax,%edx
+	movl	4(%esp),%eax
+	roll	$10,%ebp
+	leal	1518500249(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	roll	$8,%edi
+	addl	%esi,%edi
+
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	subl	%edi,%edx
+	andl	%edi,%eax
+	andl	%ebp,%edx
+	orl	%edx,%eax
+	movl	40(%esp),%edx
+	roll	$10,%ebx
+	leal	1518500249(%esi,%eax,1),%esi
+	movl	$-1,%eax
+	roll	$13,%esi
+	addl	%ecx,%esi
+
+	addl	%edx,%ecx
+	movl	%edi,%edx
+	subl	%esi,%eax
+	andl	%esi,%edx
+	andl	%ebx,%eax
+	orl	%eax,%edx
+	movl	24(%esp),%eax
+	roll	$10,%edi
+	leal	1518500249(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	roll	$11,%ecx
+	addl	%ebp,%ecx
+
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	subl	%ecx,%edx
+	andl	%ecx,%eax
+	andl	%edi,%edx
+	orl	%edx,%eax
+	movl	60(%esp),%edx
+	roll	$10,%esi
+	leal	1518500249(%ebp,%eax,1),%ebp
+	movl	$-1,%eax
+	roll	$9,%ebp
+	addl	%ebx,%ebp
+
+	addl	%edx,%ebx
+	movl	%ecx,%edx
+	subl	%ebp,%eax
+	andl	%ebp,%edx
+	andl	%esi,%eax
+	orl	%eax,%edx
+	movl	12(%esp),%eax
+	roll	$10,%ecx
+	leal	1518500249(%ebx,%edx,1),%ebx
+	movl	$-1,%edx
+	roll	$7,%ebx
+	addl	%edi,%ebx
+
+	addl	%eax,%edi
+	movl	%ebp,%eax
+	subl	%ebx,%edx
+	andl	%ebx,%eax
+	andl	%ecx,%edx
+	orl	%edx,%eax
+	movl	48(%esp),%edx
+	roll	$10,%ebp
+	leal	1518500249(%edi,%eax,1),%edi
+	movl	$-1,%eax
+	roll	$15,%edi
+	addl	%esi,%edi
+
+	addl	%edx,%esi
+	movl	%ebx,%edx
+	subl	%edi,%eax
+	andl	%edi,%edx
+	andl	%ebp,%eax
+	orl	%eax,%edx
+	movl	(%esp),%eax
+	roll	$10,%ebx
+	leal	1518500249(%esi,%edx,1),%esi
+	movl	$-1,%edx
+	roll	$7,%esi
+	addl	%ecx,%esi
+
+	addl	%eax,%ecx
+	movl	%edi,%eax
+	subl	%esi,%edx
+	andl	%esi,%eax
+	andl	%ebx,%edx
+	orl	%edx,%eax
+	movl	36(%esp),%edx
+	roll	$10,%edi
+	leal	1518500249(%ecx,%eax,1),%ecx
+	movl	$-1,%eax
+	roll	$12,%ecx
+	addl	%ebp,%ecx
+
+	addl	%edx,%ebp
+	movl	%esi,%edx
+	subl	%ecx,%eax
+	andl	%ecx,%edx
+	andl	%edi,%eax
+	orl	%eax,%edx
+	movl	20(%esp),%eax
+	roll	$10,%esi
+	leal	1518500249(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	roll	$15,%ebp
+	addl	%ebx,%ebp
+
+	addl	%eax,%ebx
+	movl	%ecx,%eax
+	subl	%ebp,%edx
+	andl	%ebp,%eax
+	andl	%esi,%edx
+	orl	%edx,%eax
+	movl	8(%esp),%edx
+	roll	$10,%ecx
+	leal	1518500249(%ebx,%eax,1),%ebx
+	movl	$-1,%eax
+	roll	$9,%ebx
+	addl	%edi,%ebx
+
+	addl	%edx,%edi
+	movl	%ebp,%edx
+	subl	%ebx,%eax
+	andl	%ebx,%edx
+	andl	%ecx,%eax
+	orl	%eax,%edx
+	movl	56(%esp),%eax
+	roll	$10,%ebp
+	leal	1518500249(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	roll	$11,%edi
+	addl	%esi,%edi
+
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	subl	%edi,%edx
+	andl	%edi,%eax
+	andl	%ebp,%edx
+	orl	%edx,%eax
+	movl	44(%esp),%edx
+	roll	$10,%ebx
+	leal	1518500249(%esi,%eax,1),%esi
+	movl	$-1,%eax
+	roll	$7,%esi
+	addl	%ecx,%esi
+
+	addl	%edx,%ecx
+	movl	%edi,%edx
+	subl	%esi,%eax
+	andl	%esi,%edx
+	andl	%ebx,%eax
+	orl	%eax,%edx
+	movl	32(%esp),%eax
+	roll	$10,%edi
+	leal	1518500249(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	roll	$13,%ecx
+	addl	%ebp,%ecx
+
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	subl	%ecx,%edx
+	andl	%ecx,%eax
+	andl	%edi,%edx
+	orl	%edx,%eax
+	movl	$-1,%edx
+	roll	$10,%esi
+	leal	1518500249(%ebp,%eax,1),%ebp
+	subl	%ecx,%edx
+	roll	$12,%ebp
+	addl	%ebx,%ebp
+
+	movl	12(%esp),%eax
+	orl	%ebp,%edx
+	addl	%eax,%ebx
+	xorl	%esi,%edx
+	movl	$-1,%eax
+	roll	$10,%ecx
+	leal	1859775393(%ebx,%edx,1),%ebx
+	subl	%ebp,%eax
+	roll	$11,%ebx
+	addl	%edi,%ebx
+
+	movl	40(%esp),%edx
+	orl	%ebx,%eax
+	addl	%edx,%edi
+	xorl	%ecx,%eax
+	movl	$-1,%edx
+	roll	$10,%ebp
+	leal	1859775393(%edi,%eax,1),%edi
+	subl	%ebx,%edx
+	roll	$13,%edi
+	addl	%esi,%edi
+
+	movl	56(%esp),%eax
+	orl	%edi,%edx
+	addl	%eax,%esi
+	xorl	%ebp,%edx
+	movl	$-1,%eax
+	roll	$10,%ebx
+	leal	1859775393(%esi,%edx,1),%esi
+	subl	%edi,%eax
+	roll	$6,%esi
+	addl	%ecx,%esi
+
+	movl	16(%esp),%edx
+	orl	%esi,%eax
+	addl	%edx,%ecx
+	xorl	%ebx,%eax
+	movl	$-1,%edx
+	roll	$10,%edi
+	leal	1859775393(%ecx,%eax,1),%ecx
+	subl	%esi,%edx
+	roll	$7,%ecx
+	addl	%ebp,%ecx
+
+	movl	36(%esp),%eax
+	orl	%ecx,%edx
+	addl	%eax,%ebp
+	xorl	%edi,%edx
+	movl	$-1,%eax
+	roll	$10,%esi
+	leal	1859775393(%ebp,%edx,1),%ebp
+	subl	%ecx,%eax
+	roll	$14,%ebp
+	addl	%ebx,%ebp
+
+	movl	60(%esp),%edx
+	orl	%ebp,%eax
+	addl	%edx,%ebx
+	xorl	%esi,%eax
+	movl	$-1,%edx
+	roll	$10,%ecx
+	leal	1859775393(%ebx,%eax,1),%ebx
+	subl	%ebp,%edx
+	roll	$9,%ebx
+	addl	%edi,%ebx
+
+	movl	32(%esp),%eax
+	orl	%ebx,%edx
+	addl	%eax,%edi
+	xorl	%ecx,%edx
+	movl	$-1,%eax
+	roll	$10,%ebp
+	leal	1859775393(%edi,%edx,1),%edi
+	subl	%ebx,%eax
+	roll	$13,%edi
+	addl	%esi,%edi
+
+	movl	4(%esp),%edx
+	orl	%edi,%eax
+	addl	%edx,%esi
+	xorl	%ebp,%eax
+	movl	$-1,%edx
+	roll	$10,%ebx
+	leal	1859775393(%esi,%eax,1),%esi
+	subl	%edi,%edx
+	roll	$15,%esi
+	addl	%ecx,%esi
+
+	movl	8(%esp),%eax
+	orl	%esi,%edx
+	addl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	$-1,%eax
+	roll	$10,%edi
+	leal	1859775393(%ecx,%edx,1),%ecx
+	subl	%esi,%eax
+	roll	$14,%ecx
+	addl	%ebp,%ecx
+
+	movl	28(%esp),%edx
+	orl	%ecx,%eax
+	addl	%edx,%ebp
+	xorl	%edi,%eax
+	movl	$-1,%edx
+	roll	$10,%esi
+	leal	1859775393(%ebp,%eax,1),%ebp
+	subl	%ecx,%edx
+	roll	$8,%ebp
+	addl	%ebx,%ebp
+
+	movl	(%esp),%eax
+	orl	%ebp,%edx
+	addl	%eax,%ebx
+	xorl	%esi,%edx
+	movl	$-1,%eax
+	roll	$10,%ecx
+	leal	1859775393(%ebx,%edx,1),%ebx
+	subl	%ebp,%eax
+	roll	$13,%ebx
+	addl	%edi,%ebx
+
+	movl	24(%esp),%edx
+	orl	%ebx,%eax
+	addl	%edx,%edi
+	xorl	%ecx,%eax
+	movl	$-1,%edx
+	roll	$10,%ebp
+	leal	1859775393(%edi,%eax,1),%edi
+	subl	%ebx,%edx
+	roll	$6,%edi
+	addl	%esi,%edi
+
+	movl	52(%esp),%eax
+	orl	%edi,%edx
+	addl	%eax,%esi
+	xorl	%ebp,%edx
+	movl	$-1,%eax
+	roll	$10,%ebx
+	leal	1859775393(%esi,%edx,1),%esi
+	subl	%edi,%eax
+	roll	$5,%esi
+	addl	%ecx,%esi
+
+	movl	44(%esp),%edx
+	orl	%esi,%eax
+	addl	%edx,%ecx
+	xorl	%ebx,%eax
+	movl	$-1,%edx
+	roll	$10,%edi
+	leal	1859775393(%ecx,%eax,1),%ecx
+	subl	%esi,%edx
+	roll	$12,%ecx
+	addl	%ebp,%ecx
+
+	movl	20(%esp),%eax
+	orl	%ecx,%edx
+	addl	%eax,%ebp
+	xorl	%edi,%edx
+	movl	$-1,%eax
+	roll	$10,%esi
+	leal	1859775393(%ebp,%edx,1),%ebp
+	subl	%ecx,%eax
+	roll	$7,%ebp
+	addl	%ebx,%ebp
+
+	movl	48(%esp),%edx
+	orl	%ebp,%eax
+	addl	%edx,%ebx
+	xorl	%esi,%eax
+	movl	$-1,%edx
+	roll	$10,%ecx
+	leal	1859775393(%ebx,%eax,1),%ebx
+	movl	%ecx,%eax
+	roll	$5,%ebx
+	addl	%edi,%ebx
+
+	subl	%ecx,%edx
+	andl	%ebx,%eax
+	andl	%ebp,%edx
+	orl	%eax,%edx
+	movl	4(%esp),%eax
+	roll	$10,%ebp
+	leal	2400959708(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	addl	%eax,%edi
+	movl	%ebp,%eax
+	roll	$11,%edi
+	addl	%esi,%edi
+
+	subl	%ebp,%edx
+	andl	%edi,%eax
+	andl	%ebx,%edx
+	orl	%eax,%edx
+	movl	36(%esp),%eax
+	roll	$10,%ebx
+	leal	2400959708(%esi,%edx,1),%esi
+	movl	$-1,%edx
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	roll	$12,%esi
+	addl	%ecx,%esi
+
+	subl	%ebx,%edx
+	andl	%esi,%eax
+	andl	%edi,%edx
+	orl	%eax,%edx
+	movl	44(%esp),%eax
+	roll	$10,%edi
+	leal	2400959708(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	addl	%eax,%ecx
+	movl	%edi,%eax
+	roll	$14,%ecx
+	addl	%ebp,%ecx
+
+	subl	%edi,%edx
+	andl	%ecx,%eax
+	andl	%esi,%edx
+	orl	%eax,%edx
+	movl	40(%esp),%eax
+	roll	$10,%esi
+	leal	2400959708(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	roll	$15,%ebp
+	addl	%ebx,%ebp
+
+	subl	%esi,%edx
+	andl	%ebp,%eax
+	andl	%ecx,%edx
+	orl	%eax,%edx
+	movl	(%esp),%eax
+	roll	$10,%ecx
+	leal	2400959708(%ebx,%edx,1),%ebx
+	movl	$-1,%edx
+	addl	%eax,%ebx
+	movl	%ecx,%eax
+	roll	$14,%ebx
+	addl	%edi,%ebx
+
+	subl	%ecx,%edx
+	andl	%ebx,%eax
+	andl	%ebp,%edx
+	orl	%eax,%edx
+	movl	32(%esp),%eax
+	roll	$10,%ebp
+	leal	2400959708(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	addl	%eax,%edi
+	movl	%ebp,%eax
+	roll	$15,%edi
+	addl	%esi,%edi
+
+	subl	%ebp,%edx
+	andl	%edi,%eax
+	andl	%ebx,%edx
+	orl	%eax,%edx
+	movl	48(%esp),%eax
+	roll	$10,%ebx
+	leal	2400959708(%esi,%edx,1),%esi
+	movl	$-1,%edx
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	roll	$9,%esi
+	addl	%ecx,%esi
+
+	subl	%ebx,%edx
+	andl	%esi,%eax
+	andl	%edi,%edx
+	orl	%eax,%edx
+	movl	16(%esp),%eax
+	roll	$10,%edi
+	leal	2400959708(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	addl	%eax,%ecx
+	movl	%edi,%eax
+	roll	$8,%ecx
+	addl	%ebp,%ecx
+
+	subl	%edi,%edx
+	andl	%ecx,%eax
+	andl	%esi,%edx
+	orl	%eax,%edx
+	movl	52(%esp),%eax
+	roll	$10,%esi
+	leal	2400959708(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	roll	$9,%ebp
+	addl	%ebx,%ebp
+
+	subl	%esi,%edx
+	andl	%ebp,%eax
+	andl	%ecx,%edx
+	orl	%eax,%edx
+	movl	12(%esp),%eax
+	roll	$10,%ecx
+	leal	2400959708(%ebx,%edx,1),%ebx
+	movl	$-1,%edx
+	addl	%eax,%ebx
+	movl	%ecx,%eax
+	roll	$14,%ebx
+	addl	%edi,%ebx
+
+	subl	%ecx,%edx
+	andl	%ebx,%eax
+	andl	%ebp,%edx
+	orl	%eax,%edx
+	movl	28(%esp),%eax
+	roll	$10,%ebp
+	leal	2400959708(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	addl	%eax,%edi
+	movl	%ebp,%eax
+	roll	$5,%edi
+	addl	%esi,%edi
+
+	subl	%ebp,%edx
+	andl	%edi,%eax
+	andl	%ebx,%edx
+	orl	%eax,%edx
+	movl	60(%esp),%eax
+	roll	$10,%ebx
+	leal	2400959708(%esi,%edx,1),%esi
+	movl	$-1,%edx
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	roll	$6,%esi
+	addl	%ecx,%esi
+
+	subl	%ebx,%edx
+	andl	%esi,%eax
+	andl	%edi,%edx
+	orl	%eax,%edx
+	movl	56(%esp),%eax
+	roll	$10,%edi
+	leal	2400959708(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	addl	%eax,%ecx
+	movl	%edi,%eax
+	roll	$8,%ecx
+	addl	%ebp,%ecx
+
+	subl	%edi,%edx
+	andl	%ecx,%eax
+	andl	%esi,%edx
+	orl	%eax,%edx
+	movl	20(%esp),%eax
+	roll	$10,%esi
+	leal	2400959708(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	roll	$6,%ebp
+	addl	%ebx,%ebp
+
+	subl	%esi,%edx
+	andl	%ebp,%eax
+	andl	%ecx,%edx
+	orl	%eax,%edx
+	movl	24(%esp),%eax
+	roll	$10,%ecx
+	leal	2400959708(%ebx,%edx,1),%ebx
+	movl	$-1,%edx
+	addl	%eax,%ebx
+	movl	%ecx,%eax
+	roll	$5,%ebx
+	addl	%edi,%ebx
+
+	subl	%ecx,%edx
+	andl	%ebx,%eax
+	andl	%ebp,%edx
+	orl	%eax,%edx
+	movl	8(%esp),%eax
+	roll	$10,%ebp
+	leal	2400959708(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	addl	%eax,%edi
+	subl	%ebp,%edx
+	roll	$12,%edi
+	addl	%esi,%edi
+
+	movl	16(%esp),%eax
+	orl	%ebx,%edx
+	addl	%eax,%esi
+	xorl	%edi,%edx
+	movl	$-1,%eax
+	roll	$10,%ebx
+	leal	2840853838(%esi,%edx,1),%esi
+	subl	%ebx,%eax
+	roll	$9,%esi
+	addl	%ecx,%esi
+
+	movl	(%esp),%edx
+	orl	%edi,%eax
+	addl	%edx,%ecx
+	xorl	%esi,%eax
+	movl	$-1,%edx
+	roll	$10,%edi
+	leal	2840853838(%ecx,%eax,1),%ecx
+	subl	%edi,%edx
+	roll	$15,%ecx
+	addl	%ebp,%ecx
+
+	movl	20(%esp),%eax
+	orl	%esi,%edx
+	addl	%eax,%ebp
+	xorl	%ecx,%edx
+	movl	$-1,%eax
+	roll	$10,%esi
+	leal	2840853838(%ebp,%edx,1),%ebp
+	subl	%esi,%eax
+	roll	$5,%ebp
+	addl	%ebx,%ebp
+
+	movl	36(%esp),%edx
+	orl	%ecx,%eax
+	addl	%edx,%ebx
+	xorl	%ebp,%eax
+	movl	$-1,%edx
+	roll	$10,%ecx
+	leal	2840853838(%ebx,%eax,1),%ebx
+	subl	%ecx,%edx
+	roll	$11,%ebx
+	addl	%edi,%ebx
+
+	movl	28(%esp),%eax
+	orl	%ebp,%edx
+	addl	%eax,%edi
+	xorl	%ebx,%edx
+	movl	$-1,%eax
+	roll	$10,%ebp
+	leal	2840853838(%edi,%edx,1),%edi
+	subl	%ebp,%eax
+	roll	$6,%edi
+	addl	%esi,%edi
+
+	movl	48(%esp),%edx
+	orl	%ebx,%eax
+	addl	%edx,%esi
+	xorl	%edi,%eax
+	movl	$-1,%edx
+	roll	$10,%ebx
+	leal	2840853838(%esi,%eax,1),%esi
+	subl	%ebx,%edx
+	roll	$8,%esi
+	addl	%ecx,%esi
+
+	movl	8(%esp),%eax
+	orl	%edi,%edx
+	addl	%eax,%ecx
+	xorl	%esi,%edx
+	movl	$-1,%eax
+	roll	$10,%edi
+	leal	2840853838(%ecx,%edx,1),%ecx
+	subl	%edi,%eax
+	roll	$13,%ecx
+	addl	%ebp,%ecx
+
+	movl	40(%esp),%edx
+	orl	%esi,%eax
+	addl	%edx,%ebp
+	xorl	%ecx,%eax
+	movl	$-1,%edx
+	roll	$10,%esi
+	leal	2840853838(%ebp,%eax,1),%ebp
+	subl	%esi,%edx
+	roll	$12,%ebp
+	addl	%ebx,%ebp
+
+	movl	56(%esp),%eax
+	orl	%ecx,%edx
+	addl	%eax,%ebx
+	xorl	%ebp,%edx
+	movl	$-1,%eax
+	roll	$10,%ecx
+	leal	2840853838(%ebx,%edx,1),%ebx
+	subl	%ecx,%eax
+	roll	$5,%ebx
+	addl	%edi,%ebx
+
+	movl	4(%esp),%edx
+	orl	%ebp,%eax
+	addl	%edx,%edi
+	xorl	%ebx,%eax
+	movl	$-1,%edx
+	roll	$10,%ebp
+	leal	2840853838(%edi,%eax,1),%edi
+	subl	%ebp,%edx
+	roll	$12,%edi
+	addl	%esi,%edi
+
+	movl	12(%esp),%eax
+	orl	%ebx,%edx
+	addl	%eax,%esi
+	xorl	%edi,%edx
+	movl	$-1,%eax
+	roll	$10,%ebx
+	leal	2840853838(%esi,%edx,1),%esi
+	subl	%ebx,%eax
+	roll	$13,%esi
+	addl	%ecx,%esi
+
+	movl	32(%esp),%edx
+	orl	%edi,%eax
+	addl	%edx,%ecx
+	xorl	%esi,%eax
+	movl	$-1,%edx
+	roll	$10,%edi
+	leal	2840853838(%ecx,%eax,1),%ecx
+	subl	%edi,%edx
+	roll	$14,%ecx
+	addl	%ebp,%ecx
+
+	movl	44(%esp),%eax
+	orl	%esi,%edx
+	addl	%eax,%ebp
+	xorl	%ecx,%edx
+	movl	$-1,%eax
+	roll	$10,%esi
+	leal	2840853838(%ebp,%edx,1),%ebp
+	subl	%esi,%eax
+	roll	$11,%ebp
+	addl	%ebx,%ebp
+
+	movl	24(%esp),%edx
+	orl	%ecx,%eax
+	addl	%edx,%ebx
+	xorl	%ebp,%eax
+	movl	$-1,%edx
+	roll	$10,%ecx
+	leal	2840853838(%ebx,%eax,1),%ebx
+	subl	%ecx,%edx
+	roll	$8,%ebx
+	addl	%edi,%ebx
+
+	movl	60(%esp),%eax
+	orl	%ebp,%edx
+	addl	%eax,%edi
+	xorl	%ebx,%edx
+	movl	$-1,%eax
+	roll	$10,%ebp
+	leal	2840853838(%edi,%edx,1),%edi
+	subl	%ebp,%eax
+	roll	$5,%edi
+	addl	%esi,%edi
+
+	movl	52(%esp),%edx
+	orl	%ebx,%eax
+	addl	%edx,%esi
+	xorl	%edi,%eax
+	movl	128(%esp),%edx
+	roll	$10,%ebx
+	leal	2840853838(%esi,%eax,1),%esi
+	movl	%ecx,64(%esp)
+	roll	$6,%esi
+	addl	%ecx,%esi
+	movl	(%edx),%ecx
+	movl	%esi,68(%esp)
+	movl	%edi,72(%esp)
+	movl	4(%edx),%esi
+	movl	%ebx,76(%esp)
+	movl	8(%edx),%edi
+	movl	%ebp,80(%esp)
+	movl	12(%edx),%ebx
+	movl	16(%edx),%ebp
+
+	movl	$-1,%edx
+	subl	%ebx,%edx
+	movl	20(%esp),%eax
+	orl	%edi,%edx
+	addl	%eax,%ecx
+	xorl	%esi,%edx
+	movl	$-1,%eax
+	roll	$10,%edi
+	leal	1352829926(%ecx,%edx,1),%ecx
+	subl	%edi,%eax
+	roll	$8,%ecx
+	addl	%ebp,%ecx
+
+	movl	56(%esp),%edx
+	orl	%esi,%eax
+	addl	%edx,%ebp
+	xorl	%ecx,%eax
+	movl	$-1,%edx
+	roll	$10,%esi
+	leal	1352829926(%ebp,%eax,1),%ebp
+	subl	%esi,%edx
+	roll	$9,%ebp
+	addl	%ebx,%ebp
+
+	movl	28(%esp),%eax
+	orl	%ecx,%edx
+	addl	%eax,%ebx
+	xorl	%ebp,%edx
+	movl	$-1,%eax
+	roll	$10,%ecx
+	leal	1352829926(%ebx,%edx,1),%ebx
+	subl	%ecx,%eax
+	roll	$9,%ebx
+	addl	%edi,%ebx
+
+	movl	(%esp),%edx
+	orl	%ebp,%eax
+	addl	%edx,%edi
+	xorl	%ebx,%eax
+	movl	$-1,%edx
+	roll	$10,%ebp
+	leal	1352829926(%edi,%eax,1),%edi
+	subl	%ebp,%edx
+	roll	$11,%edi
+	addl	%esi,%edi
+
+	movl	36(%esp),%eax
+	orl	%ebx,%edx
+	addl	%eax,%esi
+	xorl	%edi,%edx
+	movl	$-1,%eax
+	roll	$10,%ebx
+	leal	1352829926(%esi,%edx,1),%esi
+	subl	%ebx,%eax
+	roll	$13,%esi
+	addl	%ecx,%esi
+
+	movl	8(%esp),%edx
+	orl	%edi,%eax
+	addl	%edx,%ecx
+	xorl	%esi,%eax
+	movl	$-1,%edx
+	roll	$10,%edi
+	leal	1352829926(%ecx,%eax,1),%ecx
+	subl	%edi,%edx
+	roll	$15,%ecx
+	addl	%ebp,%ecx
+
+	movl	44(%esp),%eax
+	orl	%esi,%edx
+	addl	%eax,%ebp
+	xorl	%ecx,%edx
+	movl	$-1,%eax
+	roll	$10,%esi
+	leal	1352829926(%ebp,%edx,1),%ebp
+	subl	%esi,%eax
+	roll	$15,%ebp
+	addl	%ebx,%ebp
+
+	movl	16(%esp),%edx
+	orl	%ecx,%eax
+	addl	%edx,%ebx
+	xorl	%ebp,%eax
+	movl	$-1,%edx
+	roll	$10,%ecx
+	leal	1352829926(%ebx,%eax,1),%ebx
+	subl	%ecx,%edx
+	roll	$5,%ebx
+	addl	%edi,%ebx
+
+	movl	52(%esp),%eax
+	orl	%ebp,%edx
+	addl	%eax,%edi
+	xorl	%ebx,%edx
+	movl	$-1,%eax
+	roll	$10,%ebp
+	leal	1352829926(%edi,%edx,1),%edi
+	subl	%ebp,%eax
+	roll	$7,%edi
+	addl	%esi,%edi
+
+	movl	24(%esp),%edx
+	orl	%ebx,%eax
+	addl	%edx,%esi
+	xorl	%edi,%eax
+	movl	$-1,%edx
+	roll	$10,%ebx
+	leal	1352829926(%esi,%eax,1),%esi
+	subl	%ebx,%edx
+	roll	$7,%esi
+	addl	%ecx,%esi
+
+	movl	60(%esp),%eax
+	orl	%edi,%edx
+	addl	%eax,%ecx
+	xorl	%esi,%edx
+	movl	$-1,%eax
+	roll	$10,%edi
+	leal	1352829926(%ecx,%edx,1),%ecx
+	subl	%edi,%eax
+	roll	$8,%ecx
+	addl	%ebp,%ecx
+
+	movl	32(%esp),%edx
+	orl	%esi,%eax
+	addl	%edx,%ebp
+	xorl	%ecx,%eax
+	movl	$-1,%edx
+	roll	$10,%esi
+	leal	1352829926(%ebp,%eax,1),%ebp
+	subl	%esi,%edx
+	roll	$11,%ebp
+	addl	%ebx,%ebp
+
+	movl	4(%esp),%eax
+	orl	%ecx,%edx
+	addl	%eax,%ebx
+	xorl	%ebp,%edx
+	movl	$-1,%eax
+	roll	$10,%ecx
+	leal	1352829926(%ebx,%edx,1),%ebx
+	subl	%ecx,%eax
+	roll	$14,%ebx
+	addl	%edi,%ebx
+
+	movl	40(%esp),%edx
+	orl	%ebp,%eax
+	addl	%edx,%edi
+	xorl	%ebx,%eax
+	movl	$-1,%edx
+	roll	$10,%ebp
+	leal	1352829926(%edi,%eax,1),%edi
+	subl	%ebp,%edx
+	roll	$14,%edi
+	addl	%esi,%edi
+
+	movl	12(%esp),%eax
+	orl	%ebx,%edx
+	addl	%eax,%esi
+	xorl	%edi,%edx
+	movl	$-1,%eax
+	roll	$10,%ebx
+	leal	1352829926(%esi,%edx,1),%esi
+	subl	%ebx,%eax
+	roll	$12,%esi
+	addl	%ecx,%esi
+
+	movl	48(%esp),%edx
+	orl	%edi,%eax
+	addl	%edx,%ecx
+	xorl	%esi,%eax
+	movl	$-1,%edx
+	roll	$10,%edi
+	leal	1352829926(%ecx,%eax,1),%ecx
+	movl	%edi,%eax
+	roll	$6,%ecx
+	addl	%ebp,%ecx
+
+	subl	%edi,%edx
+	andl	%ecx,%eax
+	andl	%esi,%edx
+	orl	%eax,%edx
+	movl	24(%esp),%eax
+	roll	$10,%esi
+	leal	1548603684(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	roll	$9,%ebp
+	addl	%ebx,%ebp
+
+	subl	%esi,%edx
+	andl	%ebp,%eax
+	andl	%ecx,%edx
+	orl	%eax,%edx
+	movl	44(%esp),%eax
+	roll	$10,%ecx
+	leal	1548603684(%ebx,%edx,1),%ebx
+	movl	$-1,%edx
+	addl	%eax,%ebx
+	movl	%ecx,%eax
+	roll	$13,%ebx
+	addl	%edi,%ebx
+
+	subl	%ecx,%edx
+	andl	%ebx,%eax
+	andl	%ebp,%edx
+	orl	%eax,%edx
+	movl	12(%esp),%eax
+	roll	$10,%ebp
+	leal	1548603684(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	addl	%eax,%edi
+	movl	%ebp,%eax
+	roll	$15,%edi
+	addl	%esi,%edi
+
+	subl	%ebp,%edx
+	andl	%edi,%eax
+	andl	%ebx,%edx
+	orl	%eax,%edx
+	movl	28(%esp),%eax
+	roll	$10,%ebx
+	leal	1548603684(%esi,%edx,1),%esi
+	movl	$-1,%edx
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	roll	$7,%esi
+	addl	%ecx,%esi
+
+	subl	%ebx,%edx
+	andl	%esi,%eax
+	andl	%edi,%edx
+	orl	%eax,%edx
+	movl	(%esp),%eax
+	roll	$10,%edi
+	leal	1548603684(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	addl	%eax,%ecx
+	movl	%edi,%eax
+	roll	$12,%ecx
+	addl	%ebp,%ecx
+
+	subl	%edi,%edx
+	andl	%ecx,%eax
+	andl	%esi,%edx
+	orl	%eax,%edx
+	movl	52(%esp),%eax
+	roll	$10,%esi
+	leal	1548603684(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	roll	$8,%ebp
+	addl	%ebx,%ebp
+
+	subl	%esi,%edx
+	andl	%ebp,%eax
+	andl	%ecx,%edx
+	orl	%eax,%edx
+	movl	20(%esp),%eax
+	roll	$10,%ecx
+	leal	1548603684(%ebx,%edx,1),%ebx
+	movl	$-1,%edx
+	addl	%eax,%ebx
+	movl	%ecx,%eax
+	roll	$9,%ebx
+	addl	%edi,%ebx
+
+	subl	%ecx,%edx
+	andl	%ebx,%eax
+	andl	%ebp,%edx
+	orl	%eax,%edx
+	movl	40(%esp),%eax
+	roll	$10,%ebp
+	leal	1548603684(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	addl	%eax,%edi
+	movl	%ebp,%eax
+	roll	$11,%edi
+	addl	%esi,%edi
+
+	subl	%ebp,%edx
+	andl	%edi,%eax
+	andl	%ebx,%edx
+	orl	%eax,%edx
+	movl	56(%esp),%eax
+	roll	$10,%ebx
+	leal	1548603684(%esi,%edx,1),%esi
+	movl	$-1,%edx
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	roll	$7,%esi
+	addl	%ecx,%esi
+
+	subl	%ebx,%edx
+	andl	%esi,%eax
+	andl	%edi,%edx
+	orl	%eax,%edx
+	movl	60(%esp),%eax
+	roll	$10,%edi
+	leal	1548603684(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	addl	%eax,%ecx
+	movl	%edi,%eax
+	roll	$7,%ecx
+	addl	%ebp,%ecx
+
+	subl	%edi,%edx
+	andl	%ecx,%eax
+	andl	%esi,%edx
+	orl	%eax,%edx
+	movl	32(%esp),%eax
+	roll	$10,%esi
+	leal	1548603684(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	roll	$12,%ebp
+	addl	%ebx,%ebp
+
+	subl	%esi,%edx
+	andl	%ebp,%eax
+	andl	%ecx,%edx
+	orl	%eax,%edx
+	movl	48(%esp),%eax
+	roll	$10,%ecx
+	leal	1548603684(%ebx,%edx,1),%ebx
+	movl	$-1,%edx
+	addl	%eax,%ebx
+	movl	%ecx,%eax
+	roll	$7,%ebx
+	addl	%edi,%ebx
+
+	subl	%ecx,%edx
+	andl	%ebx,%eax
+	andl	%ebp,%edx
+	orl	%eax,%edx
+	movl	16(%esp),%eax
+	roll	$10,%ebp
+	leal	1548603684(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	addl	%eax,%edi
+	movl	%ebp,%eax
+	roll	$6,%edi
+	addl	%esi,%edi
+
+	subl	%ebp,%edx
+	andl	%edi,%eax
+	andl	%ebx,%edx
+	orl	%eax,%edx
+	movl	36(%esp),%eax
+	roll	$10,%ebx
+	leal	1548603684(%esi,%edx,1),%esi
+	movl	$-1,%edx
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	roll	$15,%esi
+	addl	%ecx,%esi
+
+	subl	%ebx,%edx
+	andl	%esi,%eax
+	andl	%edi,%edx
+	orl	%eax,%edx
+	movl	4(%esp),%eax
+	roll	$10,%edi
+	leal	1548603684(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	addl	%eax,%ecx
+	movl	%edi,%eax
+	roll	$13,%ecx
+	addl	%ebp,%ecx
+
+	subl	%edi,%edx
+	andl	%ecx,%eax
+	andl	%esi,%edx
+	orl	%eax,%edx
+	movl	8(%esp),%eax
+	roll	$10,%esi
+	leal	1548603684(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	addl	%eax,%ebp
+	subl	%ecx,%edx
+	roll	$11,%ebp
+	addl	%ebx,%ebp
+
+	movl	60(%esp),%eax
+	orl	%ebp,%edx
+	addl	%eax,%ebx
+	xorl	%esi,%edx
+	movl	$-1,%eax
+	roll	$10,%ecx
+	leal	1836072691(%ebx,%edx,1),%ebx
+	subl	%ebp,%eax
+	roll	$9,%ebx
+	addl	%edi,%ebx
+
+	movl	20(%esp),%edx
+	orl	%ebx,%eax
+	addl	%edx,%edi
+	xorl	%ecx,%eax
+	movl	$-1,%edx
+	roll	$10,%ebp
+	leal	1836072691(%edi,%eax,1),%edi
+	subl	%ebx,%edx
+	roll	$7,%edi
+	addl	%esi,%edi
+
+	movl	4(%esp),%eax
+	orl	%edi,%edx
+	addl	%eax,%esi
+	xorl	%ebp,%edx
+	movl	$-1,%eax
+	roll	$10,%ebx
+	leal	1836072691(%esi,%edx,1),%esi
+	subl	%edi,%eax
+	roll	$15,%esi
+	addl	%ecx,%esi
+
+	movl	12(%esp),%edx
+	orl	%esi,%eax
+	addl	%edx,%ecx
+	xorl	%ebx,%eax
+	movl	$-1,%edx
+	roll	$10,%edi
+	leal	1836072691(%ecx,%eax,1),%ecx
+	subl	%esi,%edx
+	roll	$11,%ecx
+	addl	%ebp,%ecx
+
+	movl	28(%esp),%eax
+	orl	%ecx,%edx
+	addl	%eax,%ebp
+	xorl	%edi,%edx
+	movl	$-1,%eax
+	roll	$10,%esi
+	leal	1836072691(%ebp,%edx,1),%ebp
+	subl	%ecx,%eax
+	roll	$8,%ebp
+	addl	%ebx,%ebp
+
+	movl	56(%esp),%edx
+	orl	%ebp,%eax
+	addl	%edx,%ebx
+	xorl	%esi,%eax
+	movl	$-1,%edx
+	roll	$10,%ecx
+	leal	1836072691(%ebx,%eax,1),%ebx
+	subl	%ebp,%edx
+	roll	$6,%ebx
+	addl	%edi,%ebx
+
+	movl	24(%esp),%eax
+	orl	%ebx,%edx
+	addl	%eax,%edi
+	xorl	%ecx,%edx
+	movl	$-1,%eax
+	roll	$10,%ebp
+	leal	1836072691(%edi,%edx,1),%edi
+	subl	%ebx,%eax
+	roll	$6,%edi
+	addl	%esi,%edi
+
+	movl	36(%esp),%edx
+	orl	%edi,%eax
+	addl	%edx,%esi
+	xorl	%ebp,%eax
+	movl	$-1,%edx
+	roll	$10,%ebx
+	leal	1836072691(%esi,%eax,1),%esi
+	subl	%edi,%edx
+	roll	$14,%esi
+	addl	%ecx,%esi
+
+	movl	44(%esp),%eax
+	orl	%esi,%edx
+	addl	%eax,%ecx
+	xorl	%ebx,%edx
+	movl	$-1,%eax
+	roll	$10,%edi
+	leal	1836072691(%ecx,%edx,1),%ecx
+	subl	%esi,%eax
+	roll	$12,%ecx
+	addl	%ebp,%ecx
+
+	movl	32(%esp),%edx
+	orl	%ecx,%eax
+	addl	%edx,%ebp
+	xorl	%edi,%eax
+	movl	$-1,%edx
+	roll	$10,%esi
+	leal	1836072691(%ebp,%eax,1),%ebp
+	subl	%ecx,%edx
+	roll	$13,%ebp
+	addl	%ebx,%ebp
+
+	movl	48(%esp),%eax
+	orl	%ebp,%edx
+	addl	%eax,%ebx
+	xorl	%esi,%edx
+	movl	$-1,%eax
+	roll	$10,%ecx
+	leal	1836072691(%ebx,%edx,1),%ebx
+	subl	%ebp,%eax
+	roll	$5,%ebx
+	addl	%edi,%ebx
+
+	movl	8(%esp),%edx
+	orl	%ebx,%eax
+	addl	%edx,%edi
+	xorl	%ecx,%eax
+	movl	$-1,%edx
+	roll	$10,%ebp
+	leal	1836072691(%edi,%eax,1),%edi
+	subl	%ebx,%edx
+	roll	$14,%edi
+	addl	%esi,%edi
+
+	movl	40(%esp),%eax
+	orl	%edi,%edx
+	addl	%eax,%esi
+	xorl	%ebp,%edx
+	movl	$-1,%eax
+	roll	$10,%ebx
+	leal	1836072691(%esi,%edx,1),%esi
+	subl	%edi,%eax
+	roll	$13,%esi
+	addl	%ecx,%esi
+
+	movl	(%esp),%edx
+	orl	%esi,%eax
+	addl	%edx,%ecx
+	xorl	%ebx,%eax
+	movl	$-1,%edx
+	roll	$10,%edi
+	leal	1836072691(%ecx,%eax,1),%ecx
+	subl	%esi,%edx
+	roll	$13,%ecx
+	addl	%ebp,%ecx
+
+	movl	16(%esp),%eax
+	orl	%ecx,%edx
+	addl	%eax,%ebp
+	xorl	%edi,%edx
+	movl	$-1,%eax
+	roll	$10,%esi
+	leal	1836072691(%ebp,%edx,1),%ebp
+	subl	%ecx,%eax
+	roll	$7,%ebp
+	addl	%ebx,%ebp
+
+	movl	52(%esp),%edx
+	orl	%ebp,%eax
+	addl	%edx,%ebx
+	xorl	%esi,%eax
+	movl	32(%esp),%edx
+	roll	$10,%ecx
+	leal	1836072691(%ebx,%eax,1),%ebx
+	movl	$-1,%eax
+	roll	$5,%ebx
+	addl	%edi,%ebx
+
+	addl	%edx,%edi
+	movl	%ebp,%edx
+	subl	%ebx,%eax
+	andl	%ebx,%edx
+	andl	%ecx,%eax
+	orl	%eax,%edx
+	movl	24(%esp),%eax
+	roll	$10,%ebp
+	leal	2053994217(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	roll	$15,%edi
+	addl	%esi,%edi
+
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	subl	%edi,%edx
+	andl	%edi,%eax
+	andl	%ebp,%edx
+	orl	%edx,%eax
+	movl	16(%esp),%edx
+	roll	$10,%ebx
+	leal	2053994217(%esi,%eax,1),%esi
+	movl	$-1,%eax
+	roll	$5,%esi
+	addl	%ecx,%esi
+
+	addl	%edx,%ecx
+	movl	%edi,%edx
+	subl	%esi,%eax
+	andl	%esi,%edx
+	andl	%ebx,%eax
+	orl	%eax,%edx
+	movl	4(%esp),%eax
+	roll	$10,%edi
+	leal	2053994217(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	roll	$8,%ecx
+	addl	%ebp,%ecx
+
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	subl	%ecx,%edx
+	andl	%ecx,%eax
+	andl	%edi,%edx
+	orl	%edx,%eax
+	movl	12(%esp),%edx
+	roll	$10,%esi
+	leal	2053994217(%ebp,%eax,1),%ebp
+	movl	$-1,%eax
+	roll	$11,%ebp
+	addl	%ebx,%ebp
+
+	addl	%edx,%ebx
+	movl	%ecx,%edx
+	subl	%ebp,%eax
+	andl	%ebp,%edx
+	andl	%esi,%eax
+	orl	%eax,%edx
+	movl	44(%esp),%eax
+	roll	$10,%ecx
+	leal	2053994217(%ebx,%edx,1),%ebx
+	movl	$-1,%edx
+	roll	$14,%ebx
+	addl	%edi,%ebx
+
+	addl	%eax,%edi
+	movl	%ebp,%eax
+	subl	%ebx,%edx
+	andl	%ebx,%eax
+	andl	%ecx,%edx
+	orl	%edx,%eax
+	movl	60(%esp),%edx
+	roll	$10,%ebp
+	leal	2053994217(%edi,%eax,1),%edi
+	movl	$-1,%eax
+	roll	$14,%edi
+	addl	%esi,%edi
+
+	addl	%edx,%esi
+	movl	%ebx,%edx
+	subl	%edi,%eax
+	andl	%edi,%edx
+	andl	%ebp,%eax
+	orl	%eax,%edx
+	movl	(%esp),%eax
+	roll	$10,%ebx
+	leal	2053994217(%esi,%edx,1),%esi
+	movl	$-1,%edx
+	roll	$6,%esi
+	addl	%ecx,%esi
+
+	addl	%eax,%ecx
+	movl	%edi,%eax
+	subl	%esi,%edx
+	andl	%esi,%eax
+	andl	%ebx,%edx
+	orl	%edx,%eax
+	movl	20(%esp),%edx
+	roll	$10,%edi
+	leal	2053994217(%ecx,%eax,1),%ecx
+	movl	$-1,%eax
+	roll	$14,%ecx
+	addl	%ebp,%ecx
+
+	addl	%edx,%ebp
+	movl	%esi,%edx
+	subl	%ecx,%eax
+	andl	%ecx,%edx
+	andl	%edi,%eax
+	orl	%eax,%edx
+	movl	48(%esp),%eax
+	roll	$10,%esi
+	leal	2053994217(%ebp,%edx,1),%ebp
+	movl	$-1,%edx
+	roll	$6,%ebp
+	addl	%ebx,%ebp
+
+	addl	%eax,%ebx
+	movl	%ecx,%eax
+	subl	%ebp,%edx
+	andl	%ebp,%eax
+	andl	%esi,%edx
+	orl	%edx,%eax
+	movl	8(%esp),%edx
+	roll	$10,%ecx
+	leal	2053994217(%ebx,%eax,1),%ebx
+	movl	$-1,%eax
+	roll	$9,%ebx
+	addl	%edi,%ebx
+
+	addl	%edx,%edi
+	movl	%ebp,%edx
+	subl	%ebx,%eax
+	andl	%ebx,%edx
+	andl	%ecx,%eax
+	orl	%eax,%edx
+	movl	52(%esp),%eax
+	roll	$10,%ebp
+	leal	2053994217(%edi,%edx,1),%edi
+	movl	$-1,%edx
+	roll	$12,%edi
+	addl	%esi,%edi
+
+	addl	%eax,%esi
+	movl	%ebx,%eax
+	subl	%edi,%edx
+	andl	%edi,%eax
+	andl	%ebp,%edx
+	orl	%edx,%eax
+	movl	36(%esp),%edx
+	roll	$10,%ebx
+	leal	2053994217(%esi,%eax,1),%esi
+	movl	$-1,%eax
+	roll	$9,%esi
+	addl	%ecx,%esi
+
+	addl	%edx,%ecx
+	movl	%edi,%edx
+	subl	%esi,%eax
+	andl	%esi,%edx
+	andl	%ebx,%eax
+	orl	%eax,%edx
+	movl	28(%esp),%eax
+	roll	$10,%edi
+	leal	2053994217(%ecx,%edx,1),%ecx
+	movl	$-1,%edx
+	roll	$12,%ecx
+	addl	%ebp,%ecx
+
+	addl	%eax,%ebp
+	movl	%esi,%eax
+	subl	%ecx,%edx
+	andl	%ecx,%eax
+	andl	%edi,%edx
+	orl	%edx,%eax
+	movl	40(%esp),%edx
+	roll	$10,%esi
+	leal	2053994217(%ebp,%eax,1),%ebp
+	movl	$-1,%eax
+	roll	$5,%ebp
+	addl	%ebx,%ebp
+
+	addl	%edx,%ebx
+	movl	%ecx,%edx
+	subl	%ebp,%eax
+	andl	%ebp,%edx
+	andl	%esi,%eax
+	orl	%eax,%edx
+	movl	56(%esp),%eax
+	roll	$10,%ecx
+	leal	2053994217(%ebx,%edx,1),%ebx
+	movl	$-1,%edx
+	roll	$15,%ebx
+	addl	%edi,%ebx
+
+	addl	%eax,%edi
+	movl	%ebp,%eax
+	subl	%ebx,%edx
+	andl	%ebx,%eax
+	andl	%ecx,%edx
+	orl	%eax,%edx
+	movl	%ebx,%eax
+	roll	$10,%ebp
+	leal	2053994217(%edi,%edx,1),%edi
+	xorl	%ebp,%eax
+	roll	$8,%edi
+	addl	%esi,%edi
+
+	movl	48(%esp),%edx
+	xorl	%edi,%eax
+	addl	%edx,%esi
+	roll	$10,%ebx
+	addl	%eax,%esi
+	movl	%edi,%eax
+	roll	$8,%esi
+	addl	%ecx,%esi
+
+	xorl	%ebx,%eax
+	movl	60(%esp),%edx
+	xorl	%esi,%eax
+	addl	%eax,%ecx
+	movl	%esi,%eax
+	roll	$10,%edi
+	addl	%edx,%ecx
+	xorl	%edi,%eax
+	roll	$5,%ecx
+	addl	%ebp,%ecx
+
+	movl	40(%esp),%edx
+	xorl	%ecx,%eax
+	addl	%edx,%ebp
+	roll	$10,%esi
+	addl	%eax,%ebp
+	movl	%ecx,%eax
+	roll	$12,%ebp
+	addl	%ebx,%ebp
+
+	xorl	%esi,%eax
+	movl	16(%esp),%edx
+	xorl	%ebp,%eax
+	addl	%eax,%ebx
+	movl	%ebp,%eax
+	roll	$10,%ecx
+	addl	%edx,%ebx
+	xorl	%ecx,%eax
+	roll	$9,%ebx
+	addl	%edi,%ebx
+
+	movl	4(%esp),%edx
+	xorl	%ebx,%eax
+	addl	%edx,%edi
+	roll	$10,%ebp
+	addl	%eax,%edi
+	movl	%ebx,%eax
+	roll	$12,%edi
+	addl	%esi,%edi
+
+	xorl	%ebp,%eax
+	movl	20(%esp),%edx
+	xorl	%edi,%eax
+	addl	%eax,%esi
+	movl	%edi,%eax
+	roll	$10,%ebx
+	addl	%edx,%esi
+	xorl	%ebx,%eax
+	roll	$5,%esi
+	addl	%ecx,%esi
+
+	movl	32(%esp),%edx
+	xorl	%esi,%eax
+	addl	%edx,%ecx
+	roll	$10,%edi
+	addl	%eax,%ecx
+	movl	%esi,%eax
+	roll	$14,%ecx
+	addl	%ebp,%ecx
+
+	xorl	%edi,%eax
+	movl	28(%esp),%edx
+	xorl	%ecx,%eax
+	addl	%eax,%ebp
+	movl	%ecx,%eax
+	roll	$10,%esi
+	addl	%edx,%ebp
+	xorl	%esi,%eax
+	roll	$6,%ebp
+	addl	%ebx,%ebp
+
+	movl	24(%esp),%edx
+	xorl	%ebp,%eax
+	addl	%edx,%ebx
+	roll	$10,%ecx
+	addl	%eax,%ebx
+	movl	%ebp,%eax
+	roll	$8,%ebx
+	addl	%edi,%ebx
+
+	xorl	%ecx,%eax
+	movl	8(%esp),%edx
+	xorl	%ebx,%eax
+	addl	%eax,%edi
+	movl	%ebx,%eax
+	roll	$10,%ebp
+	addl	%edx,%edi
+	xorl	%ebp,%eax
+	roll	$13,%edi
+	addl	%esi,%edi
+
+	movl	52(%esp),%edx
+	xorl	%edi,%eax
+	addl	%edx,%esi
+	roll	$10,%ebx
+	addl	%eax,%esi
+	movl	%edi,%eax
+	roll	$6,%esi
+	addl	%ecx,%esi
+
+	xorl	%ebx,%eax
+	movl	56(%esp),%edx
+	xorl	%esi,%eax
+	addl	%eax,%ecx
+	movl	%esi,%eax
+	roll	$10,%edi
+	addl	%edx,%ecx
+	xorl	%edi,%eax
+	roll	$5,%ecx
+	addl	%ebp,%ecx
+
+	movl	(%esp),%edx
+	xorl	%ecx,%eax
+	addl	%edx,%ebp
+	roll	$10,%esi
+	addl	%eax,%ebp
+	movl	%ecx,%eax
+	roll	$15,%ebp
+	addl	%ebx,%ebp
+
+	xorl	%esi,%eax
+	movl	12(%esp),%edx
+	xorl	%ebp,%eax
+	addl	%eax,%ebx
+	movl	%ebp,%eax
+	roll	$10,%ecx
+	addl	%edx,%ebx
+	xorl	%ecx,%eax
+	roll	$13,%ebx
+	addl	%edi,%ebx
+
+	movl	36(%esp),%edx
+	xorl	%ebx,%eax
+	addl	%edx,%edi
+	roll	$10,%ebp
+	addl	%eax,%edi
+	movl	%ebx,%eax
+	roll	$11,%edi
+	addl	%esi,%edi
+
+	xorl	%ebp,%eax
+	movl	44(%esp),%edx
+	xorl	%edi,%eax
+	addl	%eax,%esi
+	roll	$10,%ebx
+	addl	%edx,%esi
+	movl	128(%esp),%edx
+	roll	$11,%esi
+	addl	%ecx,%esi
+	movl	4(%edx),%eax
+	addl	%eax,%ebx
+	movl	72(%esp),%eax
+	addl	%eax,%ebx
+	movl	8(%edx),%eax
+	addl	%eax,%ebp
+	movl	76(%esp),%eax
+	addl	%eax,%ebp
+	movl	12(%edx),%eax
+	addl	%eax,%ecx
+	movl	80(%esp),%eax
+	addl	%eax,%ecx
+	movl	16(%edx),%eax
+	addl	%eax,%esi
+	movl	64(%esp),%eax
+	addl	%eax,%esi
+	movl	(%edx),%eax
+	addl	%eax,%edi
+	movl	68(%esp),%eax
+	addl	%eax,%edi
+	movl	136(%esp),%eax
+	movl	%ebx,(%edx)
+	movl	%ebp,4(%edx)
+	movl	%ecx,8(%edx)
+	subl	$1,%eax
+	movl	%esi,12(%edx)
+	movl	%edi,16(%edx)
+	jle	.L001get_out
+	movl	%eax,136(%esp)
+	movl	%ecx,%edi
+	movl	132(%esp),%eax
+	movl	%ebx,%ecx
+	addl	$64,%eax
+	movl	%ebp,%esi
+	movl	%eax,132(%esp)
+	jmp	.L000start
+.L001get_out:
+	addl	$108,%esp
+	popl	%ebx
+	popl	%ebp
+	popl	%edi
+	popl	%esi
+	ret
+.size	ripemd160_block_asm_data_order,.-.L_ripemd160_block_asm_data_order_begin
+#endif


Property changes on: trunk/secure/lib/libcrypto/i386/rmd-586.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
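
The generated rmd-586.S body above is the RIPEMD-160 compression function fully unrolled. Each short movl/subl/andl/orl cluster evaluates one of the standard boolean selectors branch-free (loading $-1 and subtracting a register yields its bitwise NOT), and each leal folds the selector output and the group constant into the rotate-and-add chain: 1518500249, 1859775393, 2400959708 and 2840853838 on the left line, 1352829926, 1548603684, 1836072691 and 2053994217 on the right. The pre-rename rmd-586.s deleted below carries the same generated code. A minimal C sketch of the round pattern, for orientation only (the names rol, f2, f3 and ripemd_step are ours, not OpenSSL's):

#include <stdint.h>

static inline uint32_t rol(uint32_t x, int n)
{
	return (x << n) | (x >> (32 - n));
}

/* Selector in the 1518500249/2053994217 groups above: (x & y) | (~x & z),
 * built from the movl $-1 / subl / andl / andl / orl sequences. */
static inline uint32_t f2(uint32_t x, uint32_t y, uint32_t z)
{
	return (x & y) | (~x & z);
}

/* Selector in the 1859775393/1836072691 groups above: (x | ~y) ^ z,
 * built from the subl / orl / xorl sequences. */
static inline uint32_t f3(uint32_t x, uint32_t y, uint32_t z)
{
	return (x | ~y) ^ z;
}

/* One step: a += f(b,c,d) + X[r] + K; a = rol(a,s) + e; c = rol(c,10).
 * The leal K(%reg,%reg,1) above adds the selector output and the round
 * constant in one instruction; roll $10 rotates the lagging variable. */
static inline void ripemd_step(uint32_t *a, uint32_t b, uint32_t *c,
    uint32_t d, uint32_t e, uint32_t x, uint32_t k, int s)
{
	*a = rol(*a + f2(b, *c, d) + x + k, s) + e;
	*c = rol(*c, 10);
}
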
Deleted: trunk/secure/lib/libcrypto/i386/rmd-586.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/rmd-586.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/rmd-586.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,1966 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/rmd-586.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"rmd-586.s"
-.text
-.globl	ripemd160_block_asm_data_order
-.type	ripemd160_block_asm_data_order,@function
-.align	16
-ripemd160_block_asm_data_order:
-.L_ripemd160_block_asm_data_order_begin:
-	movl	4(%esp),%edx
-	movl	8(%esp),%eax
-	pushl	%esi
-	movl	(%edx),%ecx
-	pushl	%edi
-	movl	4(%edx),%esi
-	pushl	%ebp
-	movl	8(%edx),%edi
-	pushl	%ebx
-	subl	$108,%esp
-.L000start:
-
-	movl	(%eax),%ebx
-	movl	4(%eax),%ebp
-	movl	%ebx,(%esp)
-	movl	%ebp,4(%esp)
-	movl	8(%eax),%ebx
-	movl	12(%eax),%ebp
-	movl	%ebx,8(%esp)
-	movl	%ebp,12(%esp)
-	movl	16(%eax),%ebx
-	movl	20(%eax),%ebp
-	movl	%ebx,16(%esp)
-	movl	%ebp,20(%esp)
-	movl	24(%eax),%ebx
-	movl	28(%eax),%ebp
-	movl	%ebx,24(%esp)
-	movl	%ebp,28(%esp)
-	movl	32(%eax),%ebx
-	movl	36(%eax),%ebp
-	movl	%ebx,32(%esp)
-	movl	%ebp,36(%esp)
-	movl	40(%eax),%ebx
-	movl	44(%eax),%ebp
-	movl	%ebx,40(%esp)
-	movl	%ebp,44(%esp)
-	movl	48(%eax),%ebx
-	movl	52(%eax),%ebp
-	movl	%ebx,48(%esp)
-	movl	%ebp,52(%esp)
-	movl	56(%eax),%ebx
-	movl	60(%eax),%ebp
-	movl	%ebx,56(%esp)
-	movl	%ebp,60(%esp)
-	movl	%edi,%eax
-	movl	12(%edx),%ebx
-	movl	16(%edx),%ebp
-
-	xorl	%ebx,%eax
-	movl	(%esp),%edx
-	xorl	%esi,%eax
-	addl	%edx,%ecx
-	roll	$10,%edi
-	addl	%eax,%ecx
-	movl	%esi,%eax
-	roll	$11,%ecx
-	addl	%ebp,%ecx
-
-	xorl	%edi,%eax
-	movl	4(%esp),%edx
-	xorl	%ecx,%eax
-	addl	%eax,%ebp
-	movl	%ecx,%eax
-	roll	$10,%esi
-	addl	%edx,%ebp
-	xorl	%esi,%eax
-	roll	$14,%ebp
-	addl	%ebx,%ebp
-
-	movl	8(%esp),%edx
-	xorl	%ebp,%eax
-	addl	%edx,%ebx
-	roll	$10,%ecx
-	addl	%eax,%ebx
-	movl	%ebp,%eax
-	roll	$15,%ebx
-	addl	%edi,%ebx
-
-	xorl	%ecx,%eax
-	movl	12(%esp),%edx
-	xorl	%ebx,%eax
-	addl	%eax,%edi
-	movl	%ebx,%eax
-	roll	$10,%ebp
-	addl	%edx,%edi
-	xorl	%ebp,%eax
-	roll	$12,%edi
-	addl	%esi,%edi
-
-	movl	16(%esp),%edx
-	xorl	%edi,%eax
-	addl	%edx,%esi
-	roll	$10,%ebx
-	addl	%eax,%esi
-	movl	%edi,%eax
-	roll	$5,%esi
-	addl	%ecx,%esi
-
-	xorl	%ebx,%eax
-	movl	20(%esp),%edx
-	xorl	%esi,%eax
-	addl	%eax,%ecx
-	movl	%esi,%eax
-	roll	$10,%edi
-	addl	%edx,%ecx
-	xorl	%edi,%eax
-	roll	$8,%ecx
-	addl	%ebp,%ecx
-
-	movl	24(%esp),%edx
-	xorl	%ecx,%eax
-	addl	%edx,%ebp
-	roll	$10,%esi
-	addl	%eax,%ebp
-	movl	%ecx,%eax
-	roll	$7,%ebp
-	addl	%ebx,%ebp
-
-	xorl	%esi,%eax
-	movl	28(%esp),%edx
-	xorl	%ebp,%eax
-	addl	%eax,%ebx
-	movl	%ebp,%eax
-	roll	$10,%ecx
-	addl	%edx,%ebx
-	xorl	%ecx,%eax
-	roll	$9,%ebx
-	addl	%edi,%ebx
-
-	movl	32(%esp),%edx
-	xorl	%ebx,%eax
-	addl	%edx,%edi
-	roll	$10,%ebp
-	addl	%eax,%edi
-	movl	%ebx,%eax
-	roll	$11,%edi
-	addl	%esi,%edi
-
-	xorl	%ebp,%eax
-	movl	36(%esp),%edx
-	xorl	%edi,%eax
-	addl	%eax,%esi
-	movl	%edi,%eax
-	roll	$10,%ebx
-	addl	%edx,%esi
-	xorl	%ebx,%eax
-	roll	$13,%esi
-	addl	%ecx,%esi
-
-	movl	40(%esp),%edx
-	xorl	%esi,%eax
-	addl	%edx,%ecx
-	roll	$10,%edi
-	addl	%eax,%ecx
-	movl	%esi,%eax
-	roll	$14,%ecx
-	addl	%ebp,%ecx
-
-	xorl	%edi,%eax
-	movl	44(%esp),%edx
-	xorl	%ecx,%eax
-	addl	%eax,%ebp
-	movl	%ecx,%eax
-	roll	$10,%esi
-	addl	%edx,%ebp
-	xorl	%esi,%eax
-	roll	$15,%ebp
-	addl	%ebx,%ebp
-
-	movl	48(%esp),%edx
-	xorl	%ebp,%eax
-	addl	%edx,%ebx
-	roll	$10,%ecx
-	addl	%eax,%ebx
-	movl	%ebp,%eax
-	roll	$6,%ebx
-	addl	%edi,%ebx
-
-	xorl	%ecx,%eax
-	movl	52(%esp),%edx
-	xorl	%ebx,%eax
-	addl	%eax,%edi
-	movl	%ebx,%eax
-	roll	$10,%ebp
-	addl	%edx,%edi
-	xorl	%ebp,%eax
-	roll	$7,%edi
-	addl	%esi,%edi
-
-	movl	56(%esp),%edx
-	xorl	%edi,%eax
-	addl	%edx,%esi
-	roll	$10,%ebx
-	addl	%eax,%esi
-	movl	%edi,%eax
-	roll	$9,%esi
-	addl	%ecx,%esi
-
-	xorl	%ebx,%eax
-	movl	60(%esp),%edx
-	xorl	%esi,%eax
-	addl	%eax,%ecx
-	movl	$-1,%eax
-	roll	$10,%edi
-	addl	%edx,%ecx
-	movl	28(%esp),%edx
-	roll	$8,%ecx
-	addl	%ebp,%ecx
-
-	addl	%edx,%ebp
-	movl	%esi,%edx
-	subl	%ecx,%eax
-	andl	%ecx,%edx
-	andl	%edi,%eax
-	orl	%eax,%edx
-	movl	16(%esp),%eax
-	roll	$10,%esi
-	leal	1518500249(%ebp,%edx,1),%ebp
-	movl	$-1,%edx
-	roll	$7,%ebp
-	addl	%ebx,%ebp
-
-	addl	%eax,%ebx
-	movl	%ecx,%eax
-	subl	%ebp,%edx
-	andl	%ebp,%eax
-	andl	%esi,%edx
-	orl	%edx,%eax
-	movl	52(%esp),%edx
-	roll	$10,%ecx
-	leal	1518500249(%ebx,%eax,1),%ebx
-	movl	$-1,%eax
-	roll	$6,%ebx
-	addl	%edi,%ebx
-
-	addl	%edx,%edi
-	movl	%ebp,%edx
-	subl	%ebx,%eax
-	andl	%ebx,%edx
-	andl	%ecx,%eax
-	orl	%eax,%edx
-	movl	4(%esp),%eax
-	roll	$10,%ebp
-	leal	1518500249(%edi,%edx,1),%edi
-	movl	$-1,%edx
-	roll	$8,%edi
-	addl	%esi,%edi
-
-	addl	%eax,%esi
-	movl	%ebx,%eax
-	subl	%edi,%edx
-	andl	%edi,%eax
-	andl	%ebp,%edx
-	orl	%edx,%eax
-	movl	40(%esp),%edx
-	roll	$10,%ebx
-	leal	1518500249(%esi,%eax,1),%esi
-	movl	$-1,%eax
-	roll	$13,%esi
-	addl	%ecx,%esi
-
-	addl	%edx,%ecx
-	movl	%edi,%edx
-	subl	%esi,%eax
-	andl	%esi,%edx
-	andl	%ebx,%eax
-	orl	%eax,%edx
-	movl	24(%esp),%eax
-	roll	$10,%edi
-	leal	1518500249(%ecx,%edx,1),%ecx
-	movl	$-1,%edx
-	roll	$11,%ecx
-	addl	%ebp,%ecx
-
-	addl	%eax,%ebp
-	movl	%esi,%eax
-	subl	%ecx,%edx
-	andl	%ecx,%eax
-	andl	%edi,%edx
-	orl	%edx,%eax
-	movl	60(%esp),%edx
-	roll	$10,%esi
-	leal	1518500249(%ebp,%eax,1),%ebp
-	movl	$-1,%eax
-	roll	$9,%ebp
-	addl	%ebx,%ebp
-
-	addl	%edx,%ebx
-	movl	%ecx,%edx
-	subl	%ebp,%eax
-	andl	%ebp,%edx
-	andl	%esi,%eax
-	orl	%eax,%edx
-	movl	12(%esp),%eax
-	roll	$10,%ecx
-	leal	1518500249(%ebx,%edx,1),%ebx
-	movl	$-1,%edx
-	roll	$7,%ebx
-	addl	%edi,%ebx
-
-	addl	%eax,%edi
-	movl	%ebp,%eax
-	subl	%ebx,%edx
-	andl	%ebx,%eax
-	andl	%ecx,%edx
-	orl	%edx,%eax
-	movl	48(%esp),%edx
-	roll	$10,%ebp
-	leal	1518500249(%edi,%eax,1),%edi
-	movl	$-1,%eax
-	roll	$15,%edi
-	addl	%esi,%edi
-
-	addl	%edx,%esi
-	movl	%ebx,%edx
-	subl	%edi,%eax
-	andl	%edi,%edx
-	andl	%ebp,%eax
-	orl	%eax,%edx
-	movl	(%esp),%eax
-	roll	$10,%ebx
-	leal	1518500249(%esi,%edx,1),%esi
-	movl	$-1,%edx
-	roll	$7,%esi
-	addl	%ecx,%esi
-
-	addl	%eax,%ecx
-	movl	%edi,%eax
-	subl	%esi,%edx
-	andl	%esi,%eax
-	andl	%ebx,%edx
-	orl	%edx,%eax
-	movl	36(%esp),%edx
-	roll	$10,%edi
-	leal	1518500249(%ecx,%eax,1),%ecx
-	movl	$-1,%eax
-	roll	$12,%ecx
-	addl	%ebp,%ecx
-
-	addl	%edx,%ebp
-	movl	%esi,%edx
-	subl	%ecx,%eax
-	andl	%ecx,%edx
-	andl	%edi,%eax
-	orl	%eax,%edx
-	movl	20(%esp),%eax
-	roll	$10,%esi
-	leal	1518500249(%ebp,%edx,1),%ebp
-	movl	$-1,%edx
-	roll	$15,%ebp
-	addl	%ebx,%ebp
-
-	addl	%eax,%ebx
-	movl	%ecx,%eax
-	subl	%ebp,%edx
-	andl	%ebp,%eax
-	andl	%esi,%edx
-	orl	%edx,%eax
-	movl	8(%esp),%edx
-	roll	$10,%ecx
-	leal	1518500249(%ebx,%eax,1),%ebx
-	movl	$-1,%eax
-	roll	$9,%ebx
-	addl	%edi,%ebx
-
-	addl	%edx,%edi
-	movl	%ebp,%edx
-	subl	%ebx,%eax
-	andl	%ebx,%edx
-	andl	%ecx,%eax
-	orl	%eax,%edx
-	movl	56(%esp),%eax
-	roll	$10,%ebp
-	leal	1518500249(%edi,%edx,1),%edi
-	movl	$-1,%edx
-	roll	$11,%edi
-	addl	%esi,%edi
-
-	addl	%eax,%esi
-	movl	%ebx,%eax
-	subl	%edi,%edx
-	andl	%edi,%eax
-	andl	%ebp,%edx
-	orl	%edx,%eax
-	movl	44(%esp),%edx
-	roll	$10,%ebx
-	leal	1518500249(%esi,%eax,1),%esi
-	movl	$-1,%eax
-	roll	$7,%esi
-	addl	%ecx,%esi
-
-	addl	%edx,%ecx
-	movl	%edi,%edx
-	subl	%esi,%eax
-	andl	%esi,%edx
-	andl	%ebx,%eax
-	orl	%eax,%edx
-	movl	32(%esp),%eax
-	roll	$10,%edi
-	leal	1518500249(%ecx,%edx,1),%ecx
-	movl	$-1,%edx
-	roll	$13,%ecx
-	addl	%ebp,%ecx
-
-	addl	%eax,%ebp
-	movl	%esi,%eax
-	subl	%ecx,%edx
-	andl	%ecx,%eax
-	andl	%edi,%edx
-	orl	%edx,%eax
-	movl	$-1,%edx
-	roll	$10,%esi
-	leal	1518500249(%ebp,%eax,1),%ebp
-	subl	%ecx,%edx
-	roll	$12,%ebp
-	addl	%ebx,%ebp
-
-	movl	12(%esp),%eax
-	orl	%ebp,%edx
-	addl	%eax,%ebx
-	xorl	%esi,%edx
-	movl	$-1,%eax
-	roll	$10,%ecx
-	leal	1859775393(%ebx,%edx,1),%ebx
-	subl	%ebp,%eax
-	roll	$11,%ebx
-	addl	%edi,%ebx
-
-	movl	40(%esp),%edx
-	orl	%ebx,%eax
-	addl	%edx,%edi
-	xorl	%ecx,%eax
-	movl	$-1,%edx
-	roll	$10,%ebp
-	leal	1859775393(%edi,%eax,1),%edi
-	subl	%ebx,%edx
-	roll	$13,%edi
-	addl	%esi,%edi
-
-	movl	56(%esp),%eax
-	orl	%edi,%edx
-	addl	%eax,%esi
-	xorl	%ebp,%edx
-	movl	$-1,%eax
-	roll	$10,%ebx
-	leal	1859775393(%esi,%edx,1),%esi
-	subl	%edi,%eax
-	roll	$6,%esi
-	addl	%ecx,%esi
-
-	movl	16(%esp),%edx
-	orl	%esi,%eax
-	addl	%edx,%ecx
-	xorl	%ebx,%eax
-	movl	$-1,%edx
-	roll	$10,%edi
-	leal	1859775393(%ecx,%eax,1),%ecx
-	subl	%esi,%edx
-	roll	$7,%ecx
-	addl	%ebp,%ecx
-
-	movl	36(%esp),%eax
-	orl	%ecx,%edx
-	addl	%eax,%ebp
-	xorl	%edi,%edx
-	movl	$-1,%eax
-	roll	$10,%esi
-	leal	1859775393(%ebp,%edx,1),%ebp
-	subl	%ecx,%eax
-	roll	$14,%ebp
-	addl	%ebx,%ebp
-
-	movl	60(%esp),%edx
-	orl	%ebp,%eax
-	addl	%edx,%ebx
-	xorl	%esi,%eax
-	movl	$-1,%edx
-	roll	$10,%ecx
-	leal	1859775393(%ebx,%eax,1),%ebx
-	subl	%ebp,%edx
-	roll	$9,%ebx
-	addl	%edi,%ebx
-
-	movl	32(%esp),%eax
-	orl	%ebx,%edx
-	addl	%eax,%edi
-	xorl	%ecx,%edx
-	movl	$-1,%eax
-	roll	$10,%ebp
-	leal	1859775393(%edi,%edx,1),%edi
-	subl	%ebx,%eax
-	roll	$13,%edi
-	addl	%esi,%edi
-
-	movl	4(%esp),%edx
-	orl	%edi,%eax
-	addl	%edx,%esi
-	xorl	%ebp,%eax
-	movl	$-1,%edx
-	roll	$10,%ebx
-	leal	1859775393(%esi,%eax,1),%esi
-	subl	%edi,%edx
-	roll	$15,%esi
-	addl	%ecx,%esi
-
-	movl	8(%esp),%eax
-	orl	%esi,%edx
-	addl	%eax,%ecx
-	xorl	%ebx,%edx
-	movl	$-1,%eax
-	roll	$10,%edi
-	leal	1859775393(%ecx,%edx,1),%ecx
-	subl	%esi,%eax
-	roll	$14,%ecx
-	addl	%ebp,%ecx
-
-	movl	28(%esp),%edx
-	orl	%ecx,%eax
-	addl	%edx,%ebp
-	xorl	%edi,%eax
-	movl	$-1,%edx
-	roll	$10,%esi
-	leal	1859775393(%ebp,%eax,1),%ebp
-	subl	%ecx,%edx
-	roll	$8,%ebp
-	addl	%ebx,%ebp
-
-	movl	(%esp),%eax
-	orl	%ebp,%edx
-	addl	%eax,%ebx
-	xorl	%esi,%edx
-	movl	$-1,%eax
-	roll	$10,%ecx
-	leal	1859775393(%ebx,%edx,1),%ebx
-	subl	%ebp,%eax
-	roll	$13,%ebx
-	addl	%edi,%ebx
-
-	movl	24(%esp),%edx
-	orl	%ebx,%eax
-	addl	%edx,%edi
-	xorl	%ecx,%eax
-	movl	$-1,%edx
-	roll	$10,%ebp
-	leal	1859775393(%edi,%eax,1),%edi
-	subl	%ebx,%edx
-	roll	$6,%edi
-	addl	%esi,%edi
-
-	movl	52(%esp),%eax
-	orl	%edi,%edx
-	addl	%eax,%esi
-	xorl	%ebp,%edx
-	movl	$-1,%eax
-	roll	$10,%ebx
-	leal	1859775393(%esi,%edx,1),%esi
-	subl	%edi,%eax
-	roll	$5,%esi
-	addl	%ecx,%esi
-
-	movl	44(%esp),%edx
-	orl	%esi,%eax
-	addl	%edx,%ecx
-	xorl	%ebx,%eax
-	movl	$-1,%edx
-	roll	$10,%edi
-	leal	1859775393(%ecx,%eax,1),%ecx
-	subl	%esi,%edx
-	roll	$12,%ecx
-	addl	%ebp,%ecx
-
-	movl	20(%esp),%eax
-	orl	%ecx,%edx
-	addl	%eax,%ebp
-	xorl	%edi,%edx
-	movl	$-1,%eax
-	roll	$10,%esi
-	leal	1859775393(%ebp,%edx,1),%ebp
-	subl	%ecx,%eax
-	roll	$7,%ebp
-	addl	%ebx,%ebp
-
-	movl	48(%esp),%edx
-	orl	%ebp,%eax
-	addl	%edx,%ebx
-	xorl	%esi,%eax
-	movl	$-1,%edx
-	roll	$10,%ecx
-	leal	1859775393(%ebx,%eax,1),%ebx
-	movl	%ecx,%eax
-	roll	$5,%ebx
-	addl	%edi,%ebx
-
-	subl	%ecx,%edx
-	andl	%ebx,%eax
-	andl	%ebp,%edx
-	orl	%eax,%edx
-	movl	4(%esp),%eax
-	roll	$10,%ebp
-	leal	2400959708(%edi,%edx,1),%edi
-	movl	$-1,%edx
-	addl	%eax,%edi
-	movl	%ebp,%eax
-	roll	$11,%edi
-	addl	%esi,%edi
-
-	subl	%ebp,%edx
-	andl	%edi,%eax
-	andl	%ebx,%edx
-	orl	%eax,%edx
-	movl	36(%esp),%eax
-	roll	$10,%ebx
-	leal	2400959708(%esi,%edx,1),%esi
-	movl	$-1,%edx
-	addl	%eax,%esi
-	movl	%ebx,%eax
-	roll	$12,%esi
-	addl	%ecx,%esi
-
-	subl	%ebx,%edx
-	andl	%esi,%eax
-	andl	%edi,%edx
-	orl	%eax,%edx
-	movl	44(%esp),%eax
-	roll	$10,%edi
-	leal	2400959708(%ecx,%edx,1),%ecx
-	movl	$-1,%edx
-	addl	%eax,%ecx
-	movl	%edi,%eax
-	roll	$14,%ecx
-	addl	%ebp,%ecx
-
-	subl	%edi,%edx
-	andl	%ecx,%eax
-	andl	%esi,%edx
-	orl	%eax,%edx
-	movl	40(%esp),%eax
-	roll	$10,%esi
-	leal	2400959708(%ebp,%edx,1),%ebp
-	movl	$-1,%edx
-	addl	%eax,%ebp
-	movl	%esi,%eax
-	roll	$15,%ebp
-	addl	%ebx,%ebp
-
-	subl	%esi,%edx
-	andl	%ebp,%eax
-	andl	%ecx,%edx
-	orl	%eax,%edx
-	movl	(%esp),%eax
-	roll	$10,%ecx
-	leal	2400959708(%ebx,%edx,1),%ebx
-	movl	$-1,%edx
-	addl	%eax,%ebx
-	movl	%ecx,%eax
-	roll	$14,%ebx
-	addl	%edi,%ebx
-
-	subl	%ecx,%edx
-	andl	%ebx,%eax
-	andl	%ebp,%edx
-	orl	%eax,%edx
-	movl	32(%esp),%eax
-	roll	$10,%ebp
-	leal	2400959708(%edi,%edx,1),%edi
-	movl	$-1,%edx
-	addl	%eax,%edi
-	movl	%ebp,%eax
-	roll	$15,%edi
-	addl	%esi,%edi
-
-	subl	%ebp,%edx
-	andl	%edi,%eax
-	andl	%ebx,%edx
-	orl	%eax,%edx
-	movl	48(%esp),%eax
-	roll	$10,%ebx
-	leal	2400959708(%esi,%edx,1),%esi
-	movl	$-1,%edx
-	addl	%eax,%esi
-	movl	%ebx,%eax
-	roll	$9,%esi
-	addl	%ecx,%esi
-
-	subl	%ebx,%edx
-	andl	%esi,%eax
-	andl	%edi,%edx
-	orl	%eax,%edx
-	movl	16(%esp),%eax
-	roll	$10,%edi
-	leal	2400959708(%ecx,%edx,1),%ecx
-	movl	$-1,%edx
-	addl	%eax,%ecx
-	movl	%edi,%eax
-	roll	$8,%ecx
-	addl	%ebp,%ecx
-
-	subl	%edi,%edx
-	andl	%ecx,%eax
-	andl	%esi,%edx
-	orl	%eax,%edx
-	movl	52(%esp),%eax
-	roll	$10,%esi
-	leal	2400959708(%ebp,%edx,1),%ebp
-	movl	$-1,%edx
-	addl	%eax,%ebp
-	movl	%esi,%eax
-	roll	$9,%ebp
-	addl	%ebx,%ebp
-
-	subl	%esi,%edx
-	andl	%ebp,%eax
-	andl	%ecx,%edx
-	orl	%eax,%edx
-	movl	12(%esp),%eax
-	roll	$10,%ecx
-	leal	2400959708(%ebx,%edx,1),%ebx
-	movl	$-1,%edx
-	addl	%eax,%ebx
-	movl	%ecx,%eax
-	roll	$14,%ebx
-	addl	%edi,%ebx
-
-	subl	%ecx,%edx
-	andl	%ebx,%eax
-	andl	%ebp,%edx
-	orl	%eax,%edx
-	movl	28(%esp),%eax
-	roll	$10,%ebp
-	leal	2400959708(%edi,%edx,1),%edi
-	movl	$-1,%edx
-	addl	%eax,%edi
-	movl	%ebp,%eax
-	roll	$5,%edi
-	addl	%esi,%edi
-
-	subl	%ebp,%edx
-	andl	%edi,%eax
-	andl	%ebx,%edx
-	orl	%eax,%edx
-	movl	60(%esp),%eax
-	roll	$10,%ebx
-	leal	2400959708(%esi,%edx,1),%esi
-	movl	$-1,%edx
-	addl	%eax,%esi
-	movl	%ebx,%eax
-	roll	$6,%esi
-	addl	%ecx,%esi
-
-	subl	%ebx,%edx
-	andl	%esi,%eax
-	andl	%edi,%edx
-	orl	%eax,%edx
-	movl	56(%esp),%eax
-	roll	$10,%edi
-	leal	2400959708(%ecx,%edx,1),%ecx
-	movl	$-1,%edx
-	addl	%eax,%ecx
-	movl	%edi,%eax
-	roll	$8,%ecx
-	addl	%ebp,%ecx
-
-	subl	%edi,%edx
-	andl	%ecx,%eax
-	andl	%esi,%edx
-	orl	%eax,%edx
-	movl	20(%esp),%eax
-	roll	$10,%esi
-	leal	2400959708(%ebp,%edx,1),%ebp
-	movl	$-1,%edx
-	addl	%eax,%ebp
-	movl	%esi,%eax
-	roll	$6,%ebp
-	addl	%ebx,%ebp
-
-	subl	%esi,%edx
-	andl	%ebp,%eax
-	andl	%ecx,%edx
-	orl	%eax,%edx
-	movl	24(%esp),%eax
-	roll	$10,%ecx
-	leal	2400959708(%ebx,%edx,1),%ebx
-	movl	$-1,%edx
-	addl	%eax,%ebx
-	movl	%ecx,%eax
-	roll	$5,%ebx
-	addl	%edi,%ebx
-
-	subl	%ecx,%edx
-	andl	%ebx,%eax
-	andl	%ebp,%edx
-	orl	%eax,%edx
-	movl	8(%esp),%eax
-	roll	$10,%ebp
-	leal	2400959708(%edi,%edx,1),%edi
-	movl	$-1,%edx
-	addl	%eax,%edi
-	subl	%ebp,%edx
-	roll	$12,%edi
-	addl	%esi,%edi
-
-	movl	16(%esp),%eax
-	orl	%ebx,%edx
-	addl	%eax,%esi
-	xorl	%edi,%edx
-	movl	$-1,%eax
-	roll	$10,%ebx
-	leal	2840853838(%esi,%edx,1),%esi
-	subl	%ebx,%eax
-	roll	$9,%esi
-	addl	%ecx,%esi
-
-	movl	(%esp),%edx
-	orl	%edi,%eax
-	addl	%edx,%ecx
-	xorl	%esi,%eax
-	movl	$-1,%edx
-	roll	$10,%edi
-	leal	2840853838(%ecx,%eax,1),%ecx
-	subl	%edi,%edx
-	roll	$15,%ecx
-	addl	%ebp,%ecx
-
-	movl	20(%esp),%eax
-	orl	%esi,%edx
-	addl	%eax,%ebp
-	xorl	%ecx,%edx
-	movl	$-1,%eax
-	roll	$10,%esi
-	leal	2840853838(%ebp,%edx,1),%ebp
-	subl	%esi,%eax
-	roll	$5,%ebp
-	addl	%ebx,%ebp
-
-	movl	36(%esp),%edx
-	orl	%ecx,%eax
-	addl	%edx,%ebx
-	xorl	%ebp,%eax
-	movl	$-1,%edx
-	roll	$10,%ecx
-	leal	2840853838(%ebx,%eax,1),%ebx
-	subl	%ecx,%edx
-	roll	$11,%ebx
-	addl	%edi,%ebx
-
-	movl	28(%esp),%eax
-	orl	%ebp,%edx
-	addl	%eax,%edi
-	xorl	%ebx,%edx
-	movl	$-1,%eax
-	roll	$10,%ebp
-	leal	2840853838(%edi,%edx,1),%edi
-	subl	%ebp,%eax
-	roll	$6,%edi
-	addl	%esi,%edi
-
-	movl	48(%esp),%edx
-	orl	%ebx,%eax
-	addl	%edx,%esi
-	xorl	%edi,%eax
-	movl	$-1,%edx
-	roll	$10,%ebx
-	leal	2840853838(%esi,%eax,1),%esi
-	subl	%ebx,%edx
-	roll	$8,%esi
-	addl	%ecx,%esi
-
-	movl	8(%esp),%eax
-	orl	%edi,%edx
-	addl	%eax,%ecx
-	xorl	%esi,%edx
-	movl	$-1,%eax
-	roll	$10,%edi
-	leal	2840853838(%ecx,%edx,1),%ecx
-	subl	%edi,%eax
-	roll	$13,%ecx
-	addl	%ebp,%ecx
-
-	movl	40(%esp),%edx
-	orl	%esi,%eax
-	addl	%edx,%ebp
-	xorl	%ecx,%eax
-	movl	$-1,%edx
-	roll	$10,%esi
-	leal	2840853838(%ebp,%eax,1),%ebp
-	subl	%esi,%edx
-	roll	$12,%ebp
-	addl	%ebx,%ebp
-
-	movl	56(%esp),%eax
-	orl	%ecx,%edx
-	addl	%eax,%ebx
-	xorl	%ebp,%edx
-	movl	$-1,%eax
-	roll	$10,%ecx
-	leal	2840853838(%ebx,%edx,1),%ebx
-	subl	%ecx,%eax
-	roll	$5,%ebx
-	addl	%edi,%ebx
-
-	movl	4(%esp),%edx
-	orl	%ebp,%eax
-	addl	%edx,%edi
-	xorl	%ebx,%eax
-	movl	$-1,%edx
-	roll	$10,%ebp
-	leal	2840853838(%edi,%eax,1),%edi
-	subl	%ebp,%edx
-	roll	$12,%edi
-	addl	%esi,%edi
-
-	movl	12(%esp),%eax
-	orl	%ebx,%edx
-	addl	%eax,%esi
-	xorl	%edi,%edx
-	movl	$-1,%eax
-	roll	$10,%ebx
-	leal	2840853838(%esi,%edx,1),%esi
-	subl	%ebx,%eax
-	roll	$13,%esi
-	addl	%ecx,%esi
-
-	movl	32(%esp),%edx
-	orl	%edi,%eax
-	addl	%edx,%ecx
-	xorl	%esi,%eax
-	movl	$-1,%edx
-	roll	$10,%edi
-	leal	2840853838(%ecx,%eax,1),%ecx
-	subl	%edi,%edx
-	roll	$14,%ecx
-	addl	%ebp,%ecx
-
-	movl	44(%esp),%eax
-	orl	%esi,%edx
-	addl	%eax,%ebp
-	xorl	%ecx,%edx
-	movl	$-1,%eax
-	roll	$10,%esi
-	leal	2840853838(%ebp,%edx,1),%ebp
-	subl	%esi,%eax
-	roll	$11,%ebp
-	addl	%ebx,%ebp
-
-	movl	24(%esp),%edx
-	orl	%ecx,%eax
-	addl	%edx,%ebx
-	xorl	%ebp,%eax
-	movl	$-1,%edx
-	roll	$10,%ecx
-	leal	2840853838(%ebx,%eax,1),%ebx
-	subl	%ecx,%edx
-	roll	$8,%ebx
-	addl	%edi,%ebx
-
-	movl	60(%esp),%eax
-	orl	%ebp,%edx
-	addl	%eax,%edi
-	xorl	%ebx,%edx
-	movl	$-1,%eax
-	roll	$10,%ebp
-	leal	2840853838(%edi,%edx,1),%edi
-	subl	%ebp,%eax
-	roll	$5,%edi
-	addl	%esi,%edi
-
-	movl	52(%esp),%edx
-	orl	%ebx,%eax
-	addl	%edx,%esi
-	xorl	%edi,%eax
-	movl	128(%esp),%edx
-	roll	$10,%ebx
-	leal	2840853838(%esi,%eax,1),%esi
-	movl	%ecx,64(%esp)
-	roll	$6,%esi
-	addl	%ecx,%esi
-	movl	(%edx),%ecx
-	movl	%esi,68(%esp)
-	movl	%edi,72(%esp)
-	movl	4(%edx),%esi
-	movl	%ebx,76(%esp)
-	movl	8(%edx),%edi
-	movl	%ebp,80(%esp)
-	movl	12(%edx),%ebx
-	movl	16(%edx),%ebp
-
-	movl	$-1,%edx
-	subl	%ebx,%edx
-	movl	20(%esp),%eax
-	orl	%edi,%edx
-	addl	%eax,%ecx
-	xorl	%esi,%edx
-	movl	$-1,%eax
-	roll	$10,%edi
-	leal	1352829926(%ecx,%edx,1),%ecx
-	subl	%edi,%eax
-	roll	$8,%ecx
-	addl	%ebp,%ecx
-
-	movl	56(%esp),%edx
-	orl	%esi,%eax
-	addl	%edx,%ebp
-	xorl	%ecx,%eax
-	movl	$-1,%edx
-	roll	$10,%esi
-	leal	1352829926(%ebp,%eax,1),%ebp
-	subl	%esi,%edx
-	roll	$9,%ebp
-	addl	%ebx,%ebp
-
-	movl	28(%esp),%eax
-	orl	%ecx,%edx
-	addl	%eax,%ebx
-	xorl	%ebp,%edx
-	movl	$-1,%eax
-	roll	$10,%ecx
-	leal	1352829926(%ebx,%edx,1),%ebx
-	subl	%ecx,%eax
-	roll	$9,%ebx
-	addl	%edi,%ebx
-
-	movl	(%esp),%edx
-	orl	%ebp,%eax
-	addl	%edx,%edi
-	xorl	%ebx,%eax
-	movl	$-1,%edx
-	roll	$10,%ebp
-	leal	1352829926(%edi,%eax,1),%edi
-	subl	%ebp,%edx
-	roll	$11,%edi
-	addl	%esi,%edi
-
-	movl	36(%esp),%eax
-	orl	%ebx,%edx
-	addl	%eax,%esi
-	xorl	%edi,%edx
-	movl	$-1,%eax
-	roll	$10,%ebx
-	leal	1352829926(%esi,%edx,1),%esi
-	subl	%ebx,%eax
-	roll	$13,%esi
-	addl	%ecx,%esi
-
-	movl	8(%esp),%edx
-	orl	%edi,%eax
-	addl	%edx,%ecx
-	xorl	%esi,%eax
-	movl	$-1,%edx
-	roll	$10,%edi
-	leal	1352829926(%ecx,%eax,1),%ecx
-	subl	%edi,%edx
-	roll	$15,%ecx
-	addl	%ebp,%ecx
-
-	movl	44(%esp),%eax
-	orl	%esi,%edx
-	addl	%eax,%ebp
-	xorl	%ecx,%edx
-	movl	$-1,%eax
-	roll	$10,%esi
-	leal	1352829926(%ebp,%edx,1),%ebp
-	subl	%esi,%eax
-	roll	$15,%ebp
-	addl	%ebx,%ebp
-
-	movl	16(%esp),%edx
-	orl	%ecx,%eax
-	addl	%edx,%ebx
-	xorl	%ebp,%eax
-	movl	$-1,%edx
-	roll	$10,%ecx
-	leal	1352829926(%ebx,%eax,1),%ebx
-	subl	%ecx,%edx
-	roll	$5,%ebx
-	addl	%edi,%ebx
-
-	movl	52(%esp),%eax
-	orl	%ebp,%edx
-	addl	%eax,%edi
-	xorl	%ebx,%edx
-	movl	$-1,%eax
-	roll	$10,%ebp
-	leal	1352829926(%edi,%edx,1),%edi
-	subl	%ebp,%eax
-	roll	$7,%edi
-	addl	%esi,%edi
-
-	movl	24(%esp),%edx
-	orl	%ebx,%eax
-	addl	%edx,%esi
-	xorl	%edi,%eax
-	movl	$-1,%edx
-	roll	$10,%ebx
-	leal	1352829926(%esi,%eax,1),%esi
-	subl	%ebx,%edx
-	roll	$7,%esi
-	addl	%ecx,%esi
-
-	movl	60(%esp),%eax
-	orl	%edi,%edx
-	addl	%eax,%ecx
-	xorl	%esi,%edx
-	movl	$-1,%eax
-	roll	$10,%edi
-	leal	1352829926(%ecx,%edx,1),%ecx
-	subl	%edi,%eax
-	roll	$8,%ecx
-	addl	%ebp,%ecx
-
-	movl	32(%esp),%edx
-	orl	%esi,%eax
-	addl	%edx,%ebp
-	xorl	%ecx,%eax
-	movl	$-1,%edx
-	roll	$10,%esi
-	leal	1352829926(%ebp,%eax,1),%ebp
-	subl	%esi,%edx
-	roll	$11,%ebp
-	addl	%ebx,%ebp
-
-	movl	4(%esp),%eax
-	orl	%ecx,%edx
-	addl	%eax,%ebx
-	xorl	%ebp,%edx
-	movl	$-1,%eax
-	roll	$10,%ecx
-	leal	1352829926(%ebx,%edx,1),%ebx
-	subl	%ecx,%eax
-	roll	$14,%ebx
-	addl	%edi,%ebx
-
-	movl	40(%esp),%edx
-	orl	%ebp,%eax
-	addl	%edx,%edi
-	xorl	%ebx,%eax
-	movl	$-1,%edx
-	roll	$10,%ebp
-	leal	1352829926(%edi,%eax,1),%edi
-	subl	%ebp,%edx
-	roll	$14,%edi
-	addl	%esi,%edi
-
-	movl	12(%esp),%eax
-	orl	%ebx,%edx
-	addl	%eax,%esi
-	xorl	%edi,%edx
-	movl	$-1,%eax
-	roll	$10,%ebx
-	leal	1352829926(%esi,%edx,1),%esi
-	subl	%ebx,%eax
-	roll	$12,%esi
-	addl	%ecx,%esi
-
-	movl	48(%esp),%edx
-	orl	%edi,%eax
-	addl	%edx,%ecx
-	xorl	%esi,%eax
-	movl	$-1,%edx
-	roll	$10,%edi
-	leal	1352829926(%ecx,%eax,1),%ecx
-	movl	%edi,%eax
-	roll	$6,%ecx
-	addl	%ebp,%ecx
-
-	subl	%edi,%edx
-	andl	%ecx,%eax
-	andl	%esi,%edx
-	orl	%eax,%edx
-	movl	24(%esp),%eax
-	roll	$10,%esi
-	leal	1548603684(%ebp,%edx,1),%ebp
-	movl	$-1,%edx
-	addl	%eax,%ebp
-	movl	%esi,%eax
-	roll	$9,%ebp
-	addl	%ebx,%ebp
-
-	subl	%esi,%edx
-	andl	%ebp,%eax
-	andl	%ecx,%edx
-	orl	%eax,%edx
-	movl	44(%esp),%eax
-	roll	$10,%ecx
-	leal	1548603684(%ebx,%edx,1),%ebx
-	movl	$-1,%edx
-	addl	%eax,%ebx
-	movl	%ecx,%eax
-	roll	$13,%ebx
-	addl	%edi,%ebx
-
-	subl	%ecx,%edx
-	andl	%ebx,%eax
-	andl	%ebp,%edx
-	orl	%eax,%edx
-	movl	12(%esp),%eax
-	roll	$10,%ebp
-	leal	1548603684(%edi,%edx,1),%edi
-	movl	$-1,%edx
-	addl	%eax,%edi
-	movl	%ebp,%eax
-	roll	$15,%edi
-	addl	%esi,%edi
-
-	subl	%ebp,%edx
-	andl	%edi,%eax
-	andl	%ebx,%edx
-	orl	%eax,%edx
-	movl	28(%esp),%eax
-	roll	$10,%ebx
-	leal	1548603684(%esi,%edx,1),%esi
-	movl	$-1,%edx
-	addl	%eax,%esi
-	movl	%ebx,%eax
-	roll	$7,%esi
-	addl	%ecx,%esi
-
-	subl	%ebx,%edx
-	andl	%esi,%eax
-	andl	%edi,%edx
-	orl	%eax,%edx
-	movl	(%esp),%eax
-	roll	$10,%edi
-	leal	1548603684(%ecx,%edx,1),%ecx
-	movl	$-1,%edx
-	addl	%eax,%ecx
-	movl	%edi,%eax
-	roll	$12,%ecx
-	addl	%ebp,%ecx
-
-	subl	%edi,%edx
-	andl	%ecx,%eax
-	andl	%esi,%edx
-	orl	%eax,%edx
-	movl	52(%esp),%eax
-	roll	$10,%esi
-	leal	1548603684(%ebp,%edx,1),%ebp
-	movl	$-1,%edx
-	addl	%eax,%ebp
-	movl	%esi,%eax
-	roll	$8,%ebp
-	addl	%ebx,%ebp
-
-	subl	%esi,%edx
-	andl	%ebp,%eax
-	andl	%ecx,%edx
-	orl	%eax,%edx
-	movl	20(%esp),%eax
-	roll	$10,%ecx
-	leal	1548603684(%ebx,%edx,1),%ebx
-	movl	$-1,%edx
-	addl	%eax,%ebx
-	movl	%ecx,%eax
-	roll	$9,%ebx
-	addl	%edi,%ebx
-
-	subl	%ecx,%edx
-	andl	%ebx,%eax
-	andl	%ebp,%edx
-	orl	%eax,%edx
-	movl	40(%esp),%eax
-	roll	$10,%ebp
-	leal	1548603684(%edi,%edx,1),%edi
-	movl	$-1,%edx
-	addl	%eax,%edi
-	movl	%ebp,%eax
-	roll	$11,%edi
-	addl	%esi,%edi
-
-	subl	%ebp,%edx
-	andl	%edi,%eax
-	andl	%ebx,%edx
-	orl	%eax,%edx
-	movl	56(%esp),%eax
-	roll	$10,%ebx
-	leal	1548603684(%esi,%edx,1),%esi
-	movl	$-1,%edx
-	addl	%eax,%esi
-	movl	%ebx,%eax
-	roll	$7,%esi
-	addl	%ecx,%esi
-
-	subl	%ebx,%edx
-	andl	%esi,%eax
-	andl	%edi,%edx
-	orl	%eax,%edx
-	movl	60(%esp),%eax
-	roll	$10,%edi
-	leal	1548603684(%ecx,%edx,1),%ecx
-	movl	$-1,%edx
-	addl	%eax,%ecx
-	movl	%edi,%eax
-	roll	$7,%ecx
-	addl	%ebp,%ecx
-
-	subl	%edi,%edx
-	andl	%ecx,%eax
-	andl	%esi,%edx
-	orl	%eax,%edx
-	movl	32(%esp),%eax
-	roll	$10,%esi
-	leal	1548603684(%ebp,%edx,1),%ebp
-	movl	$-1,%edx
-	addl	%eax,%ebp
-	movl	%esi,%eax
-	roll	$12,%ebp
-	addl	%ebx,%ebp
-
-	subl	%esi,%edx
-	andl	%ebp,%eax
-	andl	%ecx,%edx
-	orl	%eax,%edx
-	movl	48(%esp),%eax
-	roll	$10,%ecx
-	leal	1548603684(%ebx,%edx,1),%ebx
-	movl	$-1,%edx
-	addl	%eax,%ebx
-	movl	%ecx,%eax
-	roll	$7,%ebx
-	addl	%edi,%ebx
-
-	subl	%ecx,%edx
-	andl	%ebx,%eax
-	andl	%ebp,%edx
-	orl	%eax,%edx
-	movl	16(%esp),%eax
-	roll	$10,%ebp
-	leal	1548603684(%edi,%edx,1),%edi
-	movl	$-1,%edx
-	addl	%eax,%edi
-	movl	%ebp,%eax
-	roll	$6,%edi
-	addl	%esi,%edi
-
-	subl	%ebp,%edx
-	andl	%edi,%eax
-	andl	%ebx,%edx
-	orl	%eax,%edx
-	movl	36(%esp),%eax
-	roll	$10,%ebx
-	leal	1548603684(%esi,%edx,1),%esi
-	movl	$-1,%edx
-	addl	%eax,%esi
-	movl	%ebx,%eax
-	roll	$15,%esi
-	addl	%ecx,%esi
-
-	subl	%ebx,%edx
-	andl	%esi,%eax
-	andl	%edi,%edx
-	orl	%eax,%edx
-	movl	4(%esp),%eax
-	roll	$10,%edi
-	leal	1548603684(%ecx,%edx,1),%ecx
-	movl	$-1,%edx
-	addl	%eax,%ecx
-	movl	%edi,%eax
-	roll	$13,%ecx
-	addl	%ebp,%ecx
-
-	subl	%edi,%edx
-	andl	%ecx,%eax
-	andl	%esi,%edx
-	orl	%eax,%edx
-	movl	8(%esp),%eax
-	roll	$10,%esi
-	leal	1548603684(%ebp,%edx,1),%ebp
-	movl	$-1,%edx
-	addl	%eax,%ebp
-	subl	%ecx,%edx
-	roll	$11,%ebp
-	addl	%ebx,%ebp
-
-	movl	60(%esp),%eax
-	orl	%ebp,%edx
-	addl	%eax,%ebx
-	xorl	%esi,%edx
-	movl	$-1,%eax
-	roll	$10,%ecx
-	leal	1836072691(%ebx,%edx,1),%ebx
-	subl	%ebp,%eax
-	roll	$9,%ebx
-	addl	%edi,%ebx
-
-	movl	20(%esp),%edx
-	orl	%ebx,%eax
-	addl	%edx,%edi
-	xorl	%ecx,%eax
-	movl	$-1,%edx
-	roll	$10,%ebp
-	leal	1836072691(%edi,%eax,1),%edi
-	subl	%ebx,%edx
-	roll	$7,%edi
-	addl	%esi,%edi
-
-	movl	4(%esp),%eax
-	orl	%edi,%edx
-	addl	%eax,%esi
-	xorl	%ebp,%edx
-	movl	$-1,%eax
-	roll	$10,%ebx
-	leal	1836072691(%esi,%edx,1),%esi
-	subl	%edi,%eax
-	roll	$15,%esi
-	addl	%ecx,%esi
-
-	movl	12(%esp),%edx
-	orl	%esi,%eax
-	addl	%edx,%ecx
-	xorl	%ebx,%eax
-	movl	$-1,%edx
-	roll	$10,%edi
-	leal	1836072691(%ecx,%eax,1),%ecx
-	subl	%esi,%edx
-	roll	$11,%ecx
-	addl	%ebp,%ecx
-
-	movl	28(%esp),%eax
-	orl	%ecx,%edx
-	addl	%eax,%ebp
-	xorl	%edi,%edx
-	movl	$-1,%eax
-	roll	$10,%esi
-	leal	1836072691(%ebp,%edx,1),%ebp
-	subl	%ecx,%eax
-	roll	$8,%ebp
-	addl	%ebx,%ebp
-
-	movl	56(%esp),%edx
-	orl	%ebp,%eax
-	addl	%edx,%ebx
-	xorl	%esi,%eax
-	movl	$-1,%edx
-	roll	$10,%ecx
-	leal	1836072691(%ebx,%eax,1),%ebx
-	subl	%ebp,%edx
-	roll	$6,%ebx
-	addl	%edi,%ebx
-
-	movl	24(%esp),%eax
-	orl	%ebx,%edx
-	addl	%eax,%edi
-	xorl	%ecx,%edx
-	movl	$-1,%eax
-	roll	$10,%ebp
-	leal	1836072691(%edi,%edx,1),%edi
-	subl	%ebx,%eax
-	roll	$6,%edi
-	addl	%esi,%edi
-
-	movl	36(%esp),%edx
-	orl	%edi,%eax
-	addl	%edx,%esi
-	xorl	%ebp,%eax
-	movl	$-1,%edx
-	roll	$10,%ebx
-	leal	1836072691(%esi,%eax,1),%esi
-	subl	%edi,%edx
-	roll	$14,%esi
-	addl	%ecx,%esi
-
-	movl	44(%esp),%eax
-	orl	%esi,%edx
-	addl	%eax,%ecx
-	xorl	%ebx,%edx
-	movl	$-1,%eax
-	roll	$10,%edi
-	leal	1836072691(%ecx,%edx,1),%ecx
-	subl	%esi,%eax
-	roll	$12,%ecx
-	addl	%ebp,%ecx
-
-	movl	32(%esp),%edx
-	orl	%ecx,%eax
-	addl	%edx,%ebp
-	xorl	%edi,%eax
-	movl	$-1,%edx
-	roll	$10,%esi
-	leal	1836072691(%ebp,%eax,1),%ebp
-	subl	%ecx,%edx
-	roll	$13,%ebp
-	addl	%ebx,%ebp
-
-	movl	48(%esp),%eax
-	orl	%ebp,%edx
-	addl	%eax,%ebx
-	xorl	%esi,%edx
-	movl	$-1,%eax
-	roll	$10,%ecx
-	leal	1836072691(%ebx,%edx,1),%ebx
-	subl	%ebp,%eax
-	roll	$5,%ebx
-	addl	%edi,%ebx
-
-	movl	8(%esp),%edx
-	orl	%ebx,%eax
-	addl	%edx,%edi
-	xorl	%ecx,%eax
-	movl	$-1,%edx
-	roll	$10,%ebp
-	leal	1836072691(%edi,%eax,1),%edi
-	subl	%ebx,%edx
-	roll	$14,%edi
-	addl	%esi,%edi
-
-	movl	40(%esp),%eax
-	orl	%edi,%edx
-	addl	%eax,%esi
-	xorl	%ebp,%edx
-	movl	$-1,%eax
-	roll	$10,%ebx
-	leal	1836072691(%esi,%edx,1),%esi
-	subl	%edi,%eax
-	roll	$13,%esi
-	addl	%ecx,%esi
-
-	movl	(%esp),%edx
-	orl	%esi,%eax
-	addl	%edx,%ecx
-	xorl	%ebx,%eax
-	movl	$-1,%edx
-	roll	$10,%edi
-	leal	1836072691(%ecx,%eax,1),%ecx
-	subl	%esi,%edx
-	roll	$13,%ecx
-	addl	%ebp,%ecx
-
-	movl	16(%esp),%eax
-	orl	%ecx,%edx
-	addl	%eax,%ebp
-	xorl	%edi,%edx
-	movl	$-1,%eax
-	roll	$10,%esi
-	leal	1836072691(%ebp,%edx,1),%ebp
-	subl	%ecx,%eax
-	roll	$7,%ebp
-	addl	%ebx,%ebp
-
-	movl	52(%esp),%edx
-	orl	%ebp,%eax
-	addl	%edx,%ebx
-	xorl	%esi,%eax
-	movl	32(%esp),%edx
-	roll	$10,%ecx
-	leal	1836072691(%ebx,%eax,1),%ebx
-	movl	$-1,%eax
-	roll	$5,%ebx
-	addl	%edi,%ebx
-
-	addl	%edx,%edi
-	movl	%ebp,%edx
-	subl	%ebx,%eax
-	andl	%ebx,%edx
-	andl	%ecx,%eax
-	orl	%eax,%edx
-	movl	24(%esp),%eax
-	roll	$10,%ebp
-	leal	2053994217(%edi,%edx,1),%edi
-	movl	$-1,%edx
-	roll	$15,%edi
-	addl	%esi,%edi
-
-	addl	%eax,%esi
-	movl	%ebx,%eax
-	subl	%edi,%edx
-	andl	%edi,%eax
-	andl	%ebp,%edx
-	orl	%edx,%eax
-	movl	16(%esp),%edx
-	roll	$10,%ebx
-	leal	2053994217(%esi,%eax,1),%esi
-	movl	$-1,%eax
-	roll	$5,%esi
-	addl	%ecx,%esi
-
-	addl	%edx,%ecx
-	movl	%edi,%edx
-	subl	%esi,%eax
-	andl	%esi,%edx
-	andl	%ebx,%eax
-	orl	%eax,%edx
-	movl	4(%esp),%eax
-	roll	$10,%edi
-	leal	2053994217(%ecx,%edx,1),%ecx
-	movl	$-1,%edx
-	roll	$8,%ecx
-	addl	%ebp,%ecx
-
-	addl	%eax,%ebp
-	movl	%esi,%eax
-	subl	%ecx,%edx
-	andl	%ecx,%eax
-	andl	%edi,%edx
-	orl	%edx,%eax
-	movl	12(%esp),%edx
-	roll	$10,%esi
-	leal	2053994217(%ebp,%eax,1),%ebp
-	movl	$-1,%eax
-	roll	$11,%ebp
-	addl	%ebx,%ebp
-
-	addl	%edx,%ebx
-	movl	%ecx,%edx
-	subl	%ebp,%eax
-	andl	%ebp,%edx
-	andl	%esi,%eax
-	orl	%eax,%edx
-	movl	44(%esp),%eax
-	roll	$10,%ecx
-	leal	2053994217(%ebx,%edx,1),%ebx
-	movl	$-1,%edx
-	roll	$14,%ebx
-	addl	%edi,%ebx
-
-	addl	%eax,%edi
-	movl	%ebp,%eax
-	subl	%ebx,%edx
-	andl	%ebx,%eax
-	andl	%ecx,%edx
-	orl	%edx,%eax
-	movl	60(%esp),%edx
-	roll	$10,%ebp
-	leal	2053994217(%edi,%eax,1),%edi
-	movl	$-1,%eax
-	roll	$14,%edi
-	addl	%esi,%edi
-
-	addl	%edx,%esi
-	movl	%ebx,%edx
-	subl	%edi,%eax
-	andl	%edi,%edx
-	andl	%ebp,%eax
-	orl	%eax,%edx
-	movl	(%esp),%eax
-	roll	$10,%ebx
-	leal	2053994217(%esi,%edx,1),%esi
-	movl	$-1,%edx
-	roll	$6,%esi
-	addl	%ecx,%esi
-
-	addl	%eax,%ecx
-	movl	%edi,%eax
-	subl	%esi,%edx
-	andl	%esi,%eax
-	andl	%ebx,%edx
-	orl	%edx,%eax
-	movl	20(%esp),%edx
-	roll	$10,%edi
-	leal	2053994217(%ecx,%eax,1),%ecx
-	movl	$-1,%eax
-	roll	$14,%ecx
-	addl	%ebp,%ecx
-
-	addl	%edx,%ebp
-	movl	%esi,%edx
-	subl	%ecx,%eax
-	andl	%ecx,%edx
-	andl	%edi,%eax
-	orl	%eax,%edx
-	movl	48(%esp),%eax
-	roll	$10,%esi
-	leal	2053994217(%ebp,%edx,1),%ebp
-	movl	$-1,%edx
-	roll	$6,%ebp
-	addl	%ebx,%ebp
-
-	addl	%eax,%ebx
-	movl	%ecx,%eax
-	subl	%ebp,%edx
-	andl	%ebp,%eax
-	andl	%esi,%edx
-	orl	%edx,%eax
-	movl	8(%esp),%edx
-	roll	$10,%ecx
-	leal	2053994217(%ebx,%eax,1),%ebx
-	movl	$-1,%eax
-	roll	$9,%ebx
-	addl	%edi,%ebx
-
-	addl	%edx,%edi
-	movl	%ebp,%edx
-	subl	%ebx,%eax
-	andl	%ebx,%edx
-	andl	%ecx,%eax
-	orl	%eax,%edx
-	movl	52(%esp),%eax
-	roll	$10,%ebp
-	leal	2053994217(%edi,%edx,1),%edi
-	movl	$-1,%edx
-	roll	$12,%edi
-	addl	%esi,%edi
-
-	addl	%eax,%esi
-	movl	%ebx,%eax
-	subl	%edi,%edx
-	andl	%edi,%eax
-	andl	%ebp,%edx
-	orl	%edx,%eax
-	movl	36(%esp),%edx
-	roll	$10,%ebx
-	leal	2053994217(%esi,%eax,1),%esi
-	movl	$-1,%eax
-	roll	$9,%esi
-	addl	%ecx,%esi
-
-	addl	%edx,%ecx
-	movl	%edi,%edx
-	subl	%esi,%eax
-	andl	%esi,%edx
-	andl	%ebx,%eax
-	orl	%eax,%edx
-	movl	28(%esp),%eax
-	roll	$10,%edi
-	leal	2053994217(%ecx,%edx,1),%ecx
-	movl	$-1,%edx
-	roll	$12,%ecx
-	addl	%ebp,%ecx
-
-	addl	%eax,%ebp
-	movl	%esi,%eax
-	subl	%ecx,%edx
-	andl	%ecx,%eax
-	andl	%edi,%edx
-	orl	%edx,%eax
-	movl	40(%esp),%edx
-	roll	$10,%esi
-	leal	2053994217(%ebp,%eax,1),%ebp
-	movl	$-1,%eax
-	roll	$5,%ebp
-	addl	%ebx,%ebp
-
-	addl	%edx,%ebx
-	movl	%ecx,%edx
-	subl	%ebp,%eax
-	andl	%ebp,%edx
-	andl	%esi,%eax
-	orl	%eax,%edx
-	movl	56(%esp),%eax
-	roll	$10,%ecx
-	leal	2053994217(%ebx,%edx,1),%ebx
-	movl	$-1,%edx
-	roll	$15,%ebx
-	addl	%edi,%ebx
-
-	addl	%eax,%edi
-	movl	%ebp,%eax
-	subl	%ebx,%edx
-	andl	%ebx,%eax
-	andl	%ecx,%edx
-	orl	%eax,%edx
-	movl	%ebx,%eax
-	roll	$10,%ebp
-	leal	2053994217(%edi,%edx,1),%edi
-	xorl	%ebp,%eax
-	roll	$8,%edi
-	addl	%esi,%edi
-
-	movl	48(%esp),%edx
-	xorl	%edi,%eax
-	addl	%edx,%esi
-	roll	$10,%ebx
-	addl	%eax,%esi
-	movl	%edi,%eax
-	roll	$8,%esi
-	addl	%ecx,%esi
-
-	xorl	%ebx,%eax
-	movl	60(%esp),%edx
-	xorl	%esi,%eax
-	addl	%eax,%ecx
-	movl	%esi,%eax
-	roll	$10,%edi
-	addl	%edx,%ecx
-	xorl	%edi,%eax
-	roll	$5,%ecx
-	addl	%ebp,%ecx
-
-	movl	40(%esp),%edx
-	xorl	%ecx,%eax
-	addl	%edx,%ebp
-	roll	$10,%esi
-	addl	%eax,%ebp
-	movl	%ecx,%eax
-	roll	$12,%ebp
-	addl	%ebx,%ebp
-
-	xorl	%esi,%eax
-	movl	16(%esp),%edx
-	xorl	%ebp,%eax
-	addl	%eax,%ebx
-	movl	%ebp,%eax
-	roll	$10,%ecx
-	addl	%edx,%ebx
-	xorl	%ecx,%eax
-	roll	$9,%ebx
-	addl	%edi,%ebx
-
-	movl	4(%esp),%edx
-	xorl	%ebx,%eax
-	addl	%edx,%edi
-	roll	$10,%ebp
-	addl	%eax,%edi
-	movl	%ebx,%eax
-	roll	$12,%edi
-	addl	%esi,%edi
-
-	xorl	%ebp,%eax
-	movl	20(%esp),%edx
-	xorl	%edi,%eax
-	addl	%eax,%esi
-	movl	%edi,%eax
-	roll	$10,%ebx
-	addl	%edx,%esi
-	xorl	%ebx,%eax
-	roll	$5,%esi
-	addl	%ecx,%esi
-
-	movl	32(%esp),%edx
-	xorl	%esi,%eax
-	addl	%edx,%ecx
-	roll	$10,%edi
-	addl	%eax,%ecx
-	movl	%esi,%eax
-	roll	$14,%ecx
-	addl	%ebp,%ecx
-
-	xorl	%edi,%eax
-	movl	28(%esp),%edx
-	xorl	%ecx,%eax
-	addl	%eax,%ebp
-	movl	%ecx,%eax
-	roll	$10,%esi
-	addl	%edx,%ebp
-	xorl	%esi,%eax
-	roll	$6,%ebp
-	addl	%ebx,%ebp
-
-	movl	24(%esp),%edx
-	xorl	%ebp,%eax
-	addl	%edx,%ebx
-	roll	$10,%ecx
-	addl	%eax,%ebx
-	movl	%ebp,%eax
-	roll	$8,%ebx
-	addl	%edi,%ebx
-
-	xorl	%ecx,%eax
-	movl	8(%esp),%edx
-	xorl	%ebx,%eax
-	addl	%eax,%edi
-	movl	%ebx,%eax
-	roll	$10,%ebp
-	addl	%edx,%edi
-	xorl	%ebp,%eax
-	roll	$13,%edi
-	addl	%esi,%edi
-
-	movl	52(%esp),%edx
-	xorl	%edi,%eax
-	addl	%edx,%esi
-	roll	$10,%ebx
-	addl	%eax,%esi
-	movl	%edi,%eax
-	roll	$6,%esi
-	addl	%ecx,%esi
-
-	xorl	%ebx,%eax
-	movl	56(%esp),%edx
-	xorl	%esi,%eax
-	addl	%eax,%ecx
-	movl	%esi,%eax
-	roll	$10,%edi
-	addl	%edx,%ecx
-	xorl	%edi,%eax
-	roll	$5,%ecx
-	addl	%ebp,%ecx
-
-	movl	(%esp),%edx
-	xorl	%ecx,%eax
-	addl	%edx,%ebp
-	roll	$10,%esi
-	addl	%eax,%ebp
-	movl	%ecx,%eax
-	roll	$15,%ebp
-	addl	%ebx,%ebp
-
-	xorl	%esi,%eax
-	movl	12(%esp),%edx
-	xorl	%ebp,%eax
-	addl	%eax,%ebx
-	movl	%ebp,%eax
-	roll	$10,%ecx
-	addl	%edx,%ebx
-	xorl	%ecx,%eax
-	roll	$13,%ebx
-	addl	%edi,%ebx
-
-	movl	36(%esp),%edx
-	xorl	%ebx,%eax
-	addl	%edx,%edi
-	roll	$10,%ebp
-	addl	%eax,%edi
-	movl	%ebx,%eax
-	roll	$11,%edi
-	addl	%esi,%edi
-
-	xorl	%ebp,%eax
-	movl	44(%esp),%edx
-	xorl	%edi,%eax
-	addl	%eax,%esi
-	roll	$10,%ebx
-	addl	%edx,%esi
-	movl	128(%esp),%edx
-	roll	$11,%esi
-	addl	%ecx,%esi
-	movl	4(%edx),%eax
-	addl	%eax,%ebx
-	movl	72(%esp),%eax
-	addl	%eax,%ebx
-	movl	8(%edx),%eax
-	addl	%eax,%ebp
-	movl	76(%esp),%eax
-	addl	%eax,%ebp
-	movl	12(%edx),%eax
-	addl	%eax,%ecx
-	movl	80(%esp),%eax
-	addl	%eax,%ecx
-	movl	16(%edx),%eax
-	addl	%eax,%esi
-	movl	64(%esp),%eax
-	addl	%eax,%esi
-	movl	(%edx),%eax
-	addl	%eax,%edi
-	movl	68(%esp),%eax
-	addl	%eax,%edi
-	movl	136(%esp),%eax
-	movl	%ebx,(%edx)
-	movl	%ebp,4(%edx)
-	movl	%ecx,8(%edx)
-	subl	$1,%eax
-	movl	%esi,12(%edx)
-	movl	%edi,16(%edx)
-	jle	.L001get_out
-	movl	%eax,136(%esp)
-	movl	%ecx,%edi
-	movl	132(%esp),%eax
-	movl	%ebx,%ecx
-	addl	$64,%eax
-	movl	%ebp,%esi
-	movl	%eax,132(%esp)
-	jmp	.L000start
-.L001get_out:
-	addl	$108,%esp
-	popl	%ebx
-	popl	%ebp
-	popl	%edi
-	popl	%esi
-	ret
-.size	ripemd160_block_asm_data_order,.-.L_ripemd160_block_asm_data_order_begin
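
The sha1-586.S added below is laid out the same way. Its PIC prologue probes OPENSSL_ia32cap_P and branches to the .Lssse3_shortcut/.Lavx_shortcut paths when the CPU allows; the scalar .L001x86 loop bswaps sixteen message words onto the stack and runs eighty rounds whose constants 1518500249, 1859775393, 2400959708 and 3395469782 mark the four groups of twenty. A minimal C sketch of the scalar round and of the in-place message schedule that the xorl/roll $1 clusters implement (the names below are illustrative, not OpenSSL's):

#include <stdint.h>

static inline uint32_t rol32(uint32_t x, int n)
{
	return (x << n) | (x >> (32 - n));
}

/* Rounds 0-19 (K = 1518500249): the asm computes Ch(b,c,d) in the
 * equivalent branch-free form ((c ^ d) & b) ^ d. */
static inline uint32_t ch(uint32_t b, uint32_t c, uint32_t d)
{
	return ((c ^ d) & b) ^ d;
}

/* Rounds 20-39 and 60-79 (K = 1859775393, 3395469782): plain parity. */
static inline uint32_t parity(uint32_t b, uint32_t c, uint32_t d)
{
	return b ^ c ^ d;
}

/* Rounds 40-59 (K = 2400959708): majority, which the asm splits into
 * the disjoint pieces ((b ^ c) & d) and (b & c) and adds them. */
static inline uint32_t maj(uint32_t b, uint32_t c, uint32_t d)
{
	return (b & c) | (b & d) | (c & d);
}

/* In-place schedule over 16 stack slots: each xorl/xorl/xorl/roll $1
 * cluster is W[t] = rol(W[t-16] ^ W[t-14] ^ W[t-8] ^ W[t-3], 1);
 * byte offsets 0, 8, 32, 52 in the asm are word indices 0, 2, 8, 13. */
static inline uint32_t schedule(uint32_t w[16], int t)
{
	uint32_t x = w[t & 15] ^ w[(t + 2) & 15] ^
	    w[(t + 8) & 15] ^ w[(t + 13) & 15];
	return w[t & 15] = rol32(x, 1);
}

/* One round of the first group (later groups swap in parity/maj and
 * their K): e += rol(a,5) + f(b,c,d) + W[t] + K; b = rol(b,30).
 * The rorl $2 in the asm is the same rotation as rol32(b, 30). */
static inline void sha1_round(uint32_t a, uint32_t *b, uint32_t c,
    uint32_t d, uint32_t *e, uint32_t w, uint32_t k)
{
	*e += rol32(a, 5) + ch(*b, c, d) + w + k;
	*b = rol32(*b, 30);
}
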

Added: trunk/secure/lib/libcrypto/i386/sha1-586.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/sha1-586.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/sha1-586.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,7667 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/sha1-586.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from sha1-586.pl.
+#ifdef PIC
+.file	"sha1-586.S"
+.text
+.globl	sha1_block_data_order
+.type	sha1_block_data_order,@function
+.align	16
+sha1_block_data_order:
+.L_sha1_block_data_order_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	call	.L000pic_point
+.L000pic_point:
+	popl	%ebp
+	leal	_GLOBAL_OFFSET_TABLE_+[.-.L000pic_point](%ebp),%esi
+	movl	OPENSSL_ia32cap_P@GOT(%esi),%esi
+	leal	.LK_XX_XX-.L000pic_point(%ebp),%ebp
+	movl	(%esi),%eax
+	movl	4(%esi),%edx
+	testl	$512,%edx
+	jz	.L001x86
+	testl	$16777216,%eax
+	jz	.L001x86
+	andl	$268435456,%edx
+	andl	$1073741824,%eax
+	orl	%edx,%eax
+	cmpl	$1342177280,%eax
+	je	.Lavx_shortcut
+	jmp	.Lssse3_shortcut
+.align	16
+.L001x86:
+	movl	20(%esp),%ebp
+	movl	24(%esp),%esi
+	movl	28(%esp),%eax
+	subl	$76,%esp
+	shll	$6,%eax
+	addl	%esi,%eax
+	movl	%eax,104(%esp)
+	movl	16(%ebp),%edi
+	jmp	.L002loop
+.align	16
+.L002loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,(%esp)
+	movl	%ebx,4(%esp)
+	movl	%ecx,8(%esp)
+	movl	%edx,12(%esp)
+	movl	16(%esi),%eax
+	movl	20(%esi),%ebx
+	movl	24(%esi),%ecx
+	movl	28(%esi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,16(%esp)
+	movl	%ebx,20(%esp)
+	movl	%ecx,24(%esp)
+	movl	%edx,28(%esp)
+	movl	32(%esi),%eax
+	movl	36(%esi),%ebx
+	movl	40(%esi),%ecx
+	movl	44(%esi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,32(%esp)
+	movl	%ebx,36(%esp)
+	movl	%ecx,40(%esp)
+	movl	%edx,44(%esp)
+	movl	48(%esi),%eax
+	movl	52(%esi),%ebx
+	movl	56(%esi),%ecx
+	movl	60(%esi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,48(%esp)
+	movl	%ebx,52(%esp)
+	movl	%ecx,56(%esp)
+	movl	%edx,60(%esp)
+	movl	%esi,100(%esp)
+	movl	(%ebp),%eax
+	movl	4(%ebp),%ebx
+	movl	8(%ebp),%ecx
+	movl	12(%ebp),%edx
+
+	movl	%ecx,%esi
+	movl	%eax,%ebp
+	roll	$5,%ebp
+	xorl	%edx,%esi
+	addl	%edi,%ebp
+	movl	(%esp),%edi
+	andl	%ebx,%esi
+	rorl	$2,%ebx
+	xorl	%edx,%esi
+	leal	1518500249(%ebp,%edi,1),%ebp
+	addl	%esi,%ebp
+
+	movl	%ebx,%edi
+	movl	%ebp,%esi
+	roll	$5,%ebp
+	xorl	%ecx,%edi
+	addl	%edx,%ebp
+	movl	4(%esp),%edx
+	andl	%eax,%edi
+	rorl	$2,%eax
+	xorl	%ecx,%edi
+	leal	1518500249(%ebp,%edx,1),%ebp
+	addl	%edi,%ebp
+
+	movl	%eax,%edx
+	movl	%ebp,%edi
+	roll	$5,%ebp
+	xorl	%ebx,%edx
+	addl	%ecx,%ebp
+	movl	8(%esp),%ecx
+	andl	%esi,%edx
+	rorl	$2,%esi
+	xorl	%ebx,%edx
+	leal	1518500249(%ebp,%ecx,1),%ebp
+	addl	%edx,%ebp
+
+	movl	%esi,%ecx
+	movl	%ebp,%edx
+	roll	$5,%ebp
+	xorl	%eax,%ecx
+	addl	%ebx,%ebp
+	movl	12(%esp),%ebx
+	andl	%edi,%ecx
+	rorl	$2,%edi
+	xorl	%eax,%ecx
+	leal	1518500249(%ebp,%ebx,1),%ebp
+	addl	%ecx,%ebp
+
+	movl	%edi,%ebx
+	movl	%ebp,%ecx
+	roll	$5,%ebp
+	xorl	%esi,%ebx
+	addl	%eax,%ebp
+	movl	16(%esp),%eax
+	andl	%edx,%ebx
+	rorl	$2,%edx
+	xorl	%esi,%ebx
+	leal	1518500249(%ebp,%eax,1),%ebp
+	addl	%ebx,%ebp
+
+	movl	%edx,%eax
+	movl	%ebp,%ebx
+	roll	$5,%ebp
+	xorl	%edi,%eax
+	addl	%esi,%ebp
+	movl	20(%esp),%esi
+	andl	%ecx,%eax
+	rorl	$2,%ecx
+	xorl	%edi,%eax
+	leal	1518500249(%ebp,%esi,1),%ebp
+	addl	%eax,%ebp
+
+	movl	%ecx,%esi
+	movl	%ebp,%eax
+	roll	$5,%ebp
+	xorl	%edx,%esi
+	addl	%edi,%ebp
+	movl	24(%esp),%edi
+	andl	%ebx,%esi
+	rorl	$2,%ebx
+	xorl	%edx,%esi
+	leal	1518500249(%ebp,%edi,1),%ebp
+	addl	%esi,%ebp
+
+	movl	%ebx,%edi
+	movl	%ebp,%esi
+	roll	$5,%ebp
+	xorl	%ecx,%edi
+	addl	%edx,%ebp
+	movl	28(%esp),%edx
+	andl	%eax,%edi
+	rorl	$2,%eax
+	xorl	%ecx,%edi
+	leal	1518500249(%ebp,%edx,1),%ebp
+	addl	%edi,%ebp
+
+	movl	%eax,%edx
+	movl	%ebp,%edi
+	roll	$5,%ebp
+	xorl	%ebx,%edx
+	addl	%ecx,%ebp
+	movl	32(%esp),%ecx
+	andl	%esi,%edx
+	rorl	$2,%esi
+	xorl	%ebx,%edx
+	leal	1518500249(%ebp,%ecx,1),%ebp
+	addl	%edx,%ebp
+
+	movl	%esi,%ecx
+	movl	%ebp,%edx
+	roll	$5,%ebp
+	xorl	%eax,%ecx
+	addl	%ebx,%ebp
+	movl	36(%esp),%ebx
+	andl	%edi,%ecx
+	rorl	$2,%edi
+	xorl	%eax,%ecx
+	leal	1518500249(%ebp,%ebx,1),%ebp
+	addl	%ecx,%ebp
+
+	movl	%edi,%ebx
+	movl	%ebp,%ecx
+	roll	$5,%ebp
+	xorl	%esi,%ebx
+	addl	%eax,%ebp
+	movl	40(%esp),%eax
+	andl	%edx,%ebx
+	rorl	$2,%edx
+	xorl	%esi,%ebx
+	leal	1518500249(%ebp,%eax,1),%ebp
+	addl	%ebx,%ebp
+
+	movl	%edx,%eax
+	movl	%ebp,%ebx
+	roll	$5,%ebp
+	xorl	%edi,%eax
+	addl	%esi,%ebp
+	movl	44(%esp),%esi
+	andl	%ecx,%eax
+	rorl	$2,%ecx
+	xorl	%edi,%eax
+	leal	1518500249(%ebp,%esi,1),%ebp
+	addl	%eax,%ebp
+
+	movl	%ecx,%esi
+	movl	%ebp,%eax
+	roll	$5,%ebp
+	xorl	%edx,%esi
+	addl	%edi,%ebp
+	movl	48(%esp),%edi
+	andl	%ebx,%esi
+	rorl	$2,%ebx
+	xorl	%edx,%esi
+	leal	1518500249(%ebp,%edi,1),%ebp
+	addl	%esi,%ebp
+
+	movl	%ebx,%edi
+	movl	%ebp,%esi
+	roll	$5,%ebp
+	xorl	%ecx,%edi
+	addl	%edx,%ebp
+	movl	52(%esp),%edx
+	andl	%eax,%edi
+	rorl	$2,%eax
+	xorl	%ecx,%edi
+	leal	1518500249(%ebp,%edx,1),%ebp
+	addl	%edi,%ebp
+
+	movl	%eax,%edx
+	movl	%ebp,%edi
+	roll	$5,%ebp
+	xorl	%ebx,%edx
+	addl	%ecx,%ebp
+	movl	56(%esp),%ecx
+	andl	%esi,%edx
+	rorl	$2,%esi
+	xorl	%ebx,%edx
+	leal	1518500249(%ebp,%ecx,1),%ebp
+	addl	%edx,%ebp
+
+	movl	%esi,%ecx
+	movl	%ebp,%edx
+	roll	$5,%ebp
+	xorl	%eax,%ecx
+	addl	%ebx,%ebp
+	movl	60(%esp),%ebx
+	andl	%edi,%ecx
+	rorl	$2,%edi
+	xorl	%eax,%ecx
+	leal	1518500249(%ebp,%ebx,1),%ebp
+	movl	(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edi,%ebp
+	xorl	8(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	32(%esp),%ebx
+	andl	%edx,%ebp
+	xorl	52(%esp),%ebx
+	roll	$1,%ebx
+	xorl	%esi,%ebp
+	addl	%ebp,%eax
+	movl	%ecx,%ebp
+	rorl	$2,%edx
+	movl	%ebx,(%esp)
+	roll	$5,%ebp
+	leal	1518500249(%ebx,%eax,1),%ebx
+	movl	4(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%edx,%ebp
+	xorl	12(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	36(%esp),%eax
+	andl	%ecx,%ebp
+	xorl	56(%esp),%eax
+	roll	$1,%eax
+	xorl	%edi,%ebp
+	addl	%ebp,%esi
+	movl	%ebx,%ebp
+	rorl	$2,%ecx
+	movl	%eax,4(%esp)
+	roll	$5,%ebp
+	leal	1518500249(%eax,%esi,1),%eax
+	movl	8(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ecx,%ebp
+	xorl	16(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	40(%esp),%esi
+	andl	%ebx,%ebp
+	xorl	60(%esp),%esi
+	roll	$1,%esi
+	xorl	%edx,%ebp
+	addl	%ebp,%edi
+	movl	%eax,%ebp
+	rorl	$2,%ebx
+	movl	%esi,8(%esp)
+	roll	$5,%ebp
+	leal	1518500249(%esi,%edi,1),%esi
+	movl	12(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%ebx,%ebp
+	xorl	20(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	44(%esp),%edi
+	andl	%eax,%ebp
+	xorl	(%esp),%edi
+	roll	$1,%edi
+	xorl	%ecx,%ebp
+	addl	%ebp,%edx
+	movl	%esi,%ebp
+	rorl	$2,%eax
+	movl	%edi,12(%esp)
+	roll	$5,%ebp
+	leal	1518500249(%edi,%edx,1),%edi
+	movl	16(%esp),%edx
+	addl	%ebp,%edi
+
+	movl	%esi,%ebp
+	xorl	24(%esp),%edx
+	xorl	%eax,%ebp
+	xorl	48(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	4(%esp),%edx
+	roll	$1,%edx
+	addl	%ebp,%ecx
+	rorl	$2,%esi
+	movl	%edi,%ebp
+	roll	$5,%ebp
+	movl	%edx,16(%esp)
+	leal	1859775393(%edx,%ecx,1),%edx
+	movl	20(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%edi,%ebp
+	xorl	28(%esp),%ecx
+	xorl	%esi,%ebp
+	xorl	52(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	8(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebp,%ebx
+	rorl	$2,%edi
+	movl	%edx,%ebp
+	roll	$5,%ebp
+	movl	%ecx,20(%esp)
+	leal	1859775393(%ecx,%ebx,1),%ecx
+	movl	24(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edx,%ebp
+	xorl	32(%esp),%ebx
+	xorl	%edi,%ebp
+	xorl	56(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	12(%esp),%ebx
+	roll	$1,%ebx
+	addl	%ebp,%eax
+	rorl	$2,%edx
+	movl	%ecx,%ebp
+	roll	$5,%ebp
+	movl	%ebx,24(%esp)
+	leal	1859775393(%ebx,%eax,1),%ebx
+	movl	28(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%ecx,%ebp
+	xorl	36(%esp),%eax
+	xorl	%edx,%ebp
+	xorl	60(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	16(%esp),%eax
+	roll	$1,%eax
+	addl	%ebp,%esi
+	rorl	$2,%ecx
+	movl	%ebx,%ebp
+	roll	$5,%ebp
+	movl	%eax,28(%esp)
+	leal	1859775393(%eax,%esi,1),%eax
+	movl	32(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ebx,%ebp
+	xorl	40(%esp),%esi
+	xorl	%ecx,%ebp
+	xorl	(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	20(%esp),%esi
+	roll	$1,%esi
+	addl	%ebp,%edi
+	rorl	$2,%ebx
+	movl	%eax,%ebp
+	roll	$5,%ebp
+	movl	%esi,32(%esp)
+	leal	1859775393(%esi,%edi,1),%esi
+	movl	36(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%eax,%ebp
+	xorl	44(%esp),%edi
+	xorl	%ebx,%ebp
+	xorl	4(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	24(%esp),%edi
+	roll	$1,%edi
+	addl	%ebp,%edx
+	rorl	$2,%eax
+	movl	%esi,%ebp
+	roll	$5,%ebp
+	movl	%edi,36(%esp)
+	leal	1859775393(%edi,%edx,1),%edi
+	movl	40(%esp),%edx
+	addl	%ebp,%edi
+
+	movl	%esi,%ebp
+	xorl	48(%esp),%edx
+	xorl	%eax,%ebp
+	xorl	8(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	28(%esp),%edx
+	roll	$1,%edx
+	addl	%ebp,%ecx
+	rorl	$2,%esi
+	movl	%edi,%ebp
+	roll	$5,%ebp
+	movl	%edx,40(%esp)
+	leal	1859775393(%edx,%ecx,1),%edx
+	movl	44(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%edi,%ebp
+	xorl	52(%esp),%ecx
+	xorl	%esi,%ebp
+	xorl	12(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	32(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebp,%ebx
+	rorl	$2,%edi
+	movl	%edx,%ebp
+	roll	$5,%ebp
+	movl	%ecx,44(%esp)
+	leal	1859775393(%ecx,%ebx,1),%ecx
+	movl	48(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edx,%ebp
+	xorl	56(%esp),%ebx
+	xorl	%edi,%ebp
+	xorl	16(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	36(%esp),%ebx
+	roll	$1,%ebx
+	addl	%ebp,%eax
+	rorl	$2,%edx
+	movl	%ecx,%ebp
+	roll	$5,%ebp
+	movl	%ebx,48(%esp)
+	leal	1859775393(%ebx,%eax,1),%ebx
+	movl	52(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%ecx,%ebp
+	xorl	60(%esp),%eax
+	xorl	%edx,%ebp
+	xorl	20(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	40(%esp),%eax
+	roll	$1,%eax
+	addl	%ebp,%esi
+	rorl	$2,%ecx
+	movl	%ebx,%ebp
+	roll	$5,%ebp
+	movl	%eax,52(%esp)
+	leal	1859775393(%eax,%esi,1),%eax
+	movl	56(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ebx,%ebp
+	xorl	(%esp),%esi
+	xorl	%ecx,%ebp
+	xorl	24(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	44(%esp),%esi
+	roll	$1,%esi
+	addl	%ebp,%edi
+	rorl	$2,%ebx
+	movl	%eax,%ebp
+	roll	$5,%ebp
+	movl	%esi,56(%esp)
+	leal	1859775393(%esi,%edi,1),%esi
+	movl	60(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%eax,%ebp
+	xorl	4(%esp),%edi
+	xorl	%ebx,%ebp
+	xorl	28(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	48(%esp),%edi
+	roll	$1,%edi
+	addl	%ebp,%edx
+	rorl	$2,%eax
+	movl	%esi,%ebp
+	roll	$5,%ebp
+	movl	%edi,60(%esp)
+	leal	1859775393(%edi,%edx,1),%edi
+	movl	(%esp),%edx
+	addl	%ebp,%edi
+
+	movl	%esi,%ebp
+	xorl	8(%esp),%edx
+	xorl	%eax,%ebp
+	xorl	32(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	52(%esp),%edx
+	roll	$1,%edx
+	addl	%ebp,%ecx
+	rorl	$2,%esi
+	movl	%edi,%ebp
+	roll	$5,%ebp
+	movl	%edx,(%esp)
+	leal	1859775393(%edx,%ecx,1),%edx
+	movl	4(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%edi,%ebp
+	xorl	12(%esp),%ecx
+	xorl	%esi,%ebp
+	xorl	36(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	56(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebp,%ebx
+	rorl	$2,%edi
+	movl	%edx,%ebp
+	roll	$5,%ebp
+	movl	%ecx,4(%esp)
+	leal	1859775393(%ecx,%ebx,1),%ecx
+	movl	8(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edx,%ebp
+	xorl	16(%esp),%ebx
+	xorl	%edi,%ebp
+	xorl	40(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	60(%esp),%ebx
+	roll	$1,%ebx
+	addl	%ebp,%eax
+	rorl	$2,%edx
+	movl	%ecx,%ebp
+	roll	$5,%ebp
+	movl	%ebx,8(%esp)
+	leal	1859775393(%ebx,%eax,1),%ebx
+	movl	12(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%ecx,%ebp
+	xorl	20(%esp),%eax
+	xorl	%edx,%ebp
+	xorl	44(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	(%esp),%eax
+	roll	$1,%eax
+	addl	%ebp,%esi
+	rorl	$2,%ecx
+	movl	%ebx,%ebp
+	roll	$5,%ebp
+	movl	%eax,12(%esp)
+	leal	1859775393(%eax,%esi,1),%eax
+	movl	16(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ebx,%ebp
+	xorl	24(%esp),%esi
+	xorl	%ecx,%ebp
+	xorl	48(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	4(%esp),%esi
+	roll	$1,%esi
+	addl	%ebp,%edi
+	rorl	$2,%ebx
+	movl	%eax,%ebp
+	roll	$5,%ebp
+	movl	%esi,16(%esp)
+	leal	1859775393(%esi,%edi,1),%esi
+	movl	20(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%eax,%ebp
+	xorl	28(%esp),%edi
+	xorl	%ebx,%ebp
+	xorl	52(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	8(%esp),%edi
+	roll	$1,%edi
+	addl	%ebp,%edx
+	rorl	$2,%eax
+	movl	%esi,%ebp
+	roll	$5,%ebp
+	movl	%edi,20(%esp)
+	leal	1859775393(%edi,%edx,1),%edi
+	movl	24(%esp),%edx
+	addl	%ebp,%edi
+
+	movl	%esi,%ebp
+	xorl	32(%esp),%edx
+	xorl	%eax,%ebp
+	xorl	56(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	12(%esp),%edx
+	roll	$1,%edx
+	addl	%ebp,%ecx
+	rorl	$2,%esi
+	movl	%edi,%ebp
+	roll	$5,%ebp
+	movl	%edx,24(%esp)
+	leal	1859775393(%edx,%ecx,1),%edx
+	movl	28(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%edi,%ebp
+	xorl	36(%esp),%ecx
+	xorl	%esi,%ebp
+	xorl	60(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	16(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebp,%ebx
+	rorl	$2,%edi
+	movl	%edx,%ebp
+	roll	$5,%ebp
+	movl	%ecx,28(%esp)
+	leal	1859775393(%ecx,%ebx,1),%ecx
+	movl	32(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edi,%ebp
+	xorl	40(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	(%esp),%ebx
+	andl	%edx,%ebp
+	xorl	20(%esp),%ebx
+	roll	$1,%ebx
+	addl	%eax,%ebp
+	rorl	$2,%edx
+	movl	%ecx,%eax
+	roll	$5,%eax
+	movl	%ebx,32(%esp)
+	leal	2400959708(%ebx,%ebp,1),%ebx
+	movl	%edi,%ebp
+	addl	%eax,%ebx
+	andl	%esi,%ebp
+	movl	36(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%edx,%ebp
+	xorl	44(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	4(%esp),%eax
+	andl	%ecx,%ebp
+	xorl	24(%esp),%eax
+	roll	$1,%eax
+	addl	%esi,%ebp
+	rorl	$2,%ecx
+	movl	%ebx,%esi
+	roll	$5,%esi
+	movl	%eax,36(%esp)
+	leal	2400959708(%eax,%ebp,1),%eax
+	movl	%edx,%ebp
+	addl	%esi,%eax
+	andl	%edi,%ebp
+	movl	40(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ecx,%ebp
+	xorl	48(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	8(%esp),%esi
+	andl	%ebx,%ebp
+	xorl	28(%esp),%esi
+	roll	$1,%esi
+	addl	%edi,%ebp
+	rorl	$2,%ebx
+	movl	%eax,%edi
+	roll	$5,%edi
+	movl	%esi,40(%esp)
+	leal	2400959708(%esi,%ebp,1),%esi
+	movl	%ecx,%ebp
+	addl	%edi,%esi
+	andl	%edx,%ebp
+	movl	44(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%ebx,%ebp
+	xorl	52(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	12(%esp),%edi
+	andl	%eax,%ebp
+	xorl	32(%esp),%edi
+	roll	$1,%edi
+	addl	%edx,%ebp
+	rorl	$2,%eax
+	movl	%esi,%edx
+	roll	$5,%edx
+	movl	%edi,44(%esp)
+	leal	2400959708(%edi,%ebp,1),%edi
+	movl	%ebx,%ebp
+	addl	%edx,%edi
+	andl	%ecx,%ebp
+	movl	48(%esp),%edx
+	addl	%ebp,%edi
+
+	movl	%eax,%ebp
+	xorl	56(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	16(%esp),%edx
+	andl	%esi,%ebp
+	xorl	36(%esp),%edx
+	roll	$1,%edx
+	addl	%ecx,%ebp
+	rorl	$2,%esi
+	movl	%edi,%ecx
+	roll	$5,%ecx
+	movl	%edx,48(%esp)
+	leal	2400959708(%edx,%ebp,1),%edx
+	movl	%eax,%ebp
+	addl	%ecx,%edx
+	andl	%ebx,%ebp
+	movl	52(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%esi,%ebp
+	xorl	60(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	20(%esp),%ecx
+	andl	%edi,%ebp
+	xorl	40(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebx,%ebp
+	rorl	$2,%edi
+	movl	%edx,%ebx
+	roll	$5,%ebx
+	movl	%ecx,52(%esp)
+	leal	2400959708(%ecx,%ebp,1),%ecx
+	movl	%esi,%ebp
+	addl	%ebx,%ecx
+	andl	%eax,%ebp
+	movl	56(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edi,%ebp
+	xorl	(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	24(%esp),%ebx
+	andl	%edx,%ebp
+	xorl	44(%esp),%ebx
+	roll	$1,%ebx
+	addl	%eax,%ebp
+	rorl	$2,%edx
+	movl	%ecx,%eax
+	roll	$5,%eax
+	movl	%ebx,56(%esp)
+	leal	2400959708(%ebx,%ebp,1),%ebx
+	movl	%edi,%ebp
+	addl	%eax,%ebx
+	andl	%esi,%ebp
+	movl	60(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%edx,%ebp
+	xorl	4(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	28(%esp),%eax
+	andl	%ecx,%ebp
+	xorl	48(%esp),%eax
+	roll	$1,%eax
+	addl	%esi,%ebp
+	rorl	$2,%ecx
+	movl	%ebx,%esi
+	roll	$5,%esi
+	movl	%eax,60(%esp)
+	leal	2400959708(%eax,%ebp,1),%eax
+	movl	%edx,%ebp
+	addl	%esi,%eax
+	andl	%edi,%ebp
+	movl	(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ecx,%ebp
+	xorl	8(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	32(%esp),%esi
+	andl	%ebx,%ebp
+	xorl	52(%esp),%esi
+	roll	$1,%esi
+	addl	%edi,%ebp
+	rorl	$2,%ebx
+	movl	%eax,%edi
+	roll	$5,%edi
+	movl	%esi,(%esp)
+	leal	2400959708(%esi,%ebp,1),%esi
+	movl	%ecx,%ebp
+	addl	%edi,%esi
+	andl	%edx,%ebp
+	movl	4(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%ebx,%ebp
+	xorl	12(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	36(%esp),%edi
+	andl	%eax,%ebp
+	xorl	56(%esp),%edi
+	roll	$1,%edi
+	addl	%edx,%ebp
+	rorl	$2,%eax
+	movl	%esi,%edx
+	roll	$5,%edx
+	movl	%edi,4(%esp)
+	leal	2400959708(%edi,%ebp,1),%edi
+	movl	%ebx,%ebp
+	addl	%edx,%edi
+	andl	%ecx,%ebp
+	movl	8(%esp),%edx
+	addl	%ebp,%edi
+
+	movl	%eax,%ebp
+	xorl	16(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	40(%esp),%edx
+	andl	%esi,%ebp
+	xorl	60(%esp),%edx
+	roll	$1,%edx
+	addl	%ecx,%ebp
+	rorl	$2,%esi
+	movl	%edi,%ecx
+	roll	$5,%ecx
+	movl	%edx,8(%esp)
+	leal	2400959708(%edx,%ebp,1),%edx
+	movl	%eax,%ebp
+	addl	%ecx,%edx
+	andl	%ebx,%ebp
+	movl	12(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%esi,%ebp
+	xorl	20(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	44(%esp),%ecx
+	andl	%edi,%ebp
+	xorl	(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebx,%ebp
+	rorl	$2,%edi
+	movl	%edx,%ebx
+	roll	$5,%ebx
+	movl	%ecx,12(%esp)
+	leal	2400959708(%ecx,%ebp,1),%ecx
+	movl	%esi,%ebp
+	addl	%ebx,%ecx
+	andl	%eax,%ebp
+	movl	16(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edi,%ebp
+	xorl	24(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	48(%esp),%ebx
+	andl	%edx,%ebp
+	xorl	4(%esp),%ebx
+	roll	$1,%ebx
+	addl	%eax,%ebp
+	rorl	$2,%edx
+	movl	%ecx,%eax
+	roll	$5,%eax
+	movl	%ebx,16(%esp)
+	leal	2400959708(%ebx,%ebp,1),%ebx
+	movl	%edi,%ebp
+	addl	%eax,%ebx
+	andl	%esi,%ebp
+	movl	20(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%edx,%ebp
+	xorl	28(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	52(%esp),%eax
+	andl	%ecx,%ebp
+	xorl	8(%esp),%eax
+	roll	$1,%eax
+	addl	%esi,%ebp
+	rorl	$2,%ecx
+	movl	%ebx,%esi
+	roll	$5,%esi
+	movl	%eax,20(%esp)
+	leal	2400959708(%eax,%ebp,1),%eax
+	movl	%edx,%ebp
+	addl	%esi,%eax
+	andl	%edi,%ebp
+	movl	24(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ecx,%ebp
+	xorl	32(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	56(%esp),%esi
+	andl	%ebx,%ebp
+	xorl	12(%esp),%esi
+	roll	$1,%esi
+	addl	%edi,%ebp
+	rorl	$2,%ebx
+	movl	%eax,%edi
+	roll	$5,%edi
+	movl	%esi,24(%esp)
+	leal	2400959708(%esi,%ebp,1),%esi
+	movl	%ecx,%ebp
+	addl	%edi,%esi
+	andl	%edx,%ebp
+	movl	28(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%ebx,%ebp
+	xorl	36(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	60(%esp),%edi
+	andl	%eax,%ebp
+	xorl	16(%esp),%edi
+	roll	$1,%edi
+	addl	%edx,%ebp
+	rorl	$2,%eax
+	movl	%esi,%edx
+	roll	$5,%edx
+	movl	%edi,28(%esp)
+	leal	2400959708(%edi,%ebp,1),%edi
+	movl	%ebx,%ebp
+	addl	%edx,%edi
+	andl	%ecx,%ebp
+	movl	32(%esp),%edx
+	addl	%ebp,%edi
+
+	movl	%eax,%ebp
+	xorl	40(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	(%esp),%edx
+	andl	%esi,%ebp
+	xorl	20(%esp),%edx
+	roll	$1,%edx
+	addl	%ecx,%ebp
+	rorl	$2,%esi
+	movl	%edi,%ecx
+	roll	$5,%ecx
+	movl	%edx,32(%esp)
+	leal	2400959708(%edx,%ebp,1),%edx
+	movl	%eax,%ebp
+	addl	%ecx,%edx
+	andl	%ebx,%ebp
+	movl	36(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%esi,%ebp
+	xorl	44(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	4(%esp),%ecx
+	andl	%edi,%ebp
+	xorl	24(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebx,%ebp
+	rorl	$2,%edi
+	movl	%edx,%ebx
+	roll	$5,%ebx
+	movl	%ecx,36(%esp)
+	leal	2400959708(%ecx,%ebp,1),%ecx
+	movl	%esi,%ebp
+	addl	%ebx,%ecx
+	andl	%eax,%ebp
+	movl	40(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edi,%ebp
+	xorl	48(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	8(%esp),%ebx
+	andl	%edx,%ebp
+	xorl	28(%esp),%ebx
+	roll	$1,%ebx
+	addl	%eax,%ebp
+	rorl	$2,%edx
+	movl	%ecx,%eax
+	roll	$5,%eax
+	movl	%ebx,40(%esp)
+	leal	2400959708(%ebx,%ebp,1),%ebx
+	movl	%edi,%ebp
+	addl	%eax,%ebx
+	andl	%esi,%ebp
+	movl	44(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%edx,%ebp
+	xorl	52(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	12(%esp),%eax
+	andl	%ecx,%ebp
+	xorl	32(%esp),%eax
+	roll	$1,%eax
+	addl	%esi,%ebp
+	rorl	$2,%ecx
+	movl	%ebx,%esi
+	roll	$5,%esi
+	movl	%eax,44(%esp)
+	leal	2400959708(%eax,%ebp,1),%eax
+	movl	%edx,%ebp
+	addl	%esi,%eax
+	andl	%edi,%ebp
+	movl	48(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ebx,%ebp
+	xorl	56(%esp),%esi
+	xorl	%ecx,%ebp
+	xorl	16(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	36(%esp),%esi
+	roll	$1,%esi
+	addl	%ebp,%edi
+	rorl	$2,%ebx
+	movl	%eax,%ebp
+	roll	$5,%ebp
+	movl	%esi,48(%esp)
+	leal	3395469782(%esi,%edi,1),%esi
+	movl	52(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%eax,%ebp
+	xorl	60(%esp),%edi
+	xorl	%ebx,%ebp
+	xorl	20(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	40(%esp),%edi
+	roll	$1,%edi
+	addl	%ebp,%edx
+	rorl	$2,%eax
+	movl	%esi,%ebp
+	roll	$5,%ebp
+	movl	%edi,52(%esp)
+	leal	3395469782(%edi,%edx,1),%edi
+	movl	56(%esp),%edx
+	addl	%ebp,%edi
+
+	movl	%esi,%ebp
+	xorl	(%esp),%edx
+	xorl	%eax,%ebp
+	xorl	24(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	44(%esp),%edx
+	roll	$1,%edx
+	addl	%ebp,%ecx
+	rorl	$2,%esi
+	movl	%edi,%ebp
+	roll	$5,%ebp
+	movl	%edx,56(%esp)
+	leal	3395469782(%edx,%ecx,1),%edx
+	movl	60(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%edi,%ebp
+	xorl	4(%esp),%ecx
+	xorl	%esi,%ebp
+	xorl	28(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	48(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebp,%ebx
+	rorl	$2,%edi
+	movl	%edx,%ebp
+	roll	$5,%ebp
+	movl	%ecx,60(%esp)
+	leal	3395469782(%ecx,%ebx,1),%ecx
+	movl	(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edx,%ebp
+	xorl	8(%esp),%ebx
+	xorl	%edi,%ebp
+	xorl	32(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	52(%esp),%ebx
+	roll	$1,%ebx
+	addl	%ebp,%eax
+	rorl	$2,%edx
+	movl	%ecx,%ebp
+	roll	$5,%ebp
+	movl	%ebx,(%esp)
+	leal	3395469782(%ebx,%eax,1),%ebx
+	movl	4(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%ecx,%ebp
+	xorl	12(%esp),%eax
+	xorl	%edx,%ebp
+	xorl	36(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	56(%esp),%eax
+	roll	$1,%eax
+	addl	%ebp,%esi
+	rorl	$2,%ecx
+	movl	%ebx,%ebp
+	roll	$5,%ebp
+	movl	%eax,4(%esp)
+	leal	3395469782(%eax,%esi,1),%eax
+	movl	8(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ebx,%ebp
+	xorl	16(%esp),%esi
+	xorl	%ecx,%ebp
+	xorl	40(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	60(%esp),%esi
+	roll	$1,%esi
+	addl	%ebp,%edi
+	rorl	$2,%ebx
+	movl	%eax,%ebp
+	roll	$5,%ebp
+	movl	%esi,8(%esp)
+	leal	3395469782(%esi,%edi,1),%esi
+	movl	12(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%eax,%ebp
+	xorl	20(%esp),%edi
+	xorl	%ebx,%ebp
+	xorl	44(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	(%esp),%edi
+	roll	$1,%edi
+	addl	%ebp,%edx
+	rorl	$2,%eax
+	movl	%esi,%ebp
+	roll	$5,%ebp
+	movl	%edi,12(%esp)
+	leal	3395469782(%edi,%edx,1),%edi
+	movl	16(%esp),%edx
+	addl	%ebp,%edi
+
+	movl	%esi,%ebp
+	xorl	24(%esp),%edx
+	xorl	%eax,%ebp
+	xorl	48(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	4(%esp),%edx
+	roll	$1,%edx
+	addl	%ebp,%ecx
+	rorl	$2,%esi
+	movl	%edi,%ebp
+	roll	$5,%ebp
+	movl	%edx,16(%esp)
+	leal	3395469782(%edx,%ecx,1),%edx
+	movl	20(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%edi,%ebp
+	xorl	28(%esp),%ecx
+	xorl	%esi,%ebp
+	xorl	52(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	8(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebp,%ebx
+	rorl	$2,%edi
+	movl	%edx,%ebp
+	roll	$5,%ebp
+	movl	%ecx,20(%esp)
+	leal	3395469782(%ecx,%ebx,1),%ecx
+	movl	24(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edx,%ebp
+	xorl	32(%esp),%ebx
+	xorl	%edi,%ebp
+	xorl	56(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	12(%esp),%ebx
+	roll	$1,%ebx
+	addl	%ebp,%eax
+	rorl	$2,%edx
+	movl	%ecx,%ebp
+	roll	$5,%ebp
+	movl	%ebx,24(%esp)
+	leal	3395469782(%ebx,%eax,1),%ebx
+	movl	28(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%ecx,%ebp
+	xorl	36(%esp),%eax
+	xorl	%edx,%ebp
+	xorl	60(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	16(%esp),%eax
+	roll	$1,%eax
+	addl	%ebp,%esi
+	rorl	$2,%ecx
+	movl	%ebx,%ebp
+	roll	$5,%ebp
+	movl	%eax,28(%esp)
+	leal	3395469782(%eax,%esi,1),%eax
+	movl	32(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ebx,%ebp
+	xorl	40(%esp),%esi
+	xorl	%ecx,%ebp
+	xorl	(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	20(%esp),%esi
+	roll	$1,%esi
+	addl	%ebp,%edi
+	rorl	$2,%ebx
+	movl	%eax,%ebp
+	roll	$5,%ebp
+	movl	%esi,32(%esp)
+	leal	3395469782(%esi,%edi,1),%esi
+	movl	36(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%eax,%ebp
+	xorl	44(%esp),%edi
+	xorl	%ebx,%ebp
+	xorl	4(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	24(%esp),%edi
+	roll	$1,%edi
+	addl	%ebp,%edx
+	rorl	$2,%eax
+	movl	%esi,%ebp
+	roll	$5,%ebp
+	movl	%edi,36(%esp)
+	leal	3395469782(%edi,%edx,1),%edi
+	movl	40(%esp),%edx
+	addl	%ebp,%edi
+
+	movl	%esi,%ebp
+	xorl	48(%esp),%edx
+	xorl	%eax,%ebp
+	xorl	8(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	28(%esp),%edx
+	roll	$1,%edx
+	addl	%ebp,%ecx
+	rorl	$2,%esi
+	movl	%edi,%ebp
+	roll	$5,%ebp
+	movl	%edx,40(%esp)
+	leal	3395469782(%edx,%ecx,1),%edx
+	movl	44(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%edi,%ebp
+	xorl	52(%esp),%ecx
+	xorl	%esi,%ebp
+	xorl	12(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	32(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebp,%ebx
+	rorl	$2,%edi
+	movl	%edx,%ebp
+	roll	$5,%ebp
+	movl	%ecx,44(%esp)
+	leal	3395469782(%ecx,%ebx,1),%ecx
+	movl	48(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edx,%ebp
+	xorl	56(%esp),%ebx
+	xorl	%edi,%ebp
+	xorl	16(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	36(%esp),%ebx
+	roll	$1,%ebx
+	addl	%ebp,%eax
+	rorl	$2,%edx
+	movl	%ecx,%ebp
+	roll	$5,%ebp
+	movl	%ebx,48(%esp)
+	leal	3395469782(%ebx,%eax,1),%ebx
+	movl	52(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%ecx,%ebp
+	xorl	60(%esp),%eax
+	xorl	%edx,%ebp
+	xorl	20(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	40(%esp),%eax
+	roll	$1,%eax
+	addl	%ebp,%esi
+	rorl	$2,%ecx
+	movl	%ebx,%ebp
+	roll	$5,%ebp
+	leal	3395469782(%eax,%esi,1),%eax
+	movl	56(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ebx,%ebp
+	xorl	(%esp),%esi
+	xorl	%ecx,%ebp
+	xorl	24(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	44(%esp),%esi
+	roll	$1,%esi
+	addl	%ebp,%edi
+	rorl	$2,%ebx
+	movl	%eax,%ebp
+	roll	$5,%ebp
+	leal	3395469782(%esi,%edi,1),%esi
+	movl	60(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%eax,%ebp
+	xorl	4(%esp),%edi
+	xorl	%ebx,%ebp
+	xorl	28(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	48(%esp),%edi
+	roll	$1,%edi
+	addl	%ebp,%edx
+	rorl	$2,%eax
+	movl	%esi,%ebp
+	roll	$5,%ebp
+	leal	3395469782(%edi,%edx,1),%edi
+	addl	%ebp,%edi
+	movl	96(%esp),%ebp
+	movl	100(%esp),%edx
+	addl	(%ebp),%edi
+	addl	4(%ebp),%esi
+	addl	8(%ebp),%eax
+	addl	12(%ebp),%ebx
+	addl	16(%ebp),%ecx
+	movl	%edi,(%ebp)
+	addl	$64,%edx
+	movl	%esi,4(%ebp)
+	cmpl	104(%esp),%edx
+	movl	%eax,8(%ebp)
+	movl	%ecx,%edi
+	movl	%ebx,12(%ebp)
+	movl	%edx,%esi
+	movl	%ecx,16(%ebp)
+	jb	.L002loop
+	addl	$76,%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	sha1_block_data_order,.-.L_sha1_block_data_order_begin
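+# SSSE3 code path: the 80 scalar rounds are interleaved with XMM code
+# that byte-swaps the input block and computes the message schedule four
+# words at a time, keeping precomputed W[t]+K values in a 64-byte window
+# on the stack.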
+.type	_sha1_block_data_order_ssse3,@function
+.align	16
+_sha1_block_data_order_ssse3:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	call	.L003pic_point
+.L003pic_point:
+	popl	%ebp
+	leal	.LK_XX_XX-.L003pic_point(%ebp),%ebp
+.Lssse3_shortcut:
+	movdqa	(%ebp),%xmm7
+	movdqa	16(%ebp),%xmm0
+	movdqa	32(%ebp),%xmm1
+	movdqa	48(%ebp),%xmm2
+	movdqa	64(%ebp),%xmm6
+	movl	20(%esp),%edi
+	movl	24(%esp),%ebp
+	movl	28(%esp),%edx
+	movl	%esp,%esi
+	subl	$208,%esp
+	andl	$-64,%esp
+	movdqa	%xmm0,112(%esp)
+	movdqa	%xmm1,128(%esp)
+	movdqa	%xmm2,144(%esp)
+	shll	$6,%edx
+	movdqa	%xmm7,160(%esp)
+	addl	%ebp,%edx
+	movdqa	%xmm6,176(%esp)
+	addl	$64,%ebp
+	movl	%edi,192(%esp)
+	movl	%ebp,196(%esp)
+	movl	%edx,200(%esp)
+	movl	%esi,204(%esp)
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	16(%edi),%edi
+	movl	%ebx,%esi
+	movdqu	-64(%ebp),%xmm0
+	movdqu	-48(%ebp),%xmm1
+	movdqu	-32(%ebp),%xmm2
+	movdqu	-16(%ebp),%xmm3
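+# The .byte sequences below are hand-emitted SSSE3 opcodes:
+# 102,15,56,0,... decodes to pshufb (66 0F 38 00), applying the
+# byte-swap mask preloaded into %xmm6, and 102,15,58,15,...,8 further
+# down decodes to palignr with an immediate of 8 (66 0F 3A 0F);
+# raw bytes presumably keep the file assembling on pre-SSSE3 assemblers.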
+.byte	102,15,56,0,198
+.byte	102,15,56,0,206
+.byte	102,15,56,0,214
+	movdqa	%xmm7,96(%esp)
+.byte	102,15,56,0,222
+	paddd	%xmm7,%xmm0
+	paddd	%xmm7,%xmm1
+	paddd	%xmm7,%xmm2
+	movdqa	%xmm0,(%esp)
+	psubd	%xmm7,%xmm0
+	movdqa	%xmm1,16(%esp)
+	psubd	%xmm7,%xmm1
+	movdqa	%xmm2,32(%esp)
+	psubd	%xmm7,%xmm2
+	movdqa	%xmm1,%xmm4
+	jmp	.L004loop
+.align	16
+.L004loop:
+	addl	(%esp),%edi
+	xorl	%edx,%ecx
+.byte	102,15,58,15,224,8
+	movdqa	%xmm3,%xmm6
+	movl	%eax,%ebp
+	roll	$5,%eax
+	paddd	%xmm3,%xmm7
+	movdqa	%xmm0,64(%esp)
+	andl	%ecx,%esi
+	xorl	%edx,%ecx
+	psrldq	$4,%xmm6
+	xorl	%edx,%esi
+	addl	%eax,%edi
+	pxor	%xmm0,%xmm4
+	rorl	$2,%ebx
+	addl	%esi,%edi
+	pxor	%xmm2,%xmm6
+	addl	4(%esp),%edx
+	xorl	%ecx,%ebx
+	movl	%edi,%esi
+	roll	$5,%edi
+	pxor	%xmm6,%xmm4
+	andl	%ebx,%ebp
+	xorl	%ecx,%ebx
+	movdqa	%xmm7,48(%esp)
+	xorl	%ecx,%ebp
+	addl	%edi,%edx
+	movdqa	%xmm4,%xmm0
+	movdqa	%xmm4,%xmm6
+	rorl	$7,%eax
+	addl	%ebp,%edx
+	addl	8(%esp),%ecx
+	xorl	%ebx,%eax
+	pslldq	$12,%xmm0
+	paddd	%xmm4,%xmm4
+	movl	%edx,%ebp
+	roll	$5,%edx
+	andl	%eax,%esi
+	xorl	%ebx,%eax
+	psrld	$31,%xmm6
+	xorl	%ebx,%esi
+	addl	%edx,%ecx
+	movdqa	%xmm0,%xmm7
+	rorl	$7,%edi
+	addl	%esi,%ecx
+	psrld	$30,%xmm0
+	por	%xmm6,%xmm4
+	addl	12(%esp),%ebx
+	xorl	%eax,%edi
+	movl	%ecx,%esi
+	roll	$5,%ecx
+	pslld	$2,%xmm7
+	pxor	%xmm0,%xmm4
+	andl	%edi,%ebp
+	xorl	%eax,%edi
+	movdqa	96(%esp),%xmm0
+	xorl	%eax,%ebp
+	addl	%ecx,%ebx
+	pxor	%xmm7,%xmm4
+	movdqa	%xmm2,%xmm5
+	rorl	$7,%edx
+	addl	%ebp,%ebx
+	addl	16(%esp),%eax
+	xorl	%edi,%edx
+.byte	102,15,58,15,233,8
+	movdqa	%xmm4,%xmm7
+	movl	%ebx,%ebp
+	roll	$5,%ebx
+	paddd	%xmm4,%xmm0
+	movdqa	%xmm1,80(%esp)
+	andl	%edx,%esi
+	xorl	%edi,%edx
+	psrldq	$4,%xmm7
+	xorl	%edi,%esi
+	addl	%ebx,%eax
+	pxor	%xmm1,%xmm5
+	rorl	$7,%ecx
+	addl	%esi,%eax
+	pxor	%xmm3,%xmm7
+	addl	20(%esp),%edi
+	xorl	%edx,%ecx
+	movl	%eax,%esi
+	roll	$5,%eax
+	pxor	%xmm7,%xmm5
+	andl	%ecx,%ebp
+	xorl	%edx,%ecx
+	movdqa	%xmm0,(%esp)
+	xorl	%edx,%ebp
+	addl	%eax,%edi
+	movdqa	%xmm5,%xmm1
+	movdqa	%xmm5,%xmm7
+	rorl	$7,%ebx
+	addl	%ebp,%edi
+	addl	24(%esp),%edx
+	xorl	%ecx,%ebx
+	pslldq	$12,%xmm1
+	paddd	%xmm5,%xmm5
+	movl	%edi,%ebp
+	roll	$5,%edi
+	andl	%ebx,%esi
+	xorl	%ecx,%ebx
+	psrld	$31,%xmm7
+	xorl	%ecx,%esi
+	addl	%edi,%edx
+	movdqa	%xmm1,%xmm0
+	rorl	$7,%eax
+	addl	%esi,%edx
+	psrld	$30,%xmm1
+	por	%xmm7,%xmm5
+	addl	28(%esp),%ecx
+	xorl	%ebx,%eax
+	movl	%edx,%esi
+	roll	$5,%edx
+	pslld	$2,%xmm0
+	pxor	%xmm1,%xmm5
+	andl	%eax,%ebp
+	xorl	%ebx,%eax
+	movdqa	112(%esp),%xmm1
+	xorl	%ebx,%ebp
+	addl	%edx,%ecx
+	pxor	%xmm0,%xmm5
+	movdqa	%xmm3,%xmm6
+	rorl	$7,%edi
+	addl	%ebp,%ecx
+	addl	32(%esp),%ebx
+	xorl	%eax,%edi
+.byte	102,15,58,15,242,8
+	movdqa	%xmm5,%xmm0
+	movl	%ecx,%ebp
+	roll	$5,%ecx
+	paddd	%xmm5,%xmm1
+	movdqa	%xmm2,96(%esp)
+	andl	%edi,%esi
+	xorl	%eax,%edi
+	psrldq	$4,%xmm0
+	xorl	%eax,%esi
+	addl	%ecx,%ebx
+	pxor	%xmm2,%xmm6
+	rorl	$7,%edx
+	addl	%esi,%ebx
+	pxor	%xmm4,%xmm0
+	addl	36(%esp),%eax
+	xorl	%edi,%edx
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	pxor	%xmm0,%xmm6
+	andl	%edx,%ebp
+	xorl	%edi,%edx
+	movdqa	%xmm1,16(%esp)
+	xorl	%edi,%ebp
+	addl	%ebx,%eax
+	movdqa	%xmm6,%xmm2
+	movdqa	%xmm6,%xmm0
+	rorl	$7,%ecx
+	addl	%ebp,%eax
+	addl	40(%esp),%edi
+	xorl	%edx,%ecx
+	pslldq	$12,%xmm2
+	paddd	%xmm6,%xmm6
+	movl	%eax,%ebp
+	roll	$5,%eax
+	andl	%ecx,%esi
+	xorl	%edx,%ecx
+	psrld	$31,%xmm0
+	xorl	%edx,%esi
+	addl	%eax,%edi
+	movdqa	%xmm2,%xmm1
+	rorl	$7,%ebx
+	addl	%esi,%edi
+	psrld	$30,%xmm2
+	por	%xmm0,%xmm6
+	addl	44(%esp),%edx
+	xorl	%ecx,%ebx
+	movdqa	64(%esp),%xmm0
+	movl	%edi,%esi
+	roll	$5,%edi
+	pslld	$2,%xmm1
+	pxor	%xmm2,%xmm6
+	andl	%ebx,%ebp
+	xorl	%ecx,%ebx
+	movdqa	112(%esp),%xmm2
+	xorl	%ecx,%ebp
+	addl	%edi,%edx
+	pxor	%xmm1,%xmm6
+	movdqa	%xmm4,%xmm7
+	rorl	$7,%eax
+	addl	%ebp,%edx
+	addl	48(%esp),%ecx
+	xorl	%ebx,%eax
+.byte	102,15,58,15,251,8
+	movdqa	%xmm6,%xmm1
+	movl	%edx,%ebp
+	roll	$5,%edx
+	paddd	%xmm6,%xmm2
+	movdqa	%xmm3,64(%esp)
+	andl	%eax,%esi
+	xorl	%ebx,%eax
+	psrldq	$4,%xmm1
+	xorl	%ebx,%esi
+	addl	%edx,%ecx
+	pxor	%xmm3,%xmm7
+	rorl	$7,%edi
+	addl	%esi,%ecx
+	pxor	%xmm5,%xmm1
+	addl	52(%esp),%ebx
+	xorl	%eax,%edi
+	movl	%ecx,%esi
+	roll	$5,%ecx
+	pxor	%xmm1,%xmm7
+	andl	%edi,%ebp
+	xorl	%eax,%edi
+	movdqa	%xmm2,32(%esp)
+	xorl	%eax,%ebp
+	addl	%ecx,%ebx
+	movdqa	%xmm7,%xmm3
+	movdqa	%xmm7,%xmm1
+	rorl	$7,%edx
+	addl	%ebp,%ebx
+	addl	56(%esp),%eax
+	xorl	%edi,%edx
+	pslldq	$12,%xmm3
+	paddd	%xmm7,%xmm7
+	movl	%ebx,%ebp
+	roll	$5,%ebx
+	andl	%edx,%esi
+	xorl	%edi,%edx
+	psrld	$31,%xmm1
+	xorl	%edi,%esi
+	addl	%ebx,%eax
+	movdqa	%xmm3,%xmm2
+	rorl	$7,%ecx
+	addl	%esi,%eax
+	psrld	$30,%xmm3
+	por	%xmm1,%xmm7
+	addl	60(%esp),%edi
+	xorl	%edx,%ecx
+	movdqa	80(%esp),%xmm1
+	movl	%eax,%esi
+	roll	$5,%eax
+	pslld	$2,%xmm2
+	pxor	%xmm3,%xmm7
+	andl	%ecx,%ebp
+	xorl	%edx,%ecx
+	movdqa	112(%esp),%xmm3
+	xorl	%edx,%ebp
+	addl	%eax,%edi
+	pxor	%xmm2,%xmm7
+	rorl	$7,%ebx
+	addl	%ebp,%edi
+	movdqa	%xmm7,%xmm2
+	addl	(%esp),%edx
+	pxor	%xmm4,%xmm0
+.byte	102,15,58,15,214,8
+	xorl	%ecx,%ebx
+	movl	%edi,%ebp
+	roll	$5,%edi
+	pxor	%xmm1,%xmm0
+	movdqa	%xmm4,80(%esp)
+	andl	%ebx,%esi
+	xorl	%ecx,%ebx
+	movdqa	%xmm3,%xmm4
+	paddd	%xmm7,%xmm3
+	xorl	%ecx,%esi
+	addl	%edi,%edx
+	pxor	%xmm2,%xmm0
+	rorl	$7,%eax
+	addl	%esi,%edx
+	addl	4(%esp),%ecx
+	xorl	%ebx,%eax
+	movdqa	%xmm0,%xmm2
+	movdqa	%xmm3,48(%esp)
+	movl	%edx,%esi
+	roll	$5,%edx
+	andl	%eax,%ebp
+	xorl	%ebx,%eax
+	pslld	$2,%xmm0
+	xorl	%ebx,%ebp
+	addl	%edx,%ecx
+	psrld	$30,%xmm2
+	rorl	$7,%edi
+	addl	%ebp,%ecx
+	addl	8(%esp),%ebx
+	xorl	%eax,%edi
+	movl	%ecx,%ebp
+	roll	$5,%ecx
+	por	%xmm2,%xmm0
+	andl	%edi,%esi
+	xorl	%eax,%edi
+	movdqa	96(%esp),%xmm2
+	xorl	%eax,%esi
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%esi,%ebx
+	addl	12(%esp),%eax
+	movdqa	%xmm0,%xmm3
+	xorl	%edi,%edx
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	andl	%edx,%ebp
+	xorl	%edi,%edx
+	xorl	%edi,%ebp
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%ebp,%eax
+	addl	16(%esp),%edi
+	pxor	%xmm5,%xmm1
+.byte	102,15,58,15,223,8
+	xorl	%edx,%esi
+	movl	%eax,%ebp
+	roll	$5,%eax
+	pxor	%xmm2,%xmm1
+	movdqa	%xmm5,96(%esp)
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	movdqa	%xmm4,%xmm5
+	paddd	%xmm0,%xmm4
+	rorl	$7,%ebx
+	addl	%esi,%edi
+	pxor	%xmm3,%xmm1
+	addl	20(%esp),%edx
+	xorl	%ecx,%ebp
+	movl	%edi,%esi
+	roll	$5,%edi
+	movdqa	%xmm1,%xmm3
+	movdqa	%xmm4,(%esp)
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	rorl	$7,%eax
+	addl	%ebp,%edx
+	pslld	$2,%xmm1
+	addl	24(%esp),%ecx
+	xorl	%ebx,%esi
+	psrld	$30,%xmm3
+	movl	%edx,%ebp
+	roll	$5,%edx
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	rorl	$7,%edi
+	addl	%esi,%ecx
+	por	%xmm3,%xmm1
+	addl	28(%esp),%ebx
+	xorl	%eax,%ebp
+	movdqa	64(%esp),%xmm3
+	movl	%ecx,%esi
+	roll	$5,%ecx
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	movdqa	%xmm1,%xmm4
+	addl	%ebp,%ebx
+	addl	32(%esp),%eax
+	pxor	%xmm6,%xmm2
+.byte	102,15,58,15,224,8
+	xorl	%edi,%esi
+	movl	%ebx,%ebp
+	roll	$5,%ebx
+	pxor	%xmm3,%xmm2
+	movdqa	%xmm6,64(%esp)
+	xorl	%edx,%esi
+	addl	%ebx,%eax
+	movdqa	128(%esp),%xmm6
+	paddd	%xmm1,%xmm5
+	rorl	$7,%ecx
+	addl	%esi,%eax
+	pxor	%xmm4,%xmm2
+	addl	36(%esp),%edi
+	xorl	%edx,%ebp
+	movl	%eax,%esi
+	roll	$5,%eax
+	movdqa	%xmm2,%xmm4
+	movdqa	%xmm5,16(%esp)
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	rorl	$7,%ebx
+	addl	%ebp,%edi
+	pslld	$2,%xmm2
+	addl	40(%esp),%edx
+	xorl	%ecx,%esi
+	psrld	$30,%xmm4
+	movl	%edi,%ebp
+	roll	$5,%edi
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	rorl	$7,%eax
+	addl	%esi,%edx
+	por	%xmm4,%xmm2
+	addl	44(%esp),%ecx
+	xorl	%ebx,%ebp
+	movdqa	80(%esp),%xmm4
+	movl	%edx,%esi
+	roll	$5,%edx
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	rorl	$7,%edi
+	movdqa	%xmm2,%xmm5
+	addl	%ebp,%ecx
+	addl	48(%esp),%ebx
+	pxor	%xmm7,%xmm3
+.byte	102,15,58,15,233,8
+	xorl	%eax,%esi
+	movl	%ecx,%ebp
+	roll	$5,%ecx
+	pxor	%xmm4,%xmm3
+	movdqa	%xmm7,80(%esp)
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	movdqa	%xmm6,%xmm7
+	paddd	%xmm2,%xmm6
+	rorl	$7,%edx
+	addl	%esi,%ebx
+	pxor	%xmm5,%xmm3
+	addl	52(%esp),%eax
+	xorl	%edi,%ebp
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	movdqa	%xmm3,%xmm5
+	movdqa	%xmm6,32(%esp)
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%ebp,%eax
+	pslld	$2,%xmm3
+	addl	56(%esp),%edi
+	xorl	%edx,%esi
+	psrld	$30,%xmm5
+	movl	%eax,%ebp
+	roll	$5,%eax
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	rorl	$7,%ebx
+	addl	%esi,%edi
+	por	%xmm5,%xmm3
+	addl	60(%esp),%edx
+	xorl	%ecx,%ebp
+	movdqa	96(%esp),%xmm5
+	movl	%edi,%esi
+	roll	$5,%edi
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	rorl	$7,%eax
+	movdqa	%xmm3,%xmm6
+	addl	%ebp,%edx
+	addl	(%esp),%ecx
+	pxor	%xmm0,%xmm4
+.byte	102,15,58,15,242,8
+	xorl	%ebx,%esi
+	movl	%edx,%ebp
+	roll	$5,%edx
+	pxor	%xmm5,%xmm4
+	movdqa	%xmm0,96(%esp)
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	movdqa	%xmm7,%xmm0
+	paddd	%xmm3,%xmm7
+	rorl	$7,%edi
+	addl	%esi,%ecx
+	pxor	%xmm6,%xmm4
+	addl	4(%esp),%ebx
+	xorl	%eax,%ebp
+	movl	%ecx,%esi
+	roll	$5,%ecx
+	movdqa	%xmm4,%xmm6
+	movdqa	%xmm7,48(%esp)
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%ebp,%ebx
+	pslld	$2,%xmm4
+	addl	8(%esp),%eax
+	xorl	%edi,%esi
+	psrld	$30,%xmm6
+	movl	%ebx,%ebp
+	roll	$5,%ebx
+	xorl	%edx,%esi
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%esi,%eax
+	por	%xmm6,%xmm4
+	addl	12(%esp),%edi
+	xorl	%edx,%ebp
+	movdqa	64(%esp),%xmm6
+	movl	%eax,%esi
+	roll	$5,%eax
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	rorl	$7,%ebx
+	movdqa	%xmm4,%xmm7
+	addl	%ebp,%edi
+	addl	16(%esp),%edx
+	pxor	%xmm1,%xmm5
+.byte	102,15,58,15,251,8
+	xorl	%ecx,%esi
+	movl	%edi,%ebp
+	roll	$5,%edi
+	pxor	%xmm6,%xmm5
+	movdqa	%xmm1,64(%esp)
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	movdqa	%xmm0,%xmm1
+	paddd	%xmm4,%xmm0
+	rorl	$7,%eax
+	addl	%esi,%edx
+	pxor	%xmm7,%xmm5
+	addl	20(%esp),%ecx
+	xorl	%ebx,%ebp
+	movl	%edx,%esi
+	roll	$5,%edx
+	movdqa	%xmm5,%xmm7
+	movdqa	%xmm0,(%esp)
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	rorl	$7,%edi
+	addl	%ebp,%ecx
+	pslld	$2,%xmm5
+	addl	24(%esp),%ebx
+	xorl	%eax,%esi
+	psrld	$30,%xmm7
+	movl	%ecx,%ebp
+	roll	$5,%ecx
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%esi,%ebx
+	por	%xmm7,%xmm5
+	addl	28(%esp),%eax
+	xorl	%edi,%ebp
+	movdqa	80(%esp),%xmm7
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	movdqa	%xmm5,%xmm0
+	addl	%ebp,%eax
+	movl	%ecx,%ebp
+	pxor	%xmm2,%xmm6
+.byte	102,15,58,15,196,8
+	xorl	%edx,%ecx
+	addl	32(%esp),%edi
+	andl	%edx,%ebp
+	pxor	%xmm7,%xmm6
+	movdqa	%xmm2,80(%esp)
+	andl	%ecx,%esi
+	rorl	$7,%ebx
+	movdqa	%xmm1,%xmm2
+	paddd	%xmm5,%xmm1
+	addl	%ebp,%edi
+	movl	%eax,%ebp
+	pxor	%xmm0,%xmm6
+	roll	$5,%eax
+	addl	%esi,%edi
+	xorl	%edx,%ecx
+	addl	%eax,%edi
+	movdqa	%xmm6,%xmm0
+	movdqa	%xmm1,16(%esp)
+	movl	%ebx,%esi
+	xorl	%ecx,%ebx
+	addl	36(%esp),%edx
+	andl	%ecx,%esi
+	pslld	$2,%xmm6
+	andl	%ebx,%ebp
+	rorl	$7,%eax
+	psrld	$30,%xmm0
+	addl	%esi,%edx
+	movl	%edi,%esi
+	roll	$5,%edi
+	addl	%ebp,%edx
+	xorl	%ecx,%ebx
+	addl	%edi,%edx
+	por	%xmm0,%xmm6
+	movl	%eax,%ebp
+	xorl	%ebx,%eax
+	movdqa	96(%esp),%xmm0
+	addl	40(%esp),%ecx
+	andl	%ebx,%ebp
+	andl	%eax,%esi
+	rorl	$7,%edi
+	addl	%ebp,%ecx
+	movdqa	%xmm6,%xmm1
+	movl	%edx,%ebp
+	roll	$5,%edx
+	addl	%esi,%ecx
+	xorl	%ebx,%eax
+	addl	%edx,%ecx
+	movl	%edi,%esi
+	xorl	%eax,%edi
+	addl	44(%esp),%ebx
+	andl	%eax,%esi
+	andl	%edi,%ebp
+	rorl	$7,%edx
+	addl	%esi,%ebx
+	movl	%ecx,%esi
+	roll	$5,%ecx
+	addl	%ebp,%ebx
+	xorl	%eax,%edi
+	addl	%ecx,%ebx
+	movl	%edx,%ebp
+	pxor	%xmm3,%xmm7
+.byte	102,15,58,15,205,8
+	xorl	%edi,%edx
+	addl	48(%esp),%eax
+	andl	%edi,%ebp
+	pxor	%xmm0,%xmm7
+	movdqa	%xmm3,96(%esp)
+	andl	%edx,%esi
+	rorl	$7,%ecx
+	movdqa	144(%esp),%xmm3
+	paddd	%xmm6,%xmm2
+	addl	%ebp,%eax
+	movl	%ebx,%ebp
+	pxor	%xmm1,%xmm7
+	roll	$5,%ebx
+	addl	%esi,%eax
+	xorl	%edi,%edx
+	addl	%ebx,%eax
+	movdqa	%xmm7,%xmm1
+	movdqa	%xmm2,32(%esp)
+	movl	%ecx,%esi
+	xorl	%edx,%ecx
+	addl	52(%esp),%edi
+	andl	%edx,%esi
+	pslld	$2,%xmm7
+	andl	%ecx,%ebp
+	rorl	$7,%ebx
+	psrld	$30,%xmm1
+	addl	%esi,%edi
+	movl	%eax,%esi
+	roll	$5,%eax
+	addl	%ebp,%edi
+	xorl	%edx,%ecx
+	addl	%eax,%edi
+	por	%xmm1,%xmm7
+	movl	%ebx,%ebp
+	xorl	%ecx,%ebx
+	movdqa	64(%esp),%xmm1
+	addl	56(%esp),%edx
+	andl	%ecx,%ebp
+	andl	%ebx,%esi
+	rorl	$7,%eax
+	addl	%ebp,%edx
+	movdqa	%xmm7,%xmm2
+	movl	%edi,%ebp
+	roll	$5,%edi
+	addl	%esi,%edx
+	xorl	%ecx,%ebx
+	addl	%edi,%edx
+	movl	%eax,%esi
+	xorl	%ebx,%eax
+	addl	60(%esp),%ecx
+	andl	%ebx,%esi
+	andl	%eax,%ebp
+	rorl	$7,%edi
+	addl	%esi,%ecx
+	movl	%edx,%esi
+	roll	$5,%edx
+	addl	%ebp,%ecx
+	xorl	%ebx,%eax
+	addl	%edx,%ecx
+	movl	%edi,%ebp
+	pxor	%xmm4,%xmm0
+.byte	102,15,58,15,214,8
+	xorl	%eax,%edi
+	addl	(%esp),%ebx
+	andl	%eax,%ebp
+	pxor	%xmm1,%xmm0
+	movdqa	%xmm4,64(%esp)
+	andl	%edi,%esi
+	rorl	$7,%edx
+	movdqa	%xmm3,%xmm4
+	paddd	%xmm7,%xmm3
+	addl	%ebp,%ebx
+	movl	%ecx,%ebp
+	pxor	%xmm2,%xmm0
+	roll	$5,%ecx
+	addl	%esi,%ebx
+	xorl	%eax,%edi
+	addl	%ecx,%ebx
+	movdqa	%xmm0,%xmm2
+	movdqa	%xmm3,48(%esp)
+	movl	%edx,%esi
+	xorl	%edi,%edx
+	addl	4(%esp),%eax
+	andl	%edi,%esi
+	pslld	$2,%xmm0
+	andl	%edx,%ebp
+	rorl	$7,%ecx
+	psrld	$30,%xmm2
+	addl	%esi,%eax
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	addl	%ebp,%eax
+	xorl	%edi,%edx
+	addl	%ebx,%eax
+	por	%xmm2,%xmm0
+	movl	%ecx,%ebp
+	xorl	%edx,%ecx
+	movdqa	80(%esp),%xmm2
+	addl	8(%esp),%edi
+	andl	%edx,%ebp
+	andl	%ecx,%esi
+	rorl	$7,%ebx
+	addl	%ebp,%edi
+	movdqa	%xmm0,%xmm3
+	movl	%eax,%ebp
+	roll	$5,%eax
+	addl	%esi,%edi
+	xorl	%edx,%ecx
+	addl	%eax,%edi
+	movl	%ebx,%esi
+	xorl	%ecx,%ebx
+	addl	12(%esp),%edx
+	andl	%ecx,%esi
+	andl	%ebx,%ebp
+	rorl	$7,%eax
+	addl	%esi,%edx
+	movl	%edi,%esi
+	roll	$5,%edi
+	addl	%ebp,%edx
+	xorl	%ecx,%ebx
+	addl	%edi,%edx
+	movl	%eax,%ebp
+	pxor	%xmm5,%xmm1
+.byte	102,15,58,15,223,8
+	xorl	%ebx,%eax
+	addl	16(%esp),%ecx
+	andl	%ebx,%ebp
+	pxor	%xmm2,%xmm1
+	movdqa	%xmm5,80(%esp)
+	andl	%eax,%esi
+	rorl	$7,%edi
+	movdqa	%xmm4,%xmm5
+	paddd	%xmm0,%xmm4
+	addl	%ebp,%ecx
+	movl	%edx,%ebp
+	pxor	%xmm3,%xmm1
+	roll	$5,%edx
+	addl	%esi,%ecx
+	xorl	%ebx,%eax
+	addl	%edx,%ecx
+	movdqa	%xmm1,%xmm3
+	movdqa	%xmm4,(%esp)
+	movl	%edi,%esi
+	xorl	%eax,%edi
+	addl	20(%esp),%ebx
+	andl	%eax,%esi
+	pslld	$2,%xmm1
+	andl	%edi,%ebp
+	rorl	$7,%edx
+	psrld	$30,%xmm3
+	addl	%esi,%ebx
+	movl	%ecx,%esi
+	roll	$5,%ecx
+	addl	%ebp,%ebx
+	xorl	%eax,%edi
+	addl	%ecx,%ebx
+	por	%xmm3,%xmm1
+	movl	%edx,%ebp
+	xorl	%edi,%edx
+	movdqa	96(%esp),%xmm3
+	addl	24(%esp),%eax
+	andl	%edi,%ebp
+	andl	%edx,%esi
+	rorl	$7,%ecx
+	addl	%ebp,%eax
+	movdqa	%xmm1,%xmm4
+	movl	%ebx,%ebp
+	roll	$5,%ebx
+	addl	%esi,%eax
+	xorl	%edi,%edx
+	addl	%ebx,%eax
+	movl	%ecx,%esi
+	xorl	%edx,%ecx
+	addl	28(%esp),%edi
+	andl	%edx,%esi
+	andl	%ecx,%ebp
+	rorl	$7,%ebx
+	addl	%esi,%edi
+	movl	%eax,%esi
+	roll	$5,%eax
+	addl	%ebp,%edi
+	xorl	%edx,%ecx
+	addl	%eax,%edi
+	movl	%ebx,%ebp
+	pxor	%xmm6,%xmm2
+.byte	102,15,58,15,224,8
+	xorl	%ecx,%ebx
+	addl	32(%esp),%edx
+	andl	%ecx,%ebp
+	pxor	%xmm3,%xmm2
+	movdqa	%xmm6,96(%esp)
+	andl	%ebx,%esi
+	rorl	$7,%eax
+	movdqa	%xmm5,%xmm6
+	paddd	%xmm1,%xmm5
+	addl	%ebp,%edx
+	movl	%edi,%ebp
+	pxor	%xmm4,%xmm2
+	roll	$5,%edi
+	addl	%esi,%edx
+	xorl	%ecx,%ebx
+	addl	%edi,%edx
+	movdqa	%xmm2,%xmm4
+	movdqa	%xmm5,16(%esp)
+	movl	%eax,%esi
+	xorl	%ebx,%eax
+	addl	36(%esp),%ecx
+	andl	%ebx,%esi
+	pslld	$2,%xmm2
+	andl	%eax,%ebp
+	rorl	$7,%edi
+	psrld	$30,%xmm4
+	addl	%esi,%ecx
+	movl	%edx,%esi
+	roll	$5,%edx
+	addl	%ebp,%ecx
+	xorl	%ebx,%eax
+	addl	%edx,%ecx
+	por	%xmm4,%xmm2
+	movl	%edi,%ebp
+	xorl	%eax,%edi
+	movdqa	64(%esp),%xmm4
+	addl	40(%esp),%ebx
+	andl	%eax,%ebp
+	andl	%edi,%esi
+	rorl	$7,%edx
+	addl	%ebp,%ebx
+	movdqa	%xmm2,%xmm5
+	movl	%ecx,%ebp
+	roll	$5,%ecx
+	addl	%esi,%ebx
+	xorl	%eax,%edi
+	addl	%ecx,%ebx
+	movl	%edx,%esi
+	xorl	%edi,%edx
+	addl	44(%esp),%eax
+	andl	%edi,%esi
+	andl	%edx,%ebp
+	rorl	$7,%ecx
+	addl	%esi,%eax
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	addl	%ebp,%eax
+	xorl	%edi,%edx
+	addl	%ebx,%eax
+	addl	48(%esp),%edi
+	pxor	%xmm7,%xmm3
+.byte	102,15,58,15,233,8
+	xorl	%edx,%esi
+	movl	%eax,%ebp
+	roll	$5,%eax
+	pxor	%xmm4,%xmm3
+	movdqa	%xmm7,64(%esp)
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	movdqa	%xmm6,%xmm7
+	paddd	%xmm2,%xmm6
+	rorl	$7,%ebx
+	addl	%esi,%edi
+	pxor	%xmm5,%xmm3
+	addl	52(%esp),%edx
+	xorl	%ecx,%ebp
+	movl	%edi,%esi
+	roll	$5,%edi
+	movdqa	%xmm3,%xmm5
+	movdqa	%xmm6,32(%esp)
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	rorl	$7,%eax
+	addl	%ebp,%edx
+	pslld	$2,%xmm3
+	addl	56(%esp),%ecx
+	xorl	%ebx,%esi
+	psrld	$30,%xmm5
+	movl	%edx,%ebp
+	roll	$5,%edx
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	rorl	$7,%edi
+	addl	%esi,%ecx
+	por	%xmm5,%xmm3
+	addl	60(%esp),%ebx
+	xorl	%eax,%ebp
+	movl	%ecx,%esi
+	roll	$5,%ecx
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%ebp,%ebx
+	addl	(%esp),%eax
+	paddd	%xmm3,%xmm7
+	xorl	%edi,%esi
+	movl	%ebx,%ebp
+	roll	$5,%ebx
+	xorl	%edx,%esi
+	movdqa	%xmm7,48(%esp)
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%esi,%eax
+	addl	4(%esp),%edi
+	xorl	%edx,%ebp
+	movl	%eax,%esi
+	roll	$5,%eax
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	rorl	$7,%ebx
+	addl	%ebp,%edi
+	addl	8(%esp),%edx
+	xorl	%ecx,%esi
+	movl	%edi,%ebp
+	roll	$5,%edi
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	rorl	$7,%eax
+	addl	%esi,%edx
+	addl	12(%esp),%ecx
+	xorl	%ebx,%ebp
+	movl	%edx,%esi
+	roll	$5,%edx
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	rorl	$7,%edi
+	addl	%ebp,%ecx
+	movl	196(%esp),%ebp
+	cmpl	200(%esp),%ebp
+	je	.L005done
+	movdqa	160(%esp),%xmm7
+	movdqa	176(%esp),%xmm6
+	movdqu	(%ebp),%xmm0
+	movdqu	16(%ebp),%xmm1
+	movdqu	32(%ebp),%xmm2
+	movdqu	48(%ebp),%xmm3
+	addl	$64,%ebp
+.byte	102,15,56,0,198
+	movl	%ebp,196(%esp)
+	movdqa	%xmm7,96(%esp)
+	addl	16(%esp),%ebx
+	xorl	%eax,%esi
+.byte	102,15,56,0,206
+	movl	%ecx,%ebp
+	roll	$5,%ecx
+	paddd	%xmm7,%xmm0
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%esi,%ebx
+	movdqa	%xmm0,(%esp)
+	addl	20(%esp),%eax
+	xorl	%edi,%ebp
+	psubd	%xmm7,%xmm0
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%ebp,%eax
+	addl	24(%esp),%edi
+	xorl	%edx,%esi
+	movl	%eax,%ebp
+	roll	$5,%eax
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	rorl	$7,%ebx
+	addl	%esi,%edi
+	addl	28(%esp),%edx
+	xorl	%ecx,%ebp
+	movl	%edi,%esi
+	roll	$5,%edi
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	rorl	$7,%eax
+	addl	%ebp,%edx
+	addl	32(%esp),%ecx
+	xorl	%ebx,%esi
+.byte	102,15,56,0,214
+	movl	%edx,%ebp
+	roll	$5,%edx
+	paddd	%xmm7,%xmm1
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	rorl	$7,%edi
+	addl	%esi,%ecx
+	movdqa	%xmm1,16(%esp)
+	addl	36(%esp),%ebx
+	xorl	%eax,%ebp
+	psubd	%xmm7,%xmm1
+	movl	%ecx,%esi
+	roll	$5,%ecx
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%ebp,%ebx
+	addl	40(%esp),%eax
+	xorl	%edi,%esi
+	movl	%ebx,%ebp
+	roll	$5,%ebx
+	xorl	%edx,%esi
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%esi,%eax
+	addl	44(%esp),%edi
+	xorl	%edx,%ebp
+	movl	%eax,%esi
+	roll	$5,%eax
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	rorl	$7,%ebx
+	addl	%ebp,%edi
+	addl	48(%esp),%edx
+	xorl	%ecx,%esi
+.byte	102,15,56,0,222
+	movl	%edi,%ebp
+	roll	$5,%edi
+	paddd	%xmm7,%xmm2
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	rorl	$7,%eax
+	addl	%esi,%edx
+	movdqa	%xmm2,32(%esp)
+	addl	52(%esp),%ecx
+	xorl	%ebx,%ebp
+	psubd	%xmm7,%xmm2
+	movl	%edx,%esi
+	roll	$5,%edx
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	rorl	$7,%edi
+	addl	%ebp,%ecx
+	addl	56(%esp),%ebx
+	xorl	%eax,%esi
+	movl	%ecx,%ebp
+	roll	$5,%ecx
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%esi,%ebx
+	addl	60(%esp),%eax
+	xorl	%edi,%ebp
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%ebp,%eax
+	movl	192(%esp),%ebp
+	addl	(%ebp),%eax
+	addl	4(%ebp),%esi
+	addl	8(%ebp),%ecx
+	movl	%eax,(%ebp)
+	addl	12(%ebp),%edx
+	movl	%esi,4(%ebp)
+	addl	16(%ebp),%edi
+	movl	%ecx,8(%ebp)
+	movl	%esi,%ebx
+	movl	%edx,12(%ebp)
+	movl	%edi,16(%ebp)
+	movdqa	%xmm1,%xmm4
+	jmp	.L004loop
+.align	16
+.L005done:
+	addl	16(%esp),%ebx
+	xorl	%eax,%esi
+	movl	%ecx,%ebp
+	roll	$5,%ecx
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%esi,%ebx
+	addl	20(%esp),%eax
+	xorl	%edi,%ebp
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%ebp,%eax
+	addl	24(%esp),%edi
+	xorl	%edx,%esi
+	movl	%eax,%ebp
+	roll	$5,%eax
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	rorl	$7,%ebx
+	addl	%esi,%edi
+	addl	28(%esp),%edx
+	xorl	%ecx,%ebp
+	movl	%edi,%esi
+	roll	$5,%edi
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	rorl	$7,%eax
+	addl	%ebp,%edx
+	addl	32(%esp),%ecx
+	xorl	%ebx,%esi
+	movl	%edx,%ebp
+	roll	$5,%edx
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	rorl	$7,%edi
+	addl	%esi,%ecx
+	addl	36(%esp),%ebx
+	xorl	%eax,%ebp
+	movl	%ecx,%esi
+	roll	$5,%ecx
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%ebp,%ebx
+	addl	40(%esp),%eax
+	xorl	%edi,%esi
+	movl	%ebx,%ebp
+	roll	$5,%ebx
+	xorl	%edx,%esi
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%esi,%eax
+	addl	44(%esp),%edi
+	xorl	%edx,%ebp
+	movl	%eax,%esi
+	roll	$5,%eax
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	rorl	$7,%ebx
+	addl	%ebp,%edi
+	addl	48(%esp),%edx
+	xorl	%ecx,%esi
+	movl	%edi,%ebp
+	roll	$5,%edi
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	rorl	$7,%eax
+	addl	%esi,%edx
+	addl	52(%esp),%ecx
+	xorl	%ebx,%ebp
+	movl	%edx,%esi
+	roll	$5,%edx
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	rorl	$7,%edi
+	addl	%ebp,%ecx
+	addl	56(%esp),%ebx
+	xorl	%eax,%esi
+	movl	%ecx,%ebp
+	roll	$5,%ecx
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%esi,%ebx
+	addl	60(%esp),%eax
+	xorl	%edi,%ebp
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%ebp,%eax
+	movl	192(%esp),%ebp
+	addl	(%ebp),%eax
+	movl	204(%esp),%esp
+	addl	4(%ebp),%esi
+	addl	8(%ebp),%ecx
+	movl	%eax,(%ebp)
+	addl	12(%ebp),%edx
+	movl	%esi,4(%ebp)
+	addl	16(%ebp),%edi
+	movl	%ecx,8(%ebp)
+	movl	%edx,12(%ebp)
+	movl	%edi,16(%ebp)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	_sha1_block_data_order_ssse3,.-_sha1_block_data_order_ssse3
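+# AVX code path: structurally identical to the SSSE3 version above, but
+# spelled with non-destructive three-operand VEX instructions (vpshufb,
+# vpalignr, vpaddd, ...) and bracketed by vzeroall.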
+.type	_sha1_block_data_order_avx,@function
+.align	16
+_sha1_block_data_order_avx:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	call	.L006pic_point
+.L006pic_point:
+	popl	%ebp
+	leal	.LK_XX_XX-.L006pic_point(%ebp),%ebp
+.Lavx_shortcut:
+	vzeroall
+	vmovdqa	(%ebp),%xmm7
+	vmovdqa	16(%ebp),%xmm0
+	vmovdqa	32(%ebp),%xmm1
+	vmovdqa	48(%ebp),%xmm2
+	vmovdqa	64(%ebp),%xmm6
+	movl	20(%esp),%edi
+	movl	24(%esp),%ebp
+	movl	28(%esp),%edx
+	movl	%esp,%esi
+	subl	$208,%esp
+	andl	$-64,%esp
+	vmovdqa	%xmm0,112(%esp)
+	vmovdqa	%xmm1,128(%esp)
+	vmovdqa	%xmm2,144(%esp)
+	shll	$6,%edx
+	vmovdqa	%xmm7,160(%esp)
+	addl	%ebp,%edx
+	vmovdqa	%xmm6,176(%esp)
+	addl	$64,%ebp
+	movl	%edi,192(%esp)
+	movl	%ebp,196(%esp)
+	movl	%edx,200(%esp)
+	movl	%esi,204(%esp)
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	16(%edi),%edi
+	movl	%ebx,%esi
+	vmovdqu	-64(%ebp),%xmm0
+	vmovdqu	-48(%ebp),%xmm1
+	vmovdqu	-32(%ebp),%xmm2
+	vmovdqu	-16(%ebp),%xmm3
+	vpshufb	%xmm6,%xmm0,%xmm0
+	vpshufb	%xmm6,%xmm1,%xmm1
+	vpshufb	%xmm6,%xmm2,%xmm2
+	vmovdqa	%xmm7,96(%esp)
+	vpshufb	%xmm6,%xmm3,%xmm3
+	vpaddd	%xmm7,%xmm0,%xmm4
+	vpaddd	%xmm7,%xmm1,%xmm5
+	vpaddd	%xmm7,%xmm2,%xmm6
+	vmovdqa	%xmm4,(%esp)
+	vmovdqa	%xmm5,16(%esp)
+	vmovdqa	%xmm6,32(%esp)
+	jmp	.L007loop
+.align	16
+.L007loop:
+	addl	(%esp),%edi
+	xorl	%edx,%ecx
+	vpalignr	$8,%xmm0,%xmm1,%xmm4
+	movl	%eax,%ebp
+	shldl	$5,%eax,%eax
+	vpaddd	%xmm3,%xmm7,%xmm7
+	vmovdqa	%xmm0,64(%esp)
+	andl	%ecx,%esi
+	xorl	%edx,%ecx
+	vpsrldq	$4,%xmm3,%xmm6
+	xorl	%edx,%esi
+	addl	%eax,%edi
+	vpxor	%xmm0,%xmm4,%xmm4
+	shrdl	$2,%ebx,%ebx
+	addl	%esi,%edi
+	vpxor	%xmm2,%xmm6,%xmm6
+	addl	4(%esp),%edx
+	xorl	%ecx,%ebx
+	vmovdqa	%xmm7,48(%esp)
+	movl	%edi,%esi
+	shldl	$5,%edi,%edi
+	vpxor	%xmm6,%xmm4,%xmm4
+	andl	%ebx,%ebp
+	xorl	%ecx,%ebx
+	xorl	%ecx,%ebp
+	addl	%edi,%edx
+	vpsrld	$31,%xmm4,%xmm6
+	shrdl	$7,%eax,%eax
+	addl	%ebp,%edx
+	addl	8(%esp),%ecx
+	xorl	%ebx,%eax
+	vpslldq	$12,%xmm4,%xmm0
+	vpaddd	%xmm4,%xmm4,%xmm4
+	movl	%edx,%ebp
+	shldl	$5,%edx,%edx
+	andl	%eax,%esi
+	xorl	%ebx,%eax
+	vpsrld	$30,%xmm0,%xmm7
+	vpor	%xmm6,%xmm4,%xmm4
+	xorl	%ebx,%esi
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%esi,%ecx
+	vpslld	$2,%xmm0,%xmm0
+	addl	12(%esp),%ebx
+	xorl	%eax,%edi
+	vpxor	%xmm7,%xmm4,%xmm4
+	movl	%ecx,%esi
+	shldl	$5,%ecx,%ecx
+	andl	%edi,%ebp
+	xorl	%eax,%edi
+	vpxor	%xmm0,%xmm4,%xmm4
+	xorl	%eax,%ebp
+	addl	%ecx,%ebx
+	vmovdqa	96(%esp),%xmm0
+	shrdl	$7,%edx,%edx
+	addl	%ebp,%ebx
+	addl	16(%esp),%eax
+	xorl	%edi,%edx
+	vpalignr	$8,%xmm1,%xmm2,%xmm5
+	movl	%ebx,%ebp
+	shldl	$5,%ebx,%ebx
+	vpaddd	%xmm4,%xmm0,%xmm0
+	vmovdqa	%xmm1,80(%esp)
+	andl	%edx,%esi
+	xorl	%edi,%edx
+	vpsrldq	$4,%xmm4,%xmm7
+	xorl	%edi,%esi
+	addl	%ebx,%eax
+	vpxor	%xmm1,%xmm5,%xmm5
+	shrdl	$7,%ecx,%ecx
+	addl	%esi,%eax
+	vpxor	%xmm3,%xmm7,%xmm7
+	addl	20(%esp),%edi
+	xorl	%edx,%ecx
+	vmovdqa	%xmm0,(%esp)
+	movl	%eax,%esi
+	shldl	$5,%eax,%eax
+	vpxor	%xmm7,%xmm5,%xmm5
+	andl	%ecx,%ebp
+	xorl	%edx,%ecx
+	xorl	%edx,%ebp
+	addl	%eax,%edi
+	vpsrld	$31,%xmm5,%xmm7
+	shrdl	$7,%ebx,%ebx
+	addl	%ebp,%edi
+	addl	24(%esp),%edx
+	xorl	%ecx,%ebx
+	vpslldq	$12,%xmm5,%xmm1
+	vpaddd	%xmm5,%xmm5,%xmm5
+	movl	%edi,%ebp
+	shldl	$5,%edi,%edi
+	andl	%ebx,%esi
+	xorl	%ecx,%ebx
+	vpsrld	$30,%xmm1,%xmm0
+	vpor	%xmm7,%xmm5,%xmm5
+	xorl	%ecx,%esi
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%esi,%edx
+	vpslld	$2,%xmm1,%xmm1
+	addl	28(%esp),%ecx
+	xorl	%ebx,%eax
+	vpxor	%xmm0,%xmm5,%xmm5
+	movl	%edx,%esi
+	shldl	$5,%edx,%edx
+	andl	%eax,%ebp
+	xorl	%ebx,%eax
+	vpxor	%xmm1,%xmm5,%xmm5
+	xorl	%ebx,%ebp
+	addl	%edx,%ecx
+	vmovdqa	112(%esp),%xmm1
+	shrdl	$7,%edi,%edi
+	addl	%ebp,%ecx
+	addl	32(%esp),%ebx
+	xorl	%eax,%edi
+	vpalignr	$8,%xmm2,%xmm3,%xmm6
+	movl	%ecx,%ebp
+	shldl	$5,%ecx,%ecx
+	vpaddd	%xmm5,%xmm1,%xmm1
+	vmovdqa	%xmm2,96(%esp)
+	andl	%edi,%esi
+	xorl	%eax,%edi
+	vpsrldq	$4,%xmm5,%xmm0
+	xorl	%eax,%esi
+	addl	%ecx,%ebx
+	vpxor	%xmm2,%xmm6,%xmm6
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	vpxor	%xmm4,%xmm0,%xmm0
+	addl	36(%esp),%eax
+	xorl	%edi,%edx
+	vmovdqa	%xmm1,16(%esp)
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	vpxor	%xmm0,%xmm6,%xmm6
+	andl	%edx,%ebp
+	xorl	%edi,%edx
+	xorl	%edi,%ebp
+	addl	%ebx,%eax
+	vpsrld	$31,%xmm6,%xmm0
+	shrdl	$7,%ecx,%ecx
+	addl	%ebp,%eax
+	addl	40(%esp),%edi
+	xorl	%edx,%ecx
+	vpslldq	$12,%xmm6,%xmm2
+	vpaddd	%xmm6,%xmm6,%xmm6
+	movl	%eax,%ebp
+	shldl	$5,%eax,%eax
+	andl	%ecx,%esi
+	xorl	%edx,%ecx
+	vpsrld	$30,%xmm2,%xmm1
+	vpor	%xmm0,%xmm6,%xmm6
+	xorl	%edx,%esi
+	addl	%eax,%edi
+	shrdl	$7,%ebx,%ebx
+	addl	%esi,%edi
+	vpslld	$2,%xmm2,%xmm2
+	vmovdqa	64(%esp),%xmm0
+	addl	44(%esp),%edx
+	xorl	%ecx,%ebx
+	vpxor	%xmm1,%xmm6,%xmm6
+	movl	%edi,%esi
+	shldl	$5,%edi,%edi
+	andl	%ebx,%ebp
+	xorl	%ecx,%ebx
+	vpxor	%xmm2,%xmm6,%xmm6
+	xorl	%ecx,%ebp
+	addl	%edi,%edx
+	vmovdqa	112(%esp),%xmm2
+	shrdl	$7,%eax,%eax
+	addl	%ebp,%edx
+	addl	48(%esp),%ecx
+	xorl	%ebx,%eax
+	vpalignr	$8,%xmm3,%xmm4,%xmm7
+	movl	%edx,%ebp
+	shldl	$5,%edx,%edx
+	vpaddd	%xmm6,%xmm2,%xmm2
+	vmovdqa	%xmm3,64(%esp)
+	andl	%eax,%esi
+	xorl	%ebx,%eax
+	vpsrldq	$4,%xmm6,%xmm1
+	xorl	%ebx,%esi
+	addl	%edx,%ecx
+	vpxor	%xmm3,%xmm7,%xmm7
+	shrdl	$7,%edi,%edi
+	addl	%esi,%ecx
+	vpxor	%xmm5,%xmm1,%xmm1
+	addl	52(%esp),%ebx
+	xorl	%eax,%edi
+	vmovdqa	%xmm2,32(%esp)
+	movl	%ecx,%esi
+	shldl	$5,%ecx,%ecx
+	vpxor	%xmm1,%xmm7,%xmm7
+	andl	%edi,%ebp
+	xorl	%eax,%edi
+	xorl	%eax,%ebp
+	addl	%ecx,%ebx
+	vpsrld	$31,%xmm7,%xmm1
+	shrdl	$7,%edx,%edx
+	addl	%ebp,%ebx
+	addl	56(%esp),%eax
+	xorl	%edi,%edx
+	vpslldq	$12,%xmm7,%xmm3
+	vpaddd	%xmm7,%xmm7,%xmm7
+	movl	%ebx,%ebp
+	shldl	$5,%ebx,%ebx
+	andl	%edx,%esi
+	xorl	%edi,%edx
+	vpsrld	$30,%xmm3,%xmm2
+	vpor	%xmm1,%xmm7,%xmm7
+	xorl	%edi,%esi
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%esi,%eax
+	vpslld	$2,%xmm3,%xmm3
+	vmovdqa	80(%esp),%xmm1
+	addl	60(%esp),%edi
+	xorl	%edx,%ecx
+	vpxor	%xmm2,%xmm7,%xmm7
+	movl	%eax,%esi
+	shldl	$5,%eax,%eax
+	andl	%ecx,%ebp
+	xorl	%edx,%ecx
+	vpxor	%xmm3,%xmm7,%xmm7
+	xorl	%edx,%ebp
+	addl	%eax,%edi
+	vmovdqa	112(%esp),%xmm3
+	shrdl	$7,%ebx,%ebx
+	addl	%ebp,%edi
+	vpalignr	$8,%xmm6,%xmm7,%xmm2
+	vpxor	%xmm4,%xmm0,%xmm0
+	addl	(%esp),%edx
+	xorl	%ecx,%ebx
+	movl	%edi,%ebp
+	shldl	$5,%edi,%edi
+	vpxor	%xmm1,%xmm0,%xmm0
+	vmovdqa	%xmm4,80(%esp)
+	andl	%ebx,%esi
+	xorl	%ecx,%ebx
+	vmovdqa	%xmm3,%xmm4
+	vpaddd	%xmm7,%xmm3,%xmm3
+	xorl	%ecx,%esi
+	addl	%edi,%edx
+	vpxor	%xmm2,%xmm0,%xmm0
+	shrdl	$7,%eax,%eax
+	addl	%esi,%edx
+	addl	4(%esp),%ecx
+	xorl	%ebx,%eax
+	vpsrld	$30,%xmm0,%xmm2
+	vmovdqa	%xmm3,48(%esp)
+	movl	%edx,%esi
+	shldl	$5,%edx,%edx
+	andl	%eax,%ebp
+	xorl	%ebx,%eax
+	vpslld	$2,%xmm0,%xmm0
+	xorl	%ebx,%ebp
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%ebp,%ecx
+	addl	8(%esp),%ebx
+	xorl	%eax,%edi
+	movl	%ecx,%ebp
+	shldl	$5,%ecx,%ecx
+	vpor	%xmm2,%xmm0,%xmm0
+	andl	%edi,%esi
+	xorl	%eax,%edi
+	vmovdqa	96(%esp),%xmm2
+	xorl	%eax,%esi
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	addl	12(%esp),%eax
+	xorl	%edi,%edx
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	andl	%edx,%ebp
+	xorl	%edi,%edx
+	xorl	%edi,%ebp
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%ebp,%eax
+	vpalignr	$8,%xmm7,%xmm0,%xmm3
+	vpxor	%xmm5,%xmm1,%xmm1
+	addl	16(%esp),%edi
+	xorl	%edx,%esi
+	movl	%eax,%ebp
+	shldl	$5,%eax,%eax
+	vpxor	%xmm2,%xmm1,%xmm1
+	vmovdqa	%xmm5,96(%esp)
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	vmovdqa	%xmm4,%xmm5
+	vpaddd	%xmm0,%xmm4,%xmm4
+	shrdl	$7,%ebx,%ebx
+	addl	%esi,%edi
+	vpxor	%xmm3,%xmm1,%xmm1
+	addl	20(%esp),%edx
+	xorl	%ecx,%ebp
+	movl	%edi,%esi
+	shldl	$5,%edi,%edi
+	vpsrld	$30,%xmm1,%xmm3
+	vmovdqa	%xmm4,(%esp)
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%ebp,%edx
+	vpslld	$2,%xmm1,%xmm1
+	addl	24(%esp),%ecx
+	xorl	%ebx,%esi
+	movl	%edx,%ebp
+	shldl	$5,%edx,%edx
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%esi,%ecx
+	vpor	%xmm3,%xmm1,%xmm1
+	addl	28(%esp),%ebx
+	xorl	%eax,%ebp
+	vmovdqa	64(%esp),%xmm3
+	movl	%ecx,%esi
+	shldl	$5,%ecx,%ecx
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%ebp,%ebx
+	vpalignr	$8,%xmm0,%xmm1,%xmm4
+	vpxor	%xmm6,%xmm2,%xmm2
+	addl	32(%esp),%eax
+	xorl	%edi,%esi
+	movl	%ebx,%ebp
+	shldl	$5,%ebx,%ebx
+	vpxor	%xmm3,%xmm2,%xmm2
+	vmovdqa	%xmm6,64(%esp)
+	xorl	%edx,%esi
+	addl	%ebx,%eax
+	vmovdqa	128(%esp),%xmm6
+	vpaddd	%xmm1,%xmm5,%xmm5
+	shrdl	$7,%ecx,%ecx
+	addl	%esi,%eax
+	vpxor	%xmm4,%xmm2,%xmm2
+	addl	36(%esp),%edi
+	xorl	%edx,%ebp
+	movl	%eax,%esi
+	shldl	$5,%eax,%eax
+	vpsrld	$30,%xmm2,%xmm4
+	vmovdqa	%xmm5,16(%esp)
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	shrdl	$7,%ebx,%ebx
+	addl	%ebp,%edi
+	vpslld	$2,%xmm2,%xmm2
+	addl	40(%esp),%edx
+	xorl	%ecx,%esi
+	movl	%edi,%ebp
+	shldl	$5,%edi,%edi
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%esi,%edx
+	vpor	%xmm4,%xmm2,%xmm2
+	addl	44(%esp),%ecx
+	xorl	%ebx,%ebp
+	vmovdqa	80(%esp),%xmm4
+	movl	%edx,%esi
+	shldl	$5,%edx,%edx
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%ebp,%ecx
+	vpalignr	$8,%xmm1,%xmm2,%xmm5
+	vpxor	%xmm7,%xmm3,%xmm3
+	addl	48(%esp),%ebx
+	xorl	%eax,%esi
+	movl	%ecx,%ebp
+	shldl	$5,%ecx,%ecx
+	vpxor	%xmm4,%xmm3,%xmm3
+	vmovdqa	%xmm7,80(%esp)
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	vmovdqa	%xmm6,%xmm7
+	vpaddd	%xmm2,%xmm6,%xmm6
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	vpxor	%xmm5,%xmm3,%xmm3
+	addl	52(%esp),%eax
+	xorl	%edi,%ebp
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	vpsrld	$30,%xmm3,%xmm5
+	vmovdqa	%xmm6,32(%esp)
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%ebp,%eax
+	vpslld	$2,%xmm3,%xmm3
+	addl	56(%esp),%edi
+	xorl	%edx,%esi
+	movl	%eax,%ebp
+	shldl	$5,%eax,%eax
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	shrdl	$7,%ebx,%ebx
+	addl	%esi,%edi
+	vpor	%xmm5,%xmm3,%xmm3
+	addl	60(%esp),%edx
+	xorl	%ecx,%ebp
+	vmovdqa	96(%esp),%xmm5
+	movl	%edi,%esi
+	shldl	$5,%edi,%edi
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%ebp,%edx
+	vpalignr	$8,%xmm2,%xmm3,%xmm6
+	vpxor	%xmm0,%xmm4,%xmm4
+	addl	(%esp),%ecx
+	xorl	%ebx,%esi
+	movl	%edx,%ebp
+	shldl	$5,%edx,%edx
+	vpxor	%xmm5,%xmm4,%xmm4
+	vmovdqa	%xmm0,96(%esp)
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	vmovdqa	%xmm7,%xmm0
+	vpaddd	%xmm3,%xmm7,%xmm7
+	shrdl	$7,%edi,%edi
+	addl	%esi,%ecx
+	vpxor	%xmm6,%xmm4,%xmm4
+	addl	4(%esp),%ebx
+	xorl	%eax,%ebp
+	movl	%ecx,%esi
+	shldl	$5,%ecx,%ecx
+	vpsrld	$30,%xmm4,%xmm6
+	vmovdqa	%xmm7,48(%esp)
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%ebp,%ebx
+	vpslld	$2,%xmm4,%xmm4
+	addl	8(%esp),%eax
+	xorl	%edi,%esi
+	movl	%ebx,%ebp
+	shldl	$5,%ebx,%ebx
+	xorl	%edx,%esi
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%esi,%eax
+	vpor	%xmm6,%xmm4,%xmm4
+	addl	12(%esp),%edi
+	xorl	%edx,%ebp
+	vmovdqa	64(%esp),%xmm6
+	movl	%eax,%esi
+	shldl	$5,%eax,%eax
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	shrdl	$7,%ebx,%ebx
+	addl	%ebp,%edi
+	vpalignr	$8,%xmm3,%xmm4,%xmm7
+	vpxor	%xmm1,%xmm5,%xmm5
+	addl	16(%esp),%edx
+	xorl	%ecx,%esi
+	movl	%edi,%ebp
+	shldl	$5,%edi,%edi
+	vpxor	%xmm6,%xmm5,%xmm5
+	vmovdqa	%xmm1,64(%esp)
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	vmovdqa	%xmm0,%xmm1
+	vpaddd	%xmm4,%xmm0,%xmm0
+	shrdl	$7,%eax,%eax
+	addl	%esi,%edx
+	vpxor	%xmm7,%xmm5,%xmm5
+	addl	20(%esp),%ecx
+	xorl	%ebx,%ebp
+	movl	%edx,%esi
+	shldl	$5,%edx,%edx
+	vpsrld	$30,%xmm5,%xmm7
+	vmovdqa	%xmm0,(%esp)
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%ebp,%ecx
+	vpslld	$2,%xmm5,%xmm5
+	addl	24(%esp),%ebx
+	xorl	%eax,%esi
+	movl	%ecx,%ebp
+	shldl	$5,%ecx,%ecx
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	vpor	%xmm7,%xmm5,%xmm5
+	addl	28(%esp),%eax
+	xorl	%edi,%ebp
+	vmovdqa	80(%esp),%xmm7
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%ebp,%eax
+	vpalignr	$8,%xmm4,%xmm5,%xmm0
+	vpxor	%xmm2,%xmm6,%xmm6
+	movl	%ecx,%ebp
+	xorl	%edx,%ecx
+	addl	32(%esp),%edi
+	andl	%edx,%ebp
+	vpxor	%xmm7,%xmm6,%xmm6
+	vmovdqa	%xmm2,80(%esp)
+	andl	%ecx,%esi
+	shrdl	$7,%ebx,%ebx
+	vmovdqa	%xmm1,%xmm2
+	vpaddd	%xmm5,%xmm1,%xmm1
+	addl	%ebp,%edi
+	movl	%eax,%ebp
+	vpxor	%xmm0,%xmm6,%xmm6
+	shldl	$5,%eax,%eax
+	addl	%esi,%edi
+	xorl	%edx,%ecx
+	addl	%eax,%edi
+	vpsrld	$30,%xmm6,%xmm0
+	vmovdqa	%xmm1,16(%esp)
+	movl	%ebx,%esi
+	xorl	%ecx,%ebx
+	addl	36(%esp),%edx
+	andl	%ecx,%esi
+	vpslld	$2,%xmm6,%xmm6
+	andl	%ebx,%ebp
+	shrdl	$7,%eax,%eax
+	addl	%esi,%edx
+	movl	%edi,%esi
+	shldl	$5,%edi,%edi
+	addl	%ebp,%edx
+	xorl	%ecx,%ebx
+	addl	%edi,%edx
+	vpor	%xmm0,%xmm6,%xmm6
+	movl	%eax,%ebp
+	xorl	%ebx,%eax
+	vmovdqa	96(%esp),%xmm0
+	addl	40(%esp),%ecx
+	andl	%ebx,%ebp
+	andl	%eax,%esi
+	shrdl	$7,%edi,%edi
+	addl	%ebp,%ecx
+	movl	%edx,%ebp
+	shldl	$5,%edx,%edx
+	addl	%esi,%ecx
+	xorl	%ebx,%eax
+	addl	%edx,%ecx
+	movl	%edi,%esi
+	xorl	%eax,%edi
+	addl	44(%esp),%ebx
+	andl	%eax,%esi
+	andl	%edi,%ebp
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	movl	%ecx,%esi
+	shldl	$5,%ecx,%ecx
+	addl	%ebp,%ebx
+	xorl	%eax,%edi
+	addl	%ecx,%ebx
+	vpalignr	$8,%xmm5,%xmm6,%xmm1
+	vpxor	%xmm3,%xmm7,%xmm7
+	movl	%edx,%ebp
+	xorl	%edi,%edx
+	addl	48(%esp),%eax
+	andl	%edi,%ebp
+	vpxor	%xmm0,%xmm7,%xmm7
+	vmovdqa	%xmm3,96(%esp)
+	andl	%edx,%esi
+	shrdl	$7,%ecx,%ecx
+	vmovdqa	144(%esp),%xmm3
+	vpaddd	%xmm6,%xmm2,%xmm2
+	addl	%ebp,%eax
+	movl	%ebx,%ebp
+	vpxor	%xmm1,%xmm7,%xmm7
+	shldl	$5,%ebx,%ebx
+	addl	%esi,%eax
+	xorl	%edi,%edx
+	addl	%ebx,%eax
+	vpsrld	$30,%xmm7,%xmm1
+	vmovdqa	%xmm2,32(%esp)
+	movl	%ecx,%esi
+	xorl	%edx,%ecx
+	addl	52(%esp),%edi
+	andl	%edx,%esi
+	vpslld	$2,%xmm7,%xmm7
+	andl	%ecx,%ebp
+	shrdl	$7,%ebx,%ebx
+	addl	%esi,%edi
+	movl	%eax,%esi
+	shldl	$5,%eax,%eax
+	addl	%ebp,%edi
+	xorl	%edx,%ecx
+	addl	%eax,%edi
+	vpor	%xmm1,%xmm7,%xmm7
+	movl	%ebx,%ebp
+	xorl	%ecx,%ebx
+	vmovdqa	64(%esp),%xmm1
+	addl	56(%esp),%edx
+	andl	%ecx,%ebp
+	andl	%ebx,%esi
+	shrdl	$7,%eax,%eax
+	addl	%ebp,%edx
+	movl	%edi,%ebp
+	shldl	$5,%edi,%edi
+	addl	%esi,%edx
+	xorl	%ecx,%ebx
+	addl	%edi,%edx
+	movl	%eax,%esi
+	xorl	%ebx,%eax
+	addl	60(%esp),%ecx
+	andl	%ebx,%esi
+	andl	%eax,%ebp
+	shrdl	$7,%edi,%edi
+	addl	%esi,%ecx
+	movl	%edx,%esi
+	shldl	$5,%edx,%edx
+	addl	%ebp,%ecx
+	xorl	%ebx,%eax
+	addl	%edx,%ecx
+	vpalignr	$8,%xmm6,%xmm7,%xmm2
+	vpxor	%xmm4,%xmm0,%xmm0
+	movl	%edi,%ebp
+	xorl	%eax,%edi
+	addl	(%esp),%ebx
+	andl	%eax,%ebp
+	vpxor	%xmm1,%xmm0,%xmm0
+	vmovdqa	%xmm4,64(%esp)
+	andl	%edi,%esi
+	shrdl	$7,%edx,%edx
+	vmovdqa	%xmm3,%xmm4
+	vpaddd	%xmm7,%xmm3,%xmm3
+	addl	%ebp,%ebx
+	movl	%ecx,%ebp
+	vpxor	%xmm2,%xmm0,%xmm0
+	shldl	$5,%ecx,%ecx
+	addl	%esi,%ebx
+	xorl	%eax,%edi
+	addl	%ecx,%ebx
+	vpsrld	$30,%xmm0,%xmm2
+	vmovdqa	%xmm3,48(%esp)
+	movl	%edx,%esi
+	xorl	%edi,%edx
+	addl	4(%esp),%eax
+	andl	%edi,%esi
+	vpslld	$2,%xmm0,%xmm0
+	andl	%edx,%ebp
+	shrdl	$7,%ecx,%ecx
+	addl	%esi,%eax
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	addl	%ebp,%eax
+	xorl	%edi,%edx
+	addl	%ebx,%eax
+	vpor	%xmm2,%xmm0,%xmm0
+	movl	%ecx,%ebp
+	xorl	%edx,%ecx
+	vmovdqa	80(%esp),%xmm2
+	addl	8(%esp),%edi
+	andl	%edx,%ebp
+	andl	%ecx,%esi
+	shrdl	$7,%ebx,%ebx
+	addl	%ebp,%edi
+	movl	%eax,%ebp
+	shldl	$5,%eax,%eax
+	addl	%esi,%edi
+	xorl	%edx,%ecx
+	addl	%eax,%edi
+	movl	%ebx,%esi
+	xorl	%ecx,%ebx
+	addl	12(%esp),%edx
+	andl	%ecx,%esi
+	andl	%ebx,%ebp
+	shrdl	$7,%eax,%eax
+	addl	%esi,%edx
+	movl	%edi,%esi
+	shldl	$5,%edi,%edi
+	addl	%ebp,%edx
+	xorl	%ecx,%ebx
+	addl	%edi,%edx
+	vpalignr	$8,%xmm7,%xmm0,%xmm3
+	vpxor	%xmm5,%xmm1,%xmm1
+	movl	%eax,%ebp
+	xorl	%ebx,%eax
+	addl	16(%esp),%ecx
+	andl	%ebx,%ebp
+	vpxor	%xmm2,%xmm1,%xmm1
+	vmovdqa	%xmm5,80(%esp)
+	andl	%eax,%esi
+	shrdl	$7,%edi,%edi
+	vmovdqa	%xmm4,%xmm5
+	vpaddd	%xmm0,%xmm4,%xmm4
+	addl	%ebp,%ecx
+	movl	%edx,%ebp
+	vpxor	%xmm3,%xmm1,%xmm1
+	shldl	$5,%edx,%edx
+	addl	%esi,%ecx
+	xorl	%ebx,%eax
+	addl	%edx,%ecx
+	vpsrld	$30,%xmm1,%xmm3
+	vmovdqa	%xmm4,(%esp)
+	movl	%edi,%esi
+	xorl	%eax,%edi
+	addl	20(%esp),%ebx
+	andl	%eax,%esi
+	vpslld	$2,%xmm1,%xmm1
+	andl	%edi,%ebp
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	movl	%ecx,%esi
+	shldl	$5,%ecx,%ecx
+	addl	%ebp,%ebx
+	xorl	%eax,%edi
+	addl	%ecx,%ebx
+	vpor	%xmm3,%xmm1,%xmm1
+	movl	%edx,%ebp
+	xorl	%edi,%edx
+	vmovdqa	96(%esp),%xmm3
+	addl	24(%esp),%eax
+	andl	%edi,%ebp
+	andl	%edx,%esi
+	shrdl	$7,%ecx,%ecx
+	addl	%ebp,%eax
+	movl	%ebx,%ebp
+	shldl	$5,%ebx,%ebx
+	addl	%esi,%eax
+	xorl	%edi,%edx
+	addl	%ebx,%eax
+	movl	%ecx,%esi
+	xorl	%edx,%ecx
+	addl	28(%esp),%edi
+	andl	%edx,%esi
+	andl	%ecx,%ebp
+	shrdl	$7,%ebx,%ebx
+	addl	%esi,%edi
+	movl	%eax,%esi
+	shldl	$5,%eax,%eax
+	addl	%ebp,%edi
+	xorl	%edx,%ecx
+	addl	%eax,%edi
+	vpalignr	$8,%xmm0,%xmm1,%xmm4
+	vpxor	%xmm6,%xmm2,%xmm2
+	movl	%ebx,%ebp
+	xorl	%ecx,%ebx
+	addl	32(%esp),%edx
+	andl	%ecx,%ebp
+	vpxor	%xmm3,%xmm2,%xmm2
+	vmovdqa	%xmm6,96(%esp)
+	andl	%ebx,%esi
+	shrdl	$7,%eax,%eax
+	vmovdqa	%xmm5,%xmm6
+	vpaddd	%xmm1,%xmm5,%xmm5
+	addl	%ebp,%edx
+	movl	%edi,%ebp
+	vpxor	%xmm4,%xmm2,%xmm2
+	shldl	$5,%edi,%edi
+	addl	%esi,%edx
+	xorl	%ecx,%ebx
+	addl	%edi,%edx
+	vpsrld	$30,%xmm2,%xmm4
+	vmovdqa	%xmm5,16(%esp)
+	movl	%eax,%esi
+	xorl	%ebx,%eax
+	addl	36(%esp),%ecx
+	andl	%ebx,%esi
+	vpslld	$2,%xmm2,%xmm2
+	andl	%eax,%ebp
+	shrdl	$7,%edi,%edi
+	addl	%esi,%ecx
+	movl	%edx,%esi
+	shldl	$5,%edx,%edx
+	addl	%ebp,%ecx
+	xorl	%ebx,%eax
+	addl	%edx,%ecx
+	vpor	%xmm4,%xmm2,%xmm2
+	movl	%edi,%ebp
+	xorl	%eax,%edi
+	vmovdqa	64(%esp),%xmm4
+	addl	40(%esp),%ebx
+	andl	%eax,%ebp
+	andl	%edi,%esi
+	shrdl	$7,%edx,%edx
+	addl	%ebp,%ebx
+	movl	%ecx,%ebp
+	shldl	$5,%ecx,%ecx
+	addl	%esi,%ebx
+	xorl	%eax,%edi
+	addl	%ecx,%ebx
+	movl	%edx,%esi
+	xorl	%edi,%edx
+	addl	44(%esp),%eax
+	andl	%edi,%esi
+	andl	%edx,%ebp
+	shrdl	$7,%ecx,%ecx
+	addl	%esi,%eax
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	addl	%ebp,%eax
+	xorl	%edi,%edx
+	addl	%ebx,%eax
+	vpalignr	$8,%xmm1,%xmm2,%xmm5
+	vpxor	%xmm7,%xmm3,%xmm3
+	addl	48(%esp),%edi
+	xorl	%edx,%esi
+	movl	%eax,%ebp
+	shldl	$5,%eax,%eax
+	vpxor	%xmm4,%xmm3,%xmm3
+	vmovdqa	%xmm7,64(%esp)
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	vmovdqa	%xmm6,%xmm7
+	vpaddd	%xmm2,%xmm6,%xmm6
+	shrdl	$7,%ebx,%ebx
+	addl	%esi,%edi
+	vpxor	%xmm5,%xmm3,%xmm3
+	addl	52(%esp),%edx
+	xorl	%ecx,%ebp
+	movl	%edi,%esi
+	shldl	$5,%edi,%edi
+	vpsrld	$30,%xmm3,%xmm5
+	vmovdqa	%xmm6,32(%esp)
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%ebp,%edx
+	vpslld	$2,%xmm3,%xmm3
+	addl	56(%esp),%ecx
+	xorl	%ebx,%esi
+	movl	%edx,%ebp
+	shldl	$5,%edx,%edx
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%esi,%ecx
+	vpor	%xmm5,%xmm3,%xmm3
+	addl	60(%esp),%ebx
+	xorl	%eax,%ebp
+	movl	%ecx,%esi
+	shldl	$5,%ecx,%ecx
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%ebp,%ebx
+	addl	(%esp),%eax
+	vpaddd	%xmm3,%xmm7,%xmm7
+	xorl	%edi,%esi
+	movl	%ebx,%ebp
+	shldl	$5,%ebx,%ebx
+	xorl	%edx,%esi
+	vmovdqa	%xmm7,48(%esp)
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%esi,%eax
+	addl	4(%esp),%edi
+	xorl	%edx,%ebp
+	movl	%eax,%esi
+	shldl	$5,%eax,%eax
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	shrdl	$7,%ebx,%ebx
+	addl	%ebp,%edi
+	addl	8(%esp),%edx
+	xorl	%ecx,%esi
+	movl	%edi,%ebp
+	shldl	$5,%edi,%edi
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%esi,%edx
+	addl	12(%esp),%ecx
+	xorl	%ebx,%ebp
+	movl	%edx,%esi
+	shldl	$5,%edx,%edx
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%ebp,%ecx
+	movl	196(%esp),%ebp
+	cmpl	200(%esp),%ebp
+	je	.L008done
+	vmovdqa	160(%esp),%xmm7
+	vmovdqa	176(%esp),%xmm6
+	vmovdqu	(%ebp),%xmm0
+	vmovdqu	16(%ebp),%xmm1
+	vmovdqu	32(%ebp),%xmm2
+	vmovdqu	48(%ebp),%xmm3
+	addl	$64,%ebp
+	vpshufb	%xmm6,%xmm0,%xmm0
+	movl	%ebp,196(%esp)
+	vmovdqa	%xmm7,96(%esp)
+	addl	16(%esp),%ebx
+	xorl	%eax,%esi
+	vpshufb	%xmm6,%xmm1,%xmm1
+	movl	%ecx,%ebp
+	shldl	$5,%ecx,%ecx
+	vpaddd	%xmm7,%xmm0,%xmm4
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	vmovdqa	%xmm4,(%esp)
+	addl	20(%esp),%eax
+	xorl	%edi,%ebp
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%ebp,%eax
+	addl	24(%esp),%edi
+	xorl	%edx,%esi
+	movl	%eax,%ebp
+	shldl	$5,%eax,%eax
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	shrdl	$7,%ebx,%ebx
+	addl	%esi,%edi
+	addl	28(%esp),%edx
+	xorl	%ecx,%ebp
+	movl	%edi,%esi
+	shldl	$5,%edi,%edi
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%ebp,%edx
+	addl	32(%esp),%ecx
+	xorl	%ebx,%esi
+	vpshufb	%xmm6,%xmm2,%xmm2
+	movl	%edx,%ebp
+	shldl	$5,%edx,%edx
+	vpaddd	%xmm7,%xmm1,%xmm5
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%esi,%ecx
+	vmovdqa	%xmm5,16(%esp)
+	addl	36(%esp),%ebx
+	xorl	%eax,%ebp
+	movl	%ecx,%esi
+	shldl	$5,%ecx,%ecx
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%ebp,%ebx
+	addl	40(%esp),%eax
+	xorl	%edi,%esi
+	movl	%ebx,%ebp
+	shldl	$5,%ebx,%ebx
+	xorl	%edx,%esi
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%esi,%eax
+	addl	44(%esp),%edi
+	xorl	%edx,%ebp
+	movl	%eax,%esi
+	shldl	$5,%eax,%eax
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	shrdl	$7,%ebx,%ebx
+	addl	%ebp,%edi
+	addl	48(%esp),%edx
+	xorl	%ecx,%esi
+	vpshufb	%xmm6,%xmm3,%xmm3
+	movl	%edi,%ebp
+	shldl	$5,%edi,%edi
+	vpaddd	%xmm7,%xmm2,%xmm6
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%esi,%edx
+	vmovdqa	%xmm6,32(%esp)
+	addl	52(%esp),%ecx
+	xorl	%ebx,%ebp
+	movl	%edx,%esi
+	shldl	$5,%edx,%edx
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%ebp,%ecx
+	addl	56(%esp),%ebx
+	xorl	%eax,%esi
+	movl	%ecx,%ebp
+	shldl	$5,%ecx,%ecx
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	addl	60(%esp),%eax
+	xorl	%edi,%ebp
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%ebp,%eax
+	movl	192(%esp),%ebp
+	addl	(%ebp),%eax
+	addl	4(%ebp),%esi
+	addl	8(%ebp),%ecx
+	movl	%eax,(%ebp)
+	addl	12(%ebp),%edx
+	movl	%esi,4(%ebp)
+	addl	16(%ebp),%edi
+	movl	%ecx,8(%ebp)
+	movl	%esi,%ebx
+	movl	%edx,12(%ebp)
+	movl	%edi,16(%ebp)
+	jmp	.L007loop
+.align	16
+.L008done:
+	addl	16(%esp),%ebx
+	xorl	%eax,%esi
+	movl	%ecx,%ebp
+	shldl	$5,%ecx,%ecx
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	addl	20(%esp),%eax
+	xorl	%edi,%ebp
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%ebp,%eax
+	addl	24(%esp),%edi
+	xorl	%edx,%esi
+	movl	%eax,%ebp
+	shldl	$5,%eax,%eax
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	shrdl	$7,%ebx,%ebx
+	addl	%esi,%edi
+	addl	28(%esp),%edx
+	xorl	%ecx,%ebp
+	movl	%edi,%esi
+	shldl	$5,%edi,%edi
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%ebp,%edx
+	addl	32(%esp),%ecx
+	xorl	%ebx,%esi
+	movl	%edx,%ebp
+	shldl	$5,%edx,%edx
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%esi,%ecx
+	addl	36(%esp),%ebx
+	xorl	%eax,%ebp
+	movl	%ecx,%esi
+	shldl	$5,%ecx,%ecx
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%ebp,%ebx
+	addl	40(%esp),%eax
+	xorl	%edi,%esi
+	movl	%ebx,%ebp
+	shldl	$5,%ebx,%ebx
+	xorl	%edx,%esi
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%esi,%eax
+	addl	44(%esp),%edi
+	xorl	%edx,%ebp
+	movl	%eax,%esi
+	shldl	$5,%eax,%eax
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	shrdl	$7,%ebx,%ebx
+	addl	%ebp,%edi
+	addl	48(%esp),%edx
+	xorl	%ecx,%esi
+	movl	%edi,%ebp
+	shldl	$5,%edi,%edi
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%esi,%edx
+	addl	52(%esp),%ecx
+	xorl	%ebx,%ebp
+	movl	%edx,%esi
+	shldl	$5,%edx,%edx
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%ebp,%ecx
+	addl	56(%esp),%ebx
+	xorl	%eax,%esi
+	movl	%ecx,%ebp
+	shldl	$5,%ecx,%ecx
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	addl	60(%esp),%eax
+	xorl	%edi,%ebp
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%ebp,%eax
+	vzeroall
+	movl	192(%esp),%ebp
+	addl	(%ebp),%eax
+	movl	204(%esp),%esp
+	addl	4(%ebp),%esi
+	addl	8(%ebp),%ecx
+	movl	%eax,(%ebp)
+	addl	12(%ebp),%edx
+	movl	%esi,4(%ebp)
+	addl	16(%ebp),%edi
+	movl	%ecx,8(%ebp)
+	movl	%edx,12(%ebp)
+	movl	%edi,16(%ebp)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	_sha1_block_data_order_avx,.-_sha1_block_data_order_avx
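+# Constant pool: the four SHA-1 round constants, each replicated across
+# a 128-bit lane (0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6),
+# followed by the pshufb byte-swap mask 0x00010203..0x0c0d0e0f.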
+.align	64
+.LK_XX_XX:
+.long	1518500249,1518500249,1518500249,1518500249
+.long	1859775393,1859775393,1859775393,1859775393
+.long	2400959708,2400959708,2400959708,2400959708
+.long	3395469782,3395469782,3395469782,3395469782
+.long	66051,67438087,134810123,202182159
+.byte	83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115
+.byte	102,111,114,109,32,102,111,114,32,120,56,54,44,32,67,82
+.byte	89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112
+.byte	114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.comm	OPENSSL_ia32cap_P,8,4
+#else
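+# Non-PIC variant of the same routines; here OPENSSL_ia32cap_P is taken
+# by absolute address (see the leal below) rather than through
+# position-independent addressing.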
+.file	"sha1-586.S"
+.text
+.globl	sha1_block_data_order
+.type	sha1_block_data_order,@function
+.align	16
+sha1_block_data_order:
+.L_sha1_block_data_order_begin:
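+# Runtime dispatch on the OPENSSL_ia32cap_P capability vector: word 1
+# bit 9 (SSSE3) and word 0 bit 24 (FXSR) must be set to leave the plain
+# x86 path; the AVX path additionally requires word 1 bit 28 (AVX)
+# together with word 0 bit 30, which appears to be OpenSSL's synthetic
+# Intel-CPU flag.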
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	call	.L000pic_point
+.L000pic_point:
+	popl	%ebp
+	leal	OPENSSL_ia32cap_P,%esi
+	leal	.LK_XX_XX-.L000pic_point(%ebp),%ebp
+	movl	(%esi),%eax
+	movl	4(%esi),%edx
+	testl	$512,%edx
+	jz	.L001x86
+	testl	$16777216,%eax
+	jz	.L001x86
+	andl	$268435456,%edx
+	andl	$1073741824,%eax
+	orl	%edx,%eax
+	cmpl	$1342177280,%eax
+	je	.Lavx_shortcut
+	jmp	.Lssse3_shortcut
+.align	16
+.L001x86:
+	movl	20(%esp),%ebp
+	movl	24(%esp),%esi
+	movl	28(%esp),%eax
+	subl	$76,%esp
+	shll	$6,%eax
+	addl	%esi,%eax
+	movl	%eax,104(%esp)
+	movl	16(%ebp),%edi
+	jmp	.L002loop
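+# Plain x86 loop: each iteration byte-swaps the 64-byte input block into
+# a 16-dword window at the bottom of the stack frame, then runs all 80
+# rounds fully unrolled, updating the schedule in place.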
+.align	16
+.L002loop:
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,(%esp)
+	movl	%ebx,4(%esp)
+	movl	%ecx,8(%esp)
+	movl	%edx,12(%esp)
+	movl	16(%esi),%eax
+	movl	20(%esi),%ebx
+	movl	24(%esi),%ecx
+	movl	28(%esi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,16(%esp)
+	movl	%ebx,20(%esp)
+	movl	%ecx,24(%esp)
+	movl	%edx,28(%esp)
+	movl	32(%esi),%eax
+	movl	36(%esi),%ebx
+	movl	40(%esi),%ecx
+	movl	44(%esi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,32(%esp)
+	movl	%ebx,36(%esp)
+	movl	%ecx,40(%esp)
+	movl	%edx,44(%esp)
+	movl	48(%esi),%eax
+	movl	52(%esi),%ebx
+	movl	56(%esi),%ecx
+	movl	60(%esi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	movl	%eax,48(%esp)
+	movl	%ebx,52(%esp)
+	movl	%ecx,56(%esp)
+	movl	%edx,60(%esp)
+	movl	%esi,100(%esp)
+	movl	(%ebp),%eax
+	movl	4(%ebp),%ebx
+	movl	8(%ebp),%ecx
+	movl	12(%ebp),%edx
+
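+# Rounds 0-15: F(x,y,z) = Ch(x,y,z), computed as ((y^z)&x)^z, with
+# K = 0x5a827999 (1518500249 below); W comes straight from the
+# byte-swapped block on the stack.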
+	movl	%ecx,%esi
+	movl	%eax,%ebp
+	roll	$5,%ebp
+	xorl	%edx,%esi
+	addl	%edi,%ebp
+	movl	(%esp),%edi
+	andl	%ebx,%esi
+	rorl	$2,%ebx
+	xorl	%edx,%esi
+	leal	1518500249(%ebp,%edi,1),%ebp
+	addl	%esi,%ebp
+
+	movl	%ebx,%edi
+	movl	%ebp,%esi
+	roll	$5,%ebp
+	xorl	%ecx,%edi
+	addl	%edx,%ebp
+	movl	4(%esp),%edx
+	andl	%eax,%edi
+	rorl	$2,%eax
+	xorl	%ecx,%edi
+	leal	1518500249(%ebp,%edx,1),%ebp
+	addl	%edi,%ebp
+
+	movl	%eax,%edx
+	movl	%ebp,%edi
+	roll	$5,%ebp
+	xorl	%ebx,%edx
+	addl	%ecx,%ebp
+	movl	8(%esp),%ecx
+	andl	%esi,%edx
+	rorl	$2,%esi
+	xorl	%ebx,%edx
+	leal	1518500249(%ebp,%ecx,1),%ebp
+	addl	%edx,%ebp
+
+	movl	%esi,%ecx
+	movl	%ebp,%edx
+	roll	$5,%ebp
+	xorl	%eax,%ecx
+	addl	%ebx,%ebp
+	movl	12(%esp),%ebx
+	andl	%edi,%ecx
+	rorl	$2,%edi
+	xorl	%eax,%ecx
+	leal	1518500249(%ebp,%ebx,1),%ebp
+	addl	%ecx,%ebp
+
+	movl	%edi,%ebx
+	movl	%ebp,%ecx
+	roll	$5,%ebp
+	xorl	%esi,%ebx
+	addl	%eax,%ebp
+	movl	16(%esp),%eax
+	andl	%edx,%ebx
+	rorl	$2,%edx
+	xorl	%esi,%ebx
+	leal	1518500249(%ebp,%eax,1),%ebp
+	addl	%ebx,%ebp
+
+	movl	%edx,%eax
+	movl	%ebp,%ebx
+	roll	$5,%ebp
+	xorl	%edi,%eax
+	addl	%esi,%ebp
+	movl	20(%esp),%esi
+	andl	%ecx,%eax
+	rorl	$2,%ecx
+	xorl	%edi,%eax
+	leal	1518500249(%ebp,%esi,1),%ebp
+	addl	%eax,%ebp
+
+	movl	%ecx,%esi
+	movl	%ebp,%eax
+	roll	$5,%ebp
+	xorl	%edx,%esi
+	addl	%edi,%ebp
+	movl	24(%esp),%edi
+	andl	%ebx,%esi
+	rorl	$2,%ebx
+	xorl	%edx,%esi
+	leal	1518500249(%ebp,%edi,1),%ebp
+	addl	%esi,%ebp
+
+	movl	%ebx,%edi
+	movl	%ebp,%esi
+	roll	$5,%ebp
+	xorl	%ecx,%edi
+	addl	%edx,%ebp
+	movl	28(%esp),%edx
+	andl	%eax,%edi
+	rorl	$2,%eax
+	xorl	%ecx,%edi
+	leal	1518500249(%ebp,%edx,1),%ebp
+	addl	%edi,%ebp
+
+	movl	%eax,%edx
+	movl	%ebp,%edi
+	roll	$5,%ebp
+	xorl	%ebx,%edx
+	addl	%ecx,%ebp
+	movl	32(%esp),%ecx
+	andl	%esi,%edx
+	rorl	$2,%esi
+	xorl	%ebx,%edx
+	leal	1518500249(%ebp,%ecx,1),%ebp
+	addl	%edx,%ebp
+
+	movl	%esi,%ecx
+	movl	%ebp,%edx
+	roll	$5,%ebp
+	xorl	%eax,%ecx
+	addl	%ebx,%ebp
+	movl	36(%esp),%ebx
+	andl	%edi,%ecx
+	rorl	$2,%edi
+	xorl	%eax,%ecx
+	leal	1518500249(%ebp,%ebx,1),%ebp
+	addl	%ecx,%ebp
+
+	movl	%edi,%ebx
+	movl	%ebp,%ecx
+	roll	$5,%ebp
+	xorl	%esi,%ebx
+	addl	%eax,%ebp
+	movl	40(%esp),%eax
+	andl	%edx,%ebx
+	rorl	$2,%edx
+	xorl	%esi,%ebx
+	leal	1518500249(%ebp,%eax,1),%ebp
+	addl	%ebx,%ebp
+
+	movl	%edx,%eax
+	movl	%ebp,%ebx
+	roll	$5,%ebp
+	xorl	%edi,%eax
+	addl	%esi,%ebp
+	movl	44(%esp),%esi
+	andl	%ecx,%eax
+	rorl	$2,%ecx
+	xorl	%edi,%eax
+	leal	1518500249(%ebp,%esi,1),%ebp
+	addl	%eax,%ebp
+
+	movl	%ecx,%esi
+	movl	%ebp,%eax
+	roll	$5,%ebp
+	xorl	%edx,%esi
+	addl	%edi,%ebp
+	movl	48(%esp),%edi
+	andl	%ebx,%esi
+	rorl	$2,%ebx
+	xorl	%edx,%esi
+	leal	1518500249(%ebp,%edi,1),%ebp
+	addl	%esi,%ebp
+
+	movl	%ebx,%edi
+	movl	%ebp,%esi
+	roll	$5,%ebp
+	xorl	%ecx,%edi
+	addl	%edx,%ebp
+	movl	52(%esp),%edx
+	andl	%eax,%edi
+	rorl	$2,%eax
+	xorl	%ecx,%edi
+	leal	1518500249(%ebp,%edx,1),%ebp
+	addl	%edi,%ebp
+
+	movl	%eax,%edx
+	movl	%ebp,%edi
+	roll	$5,%ebp
+	xorl	%ebx,%edx
+	addl	%ecx,%ebp
+	movl	56(%esp),%ecx
+	andl	%esi,%edx
+	rorl	$2,%esi
+	xorl	%ebx,%edx
+	leal	1518500249(%ebp,%ecx,1),%ebp
+	addl	%edx,%ebp
+
+	movl	%esi,%ecx
+	movl	%ebp,%edx
+	roll	$5,%ebp
+	xorl	%eax,%ecx
+	addl	%ebx,%ebp
+	movl	60(%esp),%ebx
+	andl	%edi,%ecx
+	rorl	$2,%edi
+	xorl	%eax,%ecx
+	leal	1518500249(%ebp,%ebx,1),%ebp
+	movl	(%esp),%ebx
+	addl	%ebp,%ecx
+
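+# Rounds 16-19: message expansion begins -- each new word is
+# W[t] = rol1(W[t-3] ^ W[t-8] ^ W[t-14] ^ W[t-16]) -- still with
+# K = 0x5a827999.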
+	movl	%edi,%ebp
+	xorl	8(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	32(%esp),%ebx
+	andl	%edx,%ebp
+	xorl	52(%esp),%ebx
+	roll	$1,%ebx
+	xorl	%esi,%ebp
+	addl	%ebp,%eax
+	movl	%ecx,%ebp
+	rorl	$2,%edx
+	movl	%ebx,(%esp)
+	roll	$5,%ebp
+	leal	1518500249(%ebx,%eax,1),%ebx
+	movl	4(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%edx,%ebp
+	xorl	12(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	36(%esp),%eax
+	andl	%ecx,%ebp
+	xorl	56(%esp),%eax
+	roll	$1,%eax
+	xorl	%edi,%ebp
+	addl	%ebp,%esi
+	movl	%ebx,%ebp
+	rorl	$2,%ecx
+	movl	%eax,4(%esp)
+	roll	$5,%ebp
+	leal	1518500249(%eax,%esi,1),%eax
+	movl	8(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ecx,%ebp
+	xorl	16(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	40(%esp),%esi
+	andl	%ebx,%ebp
+	xorl	60(%esp),%esi
+	roll	$1,%esi
+	xorl	%edx,%ebp
+	addl	%ebp,%edi
+	movl	%eax,%ebp
+	rorl	$2,%ebx
+	movl	%esi,8(%esp)
+	roll	$5,%ebp
+	leal	1518500249(%esi,%edi,1),%esi
+	movl	12(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%ebx,%ebp
+	xorl	20(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	44(%esp),%edi
+	andl	%eax,%ebp
+	xorl	(%esp),%edi
+	roll	$1,%edi
+	xorl	%ecx,%ebp
+	addl	%ebp,%edx
+	movl	%esi,%ebp
+	rorl	$2,%eax
+	movl	%edi,12(%esp)
+	roll	$5,%ebp
+	leal	1518500249(%edi,%edx,1),%edi
+	movl	16(%esp),%edx
+	addl	%ebp,%edi
+
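+# rounds 20-39: F = b^c^d (parity), K = 0x6ed9eba1 (1859775393)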
+	movl	%esi,%ebp
+	xorl	24(%esp),%edx
+	xorl	%eax,%ebp
+	xorl	48(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	4(%esp),%edx
+	roll	$1,%edx
+	addl	%ebp,%ecx
+	rorl	$2,%esi
+	movl	%edi,%ebp
+	roll	$5,%ebp
+	movl	%edx,16(%esp)
+	leal	1859775393(%edx,%ecx,1),%edx
+	movl	20(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%edi,%ebp
+	xorl	28(%esp),%ecx
+	xorl	%esi,%ebp
+	xorl	52(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	8(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebp,%ebx
+	rorl	$2,%edi
+	movl	%edx,%ebp
+	roll	$5,%ebp
+	movl	%ecx,20(%esp)
+	leal	1859775393(%ecx,%ebx,1),%ecx
+	movl	24(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edx,%ebp
+	xorl	32(%esp),%ebx
+	xorl	%edi,%ebp
+	xorl	56(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	12(%esp),%ebx
+	roll	$1,%ebx
+	addl	%ebp,%eax
+	rorl	$2,%edx
+	movl	%ecx,%ebp
+	roll	$5,%ebp
+	movl	%ebx,24(%esp)
+	leal	1859775393(%ebx,%eax,1),%ebx
+	movl	28(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%ecx,%ebp
+	xorl	36(%esp),%eax
+	xorl	%edx,%ebp
+	xorl	60(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	16(%esp),%eax
+	roll	$1,%eax
+	addl	%ebp,%esi
+	rorl	$2,%ecx
+	movl	%ebx,%ebp
+	roll	$5,%ebp
+	movl	%eax,28(%esp)
+	leal	1859775393(%eax,%esi,1),%eax
+	movl	32(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ebx,%ebp
+	xorl	40(%esp),%esi
+	xorl	%ecx,%ebp
+	xorl	(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	20(%esp),%esi
+	roll	$1,%esi
+	addl	%ebp,%edi
+	rorl	$2,%ebx
+	movl	%eax,%ebp
+	roll	$5,%ebp
+	movl	%esi,32(%esp)
+	leal	1859775393(%esi,%edi,1),%esi
+	movl	36(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%eax,%ebp
+	xorl	44(%esp),%edi
+	xorl	%ebx,%ebp
+	xorl	4(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	24(%esp),%edi
+	roll	$1,%edi
+	addl	%ebp,%edx
+	rorl	$2,%eax
+	movl	%esi,%ebp
+	roll	$5,%ebp
+	movl	%edi,36(%esp)
+	leal	1859775393(%edi,%edx,1),%edi
+	movl	40(%esp),%edx
+	addl	%ebp,%edi
+
+	movl	%esi,%ebp
+	xorl	48(%esp),%edx
+	xorl	%eax,%ebp
+	xorl	8(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	28(%esp),%edx
+	roll	$1,%edx
+	addl	%ebp,%ecx
+	rorl	$2,%esi
+	movl	%edi,%ebp
+	roll	$5,%ebp
+	movl	%edx,40(%esp)
+	leal	1859775393(%edx,%ecx,1),%edx
+	movl	44(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%edi,%ebp
+	xorl	52(%esp),%ecx
+	xorl	%esi,%ebp
+	xorl	12(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	32(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebp,%ebx
+	rorl	$2,%edi
+	movl	%edx,%ebp
+	roll	$5,%ebp
+	movl	%ecx,44(%esp)
+	leal	1859775393(%ecx,%ebx,1),%ecx
+	movl	48(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edx,%ebp
+	xorl	56(%esp),%ebx
+	xorl	%edi,%ebp
+	xorl	16(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	36(%esp),%ebx
+	roll	$1,%ebx
+	addl	%ebp,%eax
+	rorl	$2,%edx
+	movl	%ecx,%ebp
+	roll	$5,%ebp
+	movl	%ebx,48(%esp)
+	leal	1859775393(%ebx,%eax,1),%ebx
+	movl	52(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%ecx,%ebp
+	xorl	60(%esp),%eax
+	xorl	%edx,%ebp
+	xorl	20(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	40(%esp),%eax
+	roll	$1,%eax
+	addl	%ebp,%esi
+	rorl	$2,%ecx
+	movl	%ebx,%ebp
+	roll	$5,%ebp
+	movl	%eax,52(%esp)
+	leal	1859775393(%eax,%esi,1),%eax
+	movl	56(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ebx,%ebp
+	xorl	(%esp),%esi
+	xorl	%ecx,%ebp
+	xorl	24(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	44(%esp),%esi
+	roll	$1,%esi
+	addl	%ebp,%edi
+	rorl	$2,%ebx
+	movl	%eax,%ebp
+	roll	$5,%ebp
+	movl	%esi,56(%esp)
+	leal	1859775393(%esi,%edi,1),%esi
+	movl	60(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%eax,%ebp
+	xorl	4(%esp),%edi
+	xorl	%ebx,%ebp
+	xorl	28(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	48(%esp),%edi
+	roll	$1,%edi
+	addl	%ebp,%edx
+	rorl	$2,%eax
+	movl	%esi,%ebp
+	roll	$5,%ebp
+	movl	%edi,60(%esp)
+	leal	1859775393(%edi,%edx,1),%edi
+	movl	(%esp),%edx
+	addl	%ebp,%edi
+
+	movl	%esi,%ebp
+	xorl	8(%esp),%edx
+	xorl	%eax,%ebp
+	xorl	32(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	52(%esp),%edx
+	roll	$1,%edx
+	addl	%ebp,%ecx
+	rorl	$2,%esi
+	movl	%edi,%ebp
+	roll	$5,%ebp
+	movl	%edx,(%esp)
+	leal	1859775393(%edx,%ecx,1),%edx
+	movl	4(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%edi,%ebp
+	xorl	12(%esp),%ecx
+	xorl	%esi,%ebp
+	xorl	36(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	56(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebp,%ebx
+	rorl	$2,%edi
+	movl	%edx,%ebp
+	roll	$5,%ebp
+	movl	%ecx,4(%esp)
+	leal	1859775393(%ecx,%ebx,1),%ecx
+	movl	8(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edx,%ebp
+	xorl	16(%esp),%ebx
+	xorl	%edi,%ebp
+	xorl	40(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	60(%esp),%ebx
+	roll	$1,%ebx
+	addl	%ebp,%eax
+	rorl	$2,%edx
+	movl	%ecx,%ebp
+	roll	$5,%ebp
+	movl	%ebx,8(%esp)
+	leal	1859775393(%ebx,%eax,1),%ebx
+	movl	12(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%ecx,%ebp
+	xorl	20(%esp),%eax
+	xorl	%edx,%ebp
+	xorl	44(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	(%esp),%eax
+	roll	$1,%eax
+	addl	%ebp,%esi
+	rorl	$2,%ecx
+	movl	%ebx,%ebp
+	roll	$5,%ebp
+	movl	%eax,12(%esp)
+	leal	1859775393(%eax,%esi,1),%eax
+	movl	16(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ebx,%ebp
+	xorl	24(%esp),%esi
+	xorl	%ecx,%ebp
+	xorl	48(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	4(%esp),%esi
+	roll	$1,%esi
+	addl	%ebp,%edi
+	rorl	$2,%ebx
+	movl	%eax,%ebp
+	roll	$5,%ebp
+	movl	%esi,16(%esp)
+	leal	1859775393(%esi,%edi,1),%esi
+	movl	20(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%eax,%ebp
+	xorl	28(%esp),%edi
+	xorl	%ebx,%ebp
+	xorl	52(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	8(%esp),%edi
+	roll	$1,%edi
+	addl	%ebp,%edx
+	rorl	$2,%eax
+	movl	%esi,%ebp
+	roll	$5,%ebp
+	movl	%edi,20(%esp)
+	leal	1859775393(%edi,%edx,1),%edi
+	movl	24(%esp),%edx
+	addl	%ebp,%edi
+
+	movl	%esi,%ebp
+	xorl	32(%esp),%edx
+	xorl	%eax,%ebp
+	xorl	56(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	12(%esp),%edx
+	roll	$1,%edx
+	addl	%ebp,%ecx
+	rorl	$2,%esi
+	movl	%edi,%ebp
+	roll	$5,%ebp
+	movl	%edx,24(%esp)
+	leal	1859775393(%edx,%ecx,1),%edx
+	movl	28(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%edi,%ebp
+	xorl	36(%esp),%ecx
+	xorl	%esi,%ebp
+	xorl	60(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	16(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebp,%ebx
+	rorl	$2,%edi
+	movl	%edx,%ebp
+	roll	$5,%ebp
+	movl	%ecx,28(%esp)
+	leal	1859775393(%ecx,%ebx,1),%ecx
+	movl	32(%esp),%ebx
+	addl	%ebp,%ecx
+
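+# rounds 40-59: F = Maj(b,c,d), computed as (b&(c^d)) + (c&d),
+# K = 0x8f1bbcdc (2400959708)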
+	movl	%edi,%ebp
+	xorl	40(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	(%esp),%ebx
+	andl	%edx,%ebp
+	xorl	20(%esp),%ebx
+	roll	$1,%ebx
+	addl	%eax,%ebp
+	rorl	$2,%edx
+	movl	%ecx,%eax
+	roll	$5,%eax
+	movl	%ebx,32(%esp)
+	leal	2400959708(%ebx,%ebp,1),%ebx
+	movl	%edi,%ebp
+	addl	%eax,%ebx
+	andl	%esi,%ebp
+	movl	36(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%edx,%ebp
+	xorl	44(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	4(%esp),%eax
+	andl	%ecx,%ebp
+	xorl	24(%esp),%eax
+	roll	$1,%eax
+	addl	%esi,%ebp
+	rorl	$2,%ecx
+	movl	%ebx,%esi
+	roll	$5,%esi
+	movl	%eax,36(%esp)
+	leal	2400959708(%eax,%ebp,1),%eax
+	movl	%edx,%ebp
+	addl	%esi,%eax
+	andl	%edi,%ebp
+	movl	40(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ecx,%ebp
+	xorl	48(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	8(%esp),%esi
+	andl	%ebx,%ebp
+	xorl	28(%esp),%esi
+	roll	$1,%esi
+	addl	%edi,%ebp
+	rorl	$2,%ebx
+	movl	%eax,%edi
+	roll	$5,%edi
+	movl	%esi,40(%esp)
+	leal	2400959708(%esi,%ebp,1),%esi
+	movl	%ecx,%ebp
+	addl	%edi,%esi
+	andl	%edx,%ebp
+	movl	44(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%ebx,%ebp
+	xorl	52(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	12(%esp),%edi
+	andl	%eax,%ebp
+	xorl	32(%esp),%edi
+	roll	$1,%edi
+	addl	%edx,%ebp
+	rorl	$2,%eax
+	movl	%esi,%edx
+	roll	$5,%edx
+	movl	%edi,44(%esp)
+	leal	2400959708(%edi,%ebp,1),%edi
+	movl	%ebx,%ebp
+	addl	%edx,%edi
+	andl	%ecx,%ebp
+	movl	48(%esp),%edx
+	addl	%ebp,%edi
+
+	movl	%eax,%ebp
+	xorl	56(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	16(%esp),%edx
+	andl	%esi,%ebp
+	xorl	36(%esp),%edx
+	roll	$1,%edx
+	addl	%ecx,%ebp
+	rorl	$2,%esi
+	movl	%edi,%ecx
+	roll	$5,%ecx
+	movl	%edx,48(%esp)
+	leal	2400959708(%edx,%ebp,1),%edx
+	movl	%eax,%ebp
+	addl	%ecx,%edx
+	andl	%ebx,%ebp
+	movl	52(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%esi,%ebp
+	xorl	60(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	20(%esp),%ecx
+	andl	%edi,%ebp
+	xorl	40(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebx,%ebp
+	rorl	$2,%edi
+	movl	%edx,%ebx
+	roll	$5,%ebx
+	movl	%ecx,52(%esp)
+	leal	2400959708(%ecx,%ebp,1),%ecx
+	movl	%esi,%ebp
+	addl	%ebx,%ecx
+	andl	%eax,%ebp
+	movl	56(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edi,%ebp
+	xorl	(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	24(%esp),%ebx
+	andl	%edx,%ebp
+	xorl	44(%esp),%ebx
+	roll	$1,%ebx
+	addl	%eax,%ebp
+	rorl	$2,%edx
+	movl	%ecx,%eax
+	roll	$5,%eax
+	movl	%ebx,56(%esp)
+	leal	2400959708(%ebx,%ebp,1),%ebx
+	movl	%edi,%ebp
+	addl	%eax,%ebx
+	andl	%esi,%ebp
+	movl	60(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%edx,%ebp
+	xorl	4(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	28(%esp),%eax
+	andl	%ecx,%ebp
+	xorl	48(%esp),%eax
+	roll	$1,%eax
+	addl	%esi,%ebp
+	rorl	$2,%ecx
+	movl	%ebx,%esi
+	roll	$5,%esi
+	movl	%eax,60(%esp)
+	leal	2400959708(%eax,%ebp,1),%eax
+	movl	%edx,%ebp
+	addl	%esi,%eax
+	andl	%edi,%ebp
+	movl	(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ecx,%ebp
+	xorl	8(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	32(%esp),%esi
+	andl	%ebx,%ebp
+	xorl	52(%esp),%esi
+	roll	$1,%esi
+	addl	%edi,%ebp
+	rorl	$2,%ebx
+	movl	%eax,%edi
+	roll	$5,%edi
+	movl	%esi,(%esp)
+	leal	2400959708(%esi,%ebp,1),%esi
+	movl	%ecx,%ebp
+	addl	%edi,%esi
+	andl	%edx,%ebp
+	movl	4(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%ebx,%ebp
+	xorl	12(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	36(%esp),%edi
+	andl	%eax,%ebp
+	xorl	56(%esp),%edi
+	roll	$1,%edi
+	addl	%edx,%ebp
+	rorl	$2,%eax
+	movl	%esi,%edx
+	roll	$5,%edx
+	movl	%edi,4(%esp)
+	leal	2400959708(%edi,%ebp,1),%edi
+	movl	%ebx,%ebp
+	addl	%edx,%edi
+	andl	%ecx,%ebp
+	movl	8(%esp),%edx
+	addl	%ebp,%edi
+
+	movl	%eax,%ebp
+	xorl	16(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	40(%esp),%edx
+	andl	%esi,%ebp
+	xorl	60(%esp),%edx
+	roll	$1,%edx
+	addl	%ecx,%ebp
+	rorl	$2,%esi
+	movl	%edi,%ecx
+	roll	$5,%ecx
+	movl	%edx,8(%esp)
+	leal	2400959708(%edx,%ebp,1),%edx
+	movl	%eax,%ebp
+	addl	%ecx,%edx
+	andl	%ebx,%ebp
+	movl	12(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%esi,%ebp
+	xorl	20(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	44(%esp),%ecx
+	andl	%edi,%ebp
+	xorl	(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebx,%ebp
+	rorl	$2,%edi
+	movl	%edx,%ebx
+	roll	$5,%ebx
+	movl	%ecx,12(%esp)
+	leal	2400959708(%ecx,%ebp,1),%ecx
+	movl	%esi,%ebp
+	addl	%ebx,%ecx
+	andl	%eax,%ebp
+	movl	16(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edi,%ebp
+	xorl	24(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	48(%esp),%ebx
+	andl	%edx,%ebp
+	xorl	4(%esp),%ebx
+	roll	$1,%ebx
+	addl	%eax,%ebp
+	rorl	$2,%edx
+	movl	%ecx,%eax
+	roll	$5,%eax
+	movl	%ebx,16(%esp)
+	leal	2400959708(%ebx,%ebp,1),%ebx
+	movl	%edi,%ebp
+	addl	%eax,%ebx
+	andl	%esi,%ebp
+	movl	20(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%edx,%ebp
+	xorl	28(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	52(%esp),%eax
+	andl	%ecx,%ebp
+	xorl	8(%esp),%eax
+	roll	$1,%eax
+	addl	%esi,%ebp
+	rorl	$2,%ecx
+	movl	%ebx,%esi
+	roll	$5,%esi
+	movl	%eax,20(%esp)
+	leal	2400959708(%eax,%ebp,1),%eax
+	movl	%edx,%ebp
+	addl	%esi,%eax
+	andl	%edi,%ebp
+	movl	24(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ecx,%ebp
+	xorl	32(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	56(%esp),%esi
+	andl	%ebx,%ebp
+	xorl	12(%esp),%esi
+	roll	$1,%esi
+	addl	%edi,%ebp
+	rorl	$2,%ebx
+	movl	%eax,%edi
+	roll	$5,%edi
+	movl	%esi,24(%esp)
+	leal	2400959708(%esi,%ebp,1),%esi
+	movl	%ecx,%ebp
+	addl	%edi,%esi
+	andl	%edx,%ebp
+	movl	28(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%ebx,%ebp
+	xorl	36(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	60(%esp),%edi
+	andl	%eax,%ebp
+	xorl	16(%esp),%edi
+	roll	$1,%edi
+	addl	%edx,%ebp
+	rorl	$2,%eax
+	movl	%esi,%edx
+	roll	$5,%edx
+	movl	%edi,28(%esp)
+	leal	2400959708(%edi,%ebp,1),%edi
+	movl	%ebx,%ebp
+	addl	%edx,%edi
+	andl	%ecx,%ebp
+	movl	32(%esp),%edx
+	addl	%ebp,%edi
+
+	movl	%eax,%ebp
+	xorl	40(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	(%esp),%edx
+	andl	%esi,%ebp
+	xorl	20(%esp),%edx
+	roll	$1,%edx
+	addl	%ecx,%ebp
+	rorl	$2,%esi
+	movl	%edi,%ecx
+	roll	$5,%ecx
+	movl	%edx,32(%esp)
+	leal	2400959708(%edx,%ebp,1),%edx
+	movl	%eax,%ebp
+	addl	%ecx,%edx
+	andl	%ebx,%ebp
+	movl	36(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%esi,%ebp
+	xorl	44(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	4(%esp),%ecx
+	andl	%edi,%ebp
+	xorl	24(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebx,%ebp
+	rorl	$2,%edi
+	movl	%edx,%ebx
+	roll	$5,%ebx
+	movl	%ecx,36(%esp)
+	leal	2400959708(%ecx,%ebp,1),%ecx
+	movl	%esi,%ebp
+	addl	%ebx,%ecx
+	andl	%eax,%ebp
+	movl	40(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edi,%ebp
+	xorl	48(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	8(%esp),%ebx
+	andl	%edx,%ebp
+	xorl	28(%esp),%ebx
+	roll	$1,%ebx
+	addl	%eax,%ebp
+	rorl	$2,%edx
+	movl	%ecx,%eax
+	roll	$5,%eax
+	movl	%ebx,40(%esp)
+	leal	2400959708(%ebx,%ebp,1),%ebx
+	movl	%edi,%ebp
+	addl	%eax,%ebx
+	andl	%esi,%ebp
+	movl	44(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%edx,%ebp
+	xorl	52(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	12(%esp),%eax
+	andl	%ecx,%ebp
+	xorl	32(%esp),%eax
+	roll	$1,%eax
+	addl	%esi,%ebp
+	rorl	$2,%ecx
+	movl	%ebx,%esi
+	roll	$5,%esi
+	movl	%eax,44(%esp)
+	leal	2400959708(%eax,%ebp,1),%eax
+	movl	%edx,%ebp
+	addl	%esi,%eax
+	andl	%edi,%ebp
+	movl	48(%esp),%esi
+	addl	%ebp,%eax
+
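+# rounds 60-79: F = b^c^d again, K = 0xca62c1d6 (3395469782)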
+	movl	%ebx,%ebp
+	xorl	56(%esp),%esi
+	xorl	%ecx,%ebp
+	xorl	16(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	36(%esp),%esi
+	roll	$1,%esi
+	addl	%ebp,%edi
+	rorl	$2,%ebx
+	movl	%eax,%ebp
+	roll	$5,%ebp
+	movl	%esi,48(%esp)
+	leal	3395469782(%esi,%edi,1),%esi
+	movl	52(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%eax,%ebp
+	xorl	60(%esp),%edi
+	xorl	%ebx,%ebp
+	xorl	20(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	40(%esp),%edi
+	roll	$1,%edi
+	addl	%ebp,%edx
+	rorl	$2,%eax
+	movl	%esi,%ebp
+	roll	$5,%ebp
+	movl	%edi,52(%esp)
+	leal	3395469782(%edi,%edx,1),%edi
+	movl	56(%esp),%edx
+	addl	%ebp,%edi
+
+	movl	%esi,%ebp
+	xorl	(%esp),%edx
+	xorl	%eax,%ebp
+	xorl	24(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	44(%esp),%edx
+	roll	$1,%edx
+	addl	%ebp,%ecx
+	rorl	$2,%esi
+	movl	%edi,%ebp
+	roll	$5,%ebp
+	movl	%edx,56(%esp)
+	leal	3395469782(%edx,%ecx,1),%edx
+	movl	60(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%edi,%ebp
+	xorl	4(%esp),%ecx
+	xorl	%esi,%ebp
+	xorl	28(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	48(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebp,%ebx
+	rorl	$2,%edi
+	movl	%edx,%ebp
+	roll	$5,%ebp
+	movl	%ecx,60(%esp)
+	leal	3395469782(%ecx,%ebx,1),%ecx
+	movl	(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edx,%ebp
+	xorl	8(%esp),%ebx
+	xorl	%edi,%ebp
+	xorl	32(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	52(%esp),%ebx
+	roll	$1,%ebx
+	addl	%ebp,%eax
+	rorl	$2,%edx
+	movl	%ecx,%ebp
+	roll	$5,%ebp
+	movl	%ebx,(%esp)
+	leal	3395469782(%ebx,%eax,1),%ebx
+	movl	4(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%ecx,%ebp
+	xorl	12(%esp),%eax
+	xorl	%edx,%ebp
+	xorl	36(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	56(%esp),%eax
+	roll	$1,%eax
+	addl	%ebp,%esi
+	rorl	$2,%ecx
+	movl	%ebx,%ebp
+	roll	$5,%ebp
+	movl	%eax,4(%esp)
+	leal	3395469782(%eax,%esi,1),%eax
+	movl	8(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ebx,%ebp
+	xorl	16(%esp),%esi
+	xorl	%ecx,%ebp
+	xorl	40(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	60(%esp),%esi
+	roll	$1,%esi
+	addl	%ebp,%edi
+	rorl	$2,%ebx
+	movl	%eax,%ebp
+	roll	$5,%ebp
+	movl	%esi,8(%esp)
+	leal	3395469782(%esi,%edi,1),%esi
+	movl	12(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%eax,%ebp
+	xorl	20(%esp),%edi
+	xorl	%ebx,%ebp
+	xorl	44(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	(%esp),%edi
+	roll	$1,%edi
+	addl	%ebp,%edx
+	rorl	$2,%eax
+	movl	%esi,%ebp
+	roll	$5,%ebp
+	movl	%edi,12(%esp)
+	leal	3395469782(%edi,%edx,1),%edi
+	movl	16(%esp),%edx
+	addl	%ebp,%edi
+
+	movl	%esi,%ebp
+	xorl	24(%esp),%edx
+	xorl	%eax,%ebp
+	xorl	48(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	4(%esp),%edx
+	roll	$1,%edx
+	addl	%ebp,%ecx
+	rorl	$2,%esi
+	movl	%edi,%ebp
+	roll	$5,%ebp
+	movl	%edx,16(%esp)
+	leal	3395469782(%edx,%ecx,1),%edx
+	movl	20(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%edi,%ebp
+	xorl	28(%esp),%ecx
+	xorl	%esi,%ebp
+	xorl	52(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	8(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebp,%ebx
+	rorl	$2,%edi
+	movl	%edx,%ebp
+	roll	$5,%ebp
+	movl	%ecx,20(%esp)
+	leal	3395469782(%ecx,%ebx,1),%ecx
+	movl	24(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edx,%ebp
+	xorl	32(%esp),%ebx
+	xorl	%edi,%ebp
+	xorl	56(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	12(%esp),%ebx
+	roll	$1,%ebx
+	addl	%ebp,%eax
+	rorl	$2,%edx
+	movl	%ecx,%ebp
+	roll	$5,%ebp
+	movl	%ebx,24(%esp)
+	leal	3395469782(%ebx,%eax,1),%ebx
+	movl	28(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%ecx,%ebp
+	xorl	36(%esp),%eax
+	xorl	%edx,%ebp
+	xorl	60(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	16(%esp),%eax
+	roll	$1,%eax
+	addl	%ebp,%esi
+	rorl	$2,%ecx
+	movl	%ebx,%ebp
+	roll	$5,%ebp
+	movl	%eax,28(%esp)
+	leal	3395469782(%eax,%esi,1),%eax
+	movl	32(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ebx,%ebp
+	xorl	40(%esp),%esi
+	xorl	%ecx,%ebp
+	xorl	(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	20(%esp),%esi
+	roll	$1,%esi
+	addl	%ebp,%edi
+	rorl	$2,%ebx
+	movl	%eax,%ebp
+	roll	$5,%ebp
+	movl	%esi,32(%esp)
+	leal	3395469782(%esi,%edi,1),%esi
+	movl	36(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%eax,%ebp
+	xorl	44(%esp),%edi
+	xorl	%ebx,%ebp
+	xorl	4(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	24(%esp),%edi
+	roll	$1,%edi
+	addl	%ebp,%edx
+	rorl	$2,%eax
+	movl	%esi,%ebp
+	roll	$5,%ebp
+	movl	%edi,36(%esp)
+	leal	3395469782(%edi,%edx,1),%edi
+	movl	40(%esp),%edx
+	addl	%ebp,%edi
+
+	movl	%esi,%ebp
+	xorl	48(%esp),%edx
+	xorl	%eax,%ebp
+	xorl	8(%esp),%edx
+	xorl	%ebx,%ebp
+	xorl	28(%esp),%edx
+	roll	$1,%edx
+	addl	%ebp,%ecx
+	rorl	$2,%esi
+	movl	%edi,%ebp
+	roll	$5,%ebp
+	movl	%edx,40(%esp)
+	leal	3395469782(%edx,%ecx,1),%edx
+	movl	44(%esp),%ecx
+	addl	%ebp,%edx
+
+	movl	%edi,%ebp
+	xorl	52(%esp),%ecx
+	xorl	%esi,%ebp
+	xorl	12(%esp),%ecx
+	xorl	%eax,%ebp
+	xorl	32(%esp),%ecx
+	roll	$1,%ecx
+	addl	%ebp,%ebx
+	rorl	$2,%edi
+	movl	%edx,%ebp
+	roll	$5,%ebp
+	movl	%ecx,44(%esp)
+	leal	3395469782(%ecx,%ebx,1),%ecx
+	movl	48(%esp),%ebx
+	addl	%ebp,%ecx
+
+	movl	%edx,%ebp
+	xorl	56(%esp),%ebx
+	xorl	%edi,%ebp
+	xorl	16(%esp),%ebx
+	xorl	%esi,%ebp
+	xorl	36(%esp),%ebx
+	roll	$1,%ebx
+	addl	%ebp,%eax
+	rorl	$2,%edx
+	movl	%ecx,%ebp
+	roll	$5,%ebp
+	movl	%ebx,48(%esp)
+	leal	3395469782(%ebx,%eax,1),%ebx
+	movl	52(%esp),%eax
+	addl	%ebp,%ebx
+
+	movl	%ecx,%ebp
+	xorl	60(%esp),%eax
+	xorl	%edx,%ebp
+	xorl	20(%esp),%eax
+	xorl	%edi,%ebp
+	xorl	40(%esp),%eax
+	roll	$1,%eax
+	addl	%ebp,%esi
+	rorl	$2,%ecx
+	movl	%ebx,%ebp
+	roll	$5,%ebp
+	leal	3395469782(%eax,%esi,1),%eax
+	movl	56(%esp),%esi
+	addl	%ebp,%eax
+
+	movl	%ebx,%ebp
+	xorl	(%esp),%esi
+	xorl	%ecx,%ebp
+	xorl	24(%esp),%esi
+	xorl	%edx,%ebp
+	xorl	44(%esp),%esi
+	roll	$1,%esi
+	addl	%ebp,%edi
+	rorl	$2,%ebx
+	movl	%eax,%ebp
+	roll	$5,%ebp
+	leal	3395469782(%esi,%edi,1),%esi
+	movl	60(%esp),%edi
+	addl	%ebp,%esi
+
+	movl	%eax,%ebp
+	xorl	4(%esp),%edi
+	xorl	%ebx,%ebp
+	xorl	28(%esp),%edi
+	xorl	%ecx,%ebp
+	xorl	48(%esp),%edi
+	roll	$1,%edi
+	addl	%ebp,%edx
+	rorl	$2,%eax
+	movl	%esi,%ebp
+	roll	$5,%ebp
+	leal	3395469782(%edi,%edx,1),%edi
+	addl	%ebp,%edi
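+# 80 rounds done: fold the working variables into the hash state
+# (context pointer saved at 96(%esp)), advance the data pointer by
+# one 64-byte block and loop while input remains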
+	movl	96(%esp),%ebp
+	movl	100(%esp),%edx
+	addl	(%ebp),%edi
+	addl	4(%ebp),%esi
+	addl	8(%ebp),%eax
+	addl	12(%ebp),%ebx
+	addl	16(%ebp),%ecx
+	movl	%edi,(%ebp)
+	addl	$64,%edx
+	movl	%esi,4(%ebp)
+	cmpl	104(%esp),%edx
+	movl	%eax,8(%ebp)
+	movl	%ecx,%edi
+	movl	%ebx,12(%ebp)
+	movl	%edx,%esi
+	movl	%ecx,16(%ebp)
+	jb	.L002loop
+	addl	$76,%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	sha1_block_data_order,.-.L_sha1_block_data_order_begin
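+# SSSE3 code path: the same 80 rounds, with the message schedule for
+# the next 16 words computed four at a time in %xmm registers and
+# interleaved with the scalar round arithmetic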
+.type	_sha1_block_data_order_ssse3,@function
+.align	16
+_sha1_block_data_order_ssse3:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	call	.L003pic_point
+.L003pic_point:
+	popl	%ebp
+	leal	.LK_XX_XX-.L003pic_point(%ebp),%ebp
+.Lssse3_shortcut:
+	movdqa	(%ebp),%xmm7
+	movdqa	16(%ebp),%xmm0
+	movdqa	32(%ebp),%xmm1
+	movdqa	48(%ebp),%xmm2
+	movdqa	64(%ebp),%xmm6
+	movl	20(%esp),%edi
+	movl	24(%esp),%ebp
+	movl	28(%esp),%edx
+	movl	%esp,%esi
+	subl	$208,%esp
+	andl	$-64,%esp
+	movdqa	%xmm0,112(%esp)
+	movdqa	%xmm1,128(%esp)
+	movdqa	%xmm2,144(%esp)
+	shll	$6,%edx
+	movdqa	%xmm7,160(%esp)
+	addl	%ebp,%edx
+	movdqa	%xmm6,176(%esp)
+	addl	$64,%ebp
+	movl	%edi,192(%esp)
+	movl	%ebp,196(%esp)
+	movl	%edx,200(%esp)
+	movl	%esi,204(%esp)
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	16(%edi),%edi
+	movl	%ebx,%esi
+	movdqu	-64(%ebp),%xmm0
+	movdqu	-48(%ebp),%xmm1
+	movdqu	-32(%ebp),%xmm2
+	movdqu	-16(%ebp),%xmm3
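+# the .byte 102,15,56,0,NN sequences below hand-encode
+# pshufb %xmm6,%xmmN (66 0F 38 00) for assemblers without SSSE3
+# support, byte-swapping the input words to big-endian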
+.byte	102,15,56,0,198
+.byte	102,15,56,0,206
+.byte	102,15,56,0,214
+	movdqa	%xmm7,96(%esp)
+.byte	102,15,56,0,222
+	paddd	%xmm7,%xmm0
+	paddd	%xmm7,%xmm1
+	paddd	%xmm7,%xmm2
+	movdqa	%xmm0,(%esp)
+	psubd	%xmm7,%xmm0
+	movdqa	%xmm1,16(%esp)
+	psubd	%xmm7,%xmm1
+	movdqa	%xmm2,32(%esp)
+	psubd	%xmm7,%xmm2
+	movdqa	%xmm1,%xmm4
+	jmp	.L004loop
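+# main block loop: scalar rounds interleaved with schedule updates;
+# the .byte 102,15,58,15,NN,8 sequences encode palignr $8,%xmmS,%xmmD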
+.align	16
+.L004loop:
+	addl	(%esp),%edi
+	xorl	%edx,%ecx
+.byte	102,15,58,15,224,8
+	movdqa	%xmm3,%xmm6
+	movl	%eax,%ebp
+	roll	$5,%eax
+	paddd	%xmm3,%xmm7
+	movdqa	%xmm0,64(%esp)
+	andl	%ecx,%esi
+	xorl	%edx,%ecx
+	psrldq	$4,%xmm6
+	xorl	%edx,%esi
+	addl	%eax,%edi
+	pxor	%xmm0,%xmm4
+	rorl	$2,%ebx
+	addl	%esi,%edi
+	pxor	%xmm2,%xmm6
+	addl	4(%esp),%edx
+	xorl	%ecx,%ebx
+	movl	%edi,%esi
+	roll	$5,%edi
+	pxor	%xmm6,%xmm4
+	andl	%ebx,%ebp
+	xorl	%ecx,%ebx
+	movdqa	%xmm7,48(%esp)
+	xorl	%ecx,%ebp
+	addl	%edi,%edx
+	movdqa	%xmm4,%xmm0
+	movdqa	%xmm4,%xmm6
+	rorl	$7,%eax
+	addl	%ebp,%edx
+	addl	8(%esp),%ecx
+	xorl	%ebx,%eax
+	pslldq	$12,%xmm0
+	paddd	%xmm4,%xmm4
+	movl	%edx,%ebp
+	roll	$5,%edx
+	andl	%eax,%esi
+	xorl	%ebx,%eax
+	psrld	$31,%xmm6
+	xorl	%ebx,%esi
+	addl	%edx,%ecx
+	movdqa	%xmm0,%xmm7
+	rorl	$7,%edi
+	addl	%esi,%ecx
+	psrld	$30,%xmm0
+	por	%xmm6,%xmm4
+	addl	12(%esp),%ebx
+	xorl	%eax,%edi
+	movl	%ecx,%esi
+	roll	$5,%ecx
+	pslld	$2,%xmm7
+	pxor	%xmm0,%xmm4
+	andl	%edi,%ebp
+	xorl	%eax,%edi
+	movdqa	96(%esp),%xmm0
+	xorl	%eax,%ebp
+	addl	%ecx,%ebx
+	pxor	%xmm7,%xmm4
+	movdqa	%xmm2,%xmm5
+	rorl	$7,%edx
+	addl	%ebp,%ebx
+	addl	16(%esp),%eax
+	xorl	%edi,%edx
+.byte	102,15,58,15,233,8
+	movdqa	%xmm4,%xmm7
+	movl	%ebx,%ebp
+	roll	$5,%ebx
+	paddd	%xmm4,%xmm0
+	movdqa	%xmm1,80(%esp)
+	andl	%edx,%esi
+	xorl	%edi,%edx
+	psrldq	$4,%xmm7
+	xorl	%edi,%esi
+	addl	%ebx,%eax
+	pxor	%xmm1,%xmm5
+	rorl	$7,%ecx
+	addl	%esi,%eax
+	pxor	%xmm3,%xmm7
+	addl	20(%esp),%edi
+	xorl	%edx,%ecx
+	movl	%eax,%esi
+	roll	$5,%eax
+	pxor	%xmm7,%xmm5
+	andl	%ecx,%ebp
+	xorl	%edx,%ecx
+	movdqa	%xmm0,(%esp)
+	xorl	%edx,%ebp
+	addl	%eax,%edi
+	movdqa	%xmm5,%xmm1
+	movdqa	%xmm5,%xmm7
+	rorl	$7,%ebx
+	addl	%ebp,%edi
+	addl	24(%esp),%edx
+	xorl	%ecx,%ebx
+	pslldq	$12,%xmm1
+	paddd	%xmm5,%xmm5
+	movl	%edi,%ebp
+	roll	$5,%edi
+	andl	%ebx,%esi
+	xorl	%ecx,%ebx
+	psrld	$31,%xmm7
+	xorl	%ecx,%esi
+	addl	%edi,%edx
+	movdqa	%xmm1,%xmm0
+	rorl	$7,%eax
+	addl	%esi,%edx
+	psrld	$30,%xmm1
+	por	%xmm7,%xmm5
+	addl	28(%esp),%ecx
+	xorl	%ebx,%eax
+	movl	%edx,%esi
+	roll	$5,%edx
+	pslld	$2,%xmm0
+	pxor	%xmm1,%xmm5
+	andl	%eax,%ebp
+	xorl	%ebx,%eax
+	movdqa	112(%esp),%xmm1
+	xorl	%ebx,%ebp
+	addl	%edx,%ecx
+	pxor	%xmm0,%xmm5
+	movdqa	%xmm3,%xmm6
+	rorl	$7,%edi
+	addl	%ebp,%ecx
+	addl	32(%esp),%ebx
+	xorl	%eax,%edi
+.byte	102,15,58,15,242,8
+	movdqa	%xmm5,%xmm0
+	movl	%ecx,%ebp
+	roll	$5,%ecx
+	paddd	%xmm5,%xmm1
+	movdqa	%xmm2,96(%esp)
+	andl	%edi,%esi
+	xorl	%eax,%edi
+	psrldq	$4,%xmm0
+	xorl	%eax,%esi
+	addl	%ecx,%ebx
+	pxor	%xmm2,%xmm6
+	rorl	$7,%edx
+	addl	%esi,%ebx
+	pxor	%xmm4,%xmm0
+	addl	36(%esp),%eax
+	xorl	%edi,%edx
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	pxor	%xmm0,%xmm6
+	andl	%edx,%ebp
+	xorl	%edi,%edx
+	movdqa	%xmm1,16(%esp)
+	xorl	%edi,%ebp
+	addl	%ebx,%eax
+	movdqa	%xmm6,%xmm2
+	movdqa	%xmm6,%xmm0
+	rorl	$7,%ecx
+	addl	%ebp,%eax
+	addl	40(%esp),%edi
+	xorl	%edx,%ecx
+	pslldq	$12,%xmm2
+	paddd	%xmm6,%xmm6
+	movl	%eax,%ebp
+	roll	$5,%eax
+	andl	%ecx,%esi
+	xorl	%edx,%ecx
+	psrld	$31,%xmm0
+	xorl	%edx,%esi
+	addl	%eax,%edi
+	movdqa	%xmm2,%xmm1
+	rorl	$7,%ebx
+	addl	%esi,%edi
+	psrld	$30,%xmm2
+	por	%xmm0,%xmm6
+	addl	44(%esp),%edx
+	xorl	%ecx,%ebx
+	movdqa	64(%esp),%xmm0
+	movl	%edi,%esi
+	roll	$5,%edi
+	pslld	$2,%xmm1
+	pxor	%xmm2,%xmm6
+	andl	%ebx,%ebp
+	xorl	%ecx,%ebx
+	movdqa	112(%esp),%xmm2
+	xorl	%ecx,%ebp
+	addl	%edi,%edx
+	pxor	%xmm1,%xmm6
+	movdqa	%xmm4,%xmm7
+	rorl	$7,%eax
+	addl	%ebp,%edx
+	addl	48(%esp),%ecx
+	xorl	%ebx,%eax
+.byte	102,15,58,15,251,8
+	movdqa	%xmm6,%xmm1
+	movl	%edx,%ebp
+	roll	$5,%edx
+	paddd	%xmm6,%xmm2
+	movdqa	%xmm3,64(%esp)
+	andl	%eax,%esi
+	xorl	%ebx,%eax
+	psrldq	$4,%xmm1
+	xorl	%ebx,%esi
+	addl	%edx,%ecx
+	pxor	%xmm3,%xmm7
+	rorl	$7,%edi
+	addl	%esi,%ecx
+	pxor	%xmm5,%xmm1
+	addl	52(%esp),%ebx
+	xorl	%eax,%edi
+	movl	%ecx,%esi
+	roll	$5,%ecx
+	pxor	%xmm1,%xmm7
+	andl	%edi,%ebp
+	xorl	%eax,%edi
+	movdqa	%xmm2,32(%esp)
+	xorl	%eax,%ebp
+	addl	%ecx,%ebx
+	movdqa	%xmm7,%xmm3
+	movdqa	%xmm7,%xmm1
+	rorl	$7,%edx
+	addl	%ebp,%ebx
+	addl	56(%esp),%eax
+	xorl	%edi,%edx
+	pslldq	$12,%xmm3
+	paddd	%xmm7,%xmm7
+	movl	%ebx,%ebp
+	roll	$5,%ebx
+	andl	%edx,%esi
+	xorl	%edi,%edx
+	psrld	$31,%xmm1
+	xorl	%edi,%esi
+	addl	%ebx,%eax
+	movdqa	%xmm3,%xmm2
+	rorl	$7,%ecx
+	addl	%esi,%eax
+	psrld	$30,%xmm3
+	por	%xmm1,%xmm7
+	addl	60(%esp),%edi
+	xorl	%edx,%ecx
+	movdqa	80(%esp),%xmm1
+	movl	%eax,%esi
+	roll	$5,%eax
+	pslld	$2,%xmm2
+	pxor	%xmm3,%xmm7
+	andl	%ecx,%ebp
+	xorl	%edx,%ecx
+	movdqa	112(%esp),%xmm3
+	xorl	%edx,%ebp
+	addl	%eax,%edi
+	pxor	%xmm2,%xmm7
+	rorl	$7,%ebx
+	addl	%ebp,%edi
+	movdqa	%xmm7,%xmm2
+	addl	(%esp),%edx
+	pxor	%xmm4,%xmm0
+.byte	102,15,58,15,214,8
+	xorl	%ecx,%ebx
+	movl	%edi,%ebp
+	roll	$5,%edi
+	pxor	%xmm1,%xmm0
+	movdqa	%xmm4,80(%esp)
+	andl	%ebx,%esi
+	xorl	%ecx,%ebx
+	movdqa	%xmm3,%xmm4
+	paddd	%xmm7,%xmm3
+	xorl	%ecx,%esi
+	addl	%edi,%edx
+	pxor	%xmm2,%xmm0
+	rorl	$7,%eax
+	addl	%esi,%edx
+	addl	4(%esp),%ecx
+	xorl	%ebx,%eax
+	movdqa	%xmm0,%xmm2
+	movdqa	%xmm3,48(%esp)
+	movl	%edx,%esi
+	roll	$5,%edx
+	andl	%eax,%ebp
+	xorl	%ebx,%eax
+	pslld	$2,%xmm0
+	xorl	%ebx,%ebp
+	addl	%edx,%ecx
+	psrld	$30,%xmm2
+	rorl	$7,%edi
+	addl	%ebp,%ecx
+	addl	8(%esp),%ebx
+	xorl	%eax,%edi
+	movl	%ecx,%ebp
+	roll	$5,%ecx
+	por	%xmm2,%xmm0
+	andl	%edi,%esi
+	xorl	%eax,%edi
+	movdqa	96(%esp),%xmm2
+	xorl	%eax,%esi
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%esi,%ebx
+	addl	12(%esp),%eax
+	movdqa	%xmm0,%xmm3
+	xorl	%edi,%edx
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	andl	%edx,%ebp
+	xorl	%edi,%edx
+	xorl	%edi,%ebp
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%ebp,%eax
+	addl	16(%esp),%edi
+	pxor	%xmm5,%xmm1
+.byte	102,15,58,15,223,8
+	xorl	%edx,%esi
+	movl	%eax,%ebp
+	roll	$5,%eax
+	pxor	%xmm2,%xmm1
+	movdqa	%xmm5,96(%esp)
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	movdqa	%xmm4,%xmm5
+	paddd	%xmm0,%xmm4
+	rorl	$7,%ebx
+	addl	%esi,%edi
+	pxor	%xmm3,%xmm1
+	addl	20(%esp),%edx
+	xorl	%ecx,%ebp
+	movl	%edi,%esi
+	roll	$5,%edi
+	movdqa	%xmm1,%xmm3
+	movdqa	%xmm4,(%esp)
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	rorl	$7,%eax
+	addl	%ebp,%edx
+	pslld	$2,%xmm1
+	addl	24(%esp),%ecx
+	xorl	%ebx,%esi
+	psrld	$30,%xmm3
+	movl	%edx,%ebp
+	roll	$5,%edx
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	rorl	$7,%edi
+	addl	%esi,%ecx
+	por	%xmm3,%xmm1
+	addl	28(%esp),%ebx
+	xorl	%eax,%ebp
+	movdqa	64(%esp),%xmm3
+	movl	%ecx,%esi
+	roll	$5,%ecx
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	movdqa	%xmm1,%xmm4
+	addl	%ebp,%ebx
+	addl	32(%esp),%eax
+	pxor	%xmm6,%xmm2
+.byte	102,15,58,15,224,8
+	xorl	%edi,%esi
+	movl	%ebx,%ebp
+	roll	$5,%ebx
+	pxor	%xmm3,%xmm2
+	movdqa	%xmm6,64(%esp)
+	xorl	%edx,%esi
+	addl	%ebx,%eax
+	movdqa	128(%esp),%xmm6
+	paddd	%xmm1,%xmm5
+	rorl	$7,%ecx
+	addl	%esi,%eax
+	pxor	%xmm4,%xmm2
+	addl	36(%esp),%edi
+	xorl	%edx,%ebp
+	movl	%eax,%esi
+	roll	$5,%eax
+	movdqa	%xmm2,%xmm4
+	movdqa	%xmm5,16(%esp)
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	rorl	$7,%ebx
+	addl	%ebp,%edi
+	pslld	$2,%xmm2
+	addl	40(%esp),%edx
+	xorl	%ecx,%esi
+	psrld	$30,%xmm4
+	movl	%edi,%ebp
+	roll	$5,%edi
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	rorl	$7,%eax
+	addl	%esi,%edx
+	por	%xmm4,%xmm2
+	addl	44(%esp),%ecx
+	xorl	%ebx,%ebp
+	movdqa	80(%esp),%xmm4
+	movl	%edx,%esi
+	roll	$5,%edx
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	rorl	$7,%edi
+	movdqa	%xmm2,%xmm5
+	addl	%ebp,%ecx
+	addl	48(%esp),%ebx
+	pxor	%xmm7,%xmm3
+.byte	102,15,58,15,233,8
+	xorl	%eax,%esi
+	movl	%ecx,%ebp
+	roll	$5,%ecx
+	pxor	%xmm4,%xmm3
+	movdqa	%xmm7,80(%esp)
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	movdqa	%xmm6,%xmm7
+	paddd	%xmm2,%xmm6
+	rorl	$7,%edx
+	addl	%esi,%ebx
+	pxor	%xmm5,%xmm3
+	addl	52(%esp),%eax
+	xorl	%edi,%ebp
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	movdqa	%xmm3,%xmm5
+	movdqa	%xmm6,32(%esp)
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%ebp,%eax
+	pslld	$2,%xmm3
+	addl	56(%esp),%edi
+	xorl	%edx,%esi
+	psrld	$30,%xmm5
+	movl	%eax,%ebp
+	roll	$5,%eax
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	rorl	$7,%ebx
+	addl	%esi,%edi
+	por	%xmm5,%xmm3
+	addl	60(%esp),%edx
+	xorl	%ecx,%ebp
+	movdqa	96(%esp),%xmm5
+	movl	%edi,%esi
+	roll	$5,%edi
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	rorl	$7,%eax
+	movdqa	%xmm3,%xmm6
+	addl	%ebp,%edx
+	addl	(%esp),%ecx
+	pxor	%xmm0,%xmm4
+.byte	102,15,58,15,242,8
+	xorl	%ebx,%esi
+	movl	%edx,%ebp
+	roll	$5,%edx
+	pxor	%xmm5,%xmm4
+	movdqa	%xmm0,96(%esp)
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	movdqa	%xmm7,%xmm0
+	paddd	%xmm3,%xmm7
+	rorl	$7,%edi
+	addl	%esi,%ecx
+	pxor	%xmm6,%xmm4
+	addl	4(%esp),%ebx
+	xorl	%eax,%ebp
+	movl	%ecx,%esi
+	roll	$5,%ecx
+	movdqa	%xmm4,%xmm6
+	movdqa	%xmm7,48(%esp)
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%ebp,%ebx
+	pslld	$2,%xmm4
+	addl	8(%esp),%eax
+	xorl	%edi,%esi
+	psrld	$30,%xmm6
+	movl	%ebx,%ebp
+	roll	$5,%ebx
+	xorl	%edx,%esi
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%esi,%eax
+	por	%xmm6,%xmm4
+	addl	12(%esp),%edi
+	xorl	%edx,%ebp
+	movdqa	64(%esp),%xmm6
+	movl	%eax,%esi
+	roll	$5,%eax
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	rorl	$7,%ebx
+	movdqa	%xmm4,%xmm7
+	addl	%ebp,%edi
+	addl	16(%esp),%edx
+	pxor	%xmm1,%xmm5
+.byte	102,15,58,15,251,8
+	xorl	%ecx,%esi
+	movl	%edi,%ebp
+	roll	$5,%edi
+	pxor	%xmm6,%xmm5
+	movdqa	%xmm1,64(%esp)
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	movdqa	%xmm0,%xmm1
+	paddd	%xmm4,%xmm0
+	rorl	$7,%eax
+	addl	%esi,%edx
+	pxor	%xmm7,%xmm5
+	addl	20(%esp),%ecx
+	xorl	%ebx,%ebp
+	movl	%edx,%esi
+	roll	$5,%edx
+	movdqa	%xmm5,%xmm7
+	movdqa	%xmm0,(%esp)
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	rorl	$7,%edi
+	addl	%ebp,%ecx
+	pslld	$2,%xmm5
+	addl	24(%esp),%ebx
+	xorl	%eax,%esi
+	psrld	$30,%xmm7
+	movl	%ecx,%ebp
+	roll	$5,%ecx
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%esi,%ebx
+	por	%xmm7,%xmm5
+	addl	28(%esp),%eax
+	xorl	%edi,%ebp
+	movdqa	80(%esp),%xmm7
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	movdqa	%xmm5,%xmm0
+	addl	%ebp,%eax
+	movl	%ecx,%ebp
+	pxor	%xmm2,%xmm6
+.byte	102,15,58,15,196,8
+	xorl	%edx,%ecx
+	addl	32(%esp),%edi
+	andl	%edx,%ebp
+	pxor	%xmm7,%xmm6
+	movdqa	%xmm2,80(%esp)
+	andl	%ecx,%esi
+	rorl	$7,%ebx
+	movdqa	%xmm1,%xmm2
+	paddd	%xmm5,%xmm1
+	addl	%ebp,%edi
+	movl	%eax,%ebp
+	pxor	%xmm0,%xmm6
+	roll	$5,%eax
+	addl	%esi,%edi
+	xorl	%edx,%ecx
+	addl	%eax,%edi
+	movdqa	%xmm6,%xmm0
+	movdqa	%xmm1,16(%esp)
+	movl	%ebx,%esi
+	xorl	%ecx,%ebx
+	addl	36(%esp),%edx
+	andl	%ecx,%esi
+	pslld	$2,%xmm6
+	andl	%ebx,%ebp
+	rorl	$7,%eax
+	psrld	$30,%xmm0
+	addl	%esi,%edx
+	movl	%edi,%esi
+	roll	$5,%edi
+	addl	%ebp,%edx
+	xorl	%ecx,%ebx
+	addl	%edi,%edx
+	por	%xmm0,%xmm6
+	movl	%eax,%ebp
+	xorl	%ebx,%eax
+	movdqa	96(%esp),%xmm0
+	addl	40(%esp),%ecx
+	andl	%ebx,%ebp
+	andl	%eax,%esi
+	rorl	$7,%edi
+	addl	%ebp,%ecx
+	movdqa	%xmm6,%xmm1
+	movl	%edx,%ebp
+	roll	$5,%edx
+	addl	%esi,%ecx
+	xorl	%ebx,%eax
+	addl	%edx,%ecx
+	movl	%edi,%esi
+	xorl	%eax,%edi
+	addl	44(%esp),%ebx
+	andl	%eax,%esi
+	andl	%edi,%ebp
+	rorl	$7,%edx
+	addl	%esi,%ebx
+	movl	%ecx,%esi
+	roll	$5,%ecx
+	addl	%ebp,%ebx
+	xorl	%eax,%edi
+	addl	%ecx,%ebx
+	movl	%edx,%ebp
+	pxor	%xmm3,%xmm7
+.byte	102,15,58,15,205,8
+	xorl	%edi,%edx
+	addl	48(%esp),%eax
+	andl	%edi,%ebp
+	pxor	%xmm0,%xmm7
+	movdqa	%xmm3,96(%esp)
+	andl	%edx,%esi
+	rorl	$7,%ecx
+	movdqa	144(%esp),%xmm3
+	paddd	%xmm6,%xmm2
+	addl	%ebp,%eax
+	movl	%ebx,%ebp
+	pxor	%xmm1,%xmm7
+	roll	$5,%ebx
+	addl	%esi,%eax
+	xorl	%edi,%edx
+	addl	%ebx,%eax
+	movdqa	%xmm7,%xmm1
+	movdqa	%xmm2,32(%esp)
+	movl	%ecx,%esi
+	xorl	%edx,%ecx
+	addl	52(%esp),%edi
+	andl	%edx,%esi
+	pslld	$2,%xmm7
+	andl	%ecx,%ebp
+	rorl	$7,%ebx
+	psrld	$30,%xmm1
+	addl	%esi,%edi
+	movl	%eax,%esi
+	roll	$5,%eax
+	addl	%ebp,%edi
+	xorl	%edx,%ecx
+	addl	%eax,%edi
+	por	%xmm1,%xmm7
+	movl	%ebx,%ebp
+	xorl	%ecx,%ebx
+	movdqa	64(%esp),%xmm1
+	addl	56(%esp),%edx
+	andl	%ecx,%ebp
+	andl	%ebx,%esi
+	rorl	$7,%eax
+	addl	%ebp,%edx
+	movdqa	%xmm7,%xmm2
+	movl	%edi,%ebp
+	roll	$5,%edi
+	addl	%esi,%edx
+	xorl	%ecx,%ebx
+	addl	%edi,%edx
+	movl	%eax,%esi
+	xorl	%ebx,%eax
+	addl	60(%esp),%ecx
+	andl	%ebx,%esi
+	andl	%eax,%ebp
+	rorl	$7,%edi
+	addl	%esi,%ecx
+	movl	%edx,%esi
+	roll	$5,%edx
+	addl	%ebp,%ecx
+	xorl	%ebx,%eax
+	addl	%edx,%ecx
+	movl	%edi,%ebp
+	pxor	%xmm4,%xmm0
+.byte	102,15,58,15,214,8
+	xorl	%eax,%edi
+	addl	(%esp),%ebx
+	andl	%eax,%ebp
+	pxor	%xmm1,%xmm0
+	movdqa	%xmm4,64(%esp)
+	andl	%edi,%esi
+	rorl	$7,%edx
+	movdqa	%xmm3,%xmm4
+	paddd	%xmm7,%xmm3
+	addl	%ebp,%ebx
+	movl	%ecx,%ebp
+	pxor	%xmm2,%xmm0
+	roll	$5,%ecx
+	addl	%esi,%ebx
+	xorl	%eax,%edi
+	addl	%ecx,%ebx
+	movdqa	%xmm0,%xmm2
+	movdqa	%xmm3,48(%esp)
+	movl	%edx,%esi
+	xorl	%edi,%edx
+	addl	4(%esp),%eax
+	andl	%edi,%esi
+	pslld	$2,%xmm0
+	andl	%edx,%ebp
+	rorl	$7,%ecx
+	psrld	$30,%xmm2
+	addl	%esi,%eax
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	addl	%ebp,%eax
+	xorl	%edi,%edx
+	addl	%ebx,%eax
+	por	%xmm2,%xmm0
+	movl	%ecx,%ebp
+	xorl	%edx,%ecx
+	movdqa	80(%esp),%xmm2
+	addl	8(%esp),%edi
+	andl	%edx,%ebp
+	andl	%ecx,%esi
+	rorl	$7,%ebx
+	addl	%ebp,%edi
+	movdqa	%xmm0,%xmm3
+	movl	%eax,%ebp
+	roll	$5,%eax
+	addl	%esi,%edi
+	xorl	%edx,%ecx
+	addl	%eax,%edi
+	movl	%ebx,%esi
+	xorl	%ecx,%ebx
+	addl	12(%esp),%edx
+	andl	%ecx,%esi
+	andl	%ebx,%ebp
+	rorl	$7,%eax
+	addl	%esi,%edx
+	movl	%edi,%esi
+	roll	$5,%edi
+	addl	%ebp,%edx
+	xorl	%ecx,%ebx
+	addl	%edi,%edx
+	movl	%eax,%ebp
+	pxor	%xmm5,%xmm1
+.byte	102,15,58,15,223,8
+	xorl	%ebx,%eax
+	addl	16(%esp),%ecx
+	andl	%ebx,%ebp
+	pxor	%xmm2,%xmm1
+	movdqa	%xmm5,80(%esp)
+	andl	%eax,%esi
+	rorl	$7,%edi
+	movdqa	%xmm4,%xmm5
+	paddd	%xmm0,%xmm4
+	addl	%ebp,%ecx
+	movl	%edx,%ebp
+	pxor	%xmm3,%xmm1
+	roll	$5,%edx
+	addl	%esi,%ecx
+	xorl	%ebx,%eax
+	addl	%edx,%ecx
+	movdqa	%xmm1,%xmm3
+	movdqa	%xmm4,(%esp)
+	movl	%edi,%esi
+	xorl	%eax,%edi
+	addl	20(%esp),%ebx
+	andl	%eax,%esi
+	pslld	$2,%xmm1
+	andl	%edi,%ebp
+	rorl	$7,%edx
+	psrld	$30,%xmm3
+	addl	%esi,%ebx
+	movl	%ecx,%esi
+	roll	$5,%ecx
+	addl	%ebp,%ebx
+	xorl	%eax,%edi
+	addl	%ecx,%ebx
+	por	%xmm3,%xmm1
+	movl	%edx,%ebp
+	xorl	%edi,%edx
+	movdqa	96(%esp),%xmm3
+	addl	24(%esp),%eax
+	andl	%edi,%ebp
+	andl	%edx,%esi
+	rorl	$7,%ecx
+	addl	%ebp,%eax
+	movdqa	%xmm1,%xmm4
+	movl	%ebx,%ebp
+	roll	$5,%ebx
+	addl	%esi,%eax
+	xorl	%edi,%edx
+	addl	%ebx,%eax
+	movl	%ecx,%esi
+	xorl	%edx,%ecx
+	addl	28(%esp),%edi
+	andl	%edx,%esi
+	andl	%ecx,%ebp
+	rorl	$7,%ebx
+	addl	%esi,%edi
+	movl	%eax,%esi
+	roll	$5,%eax
+	addl	%ebp,%edi
+	xorl	%edx,%ecx
+	addl	%eax,%edi
+	movl	%ebx,%ebp
+	pxor	%xmm6,%xmm2
+.byte	102,15,58,15,224,8
+	xorl	%ecx,%ebx
+	addl	32(%esp),%edx
+	andl	%ecx,%ebp
+	pxor	%xmm3,%xmm2
+	movdqa	%xmm6,96(%esp)
+	andl	%ebx,%esi
+	rorl	$7,%eax
+	movdqa	%xmm5,%xmm6
+	paddd	%xmm1,%xmm5
+	addl	%ebp,%edx
+	movl	%edi,%ebp
+	pxor	%xmm4,%xmm2
+	roll	$5,%edi
+	addl	%esi,%edx
+	xorl	%ecx,%ebx
+	addl	%edi,%edx
+	movdqa	%xmm2,%xmm4
+	movdqa	%xmm5,16(%esp)
+	movl	%eax,%esi
+	xorl	%ebx,%eax
+	addl	36(%esp),%ecx
+	andl	%ebx,%esi
+	pslld	$2,%xmm2
+	andl	%eax,%ebp
+	rorl	$7,%edi
+	psrld	$30,%xmm4
+	addl	%esi,%ecx
+	movl	%edx,%esi
+	roll	$5,%edx
+	addl	%ebp,%ecx
+	xorl	%ebx,%eax
+	addl	%edx,%ecx
+	por	%xmm4,%xmm2
+	movl	%edi,%ebp
+	xorl	%eax,%edi
+	movdqa	64(%esp),%xmm4
+	addl	40(%esp),%ebx
+	andl	%eax,%ebp
+	andl	%edi,%esi
+	rorl	$7,%edx
+	addl	%ebp,%ebx
+	movdqa	%xmm2,%xmm5
+	movl	%ecx,%ebp
+	roll	$5,%ecx
+	addl	%esi,%ebx
+	xorl	%eax,%edi
+	addl	%ecx,%ebx
+	movl	%edx,%esi
+	xorl	%edi,%edx
+	addl	44(%esp),%eax
+	andl	%edi,%esi
+	andl	%edx,%ebp
+	rorl	$7,%ecx
+	addl	%esi,%eax
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	addl	%ebp,%eax
+	xorl	%edi,%edx
+	addl	%ebx,%eax
+	addl	48(%esp),%edi
+	pxor	%xmm7,%xmm3
+.byte	102,15,58,15,233,8
+	xorl	%edx,%esi
+	movl	%eax,%ebp
+	roll	$5,%eax
+	pxor	%xmm4,%xmm3
+	movdqa	%xmm7,64(%esp)
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	movdqa	%xmm6,%xmm7
+	paddd	%xmm2,%xmm6
+	rorl	$7,%ebx
+	addl	%esi,%edi
+	pxor	%xmm5,%xmm3
+	addl	52(%esp),%edx
+	xorl	%ecx,%ebp
+	movl	%edi,%esi
+	roll	$5,%edi
+	movdqa	%xmm3,%xmm5
+	movdqa	%xmm6,32(%esp)
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	rorl	$7,%eax
+	addl	%ebp,%edx
+	pslld	$2,%xmm3
+	addl	56(%esp),%ecx
+	xorl	%ebx,%esi
+	psrld	$30,%xmm5
+	movl	%edx,%ebp
+	roll	$5,%edx
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	rorl	$7,%edi
+	addl	%esi,%ecx
+	por	%xmm5,%xmm3
+	addl	60(%esp),%ebx
+	xorl	%eax,%ebp
+	movl	%ecx,%esi
+	roll	$5,%ecx
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%ebp,%ebx
+	addl	(%esp),%eax
+	paddd	%xmm3,%xmm7
+	xorl	%edi,%esi
+	movl	%ebx,%ebp
+	roll	$5,%ebx
+	xorl	%edx,%esi
+	movdqa	%xmm7,48(%esp)
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%esi,%eax
+	addl	4(%esp),%edi
+	xorl	%edx,%ebp
+	movl	%eax,%esi
+	roll	$5,%eax
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	rorl	$7,%ebx
+	addl	%ebp,%edi
+	addl	8(%esp),%edx
+	xorl	%ecx,%esi
+	movl	%edi,%ebp
+	roll	$5,%edi
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	rorl	$7,%eax
+	addl	%esi,%edx
+	addl	12(%esp),%ecx
+	xorl	%ebx,%ebp
+	movl	%edx,%esi
+	roll	$5,%edx
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	rorl	$7,%edi
+	addl	%ebp,%ecx
+	movl	196(%esp),%ebp
+	cmpl	200(%esp),%ebp
+	je	.L005done
+	movdqa	160(%esp),%xmm7
+	movdqa	176(%esp),%xmm6
+	movdqu	(%ebp),%xmm0
+	movdqu	16(%ebp),%xmm1
+	movdqu	32(%ebp),%xmm2
+	movdqu	48(%ebp),%xmm3
+	addl	$64,%ebp
+.byte	102,15,56,0,198
+	movl	%ebp,196(%esp)
+	movdqa	%xmm7,96(%esp)
+	addl	16(%esp),%ebx
+	xorl	%eax,%esi
+.byte	102,15,56,0,206
+	movl	%ecx,%ebp
+	roll	$5,%ecx
+	paddd	%xmm7,%xmm0
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%esi,%ebx
+	movdqa	%xmm0,(%esp)
+	addl	20(%esp),%eax
+	xorl	%edi,%ebp
+	psubd	%xmm7,%xmm0
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%ebp,%eax
+	addl	24(%esp),%edi
+	xorl	%edx,%esi
+	movl	%eax,%ebp
+	roll	$5,%eax
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	rorl	$7,%ebx
+	addl	%esi,%edi
+	addl	28(%esp),%edx
+	xorl	%ecx,%ebp
+	movl	%edi,%esi
+	roll	$5,%edi
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	rorl	$7,%eax
+	addl	%ebp,%edx
+	addl	32(%esp),%ecx
+	xorl	%ebx,%esi
+.byte	102,15,56,0,214
+	movl	%edx,%ebp
+	roll	$5,%edx
+	paddd	%xmm7,%xmm1
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	rorl	$7,%edi
+	addl	%esi,%ecx
+	movdqa	%xmm1,16(%esp)
+	addl	36(%esp),%ebx
+	xorl	%eax,%ebp
+	psubd	%xmm7,%xmm1
+	movl	%ecx,%esi
+	roll	$5,%ecx
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%ebp,%ebx
+	addl	40(%esp),%eax
+	xorl	%edi,%esi
+	movl	%ebx,%ebp
+	roll	$5,%ebx
+	xorl	%edx,%esi
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%esi,%eax
+	addl	44(%esp),%edi
+	xorl	%edx,%ebp
+	movl	%eax,%esi
+	roll	$5,%eax
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	rorl	$7,%ebx
+	addl	%ebp,%edi
+	addl	48(%esp),%edx
+	xorl	%ecx,%esi
+.byte	102,15,56,0,222
+	movl	%edi,%ebp
+	roll	$5,%edi
+	paddd	%xmm7,%xmm2
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	rorl	$7,%eax
+	addl	%esi,%edx
+	movdqa	%xmm2,32(%esp)
+	addl	52(%esp),%ecx
+	xorl	%ebx,%ebp
+	psubd	%xmm7,%xmm2
+	movl	%edx,%esi
+	roll	$5,%edx
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	rorl	$7,%edi
+	addl	%ebp,%ecx
+	addl	56(%esp),%ebx
+	xorl	%eax,%esi
+	movl	%ecx,%ebp
+	roll	$5,%ecx
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%esi,%ebx
+	addl	60(%esp),%eax
+	xorl	%edi,%ebp
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%ebp,%eax
+	movl	192(%esp),%ebp
+	addl	(%ebp),%eax
+	addl	4(%ebp),%esi
+	addl	8(%ebp),%ecx
+	movl	%eax,(%ebp)
+	addl	12(%ebp),%edx
+	movl	%esi,4(%ebp)
+	addl	16(%ebp),%edi
+	movl	%ecx,8(%ebp)
+	movl	%esi,%ebx
+	movl	%edx,12(%ebp)
+	movl	%edi,16(%ebp)
+	movdqa	%xmm1,%xmm4
+	jmp	.L004loop
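+# .L005done: tail of the final block; runs the remaining rounds
+# without preloading another block, then restores the caller's %esp
+# saved at 204(%esp)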
+.align	16
+.L005done:
+	addl	16(%esp),%ebx
+	xorl	%eax,%esi
+	movl	%ecx,%ebp
+	roll	$5,%ecx
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%esi,%ebx
+	addl	20(%esp),%eax
+	xorl	%edi,%ebp
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%ebp,%eax
+	addl	24(%esp),%edi
+	xorl	%edx,%esi
+	movl	%eax,%ebp
+	roll	$5,%eax
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	rorl	$7,%ebx
+	addl	%esi,%edi
+	addl	28(%esp),%edx
+	xorl	%ecx,%ebp
+	movl	%edi,%esi
+	roll	$5,%edi
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	rorl	$7,%eax
+	addl	%ebp,%edx
+	addl	32(%esp),%ecx
+	xorl	%ebx,%esi
+	movl	%edx,%ebp
+	roll	$5,%edx
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	rorl	$7,%edi
+	addl	%esi,%ecx
+	addl	36(%esp),%ebx
+	xorl	%eax,%ebp
+	movl	%ecx,%esi
+	roll	$5,%ecx
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%ebp,%ebx
+	addl	40(%esp),%eax
+	xorl	%edi,%esi
+	movl	%ebx,%ebp
+	roll	$5,%ebx
+	xorl	%edx,%esi
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%esi,%eax
+	addl	44(%esp),%edi
+	xorl	%edx,%ebp
+	movl	%eax,%esi
+	roll	$5,%eax
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	rorl	$7,%ebx
+	addl	%ebp,%edi
+	addl	48(%esp),%edx
+	xorl	%ecx,%esi
+	movl	%edi,%ebp
+	roll	$5,%edi
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	rorl	$7,%eax
+	addl	%esi,%edx
+	addl	52(%esp),%ecx
+	xorl	%ebx,%ebp
+	movl	%edx,%esi
+	roll	$5,%edx
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	rorl	$7,%edi
+	addl	%ebp,%ecx
+	addl	56(%esp),%ebx
+	xorl	%eax,%esi
+	movl	%ecx,%ebp
+	roll	$5,%ecx
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	rorl	$7,%edx
+	addl	%esi,%ebx
+	addl	60(%esp),%eax
+	xorl	%edi,%ebp
+	movl	%ebx,%esi
+	roll	$5,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	rorl	$7,%ecx
+	addl	%ebp,%eax
+	movl	192(%esp),%ebp
+	addl	(%ebp),%eax
+	movl	204(%esp),%esp
+	addl	4(%ebp),%esi
+	addl	8(%ebp),%ecx
+	movl	%eax,(%ebp)
+	addl	12(%ebp),%edx
+	movl	%esi,4(%ebp)
+	addl	16(%ebp),%edi
+	movl	%ecx,8(%ebp)
+	movl	%edx,12(%ebp)
+	movl	%edi,16(%ebp)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	_sha1_block_data_order_ssse3,.-_sha1_block_data_order_ssse3
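+# AVX code path: identical round structure to the SSSE3 version,
+# using VEX-encoded three-operand instructions
+# (vpshufb/vpalignr/vpaddd) and beginning with vzeroall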
+.type	_sha1_block_data_order_avx,@function
+.align	16
+_sha1_block_data_order_avx:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	call	.L006pic_point
+.L006pic_point:
+	popl	%ebp
+	leal	.LK_XX_XX-.L006pic_point(%ebp),%ebp
+.Lavx_shortcut:
+	vzeroall
+	vmovdqa	(%ebp),%xmm7
+	vmovdqa	16(%ebp),%xmm0
+	vmovdqa	32(%ebp),%xmm1
+	vmovdqa	48(%ebp),%xmm2
+	vmovdqa	64(%ebp),%xmm6
+	movl	20(%esp),%edi
+	movl	24(%esp),%ebp
+	movl	28(%esp),%edx
+	movl	%esp,%esi
+	subl	$208,%esp
+	andl	$-64,%esp
+	vmovdqa	%xmm0,112(%esp)
+	vmovdqa	%xmm1,128(%esp)
+	vmovdqa	%xmm2,144(%esp)
+	shll	$6,%edx
+	vmovdqa	%xmm7,160(%esp)
+	addl	%ebp,%edx
+	vmovdqa	%xmm6,176(%esp)
+	addl	$64,%ebp
+	movl	%edi,192(%esp)
+	movl	%ebp,196(%esp)
+	movl	%edx,200(%esp)
+	movl	%esi,204(%esp)
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	movl	16(%edi),%edi
+	movl	%ebx,%esi
+	vmovdqu	-64(%ebp),%xmm0
+	vmovdqu	-48(%ebp),%xmm1
+	vmovdqu	-32(%ebp),%xmm2
+	vmovdqu	-16(%ebp),%xmm3
+	vpshufb	%xmm6,%xmm0,%xmm0
+	vpshufb	%xmm6,%xmm1,%xmm1
+	vpshufb	%xmm6,%xmm2,%xmm2
+	vmovdqa	%xmm7,96(%esp)
+	vpshufb	%xmm6,%xmm3,%xmm3
+	vpaddd	%xmm7,%xmm0,%xmm4
+	vpaddd	%xmm7,%xmm1,%xmm5
+	vpaddd	%xmm7,%xmm2,%xmm6
+	vmovdqa	%xmm4,(%esp)
+	vmovdqa	%xmm5,16(%esp)
+	vmovdqa	%xmm6,32(%esp)
+	jmp	.L007loop
+.align	16
+.L007loop:
+	addl	(%esp),%edi
+	xorl	%edx,%ecx
+	vpalignr	$8,%xmm0,%xmm1,%xmm4
+	movl	%eax,%ebp
+	shldl	$5,%eax,%eax
+	vpaddd	%xmm3,%xmm7,%xmm7
+	vmovdqa	%xmm0,64(%esp)
+	andl	%ecx,%esi
+	xorl	%edx,%ecx
+	vpsrldq	$4,%xmm3,%xmm6
+	xorl	%edx,%esi
+	addl	%eax,%edi
+	vpxor	%xmm0,%xmm4,%xmm4
+	shrdl	$2,%ebx,%ebx
+	addl	%esi,%edi
+	vpxor	%xmm2,%xmm6,%xmm6
+	addl	4(%esp),%edx
+	xorl	%ecx,%ebx
+	vmovdqa	%xmm7,48(%esp)
+	movl	%edi,%esi
+	shldl	$5,%edi,%edi
+	vpxor	%xmm6,%xmm4,%xmm4
+	andl	%ebx,%ebp
+	xorl	%ecx,%ebx
+	xorl	%ecx,%ebp
+	addl	%edi,%edx
+	vpsrld	$31,%xmm4,%xmm6
+	shrdl	$7,%eax,%eax
+	addl	%ebp,%edx
+	addl	8(%esp),%ecx
+	xorl	%ebx,%eax
+	vpslldq	$12,%xmm4,%xmm0
+	vpaddd	%xmm4,%xmm4,%xmm4
+	movl	%edx,%ebp
+	shldl	$5,%edx,%edx
+	andl	%eax,%esi
+	xorl	%ebx,%eax
+	vpsrld	$30,%xmm0,%xmm7
+	vpor	%xmm6,%xmm4,%xmm4
+	xorl	%ebx,%esi
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%esi,%ecx
+	vpslld	$2,%xmm0,%xmm0
+	addl	12(%esp),%ebx
+	xorl	%eax,%edi
+	vpxor	%xmm7,%xmm4,%xmm4
+	movl	%ecx,%esi
+	shldl	$5,%ecx,%ecx
+	andl	%edi,%ebp
+	xorl	%eax,%edi
+	vpxor	%xmm0,%xmm4,%xmm4
+	xorl	%eax,%ebp
+	addl	%ecx,%ebx
+	vmovdqa	96(%esp),%xmm0
+	shrdl	$7,%edx,%edx
+	addl	%ebp,%ebx
+	addl	16(%esp),%eax
+	xorl	%edi,%edx
+	vpalignr	$8,%xmm1,%xmm2,%xmm5
+	movl	%ebx,%ebp
+	shldl	$5,%ebx,%ebx
+	vpaddd	%xmm4,%xmm0,%xmm0
+	vmovdqa	%xmm1,80(%esp)
+	andl	%edx,%esi
+	xorl	%edi,%edx
+	vpsrldq	$4,%xmm4,%xmm7
+	xorl	%edi,%esi
+	addl	%ebx,%eax
+	vpxor	%xmm1,%xmm5,%xmm5
+	shrdl	$7,%ecx,%ecx
+	addl	%esi,%eax
+	vpxor	%xmm3,%xmm7,%xmm7
+	addl	20(%esp),%edi
+	xorl	%edx,%ecx
+	vmovdqa	%xmm0,(%esp)
+	movl	%eax,%esi
+	shldl	$5,%eax,%eax
+	vpxor	%xmm7,%xmm5,%xmm5
+	andl	%ecx,%ebp
+	xorl	%edx,%ecx
+	xorl	%edx,%ebp
+	addl	%eax,%edi
+	vpsrld	$31,%xmm5,%xmm7
+	shrdl	$7,%ebx,%ebx
+	addl	%ebp,%edi
+	addl	24(%esp),%edx
+	xorl	%ecx,%ebx
+	vpslldq	$12,%xmm5,%xmm1
+	vpaddd	%xmm5,%xmm5,%xmm5
+	movl	%edi,%ebp
+	shldl	$5,%edi,%edi
+	andl	%ebx,%esi
+	xorl	%ecx,%ebx
+	vpsrld	$30,%xmm1,%xmm0
+	vpor	%xmm7,%xmm5,%xmm5
+	xorl	%ecx,%esi
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%esi,%edx
+	vpslld	$2,%xmm1,%xmm1
+	addl	28(%esp),%ecx
+	xorl	%ebx,%eax
+	vpxor	%xmm0,%xmm5,%xmm5
+	movl	%edx,%esi
+	shldl	$5,%edx,%edx
+	andl	%eax,%ebp
+	xorl	%ebx,%eax
+	vpxor	%xmm1,%xmm5,%xmm5
+	xorl	%ebx,%ebp
+	addl	%edx,%ecx
+	vmovdqa	112(%esp),%xmm1
+	shrdl	$7,%edi,%edi
+	addl	%ebp,%ecx
+	addl	32(%esp),%ebx
+	xorl	%eax,%edi
+	vpalignr	$8,%xmm2,%xmm3,%xmm6
+	movl	%ecx,%ebp
+	shldl	$5,%ecx,%ecx
+	vpaddd	%xmm5,%xmm1,%xmm1
+	vmovdqa	%xmm2,96(%esp)
+	andl	%edi,%esi
+	xorl	%eax,%edi
+	vpsrldq	$4,%xmm5,%xmm0
+	xorl	%eax,%esi
+	addl	%ecx,%ebx
+	vpxor	%xmm2,%xmm6,%xmm6
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	vpxor	%xmm4,%xmm0,%xmm0
+	addl	36(%esp),%eax
+	xorl	%edi,%edx
+	vmovdqa	%xmm1,16(%esp)
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	vpxor	%xmm0,%xmm6,%xmm6
+	andl	%edx,%ebp
+	xorl	%edi,%edx
+	xorl	%edi,%ebp
+	addl	%ebx,%eax
+	vpsrld	$31,%xmm6,%xmm0
+	shrdl	$7,%ecx,%ecx
+	addl	%ebp,%eax
+	addl	40(%esp),%edi
+	xorl	%edx,%ecx
+	vpslldq	$12,%xmm6,%xmm2
+	vpaddd	%xmm6,%xmm6,%xmm6
+	movl	%eax,%ebp
+	shldl	$5,%eax,%eax
+	andl	%ecx,%esi
+	xorl	%edx,%ecx
+	vpsrld	$30,%xmm2,%xmm1
+	vpor	%xmm0,%xmm6,%xmm6
+	xorl	%edx,%esi
+	addl	%eax,%edi
+	shrdl	$7,%ebx,%ebx
+	addl	%esi,%edi
+	vpslld	$2,%xmm2,%xmm2
+	vmovdqa	64(%esp),%xmm0
+	addl	44(%esp),%edx
+	xorl	%ecx,%ebx
+	vpxor	%xmm1,%xmm6,%xmm6
+	movl	%edi,%esi
+	shldl	$5,%edi,%edi
+	andl	%ebx,%ebp
+	xorl	%ecx,%ebx
+	vpxor	%xmm2,%xmm6,%xmm6
+	xorl	%ecx,%ebp
+	addl	%edi,%edx
+	vmovdqa	112(%esp),%xmm2
+	shrdl	$7,%eax,%eax
+	addl	%ebp,%edx
+	addl	48(%esp),%ecx
+	xorl	%ebx,%eax
+	vpalignr	$8,%xmm3,%xmm4,%xmm7
+	movl	%edx,%ebp
+	shldl	$5,%edx,%edx
+	vpaddd	%xmm6,%xmm2,%xmm2
+	vmovdqa	%xmm3,64(%esp)
+	andl	%eax,%esi
+	xorl	%ebx,%eax
+	vpsrldq	$4,%xmm6,%xmm1
+	xorl	%ebx,%esi
+	addl	%edx,%ecx
+	vpxor	%xmm3,%xmm7,%xmm7
+	shrdl	$7,%edi,%edi
+	addl	%esi,%ecx
+	vpxor	%xmm5,%xmm1,%xmm1
+	addl	52(%esp),%ebx
+	xorl	%eax,%edi
+	vmovdqa	%xmm2,32(%esp)
+	movl	%ecx,%esi
+	shldl	$5,%ecx,%ecx
+	vpxor	%xmm1,%xmm7,%xmm7
+	andl	%edi,%ebp
+	xorl	%eax,%edi
+	xorl	%eax,%ebp
+	addl	%ecx,%ebx
+	vpsrld	$31,%xmm7,%xmm1
+	shrdl	$7,%edx,%edx
+	addl	%ebp,%ebx
+	addl	56(%esp),%eax
+	xorl	%edi,%edx
+	vpslldq	$12,%xmm7,%xmm3
+	vpaddd	%xmm7,%xmm7,%xmm7
+	movl	%ebx,%ebp
+	shldl	$5,%ebx,%ebx
+	andl	%edx,%esi
+	xorl	%edi,%edx
+	vpsrld	$30,%xmm3,%xmm2
+	vpor	%xmm1,%xmm7,%xmm7
+	xorl	%edi,%esi
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%esi,%eax
+	vpslld	$2,%xmm3,%xmm3
+	vmovdqa	80(%esp),%xmm1
+	addl	60(%esp),%edi
+	xorl	%edx,%ecx
+	vpxor	%xmm2,%xmm7,%xmm7
+	movl	%eax,%esi
+	shldl	$5,%eax,%eax
+	andl	%ecx,%ebp
+	xorl	%edx,%ecx
+	vpxor	%xmm3,%xmm7,%xmm7
+	xorl	%edx,%ebp
+	addl	%eax,%edi
+	vmovdqa	112(%esp),%xmm3
+	shrdl	$7,%ebx,%ebx
+	addl	%ebp,%edi
+	vpalignr	$8,%xmm6,%xmm7,%xmm2
+	vpxor	%xmm4,%xmm0,%xmm0
+	addl	(%esp),%edx
+	xorl	%ecx,%ebx
+	movl	%edi,%ebp
+	shldl	$5,%edi,%edi
+	vpxor	%xmm1,%xmm0,%xmm0
+	vmovdqa	%xmm4,80(%esp)
+	andl	%ebx,%esi
+	xorl	%ecx,%ebx
+	vmovdqa	%xmm3,%xmm4
+	vpaddd	%xmm7,%xmm3,%xmm3
+	xorl	%ecx,%esi
+	addl	%edi,%edx
+	vpxor	%xmm2,%xmm0,%xmm0
+	shrdl	$7,%eax,%eax
+	addl	%esi,%edx
+	addl	4(%esp),%ecx
+	xorl	%ebx,%eax
+	vpsrld	$30,%xmm0,%xmm2
+	vmovdqa	%xmm3,48(%esp)
+	movl	%edx,%esi
+	shldl	$5,%edx,%edx
+	andl	%eax,%ebp
+	xorl	%ebx,%eax
+	vpslld	$2,%xmm0,%xmm0
+	xorl	%ebx,%ebp
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%ebp,%ecx
+	addl	8(%esp),%ebx
+	xorl	%eax,%edi
+	movl	%ecx,%ebp
+	shldl	$5,%ecx,%ecx
+	vpor	%xmm2,%xmm0,%xmm0
+	andl	%edi,%esi
+	xorl	%eax,%edi
+	vmovdqa	96(%esp),%xmm2
+	xorl	%eax,%esi
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	addl	12(%esp),%eax
+	xorl	%edi,%edx
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	andl	%edx,%ebp
+	xorl	%edi,%edx
+	xorl	%edi,%ebp
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%ebp,%eax
+	vpalignr	$8,%xmm7,%xmm0,%xmm3
+	vpxor	%xmm5,%xmm1,%xmm1
+	addl	16(%esp),%edi
+	xorl	%edx,%esi
+	movl	%eax,%ebp
+	shldl	$5,%eax,%eax
+	vpxor	%xmm2,%xmm1,%xmm1
+	vmovdqa	%xmm5,96(%esp)
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	vmovdqa	%xmm4,%xmm5
+	vpaddd	%xmm0,%xmm4,%xmm4
+	shrdl	$7,%ebx,%ebx
+	addl	%esi,%edi
+	vpxor	%xmm3,%xmm1,%xmm1
+	addl	20(%esp),%edx
+	xorl	%ecx,%ebp
+	movl	%edi,%esi
+	shldl	$5,%edi,%edi
+	vpsrld	$30,%xmm1,%xmm3
+	vmovdqa	%xmm4,(%esp)
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%ebp,%edx
+	vpslld	$2,%xmm1,%xmm1
+	addl	24(%esp),%ecx
+	xorl	%ebx,%esi
+	movl	%edx,%ebp
+	shldl	$5,%edx,%edx
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%esi,%ecx
+	vpor	%xmm3,%xmm1,%xmm1
+	addl	28(%esp),%ebx
+	xorl	%eax,%ebp
+	vmovdqa	64(%esp),%xmm3
+	movl	%ecx,%esi
+	shldl	$5,%ecx,%ecx
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%ebp,%ebx
+	vpalignr	$8,%xmm0,%xmm1,%xmm4
+	vpxor	%xmm6,%xmm2,%xmm2
+	addl	32(%esp),%eax
+	xorl	%edi,%esi
+	movl	%ebx,%ebp
+	shldl	$5,%ebx,%ebx
+	vpxor	%xmm3,%xmm2,%xmm2
+	vmovdqa	%xmm6,64(%esp)
+	xorl	%edx,%esi
+	addl	%ebx,%eax
+	vmovdqa	128(%esp),%xmm6
+	vpaddd	%xmm1,%xmm5,%xmm5
+	shrdl	$7,%ecx,%ecx
+	addl	%esi,%eax
+	vpxor	%xmm4,%xmm2,%xmm2
+	addl	36(%esp),%edi
+	xorl	%edx,%ebp
+	movl	%eax,%esi
+	shldl	$5,%eax,%eax
+	vpsrld	$30,%xmm2,%xmm4
+	vmovdqa	%xmm5,16(%esp)
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	shrdl	$7,%ebx,%ebx
+	addl	%ebp,%edi
+	vpslld	$2,%xmm2,%xmm2
+	addl	40(%esp),%edx
+	xorl	%ecx,%esi
+	movl	%edi,%ebp
+	shldl	$5,%edi,%edi
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%esi,%edx
+	vpor	%xmm4,%xmm2,%xmm2
+	addl	44(%esp),%ecx
+	xorl	%ebx,%ebp
+	vmovdqa	80(%esp),%xmm4
+	movl	%edx,%esi
+	shldl	$5,%edx,%edx
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%ebp,%ecx
+	vpalignr	$8,%xmm1,%xmm2,%xmm5
+	vpxor	%xmm7,%xmm3,%xmm3
+	addl	48(%esp),%ebx
+	xorl	%eax,%esi
+	movl	%ecx,%ebp
+	shldl	$5,%ecx,%ecx
+	vpxor	%xmm4,%xmm3,%xmm3
+	vmovdqa	%xmm7,80(%esp)
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	vmovdqa	%xmm6,%xmm7
+	vpaddd	%xmm2,%xmm6,%xmm6
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	vpxor	%xmm5,%xmm3,%xmm3
+	addl	52(%esp),%eax
+	xorl	%edi,%ebp
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	vpsrld	$30,%xmm3,%xmm5
+	vmovdqa	%xmm6,32(%esp)
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%ebp,%eax
+	vpslld	$2,%xmm3,%xmm3
+	addl	56(%esp),%edi
+	xorl	%edx,%esi
+	movl	%eax,%ebp
+	shldl	$5,%eax,%eax
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	shrdl	$7,%ebx,%ebx
+	addl	%esi,%edi
+	vpor	%xmm5,%xmm3,%xmm3
+	addl	60(%esp),%edx
+	xorl	%ecx,%ebp
+	vmovdqa	96(%esp),%xmm5
+	movl	%edi,%esi
+	shldl	$5,%edi,%edi
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%ebp,%edx
+	vpalignr	$8,%xmm2,%xmm3,%xmm6
+	vpxor	%xmm0,%xmm4,%xmm4
+	addl	(%esp),%ecx
+	xorl	%ebx,%esi
+	movl	%edx,%ebp
+	shldl	$5,%edx,%edx
+	vpxor	%xmm5,%xmm4,%xmm4
+	vmovdqa	%xmm0,96(%esp)
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	vmovdqa	%xmm7,%xmm0
+	vpaddd	%xmm3,%xmm7,%xmm7
+	shrdl	$7,%edi,%edi
+	addl	%esi,%ecx
+	vpxor	%xmm6,%xmm4,%xmm4
+	addl	4(%esp),%ebx
+	xorl	%eax,%ebp
+	movl	%ecx,%esi
+	shldl	$5,%ecx,%ecx
+	vpsrld	$30,%xmm4,%xmm6
+	vmovdqa	%xmm7,48(%esp)
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%ebp,%ebx
+	vpslld	$2,%xmm4,%xmm4
+	addl	8(%esp),%eax
+	xorl	%edi,%esi
+	movl	%ebx,%ebp
+	shldl	$5,%ebx,%ebx
+	xorl	%edx,%esi
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%esi,%eax
+	vpor	%xmm6,%xmm4,%xmm4
+	addl	12(%esp),%edi
+	xorl	%edx,%ebp
+	vmovdqa	64(%esp),%xmm6
+	movl	%eax,%esi
+	shldl	$5,%eax,%eax
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	shrdl	$7,%ebx,%ebx
+	addl	%ebp,%edi
+	vpalignr	$8,%xmm3,%xmm4,%xmm7
+	vpxor	%xmm1,%xmm5,%xmm5
+	addl	16(%esp),%edx
+	xorl	%ecx,%esi
+	movl	%edi,%ebp
+	shldl	$5,%edi,%edi
+	vpxor	%xmm6,%xmm5,%xmm5
+	vmovdqa	%xmm1,64(%esp)
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	vmovdqa	%xmm0,%xmm1
+	vpaddd	%xmm4,%xmm0,%xmm0
+	shrdl	$7,%eax,%eax
+	addl	%esi,%edx
+	vpxor	%xmm7,%xmm5,%xmm5
+	addl	20(%esp),%ecx
+	xorl	%ebx,%ebp
+	movl	%edx,%esi
+	shldl	$5,%edx,%edx
+	vpsrld	$30,%xmm5,%xmm7
+	vmovdqa	%xmm0,(%esp)
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%ebp,%ecx
+	vpslld	$2,%xmm5,%xmm5
+	addl	24(%esp),%ebx
+	xorl	%eax,%esi
+	movl	%ecx,%ebp
+	shldl	$5,%ecx,%ecx
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	vpor	%xmm7,%xmm5,%xmm5
+	addl	28(%esp),%eax
+	xorl	%edi,%ebp
+	vmovdqa	80(%esp),%xmm7
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%ebp,%eax
+	vpalignr	$8,%xmm4,%xmm5,%xmm0
+	vpxor	%xmm2,%xmm6,%xmm6
+	movl	%ecx,%ebp
+	xorl	%edx,%ecx
+	addl	32(%esp),%edi
+	andl	%edx,%ebp
+	vpxor	%xmm7,%xmm6,%xmm6
+	vmovdqa	%xmm2,80(%esp)
+	andl	%ecx,%esi
+	shrdl	$7,%ebx,%ebx
+	vmovdqa	%xmm1,%xmm2
+	vpaddd	%xmm5,%xmm1,%xmm1
+	addl	%ebp,%edi
+	movl	%eax,%ebp
+	vpxor	%xmm0,%xmm6,%xmm6
+	shldl	$5,%eax,%eax
+	addl	%esi,%edi
+	xorl	%edx,%ecx
+	addl	%eax,%edi
+	vpsrld	$30,%xmm6,%xmm0
+	vmovdqa	%xmm1,16(%esp)
+	movl	%ebx,%esi
+	xorl	%ecx,%ebx
+	addl	36(%esp),%edx
+	andl	%ecx,%esi
+	vpslld	$2,%xmm6,%xmm6
+	andl	%ebx,%ebp
+	shrdl	$7,%eax,%eax
+	addl	%esi,%edx
+	movl	%edi,%esi
+	shldl	$5,%edi,%edi
+	addl	%ebp,%edx
+	xorl	%ecx,%ebx
+	addl	%edi,%edx
+	vpor	%xmm0,%xmm6,%xmm6
+	movl	%eax,%ebp
+	xorl	%ebx,%eax
+	vmovdqa	96(%esp),%xmm0
+	addl	40(%esp),%ecx
+	andl	%ebx,%ebp
+	andl	%eax,%esi
+	shrdl	$7,%edi,%edi
+	addl	%ebp,%ecx
+	movl	%edx,%ebp
+	shldl	$5,%edx,%edx
+	addl	%esi,%ecx
+	xorl	%ebx,%eax
+	addl	%edx,%ecx
+	movl	%edi,%esi
+	xorl	%eax,%edi
+	addl	44(%esp),%ebx
+	andl	%eax,%esi
+	andl	%edi,%ebp
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	movl	%ecx,%esi
+	shldl	$5,%ecx,%ecx
+	addl	%ebp,%ebx
+	xorl	%eax,%edi
+	addl	%ecx,%ebx
+	vpalignr	$8,%xmm5,%xmm6,%xmm1
+	vpxor	%xmm3,%xmm7,%xmm7
+	movl	%edx,%ebp
+	xorl	%edi,%edx
+	addl	48(%esp),%eax
+	andl	%edi,%ebp
+	vpxor	%xmm0,%xmm7,%xmm7
+	vmovdqa	%xmm3,96(%esp)
+	andl	%edx,%esi
+	shrdl	$7,%ecx,%ecx
+	vmovdqa	144(%esp),%xmm3
+	vpaddd	%xmm6,%xmm2,%xmm2
+	addl	%ebp,%eax
+	movl	%ebx,%ebp
+	vpxor	%xmm1,%xmm7,%xmm7
+	shldl	$5,%ebx,%ebx
+	addl	%esi,%eax
+	xorl	%edi,%edx
+	addl	%ebx,%eax
+	vpsrld	$30,%xmm7,%xmm1
+	vmovdqa	%xmm2,32(%esp)
+	movl	%ecx,%esi
+	xorl	%edx,%ecx
+	addl	52(%esp),%edi
+	andl	%edx,%esi
+	vpslld	$2,%xmm7,%xmm7
+	andl	%ecx,%ebp
+	shrdl	$7,%ebx,%ebx
+	addl	%esi,%edi
+	movl	%eax,%esi
+	shldl	$5,%eax,%eax
+	addl	%ebp,%edi
+	xorl	%edx,%ecx
+	addl	%eax,%edi
+	vpor	%xmm1,%xmm7,%xmm7
+	movl	%ebx,%ebp
+	xorl	%ecx,%ebx
+	vmovdqa	64(%esp),%xmm1
+	addl	56(%esp),%edx
+	andl	%ecx,%ebp
+	andl	%ebx,%esi
+	shrdl	$7,%eax,%eax
+	addl	%ebp,%edx
+	movl	%edi,%ebp
+	shldl	$5,%edi,%edi
+	addl	%esi,%edx
+	xorl	%ecx,%ebx
+	addl	%edi,%edx
+	movl	%eax,%esi
+	xorl	%ebx,%eax
+	addl	60(%esp),%ecx
+	andl	%ebx,%esi
+	andl	%eax,%ebp
+	shrdl	$7,%edi,%edi
+	addl	%esi,%ecx
+	movl	%edx,%esi
+	shldl	$5,%edx,%edx
+	addl	%ebp,%ecx
+	xorl	%ebx,%eax
+	addl	%edx,%ecx
+	vpalignr	$8,%xmm6,%xmm7,%xmm2
+	vpxor	%xmm4,%xmm0,%xmm0
+	movl	%edi,%ebp
+	xorl	%eax,%edi
+	addl	(%esp),%ebx
+	andl	%eax,%ebp
+	vpxor	%xmm1,%xmm0,%xmm0
+	vmovdqa	%xmm4,64(%esp)
+	andl	%edi,%esi
+	shrdl	$7,%edx,%edx
+	vmovdqa	%xmm3,%xmm4
+	vpaddd	%xmm7,%xmm3,%xmm3
+	addl	%ebp,%ebx
+	movl	%ecx,%ebp
+	vpxor	%xmm2,%xmm0,%xmm0
+	shldl	$5,%ecx,%ecx
+	addl	%esi,%ebx
+	xorl	%eax,%edi
+	addl	%ecx,%ebx
+	vpsrld	$30,%xmm0,%xmm2
+	vmovdqa	%xmm3,48(%esp)
+	movl	%edx,%esi
+	xorl	%edi,%edx
+	addl	4(%esp),%eax
+	andl	%edi,%esi
+	vpslld	$2,%xmm0,%xmm0
+	andl	%edx,%ebp
+	shrdl	$7,%ecx,%ecx
+	addl	%esi,%eax
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	addl	%ebp,%eax
+	xorl	%edi,%edx
+	addl	%ebx,%eax
+	vpor	%xmm2,%xmm0,%xmm0
+	movl	%ecx,%ebp
+	xorl	%edx,%ecx
+	vmovdqa	80(%esp),%xmm2
+	addl	8(%esp),%edi
+	andl	%edx,%ebp
+	andl	%ecx,%esi
+	shrdl	$7,%ebx,%ebx
+	addl	%ebp,%edi
+	movl	%eax,%ebp
+	shldl	$5,%eax,%eax
+	addl	%esi,%edi
+	xorl	%edx,%ecx
+	addl	%eax,%edi
+	movl	%ebx,%esi
+	xorl	%ecx,%ebx
+	addl	12(%esp),%edx
+	andl	%ecx,%esi
+	andl	%ebx,%ebp
+	shrdl	$7,%eax,%eax
+	addl	%esi,%edx
+	movl	%edi,%esi
+	shldl	$5,%edi,%edi
+	addl	%ebp,%edx
+	xorl	%ecx,%ebx
+	addl	%edi,%edx
+	vpalignr	$8,%xmm7,%xmm0,%xmm3
+	vpxor	%xmm5,%xmm1,%xmm1
+	movl	%eax,%ebp
+	xorl	%ebx,%eax
+	addl	16(%esp),%ecx
+	andl	%ebx,%ebp
+	vpxor	%xmm2,%xmm1,%xmm1
+	vmovdqa	%xmm5,80(%esp)
+	andl	%eax,%esi
+	shrdl	$7,%edi,%edi
+	vmovdqa	%xmm4,%xmm5
+	vpaddd	%xmm0,%xmm4,%xmm4
+	addl	%ebp,%ecx
+	movl	%edx,%ebp
+	vpxor	%xmm3,%xmm1,%xmm1
+	shldl	$5,%edx,%edx
+	addl	%esi,%ecx
+	xorl	%ebx,%eax
+	addl	%edx,%ecx
+	vpsrld	$30,%xmm1,%xmm3
+	vmovdqa	%xmm4,(%esp)
+	movl	%edi,%esi
+	xorl	%eax,%edi
+	addl	20(%esp),%ebx
+	andl	%eax,%esi
+	vpslld	$2,%xmm1,%xmm1
+	andl	%edi,%ebp
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	movl	%ecx,%esi
+	shldl	$5,%ecx,%ecx
+	addl	%ebp,%ebx
+	xorl	%eax,%edi
+	addl	%ecx,%ebx
+	vpor	%xmm3,%xmm1,%xmm1
+	movl	%edx,%ebp
+	xorl	%edi,%edx
+	vmovdqa	96(%esp),%xmm3
+	addl	24(%esp),%eax
+	andl	%edi,%ebp
+	andl	%edx,%esi
+	shrdl	$7,%ecx,%ecx
+	addl	%ebp,%eax
+	movl	%ebx,%ebp
+	shldl	$5,%ebx,%ebx
+	addl	%esi,%eax
+	xorl	%edi,%edx
+	addl	%ebx,%eax
+	movl	%ecx,%esi
+	xorl	%edx,%ecx
+	addl	28(%esp),%edi
+	andl	%edx,%esi
+	andl	%ecx,%ebp
+	shrdl	$7,%ebx,%ebx
+	addl	%esi,%edi
+	movl	%eax,%esi
+	shldl	$5,%eax,%eax
+	addl	%ebp,%edi
+	xorl	%edx,%ecx
+	addl	%eax,%edi
+	vpalignr	$8,%xmm0,%xmm1,%xmm4
+	vpxor	%xmm6,%xmm2,%xmm2
+	movl	%ebx,%ebp
+	xorl	%ecx,%ebx
+	addl	32(%esp),%edx
+	andl	%ecx,%ebp
+	vpxor	%xmm3,%xmm2,%xmm2
+	vmovdqa	%xmm6,96(%esp)
+	andl	%ebx,%esi
+	shrdl	$7,%eax,%eax
+	vmovdqa	%xmm5,%xmm6
+	vpaddd	%xmm1,%xmm5,%xmm5
+	addl	%ebp,%edx
+	movl	%edi,%ebp
+	vpxor	%xmm4,%xmm2,%xmm2
+	shldl	$5,%edi,%edi
+	addl	%esi,%edx
+	xorl	%ecx,%ebx
+	addl	%edi,%edx
+	vpsrld	$30,%xmm2,%xmm4
+	vmovdqa	%xmm5,16(%esp)
+	movl	%eax,%esi
+	xorl	%ebx,%eax
+	addl	36(%esp),%ecx
+	andl	%ebx,%esi
+	vpslld	$2,%xmm2,%xmm2
+	andl	%eax,%ebp
+	shrdl	$7,%edi,%edi
+	addl	%esi,%ecx
+	movl	%edx,%esi
+	shldl	$5,%edx,%edx
+	addl	%ebp,%ecx
+	xorl	%ebx,%eax
+	addl	%edx,%ecx
+	vpor	%xmm4,%xmm2,%xmm2
+	movl	%edi,%ebp
+	xorl	%eax,%edi
+	vmovdqa	64(%esp),%xmm4
+	addl	40(%esp),%ebx
+	andl	%eax,%ebp
+	andl	%edi,%esi
+	shrdl	$7,%edx,%edx
+	addl	%ebp,%ebx
+	movl	%ecx,%ebp
+	shldl	$5,%ecx,%ecx
+	addl	%esi,%ebx
+	xorl	%eax,%edi
+	addl	%ecx,%ebx
+	movl	%edx,%esi
+	xorl	%edi,%edx
+	addl	44(%esp),%eax
+	andl	%edi,%esi
+	andl	%edx,%ebp
+	shrdl	$7,%ecx,%ecx
+	addl	%esi,%eax
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	addl	%ebp,%eax
+	xorl	%edi,%edx
+	addl	%ebx,%eax
+	vpalignr	$8,%xmm1,%xmm2,%xmm5
+	vpxor	%xmm7,%xmm3,%xmm3
+	addl	48(%esp),%edi
+	xorl	%edx,%esi
+	movl	%eax,%ebp
+	shldl	$5,%eax,%eax
+	vpxor	%xmm4,%xmm3,%xmm3
+	vmovdqa	%xmm7,64(%esp)
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	vmovdqa	%xmm6,%xmm7
+	vpaddd	%xmm2,%xmm6,%xmm6
+	shrdl	$7,%ebx,%ebx
+	addl	%esi,%edi
+	vpxor	%xmm5,%xmm3,%xmm3
+	addl	52(%esp),%edx
+	xorl	%ecx,%ebp
+	movl	%edi,%esi
+	shldl	$5,%edi,%edi
+	vpsrld	$30,%xmm3,%xmm5
+	vmovdqa	%xmm6,32(%esp)
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%ebp,%edx
+	vpslld	$2,%xmm3,%xmm3
+	addl	56(%esp),%ecx
+	xorl	%ebx,%esi
+	movl	%edx,%ebp
+	shldl	$5,%edx,%edx
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%esi,%ecx
+	vpor	%xmm5,%xmm3,%xmm3
+	addl	60(%esp),%ebx
+	xorl	%eax,%ebp
+	movl	%ecx,%esi
+	shldl	$5,%ecx,%ecx
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%ebp,%ebx
+	addl	(%esp),%eax
+	vpaddd	%xmm3,%xmm7,%xmm7
+	xorl	%edi,%esi
+	movl	%ebx,%ebp
+	shldl	$5,%ebx,%ebx
+	xorl	%edx,%esi
+	vmovdqa	%xmm7,48(%esp)
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%esi,%eax
+	addl	4(%esp),%edi
+	xorl	%edx,%ebp
+	movl	%eax,%esi
+	shldl	$5,%eax,%eax
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	shrdl	$7,%ebx,%ebx
+	addl	%ebp,%edi
+	addl	8(%esp),%edx
+	xorl	%ecx,%esi
+	movl	%edi,%ebp
+	shldl	$5,%edi,%edi
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%esi,%edx
+	addl	12(%esp),%ecx
+	xorl	%ebx,%ebp
+	movl	%edx,%esi
+	shldl	$5,%edx,%edx
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%ebp,%ecx
+	movl	196(%esp),%ebp
+	cmpl	200(%esp),%ebp
+	je	.L008done
+	vmovdqa	160(%esp),%xmm7
+	vmovdqa	176(%esp),%xmm6
+	vmovdqu	(%ebp),%xmm0
+	vmovdqu	16(%ebp),%xmm1
+	vmovdqu	32(%ebp),%xmm2
+	vmovdqu	48(%ebp),%xmm3
+	addl	$64,%ebp
+	vpshufb	%xmm6,%xmm0,%xmm0
+	movl	%ebp,196(%esp)
+	vmovdqa	%xmm7,96(%esp)
+	addl	16(%esp),%ebx
+	xorl	%eax,%esi
+	vpshufb	%xmm6,%xmm1,%xmm1
+	movl	%ecx,%ebp
+	shldl	$5,%ecx,%ecx
+	vpaddd	%xmm7,%xmm0,%xmm4
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	vmovdqa	%xmm4,(%esp)
+	addl	20(%esp),%eax
+	xorl	%edi,%ebp
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%ebp,%eax
+	addl	24(%esp),%edi
+	xorl	%edx,%esi
+	movl	%eax,%ebp
+	shldl	$5,%eax,%eax
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	shrdl	$7,%ebx,%ebx
+	addl	%esi,%edi
+	addl	28(%esp),%edx
+	xorl	%ecx,%ebp
+	movl	%edi,%esi
+	shldl	$5,%edi,%edi
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%ebp,%edx
+	addl	32(%esp),%ecx
+	xorl	%ebx,%esi
+	vpshufb	%xmm6,%xmm2,%xmm2
+	movl	%edx,%ebp
+	shldl	$5,%edx,%edx
+	vpaddd	%xmm7,%xmm1,%xmm5
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%esi,%ecx
+	vmovdqa	%xmm5,16(%esp)
+	addl	36(%esp),%ebx
+	xorl	%eax,%ebp
+	movl	%ecx,%esi
+	shldl	$5,%ecx,%ecx
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%ebp,%ebx
+	addl	40(%esp),%eax
+	xorl	%edi,%esi
+	movl	%ebx,%ebp
+	shldl	$5,%ebx,%ebx
+	xorl	%edx,%esi
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%esi,%eax
+	addl	44(%esp),%edi
+	xorl	%edx,%ebp
+	movl	%eax,%esi
+	shldl	$5,%eax,%eax
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	shrdl	$7,%ebx,%ebx
+	addl	%ebp,%edi
+	addl	48(%esp),%edx
+	xorl	%ecx,%esi
+	vpshufb	%xmm6,%xmm3,%xmm3
+	movl	%edi,%ebp
+	shldl	$5,%edi,%edi
+	vpaddd	%xmm7,%xmm2,%xmm6
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%esi,%edx
+	vmovdqa	%xmm6,32(%esp)
+	addl	52(%esp),%ecx
+	xorl	%ebx,%ebp
+	movl	%edx,%esi
+	shldl	$5,%edx,%edx
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%ebp,%ecx
+	addl	56(%esp),%ebx
+	xorl	%eax,%esi
+	movl	%ecx,%ebp
+	shldl	$5,%ecx,%ecx
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	addl	60(%esp),%eax
+	xorl	%edi,%ebp
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%ebp,%eax
+	movl	192(%esp),%ebp
+	addl	(%ebp),%eax
+	addl	4(%ebp),%esi
+	addl	8(%ebp),%ecx
+	movl	%eax,(%ebp)
+	addl	12(%ebp),%edx
+	movl	%esi,4(%ebp)
+	addl	16(%ebp),%edi
+	movl	%ecx,8(%ebp)
+	movl	%esi,%ebx
+	movl	%edx,12(%ebp)
+	movl	%edi,16(%ebp)
+	jmp	.L007loop
+.align	16
+.L008done:
+	addl	16(%esp),%ebx
+	xorl	%eax,%esi
+	movl	%ecx,%ebp
+	shldl	$5,%ecx,%ecx
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	addl	20(%esp),%eax
+	xorl	%edi,%ebp
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%ebp,%eax
+	addl	24(%esp),%edi
+	xorl	%edx,%esi
+	movl	%eax,%ebp
+	shldl	$5,%eax,%eax
+	xorl	%ecx,%esi
+	addl	%eax,%edi
+	shrdl	$7,%ebx,%ebx
+	addl	%esi,%edi
+	addl	28(%esp),%edx
+	xorl	%ecx,%ebp
+	movl	%edi,%esi
+	shldl	$5,%edi,%edi
+	xorl	%ebx,%ebp
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%ebp,%edx
+	addl	32(%esp),%ecx
+	xorl	%ebx,%esi
+	movl	%edx,%ebp
+	shldl	$5,%edx,%edx
+	xorl	%eax,%esi
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%esi,%ecx
+	addl	36(%esp),%ebx
+	xorl	%eax,%ebp
+	movl	%ecx,%esi
+	shldl	$5,%ecx,%ecx
+	xorl	%edi,%ebp
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%ebp,%ebx
+	addl	40(%esp),%eax
+	xorl	%edi,%esi
+	movl	%ebx,%ebp
+	shldl	$5,%ebx,%ebx
+	xorl	%edx,%esi
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%esi,%eax
+	addl	44(%esp),%edi
+	xorl	%edx,%ebp
+	movl	%eax,%esi
+	shldl	$5,%eax,%eax
+	xorl	%ecx,%ebp
+	addl	%eax,%edi
+	shrdl	$7,%ebx,%ebx
+	addl	%ebp,%edi
+	addl	48(%esp),%edx
+	xorl	%ecx,%esi
+	movl	%edi,%ebp
+	shldl	$5,%edi,%edi
+	xorl	%ebx,%esi
+	addl	%edi,%edx
+	shrdl	$7,%eax,%eax
+	addl	%esi,%edx
+	addl	52(%esp),%ecx
+	xorl	%ebx,%ebp
+	movl	%edx,%esi
+	shldl	$5,%edx,%edx
+	xorl	%eax,%ebp
+	addl	%edx,%ecx
+	shrdl	$7,%edi,%edi
+	addl	%ebp,%ecx
+	addl	56(%esp),%ebx
+	xorl	%eax,%esi
+	movl	%ecx,%ebp
+	shldl	$5,%ecx,%ecx
+	xorl	%edi,%esi
+	addl	%ecx,%ebx
+	shrdl	$7,%edx,%edx
+	addl	%esi,%ebx
+	addl	60(%esp),%eax
+	xorl	%edi,%ebp
+	movl	%ebx,%esi
+	shldl	$5,%ebx,%ebx
+	xorl	%edx,%ebp
+	addl	%ebx,%eax
+	shrdl	$7,%ecx,%ecx
+	addl	%ebp,%eax
+	vzeroall
+	movl	192(%esp),%ebp
+	addl	(%ebp),%eax
+	movl	204(%esp),%esp
+	addl	4(%ebp),%esi
+	addl	8(%ebp),%ecx
+	movl	%eax,(%ebp)
+	addl	12(%ebp),%edx
+	movl	%esi,4(%ebp)
+	addl	16(%ebp),%edi
+	movl	%ecx,8(%ebp)
+	movl	%edx,12(%ebp)
+	movl	%edi,16(%ebp)
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	_sha1_block_data_order_avx,.-_sha1_block_data_order_avx
+.align	64
+.LK_XX_XX:
+.long	1518500249,1518500249,1518500249,1518500249
+.long	1859775393,1859775393,1859775393,1859775393
+.long	2400959708,2400959708,2400959708,2400959708
+.long	3395469782,3395469782,3395469782,3395469782
+.long	66051,67438087,134810123,202182159
+.byte	83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115
+.byte	102,111,114,109,32,102,111,114,32,120,56,54,44,32,67,82
+.byte	89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112
+.byte	114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.comm	OPENSSL_ia32cap_P,8,4
+#endif


Property changes on: trunk/secure/lib/libcrypto/i386/sha1-586.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
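
For orientation, a minimal C sketch of the SHA-1 round logic that both the added sha1-586.S above and the removed sha1-586.s below unroll. It is not part of the commit and the names are illustrative; only the structure is asserted. The four stage constants match the .LK_XX_XX table in the assembly (1518500249, 1859775393, 2400959708, 3395469782), and the roll $5 / rorl $2 pairs are the 5-bit and 30-bit rotations here.

	#include <stdint.h>

	static uint32_t rol32(uint32_t x, int n)
	{
		return (x << n) | (x >> (32 - n));
	}

	/*
	 * One round of the SHA-1 compression function; t is the round
	 * index (0..79), w_t the scheduled message word.  The message
	 * schedule itself is the xorl chains plus roll $1 in the assembly:
	 * w[t] = rol32(w[t-3] ^ w[t-8] ^ w[t-14] ^ w[t-16], 1).
	 */
	static void sha1_round(uint32_t s[5], uint32_t w_t, int t)
	{
		uint32_t f, k;

		if (t < 20) {		/* Ch: the xorl/andl/xorl pattern */
			f = (s[1] & s[2]) | (~s[1] & s[3]);
			k = 1518500249;		/* 0x5a827999 */
		} else if (t < 40) {	/* Parity */
			f = s[1] ^ s[2] ^ s[3];
			k = 1859775393;		/* 0x6ed9eba1 */
		} else if (t < 60) {	/* Maj: the split andl/andl pattern */
			f = (s[1] & s[2]) | (s[1] & s[3]) | (s[2] & s[3]);
			k = 2400959708;		/* 0x8f1bbcdc */
		} else {		/* Parity again */
			f = s[1] ^ s[2] ^ s[3];
			k = 3395469782;		/* 0xca62c1d6 */
		}
		uint32_t t32 = rol32(s[0], 5) + f + s[4] + k + w_t;
		s[4] = s[3];
		s[3] = s[2];
		s[2] = rol32(s[1], 30);	/* the rorl $2 in the assembly */
		s[1] = s[0];
		s[0] = t32;
	}

The SSSE3 and AVX bodies compute four scheduled words at a time in XMM registers (the pxor/psrld/pslld and vpxor/vpsrld/vpslld sequences interleaved with the integer rounds), which is what they add over the integer-only .L002loop path of the removed file.
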
Deleted: trunk/secure/lib/libcrypto/i386/sha1-586.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/sha1-586.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/sha1-586.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,2639 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/sha1-586.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"sha1-586.s"
-.text
-.globl	sha1_block_data_order
-.type	sha1_block_data_order, at function
-.align	16
-sha1_block_data_order:
-.L_sha1_block_data_order_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	call	.L000pic_point
-.L000pic_point:
-	popl	%ebp
-	leal	OPENSSL_ia32cap_P,%esi
-	leal	.LK_XX_XX-.L000pic_point(%ebp),%ebp
-	movl	(%esi),%eax
-	movl	4(%esi),%edx
-	testl	$512,%edx
-	jz	.L001x86
-	testl	$16777216,%eax
-	jz	.L001x86
-	jmp	.Lssse3_shortcut
-.align	16
-.L001x86:
-	movl	20(%esp),%ebp
-	movl	24(%esp),%esi
-	movl	28(%esp),%eax
-	subl	$76,%esp
-	shll	$6,%eax
-	addl	%esi,%eax
-	movl	%eax,104(%esp)
-	movl	16(%ebp),%edi
-	jmp	.L002loop
-.align	16
-.L002loop:
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	movl	12(%esi),%edx
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	movl	%eax,(%esp)
-	movl	%ebx,4(%esp)
-	movl	%ecx,8(%esp)
-	movl	%edx,12(%esp)
-	movl	16(%esi),%eax
-	movl	20(%esi),%ebx
-	movl	24(%esi),%ecx
-	movl	28(%esi),%edx
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	movl	%eax,16(%esp)
-	movl	%ebx,20(%esp)
-	movl	%ecx,24(%esp)
-	movl	%edx,28(%esp)
-	movl	32(%esi),%eax
-	movl	36(%esi),%ebx
-	movl	40(%esi),%ecx
-	movl	44(%esi),%edx
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	movl	%eax,32(%esp)
-	movl	%ebx,36(%esp)
-	movl	%ecx,40(%esp)
-	movl	%edx,44(%esp)
-	movl	48(%esi),%eax
-	movl	52(%esi),%ebx
-	movl	56(%esi),%ecx
-	movl	60(%esi),%edx
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	movl	%eax,48(%esp)
-	movl	%ebx,52(%esp)
-	movl	%ecx,56(%esp)
-	movl	%edx,60(%esp)
-	movl	%esi,100(%esp)
-	movl	(%ebp),%eax
-	movl	4(%ebp),%ebx
-	movl	8(%ebp),%ecx
-	movl	12(%ebp),%edx
-
-	movl	%ecx,%esi
-	movl	%eax,%ebp
-	roll	$5,%ebp
-	xorl	%edx,%esi
-	addl	%edi,%ebp
-	movl	(%esp),%edi
-	andl	%ebx,%esi
-	rorl	$2,%ebx
-	xorl	%edx,%esi
-	leal	1518500249(%ebp,%edi,1),%ebp
-	addl	%esi,%ebp
-
-	movl	%ebx,%edi
-	movl	%ebp,%esi
-	roll	$5,%ebp
-	xorl	%ecx,%edi
-	addl	%edx,%ebp
-	movl	4(%esp),%edx
-	andl	%eax,%edi
-	rorl	$2,%eax
-	xorl	%ecx,%edi
-	leal	1518500249(%ebp,%edx,1),%ebp
-	addl	%edi,%ebp
-
-	movl	%eax,%edx
-	movl	%ebp,%edi
-	roll	$5,%ebp
-	xorl	%ebx,%edx
-	addl	%ecx,%ebp
-	movl	8(%esp),%ecx
-	andl	%esi,%edx
-	rorl	$2,%esi
-	xorl	%ebx,%edx
-	leal	1518500249(%ebp,%ecx,1),%ebp
-	addl	%edx,%ebp
-
-	movl	%esi,%ecx
-	movl	%ebp,%edx
-	roll	$5,%ebp
-	xorl	%eax,%ecx
-	addl	%ebx,%ebp
-	movl	12(%esp),%ebx
-	andl	%edi,%ecx
-	rorl	$2,%edi
-	xorl	%eax,%ecx
-	leal	1518500249(%ebp,%ebx,1),%ebp
-	addl	%ecx,%ebp
-
-	movl	%edi,%ebx
-	movl	%ebp,%ecx
-	roll	$5,%ebp
-	xorl	%esi,%ebx
-	addl	%eax,%ebp
-	movl	16(%esp),%eax
-	andl	%edx,%ebx
-	rorl	$2,%edx
-	xorl	%esi,%ebx
-	leal	1518500249(%ebp,%eax,1),%ebp
-	addl	%ebx,%ebp
-
-	movl	%edx,%eax
-	movl	%ebp,%ebx
-	roll	$5,%ebp
-	xorl	%edi,%eax
-	addl	%esi,%ebp
-	movl	20(%esp),%esi
-	andl	%ecx,%eax
-	rorl	$2,%ecx
-	xorl	%edi,%eax
-	leal	1518500249(%ebp,%esi,1),%ebp
-	addl	%eax,%ebp
-
-	movl	%ecx,%esi
-	movl	%ebp,%eax
-	roll	$5,%ebp
-	xorl	%edx,%esi
-	addl	%edi,%ebp
-	movl	24(%esp),%edi
-	andl	%ebx,%esi
-	rorl	$2,%ebx
-	xorl	%edx,%esi
-	leal	1518500249(%ebp,%edi,1),%ebp
-	addl	%esi,%ebp
-
-	movl	%ebx,%edi
-	movl	%ebp,%esi
-	roll	$5,%ebp
-	xorl	%ecx,%edi
-	addl	%edx,%ebp
-	movl	28(%esp),%edx
-	andl	%eax,%edi
-	rorl	$2,%eax
-	xorl	%ecx,%edi
-	leal	1518500249(%ebp,%edx,1),%ebp
-	addl	%edi,%ebp
-
-	movl	%eax,%edx
-	movl	%ebp,%edi
-	roll	$5,%ebp
-	xorl	%ebx,%edx
-	addl	%ecx,%ebp
-	movl	32(%esp),%ecx
-	andl	%esi,%edx
-	rorl	$2,%esi
-	xorl	%ebx,%edx
-	leal	1518500249(%ebp,%ecx,1),%ebp
-	addl	%edx,%ebp
-
-	movl	%esi,%ecx
-	movl	%ebp,%edx
-	roll	$5,%ebp
-	xorl	%eax,%ecx
-	addl	%ebx,%ebp
-	movl	36(%esp),%ebx
-	andl	%edi,%ecx
-	rorl	$2,%edi
-	xorl	%eax,%ecx
-	leal	1518500249(%ebp,%ebx,1),%ebp
-	addl	%ecx,%ebp
-
-	movl	%edi,%ebx
-	movl	%ebp,%ecx
-	roll	$5,%ebp
-	xorl	%esi,%ebx
-	addl	%eax,%ebp
-	movl	40(%esp),%eax
-	andl	%edx,%ebx
-	rorl	$2,%edx
-	xorl	%esi,%ebx
-	leal	1518500249(%ebp,%eax,1),%ebp
-	addl	%ebx,%ebp
-
-	movl	%edx,%eax
-	movl	%ebp,%ebx
-	roll	$5,%ebp
-	xorl	%edi,%eax
-	addl	%esi,%ebp
-	movl	44(%esp),%esi
-	andl	%ecx,%eax
-	rorl	$2,%ecx
-	xorl	%edi,%eax
-	leal	1518500249(%ebp,%esi,1),%ebp
-	addl	%eax,%ebp
-
-	movl	%ecx,%esi
-	movl	%ebp,%eax
-	roll	$5,%ebp
-	xorl	%edx,%esi
-	addl	%edi,%ebp
-	movl	48(%esp),%edi
-	andl	%ebx,%esi
-	rorl	$2,%ebx
-	xorl	%edx,%esi
-	leal	1518500249(%ebp,%edi,1),%ebp
-	addl	%esi,%ebp
-
-	movl	%ebx,%edi
-	movl	%ebp,%esi
-	roll	$5,%ebp
-	xorl	%ecx,%edi
-	addl	%edx,%ebp
-	movl	52(%esp),%edx
-	andl	%eax,%edi
-	rorl	$2,%eax
-	xorl	%ecx,%edi
-	leal	1518500249(%ebp,%edx,1),%ebp
-	addl	%edi,%ebp
-
-	movl	%eax,%edx
-	movl	%ebp,%edi
-	roll	$5,%ebp
-	xorl	%ebx,%edx
-	addl	%ecx,%ebp
-	movl	56(%esp),%ecx
-	andl	%esi,%edx
-	rorl	$2,%esi
-	xorl	%ebx,%edx
-	leal	1518500249(%ebp,%ecx,1),%ebp
-	addl	%edx,%ebp
-
-	movl	%esi,%ecx
-	movl	%ebp,%edx
-	roll	$5,%ebp
-	xorl	%eax,%ecx
-	addl	%ebx,%ebp
-	movl	60(%esp),%ebx
-	andl	%edi,%ecx
-	rorl	$2,%edi
-	xorl	%eax,%ecx
-	leal	1518500249(%ebp,%ebx,1),%ebp
-	movl	(%esp),%ebx
-	addl	%ebp,%ecx
-
-	movl	%edi,%ebp
-	xorl	8(%esp),%ebx
-	xorl	%esi,%ebp
-	xorl	32(%esp),%ebx
-	andl	%edx,%ebp
-	xorl	52(%esp),%ebx
-	roll	$1,%ebx
-	xorl	%esi,%ebp
-	addl	%ebp,%eax
-	movl	%ecx,%ebp
-	rorl	$2,%edx
-	movl	%ebx,(%esp)
-	roll	$5,%ebp
-	leal	1518500249(%ebx,%eax,1),%ebx
-	movl	4(%esp),%eax
-	addl	%ebp,%ebx
-
-	movl	%edx,%ebp
-	xorl	12(%esp),%eax
-	xorl	%edi,%ebp
-	xorl	36(%esp),%eax
-	andl	%ecx,%ebp
-	xorl	56(%esp),%eax
-	roll	$1,%eax
-	xorl	%edi,%ebp
-	addl	%ebp,%esi
-	movl	%ebx,%ebp
-	rorl	$2,%ecx
-	movl	%eax,4(%esp)
-	roll	$5,%ebp
-	leal	1518500249(%eax,%esi,1),%eax
-	movl	8(%esp),%esi
-	addl	%ebp,%eax
-
-	movl	%ecx,%ebp
-	xorl	16(%esp),%esi
-	xorl	%edx,%ebp
-	xorl	40(%esp),%esi
-	andl	%ebx,%ebp
-	xorl	60(%esp),%esi
-	roll	$1,%esi
-	xorl	%edx,%ebp
-	addl	%ebp,%edi
-	movl	%eax,%ebp
-	rorl	$2,%ebx
-	movl	%esi,8(%esp)
-	roll	$5,%ebp
-	leal	1518500249(%esi,%edi,1),%esi
-	movl	12(%esp),%edi
-	addl	%ebp,%esi
-
-	movl	%ebx,%ebp
-	xorl	20(%esp),%edi
-	xorl	%ecx,%ebp
-	xorl	44(%esp),%edi
-	andl	%eax,%ebp
-	xorl	(%esp),%edi
-	roll	$1,%edi
-	xorl	%ecx,%ebp
-	addl	%ebp,%edx
-	movl	%esi,%ebp
-	rorl	$2,%eax
-	movl	%edi,12(%esp)
-	roll	$5,%ebp
-	leal	1518500249(%edi,%edx,1),%edi
-	movl	16(%esp),%edx
-	addl	%ebp,%edi
-
-	movl	%esi,%ebp
-	xorl	24(%esp),%edx
-	xorl	%eax,%ebp
-	xorl	48(%esp),%edx
-	xorl	%ebx,%ebp
-	xorl	4(%esp),%edx
-	roll	$1,%edx
-	addl	%ebp,%ecx
-	rorl	$2,%esi
-	movl	%edi,%ebp
-	roll	$5,%ebp
-	movl	%edx,16(%esp)
-	leal	1859775393(%edx,%ecx,1),%edx
-	movl	20(%esp),%ecx
-	addl	%ebp,%edx
-
-	movl	%edi,%ebp
-	xorl	28(%esp),%ecx
-	xorl	%esi,%ebp
-	xorl	52(%esp),%ecx
-	xorl	%eax,%ebp
-	xorl	8(%esp),%ecx
-	roll	$1,%ecx
-	addl	%ebp,%ebx
-	rorl	$2,%edi
-	movl	%edx,%ebp
-	roll	$5,%ebp
-	movl	%ecx,20(%esp)
-	leal	1859775393(%ecx,%ebx,1),%ecx
-	movl	24(%esp),%ebx
-	addl	%ebp,%ecx
-
-	movl	%edx,%ebp
-	xorl	32(%esp),%ebx
-	xorl	%edi,%ebp
-	xorl	56(%esp),%ebx
-	xorl	%esi,%ebp
-	xorl	12(%esp),%ebx
-	roll	$1,%ebx
-	addl	%ebp,%eax
-	rorl	$2,%edx
-	movl	%ecx,%ebp
-	roll	$5,%ebp
-	movl	%ebx,24(%esp)
-	leal	1859775393(%ebx,%eax,1),%ebx
-	movl	28(%esp),%eax
-	addl	%ebp,%ebx
-
-	movl	%ecx,%ebp
-	xorl	36(%esp),%eax
-	xorl	%edx,%ebp
-	xorl	60(%esp),%eax
-	xorl	%edi,%ebp
-	xorl	16(%esp),%eax
-	roll	$1,%eax
-	addl	%ebp,%esi
-	rorl	$2,%ecx
-	movl	%ebx,%ebp
-	roll	$5,%ebp
-	movl	%eax,28(%esp)
-	leal	1859775393(%eax,%esi,1),%eax
-	movl	32(%esp),%esi
-	addl	%ebp,%eax
-
-	movl	%ebx,%ebp
-	xorl	40(%esp),%esi
-	xorl	%ecx,%ebp
-	xorl	(%esp),%esi
-	xorl	%edx,%ebp
-	xorl	20(%esp),%esi
-	roll	$1,%esi
-	addl	%ebp,%edi
-	rorl	$2,%ebx
-	movl	%eax,%ebp
-	roll	$5,%ebp
-	movl	%esi,32(%esp)
-	leal	1859775393(%esi,%edi,1),%esi
-	movl	36(%esp),%edi
-	addl	%ebp,%esi
-
-	movl	%eax,%ebp
-	xorl	44(%esp),%edi
-	xorl	%ebx,%ebp
-	xorl	4(%esp),%edi
-	xorl	%ecx,%ebp
-	xorl	24(%esp),%edi
-	roll	$1,%edi
-	addl	%ebp,%edx
-	rorl	$2,%eax
-	movl	%esi,%ebp
-	roll	$5,%ebp
-	movl	%edi,36(%esp)
-	leal	1859775393(%edi,%edx,1),%edi
-	movl	40(%esp),%edx
-	addl	%ebp,%edi
-
-	movl	%esi,%ebp
-	xorl	48(%esp),%edx
-	xorl	%eax,%ebp
-	xorl	8(%esp),%edx
-	xorl	%ebx,%ebp
-	xorl	28(%esp),%edx
-	roll	$1,%edx
-	addl	%ebp,%ecx
-	rorl	$2,%esi
-	movl	%edi,%ebp
-	roll	$5,%ebp
-	movl	%edx,40(%esp)
-	leal	1859775393(%edx,%ecx,1),%edx
-	movl	44(%esp),%ecx
-	addl	%ebp,%edx
-
-	movl	%edi,%ebp
-	xorl	52(%esp),%ecx
-	xorl	%esi,%ebp
-	xorl	12(%esp),%ecx
-	xorl	%eax,%ebp
-	xorl	32(%esp),%ecx
-	roll	$1,%ecx
-	addl	%ebp,%ebx
-	rorl	$2,%edi
-	movl	%edx,%ebp
-	roll	$5,%ebp
-	movl	%ecx,44(%esp)
-	leal	1859775393(%ecx,%ebx,1),%ecx
-	movl	48(%esp),%ebx
-	addl	%ebp,%ecx
-
-	movl	%edx,%ebp
-	xorl	56(%esp),%ebx
-	xorl	%edi,%ebp
-	xorl	16(%esp),%ebx
-	xorl	%esi,%ebp
-	xorl	36(%esp),%ebx
-	roll	$1,%ebx
-	addl	%ebp,%eax
-	rorl	$2,%edx
-	movl	%ecx,%ebp
-	roll	$5,%ebp
-	movl	%ebx,48(%esp)
-	leal	1859775393(%ebx,%eax,1),%ebx
-	movl	52(%esp),%eax
-	addl	%ebp,%ebx
-
-	movl	%ecx,%ebp
-	xorl	60(%esp),%eax
-	xorl	%edx,%ebp
-	xorl	20(%esp),%eax
-	xorl	%edi,%ebp
-	xorl	40(%esp),%eax
-	roll	$1,%eax
-	addl	%ebp,%esi
-	rorl	$2,%ecx
-	movl	%ebx,%ebp
-	roll	$5,%ebp
-	movl	%eax,52(%esp)
-	leal	1859775393(%eax,%esi,1),%eax
-	movl	56(%esp),%esi
-	addl	%ebp,%eax
-
-	movl	%ebx,%ebp
-	xorl	(%esp),%esi
-	xorl	%ecx,%ebp
-	xorl	24(%esp),%esi
-	xorl	%edx,%ebp
-	xorl	44(%esp),%esi
-	roll	$1,%esi
-	addl	%ebp,%edi
-	rorl	$2,%ebx
-	movl	%eax,%ebp
-	roll	$5,%ebp
-	movl	%esi,56(%esp)
-	leal	1859775393(%esi,%edi,1),%esi
-	movl	60(%esp),%edi
-	addl	%ebp,%esi
-
-	movl	%eax,%ebp
-	xorl	4(%esp),%edi
-	xorl	%ebx,%ebp
-	xorl	28(%esp),%edi
-	xorl	%ecx,%ebp
-	xorl	48(%esp),%edi
-	roll	$1,%edi
-	addl	%ebp,%edx
-	rorl	$2,%eax
-	movl	%esi,%ebp
-	roll	$5,%ebp
-	movl	%edi,60(%esp)
-	leal	1859775393(%edi,%edx,1),%edi
-	movl	(%esp),%edx
-	addl	%ebp,%edi
-
-	movl	%esi,%ebp
-	xorl	8(%esp),%edx
-	xorl	%eax,%ebp
-	xorl	32(%esp),%edx
-	xorl	%ebx,%ebp
-	xorl	52(%esp),%edx
-	roll	$1,%edx
-	addl	%ebp,%ecx
-	rorl	$2,%esi
-	movl	%edi,%ebp
-	roll	$5,%ebp
-	movl	%edx,(%esp)
-	leal	1859775393(%edx,%ecx,1),%edx
-	movl	4(%esp),%ecx
-	addl	%ebp,%edx
-
-	movl	%edi,%ebp
-	xorl	12(%esp),%ecx
-	xorl	%esi,%ebp
-	xorl	36(%esp),%ecx
-	xorl	%eax,%ebp
-	xorl	56(%esp),%ecx
-	roll	$1,%ecx
-	addl	%ebp,%ebx
-	rorl	$2,%edi
-	movl	%edx,%ebp
-	roll	$5,%ebp
-	movl	%ecx,4(%esp)
-	leal	1859775393(%ecx,%ebx,1),%ecx
-	movl	8(%esp),%ebx
-	addl	%ebp,%ecx
-
-	movl	%edx,%ebp
-	xorl	16(%esp),%ebx
-	xorl	%edi,%ebp
-	xorl	40(%esp),%ebx
-	xorl	%esi,%ebp
-	xorl	60(%esp),%ebx
-	roll	$1,%ebx
-	addl	%ebp,%eax
-	rorl	$2,%edx
-	movl	%ecx,%ebp
-	roll	$5,%ebp
-	movl	%ebx,8(%esp)
-	leal	1859775393(%ebx,%eax,1),%ebx
-	movl	12(%esp),%eax
-	addl	%ebp,%ebx
-
-	movl	%ecx,%ebp
-	xorl	20(%esp),%eax
-	xorl	%edx,%ebp
-	xorl	44(%esp),%eax
-	xorl	%edi,%ebp
-	xorl	(%esp),%eax
-	roll	$1,%eax
-	addl	%ebp,%esi
-	rorl	$2,%ecx
-	movl	%ebx,%ebp
-	roll	$5,%ebp
-	movl	%eax,12(%esp)
-	leal	1859775393(%eax,%esi,1),%eax
-	movl	16(%esp),%esi
-	addl	%ebp,%eax
-
-	movl	%ebx,%ebp
-	xorl	24(%esp),%esi
-	xorl	%ecx,%ebp
-	xorl	48(%esp),%esi
-	xorl	%edx,%ebp
-	xorl	4(%esp),%esi
-	roll	$1,%esi
-	addl	%ebp,%edi
-	rorl	$2,%ebx
-	movl	%eax,%ebp
-	roll	$5,%ebp
-	movl	%esi,16(%esp)
-	leal	1859775393(%esi,%edi,1),%esi
-	movl	20(%esp),%edi
-	addl	%ebp,%esi
-
-	movl	%eax,%ebp
-	xorl	28(%esp),%edi
-	xorl	%ebx,%ebp
-	xorl	52(%esp),%edi
-	xorl	%ecx,%ebp
-	xorl	8(%esp),%edi
-	roll	$1,%edi
-	addl	%ebp,%edx
-	rorl	$2,%eax
-	movl	%esi,%ebp
-	roll	$5,%ebp
-	movl	%edi,20(%esp)
-	leal	1859775393(%edi,%edx,1),%edi
-	movl	24(%esp),%edx
-	addl	%ebp,%edi
-
-	movl	%esi,%ebp
-	xorl	32(%esp),%edx
-	xorl	%eax,%ebp
-	xorl	56(%esp),%edx
-	xorl	%ebx,%ebp
-	xorl	12(%esp),%edx
-	roll	$1,%edx
-	addl	%ebp,%ecx
-	rorl	$2,%esi
-	movl	%edi,%ebp
-	roll	$5,%ebp
-	movl	%edx,24(%esp)
-	leal	1859775393(%edx,%ecx,1),%edx
-	movl	28(%esp),%ecx
-	addl	%ebp,%edx
-
-	movl	%edi,%ebp
-	xorl	36(%esp),%ecx
-	xorl	%esi,%ebp
-	xorl	60(%esp),%ecx
-	xorl	%eax,%ebp
-	xorl	16(%esp),%ecx
-	roll	$1,%ecx
-	addl	%ebp,%ebx
-	rorl	$2,%edi
-	movl	%edx,%ebp
-	roll	$5,%ebp
-	movl	%ecx,28(%esp)
-	leal	1859775393(%ecx,%ebx,1),%ecx
-	movl	32(%esp),%ebx
-	addl	%ebp,%ecx
-
-	movl	%edi,%ebp
-	xorl	40(%esp),%ebx
-	xorl	%esi,%ebp
-	xorl	(%esp),%ebx
-	andl	%edx,%ebp
-	xorl	20(%esp),%ebx
-	roll	$1,%ebx
-	addl	%eax,%ebp
-	rorl	$2,%edx
-	movl	%ecx,%eax
-	roll	$5,%eax
-	movl	%ebx,32(%esp)
-	leal	2400959708(%ebx,%ebp,1),%ebx
-	movl	%edi,%ebp
-	addl	%eax,%ebx
-	andl	%esi,%ebp
-	movl	36(%esp),%eax
-	addl	%ebp,%ebx
-
-	movl	%edx,%ebp
-	xorl	44(%esp),%eax
-	xorl	%edi,%ebp
-	xorl	4(%esp),%eax
-	andl	%ecx,%ebp
-	xorl	24(%esp),%eax
-	roll	$1,%eax
-	addl	%esi,%ebp
-	rorl	$2,%ecx
-	movl	%ebx,%esi
-	roll	$5,%esi
-	movl	%eax,36(%esp)
-	leal	2400959708(%eax,%ebp,1),%eax
-	movl	%edx,%ebp
-	addl	%esi,%eax
-	andl	%edi,%ebp
-	movl	40(%esp),%esi
-	addl	%ebp,%eax
-
-	movl	%ecx,%ebp
-	xorl	48(%esp),%esi
-	xorl	%edx,%ebp
-	xorl	8(%esp),%esi
-	andl	%ebx,%ebp
-	xorl	28(%esp),%esi
-	roll	$1,%esi
-	addl	%edi,%ebp
-	rorl	$2,%ebx
-	movl	%eax,%edi
-	roll	$5,%edi
-	movl	%esi,40(%esp)
-	leal	2400959708(%esi,%ebp,1),%esi
-	movl	%ecx,%ebp
-	addl	%edi,%esi
-	andl	%edx,%ebp
-	movl	44(%esp),%edi
-	addl	%ebp,%esi
-
-	movl	%ebx,%ebp
-	xorl	52(%esp),%edi
-	xorl	%ecx,%ebp
-	xorl	12(%esp),%edi
-	andl	%eax,%ebp
-	xorl	32(%esp),%edi
-	roll	$1,%edi
-	addl	%edx,%ebp
-	rorl	$2,%eax
-	movl	%esi,%edx
-	roll	$5,%edx
-	movl	%edi,44(%esp)
-	leal	2400959708(%edi,%ebp,1),%edi
-	movl	%ebx,%ebp
-	addl	%edx,%edi
-	andl	%ecx,%ebp
-	movl	48(%esp),%edx
-	addl	%ebp,%edi
-
-	movl	%eax,%ebp
-	xorl	56(%esp),%edx
-	xorl	%ebx,%ebp
-	xorl	16(%esp),%edx
-	andl	%esi,%ebp
-	xorl	36(%esp),%edx
-	roll	$1,%edx
-	addl	%ecx,%ebp
-	rorl	$2,%esi
-	movl	%edi,%ecx
-	roll	$5,%ecx
-	movl	%edx,48(%esp)
-	leal	2400959708(%edx,%ebp,1),%edx
-	movl	%eax,%ebp
-	addl	%ecx,%edx
-	andl	%ebx,%ebp
-	movl	52(%esp),%ecx
-	addl	%ebp,%edx
-
-	movl	%esi,%ebp
-	xorl	60(%esp),%ecx
-	xorl	%eax,%ebp
-	xorl	20(%esp),%ecx
-	andl	%edi,%ebp
-	xorl	40(%esp),%ecx
-	roll	$1,%ecx
-	addl	%ebx,%ebp
-	rorl	$2,%edi
-	movl	%edx,%ebx
-	roll	$5,%ebx
-	movl	%ecx,52(%esp)
-	leal	2400959708(%ecx,%ebp,1),%ecx
-	movl	%esi,%ebp
-	addl	%ebx,%ecx
-	andl	%eax,%ebp
-	movl	56(%esp),%ebx
-	addl	%ebp,%ecx
-
-	movl	%edi,%ebp
-	xorl	(%esp),%ebx
-	xorl	%esi,%ebp
-	xorl	24(%esp),%ebx
-	andl	%edx,%ebp
-	xorl	44(%esp),%ebx
-	roll	$1,%ebx
-	addl	%eax,%ebp
-	rorl	$2,%edx
-	movl	%ecx,%eax
-	roll	$5,%eax
-	movl	%ebx,56(%esp)
-	leal	2400959708(%ebx,%ebp,1),%ebx
-	movl	%edi,%ebp
-	addl	%eax,%ebx
-	andl	%esi,%ebp
-	movl	60(%esp),%eax
-	addl	%ebp,%ebx
-
-	movl	%edx,%ebp
-	xorl	4(%esp),%eax
-	xorl	%edi,%ebp
-	xorl	28(%esp),%eax
-	andl	%ecx,%ebp
-	xorl	48(%esp),%eax
-	roll	$1,%eax
-	addl	%esi,%ebp
-	rorl	$2,%ecx
-	movl	%ebx,%esi
-	roll	$5,%esi
-	movl	%eax,60(%esp)
-	leal	2400959708(%eax,%ebp,1),%eax
-	movl	%edx,%ebp
-	addl	%esi,%eax
-	andl	%edi,%ebp
-	movl	(%esp),%esi
-	addl	%ebp,%eax
-
-	movl	%ecx,%ebp
-	xorl	8(%esp),%esi
-	xorl	%edx,%ebp
-	xorl	32(%esp),%esi
-	andl	%ebx,%ebp
-	xorl	52(%esp),%esi
-	roll	$1,%esi
-	addl	%edi,%ebp
-	rorl	$2,%ebx
-	movl	%eax,%edi
-	roll	$5,%edi
-	movl	%esi,(%esp)
-	leal	2400959708(%esi,%ebp,1),%esi
-	movl	%ecx,%ebp
-	addl	%edi,%esi
-	andl	%edx,%ebp
-	movl	4(%esp),%edi
-	addl	%ebp,%esi
-
-	movl	%ebx,%ebp
-	xorl	12(%esp),%edi
-	xorl	%ecx,%ebp
-	xorl	36(%esp),%edi
-	andl	%eax,%ebp
-	xorl	56(%esp),%edi
-	roll	$1,%edi
-	addl	%edx,%ebp
-	rorl	$2,%eax
-	movl	%esi,%edx
-	roll	$5,%edx
-	movl	%edi,4(%esp)
-	leal	2400959708(%edi,%ebp,1),%edi
-	movl	%ebx,%ebp
-	addl	%edx,%edi
-	andl	%ecx,%ebp
-	movl	8(%esp),%edx
-	addl	%ebp,%edi
-
-	movl	%eax,%ebp
-	xorl	16(%esp),%edx
-	xorl	%ebx,%ebp
-	xorl	40(%esp),%edx
-	andl	%esi,%ebp
-	xorl	60(%esp),%edx
-	roll	$1,%edx
-	addl	%ecx,%ebp
-	rorl	$2,%esi
-	movl	%edi,%ecx
-	roll	$5,%ecx
-	movl	%edx,8(%esp)
-	leal	2400959708(%edx,%ebp,1),%edx
-	movl	%eax,%ebp
-	addl	%ecx,%edx
-	andl	%ebx,%ebp
-	movl	12(%esp),%ecx
-	addl	%ebp,%edx
-
-	movl	%esi,%ebp
-	xorl	20(%esp),%ecx
-	xorl	%eax,%ebp
-	xorl	44(%esp),%ecx
-	andl	%edi,%ebp
-	xorl	(%esp),%ecx
-	roll	$1,%ecx
-	addl	%ebx,%ebp
-	rorl	$2,%edi
-	movl	%edx,%ebx
-	roll	$5,%ebx
-	movl	%ecx,12(%esp)
-	leal	2400959708(%ecx,%ebp,1),%ecx
-	movl	%esi,%ebp
-	addl	%ebx,%ecx
-	andl	%eax,%ebp
-	movl	16(%esp),%ebx
-	addl	%ebp,%ecx
-
-	movl	%edi,%ebp
-	xorl	24(%esp),%ebx
-	xorl	%esi,%ebp
-	xorl	48(%esp),%ebx
-	andl	%edx,%ebp
-	xorl	4(%esp),%ebx
-	roll	$1,%ebx
-	addl	%eax,%ebp
-	rorl	$2,%edx
-	movl	%ecx,%eax
-	roll	$5,%eax
-	movl	%ebx,16(%esp)
-	leal	2400959708(%ebx,%ebp,1),%ebx
-	movl	%edi,%ebp
-	addl	%eax,%ebx
-	andl	%esi,%ebp
-	movl	20(%esp),%eax
-	addl	%ebp,%ebx
-
-	movl	%edx,%ebp
-	xorl	28(%esp),%eax
-	xorl	%edi,%ebp
-	xorl	52(%esp),%eax
-	andl	%ecx,%ebp
-	xorl	8(%esp),%eax
-	roll	$1,%eax
-	addl	%esi,%ebp
-	rorl	$2,%ecx
-	movl	%ebx,%esi
-	roll	$5,%esi
-	movl	%eax,20(%esp)
-	leal	2400959708(%eax,%ebp,1),%eax
-	movl	%edx,%ebp
-	addl	%esi,%eax
-	andl	%edi,%ebp
-	movl	24(%esp),%esi
-	addl	%ebp,%eax
-
-	movl	%ecx,%ebp
-	xorl	32(%esp),%esi
-	xorl	%edx,%ebp
-	xorl	56(%esp),%esi
-	andl	%ebx,%ebp
-	xorl	12(%esp),%esi
-	roll	$1,%esi
-	addl	%edi,%ebp
-	rorl	$2,%ebx
-	movl	%eax,%edi
-	roll	$5,%edi
-	movl	%esi,24(%esp)
-	leal	2400959708(%esi,%ebp,1),%esi
-	movl	%ecx,%ebp
-	addl	%edi,%esi
-	andl	%edx,%ebp
-	movl	28(%esp),%edi
-	addl	%ebp,%esi
-
-	movl	%ebx,%ebp
-	xorl	36(%esp),%edi
-	xorl	%ecx,%ebp
-	xorl	60(%esp),%edi
-	andl	%eax,%ebp
-	xorl	16(%esp),%edi
-	roll	$1,%edi
-	addl	%edx,%ebp
-	rorl	$2,%eax
-	movl	%esi,%edx
-	roll	$5,%edx
-	movl	%edi,28(%esp)
-	leal	2400959708(%edi,%ebp,1),%edi
-	movl	%ebx,%ebp
-	addl	%edx,%edi
-	andl	%ecx,%ebp
-	movl	32(%esp),%edx
-	addl	%ebp,%edi
-
-	movl	%eax,%ebp
-	xorl	40(%esp),%edx
-	xorl	%ebx,%ebp
-	xorl	(%esp),%edx
-	andl	%esi,%ebp
-	xorl	20(%esp),%edx
-	roll	$1,%edx
-	addl	%ecx,%ebp
-	rorl	$2,%esi
-	movl	%edi,%ecx
-	roll	$5,%ecx
-	movl	%edx,32(%esp)
-	leal	2400959708(%edx,%ebp,1),%edx
-	movl	%eax,%ebp
-	addl	%ecx,%edx
-	andl	%ebx,%ebp
-	movl	36(%esp),%ecx
-	addl	%ebp,%edx
-
-	movl	%esi,%ebp
-	xorl	44(%esp),%ecx
-	xorl	%eax,%ebp
-	xorl	4(%esp),%ecx
-	andl	%edi,%ebp
-	xorl	24(%esp),%ecx
-	roll	$1,%ecx
-	addl	%ebx,%ebp
-	rorl	$2,%edi
-	movl	%edx,%ebx
-	roll	$5,%ebx
-	movl	%ecx,36(%esp)
-	leal	2400959708(%ecx,%ebp,1),%ecx
-	movl	%esi,%ebp
-	addl	%ebx,%ecx
-	andl	%eax,%ebp
-	movl	40(%esp),%ebx
-	addl	%ebp,%ecx
-
-	movl	%edi,%ebp
-	xorl	48(%esp),%ebx
-	xorl	%esi,%ebp
-	xorl	8(%esp),%ebx
-	andl	%edx,%ebp
-	xorl	28(%esp),%ebx
-	roll	$1,%ebx
-	addl	%eax,%ebp
-	rorl	$2,%edx
-	movl	%ecx,%eax
-	roll	$5,%eax
-	movl	%ebx,40(%esp)
-	leal	2400959708(%ebx,%ebp,1),%ebx
-	movl	%edi,%ebp
-	addl	%eax,%ebx
-	andl	%esi,%ebp
-	movl	44(%esp),%eax
-	addl	%ebp,%ebx
-
-	movl	%edx,%ebp
-	xorl	52(%esp),%eax
-	xorl	%edi,%ebp
-	xorl	12(%esp),%eax
-	andl	%ecx,%ebp
-	xorl	32(%esp),%eax
-	roll	$1,%eax
-	addl	%esi,%ebp
-	rorl	$2,%ecx
-	movl	%ebx,%esi
-	roll	$5,%esi
-	movl	%eax,44(%esp)
-	leal	2400959708(%eax,%ebp,1),%eax
-	movl	%edx,%ebp
-	addl	%esi,%eax
-	andl	%edi,%ebp
-	movl	48(%esp),%esi
-	addl	%ebp,%eax
-
-	movl	%ebx,%ebp
-	xorl	56(%esp),%esi
-	xorl	%ecx,%ebp
-	xorl	16(%esp),%esi
-	xorl	%edx,%ebp
-	xorl	36(%esp),%esi
-	roll	$1,%esi
-	addl	%ebp,%edi
-	rorl	$2,%ebx
-	movl	%eax,%ebp
-	roll	$5,%ebp
-	movl	%esi,48(%esp)
-	leal	3395469782(%esi,%edi,1),%esi
-	movl	52(%esp),%edi
-	addl	%ebp,%esi
-
-	movl	%eax,%ebp
-	xorl	60(%esp),%edi
-	xorl	%ebx,%ebp
-	xorl	20(%esp),%edi
-	xorl	%ecx,%ebp
-	xorl	40(%esp),%edi
-	roll	$1,%edi
-	addl	%ebp,%edx
-	rorl	$2,%eax
-	movl	%esi,%ebp
-	roll	$5,%ebp
-	movl	%edi,52(%esp)
-	leal	3395469782(%edi,%edx,1),%edi
-	movl	56(%esp),%edx
-	addl	%ebp,%edi
-
-	movl	%esi,%ebp
-	xorl	(%esp),%edx
-	xorl	%eax,%ebp
-	xorl	24(%esp),%edx
-	xorl	%ebx,%ebp
-	xorl	44(%esp),%edx
-	roll	$1,%edx
-	addl	%ebp,%ecx
-	rorl	$2,%esi
-	movl	%edi,%ebp
-	roll	$5,%ebp
-	movl	%edx,56(%esp)
-	leal	3395469782(%edx,%ecx,1),%edx
-	movl	60(%esp),%ecx
-	addl	%ebp,%edx
-
-	movl	%edi,%ebp
-	xorl	4(%esp),%ecx
-	xorl	%esi,%ebp
-	xorl	28(%esp),%ecx
-	xorl	%eax,%ebp
-	xorl	48(%esp),%ecx
-	roll	$1,%ecx
-	addl	%ebp,%ebx
-	rorl	$2,%edi
-	movl	%edx,%ebp
-	roll	$5,%ebp
-	movl	%ecx,60(%esp)
-	leal	3395469782(%ecx,%ebx,1),%ecx
-	movl	(%esp),%ebx
-	addl	%ebp,%ecx
-
-	movl	%edx,%ebp
-	xorl	8(%esp),%ebx
-	xorl	%edi,%ebp
-	xorl	32(%esp),%ebx
-	xorl	%esi,%ebp
-	xorl	52(%esp),%ebx
-	roll	$1,%ebx
-	addl	%ebp,%eax
-	rorl	$2,%edx
-	movl	%ecx,%ebp
-	roll	$5,%ebp
-	movl	%ebx,(%esp)
-	leal	3395469782(%ebx,%eax,1),%ebx
-	movl	4(%esp),%eax
-	addl	%ebp,%ebx
-
-	movl	%ecx,%ebp
-	xorl	12(%esp),%eax
-	xorl	%edx,%ebp
-	xorl	36(%esp),%eax
-	xorl	%edi,%ebp
-	xorl	56(%esp),%eax
-	roll	$1,%eax
-	addl	%ebp,%esi
-	rorl	$2,%ecx
-	movl	%ebx,%ebp
-	roll	$5,%ebp
-	movl	%eax,4(%esp)
-	leal	3395469782(%eax,%esi,1),%eax
-	movl	8(%esp),%esi
-	addl	%ebp,%eax
-
-	movl	%ebx,%ebp
-	xorl	16(%esp),%esi
-	xorl	%ecx,%ebp
-	xorl	40(%esp),%esi
-	xorl	%edx,%ebp
-	xorl	60(%esp),%esi
-	roll	$1,%esi
-	addl	%ebp,%edi
-	rorl	$2,%ebx
-	movl	%eax,%ebp
-	roll	$5,%ebp
-	movl	%esi,8(%esp)
-	leal	3395469782(%esi,%edi,1),%esi
-	movl	12(%esp),%edi
-	addl	%ebp,%esi
-
-	movl	%eax,%ebp
-	xorl	20(%esp),%edi
-	xorl	%ebx,%ebp
-	xorl	44(%esp),%edi
-	xorl	%ecx,%ebp
-	xorl	(%esp),%edi
-	roll	$1,%edi
-	addl	%ebp,%edx
-	rorl	$2,%eax
-	movl	%esi,%ebp
-	roll	$5,%ebp
-	movl	%edi,12(%esp)
-	leal	3395469782(%edi,%edx,1),%edi
-	movl	16(%esp),%edx
-	addl	%ebp,%edi
-
-	movl	%esi,%ebp
-	xorl	24(%esp),%edx
-	xorl	%eax,%ebp
-	xorl	48(%esp),%edx
-	xorl	%ebx,%ebp
-	xorl	4(%esp),%edx
-	roll	$1,%edx
-	addl	%ebp,%ecx
-	rorl	$2,%esi
-	movl	%edi,%ebp
-	roll	$5,%ebp
-	movl	%edx,16(%esp)
-	leal	3395469782(%edx,%ecx,1),%edx
-	movl	20(%esp),%ecx
-	addl	%ebp,%edx
-
-	movl	%edi,%ebp
-	xorl	28(%esp),%ecx
-	xorl	%esi,%ebp
-	xorl	52(%esp),%ecx
-	xorl	%eax,%ebp
-	xorl	8(%esp),%ecx
-	roll	$1,%ecx
-	addl	%ebp,%ebx
-	rorl	$2,%edi
-	movl	%edx,%ebp
-	roll	$5,%ebp
-	movl	%ecx,20(%esp)
-	leal	3395469782(%ecx,%ebx,1),%ecx
-	movl	24(%esp),%ebx
-	addl	%ebp,%ecx
-
-	movl	%edx,%ebp
-	xorl	32(%esp),%ebx
-	xorl	%edi,%ebp
-	xorl	56(%esp),%ebx
-	xorl	%esi,%ebp
-	xorl	12(%esp),%ebx
-	roll	$1,%ebx
-	addl	%ebp,%eax
-	rorl	$2,%edx
-	movl	%ecx,%ebp
-	roll	$5,%ebp
-	movl	%ebx,24(%esp)
-	leal	3395469782(%ebx,%eax,1),%ebx
-	movl	28(%esp),%eax
-	addl	%ebp,%ebx
-
-	movl	%ecx,%ebp
-	xorl	36(%esp),%eax
-	xorl	%edx,%ebp
-	xorl	60(%esp),%eax
-	xorl	%edi,%ebp
-	xorl	16(%esp),%eax
-	roll	$1,%eax
-	addl	%ebp,%esi
-	rorl	$2,%ecx
-	movl	%ebx,%ebp
-	roll	$5,%ebp
-	movl	%eax,28(%esp)
-	leal	3395469782(%eax,%esi,1),%eax
-	movl	32(%esp),%esi
-	addl	%ebp,%eax
-
-	movl	%ebx,%ebp
-	xorl	40(%esp),%esi
-	xorl	%ecx,%ebp
-	xorl	(%esp),%esi
-	xorl	%edx,%ebp
-	xorl	20(%esp),%esi
-	roll	$1,%esi
-	addl	%ebp,%edi
-	rorl	$2,%ebx
-	movl	%eax,%ebp
-	roll	$5,%ebp
-	movl	%esi,32(%esp)
-	leal	3395469782(%esi,%edi,1),%esi
-	movl	36(%esp),%edi
-	addl	%ebp,%esi
-
-	movl	%eax,%ebp
-	xorl	44(%esp),%edi
-	xorl	%ebx,%ebp
-	xorl	4(%esp),%edi
-	xorl	%ecx,%ebp
-	xorl	24(%esp),%edi
-	roll	$1,%edi
-	addl	%ebp,%edx
-	rorl	$2,%eax
-	movl	%esi,%ebp
-	roll	$5,%ebp
-	movl	%edi,36(%esp)
-	leal	3395469782(%edi,%edx,1),%edi
-	movl	40(%esp),%edx
-	addl	%ebp,%edi
-
-	movl	%esi,%ebp
-	xorl	48(%esp),%edx
-	xorl	%eax,%ebp
-	xorl	8(%esp),%edx
-	xorl	%ebx,%ebp
-	xorl	28(%esp),%edx
-	roll	$1,%edx
-	addl	%ebp,%ecx
-	rorl	$2,%esi
-	movl	%edi,%ebp
-	roll	$5,%ebp
-	movl	%edx,40(%esp)
-	leal	3395469782(%edx,%ecx,1),%edx
-	movl	44(%esp),%ecx
-	addl	%ebp,%edx
-
-	movl	%edi,%ebp
-	xorl	52(%esp),%ecx
-	xorl	%esi,%ebp
-	xorl	12(%esp),%ecx
-	xorl	%eax,%ebp
-	xorl	32(%esp),%ecx
-	roll	$1,%ecx
-	addl	%ebp,%ebx
-	rorl	$2,%edi
-	movl	%edx,%ebp
-	roll	$5,%ebp
-	movl	%ecx,44(%esp)
-	leal	3395469782(%ecx,%ebx,1),%ecx
-	movl	48(%esp),%ebx
-	addl	%ebp,%ecx
-
-	movl	%edx,%ebp
-	xorl	56(%esp),%ebx
-	xorl	%edi,%ebp
-	xorl	16(%esp),%ebx
-	xorl	%esi,%ebp
-	xorl	36(%esp),%ebx
-	roll	$1,%ebx
-	addl	%ebp,%eax
-	rorl	$2,%edx
-	movl	%ecx,%ebp
-	roll	$5,%ebp
-	movl	%ebx,48(%esp)
-	leal	3395469782(%ebx,%eax,1),%ebx
-	movl	52(%esp),%eax
-	addl	%ebp,%ebx
-
-	movl	%ecx,%ebp
-	xorl	60(%esp),%eax
-	xorl	%edx,%ebp
-	xorl	20(%esp),%eax
-	xorl	%edi,%ebp
-	xorl	40(%esp),%eax
-	roll	$1,%eax
-	addl	%ebp,%esi
-	rorl	$2,%ecx
-	movl	%ebx,%ebp
-	roll	$5,%ebp
-	leal	3395469782(%eax,%esi,1),%eax
-	movl	56(%esp),%esi
-	addl	%ebp,%eax
-
-	movl	%ebx,%ebp
-	xorl	(%esp),%esi
-	xorl	%ecx,%ebp
-	xorl	24(%esp),%esi
-	xorl	%edx,%ebp
-	xorl	44(%esp),%esi
-	roll	$1,%esi
-	addl	%ebp,%edi
-	rorl	$2,%ebx
-	movl	%eax,%ebp
-	roll	$5,%ebp
-	leal	3395469782(%esi,%edi,1),%esi
-	movl	60(%esp),%edi
-	addl	%ebp,%esi
-
-	movl	%eax,%ebp
-	xorl	4(%esp),%edi
-	xorl	%ebx,%ebp
-	xorl	28(%esp),%edi
-	xorl	%ecx,%ebp
-	xorl	48(%esp),%edi
-	roll	$1,%edi
-	addl	%ebp,%edx
-	rorl	$2,%eax
-	movl	%esi,%ebp
-	roll	$5,%ebp
-	leal	3395469782(%edi,%edx,1),%edi
-	addl	%ebp,%edi
-	movl	96(%esp),%ebp
-	movl	100(%esp),%edx
-	addl	(%ebp),%edi
-	addl	4(%ebp),%esi
-	addl	8(%ebp),%eax
-	addl	12(%ebp),%ebx
-	addl	16(%ebp),%ecx
-	movl	%edi,(%ebp)
-	addl	$64,%edx
-	movl	%esi,4(%ebp)
-	cmpl	104(%esp),%edx
-	movl	%eax,8(%ebp)
-	movl	%ecx,%edi
-	movl	%ebx,12(%ebp)
-	movl	%edx,%esi
-	movl	%ecx,16(%ebp)
-	jb	.L002loop
-	addl	$76,%esp
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	sha1_block_data_order,.-.L_sha1_block_data_order_begin
-.type	_sha1_block_data_order_ssse3, at function
-.align	16
-_sha1_block_data_order_ssse3:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	call	.L003pic_point
-.L003pic_point:
-	popl	%ebp
-	leal	.LK_XX_XX-.L003pic_point(%ebp),%ebp
-.Lssse3_shortcut:
-	movdqa	(%ebp),%xmm7
-	movdqa	16(%ebp),%xmm0
-	movdqa	32(%ebp),%xmm1
-	movdqa	48(%ebp),%xmm2
-	movdqa	64(%ebp),%xmm6
-	movl	20(%esp),%edi
-	movl	24(%esp),%ebp
-	movl	28(%esp),%edx
-	movl	%esp,%esi
-	subl	$208,%esp
-	andl	$-64,%esp
-	movdqa	%xmm0,112(%esp)
-	movdqa	%xmm1,128(%esp)
-	movdqa	%xmm2,144(%esp)
-	shll	$6,%edx
-	movdqa	%xmm7,160(%esp)
-	addl	%ebp,%edx
-	movdqa	%xmm6,176(%esp)
-	addl	$64,%ebp
-	movl	%edi,192(%esp)
-	movl	%ebp,196(%esp)
-	movl	%edx,200(%esp)
-	movl	%esi,204(%esp)
-	movl	(%edi),%eax
-	movl	4(%edi),%ebx
-	movl	8(%edi),%ecx
-	movl	12(%edi),%edx
-	movl	16(%edi),%edi
-	movl	%ebx,%esi
-	movdqu	-64(%ebp),%xmm0
-	movdqu	-48(%ebp),%xmm1
-	movdqu	-32(%ebp),%xmm2
-	movdqu	-16(%ebp),%xmm3
-.byte	102,15,56,0,198
-.byte	102,15,56,0,206
-.byte	102,15,56,0,214
-	movdqa	%xmm7,96(%esp)
-.byte	102,15,56,0,222
-	paddd	%xmm7,%xmm0
-	paddd	%xmm7,%xmm1
-	paddd	%xmm7,%xmm2
-	movdqa	%xmm0,(%esp)
-	psubd	%xmm7,%xmm0
-	movdqa	%xmm1,16(%esp)
-	psubd	%xmm7,%xmm1
-	movdqa	%xmm2,32(%esp)
-	psubd	%xmm7,%xmm2
-	movdqa	%xmm1,%xmm4
-	jmp	.L004loop
-.align	16
-.L004loop:
-	addl	(%esp),%edi
-	xorl	%edx,%ecx
-.byte	102,15,58,15,224,8
-	movdqa	%xmm3,%xmm6
-	movl	%eax,%ebp
-	roll	$5,%eax
-	paddd	%xmm3,%xmm7
-	movdqa	%xmm0,64(%esp)
-	andl	%ecx,%esi
-	xorl	%edx,%ecx
-	psrldq	$4,%xmm6
-	xorl	%edx,%esi
-	addl	%eax,%edi
-	pxor	%xmm0,%xmm4
-	rorl	$2,%ebx
-	addl	%esi,%edi
-	pxor	%xmm2,%xmm6
-	addl	4(%esp),%edx
-	xorl	%ecx,%ebx
-	movl	%edi,%esi
-	roll	$5,%edi
-	pxor	%xmm6,%xmm4
-	andl	%ebx,%ebp
-	xorl	%ecx,%ebx
-	movdqa	%xmm7,48(%esp)
-	xorl	%ecx,%ebp
-	addl	%edi,%edx
-	movdqa	%xmm4,%xmm0
-	movdqa	%xmm4,%xmm6
-	rorl	$7,%eax
-	addl	%ebp,%edx
-	addl	8(%esp),%ecx
-	xorl	%ebx,%eax
-	pslldq	$12,%xmm0
-	paddd	%xmm4,%xmm4
-	movl	%edx,%ebp
-	roll	$5,%edx
-	andl	%eax,%esi
-	xorl	%ebx,%eax
-	psrld	$31,%xmm6
-	xorl	%ebx,%esi
-	addl	%edx,%ecx
-	movdqa	%xmm0,%xmm7
-	rorl	$7,%edi
-	addl	%esi,%ecx
-	psrld	$30,%xmm0
-	por	%xmm6,%xmm4
-	addl	12(%esp),%ebx
-	xorl	%eax,%edi
-	movl	%ecx,%esi
-	roll	$5,%ecx
-	pslld	$2,%xmm7
-	pxor	%xmm0,%xmm4
-	andl	%edi,%ebp
-	xorl	%eax,%edi
-	movdqa	96(%esp),%xmm0
-	xorl	%eax,%ebp
-	addl	%ecx,%ebx
-	pxor	%xmm7,%xmm4
-	movdqa	%xmm2,%xmm5
-	rorl	$7,%edx
-	addl	%ebp,%ebx
-	addl	16(%esp),%eax
-	xorl	%edi,%edx
-.byte	102,15,58,15,233,8
-	movdqa	%xmm4,%xmm7
-	movl	%ebx,%ebp
-	roll	$5,%ebx
-	paddd	%xmm4,%xmm0
-	movdqa	%xmm1,80(%esp)
-	andl	%edx,%esi
-	xorl	%edi,%edx
-	psrldq	$4,%xmm7
-	xorl	%edi,%esi
-	addl	%ebx,%eax
-	pxor	%xmm1,%xmm5
-	rorl	$7,%ecx
-	addl	%esi,%eax
-	pxor	%xmm3,%xmm7
-	addl	20(%esp),%edi
-	xorl	%edx,%ecx
-	movl	%eax,%esi
-	roll	$5,%eax
-	pxor	%xmm7,%xmm5
-	andl	%ecx,%ebp
-	xorl	%edx,%ecx
-	movdqa	%xmm0,(%esp)
-	xorl	%edx,%ebp
-	addl	%eax,%edi
-	movdqa	%xmm5,%xmm1
-	movdqa	%xmm5,%xmm7
-	rorl	$7,%ebx
-	addl	%ebp,%edi
-	addl	24(%esp),%edx
-	xorl	%ecx,%ebx
-	pslldq	$12,%xmm1
-	paddd	%xmm5,%xmm5
-	movl	%edi,%ebp
-	roll	$5,%edi
-	andl	%ebx,%esi
-	xorl	%ecx,%ebx
-	psrld	$31,%xmm7
-	xorl	%ecx,%esi
-	addl	%edi,%edx
-	movdqa	%xmm1,%xmm0
-	rorl	$7,%eax
-	addl	%esi,%edx
-	psrld	$30,%xmm1
-	por	%xmm7,%xmm5
-	addl	28(%esp),%ecx
-	xorl	%ebx,%eax
-	movl	%edx,%esi
-	roll	$5,%edx
-	pslld	$2,%xmm0
-	pxor	%xmm1,%xmm5
-	andl	%eax,%ebp
-	xorl	%ebx,%eax
-	movdqa	112(%esp),%xmm1
-	xorl	%ebx,%ebp
-	addl	%edx,%ecx
-	pxor	%xmm0,%xmm5
-	movdqa	%xmm3,%xmm6
-	rorl	$7,%edi
-	addl	%ebp,%ecx
-	addl	32(%esp),%ebx
-	xorl	%eax,%edi
-.byte	102,15,58,15,242,8
-	movdqa	%xmm5,%xmm0
-	movl	%ecx,%ebp
-	roll	$5,%ecx
-	paddd	%xmm5,%xmm1
-	movdqa	%xmm2,96(%esp)
-	andl	%edi,%esi
-	xorl	%eax,%edi
-	psrldq	$4,%xmm0
-	xorl	%eax,%esi
-	addl	%ecx,%ebx
-	pxor	%xmm2,%xmm6
-	rorl	$7,%edx
-	addl	%esi,%ebx
-	pxor	%xmm4,%xmm0
-	addl	36(%esp),%eax
-	xorl	%edi,%edx
-	movl	%ebx,%esi
-	roll	$5,%ebx
-	pxor	%xmm0,%xmm6
-	andl	%edx,%ebp
-	xorl	%edi,%edx
-	movdqa	%xmm1,16(%esp)
-	xorl	%edi,%ebp
-	addl	%ebx,%eax
-	movdqa	%xmm6,%xmm2
-	movdqa	%xmm6,%xmm0
-	rorl	$7,%ecx
-	addl	%ebp,%eax
-	addl	40(%esp),%edi
-	xorl	%edx,%ecx
-	pslldq	$12,%xmm2
-	paddd	%xmm6,%xmm6
-	movl	%eax,%ebp
-	roll	$5,%eax
-	andl	%ecx,%esi
-	xorl	%edx,%ecx
-	psrld	$31,%xmm0
-	xorl	%edx,%esi
-	addl	%eax,%edi
-	movdqa	%xmm2,%xmm1
-	rorl	$7,%ebx
-	addl	%esi,%edi
-	psrld	$30,%xmm2
-	por	%xmm0,%xmm6
-	addl	44(%esp),%edx
-	xorl	%ecx,%ebx
-	movdqa	64(%esp),%xmm0
-	movl	%edi,%esi
-	roll	$5,%edi
-	pslld	$2,%xmm1
-	pxor	%xmm2,%xmm6
-	andl	%ebx,%ebp
-	xorl	%ecx,%ebx
-	movdqa	112(%esp),%xmm2
-	xorl	%ecx,%ebp
-	addl	%edi,%edx
-	pxor	%xmm1,%xmm6
-	movdqa	%xmm4,%xmm7
-	rorl	$7,%eax
-	addl	%ebp,%edx
-	addl	48(%esp),%ecx
-	xorl	%ebx,%eax
-.byte	102,15,58,15,251,8
-	movdqa	%xmm6,%xmm1
-	movl	%edx,%ebp
-	roll	$5,%edx
-	paddd	%xmm6,%xmm2
-	movdqa	%xmm3,64(%esp)
-	andl	%eax,%esi
-	xorl	%ebx,%eax
-	psrldq	$4,%xmm1
-	xorl	%ebx,%esi
-	addl	%edx,%ecx
-	pxor	%xmm3,%xmm7
-	rorl	$7,%edi
-	addl	%esi,%ecx
-	pxor	%xmm5,%xmm1
-	addl	52(%esp),%ebx
-	xorl	%eax,%edi
-	movl	%ecx,%esi
-	roll	$5,%ecx
-	pxor	%xmm1,%xmm7
-	andl	%edi,%ebp
-	xorl	%eax,%edi
-	movdqa	%xmm2,32(%esp)
-	xorl	%eax,%ebp
-	addl	%ecx,%ebx
-	movdqa	%xmm7,%xmm3
-	movdqa	%xmm7,%xmm1
-	rorl	$7,%edx
-	addl	%ebp,%ebx
-	addl	56(%esp),%eax
-	xorl	%edi,%edx
-	pslldq	$12,%xmm3
-	paddd	%xmm7,%xmm7
-	movl	%ebx,%ebp
-	roll	$5,%ebx
-	andl	%edx,%esi
-	xorl	%edi,%edx
-	psrld	$31,%xmm1
-	xorl	%edi,%esi
-	addl	%ebx,%eax
-	movdqa	%xmm3,%xmm2
-	rorl	$7,%ecx
-	addl	%esi,%eax
-	psrld	$30,%xmm3
-	por	%xmm1,%xmm7
-	addl	60(%esp),%edi
-	xorl	%edx,%ecx
-	movdqa	80(%esp),%xmm1
-	movl	%eax,%esi
-	roll	$5,%eax
-	pslld	$2,%xmm2
-	pxor	%xmm3,%xmm7
-	andl	%ecx,%ebp
-	xorl	%edx,%ecx
-	movdqa	112(%esp),%xmm3
-	xorl	%edx,%ebp
-	addl	%eax,%edi
-	pxor	%xmm2,%xmm7
-	rorl	$7,%ebx
-	addl	%ebp,%edi
-	movdqa	%xmm7,%xmm2
-	addl	(%esp),%edx
-	pxor	%xmm4,%xmm0
-.byte	102,15,58,15,214,8
-	xorl	%ecx,%ebx
-	movl	%edi,%ebp
-	roll	$5,%edi
-	pxor	%xmm1,%xmm0
-	movdqa	%xmm4,80(%esp)
-	andl	%ebx,%esi
-	xorl	%ecx,%ebx
-	movdqa	%xmm3,%xmm4
-	paddd	%xmm7,%xmm3
-	xorl	%ecx,%esi
-	addl	%edi,%edx
-	pxor	%xmm2,%xmm0
-	rorl	$7,%eax
-	addl	%esi,%edx
-	addl	4(%esp),%ecx
-	xorl	%ebx,%eax
-	movdqa	%xmm0,%xmm2
-	movdqa	%xmm3,48(%esp)
-	movl	%edx,%esi
-	roll	$5,%edx
-	andl	%eax,%ebp
-	xorl	%ebx,%eax
-	pslld	$2,%xmm0
-	xorl	%ebx,%ebp
-	addl	%edx,%ecx
-	psrld	$30,%xmm2
-	rorl	$7,%edi
-	addl	%ebp,%ecx
-	addl	8(%esp),%ebx
-	xorl	%eax,%edi
-	movl	%ecx,%ebp
-	roll	$5,%ecx
-	por	%xmm2,%xmm0
-	andl	%edi,%esi
-	xorl	%eax,%edi
-	movdqa	96(%esp),%xmm2
-	xorl	%eax,%esi
-	addl	%ecx,%ebx
-	rorl	$7,%edx
-	addl	%esi,%ebx
-	addl	12(%esp),%eax
-	movdqa	%xmm0,%xmm3
-	xorl	%edi,%edx
-	movl	%ebx,%esi
-	roll	$5,%ebx
-	andl	%edx,%ebp
-	xorl	%edi,%edx
-	xorl	%edi,%ebp
-	addl	%ebx,%eax
-	rorl	$7,%ecx
-	addl	%ebp,%eax
-	addl	16(%esp),%edi
-	pxor	%xmm5,%xmm1
-.byte	102,15,58,15,223,8
-	xorl	%edx,%esi
-	movl	%eax,%ebp
-	roll	$5,%eax
-	pxor	%xmm2,%xmm1
-	movdqa	%xmm5,96(%esp)
-	xorl	%ecx,%esi
-	addl	%eax,%edi
-	movdqa	%xmm4,%xmm5
-	paddd	%xmm0,%xmm4
-	rorl	$7,%ebx
-	addl	%esi,%edi
-	pxor	%xmm3,%xmm1
-	addl	20(%esp),%edx
-	xorl	%ecx,%ebp
-	movl	%edi,%esi
-	roll	$5,%edi
-	movdqa	%xmm1,%xmm3
-	movdqa	%xmm4,(%esp)
-	xorl	%ebx,%ebp
-	addl	%edi,%edx
-	rorl	$7,%eax
-	addl	%ebp,%edx
-	pslld	$2,%xmm1
-	addl	24(%esp),%ecx
-	xorl	%ebx,%esi
-	psrld	$30,%xmm3
-	movl	%edx,%ebp
-	roll	$5,%edx
-	xorl	%eax,%esi
-	addl	%edx,%ecx
-	rorl	$7,%edi
-	addl	%esi,%ecx
-	por	%xmm3,%xmm1
-	addl	28(%esp),%ebx
-	xorl	%eax,%ebp
-	movdqa	64(%esp),%xmm3
-	movl	%ecx,%esi
-	roll	$5,%ecx
-	xorl	%edi,%ebp
-	addl	%ecx,%ebx
-	rorl	$7,%edx
-	movdqa	%xmm1,%xmm4
-	addl	%ebp,%ebx
-	addl	32(%esp),%eax
-	pxor	%xmm6,%xmm2
-.byte	102,15,58,15,224,8
-	xorl	%edi,%esi
-	movl	%ebx,%ebp
-	roll	$5,%ebx
-	pxor	%xmm3,%xmm2
-	movdqa	%xmm6,64(%esp)
-	xorl	%edx,%esi
-	addl	%ebx,%eax
-	movdqa	128(%esp),%xmm6
-	paddd	%xmm1,%xmm5
-	rorl	$7,%ecx
-	addl	%esi,%eax
-	pxor	%xmm4,%xmm2
-	addl	36(%esp),%edi
-	xorl	%edx,%ebp
-	movl	%eax,%esi
-	roll	$5,%eax
-	movdqa	%xmm2,%xmm4
-	movdqa	%xmm5,16(%esp)
-	xorl	%ecx,%ebp
-	addl	%eax,%edi
-	rorl	$7,%ebx
-	addl	%ebp,%edi
-	pslld	$2,%xmm2
-	addl	40(%esp),%edx
-	xorl	%ecx,%esi
-	psrld	$30,%xmm4
-	movl	%edi,%ebp
-	roll	$5,%edi
-	xorl	%ebx,%esi
-	addl	%edi,%edx
-	rorl	$7,%eax
-	addl	%esi,%edx
-	por	%xmm4,%xmm2
-	addl	44(%esp),%ecx
-	xorl	%ebx,%ebp
-	movdqa	80(%esp),%xmm4
-	movl	%edx,%esi
-	roll	$5,%edx
-	xorl	%eax,%ebp
-	addl	%edx,%ecx
-	rorl	$7,%edi
-	movdqa	%xmm2,%xmm5
-	addl	%ebp,%ecx
-	addl	48(%esp),%ebx
-	pxor	%xmm7,%xmm3
-.byte	102,15,58,15,233,8
-	xorl	%eax,%esi
-	movl	%ecx,%ebp
-	roll	$5,%ecx
-	pxor	%xmm4,%xmm3
-	movdqa	%xmm7,80(%esp)
-	xorl	%edi,%esi
-	addl	%ecx,%ebx
-	movdqa	%xmm6,%xmm7
-	paddd	%xmm2,%xmm6
-	rorl	$7,%edx
-	addl	%esi,%ebx
-	pxor	%xmm5,%xmm3
-	addl	52(%esp),%eax
-	xorl	%edi,%ebp
-	movl	%ebx,%esi
-	roll	$5,%ebx
-	movdqa	%xmm3,%xmm5
-	movdqa	%xmm6,32(%esp)
-	xorl	%edx,%ebp
-	addl	%ebx,%eax
-	rorl	$7,%ecx
-	addl	%ebp,%eax
-	pslld	$2,%xmm3
-	addl	56(%esp),%edi
-	xorl	%edx,%esi
-	psrld	$30,%xmm5
-	movl	%eax,%ebp
-	roll	$5,%eax
-	xorl	%ecx,%esi
-	addl	%eax,%edi
-	rorl	$7,%ebx
-	addl	%esi,%edi
-	por	%xmm5,%xmm3
-	addl	60(%esp),%edx
-	xorl	%ecx,%ebp
-	movdqa	96(%esp),%xmm5
-	movl	%edi,%esi
-	roll	$5,%edi
-	xorl	%ebx,%ebp
-	addl	%edi,%edx
-	rorl	$7,%eax
-	movdqa	%xmm3,%xmm6
-	addl	%ebp,%edx
-	addl	(%esp),%ecx
-	pxor	%xmm0,%xmm4
-.byte	102,15,58,15,242,8
-	xorl	%ebx,%esi
-	movl	%edx,%ebp
-	roll	$5,%edx
-	pxor	%xmm5,%xmm4
-	movdqa	%xmm0,96(%esp)
-	xorl	%eax,%esi
-	addl	%edx,%ecx
-	movdqa	%xmm7,%xmm0
-	paddd	%xmm3,%xmm7
-	rorl	$7,%edi
-	addl	%esi,%ecx
-	pxor	%xmm6,%xmm4
-	addl	4(%esp),%ebx
-	xorl	%eax,%ebp
-	movl	%ecx,%esi
-	roll	$5,%ecx
-	movdqa	%xmm4,%xmm6
-	movdqa	%xmm7,48(%esp)
-	xorl	%edi,%ebp
-	addl	%ecx,%ebx
-	rorl	$7,%edx
-	addl	%ebp,%ebx
-	pslld	$2,%xmm4
-	addl	8(%esp),%eax
-	xorl	%edi,%esi
-	psrld	$30,%xmm6
-	movl	%ebx,%ebp
-	roll	$5,%ebx
-	xorl	%edx,%esi
-	addl	%ebx,%eax
-	rorl	$7,%ecx
-	addl	%esi,%eax
-	por	%xmm6,%xmm4
-	addl	12(%esp),%edi
-	xorl	%edx,%ebp
-	movdqa	64(%esp),%xmm6
-	movl	%eax,%esi
-	roll	$5,%eax
-	xorl	%ecx,%ebp
-	addl	%eax,%edi
-	rorl	$7,%ebx
-	movdqa	%xmm4,%xmm7
-	addl	%ebp,%edi
-	addl	16(%esp),%edx
-	pxor	%xmm1,%xmm5
-.byte	102,15,58,15,251,8
-	xorl	%ecx,%esi
-	movl	%edi,%ebp
-	roll	$5,%edi
-	pxor	%xmm6,%xmm5
-	movdqa	%xmm1,64(%esp)
-	xorl	%ebx,%esi
-	addl	%edi,%edx
-	movdqa	%xmm0,%xmm1
-	paddd	%xmm4,%xmm0
-	rorl	$7,%eax
-	addl	%esi,%edx
-	pxor	%xmm7,%xmm5
-	addl	20(%esp),%ecx
-	xorl	%ebx,%ebp
-	movl	%edx,%esi
-	roll	$5,%edx
-	movdqa	%xmm5,%xmm7
-	movdqa	%xmm0,(%esp)
-	xorl	%eax,%ebp
-	addl	%edx,%ecx
-	rorl	$7,%edi
-	addl	%ebp,%ecx
-	pslld	$2,%xmm5
-	addl	24(%esp),%ebx
-	xorl	%eax,%esi
-	psrld	$30,%xmm7
-	movl	%ecx,%ebp
-	roll	$5,%ecx
-	xorl	%edi,%esi
-	addl	%ecx,%ebx
-	rorl	$7,%edx
-	addl	%esi,%ebx
-	por	%xmm7,%xmm5
-	addl	28(%esp),%eax
-	xorl	%edi,%ebp
-	movdqa	80(%esp),%xmm7
-	movl	%ebx,%esi
-	roll	$5,%ebx
-	xorl	%edx,%ebp
-	addl	%ebx,%eax
-	rorl	$7,%ecx
-	movdqa	%xmm5,%xmm0
-	addl	%ebp,%eax
-	movl	%ecx,%ebp
-	pxor	%xmm2,%xmm6
-.byte	102,15,58,15,196,8
-	xorl	%edx,%ecx
-	addl	32(%esp),%edi
-	andl	%edx,%ebp
-	pxor	%xmm7,%xmm6
-	movdqa	%xmm2,80(%esp)
-	andl	%ecx,%esi
-	rorl	$7,%ebx
-	movdqa	%xmm1,%xmm2
-	paddd	%xmm5,%xmm1
-	addl	%ebp,%edi
-	movl	%eax,%ebp
-	pxor	%xmm0,%xmm6
-	roll	$5,%eax
-	addl	%esi,%edi
-	xorl	%edx,%ecx
-	addl	%eax,%edi
-	movdqa	%xmm6,%xmm0
-	movdqa	%xmm1,16(%esp)
-	movl	%ebx,%esi
-	xorl	%ecx,%ebx
-	addl	36(%esp),%edx
-	andl	%ecx,%esi
-	pslld	$2,%xmm6
-	andl	%ebx,%ebp
-	rorl	$7,%eax
-	psrld	$30,%xmm0
-	addl	%esi,%edx
-	movl	%edi,%esi
-	roll	$5,%edi
-	addl	%ebp,%edx
-	xorl	%ecx,%ebx
-	addl	%edi,%edx
-	por	%xmm0,%xmm6
-	movl	%eax,%ebp
-	xorl	%ebx,%eax
-	movdqa	96(%esp),%xmm0
-	addl	40(%esp),%ecx
-	andl	%ebx,%ebp
-	andl	%eax,%esi
-	rorl	$7,%edi
-	addl	%ebp,%ecx
-	movdqa	%xmm6,%xmm1
-	movl	%edx,%ebp
-	roll	$5,%edx
-	addl	%esi,%ecx
-	xorl	%ebx,%eax
-	addl	%edx,%ecx
-	movl	%edi,%esi
-	xorl	%eax,%edi
-	addl	44(%esp),%ebx
-	andl	%eax,%esi
-	andl	%edi,%ebp
-	rorl	$7,%edx
-	addl	%esi,%ebx
-	movl	%ecx,%esi
-	roll	$5,%ecx
-	addl	%ebp,%ebx
-	xorl	%eax,%edi
-	addl	%ecx,%ebx
-	movl	%edx,%ebp
-	pxor	%xmm3,%xmm7
-.byte	102,15,58,15,205,8
-	xorl	%edi,%edx
-	addl	48(%esp),%eax
-	andl	%edi,%ebp
-	pxor	%xmm0,%xmm7
-	movdqa	%xmm3,96(%esp)
-	andl	%edx,%esi
-	rorl	$7,%ecx
-	movdqa	144(%esp),%xmm3
-	paddd	%xmm6,%xmm2
-	addl	%ebp,%eax
-	movl	%ebx,%ebp
-	pxor	%xmm1,%xmm7
-	roll	$5,%ebx
-	addl	%esi,%eax
-	xorl	%edi,%edx
-	addl	%ebx,%eax
-	movdqa	%xmm7,%xmm1
-	movdqa	%xmm2,32(%esp)
-	movl	%ecx,%esi
-	xorl	%edx,%ecx
-	addl	52(%esp),%edi
-	andl	%edx,%esi
-	pslld	$2,%xmm7
-	andl	%ecx,%ebp
-	rorl	$7,%ebx
-	psrld	$30,%xmm1
-	addl	%esi,%edi
-	movl	%eax,%esi
-	roll	$5,%eax
-	addl	%ebp,%edi
-	xorl	%edx,%ecx
-	addl	%eax,%edi
-	por	%xmm1,%xmm7
-	movl	%ebx,%ebp
-	xorl	%ecx,%ebx
-	movdqa	64(%esp),%xmm1
-	addl	56(%esp),%edx
-	andl	%ecx,%ebp
-	andl	%ebx,%esi
-	rorl	$7,%eax
-	addl	%ebp,%edx
-	movdqa	%xmm7,%xmm2
-	movl	%edi,%ebp
-	roll	$5,%edi
-	addl	%esi,%edx
-	xorl	%ecx,%ebx
-	addl	%edi,%edx
-	movl	%eax,%esi
-	xorl	%ebx,%eax
-	addl	60(%esp),%ecx
-	andl	%ebx,%esi
-	andl	%eax,%ebp
-	rorl	$7,%edi
-	addl	%esi,%ecx
-	movl	%edx,%esi
-	roll	$5,%edx
-	addl	%ebp,%ecx
-	xorl	%ebx,%eax
-	addl	%edx,%ecx
-	movl	%edi,%ebp
-	pxor	%xmm4,%xmm0
-.byte	102,15,58,15,214,8
-	xorl	%eax,%edi
-	addl	(%esp),%ebx
-	andl	%eax,%ebp
-	pxor	%xmm1,%xmm0
-	movdqa	%xmm4,64(%esp)
-	andl	%edi,%esi
-	rorl	$7,%edx
-	movdqa	%xmm3,%xmm4
-	paddd	%xmm7,%xmm3
-	addl	%ebp,%ebx
-	movl	%ecx,%ebp
-	pxor	%xmm2,%xmm0
-	roll	$5,%ecx
-	addl	%esi,%ebx
-	xorl	%eax,%edi
-	addl	%ecx,%ebx
-	movdqa	%xmm0,%xmm2
-	movdqa	%xmm3,48(%esp)
-	movl	%edx,%esi
-	xorl	%edi,%edx
-	addl	4(%esp),%eax
-	andl	%edi,%esi
-	pslld	$2,%xmm0
-	andl	%edx,%ebp
-	rorl	$7,%ecx
-	psrld	$30,%xmm2
-	addl	%esi,%eax
-	movl	%ebx,%esi
-	roll	$5,%ebx
-	addl	%ebp,%eax
-	xorl	%edi,%edx
-	addl	%ebx,%eax
-	por	%xmm2,%xmm0
-	movl	%ecx,%ebp
-	xorl	%edx,%ecx
-	movdqa	80(%esp),%xmm2
-	addl	8(%esp),%edi
-	andl	%edx,%ebp
-	andl	%ecx,%esi
-	rorl	$7,%ebx
-	addl	%ebp,%edi
-	movdqa	%xmm0,%xmm3
-	movl	%eax,%ebp
-	roll	$5,%eax
-	addl	%esi,%edi
-	xorl	%edx,%ecx
-	addl	%eax,%edi
-	movl	%ebx,%esi
-	xorl	%ecx,%ebx
-	addl	12(%esp),%edx
-	andl	%ecx,%esi
-	andl	%ebx,%ebp
-	rorl	$7,%eax
-	addl	%esi,%edx
-	movl	%edi,%esi
-	roll	$5,%edi
-	addl	%ebp,%edx
-	xorl	%ecx,%ebx
-	addl	%edi,%edx
-	movl	%eax,%ebp
-	pxor	%xmm5,%xmm1
-.byte	102,15,58,15,223,8
-	xorl	%ebx,%eax
-	addl	16(%esp),%ecx
-	andl	%ebx,%ebp
-	pxor	%xmm2,%xmm1
-	movdqa	%xmm5,80(%esp)
-	andl	%eax,%esi
-	rorl	$7,%edi
-	movdqa	%xmm4,%xmm5
-	paddd	%xmm0,%xmm4
-	addl	%ebp,%ecx
-	movl	%edx,%ebp
-	pxor	%xmm3,%xmm1
-	roll	$5,%edx
-	addl	%esi,%ecx
-	xorl	%ebx,%eax
-	addl	%edx,%ecx
-	movdqa	%xmm1,%xmm3
-	movdqa	%xmm4,(%esp)
-	movl	%edi,%esi
-	xorl	%eax,%edi
-	addl	20(%esp),%ebx
-	andl	%eax,%esi
-	pslld	$2,%xmm1
-	andl	%edi,%ebp
-	rorl	$7,%edx
-	psrld	$30,%xmm3
-	addl	%esi,%ebx
-	movl	%ecx,%esi
-	roll	$5,%ecx
-	addl	%ebp,%ebx
-	xorl	%eax,%edi
-	addl	%ecx,%ebx
-	por	%xmm3,%xmm1
-	movl	%edx,%ebp
-	xorl	%edi,%edx
-	movdqa	96(%esp),%xmm3
-	addl	24(%esp),%eax
-	andl	%edi,%ebp
-	andl	%edx,%esi
-	rorl	$7,%ecx
-	addl	%ebp,%eax
-	movdqa	%xmm1,%xmm4
-	movl	%ebx,%ebp
-	roll	$5,%ebx
-	addl	%esi,%eax
-	xorl	%edi,%edx
-	addl	%ebx,%eax
-	movl	%ecx,%esi
-	xorl	%edx,%ecx
-	addl	28(%esp),%edi
-	andl	%edx,%esi
-	andl	%ecx,%ebp
-	rorl	$7,%ebx
-	addl	%esi,%edi
-	movl	%eax,%esi
-	roll	$5,%eax
-	addl	%ebp,%edi
-	xorl	%edx,%ecx
-	addl	%eax,%edi
-	movl	%ebx,%ebp
-	pxor	%xmm6,%xmm2
-.byte	102,15,58,15,224,8
-	xorl	%ecx,%ebx
-	addl	32(%esp),%edx
-	andl	%ecx,%ebp
-	pxor	%xmm3,%xmm2
-	movdqa	%xmm6,96(%esp)
-	andl	%ebx,%esi
-	rorl	$7,%eax
-	movdqa	%xmm5,%xmm6
-	paddd	%xmm1,%xmm5
-	addl	%ebp,%edx
-	movl	%edi,%ebp
-	pxor	%xmm4,%xmm2
-	roll	$5,%edi
-	addl	%esi,%edx
-	xorl	%ecx,%ebx
-	addl	%edi,%edx
-	movdqa	%xmm2,%xmm4
-	movdqa	%xmm5,16(%esp)
-	movl	%eax,%esi
-	xorl	%ebx,%eax
-	addl	36(%esp),%ecx
-	andl	%ebx,%esi
-	pslld	$2,%xmm2
-	andl	%eax,%ebp
-	rorl	$7,%edi
-	psrld	$30,%xmm4
-	addl	%esi,%ecx
-	movl	%edx,%esi
-	roll	$5,%edx
-	addl	%ebp,%ecx
-	xorl	%ebx,%eax
-	addl	%edx,%ecx
-	por	%xmm4,%xmm2
-	movl	%edi,%ebp
-	xorl	%eax,%edi
-	movdqa	64(%esp),%xmm4
-	addl	40(%esp),%ebx
-	andl	%eax,%ebp
-	andl	%edi,%esi
-	rorl	$7,%edx
-	addl	%ebp,%ebx
-	movdqa	%xmm2,%xmm5
-	movl	%ecx,%ebp
-	roll	$5,%ecx
-	addl	%esi,%ebx
-	xorl	%eax,%edi
-	addl	%ecx,%ebx
-	movl	%edx,%esi
-	xorl	%edi,%edx
-	addl	44(%esp),%eax
-	andl	%edi,%esi
-	andl	%edx,%ebp
-	rorl	$7,%ecx
-	addl	%esi,%eax
-	movl	%ebx,%esi
-	roll	$5,%ebx
-	addl	%ebp,%eax
-	xorl	%edi,%edx
-	addl	%ebx,%eax
-	addl	48(%esp),%edi
-	pxor	%xmm7,%xmm3
-.byte	102,15,58,15,233,8
-	xorl	%edx,%esi
-	movl	%eax,%ebp
-	roll	$5,%eax
-	pxor	%xmm4,%xmm3
-	movdqa	%xmm7,64(%esp)
-	xorl	%ecx,%esi
-	addl	%eax,%edi
-	movdqa	%xmm6,%xmm7
-	paddd	%xmm2,%xmm6
-	rorl	$7,%ebx
-	addl	%esi,%edi
-	pxor	%xmm5,%xmm3
-	addl	52(%esp),%edx
-	xorl	%ecx,%ebp
-	movl	%edi,%esi
-	roll	$5,%edi
-	movdqa	%xmm3,%xmm5
-	movdqa	%xmm6,32(%esp)
-	xorl	%ebx,%ebp
-	addl	%edi,%edx
-	rorl	$7,%eax
-	addl	%ebp,%edx
-	pslld	$2,%xmm3
-	addl	56(%esp),%ecx
-	xorl	%ebx,%esi
-	psrld	$30,%xmm5
-	movl	%edx,%ebp
-	roll	$5,%edx
-	xorl	%eax,%esi
-	addl	%edx,%ecx
-	rorl	$7,%edi
-	addl	%esi,%ecx
-	por	%xmm5,%xmm3
-	addl	60(%esp),%ebx
-	xorl	%eax,%ebp
-	movl	%ecx,%esi
-	roll	$5,%ecx
-	xorl	%edi,%ebp
-	addl	%ecx,%ebx
-	rorl	$7,%edx
-	addl	%ebp,%ebx
-	addl	(%esp),%eax
-	paddd	%xmm3,%xmm7
-	xorl	%edi,%esi
-	movl	%ebx,%ebp
-	roll	$5,%ebx
-	xorl	%edx,%esi
-	movdqa	%xmm7,48(%esp)
-	addl	%ebx,%eax
-	rorl	$7,%ecx
-	addl	%esi,%eax
-	addl	4(%esp),%edi
-	xorl	%edx,%ebp
-	movl	%eax,%esi
-	roll	$5,%eax
-	xorl	%ecx,%ebp
-	addl	%eax,%edi
-	rorl	$7,%ebx
-	addl	%ebp,%edi
-	addl	8(%esp),%edx
-	xorl	%ecx,%esi
-	movl	%edi,%ebp
-	roll	$5,%edi
-	xorl	%ebx,%esi
-	addl	%edi,%edx
-	rorl	$7,%eax
-	addl	%esi,%edx
-	addl	12(%esp),%ecx
-	xorl	%ebx,%ebp
-	movl	%edx,%esi
-	roll	$5,%edx
-	xorl	%eax,%ebp
-	addl	%edx,%ecx
-	rorl	$7,%edi
-	addl	%ebp,%ecx
-	movl	196(%esp),%ebp
-	cmpl	200(%esp),%ebp
-	je	.L005done
-	movdqa	160(%esp),%xmm7
-	movdqa	176(%esp),%xmm6
-	movdqu	(%ebp),%xmm0
-	movdqu	16(%ebp),%xmm1
-	movdqu	32(%ebp),%xmm2
-	movdqu	48(%ebp),%xmm3
-	addl	$64,%ebp
-.byte	102,15,56,0,198
-	movl	%ebp,196(%esp)
-	movdqa	%xmm7,96(%esp)
-	addl	16(%esp),%ebx
-	xorl	%eax,%esi
-.byte	102,15,56,0,206
-	movl	%ecx,%ebp
-	roll	$5,%ecx
-	paddd	%xmm7,%xmm0
-	xorl	%edi,%esi
-	addl	%ecx,%ebx
-	rorl	$7,%edx
-	addl	%esi,%ebx
-	movdqa	%xmm0,(%esp)
-	addl	20(%esp),%eax
-	xorl	%edi,%ebp
-	psubd	%xmm7,%xmm0
-	movl	%ebx,%esi
-	roll	$5,%ebx
-	xorl	%edx,%ebp
-	addl	%ebx,%eax
-	rorl	$7,%ecx
-	addl	%ebp,%eax
-	addl	24(%esp),%edi
-	xorl	%edx,%esi
-	movl	%eax,%ebp
-	roll	$5,%eax
-	xorl	%ecx,%esi
-	addl	%eax,%edi
-	rorl	$7,%ebx
-	addl	%esi,%edi
-	addl	28(%esp),%edx
-	xorl	%ecx,%ebp
-	movl	%edi,%esi
-	roll	$5,%edi
-	xorl	%ebx,%ebp
-	addl	%edi,%edx
-	rorl	$7,%eax
-	addl	%ebp,%edx
-	addl	32(%esp),%ecx
-	xorl	%ebx,%esi
-.byte	102,15,56,0,214
-	movl	%edx,%ebp
-	roll	$5,%edx
-	paddd	%xmm7,%xmm1
-	xorl	%eax,%esi
-	addl	%edx,%ecx
-	rorl	$7,%edi
-	addl	%esi,%ecx
-	movdqa	%xmm1,16(%esp)
-	addl	36(%esp),%ebx
-	xorl	%eax,%ebp
-	psubd	%xmm7,%xmm1
-	movl	%ecx,%esi
-	roll	$5,%ecx
-	xorl	%edi,%ebp
-	addl	%ecx,%ebx
-	rorl	$7,%edx
-	addl	%ebp,%ebx
-	addl	40(%esp),%eax
-	xorl	%edi,%esi
-	movl	%ebx,%ebp
-	roll	$5,%ebx
-	xorl	%edx,%esi
-	addl	%ebx,%eax
-	rorl	$7,%ecx
-	addl	%esi,%eax
-	addl	44(%esp),%edi
-	xorl	%edx,%ebp
-	movl	%eax,%esi
-	roll	$5,%eax
-	xorl	%ecx,%ebp
-	addl	%eax,%edi
-	rorl	$7,%ebx
-	addl	%ebp,%edi
-	addl	48(%esp),%edx
-	xorl	%ecx,%esi
-.byte	102,15,56,0,222
-	movl	%edi,%ebp
-	roll	$5,%edi
-	paddd	%xmm7,%xmm2
-	xorl	%ebx,%esi
-	addl	%edi,%edx
-	rorl	$7,%eax
-	addl	%esi,%edx
-	movdqa	%xmm2,32(%esp)
-	addl	52(%esp),%ecx
-	xorl	%ebx,%ebp
-	psubd	%xmm7,%xmm2
-	movl	%edx,%esi
-	roll	$5,%edx
-	xorl	%eax,%ebp
-	addl	%edx,%ecx
-	rorl	$7,%edi
-	addl	%ebp,%ecx
-	addl	56(%esp),%ebx
-	xorl	%eax,%esi
-	movl	%ecx,%ebp
-	roll	$5,%ecx
-	xorl	%edi,%esi
-	addl	%ecx,%ebx
-	rorl	$7,%edx
-	addl	%esi,%ebx
-	addl	60(%esp),%eax
-	xorl	%edi,%ebp
-	movl	%ebx,%esi
-	roll	$5,%ebx
-	xorl	%edx,%ebp
-	addl	%ebx,%eax
-	rorl	$7,%ecx
-	addl	%ebp,%eax
-	movl	192(%esp),%ebp
-	addl	(%ebp),%eax
-	addl	4(%ebp),%esi
-	addl	8(%ebp),%ecx
-	movl	%eax,(%ebp)
-	addl	12(%ebp),%edx
-	movl	%esi,4(%ebp)
-	addl	16(%ebp),%edi
-	movl	%ecx,8(%ebp)
-	movl	%esi,%ebx
-	movl	%edx,12(%ebp)
-	movl	%edi,16(%ebp)
-	movdqa	%xmm1,%xmm4
-	jmp	.L004loop
-.align	16
-.L005done:
-	addl	16(%esp),%ebx
-	xorl	%eax,%esi
-	movl	%ecx,%ebp
-	roll	$5,%ecx
-	xorl	%edi,%esi
-	addl	%ecx,%ebx
-	rorl	$7,%edx
-	addl	%esi,%ebx
-	addl	20(%esp),%eax
-	xorl	%edi,%ebp
-	movl	%ebx,%esi
-	roll	$5,%ebx
-	xorl	%edx,%ebp
-	addl	%ebx,%eax
-	rorl	$7,%ecx
-	addl	%ebp,%eax
-	addl	24(%esp),%edi
-	xorl	%edx,%esi
-	movl	%eax,%ebp
-	roll	$5,%eax
-	xorl	%ecx,%esi
-	addl	%eax,%edi
-	rorl	$7,%ebx
-	addl	%esi,%edi
-	addl	28(%esp),%edx
-	xorl	%ecx,%ebp
-	movl	%edi,%esi
-	roll	$5,%edi
-	xorl	%ebx,%ebp
-	addl	%edi,%edx
-	rorl	$7,%eax
-	addl	%ebp,%edx
-	addl	32(%esp),%ecx
-	xorl	%ebx,%esi
-	movl	%edx,%ebp
-	roll	$5,%edx
-	xorl	%eax,%esi
-	addl	%edx,%ecx
-	rorl	$7,%edi
-	addl	%esi,%ecx
-	addl	36(%esp),%ebx
-	xorl	%eax,%ebp
-	movl	%ecx,%esi
-	roll	$5,%ecx
-	xorl	%edi,%ebp
-	addl	%ecx,%ebx
-	rorl	$7,%edx
-	addl	%ebp,%ebx
-	addl	40(%esp),%eax
-	xorl	%edi,%esi
-	movl	%ebx,%ebp
-	roll	$5,%ebx
-	xorl	%edx,%esi
-	addl	%ebx,%eax
-	rorl	$7,%ecx
-	addl	%esi,%eax
-	addl	44(%esp),%edi
-	xorl	%edx,%ebp
-	movl	%eax,%esi
-	roll	$5,%eax
-	xorl	%ecx,%ebp
-	addl	%eax,%edi
-	rorl	$7,%ebx
-	addl	%ebp,%edi
-	addl	48(%esp),%edx
-	xorl	%ecx,%esi
-	movl	%edi,%ebp
-	roll	$5,%edi
-	xorl	%ebx,%esi
-	addl	%edi,%edx
-	rorl	$7,%eax
-	addl	%esi,%edx
-	addl	52(%esp),%ecx
-	xorl	%ebx,%ebp
-	movl	%edx,%esi
-	roll	$5,%edx
-	xorl	%eax,%ebp
-	addl	%edx,%ecx
-	rorl	$7,%edi
-	addl	%ebp,%ecx
-	addl	56(%esp),%ebx
-	xorl	%eax,%esi
-	movl	%ecx,%ebp
-	roll	$5,%ecx
-	xorl	%edi,%esi
-	addl	%ecx,%ebx
-	rorl	$7,%edx
-	addl	%esi,%ebx
-	addl	60(%esp),%eax
-	xorl	%edi,%ebp
-	movl	%ebx,%esi
-	roll	$5,%ebx
-	xorl	%edx,%ebp
-	addl	%ebx,%eax
-	rorl	$7,%ecx
-	addl	%ebp,%eax
-	movl	192(%esp),%ebp
-	addl	(%ebp),%eax
-	movl	204(%esp),%esp
-	addl	4(%ebp),%esi
-	addl	8(%ebp),%ecx
-	movl	%eax,(%ebp)
-	addl	12(%ebp),%edx
-	movl	%esi,4(%ebp)
-	addl	16(%ebp),%edi
-	movl	%ecx,8(%ebp)
-	movl	%edx,12(%ebp)
-	movl	%edi,16(%ebp)
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	_sha1_block_data_order_ssse3,.-_sha1_block_data_order_ssse3
-.align	64
-.LK_XX_XX:
-.long	1518500249,1518500249,1518500249,1518500249
-.long	1859775393,1859775393,1859775393,1859775393
-.long	2400959708,2400959708,2400959708,2400959708
-.long	3395469782,3395469782,3395469782,3395469782
-.long	66051,67438087,134810123,202182159
-.byte	83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115
-.byte	102,111,114,109,32,102,111,114,32,120,56,54,44,32,67,82
-.byte	89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112
-.byte	114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
-.comm	OPENSSL_ia32cap_P,8,4
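
[Note on the .s -> .S rename in this commit: the compiler driver runs uppercase-.S sources through the C preprocessor before assembling, which is what makes the #ifdef PIC / #else / #endif bracketing at the top of each added file work -- a single file now carries both a position-independent and a fixed-address build of every routine. A minimal C sketch of the same mechanism; building shared objects with -DPIC is the usual BSD convention, and the exact flag used by this tree's makefiles is an assumption here:

	/* pic_select.c - the preprocessor keeps exactly one branch,
	 * mirroring the #ifdef PIC bracketing in the .S files below. */
	#include <stdio.h>

	int main(void)
	{
	#ifdef PIC
		puts("PIC build: data reached via GOT-relative addressing");
	#else
		puts("static build: data reached via absolute addresses");
	#endif
		return 0;
	}

Compiled once as "cc pic_select.c" and once as "cc -DPIC pic_select.c", the two binaries print different lines; each .S below is likewise assembled with or without PIC defined, selecting one of its two bodies.]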

Added: trunk/secure/lib/libcrypto/i386/sha256-586.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/sha256-586.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/sha256-586.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,522 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/sha256-586.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from sha256-586.pl.
+#ifdef PIC
+.file	"sha256-586.S"
+.text
+.globl	sha256_block_data_order
+.type	sha256_block_data_order,@function
+.align	16
+sha256_block_data_order:
+.L_sha256_block_data_order_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%eax
+	movl	%esp,%ebx
+	call	.L000pic_point
+.L000pic_point:
+	popl	%ebp
+	leal	.L001K256-.L000pic_point(%ebp),%ebp
+	subl	$16,%esp
+	andl	$-64,%esp
+	shll	$6,%eax
+	addl	%edi,%eax
+	movl	%esi,(%esp)
+	movl	%edi,4(%esp)
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+.align	16
+.L002loop:
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	16(%edi),%eax
+	movl	20(%edi),%ebx
+	movl	24(%edi),%ecx
+	movl	28(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	32(%edi),%eax
+	movl	36(%edi),%ebx
+	movl	40(%edi),%ecx
+	movl	44(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	48(%edi),%eax
+	movl	52(%edi),%ebx
+	movl	56(%edi),%ecx
+	movl	60(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	addl	$64,%edi
+	subl	$32,%esp
+	movl	%edi,100(%esp)
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edi
+	movl	%ebx,4(%esp)
+	movl	%ecx,8(%esp)
+	movl	%edi,12(%esp)
+	movl	16(%esi),%edx
+	movl	20(%esi),%ebx
+	movl	24(%esi),%ecx
+	movl	28(%esi),%edi
+	movl	%ebx,20(%esp)
+	movl	%ecx,24(%esp)
+	movl	%edi,28(%esp)
+.align	16
+.L00300_15:
+	movl	92(%esp),%ebx
+	movl	%edx,%ecx
+	rorl	$14,%ecx
+	movl	20(%esp),%esi
+	xorl	%edx,%ecx
+	rorl	$5,%ecx
+	xorl	%edx,%ecx
+	rorl	$6,%ecx
+	movl	24(%esp),%edi
+	addl	%ecx,%ebx
+	xorl	%edi,%esi
+	movl	%edx,16(%esp)
+	movl	%eax,%ecx
+	andl	%edx,%esi
+	movl	12(%esp),%edx
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	addl	%esi,%ebx
+	rorl	$9,%ecx
+	addl	28(%esp),%ebx
+	xorl	%eax,%ecx
+	rorl	$11,%ecx
+	movl	4(%esp),%esi
+	xorl	%eax,%ecx
+	rorl	$2,%ecx
+	addl	%ebx,%edx
+	movl	8(%esp),%edi
+	addl	%ecx,%ebx
+	movl	%eax,(%esp)
+	movl	%eax,%ecx
+	subl	$4,%esp
+	orl	%esi,%eax
+	andl	%esi,%ecx
+	andl	%edi,%eax
+	movl	(%ebp),%esi
+	orl	%ecx,%eax
+	addl	$4,%ebp
+	addl	%ebx,%eax
+	addl	%esi,%edx
+	addl	%esi,%eax
+	cmpl	$3248222580,%esi
+	jne	.L00300_15
+	movl	152(%esp),%ebx
+.align	16
+.L00416_63:
+	movl	%ebx,%esi
+	movl	100(%esp),%ecx
+	rorl	$11,%esi
+	movl	%ecx,%edi
+	xorl	%ebx,%esi
+	rorl	$7,%esi
+	shrl	$3,%ebx
+	rorl	$2,%edi
+	xorl	%esi,%ebx
+	xorl	%ecx,%edi
+	rorl	$17,%edi
+	shrl	$10,%ecx
+	addl	156(%esp),%ebx
+	xorl	%ecx,%edi
+	addl	120(%esp),%ebx
+	movl	%edx,%ecx
+	addl	%edi,%ebx
+	rorl	$14,%ecx
+	movl	20(%esp),%esi
+	xorl	%edx,%ecx
+	rorl	$5,%ecx
+	movl	%ebx,92(%esp)
+	xorl	%edx,%ecx
+	rorl	$6,%ecx
+	movl	24(%esp),%edi
+	addl	%ecx,%ebx
+	xorl	%edi,%esi
+	movl	%edx,16(%esp)
+	movl	%eax,%ecx
+	andl	%edx,%esi
+	movl	12(%esp),%edx
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	addl	%esi,%ebx
+	rorl	$9,%ecx
+	addl	28(%esp),%ebx
+	xorl	%eax,%ecx
+	rorl	$11,%ecx
+	movl	4(%esp),%esi
+	xorl	%eax,%ecx
+	rorl	$2,%ecx
+	addl	%ebx,%edx
+	movl	8(%esp),%edi
+	addl	%ecx,%ebx
+	movl	%eax,(%esp)
+	movl	%eax,%ecx
+	subl	$4,%esp
+	orl	%esi,%eax
+	andl	%esi,%ecx
+	andl	%edi,%eax
+	movl	(%ebp),%esi
+	orl	%ecx,%eax
+	addl	$4,%ebp
+	addl	%ebx,%eax
+	movl	152(%esp),%ebx
+	addl	%esi,%edx
+	addl	%esi,%eax
+	cmpl	$3329325298,%esi
+	jne	.L00416_63
+	movl	352(%esp),%esi
+	movl	4(%esp),%ebx
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edi
+	addl	(%esi),%eax
+	addl	4(%esi),%ebx
+	addl	8(%esi),%ecx
+	addl	12(%esi),%edi
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edi,12(%esi)
+	movl	20(%esp),%eax
+	movl	24(%esp),%ebx
+	movl	28(%esp),%ecx
+	movl	356(%esp),%edi
+	addl	16(%esi),%edx
+	addl	20(%esi),%eax
+	addl	24(%esi),%ebx
+	addl	28(%esi),%ecx
+	movl	%edx,16(%esi)
+	movl	%eax,20(%esi)
+	movl	%ebx,24(%esi)
+	movl	%ecx,28(%esi)
+	addl	$352,%esp
+	subl	$256,%ebp
+	cmpl	8(%esp),%edi
+	jb	.L002loop
+	movl	12(%esp),%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	64
+.L001K256:
+.long	1116352408,1899447441,3049323471,3921009573
+.long	961987163,1508970993,2453635748,2870763221
+.long	3624381080,310598401,607225278,1426881987
+.long	1925078388,2162078206,2614888103,3248222580
+.long	3835390401,4022224774,264347078,604807628
+.long	770255983,1249150122,1555081692,1996064986
+.long	2554220882,2821834349,2952996808,3210313671
+.long	3336571891,3584528711,113926993,338241895
+.long	666307205,773529912,1294757372,1396182291
+.long	1695183700,1986661051,2177026350,2456956037
+.long	2730485921,2820302411,3259730800,3345764771
+.long	3516065817,3600352804,4094571909,275423344
+.long	430227734,506948616,659060556,883997877
+.long	958139571,1322822218,1537002063,1747873779
+.long	1955562222,2024104815,2227730452,2361852424
+.long	2428436474,2756734187,3204031479,3329325298
+.size	sha256_block_data_order,.-.L_sha256_block_data_order_begin
+.byte	83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97
+.byte	110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
+.byte	67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+.byte	112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+.byte	62,0
+#else
+.file	"sha256-586.S"
+.text
+.globl	sha256_block_data_order
+.type	sha256_block_data_order,@function
+.align	16
+sha256_block_data_order:
+.L_sha256_block_data_order_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%eax
+	movl	%esp,%ebx
+	call	.L000pic_point
+.L000pic_point:
+	popl	%ebp
+	leal	.L001K256-.L000pic_point(%ebp),%ebp
+	subl	$16,%esp
+	andl	$-64,%esp
+	shll	$6,%eax
+	addl	%edi,%eax
+	movl	%esi,(%esp)
+	movl	%edi,4(%esp)
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+.align	16
+.L002loop:
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	16(%edi),%eax
+	movl	20(%edi),%ebx
+	movl	24(%edi),%ecx
+	movl	28(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	32(%edi),%eax
+	movl	36(%edi),%ebx
+	movl	40(%edi),%ecx
+	movl	44(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	48(%edi),%eax
+	movl	52(%edi),%ebx
+	movl	56(%edi),%ecx
+	movl	60(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	addl	$64,%edi
+	subl	$32,%esp
+	movl	%edi,100(%esp)
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edi
+	movl	%ebx,4(%esp)
+	movl	%ecx,8(%esp)
+	movl	%edi,12(%esp)
+	movl	16(%esi),%edx
+	movl	20(%esi),%ebx
+	movl	24(%esi),%ecx
+	movl	28(%esi),%edi
+	movl	%ebx,20(%esp)
+	movl	%ecx,24(%esp)
+	movl	%edi,28(%esp)
+.align	16
+.L00300_15:
+	movl	92(%esp),%ebx
+	movl	%edx,%ecx
+	rorl	$14,%ecx
+	movl	20(%esp),%esi
+	xorl	%edx,%ecx
+	rorl	$5,%ecx
+	xorl	%edx,%ecx
+	rorl	$6,%ecx
+	movl	24(%esp),%edi
+	addl	%ecx,%ebx
+	xorl	%edi,%esi
+	movl	%edx,16(%esp)
+	movl	%eax,%ecx
+	andl	%edx,%esi
+	movl	12(%esp),%edx
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	addl	%esi,%ebx
+	rorl	$9,%ecx
+	addl	28(%esp),%ebx
+	xorl	%eax,%ecx
+	rorl	$11,%ecx
+	movl	4(%esp),%esi
+	xorl	%eax,%ecx
+	rorl	$2,%ecx
+	addl	%ebx,%edx
+	movl	8(%esp),%edi
+	addl	%ecx,%ebx
+	movl	%eax,(%esp)
+	movl	%eax,%ecx
+	subl	$4,%esp
+	orl	%esi,%eax
+	andl	%esi,%ecx
+	andl	%edi,%eax
+	movl	(%ebp),%esi
+	orl	%ecx,%eax
+	addl	$4,%ebp
+	addl	%ebx,%eax
+	addl	%esi,%edx
+	addl	%esi,%eax
+	cmpl	$3248222580,%esi
+	jne	.L00300_15
+	movl	152(%esp),%ebx
+.align	16
+.L00416_63:
+	movl	%ebx,%esi
+	movl	100(%esp),%ecx
+	rorl	$11,%esi
+	movl	%ecx,%edi
+	xorl	%ebx,%esi
+	rorl	$7,%esi
+	shrl	$3,%ebx
+	rorl	$2,%edi
+	xorl	%esi,%ebx
+	xorl	%ecx,%edi
+	rorl	$17,%edi
+	shrl	$10,%ecx
+	addl	156(%esp),%ebx
+	xorl	%ecx,%edi
+	addl	120(%esp),%ebx
+	movl	%edx,%ecx
+	addl	%edi,%ebx
+	rorl	$14,%ecx
+	movl	20(%esp),%esi
+	xorl	%edx,%ecx
+	rorl	$5,%ecx
+	movl	%ebx,92(%esp)
+	xorl	%edx,%ecx
+	rorl	$6,%ecx
+	movl	24(%esp),%edi
+	addl	%ecx,%ebx
+	xorl	%edi,%esi
+	movl	%edx,16(%esp)
+	movl	%eax,%ecx
+	andl	%edx,%esi
+	movl	12(%esp),%edx
+	xorl	%edi,%esi
+	movl	%eax,%edi
+	addl	%esi,%ebx
+	rorl	$9,%ecx
+	addl	28(%esp),%ebx
+	xorl	%eax,%ecx
+	rorl	$11,%ecx
+	movl	4(%esp),%esi
+	xorl	%eax,%ecx
+	rorl	$2,%ecx
+	addl	%ebx,%edx
+	movl	8(%esp),%edi
+	addl	%ecx,%ebx
+	movl	%eax,(%esp)
+	movl	%eax,%ecx
+	subl	$4,%esp
+	orl	%esi,%eax
+	andl	%esi,%ecx
+	andl	%edi,%eax
+	movl	(%ebp),%esi
+	orl	%ecx,%eax
+	addl	$4,%ebp
+	addl	%ebx,%eax
+	movl	152(%esp),%ebx
+	addl	%esi,%edx
+	addl	%esi,%eax
+	cmpl	$3329325298,%esi
+	jne	.L00416_63
+	movl	352(%esp),%esi
+	movl	4(%esp),%ebx
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edi
+	addl	(%esi),%eax
+	addl	4(%esi),%ebx
+	addl	8(%esi),%ecx
+	addl	12(%esi),%edi
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	movl	%ecx,8(%esi)
+	movl	%edi,12(%esi)
+	movl	20(%esp),%eax
+	movl	24(%esp),%ebx
+	movl	28(%esp),%ecx
+	movl	356(%esp),%edi
+	addl	16(%esi),%edx
+	addl	20(%esi),%eax
+	addl	24(%esi),%ebx
+	addl	28(%esi),%ecx
+	movl	%edx,16(%esi)
+	movl	%eax,20(%esi)
+	movl	%ebx,24(%esi)
+	movl	%ecx,28(%esi)
+	addl	$352,%esp
+	subl	$256,%ebp
+	cmpl	8(%esp),%edi
+	jb	.L002loop
+	movl	12(%esp),%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	64
+.L001K256:
+.long	1116352408,1899447441,3049323471,3921009573
+.long	961987163,1508970993,2453635748,2870763221
+.long	3624381080,310598401,607225278,1426881987
+.long	1925078388,2162078206,2614888103,3248222580
+.long	3835390401,4022224774,264347078,604807628
+.long	770255983,1249150122,1555081692,1996064986
+.long	2554220882,2821834349,2952996808,3210313671
+.long	3336571891,3584528711,113926993,338241895
+.long	666307205,773529912,1294757372,1396182291
+.long	1695183700,1986661051,2177026350,2456956037
+.long	2730485921,2820302411,3259730800,3345764771
+.long	3516065817,3600352804,4094571909,275423344
+.long	430227734,506948616,659060556,883997877
+.long	958139571,1322822218,1537002063,1747873779
+.long	1955562222,2024104815,2227730452,2361852424
+.long	2428436474,2756734187,3204031479,3329325298
+.size	sha256_block_data_order,.-.L_sha256_block_data_order_begin
+.byte	83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97
+.byte	110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
+.byte	67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+.byte	112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+.byte	62,0
+#endif


Property changes on: trunk/secure/lib/libcrypto/i386/sha256-586.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
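
[One detail of the sha256-586.S body worth calling out: the round loops carry no induction variable. Each round fetches the next constant from the .L001K256 table through %ebp (movl (%ebp),%esi; addl $4,%ebp) and the loop exits on a sentinel compare -- 3248222580 is 0xc19bf174, the 16th constant, so cmpl $3248222580 closes .L00300_15 after round 15, and 3329325298 is 0xc67178f2, the 64th, closing .L00416_63. A hedged C sketch of the same counterless idiom, with the constants copied from the .L001K256 table above and the round body itself elided:

	#include <stdint.h>
	#include <stdio.h>

	static const uint32_t K256[64] = {
		1116352408u,1899447441u,3049323471u,3921009573u,
		 961987163u,1508970993u,2453635748u,2870763221u,
		3624381080u, 310598401u, 607225278u,1426881987u,
		1925078388u,2162078206u,2614888103u,3248222580u,
		3835390401u,4022224774u, 264347078u, 604807628u,
		 770255983u,1249150122u,1555081692u,1996064986u,
		2554220882u,2821834349u,2952996808u,3210313671u,
		3336571891u,3584528711u, 113926993u, 338241895u,
		 666307205u, 773529912u,1294757372u,1396182291u,
		1695183700u,1986661051u,2177026350u,2456956037u,
		2730485921u,2820302411u,3259730800u,3345764771u,
		3516065817u,3600352804u,4094571909u, 275423344u,
		 430227734u, 506948616u, 659060556u, 883997877u,
		 958139571u,1322822218u,1537002063u,1747873779u,
		1955562222u,2024104815u,2227730452u,2361852424u,
		2428436474u,2756734187u,3204031479u,3329325298u,
	};

	int main(void)
	{
		const uint32_t *k = K256;
		unsigned rounds = 0;
		uint32_t last;

		do {                     /* .L00300_15 */
			last = *k++;     /* movl (%ebp),%esi; addl $4,%ebp */
			rounds++;        /* real code does one SHA-256 round here */
		} while (last != 3248222580u);   /* cmpl $3248222580,%esi */

		do {                     /* .L00416_63 */
			last = *k++;
			rounds++;
		} while (last != 3329325298u);   /* cmpl $3329325298,%esi */

		printf("%u rounds\n", rounds);   /* prints 64 */
		return 0;
	}

Freeing the loop counter matters on i386, where the compression function already occupies all seven general registers. Incidentally, the deleted sha256-586.s below still carried .file "sha512-586.s", a leftover from an earlier generation run; the regenerated file names itself correctly.]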
Deleted: trunk/secure/lib/libcrypto/i386/sha256-586.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/sha256-586.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/sha256-586.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,259 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/sha256-586.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"sha512-586.s"
-.text
-.globl	sha256_block_data_order
-.type	sha256_block_data_order,@function
-.align	16
-sha256_block_data_order:
-.L_sha256_block_data_order_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%esi
-	movl	24(%esp),%edi
-	movl	28(%esp),%eax
-	movl	%esp,%ebx
-	call	.L000pic_point
-.L000pic_point:
-	popl	%ebp
-	leal	.L001K256-.L000pic_point(%ebp),%ebp
-	subl	$16,%esp
-	andl	$-64,%esp
-	shll	$6,%eax
-	addl	%edi,%eax
-	movl	%esi,(%esp)
-	movl	%edi,4(%esp)
-	movl	%eax,8(%esp)
-	movl	%ebx,12(%esp)
-.align	16
-.L002loop:
-	movl	(%edi),%eax
-	movl	4(%edi),%ebx
-	movl	8(%edi),%ecx
-	movl	12(%edi),%edx
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	pushl	%eax
-	pushl	%ebx
-	pushl	%ecx
-	pushl	%edx
-	movl	16(%edi),%eax
-	movl	20(%edi),%ebx
-	movl	24(%edi),%ecx
-	movl	28(%edi),%edx
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	pushl	%eax
-	pushl	%ebx
-	pushl	%ecx
-	pushl	%edx
-	movl	32(%edi),%eax
-	movl	36(%edi),%ebx
-	movl	40(%edi),%ecx
-	movl	44(%edi),%edx
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	pushl	%eax
-	pushl	%ebx
-	pushl	%ecx
-	pushl	%edx
-	movl	48(%edi),%eax
-	movl	52(%edi),%ebx
-	movl	56(%edi),%ecx
-	movl	60(%edi),%edx
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	pushl	%eax
-	pushl	%ebx
-	pushl	%ecx
-	pushl	%edx
-	addl	$64,%edi
-	subl	$32,%esp
-	movl	%edi,100(%esp)
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	movl	12(%esi),%edi
-	movl	%ebx,4(%esp)
-	movl	%ecx,8(%esp)
-	movl	%edi,12(%esp)
-	movl	16(%esi),%edx
-	movl	20(%esi),%ebx
-	movl	24(%esi),%ecx
-	movl	28(%esi),%edi
-	movl	%ebx,20(%esp)
-	movl	%ecx,24(%esp)
-	movl	%edi,28(%esp)
-.align	16
-.L00300_15:
-	movl	92(%esp),%ebx
-	movl	%edx,%ecx
-	rorl	$14,%ecx
-	movl	20(%esp),%esi
-	xorl	%edx,%ecx
-	rorl	$5,%ecx
-	xorl	%edx,%ecx
-	rorl	$6,%ecx
-	movl	24(%esp),%edi
-	addl	%ecx,%ebx
-	xorl	%edi,%esi
-	movl	%edx,16(%esp)
-	movl	%eax,%ecx
-	andl	%edx,%esi
-	movl	12(%esp),%edx
-	xorl	%edi,%esi
-	movl	%eax,%edi
-	addl	%esi,%ebx
-	rorl	$9,%ecx
-	addl	28(%esp),%ebx
-	xorl	%eax,%ecx
-	rorl	$11,%ecx
-	movl	4(%esp),%esi
-	xorl	%eax,%ecx
-	rorl	$2,%ecx
-	addl	%ebx,%edx
-	movl	8(%esp),%edi
-	addl	%ecx,%ebx
-	movl	%eax,(%esp)
-	movl	%eax,%ecx
-	subl	$4,%esp
-	orl	%esi,%eax
-	andl	%esi,%ecx
-	andl	%edi,%eax
-	movl	(%ebp),%esi
-	orl	%ecx,%eax
-	addl	$4,%ebp
-	addl	%ebx,%eax
-	addl	%esi,%edx
-	addl	%esi,%eax
-	cmpl	$3248222580,%esi
-	jne	.L00300_15
-	movl	152(%esp),%ebx
-.align	16
-.L00416_63:
-	movl	%ebx,%esi
-	movl	100(%esp),%ecx
-	rorl	$11,%esi
-	movl	%ecx,%edi
-	xorl	%ebx,%esi
-	rorl	$7,%esi
-	shrl	$3,%ebx
-	rorl	$2,%edi
-	xorl	%esi,%ebx
-	xorl	%ecx,%edi
-	rorl	$17,%edi
-	shrl	$10,%ecx
-	addl	156(%esp),%ebx
-	xorl	%ecx,%edi
-	addl	120(%esp),%ebx
-	movl	%edx,%ecx
-	addl	%edi,%ebx
-	rorl	$14,%ecx
-	movl	20(%esp),%esi
-	xorl	%edx,%ecx
-	rorl	$5,%ecx
-	movl	%ebx,92(%esp)
-	xorl	%edx,%ecx
-	rorl	$6,%ecx
-	movl	24(%esp),%edi
-	addl	%ecx,%ebx
-	xorl	%edi,%esi
-	movl	%edx,16(%esp)
-	movl	%eax,%ecx
-	andl	%edx,%esi
-	movl	12(%esp),%edx
-	xorl	%edi,%esi
-	movl	%eax,%edi
-	addl	%esi,%ebx
-	rorl	$9,%ecx
-	addl	28(%esp),%ebx
-	xorl	%eax,%ecx
-	rorl	$11,%ecx
-	movl	4(%esp),%esi
-	xorl	%eax,%ecx
-	rorl	$2,%ecx
-	addl	%ebx,%edx
-	movl	8(%esp),%edi
-	addl	%ecx,%ebx
-	movl	%eax,(%esp)
-	movl	%eax,%ecx
-	subl	$4,%esp
-	orl	%esi,%eax
-	andl	%esi,%ecx
-	andl	%edi,%eax
-	movl	(%ebp),%esi
-	orl	%ecx,%eax
-	addl	$4,%ebp
-	addl	%ebx,%eax
-	movl	152(%esp),%ebx
-	addl	%esi,%edx
-	addl	%esi,%eax
-	cmpl	$3329325298,%esi
-	jne	.L00416_63
-	movl	352(%esp),%esi
-	movl	4(%esp),%ebx
-	movl	8(%esp),%ecx
-	movl	12(%esp),%edi
-	addl	(%esi),%eax
-	addl	4(%esi),%ebx
-	addl	8(%esi),%ecx
-	addl	12(%esi),%edi
-	movl	%eax,(%esi)
-	movl	%ebx,4(%esi)
-	movl	%ecx,8(%esi)
-	movl	%edi,12(%esi)
-	movl	20(%esp),%eax
-	movl	24(%esp),%ebx
-	movl	28(%esp),%ecx
-	movl	356(%esp),%edi
-	addl	16(%esi),%edx
-	addl	20(%esi),%eax
-	addl	24(%esi),%ebx
-	addl	28(%esi),%ecx
-	movl	%edx,16(%esi)
-	movl	%eax,20(%esi)
-	movl	%ebx,24(%esi)
-	movl	%ecx,28(%esi)
-	addl	$352,%esp
-	subl	$256,%ebp
-	cmpl	8(%esp),%edi
-	jb	.L002loop
-	movl	12(%esp),%esp
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.align	64
-.L001K256:
-.long	1116352408,1899447441,3049323471,3921009573
-.long	961987163,1508970993,2453635748,2870763221
-.long	3624381080,310598401,607225278,1426881987
-.long	1925078388,2162078206,2614888103,3248222580
-.long	3835390401,4022224774,264347078,604807628
-.long	770255983,1249150122,1555081692,1996064986
-.long	2554220882,2821834349,2952996808,3210313671
-.long	3336571891,3584528711,113926993,338241895
-.long	666307205,773529912,1294757372,1396182291
-.long	1695183700,1986661051,2177026350,2456956037
-.long	2730485921,2820302411,3259730800,3345764771
-.long	3516065817,3600352804,4094571909,275423344
-.long	430227734,506948616,659060556,883997877
-.long	958139571,1322822218,1537002063,1747873779
-.long	1955562222,2024104815,2227730452,2361852424
-.long	2428436474,2756734187,3204031479,3329325298
-.size	sha256_block_data_order,.-.L_sha256_block_data_order_begin
-.byte	83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97
-.byte	110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
-.byte	67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
-.byte	112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
-.byte	62,0
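
[Both the sha256 file above and the sha512 file added below open their block loops with long runs of movl/bswap pairs that push the message words onto the stack. SHA-2 defines its message schedule over big-endian words, so every 32-bit load on little-endian i386 needs a byte swap; the sha512 code simply handles each 64-bit word as two swapped 32-bit loads. The same load, sketched in C -- load_be32/load_be64 are illustrative helpers of this note, not anything the commit defines, and __builtin_bswap32 is the GCC/Clang builtin:

	#include <stdint.h>
	#include <string.h>

	/* Big-endian 32-bit load, as in ".L002loop: movl (%edi),%eax;
	 * bswap %eax".  memcpy keeps the access alignment-safe. */
	static uint32_t load_be32(const unsigned char *p)
	{
		uint32_t w;
		memcpy(&w, p, sizeof w);
		return __builtin_bswap32(w);
	}

	/* A 64-bit SHA-512 word is two such loads, high half first,
	 * matching the paired movl/bswap in the file below. */
	static uint64_t load_be64(const unsigned char *p)
	{
		return ((uint64_t)load_be32(p) << 32) | load_be32(p + 4);
	}

	int main(void)
	{
		const unsigned char msg[8] = {1, 2, 3, 4, 5, 6, 7, 8};
		return load_be64(msg) == 0x0102030405060708ull ? 0 : 1;
	}
]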

Added: trunk/secure/lib/libcrypto/i386/sha512-586.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/sha512-586.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/sha512-586.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,1677 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/sha512-586.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from sha512-586.pl.
+#ifdef PIC
+.file	"sha512-586.S"
+.text
+.globl	sha512_block_data_order
+.type	sha512_block_data_order,@function
+.align	16
+sha512_block_data_order:
+.L_sha512_block_data_order_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%eax
+	movl	%esp,%ebx
+	call	.L000pic_point
+.L000pic_point:
+	popl	%ebp
+	leal	.L001K512-.L000pic_point(%ebp),%ebp
+	subl	$16,%esp
+	andl	$-64,%esp
+	shll	$7,%eax
+	addl	%edi,%eax
+	movl	%esi,(%esp)
+	movl	%edi,4(%esp)
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	leal	_GLOBAL_OFFSET_TABLE_+[.-.L001K512](%ebp),%edx
+	movl	OPENSSL_ia32cap_P@GOT(%edx),%edx
+	btl	$26,(%edx)
+	jnc	.L002loop_x86
+	movq	(%esi),%mm0
+	movq	8(%esi),%mm1
+	movq	16(%esi),%mm2
+	movq	24(%esi),%mm3
+	movq	32(%esi),%mm4
+	movq	40(%esi),%mm5
+	movq	48(%esi),%mm6
+	movq	56(%esi),%mm7
+	subl	$80,%esp
+.align	16
+.L003loop_sse2:
+	movq	%mm1,8(%esp)
+	movq	%mm2,16(%esp)
+	movq	%mm3,24(%esp)
+	movq	%mm5,40(%esp)
+	movq	%mm6,48(%esp)
+	movq	%mm7,56(%esp)
+	movl	(%edi),%ecx
+	movl	4(%edi),%edx
+	addl	$8,%edi
+	bswap	%ecx
+	bswap	%edx
+	movl	%ecx,76(%esp)
+	movl	%edx,72(%esp)
+.align	16
+.L00400_14_sse2:
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	addl	$8,%edi
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,68(%esp)
+	movl	%ebx,64(%esp)
+	movq	40(%esp),%mm5
+	movq	48(%esp),%mm6
+	movq	56(%esp),%mm7
+	movq	%mm4,%mm1
+	movq	%mm4,%mm2
+	psrlq	$14,%mm1
+	movq	%mm4,32(%esp)
+	psllq	$23,%mm2
+	movq	%mm1,%mm3
+	psrlq	$4,%mm1
+	pxor	%mm2,%mm3
+	psllq	$23,%mm2
+	pxor	%mm1,%mm3
+	psrlq	$23,%mm1
+	pxor	%mm2,%mm3
+	psllq	$4,%mm2
+	pxor	%mm1,%mm3
+	paddq	(%ebp),%mm7
+	pxor	%mm2,%mm3
+	pxor	%mm6,%mm5
+	movq	8(%esp),%mm1
+	pand	%mm4,%mm5
+	movq	16(%esp),%mm2
+	pxor	%mm6,%mm5
+	movq	24(%esp),%mm4
+	paddq	%mm5,%mm3
+	movq	%mm0,(%esp)
+	paddq	%mm7,%mm3
+	movq	%mm0,%mm5
+	movq	%mm0,%mm6
+	paddq	72(%esp),%mm3
+	psrlq	$28,%mm5
+	paddq	%mm3,%mm4
+	psllq	$25,%mm6
+	movq	%mm5,%mm7
+	psrlq	$6,%mm5
+	pxor	%mm6,%mm7
+	psllq	$5,%mm6
+	pxor	%mm5,%mm7
+	psrlq	$5,%mm5
+	pxor	%mm6,%mm7
+	psllq	$6,%mm6
+	pxor	%mm5,%mm7
+	subl	$8,%esp
+	pxor	%mm6,%mm7
+	movq	%mm0,%mm5
+	por	%mm2,%mm0
+	pand	%mm2,%mm5
+	pand	%mm1,%mm0
+	por	%mm0,%mm5
+	paddq	%mm5,%mm7
+	movq	%mm3,%mm0
+	movb	(%ebp),%dl
+	paddq	%mm7,%mm0
+	addl	$8,%ebp
+	cmpb	$53,%dl
+	jne	.L00400_14_sse2
+	movq	40(%esp),%mm5
+	movq	48(%esp),%mm6
+	movq	56(%esp),%mm7
+	movq	%mm4,%mm1
+	movq	%mm4,%mm2
+	psrlq	$14,%mm1
+	movq	%mm4,32(%esp)
+	psllq	$23,%mm2
+	movq	%mm1,%mm3
+	psrlq	$4,%mm1
+	pxor	%mm2,%mm3
+	psllq	$23,%mm2
+	pxor	%mm1,%mm3
+	psrlq	$23,%mm1
+	pxor	%mm2,%mm3
+	psllq	$4,%mm2
+	pxor	%mm1,%mm3
+	paddq	(%ebp),%mm7
+	pxor	%mm2,%mm3
+	pxor	%mm6,%mm5
+	movq	8(%esp),%mm1
+	pand	%mm4,%mm5
+	movq	16(%esp),%mm2
+	pxor	%mm6,%mm5
+	movq	24(%esp),%mm4
+	paddq	%mm5,%mm3
+	movq	%mm0,(%esp)
+	paddq	%mm7,%mm3
+	movq	%mm0,%mm5
+	movq	%mm0,%mm6
+	paddq	72(%esp),%mm3
+	psrlq	$28,%mm5
+	paddq	%mm3,%mm4
+	psllq	$25,%mm6
+	movq	%mm5,%mm7
+	psrlq	$6,%mm5
+	pxor	%mm6,%mm7
+	psllq	$5,%mm6
+	pxor	%mm5,%mm7
+	psrlq	$5,%mm5
+	pxor	%mm6,%mm7
+	psllq	$6,%mm6
+	pxor	%mm5,%mm7
+	subl	$8,%esp
+	pxor	%mm6,%mm7
+	movq	%mm0,%mm5
+	por	%mm2,%mm0
+	movq	88(%esp),%mm6
+	pand	%mm2,%mm5
+	pand	%mm1,%mm0
+	movq	192(%esp),%mm2
+	por	%mm0,%mm5
+	paddq	%mm5,%mm7
+	movq	%mm3,%mm0
+	movb	(%ebp),%dl
+	paddq	%mm7,%mm0
+	addl	$8,%ebp
+.align	16
+.L00516_79_sse2:
+	movq	%mm2,%mm1
+	psrlq	$1,%mm2
+	movq	%mm6,%mm7
+	psrlq	$6,%mm6
+	movq	%mm2,%mm3
+	psrlq	$6,%mm2
+	movq	%mm6,%mm5
+	psrlq	$13,%mm6
+	pxor	%mm2,%mm3
+	psrlq	$1,%mm2
+	pxor	%mm6,%mm5
+	psrlq	$42,%mm6
+	pxor	%mm2,%mm3
+	movq	200(%esp),%mm2
+	psllq	$56,%mm1
+	pxor	%mm6,%mm5
+	psllq	$3,%mm7
+	pxor	%mm1,%mm3
+	paddq	128(%esp),%mm2
+	psllq	$7,%mm1
+	pxor	%mm7,%mm5
+	psllq	$42,%mm7
+	pxor	%mm1,%mm3
+	pxor	%mm7,%mm5
+	paddq	%mm5,%mm3
+	paddq	%mm2,%mm3
+	movq	%mm3,72(%esp)
+	movq	40(%esp),%mm5
+	movq	48(%esp),%mm6
+	movq	56(%esp),%mm7
+	movq	%mm4,%mm1
+	movq	%mm4,%mm2
+	psrlq	$14,%mm1
+	movq	%mm4,32(%esp)
+	psllq	$23,%mm2
+	movq	%mm1,%mm3
+	psrlq	$4,%mm1
+	pxor	%mm2,%mm3
+	psllq	$23,%mm2
+	pxor	%mm1,%mm3
+	psrlq	$23,%mm1
+	pxor	%mm2,%mm3
+	psllq	$4,%mm2
+	pxor	%mm1,%mm3
+	paddq	(%ebp),%mm7
+	pxor	%mm2,%mm3
+	pxor	%mm6,%mm5
+	movq	8(%esp),%mm1
+	pand	%mm4,%mm5
+	movq	16(%esp),%mm2
+	pxor	%mm6,%mm5
+	movq	24(%esp),%mm4
+	paddq	%mm5,%mm3
+	movq	%mm0,(%esp)
+	paddq	%mm7,%mm3
+	movq	%mm0,%mm5
+	movq	%mm0,%mm6
+	paddq	72(%esp),%mm3
+	psrlq	$28,%mm5
+	paddq	%mm3,%mm4
+	psllq	$25,%mm6
+	movq	%mm5,%mm7
+	psrlq	$6,%mm5
+	pxor	%mm6,%mm7
+	psllq	$5,%mm6
+	pxor	%mm5,%mm7
+	psrlq	$5,%mm5
+	pxor	%mm6,%mm7
+	psllq	$6,%mm6
+	pxor	%mm5,%mm7
+	subl	$8,%esp
+	pxor	%mm6,%mm7
+	movq	%mm0,%mm5
+	por	%mm2,%mm0
+	movq	88(%esp),%mm6
+	pand	%mm2,%mm5
+	pand	%mm1,%mm0
+	movq	192(%esp),%mm2
+	por	%mm0,%mm5
+	paddq	%mm5,%mm7
+	movq	%mm3,%mm0
+	movb	(%ebp),%dl
+	paddq	%mm7,%mm0
+	addl	$8,%ebp
+	cmpb	$23,%dl
+	jne	.L00516_79_sse2
+	movq	8(%esp),%mm1
+	movq	16(%esp),%mm2
+	movq	24(%esp),%mm3
+	movq	40(%esp),%mm5
+	movq	48(%esp),%mm6
+	movq	56(%esp),%mm7
+	paddq	(%esi),%mm0
+	paddq	8(%esi),%mm1
+	paddq	16(%esi),%mm2
+	paddq	24(%esi),%mm3
+	paddq	32(%esi),%mm4
+	paddq	40(%esi),%mm5
+	paddq	48(%esi),%mm6
+	paddq	56(%esi),%mm7
+	movq	%mm0,(%esi)
+	movq	%mm1,8(%esi)
+	movq	%mm2,16(%esi)
+	movq	%mm3,24(%esi)
+	movq	%mm4,32(%esi)
+	movq	%mm5,40(%esi)
+	movq	%mm6,48(%esi)
+	movq	%mm7,56(%esi)
+	addl	$640,%esp
+	subl	$640,%ebp
+	cmpl	88(%esp),%edi
+	jb	.L003loop_sse2
+	emms
+	movl	92(%esp),%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	16
+.L002loop_x86:
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	16(%edi),%eax
+	movl	20(%edi),%ebx
+	movl	24(%edi),%ecx
+	movl	28(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	32(%edi),%eax
+	movl	36(%edi),%ebx
+	movl	40(%edi),%ecx
+	movl	44(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	48(%edi),%eax
+	movl	52(%edi),%ebx
+	movl	56(%edi),%ecx
+	movl	60(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	64(%edi),%eax
+	movl	68(%edi),%ebx
+	movl	72(%edi),%ecx
+	movl	76(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	80(%edi),%eax
+	movl	84(%edi),%ebx
+	movl	88(%edi),%ecx
+	movl	92(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	96(%edi),%eax
+	movl	100(%edi),%ebx
+	movl	104(%edi),%ecx
+	movl	108(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	112(%edi),%eax
+	movl	116(%edi),%ebx
+	movl	120(%edi),%ecx
+	movl	124(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	addl	$128,%edi
+	subl	$72,%esp
+	movl	%edi,204(%esp)
+	leal	8(%esp),%edi
+	movl	$16,%ecx
+.long	2784229001
+.align	16
+.L00600_15_x86:
+	movl	40(%esp),%ecx
+	movl	44(%esp),%edx
+	movl	%ecx,%esi
+	shrl	$9,%ecx
+	movl	%edx,%edi
+	shrl	$9,%edx
+	movl	%ecx,%ebx
+	shll	$14,%esi
+	movl	%edx,%eax
+	shll	$14,%edi
+	xorl	%esi,%ebx
+	shrl	$5,%ecx
+	xorl	%edi,%eax
+	shrl	$5,%edx
+	xorl	%ecx,%eax
+	shll	$4,%esi
+	xorl	%edx,%ebx
+	shll	$4,%edi
+	xorl	%esi,%ebx
+	shrl	$4,%ecx
+	xorl	%edi,%eax
+	shrl	$4,%edx
+	xorl	%ecx,%eax
+	shll	$5,%esi
+	xorl	%edx,%ebx
+	shll	$5,%edi
+	xorl	%esi,%eax
+	xorl	%edi,%ebx
+	movl	48(%esp),%ecx
+	movl	52(%esp),%edx
+	movl	56(%esp),%esi
+	movl	60(%esp),%edi
+	addl	64(%esp),%eax
+	adcl	68(%esp),%ebx
+	xorl	%esi,%ecx
+	xorl	%edi,%edx
+	andl	40(%esp),%ecx
+	andl	44(%esp),%edx
+	addl	192(%esp),%eax
+	adcl	196(%esp),%ebx
+	xorl	%esi,%ecx
+	xorl	%edi,%edx
+	movl	(%ebp),%esi
+	movl	4(%ebp),%edi
+	addl	%ecx,%eax
+	adcl	%edx,%ebx
+	movl	32(%esp),%ecx
+	movl	36(%esp),%edx
+	addl	%esi,%eax
+	adcl	%edi,%ebx
+	movl	%eax,(%esp)
+	movl	%ebx,4(%esp)
+	addl	%ecx,%eax
+	adcl	%edx,%ebx
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edx
+	movl	%eax,32(%esp)
+	movl	%ebx,36(%esp)
+	movl	%ecx,%esi
+	shrl	$2,%ecx
+	movl	%edx,%edi
+	shrl	$2,%edx
+	movl	%ecx,%ebx
+	shll	$4,%esi
+	movl	%edx,%eax
+	shll	$4,%edi
+	xorl	%esi,%ebx
+	shrl	$5,%ecx
+	xorl	%edi,%eax
+	shrl	$5,%edx
+	xorl	%ecx,%ebx
+	shll	$21,%esi
+	xorl	%edx,%eax
+	shll	$21,%edi
+	xorl	%esi,%eax
+	shrl	$21,%ecx
+	xorl	%edi,%ebx
+	shrl	$21,%edx
+	xorl	%ecx,%eax
+	shll	$5,%esi
+	xorl	%edx,%ebx
+	shll	$5,%edi
+	xorl	%esi,%eax
+	xorl	%edi,%ebx
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edx
+	movl	16(%esp),%esi
+	movl	20(%esp),%edi
+	addl	(%esp),%eax
+	adcl	4(%esp),%ebx
+	orl	%esi,%ecx
+	orl	%edi,%edx
+	andl	24(%esp),%ecx
+	andl	28(%esp),%edx
+	andl	8(%esp),%esi
+	andl	12(%esp),%edi
+	orl	%esi,%ecx
+	orl	%edi,%edx
+	addl	%ecx,%eax
+	adcl	%edx,%ebx
+	movl	%eax,(%esp)
+	movl	%ebx,4(%esp)
+	movb	(%ebp),%dl
+	subl	$8,%esp
+	leal	8(%ebp),%ebp
+	cmpb	$148,%dl
+	jne	.L00600_15_x86
+.align	16
+.L00716_79_x86:
+	movl	312(%esp),%ecx
+	movl	316(%esp),%edx
+	movl	%ecx,%esi
+	shrl	$1,%ecx
+	movl	%edx,%edi
+	shrl	$1,%edx
+	movl	%ecx,%eax
+	shll	$24,%esi
+	movl	%edx,%ebx
+	shll	$24,%edi
+	xorl	%esi,%ebx
+	shrl	$6,%ecx
+	xorl	%edi,%eax
+	shrl	$6,%edx
+	xorl	%ecx,%eax
+	shll	$7,%esi
+	xorl	%edx,%ebx
+	shll	$1,%edi
+	xorl	%esi,%ebx
+	shrl	$1,%ecx
+	xorl	%edi,%eax
+	shrl	$1,%edx
+	xorl	%ecx,%eax
+	shll	$6,%edi
+	xorl	%edx,%ebx
+	xorl	%edi,%eax
+	movl	%eax,(%esp)
+	movl	%ebx,4(%esp)
+	movl	208(%esp),%ecx
+	movl	212(%esp),%edx
+	movl	%ecx,%esi
+	shrl	$6,%ecx
+	movl	%edx,%edi
+	shrl	$6,%edx
+	movl	%ecx,%eax
+	shll	$3,%esi
+	movl	%edx,%ebx
+	shll	$3,%edi
+	xorl	%esi,%eax
+	shrl	$13,%ecx
+	xorl	%edi,%ebx
+	shrl	$13,%edx
+	xorl	%ecx,%eax
+	shll	$10,%esi
+	xorl	%edx,%ebx
+	shll	$10,%edi
+	xorl	%esi,%ebx
+	shrl	$10,%ecx
+	xorl	%edi,%eax
+	shrl	$10,%edx
+	xorl	%ecx,%ebx
+	shll	$13,%edi
+	xorl	%edx,%eax
+	xorl	%edi,%eax
+	movl	320(%esp),%ecx
+	movl	324(%esp),%edx
+	addl	(%esp),%eax
+	adcl	4(%esp),%ebx
+	movl	248(%esp),%esi
+	movl	252(%esp),%edi
+	addl	%ecx,%eax
+	adcl	%edx,%ebx
+	addl	%esi,%eax
+	adcl	%edi,%ebx
+	movl	%eax,192(%esp)
+	movl	%ebx,196(%esp)
+	movl	40(%esp),%ecx
+	movl	44(%esp),%edx
+	movl	%ecx,%esi
+	shrl	$9,%ecx
+	movl	%edx,%edi
+	shrl	$9,%edx
+	movl	%ecx,%ebx
+	shll	$14,%esi
+	movl	%edx,%eax
+	shll	$14,%edi
+	xorl	%esi,%ebx
+	shrl	$5,%ecx
+	xorl	%edi,%eax
+	shrl	$5,%edx
+	xorl	%ecx,%eax
+	shll	$4,%esi
+	xorl	%edx,%ebx
+	shll	$4,%edi
+	xorl	%esi,%ebx
+	shrl	$4,%ecx
+	xorl	%edi,%eax
+	shrl	$4,%edx
+	xorl	%ecx,%eax
+	shll	$5,%esi
+	xorl	%edx,%ebx
+	shll	$5,%edi
+	xorl	%esi,%eax
+	xorl	%edi,%ebx
+	movl	48(%esp),%ecx
+	movl	52(%esp),%edx
+	movl	56(%esp),%esi
+	movl	60(%esp),%edi
+	addl	64(%esp),%eax
+	adcl	68(%esp),%ebx
+	xorl	%esi,%ecx
+	xorl	%edi,%edx
+	andl	40(%esp),%ecx
+	andl	44(%esp),%edx
+	addl	192(%esp),%eax
+	adcl	196(%esp),%ebx
+	xorl	%esi,%ecx
+	xorl	%edi,%edx
+	movl	(%ebp),%esi
+	movl	4(%ebp),%edi
+	addl	%ecx,%eax
+	adcl	%edx,%ebx
+	movl	32(%esp),%ecx
+	movl	36(%esp),%edx
+	addl	%esi,%eax
+	adcl	%edi,%ebx
+	movl	%eax,(%esp)
+	movl	%ebx,4(%esp)
+	addl	%ecx,%eax
+	adcl	%edx,%ebx
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edx
+	movl	%eax,32(%esp)
+	movl	%ebx,36(%esp)
+	movl	%ecx,%esi
+	shrl	$2,%ecx
+	movl	%edx,%edi
+	shrl	$2,%edx
+	movl	%ecx,%ebx
+	shll	$4,%esi
+	movl	%edx,%eax
+	shll	$4,%edi
+	xorl	%esi,%ebx
+	shrl	$5,%ecx
+	xorl	%edi,%eax
+	shrl	$5,%edx
+	xorl	%ecx,%ebx
+	shll	$21,%esi
+	xorl	%edx,%eax
+	shll	$21,%edi
+	xorl	%esi,%eax
+	shrl	$21,%ecx
+	xorl	%edi,%ebx
+	shrl	$21,%edx
+	xorl	%ecx,%eax
+	shll	$5,%esi
+	xorl	%edx,%ebx
+	shll	$5,%edi
+	xorl	%esi,%eax
+	xorl	%edi,%ebx
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edx
+	movl	16(%esp),%esi
+	movl	20(%esp),%edi
+	addl	(%esp),%eax
+	adcl	4(%esp),%ebx
+	orl	%esi,%ecx
+	orl	%edi,%edx
+	andl	24(%esp),%ecx
+	andl	28(%esp),%edx
+	andl	8(%esp),%esi
+	andl	12(%esp),%edi
+	orl	%esi,%ecx
+	orl	%edi,%edx
+	addl	%ecx,%eax
+	adcl	%edx,%ebx
+	movl	%eax,(%esp)
+	movl	%ebx,4(%esp)
+	movb	(%ebp),%dl
+	subl	$8,%esp
+	leal	8(%ebp),%ebp
+	cmpb	$23,%dl
+	jne	.L00716_79_x86
+	movl	840(%esp),%esi
+	movl	844(%esp),%edi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	addl	8(%esp),%eax
+	adcl	12(%esp),%ebx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	addl	16(%esp),%ecx
+	adcl	20(%esp),%edx
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	movl	16(%esi),%eax
+	movl	20(%esi),%ebx
+	movl	24(%esi),%ecx
+	movl	28(%esi),%edx
+	addl	24(%esp),%eax
+	adcl	28(%esp),%ebx
+	movl	%eax,16(%esi)
+	movl	%ebx,20(%esi)
+	addl	32(%esp),%ecx
+	adcl	36(%esp),%edx
+	movl	%ecx,24(%esi)
+	movl	%edx,28(%esi)
+	movl	32(%esi),%eax
+	movl	36(%esi),%ebx
+	movl	40(%esi),%ecx
+	movl	44(%esi),%edx
+	addl	40(%esp),%eax
+	adcl	44(%esp),%ebx
+	movl	%eax,32(%esi)
+	movl	%ebx,36(%esi)
+	addl	48(%esp),%ecx
+	adcl	52(%esp),%edx
+	movl	%ecx,40(%esi)
+	movl	%edx,44(%esi)
+	movl	48(%esi),%eax
+	movl	52(%esi),%ebx
+	movl	56(%esi),%ecx
+	movl	60(%esi),%edx
+	addl	56(%esp),%eax
+	adcl	60(%esp),%ebx
+	movl	%eax,48(%esi)
+	movl	%ebx,52(%esi)
+	addl	64(%esp),%ecx
+	adcl	68(%esp),%edx
+	movl	%ecx,56(%esi)
+	movl	%edx,60(%esi)
+	addl	$840,%esp
+	subl	$640,%ebp
+	cmpl	8(%esp),%edi
+	jb	.L002loop_x86
+	movl	12(%esp),%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	64
+.L001K512:
+.long	3609767458,1116352408
+.long	602891725,1899447441
+.long	3964484399,3049323471
+.long	2173295548,3921009573
+.long	4081628472,961987163
+.long	3053834265,1508970993
+.long	2937671579,2453635748
+.long	3664609560,2870763221
+.long	2734883394,3624381080
+.long	1164996542,310598401
+.long	1323610764,607225278
+.long	3590304994,1426881987
+.long	4068182383,1925078388
+.long	991336113,2162078206
+.long	633803317,2614888103
+.long	3479774868,3248222580
+.long	2666613458,3835390401
+.long	944711139,4022224774
+.long	2341262773,264347078
+.long	2007800933,604807628
+.long	1495990901,770255983
+.long	1856431235,1249150122
+.long	3175218132,1555081692
+.long	2198950837,1996064986
+.long	3999719339,2554220882
+.long	766784016,2821834349
+.long	2566594879,2952996808
+.long	3203337956,3210313671
+.long	1034457026,3336571891
+.long	2466948901,3584528711
+.long	3758326383,113926993
+.long	168717936,338241895
+.long	1188179964,666307205
+.long	1546045734,773529912
+.long	1522805485,1294757372
+.long	2643833823,1396182291
+.long	2343527390,1695183700
+.long	1014477480,1986661051
+.long	1206759142,2177026350
+.long	344077627,2456956037
+.long	1290863460,2730485921
+.long	3158454273,2820302411
+.long	3505952657,3259730800
+.long	106217008,3345764771
+.long	3606008344,3516065817
+.long	1432725776,3600352804
+.long	1467031594,4094571909
+.long	851169720,275423344
+.long	3100823752,430227734
+.long	1363258195,506948616
+.long	3750685593,659060556
+.long	3785050280,883997877
+.long	3318307427,958139571
+.long	3812723403,1322822218
+.long	2003034995,1537002063
+.long	3602036899,1747873779
+.long	1575990012,1955562222
+.long	1125592928,2024104815
+.long	2716904306,2227730452
+.long	442776044,2361852424
+.long	593698344,2428436474
+.long	3733110249,2756734187
+.long	2999351573,3204031479
+.long	3815920427,3329325298
+.long	3928383900,3391569614
+.long	566280711,3515267271
+.long	3454069534,3940187606
+.long	4000239992,4118630271
+.long	1914138554,116418474
+.long	2731055270,174292421
+.long	3203993006,289380356
+.long	320620315,460393269
+.long	587496836,685471733
+.long	1086792851,852142971
+.long	365543100,1017036298
+.long	2618297676,1126000580
+.long	3409855158,1288033470
+.long	4234509866,1501505948
+.long	987167468,1607167915
+.long	1246189591,1816402316
+.size	sha512_block_data_order,.-.L_sha512_block_data_order_begin
+.byte	83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97
+.byte	110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
+.byte	67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+.byte	112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+.byte	62,0
+.comm	OPENSSL_ia32cap_P,8,4
+#else
+.file	"sha512-586.S"
+.text
+.globl	sha512_block_data_order
+.type	sha512_block_data_order,@function
+.align	16
+sha512_block_data_order:
+.L_sha512_block_data_order_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%eax
+	movl	%esp,%ebx
+	call	.L000pic_point
+.L000pic_point:
+	popl	%ebp
+	leal	.L001K512-.L000pic_point(%ebp),%ebp
+	subl	$16,%esp
+	andl	$-64,%esp
+	shll	$7,%eax
+	addl	%edi,%eax
+	movl	%esi,(%esp)
+	movl	%edi,4(%esp)
+	movl	%eax,8(%esp)
+	movl	%ebx,12(%esp)
+	leal	OPENSSL_ia32cap_P,%edx
+	btl	$26,(%edx)
+	jnc	.L002loop_x86
+	movq	(%esi),%mm0
+	movq	8(%esi),%mm1
+	movq	16(%esi),%mm2
+	movq	24(%esi),%mm3
+	movq	32(%esi),%mm4
+	movq	40(%esi),%mm5
+	movq	48(%esi),%mm6
+	movq	56(%esi),%mm7
+	subl	$80,%esp
+.align	16
+.L003loop_sse2:
+	movq	%mm1,8(%esp)
+	movq	%mm2,16(%esp)
+	movq	%mm3,24(%esp)
+	movq	%mm5,40(%esp)
+	movq	%mm6,48(%esp)
+	movq	%mm7,56(%esp)
+	movl	(%edi),%ecx
+	movl	4(%edi),%edx
+	addl	$8,%edi
+	bswap	%ecx
+	bswap	%edx
+	movl	%ecx,76(%esp)
+	movl	%edx,72(%esp)
+.align	16
+.L00400_14_sse2:
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	addl	$8,%edi
+	bswap	%eax
+	bswap	%ebx
+	movl	%eax,68(%esp)
+	movl	%ebx,64(%esp)
+	movq	40(%esp),%mm5
+	movq	48(%esp),%mm6
+	movq	56(%esp),%mm7
+	movq	%mm4,%mm1
+	movq	%mm4,%mm2
+	psrlq	$14,%mm1
+	movq	%mm4,32(%esp)
+	psllq	$23,%mm2
+	movq	%mm1,%mm3
+	psrlq	$4,%mm1
+	pxor	%mm2,%mm3
+	psllq	$23,%mm2
+	pxor	%mm1,%mm3
+	psrlq	$23,%mm1
+	pxor	%mm2,%mm3
+	psllq	$4,%mm2
+	pxor	%mm1,%mm3
+	paddq	(%ebp),%mm7
+	pxor	%mm2,%mm3
+	pxor	%mm6,%mm5
+	movq	8(%esp),%mm1
+	pand	%mm4,%mm5
+	movq	16(%esp),%mm2
+	pxor	%mm6,%mm5
+	movq	24(%esp),%mm4
+	paddq	%mm5,%mm3
+	movq	%mm0,(%esp)
+	paddq	%mm7,%mm3
+	movq	%mm0,%mm5
+	movq	%mm0,%mm6
+	paddq	72(%esp),%mm3
+	psrlq	$28,%mm5
+	paddq	%mm3,%mm4
+	psllq	$25,%mm6
+	movq	%mm5,%mm7
+	psrlq	$6,%mm5
+	pxor	%mm6,%mm7
+	psllq	$5,%mm6
+	pxor	%mm5,%mm7
+	psrlq	$5,%mm5
+	pxor	%mm6,%mm7
+	psllq	$6,%mm6
+	pxor	%mm5,%mm7
+	subl	$8,%esp
+	pxor	%mm6,%mm7
+	movq	%mm0,%mm5
+	por	%mm2,%mm0
+	pand	%mm2,%mm5
+	pand	%mm1,%mm0
+	por	%mm0,%mm5
+	paddq	%mm5,%mm7
+	movq	%mm3,%mm0
+	movb	(%ebp),%dl
+	paddq	%mm7,%mm0
+	addl	$8,%ebp
+	cmpb	$53,%dl
+	jne	.L00400_14_sse2
+	movq	40(%esp),%mm5
+	movq	48(%esp),%mm6
+	movq	56(%esp),%mm7
+	movq	%mm4,%mm1
+	movq	%mm4,%mm2
+	psrlq	$14,%mm1
+	movq	%mm4,32(%esp)
+	psllq	$23,%mm2
+	movq	%mm1,%mm3
+	psrlq	$4,%mm1
+	pxor	%mm2,%mm3
+	psllq	$23,%mm2
+	pxor	%mm1,%mm3
+	psrlq	$23,%mm1
+	pxor	%mm2,%mm3
+	psllq	$4,%mm2
+	pxor	%mm1,%mm3
+	paddq	(%ebp),%mm7
+	pxor	%mm2,%mm3
+	pxor	%mm6,%mm5
+	movq	8(%esp),%mm1
+	pand	%mm4,%mm5
+	movq	16(%esp),%mm2
+	pxor	%mm6,%mm5
+	movq	24(%esp),%mm4
+	paddq	%mm5,%mm3
+	movq	%mm0,(%esp)
+	paddq	%mm7,%mm3
+	movq	%mm0,%mm5
+	movq	%mm0,%mm6
+	paddq	72(%esp),%mm3
+	psrlq	$28,%mm5
+	paddq	%mm3,%mm4
+	psllq	$25,%mm6
+	movq	%mm5,%mm7
+	psrlq	$6,%mm5
+	pxor	%mm6,%mm7
+	psllq	$5,%mm6
+	pxor	%mm5,%mm7
+	psrlq	$5,%mm5
+	pxor	%mm6,%mm7
+	psllq	$6,%mm6
+	pxor	%mm5,%mm7
+	subl	$8,%esp
+	pxor	%mm6,%mm7
+	movq	%mm0,%mm5
+	por	%mm2,%mm0
+	movq	88(%esp),%mm6
+	pand	%mm2,%mm5
+	pand	%mm1,%mm0
+	movq	192(%esp),%mm2
+	por	%mm0,%mm5
+	paddq	%mm5,%mm7
+	movq	%mm3,%mm0
+	movb	(%ebp),%dl
+	paddq	%mm7,%mm0
+	addl	$8,%ebp
+.align	16
+.L00516_79_sse2:
+	movq	%mm2,%mm1
+	psrlq	$1,%mm2
+	movq	%mm6,%mm7
+	psrlq	$6,%mm6
+	movq	%mm2,%mm3
+	psrlq	$6,%mm2
+	movq	%mm6,%mm5
+	psrlq	$13,%mm6
+	pxor	%mm2,%mm3
+	psrlq	$1,%mm2
+	pxor	%mm6,%mm5
+	psrlq	$42,%mm6
+	pxor	%mm2,%mm3
+	movq	200(%esp),%mm2
+	psllq	$56,%mm1
+	pxor	%mm6,%mm5
+	psllq	$3,%mm7
+	pxor	%mm1,%mm3
+	paddq	128(%esp),%mm2
+	psllq	$7,%mm1
+	pxor	%mm7,%mm5
+	psllq	$42,%mm7
+	pxor	%mm1,%mm3
+	pxor	%mm7,%mm5
+	paddq	%mm5,%mm3
+	paddq	%mm2,%mm3
+	movq	%mm3,72(%esp)
+	movq	40(%esp),%mm5
+	movq	48(%esp),%mm6
+	movq	56(%esp),%mm7
+	movq	%mm4,%mm1
+	movq	%mm4,%mm2
+	psrlq	$14,%mm1
+	movq	%mm4,32(%esp)
+	psllq	$23,%mm2
+	movq	%mm1,%mm3
+	psrlq	$4,%mm1
+	pxor	%mm2,%mm3
+	psllq	$23,%mm2
+	pxor	%mm1,%mm3
+	psrlq	$23,%mm1
+	pxor	%mm2,%mm3
+	psllq	$4,%mm2
+	pxor	%mm1,%mm3
+	paddq	(%ebp),%mm7
+	pxor	%mm2,%mm3
+	pxor	%mm6,%mm5
+	movq	8(%esp),%mm1
+	pand	%mm4,%mm5
+	movq	16(%esp),%mm2
+	pxor	%mm6,%mm5
+	movq	24(%esp),%mm4
+	paddq	%mm5,%mm3
+	movq	%mm0,(%esp)
+	paddq	%mm7,%mm3
+	movq	%mm0,%mm5
+	movq	%mm0,%mm6
+	paddq	72(%esp),%mm3
+	psrlq	$28,%mm5
+	paddq	%mm3,%mm4
+	psllq	$25,%mm6
+	movq	%mm5,%mm7
+	psrlq	$6,%mm5
+	pxor	%mm6,%mm7
+	psllq	$5,%mm6
+	pxor	%mm5,%mm7
+	psrlq	$5,%mm5
+	pxor	%mm6,%mm7
+	psllq	$6,%mm6
+	pxor	%mm5,%mm7
+	subl	$8,%esp
+	pxor	%mm6,%mm7
+	movq	%mm0,%mm5
+	por	%mm2,%mm0
+	movq	88(%esp),%mm6
+	pand	%mm2,%mm5
+	pand	%mm1,%mm0
+	movq	192(%esp),%mm2
+	por	%mm0,%mm5
+	paddq	%mm5,%mm7
+	movq	%mm3,%mm0
+	movb	(%ebp),%dl
+	paddq	%mm7,%mm0
+	addl	$8,%ebp
+	cmpb	$23,%dl
+	jne	.L00516_79_sse2
+	movq	8(%esp),%mm1
+	movq	16(%esp),%mm2
+	movq	24(%esp),%mm3
+	movq	40(%esp),%mm5
+	movq	48(%esp),%mm6
+	movq	56(%esp),%mm7
+	paddq	(%esi),%mm0
+	paddq	8(%esi),%mm1
+	paddq	16(%esi),%mm2
+	paddq	24(%esi),%mm3
+	paddq	32(%esi),%mm4
+	paddq	40(%esi),%mm5
+	paddq	48(%esi),%mm6
+	paddq	56(%esi),%mm7
+	movq	%mm0,(%esi)
+	movq	%mm1,8(%esi)
+	movq	%mm2,16(%esi)
+	movq	%mm3,24(%esi)
+	movq	%mm4,32(%esi)
+	movq	%mm5,40(%esi)
+	movq	%mm6,48(%esi)
+	movq	%mm7,56(%esi)
+	addl	$640,%esp
+	subl	$640,%ebp
+	cmpl	88(%esp),%edi
+	jb	.L003loop_sse2
+	emms
+	movl	92(%esp),%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	16
+.L002loop_x86:
+	movl	(%edi),%eax
+	movl	4(%edi),%ebx
+	movl	8(%edi),%ecx
+	movl	12(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	16(%edi),%eax
+	movl	20(%edi),%ebx
+	movl	24(%edi),%ecx
+	movl	28(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	32(%edi),%eax
+	movl	36(%edi),%ebx
+	movl	40(%edi),%ecx
+	movl	44(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	48(%edi),%eax
+	movl	52(%edi),%ebx
+	movl	56(%edi),%ecx
+	movl	60(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	64(%edi),%eax
+	movl	68(%edi),%ebx
+	movl	72(%edi),%ecx
+	movl	76(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	80(%edi),%eax
+	movl	84(%edi),%ebx
+	movl	88(%edi),%ecx
+	movl	92(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	96(%edi),%eax
+	movl	100(%edi),%ebx
+	movl	104(%edi),%ecx
+	movl	108(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	movl	112(%edi),%eax
+	movl	116(%edi),%ebx
+	movl	120(%edi),%ecx
+	movl	124(%edi),%edx
+	bswap	%eax
+	bswap	%ebx
+	bswap	%ecx
+	bswap	%edx
+	pushl	%eax
+	pushl	%ebx
+	pushl	%ecx
+	pushl	%edx
+	addl	$128,%edi
+	subl	$72,%esp
+	movl	%edi,204(%esp)
+	leal	8(%esp),%edi
+	movl	$16,%ecx
+.long	2784229001
+.align	16
+.L00600_15_x86:
+	movl	40(%esp),%ecx
+	movl	44(%esp),%edx
+	movl	%ecx,%esi
+	shrl	$9,%ecx
+	movl	%edx,%edi
+	shrl	$9,%edx
+	movl	%ecx,%ebx
+	shll	$14,%esi
+	movl	%edx,%eax
+	shll	$14,%edi
+	xorl	%esi,%ebx
+	shrl	$5,%ecx
+	xorl	%edi,%eax
+	shrl	$5,%edx
+	xorl	%ecx,%eax
+	shll	$4,%esi
+	xorl	%edx,%ebx
+	shll	$4,%edi
+	xorl	%esi,%ebx
+	shrl	$4,%ecx
+	xorl	%edi,%eax
+	shrl	$4,%edx
+	xorl	%ecx,%eax
+	shll	$5,%esi
+	xorl	%edx,%ebx
+	shll	$5,%edi
+	xorl	%esi,%eax
+	xorl	%edi,%ebx
+	movl	48(%esp),%ecx
+	movl	52(%esp),%edx
+	movl	56(%esp),%esi
+	movl	60(%esp),%edi
+	addl	64(%esp),%eax
+	adcl	68(%esp),%ebx
+	xorl	%esi,%ecx
+	xorl	%edi,%edx
+	andl	40(%esp),%ecx
+	andl	44(%esp),%edx
+	addl	192(%esp),%eax
+	adcl	196(%esp),%ebx
+	xorl	%esi,%ecx
+	xorl	%edi,%edx
+	movl	(%ebp),%esi
+	movl	4(%ebp),%edi
+	addl	%ecx,%eax
+	adcl	%edx,%ebx
+	movl	32(%esp),%ecx
+	movl	36(%esp),%edx
+	addl	%esi,%eax
+	adcl	%edi,%ebx
+	movl	%eax,(%esp)
+	movl	%ebx,4(%esp)
+	addl	%ecx,%eax
+	adcl	%edx,%ebx
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edx
+	movl	%eax,32(%esp)
+	movl	%ebx,36(%esp)
+	movl	%ecx,%esi
+	shrl	$2,%ecx
+	movl	%edx,%edi
+	shrl	$2,%edx
+	movl	%ecx,%ebx
+	shll	$4,%esi
+	movl	%edx,%eax
+	shll	$4,%edi
+	xorl	%esi,%ebx
+	shrl	$5,%ecx
+	xorl	%edi,%eax
+	shrl	$5,%edx
+	xorl	%ecx,%ebx
+	shll	$21,%esi
+	xorl	%edx,%eax
+	shll	$21,%edi
+	xorl	%esi,%eax
+	shrl	$21,%ecx
+	xorl	%edi,%ebx
+	shrl	$21,%edx
+	xorl	%ecx,%eax
+	shll	$5,%esi
+	xorl	%edx,%ebx
+	shll	$5,%edi
+	xorl	%esi,%eax
+	xorl	%edi,%ebx
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edx
+	movl	16(%esp),%esi
+	movl	20(%esp),%edi
+	addl	(%esp),%eax
+	adcl	4(%esp),%ebx
+	orl	%esi,%ecx
+	orl	%edi,%edx
+	andl	24(%esp),%ecx
+	andl	28(%esp),%edx
+	andl	8(%esp),%esi
+	andl	12(%esp),%edi
+	orl	%esi,%ecx
+	orl	%edi,%edx
+	addl	%ecx,%eax
+	adcl	%edx,%ebx
+	movl	%eax,(%esp)
+	movl	%ebx,4(%esp)
+	movb	(%ebp),%dl
+	subl	$8,%esp
+	leal	8(%ebp),%ebp
+	cmpb	$148,%dl
+	jne	.L00600_15_x86
+.align	16
+.L00716_79_x86:
+	movl	312(%esp),%ecx
+	movl	316(%esp),%edx
+	movl	%ecx,%esi
+	shrl	$1,%ecx
+	movl	%edx,%edi
+	shrl	$1,%edx
+	movl	%ecx,%eax
+	shll	$24,%esi
+	movl	%edx,%ebx
+	shll	$24,%edi
+	xorl	%esi,%ebx
+	shrl	$6,%ecx
+	xorl	%edi,%eax
+	shrl	$6,%edx
+	xorl	%ecx,%eax
+	shll	$7,%esi
+	xorl	%edx,%ebx
+	shll	$1,%edi
+	xorl	%esi,%ebx
+	shrl	$1,%ecx
+	xorl	%edi,%eax
+	shrl	$1,%edx
+	xorl	%ecx,%eax
+	shll	$6,%edi
+	xorl	%edx,%ebx
+	xorl	%edi,%eax
+	movl	%eax,(%esp)
+	movl	%ebx,4(%esp)
+	movl	208(%esp),%ecx
+	movl	212(%esp),%edx
+	movl	%ecx,%esi
+	shrl	$6,%ecx
+	movl	%edx,%edi
+	shrl	$6,%edx
+	movl	%ecx,%eax
+	shll	$3,%esi
+	movl	%edx,%ebx
+	shll	$3,%edi
+	xorl	%esi,%eax
+	shrl	$13,%ecx
+	xorl	%edi,%ebx
+	shrl	$13,%edx
+	xorl	%ecx,%eax
+	shll	$10,%esi
+	xorl	%edx,%ebx
+	shll	$10,%edi
+	xorl	%esi,%ebx
+	shrl	$10,%ecx
+	xorl	%edi,%eax
+	shrl	$10,%edx
+	xorl	%ecx,%ebx
+	shll	$13,%edi
+	xorl	%edx,%eax
+	xorl	%edi,%eax
+	movl	320(%esp),%ecx
+	movl	324(%esp),%edx
+	addl	(%esp),%eax
+	adcl	4(%esp),%ebx
+	movl	248(%esp),%esi
+	movl	252(%esp),%edi
+	addl	%ecx,%eax
+	adcl	%edx,%ebx
+	addl	%esi,%eax
+	adcl	%edi,%ebx
+	movl	%eax,192(%esp)
+	movl	%ebx,196(%esp)
+	movl	40(%esp),%ecx
+	movl	44(%esp),%edx
+	movl	%ecx,%esi
+	shrl	$9,%ecx
+	movl	%edx,%edi
+	shrl	$9,%edx
+	movl	%ecx,%ebx
+	shll	$14,%esi
+	movl	%edx,%eax
+	shll	$14,%edi
+	xorl	%esi,%ebx
+	shrl	$5,%ecx
+	xorl	%edi,%eax
+	shrl	$5,%edx
+	xorl	%ecx,%eax
+	shll	$4,%esi
+	xorl	%edx,%ebx
+	shll	$4,%edi
+	xorl	%esi,%ebx
+	shrl	$4,%ecx
+	xorl	%edi,%eax
+	shrl	$4,%edx
+	xorl	%ecx,%eax
+	shll	$5,%esi
+	xorl	%edx,%ebx
+	shll	$5,%edi
+	xorl	%esi,%eax
+	xorl	%edi,%ebx
+	movl	48(%esp),%ecx
+	movl	52(%esp),%edx
+	movl	56(%esp),%esi
+	movl	60(%esp),%edi
+	addl	64(%esp),%eax
+	adcl	68(%esp),%ebx
+	xorl	%esi,%ecx
+	xorl	%edi,%edx
+	andl	40(%esp),%ecx
+	andl	44(%esp),%edx
+	addl	192(%esp),%eax
+	adcl	196(%esp),%ebx
+	xorl	%esi,%ecx
+	xorl	%edi,%edx
+	movl	(%ebp),%esi
+	movl	4(%ebp),%edi
+	addl	%ecx,%eax
+	adcl	%edx,%ebx
+	movl	32(%esp),%ecx
+	movl	36(%esp),%edx
+	addl	%esi,%eax
+	adcl	%edi,%ebx
+	movl	%eax,(%esp)
+	movl	%ebx,4(%esp)
+	addl	%ecx,%eax
+	adcl	%edx,%ebx
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edx
+	movl	%eax,32(%esp)
+	movl	%ebx,36(%esp)
+	movl	%ecx,%esi
+	shrl	$2,%ecx
+	movl	%edx,%edi
+	shrl	$2,%edx
+	movl	%ecx,%ebx
+	shll	$4,%esi
+	movl	%edx,%eax
+	shll	$4,%edi
+	xorl	%esi,%ebx
+	shrl	$5,%ecx
+	xorl	%edi,%eax
+	shrl	$5,%edx
+	xorl	%ecx,%ebx
+	shll	$21,%esi
+	xorl	%edx,%eax
+	shll	$21,%edi
+	xorl	%esi,%eax
+	shrl	$21,%ecx
+	xorl	%edi,%ebx
+	shrl	$21,%edx
+	xorl	%ecx,%eax
+	shll	$5,%esi
+	xorl	%edx,%ebx
+	shll	$5,%edi
+	xorl	%esi,%eax
+	xorl	%edi,%ebx
+	movl	8(%esp),%ecx
+	movl	12(%esp),%edx
+	movl	16(%esp),%esi
+	movl	20(%esp),%edi
+	addl	(%esp),%eax
+	adcl	4(%esp),%ebx
+	orl	%esi,%ecx
+	orl	%edi,%edx
+	andl	24(%esp),%ecx
+	andl	28(%esp),%edx
+	andl	8(%esp),%esi
+	andl	12(%esp),%edi
+	orl	%esi,%ecx
+	orl	%edi,%edx
+	addl	%ecx,%eax
+	adcl	%edx,%ebx
+	movl	%eax,(%esp)
+	movl	%ebx,4(%esp)
+	movb	(%ebp),%dl
+	subl	$8,%esp
+	leal	8(%ebp),%ebp
+	cmpb	$23,%dl
+	jne	.L00716_79_x86
+	movl	840(%esp),%esi
+	movl	844(%esp),%edi
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	addl	8(%esp),%eax
+	adcl	12(%esp),%ebx
+	movl	%eax,(%esi)
+	movl	%ebx,4(%esi)
+	addl	16(%esp),%ecx
+	adcl	20(%esp),%edx
+	movl	%ecx,8(%esi)
+	movl	%edx,12(%esi)
+	movl	16(%esi),%eax
+	movl	20(%esi),%ebx
+	movl	24(%esi),%ecx
+	movl	28(%esi),%edx
+	addl	24(%esp),%eax
+	adcl	28(%esp),%ebx
+	movl	%eax,16(%esi)
+	movl	%ebx,20(%esi)
+	addl	32(%esp),%ecx
+	adcl	36(%esp),%edx
+	movl	%ecx,24(%esi)
+	movl	%edx,28(%esi)
+	movl	32(%esi),%eax
+	movl	36(%esi),%ebx
+	movl	40(%esi),%ecx
+	movl	44(%esi),%edx
+	addl	40(%esp),%eax
+	adcl	44(%esp),%ebx
+	movl	%eax,32(%esi)
+	movl	%ebx,36(%esi)
+	addl	48(%esp),%ecx
+	adcl	52(%esp),%edx
+	movl	%ecx,40(%esi)
+	movl	%edx,44(%esi)
+	movl	48(%esi),%eax
+	movl	52(%esi),%ebx
+	movl	56(%esi),%ecx
+	movl	60(%esi),%edx
+	addl	56(%esp),%eax
+	adcl	60(%esp),%ebx
+	movl	%eax,48(%esi)
+	movl	%ebx,52(%esi)
+	addl	64(%esp),%ecx
+	adcl	68(%esp),%edx
+	movl	%ecx,56(%esi)
+	movl	%edx,60(%esi)
+	addl	$840,%esp
+	subl	$640,%ebp
+	cmpl	8(%esp),%edi
+	jb	.L002loop_x86
+	movl	12(%esp),%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	64
+.L001K512:
+.long	3609767458,1116352408
+.long	602891725,1899447441
+.long	3964484399,3049323471
+.long	2173295548,3921009573
+.long	4081628472,961987163
+.long	3053834265,1508970993
+.long	2937671579,2453635748
+.long	3664609560,2870763221
+.long	2734883394,3624381080
+.long	1164996542,310598401
+.long	1323610764,607225278
+.long	3590304994,1426881987
+.long	4068182383,1925078388
+.long	991336113,2162078206
+.long	633803317,2614888103
+.long	3479774868,3248222580
+.long	2666613458,3835390401
+.long	944711139,4022224774
+.long	2341262773,264347078
+.long	2007800933,604807628
+.long	1495990901,770255983
+.long	1856431235,1249150122
+.long	3175218132,1555081692
+.long	2198950837,1996064986
+.long	3999719339,2554220882
+.long	766784016,2821834349
+.long	2566594879,2952996808
+.long	3203337956,3210313671
+.long	1034457026,3336571891
+.long	2466948901,3584528711
+.long	3758326383,113926993
+.long	168717936,338241895
+.long	1188179964,666307205
+.long	1546045734,773529912
+.long	1522805485,1294757372
+.long	2643833823,1396182291
+.long	2343527390,1695183700
+.long	1014477480,1986661051
+.long	1206759142,2177026350
+.long	344077627,2456956037
+.long	1290863460,2730485921
+.long	3158454273,2820302411
+.long	3505952657,3259730800
+.long	106217008,3345764771
+.long	3606008344,3516065817
+.long	1432725776,3600352804
+.long	1467031594,4094571909
+.long	851169720,275423344
+.long	3100823752,430227734
+.long	1363258195,506948616
+.long	3750685593,659060556
+.long	3785050280,883997877
+.long	3318307427,958139571
+.long	3812723403,1322822218
+.long	2003034995,1537002063
+.long	3602036899,1747873779
+.long	1575990012,1955562222
+.long	1125592928,2024104815
+.long	2716904306,2227730452
+.long	442776044,2361852424
+.long	593698344,2428436474
+.long	3733110249,2756734187
+.long	2999351573,3204031479
+.long	3815920427,3329325298
+.long	3928383900,3391569614
+.long	566280711,3515267271
+.long	3454069534,3940187606
+.long	4000239992,4118630271
+.long	1914138554,116418474
+.long	2731055270,174292421
+.long	3203993006,289380356
+.long	320620315,460393269
+.long	587496836,685471733
+.long	1086792851,852142971
+.long	365543100,1017036298
+.long	2618297676,1126000580
+.long	3409855158,1288033470
+.long	4234509866,1501505948
+.long	987167468,1607167915
+.long	1246189591,1816402316
+.size	sha512_block_data_order,.-.L_sha512_block_data_order_begin
+.byte	83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97
+.byte	110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
+.byte	67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+.byte	112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+.byte	62,0
+.comm	OPENSSL_ia32cap_P,8,4
+#endif


Property changes on: trunk/secure/lib/libcrypto/i386/sha512-586.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
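
[The added sha512-586.S picks between two complete implementations at run time: both the PIC and non-PIC flavors test bit 26 of OPENSSL_ia32cap_P (btl $26,(%edx)) and take the MMX-based .L003loop_sse2 path only when that bit -- the SSE2 feature bit from CPUID leaf 1's EDX, which OpenSSL caches in OPENSSL_ia32cap_P -- is set, falling back to the pure-integer .L002loop_x86 otherwise. The same probe from C, using the GCC/Clang <cpuid.h> wrapper; the wrapper is this note's choice, not anything the commit links against:

	#include <cpuid.h>
	#include <stdio.h>

	int main(void)
	{
		unsigned eax, ebx, ecx, edx;

		/* CPUID leaf 1: EDX bit 26 advertises SSE2 -- the same
		 * bit the assembly tests with "btl $26". */
		if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx))
			return 1;
		puts(edx & (1u << 26) ? "SSE2 path (.L003loop_sse2)"
		                      : "x86 path (.L002loop_x86)");
		return 0;
	}

One more generated-code curiosity nearby: the bare ".long 2784229001" in the x86 path is 0xa5f3f689, whose bytes decode to "mov %esi,%esi; rep movsl" -- with %ecx preloaded to 16 and %edi pointing at 8(%esp), apparently a 64-byte copy of the hash state into the stack frame, emitted as raw data in the style these generators use for instructions old assemblers disagreed on.]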
Deleted: trunk/secure/lib/libcrypto/i386/sha512-586.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/sha512-586.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/sha512-586.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,836 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/sha512-586.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"sha512-586.s"
-.text
-.globl	sha512_block_data_order
-.type	sha512_block_data_order,@function
-.align	16
-sha512_block_data_order:
-.L_sha512_block_data_order_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%esi
-	movl	24(%esp),%edi
-	movl	28(%esp),%eax
-	movl	%esp,%ebx
-	call	.L000pic_point
-.L000pic_point:
-	popl	%ebp
-	leal	.L001K512-.L000pic_point(%ebp),%ebp
-	subl	$16,%esp
-	andl	$-64,%esp
-	shll	$7,%eax
-	addl	%edi,%eax
-	movl	%esi,(%esp)
-	movl	%edi,4(%esp)
-	movl	%eax,8(%esp)
-	movl	%ebx,12(%esp)
-	leal	OPENSSL_ia32cap_P,%edx
-	btl	$26,(%edx)
-	jnc	.L002loop_x86
-	movq	(%esi),%mm0
-	movq	8(%esi),%mm1
-	movq	16(%esi),%mm2
-	movq	24(%esi),%mm3
-	movq	32(%esi),%mm4
-	movq	40(%esi),%mm5
-	movq	48(%esi),%mm6
-	movq	56(%esi),%mm7
-	subl	$80,%esp
-.align	16
-.L003loop_sse2:
-	movq	%mm1,8(%esp)
-	movq	%mm2,16(%esp)
-	movq	%mm3,24(%esp)
-	movq	%mm5,40(%esp)
-	movq	%mm6,48(%esp)
-	movq	%mm7,56(%esp)
-	movl	(%edi),%ecx
-	movl	4(%edi),%edx
-	addl	$8,%edi
-	bswap	%ecx
-	bswap	%edx
-	movl	%ecx,76(%esp)
-	movl	%edx,72(%esp)
-.align	16
-.L00400_14_sse2:
-	movl	(%edi),%eax
-	movl	4(%edi),%ebx
-	addl	$8,%edi
-	bswap	%eax
-	bswap	%ebx
-	movl	%eax,68(%esp)
-	movl	%ebx,64(%esp)
-	movq	40(%esp),%mm5
-	movq	48(%esp),%mm6
-	movq	56(%esp),%mm7
-	movq	%mm4,%mm1
-	movq	%mm4,%mm2
-	psrlq	$14,%mm1
-	movq	%mm4,32(%esp)
-	psllq	$23,%mm2
-	movq	%mm1,%mm3
-	psrlq	$4,%mm1
-	pxor	%mm2,%mm3
-	psllq	$23,%mm2
-	pxor	%mm1,%mm3
-	psrlq	$23,%mm1
-	pxor	%mm2,%mm3
-	psllq	$4,%mm2
-	pxor	%mm1,%mm3
-	paddq	(%ebp),%mm7
-	pxor	%mm2,%mm3
-	pxor	%mm6,%mm5
-	movq	8(%esp),%mm1
-	pand	%mm4,%mm5
-	movq	16(%esp),%mm2
-	pxor	%mm6,%mm5
-	movq	24(%esp),%mm4
-	paddq	%mm5,%mm3
-	movq	%mm0,(%esp)
-	paddq	%mm7,%mm3
-	movq	%mm0,%mm5
-	movq	%mm0,%mm6
-	paddq	72(%esp),%mm3
-	psrlq	$28,%mm5
-	paddq	%mm3,%mm4
-	psllq	$25,%mm6
-	movq	%mm5,%mm7
-	psrlq	$6,%mm5
-	pxor	%mm6,%mm7
-	psllq	$5,%mm6
-	pxor	%mm5,%mm7
-	psrlq	$5,%mm5
-	pxor	%mm6,%mm7
-	psllq	$6,%mm6
-	pxor	%mm5,%mm7
-	subl	$8,%esp
-	pxor	%mm6,%mm7
-	movq	%mm0,%mm5
-	por	%mm2,%mm0
-	pand	%mm2,%mm5
-	pand	%mm1,%mm0
-	por	%mm0,%mm5
-	paddq	%mm5,%mm7
-	movq	%mm3,%mm0
-	movb	(%ebp),%dl
-	paddq	%mm7,%mm0
-	addl	$8,%ebp
-	cmpb	$53,%dl
-	jne	.L00400_14_sse2
-	movq	40(%esp),%mm5
-	movq	48(%esp),%mm6
-	movq	56(%esp),%mm7
-	movq	%mm4,%mm1
-	movq	%mm4,%mm2
-	psrlq	$14,%mm1
-	movq	%mm4,32(%esp)
-	psllq	$23,%mm2
-	movq	%mm1,%mm3
-	psrlq	$4,%mm1
-	pxor	%mm2,%mm3
-	psllq	$23,%mm2
-	pxor	%mm1,%mm3
-	psrlq	$23,%mm1
-	pxor	%mm2,%mm3
-	psllq	$4,%mm2
-	pxor	%mm1,%mm3
-	paddq	(%ebp),%mm7
-	pxor	%mm2,%mm3
-	pxor	%mm6,%mm5
-	movq	8(%esp),%mm1
-	pand	%mm4,%mm5
-	movq	16(%esp),%mm2
-	pxor	%mm6,%mm5
-	movq	24(%esp),%mm4
-	paddq	%mm5,%mm3
-	movq	%mm0,(%esp)
-	paddq	%mm7,%mm3
-	movq	%mm0,%mm5
-	movq	%mm0,%mm6
-	paddq	72(%esp),%mm3
-	psrlq	$28,%mm5
-	paddq	%mm3,%mm4
-	psllq	$25,%mm6
-	movq	%mm5,%mm7
-	psrlq	$6,%mm5
-	pxor	%mm6,%mm7
-	psllq	$5,%mm6
-	pxor	%mm5,%mm7
-	psrlq	$5,%mm5
-	pxor	%mm6,%mm7
-	psllq	$6,%mm6
-	pxor	%mm5,%mm7
-	subl	$8,%esp
-	pxor	%mm6,%mm7
-	movq	%mm0,%mm5
-	por	%mm2,%mm0
-	movq	88(%esp),%mm6
-	pand	%mm2,%mm5
-	pand	%mm1,%mm0
-	movq	192(%esp),%mm2
-	por	%mm0,%mm5
-	paddq	%mm5,%mm7
-	movq	%mm3,%mm0
-	movb	(%ebp),%dl
-	paddq	%mm7,%mm0
-	addl	$8,%ebp
-.align	16
-.L00516_79_sse2:
-	movq	%mm2,%mm1
-	psrlq	$1,%mm2
-	movq	%mm6,%mm7
-	psrlq	$6,%mm6
-	movq	%mm2,%mm3
-	psrlq	$6,%mm2
-	movq	%mm6,%mm5
-	psrlq	$13,%mm6
-	pxor	%mm2,%mm3
-	psrlq	$1,%mm2
-	pxor	%mm6,%mm5
-	psrlq	$42,%mm6
-	pxor	%mm2,%mm3
-	movq	200(%esp),%mm2
-	psllq	$56,%mm1
-	pxor	%mm6,%mm5
-	psllq	$3,%mm7
-	pxor	%mm1,%mm3
-	paddq	128(%esp),%mm2
-	psllq	$7,%mm1
-	pxor	%mm7,%mm5
-	psllq	$42,%mm7
-	pxor	%mm1,%mm3
-	pxor	%mm7,%mm5
-	paddq	%mm5,%mm3
-	paddq	%mm2,%mm3
-	movq	%mm3,72(%esp)
-	movq	40(%esp),%mm5
-	movq	48(%esp),%mm6
-	movq	56(%esp),%mm7
-	movq	%mm4,%mm1
-	movq	%mm4,%mm2
-	psrlq	$14,%mm1
-	movq	%mm4,32(%esp)
-	psllq	$23,%mm2
-	movq	%mm1,%mm3
-	psrlq	$4,%mm1
-	pxor	%mm2,%mm3
-	psllq	$23,%mm2
-	pxor	%mm1,%mm3
-	psrlq	$23,%mm1
-	pxor	%mm2,%mm3
-	psllq	$4,%mm2
-	pxor	%mm1,%mm3
-	paddq	(%ebp),%mm7
-	pxor	%mm2,%mm3
-	pxor	%mm6,%mm5
-	movq	8(%esp),%mm1
-	pand	%mm4,%mm5
-	movq	16(%esp),%mm2
-	pxor	%mm6,%mm5
-	movq	24(%esp),%mm4
-	paddq	%mm5,%mm3
-	movq	%mm0,(%esp)
-	paddq	%mm7,%mm3
-	movq	%mm0,%mm5
-	movq	%mm0,%mm6
-	paddq	72(%esp),%mm3
-	psrlq	$28,%mm5
-	paddq	%mm3,%mm4
-	psllq	$25,%mm6
-	movq	%mm5,%mm7
-	psrlq	$6,%mm5
-	pxor	%mm6,%mm7
-	psllq	$5,%mm6
-	pxor	%mm5,%mm7
-	psrlq	$5,%mm5
-	pxor	%mm6,%mm7
-	psllq	$6,%mm6
-	pxor	%mm5,%mm7
-	subl	$8,%esp
-	pxor	%mm6,%mm7
-	movq	%mm0,%mm5
-	por	%mm2,%mm0
-	movq	88(%esp),%mm6
-	pand	%mm2,%mm5
-	pand	%mm1,%mm0
-	movq	192(%esp),%mm2
-	por	%mm0,%mm5
-	paddq	%mm5,%mm7
-	movq	%mm3,%mm0
-	movb	(%ebp),%dl
-	paddq	%mm7,%mm0
-	addl	$8,%ebp
-	cmpb	$23,%dl
-	jne	.L00516_79_sse2
-	movq	8(%esp),%mm1
-	movq	16(%esp),%mm2
-	movq	24(%esp),%mm3
-	movq	40(%esp),%mm5
-	movq	48(%esp),%mm6
-	movq	56(%esp),%mm7
-	paddq	(%esi),%mm0
-	paddq	8(%esi),%mm1
-	paddq	16(%esi),%mm2
-	paddq	24(%esi),%mm3
-	paddq	32(%esi),%mm4
-	paddq	40(%esi),%mm5
-	paddq	48(%esi),%mm6
-	paddq	56(%esi),%mm7
-	movq	%mm0,(%esi)
-	movq	%mm1,8(%esi)
-	movq	%mm2,16(%esi)
-	movq	%mm3,24(%esi)
-	movq	%mm4,32(%esi)
-	movq	%mm5,40(%esi)
-	movq	%mm6,48(%esi)
-	movq	%mm7,56(%esi)
-	addl	$640,%esp
-	subl	$640,%ebp
-	cmpl	88(%esp),%edi
-	jb	.L003loop_sse2
-	emms
-	movl	92(%esp),%esp
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.align	16
-.L002loop_x86:
-	movl	(%edi),%eax
-	movl	4(%edi),%ebx
-	movl	8(%edi),%ecx
-	movl	12(%edi),%edx
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	pushl	%eax
-	pushl	%ebx
-	pushl	%ecx
-	pushl	%edx
-	movl	16(%edi),%eax
-	movl	20(%edi),%ebx
-	movl	24(%edi),%ecx
-	movl	28(%edi),%edx
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	pushl	%eax
-	pushl	%ebx
-	pushl	%ecx
-	pushl	%edx
-	movl	32(%edi),%eax
-	movl	36(%edi),%ebx
-	movl	40(%edi),%ecx
-	movl	44(%edi),%edx
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	pushl	%eax
-	pushl	%ebx
-	pushl	%ecx
-	pushl	%edx
-	movl	48(%edi),%eax
-	movl	52(%edi),%ebx
-	movl	56(%edi),%ecx
-	movl	60(%edi),%edx
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	pushl	%eax
-	pushl	%ebx
-	pushl	%ecx
-	pushl	%edx
-	movl	64(%edi),%eax
-	movl	68(%edi),%ebx
-	movl	72(%edi),%ecx
-	movl	76(%edi),%edx
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	pushl	%eax
-	pushl	%ebx
-	pushl	%ecx
-	pushl	%edx
-	movl	80(%edi),%eax
-	movl	84(%edi),%ebx
-	movl	88(%edi),%ecx
-	movl	92(%edi),%edx
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	pushl	%eax
-	pushl	%ebx
-	pushl	%ecx
-	pushl	%edx
-	movl	96(%edi),%eax
-	movl	100(%edi),%ebx
-	movl	104(%edi),%ecx
-	movl	108(%edi),%edx
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	pushl	%eax
-	pushl	%ebx
-	pushl	%ecx
-	pushl	%edx
-	movl	112(%edi),%eax
-	movl	116(%edi),%ebx
-	movl	120(%edi),%ecx
-	movl	124(%edi),%edx
-	bswap	%eax
-	bswap	%ebx
-	bswap	%ecx
-	bswap	%edx
-	pushl	%eax
-	pushl	%ebx
-	pushl	%ecx
-	pushl	%edx
-	addl	$128,%edi
-	subl	$72,%esp
-	movl	%edi,204(%esp)
-	leal	8(%esp),%edi
-	movl	$16,%ecx
-.long	2784229001
-.align	16
-.L00600_15_x86:
-	movl	40(%esp),%ecx
-	movl	44(%esp),%edx
-	movl	%ecx,%esi
-	shrl	$9,%ecx
-	movl	%edx,%edi
-	shrl	$9,%edx
-	movl	%ecx,%ebx
-	shll	$14,%esi
-	movl	%edx,%eax
-	shll	$14,%edi
-	xorl	%esi,%ebx
-	shrl	$5,%ecx
-	xorl	%edi,%eax
-	shrl	$5,%edx
-	xorl	%ecx,%eax
-	shll	$4,%esi
-	xorl	%edx,%ebx
-	shll	$4,%edi
-	xorl	%esi,%ebx
-	shrl	$4,%ecx
-	xorl	%edi,%eax
-	shrl	$4,%edx
-	xorl	%ecx,%eax
-	shll	$5,%esi
-	xorl	%edx,%ebx
-	shll	$5,%edi
-	xorl	%esi,%eax
-	xorl	%edi,%ebx
-	movl	48(%esp),%ecx
-	movl	52(%esp),%edx
-	movl	56(%esp),%esi
-	movl	60(%esp),%edi
-	addl	64(%esp),%eax
-	adcl	68(%esp),%ebx
-	xorl	%esi,%ecx
-	xorl	%edi,%edx
-	andl	40(%esp),%ecx
-	andl	44(%esp),%edx
-	addl	192(%esp),%eax
-	adcl	196(%esp),%ebx
-	xorl	%esi,%ecx
-	xorl	%edi,%edx
-	movl	(%ebp),%esi
-	movl	4(%ebp),%edi
-	addl	%ecx,%eax
-	adcl	%edx,%ebx
-	movl	32(%esp),%ecx
-	movl	36(%esp),%edx
-	addl	%esi,%eax
-	adcl	%edi,%ebx
-	movl	%eax,(%esp)
-	movl	%ebx,4(%esp)
-	addl	%ecx,%eax
-	adcl	%edx,%ebx
-	movl	8(%esp),%ecx
-	movl	12(%esp),%edx
-	movl	%eax,32(%esp)
-	movl	%ebx,36(%esp)
-	movl	%ecx,%esi
-	shrl	$2,%ecx
-	movl	%edx,%edi
-	shrl	$2,%edx
-	movl	%ecx,%ebx
-	shll	$4,%esi
-	movl	%edx,%eax
-	shll	$4,%edi
-	xorl	%esi,%ebx
-	shrl	$5,%ecx
-	xorl	%edi,%eax
-	shrl	$5,%edx
-	xorl	%ecx,%ebx
-	shll	$21,%esi
-	xorl	%edx,%eax
-	shll	$21,%edi
-	xorl	%esi,%eax
-	shrl	$21,%ecx
-	xorl	%edi,%ebx
-	shrl	$21,%edx
-	xorl	%ecx,%eax
-	shll	$5,%esi
-	xorl	%edx,%ebx
-	shll	$5,%edi
-	xorl	%esi,%eax
-	xorl	%edi,%ebx
-	movl	8(%esp),%ecx
-	movl	12(%esp),%edx
-	movl	16(%esp),%esi
-	movl	20(%esp),%edi
-	addl	(%esp),%eax
-	adcl	4(%esp),%ebx
-	orl	%esi,%ecx
-	orl	%edi,%edx
-	andl	24(%esp),%ecx
-	andl	28(%esp),%edx
-	andl	8(%esp),%esi
-	andl	12(%esp),%edi
-	orl	%esi,%ecx
-	orl	%edi,%edx
-	addl	%ecx,%eax
-	adcl	%edx,%ebx
-	movl	%eax,(%esp)
-	movl	%ebx,4(%esp)
-	movb	(%ebp),%dl
-	subl	$8,%esp
-	leal	8(%ebp),%ebp
-	cmpb	$148,%dl
-	jne	.L00600_15_x86
-.align	16
-.L00716_79_x86:
-	movl	312(%esp),%ecx
-	movl	316(%esp),%edx
-	movl	%ecx,%esi
-	shrl	$1,%ecx
-	movl	%edx,%edi
-	shrl	$1,%edx
-	movl	%ecx,%eax
-	shll	$24,%esi
-	movl	%edx,%ebx
-	shll	$24,%edi
-	xorl	%esi,%ebx
-	shrl	$6,%ecx
-	xorl	%edi,%eax
-	shrl	$6,%edx
-	xorl	%ecx,%eax
-	shll	$7,%esi
-	xorl	%edx,%ebx
-	shll	$1,%edi
-	xorl	%esi,%ebx
-	shrl	$1,%ecx
-	xorl	%edi,%eax
-	shrl	$1,%edx
-	xorl	%ecx,%eax
-	shll	$6,%edi
-	xorl	%edx,%ebx
-	xorl	%edi,%eax
-	movl	%eax,(%esp)
-	movl	%ebx,4(%esp)
-	movl	208(%esp),%ecx
-	movl	212(%esp),%edx
-	movl	%ecx,%esi
-	shrl	$6,%ecx
-	movl	%edx,%edi
-	shrl	$6,%edx
-	movl	%ecx,%eax
-	shll	$3,%esi
-	movl	%edx,%ebx
-	shll	$3,%edi
-	xorl	%esi,%eax
-	shrl	$13,%ecx
-	xorl	%edi,%ebx
-	shrl	$13,%edx
-	xorl	%ecx,%eax
-	shll	$10,%esi
-	xorl	%edx,%ebx
-	shll	$10,%edi
-	xorl	%esi,%ebx
-	shrl	$10,%ecx
-	xorl	%edi,%eax
-	shrl	$10,%edx
-	xorl	%ecx,%ebx
-	shll	$13,%edi
-	xorl	%edx,%eax
-	xorl	%edi,%eax
-	movl	320(%esp),%ecx
-	movl	324(%esp),%edx
-	addl	(%esp),%eax
-	adcl	4(%esp),%ebx
-	movl	248(%esp),%esi
-	movl	252(%esp),%edi
-	addl	%ecx,%eax
-	adcl	%edx,%ebx
-	addl	%esi,%eax
-	adcl	%edi,%ebx
-	movl	%eax,192(%esp)
-	movl	%ebx,196(%esp)
-	movl	40(%esp),%ecx
-	movl	44(%esp),%edx
-	movl	%ecx,%esi
-	shrl	$9,%ecx
-	movl	%edx,%edi
-	shrl	$9,%edx
-	movl	%ecx,%ebx
-	shll	$14,%esi
-	movl	%edx,%eax
-	shll	$14,%edi
-	xorl	%esi,%ebx
-	shrl	$5,%ecx
-	xorl	%edi,%eax
-	shrl	$5,%edx
-	xorl	%ecx,%eax
-	shll	$4,%esi
-	xorl	%edx,%ebx
-	shll	$4,%edi
-	xorl	%esi,%ebx
-	shrl	$4,%ecx
-	xorl	%edi,%eax
-	shrl	$4,%edx
-	xorl	%ecx,%eax
-	shll	$5,%esi
-	xorl	%edx,%ebx
-	shll	$5,%edi
-	xorl	%esi,%eax
-	xorl	%edi,%ebx
-	movl	48(%esp),%ecx
-	movl	52(%esp),%edx
-	movl	56(%esp),%esi
-	movl	60(%esp),%edi
-	addl	64(%esp),%eax
-	adcl	68(%esp),%ebx
-	xorl	%esi,%ecx
-	xorl	%edi,%edx
-	andl	40(%esp),%ecx
-	andl	44(%esp),%edx
-	addl	192(%esp),%eax
-	adcl	196(%esp),%ebx
-	xorl	%esi,%ecx
-	xorl	%edi,%edx
-	movl	(%ebp),%esi
-	movl	4(%ebp),%edi
-	addl	%ecx,%eax
-	adcl	%edx,%ebx
-	movl	32(%esp),%ecx
-	movl	36(%esp),%edx
-	addl	%esi,%eax
-	adcl	%edi,%ebx
-	movl	%eax,(%esp)
-	movl	%ebx,4(%esp)
-	addl	%ecx,%eax
-	adcl	%edx,%ebx
-	movl	8(%esp),%ecx
-	movl	12(%esp),%edx
-	movl	%eax,32(%esp)
-	movl	%ebx,36(%esp)
-	movl	%ecx,%esi
-	shrl	$2,%ecx
-	movl	%edx,%edi
-	shrl	$2,%edx
-	movl	%ecx,%ebx
-	shll	$4,%esi
-	movl	%edx,%eax
-	shll	$4,%edi
-	xorl	%esi,%ebx
-	shrl	$5,%ecx
-	xorl	%edi,%eax
-	shrl	$5,%edx
-	xorl	%ecx,%ebx
-	shll	$21,%esi
-	xorl	%edx,%eax
-	shll	$21,%edi
-	xorl	%esi,%eax
-	shrl	$21,%ecx
-	xorl	%edi,%ebx
-	shrl	$21,%edx
-	xorl	%ecx,%eax
-	shll	$5,%esi
-	xorl	%edx,%ebx
-	shll	$5,%edi
-	xorl	%esi,%eax
-	xorl	%edi,%ebx
-	movl	8(%esp),%ecx
-	movl	12(%esp),%edx
-	movl	16(%esp),%esi
-	movl	20(%esp),%edi
-	addl	(%esp),%eax
-	adcl	4(%esp),%ebx
-	orl	%esi,%ecx
-	orl	%edi,%edx
-	andl	24(%esp),%ecx
-	andl	28(%esp),%edx
-	andl	8(%esp),%esi
-	andl	12(%esp),%edi
-	orl	%esi,%ecx
-	orl	%edi,%edx
-	addl	%ecx,%eax
-	adcl	%edx,%ebx
-	movl	%eax,(%esp)
-	movl	%ebx,4(%esp)
-	movb	(%ebp),%dl
-	subl	$8,%esp
-	leal	8(%ebp),%ebp
-	cmpb	$23,%dl
-	jne	.L00716_79_x86
-	movl	840(%esp),%esi
-	movl	844(%esp),%edi
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	movl	12(%esi),%edx
-	addl	8(%esp),%eax
-	adcl	12(%esp),%ebx
-	movl	%eax,(%esi)
-	movl	%ebx,4(%esi)
-	addl	16(%esp),%ecx
-	adcl	20(%esp),%edx
-	movl	%ecx,8(%esi)
-	movl	%edx,12(%esi)
-	movl	16(%esi),%eax
-	movl	20(%esi),%ebx
-	movl	24(%esi),%ecx
-	movl	28(%esi),%edx
-	addl	24(%esp),%eax
-	adcl	28(%esp),%ebx
-	movl	%eax,16(%esi)
-	movl	%ebx,20(%esi)
-	addl	32(%esp),%ecx
-	adcl	36(%esp),%edx
-	movl	%ecx,24(%esi)
-	movl	%edx,28(%esi)
-	movl	32(%esi),%eax
-	movl	36(%esi),%ebx
-	movl	40(%esi),%ecx
-	movl	44(%esi),%edx
-	addl	40(%esp),%eax
-	adcl	44(%esp),%ebx
-	movl	%eax,32(%esi)
-	movl	%ebx,36(%esi)
-	addl	48(%esp),%ecx
-	adcl	52(%esp),%edx
-	movl	%ecx,40(%esi)
-	movl	%edx,44(%esi)
-	movl	48(%esi),%eax
-	movl	52(%esi),%ebx
-	movl	56(%esi),%ecx
-	movl	60(%esi),%edx
-	addl	56(%esp),%eax
-	adcl	60(%esp),%ebx
-	movl	%eax,48(%esi)
-	movl	%ebx,52(%esi)
-	addl	64(%esp),%ecx
-	adcl	68(%esp),%edx
-	movl	%ecx,56(%esi)
-	movl	%edx,60(%esi)
-	addl	$840,%esp
-	subl	$640,%ebp
-	cmpl	8(%esp),%edi
-	jb	.L002loop_x86
-	movl	12(%esp),%esp
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.align	64
-.L001K512:
-.long	3609767458,1116352408
-.long	602891725,1899447441
-.long	3964484399,3049323471
-.long	2173295548,3921009573
-.long	4081628472,961987163
-.long	3053834265,1508970993
-.long	2937671579,2453635748
-.long	3664609560,2870763221
-.long	2734883394,3624381080
-.long	1164996542,310598401
-.long	1323610764,607225278
-.long	3590304994,1426881987
-.long	4068182383,1925078388
-.long	991336113,2162078206
-.long	633803317,2614888103
-.long	3479774868,3248222580
-.long	2666613458,3835390401
-.long	944711139,4022224774
-.long	2341262773,264347078
-.long	2007800933,604807628
-.long	1495990901,770255983
-.long	1856431235,1249150122
-.long	3175218132,1555081692
-.long	2198950837,1996064986
-.long	3999719339,2554220882
-.long	766784016,2821834349
-.long	2566594879,2952996808
-.long	3203337956,3210313671
-.long	1034457026,3336571891
-.long	2466948901,3584528711
-.long	3758326383,113926993
-.long	168717936,338241895
-.long	1188179964,666307205
-.long	1546045734,773529912
-.long	1522805485,1294757372
-.long	2643833823,1396182291
-.long	2343527390,1695183700
-.long	1014477480,1986661051
-.long	1206759142,2177026350
-.long	344077627,2456956037
-.long	1290863460,2730485921
-.long	3158454273,2820302411
-.long	3505952657,3259730800
-.long	106217008,3345764771
-.long	3606008344,3516065817
-.long	1432725776,3600352804
-.long	1467031594,4094571909
-.long	851169720,275423344
-.long	3100823752,430227734
-.long	1363258195,506948616
-.long	3750685593,659060556
-.long	3785050280,883997877
-.long	3318307427,958139571
-.long	3812723403,1322822218
-.long	2003034995,1537002063
-.long	3602036899,1747873779
-.long	1575990012,1955562222
-.long	1125592928,2024104815
-.long	2716904306,2227730452
-.long	442776044,2361852424
-.long	593698344,2428436474
-.long	3733110249,2756734187
-.long	2999351573,3204031479
-.long	3815920427,3329325298
-.long	3928383900,3391569614
-.long	566280711,3515267271
-.long	3454069534,3940187606
-.long	4000239992,4118630271
-.long	1914138554,116418474
-.long	2731055270,174292421
-.long	3203993006,289380356
-.long	320620315,460393269
-.long	587496836,685471733
-.long	1086792851,852142971
-.long	365543100,1017036298
-.long	2618297676,1126000580
-.long	3409855158,1288033470
-.long	4234509866,1501505948
-.long	987167468,1607167915
-.long	1246189591,1816402316
-.size	sha512_block_data_order,.-.L_sha512_block_data_order_begin
-.byte	83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97
-.byte	110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
-.byte	67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
-.byte	112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
-.byte	62,0
-.comm	OPENSSL_ia32cap_P,8,4

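For reference, the .L001K512 table in the deleted SHA-512 listing that ends above holds the 80 SHA-512 round constants, each emitted as a pair of .long values with the low 32-bit word first (little-endian qwords). A minimal C sketch verifying the first entry (illustration only, not part of the commit):

	#include <assert.h>
	#include <stdint.h>

	int main(void)
	{
		/* .long 3609767458,1116352408 encodes K[0] = 0x428a2f98d728ae22 */
		uint64_t k0 = ((uint64_t)1116352408 << 32) | 3609767458u;
		assert(k0 == 0x428a2f98d728ae22ULL);
		return 0;
	}
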
Added: trunk/secure/lib/libcrypto/i386/vpaes-x86.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/vpaes-x86.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/vpaes-x86.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,1328 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/vpaes-x86.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from vpaes-x86.pl.
+#ifdef PIC
+.file	"vpaes-x86.S"
+.text
+.align	64
+.L_vpaes_consts:
+.long	218628480,235210255,168496130,67568393
+.long	252381056,17041926,33884169,51187212
+.long	252645135,252645135,252645135,252645135
+.long	1512730624,3266504856,1377990664,3401244816
+.long	830229760,1275146365,2969422977,3447763452
+.long	3411033600,2979783055,338359620,2782886510
+.long	4209124096,907596821,221174255,1006095553
+.long	191964160,3799684038,3164090317,1589111125
+.long	182528256,1777043520,2877432650,3265356744
+.long	1874708224,3503451415,3305285752,363511674
+.long	1606117888,3487855781,1093350906,2384367825
+.long	197121,67569157,134941193,202313229
+.long	67569157,134941193,202313229,197121
+.long	134941193,202313229,197121,67569157
+.long	202313229,197121,67569157,134941193
+.long	33619971,100992007,168364043,235736079
+.long	235736079,33619971,100992007,168364043
+.long	168364043,235736079,33619971,100992007
+.long	100992007,168364043,235736079,33619971
+.long	50462976,117835012,185207048,252579084
+.long	252314880,51251460,117574920,184942860
+.long	184682752,252054788,50987272,118359308
+.long	118099200,185467140,251790600,50727180
+.long	2946363062,528716217,1300004225,1881839624
+.long	1532713819,1532713819,1532713819,1532713819
+.long	3602276352,4288629033,3737020424,4153884961
+.long	1354558464,32357713,2958822624,3775749553
+.long	1201988352,132424512,1572796698,503232858
+.long	2213177600,1597421020,4103937655,675398315
+.long	2749646592,4273543773,1511898873,121693092
+.long	3040248576,1103263732,2871565598,1608280554
+.long	2236667136,2588920351,482954393,64377734
+.long	3069987328,291237287,2117370568,3650299247
+.long	533321216,3573750986,2572112006,1401264716
+.long	1339849704,2721158661,548607111,3445553514
+.long	2128193280,3054596040,2183486460,1257083700
+.long	655635200,1165381986,3923443150,2344132524
+.long	190078720,256924420,290342170,357187870
+.long	1610966272,2263057382,4103205268,309794674
+.long	2592527872,2233205587,1335446729,3402964816
+.long	3973531904,3225098121,3002836325,1918774430
+.long	3870401024,2102906079,2284471353,4117666579
+.long	617007872,1021508343,366931923,691083277
+.long	2528395776,3491914898,2968704004,1613121270
+.long	3445188352,3247741094,844474987,4093578302
+.long	651481088,1190302358,1689581232,574775300
+.long	4289380608,206939853,2555985458,2489840491
+.long	2130264064,327674451,3566485037,3349835193
+.long	2470714624,316102159,3636825756,3393945945
+.byte	86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105
+.byte	111,110,32,65,69,83,32,102,111,114,32,120,56,54,47,83
+.byte	83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117
+.byte	114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105
+.byte	118,101,114,115,105,116,121,41,0
+.align	64
+.type	_vpaes_preheat,@function
+.align	16
+_vpaes_preheat:
+	addl	(%esp),%ebp
+	movdqa	-48(%ebp),%xmm7
+	movdqa	-16(%ebp),%xmm6
+	ret
+.size	_vpaes_preheat,.-_vpaes_preheat
+.type	_vpaes_encrypt_core,@function
+.align	16
+_vpaes_encrypt_core:
+	movl	$16,%ecx
+	movl	240(%edx),%eax
+	movdqa	%xmm6,%xmm1
+	movdqa	(%ebp),%xmm2
+	pandn	%xmm0,%xmm1
+	movdqu	(%edx),%xmm5
+	psrld	$4,%xmm1
+	pand	%xmm6,%xmm0
+.byte	102,15,56,0,208
+	movdqa	16(%ebp),%xmm0
+.byte	102,15,56,0,193
+	pxor	%xmm5,%xmm2
+	pxor	%xmm2,%xmm0
+	addl	$16,%edx
+	leal	192(%ebp),%ebx
+	jmp	.L000enc_entry
+.align	16
+.L001enc_loop:
+	movdqa	32(%ebp),%xmm4
+.byte	102,15,56,0,226
+	pxor	%xmm5,%xmm4
+	movdqa	48(%ebp),%xmm0
+.byte	102,15,56,0,195
+	pxor	%xmm4,%xmm0
+	movdqa	64(%ebp),%xmm5
+.byte	102,15,56,0,234
+	movdqa	-64(%ebx,%ecx,1),%xmm1
+	movdqa	80(%ebp),%xmm2
+.byte	102,15,56,0,211
+	pxor	%xmm5,%xmm2
+	movdqa	(%ebx,%ecx,1),%xmm4
+	movdqa	%xmm0,%xmm3
+.byte	102,15,56,0,193
+	addl	$16,%edx
+	pxor	%xmm2,%xmm0
+.byte	102,15,56,0,220
+	addl	$16,%ecx
+	pxor	%xmm0,%xmm3
+.byte	102,15,56,0,193
+	andl	$48,%ecx
+	pxor	%xmm3,%xmm0
+	subl	$1,%eax
+.L000enc_entry:
+	movdqa	%xmm6,%xmm1
+	pandn	%xmm0,%xmm1
+	psrld	$4,%xmm1
+	pand	%xmm6,%xmm0
+	movdqa	-32(%ebp),%xmm5
+.byte	102,15,56,0,232
+	pxor	%xmm1,%xmm0
+	movdqa	%xmm7,%xmm3
+.byte	102,15,56,0,217
+	pxor	%xmm5,%xmm3
+	movdqa	%xmm7,%xmm4
+.byte	102,15,56,0,224
+	pxor	%xmm5,%xmm4
+	movdqa	%xmm7,%xmm2
+.byte	102,15,56,0,211
+	pxor	%xmm0,%xmm2
+	movdqa	%xmm7,%xmm3
+	movdqu	(%edx),%xmm5
+.byte	102,15,56,0,220
+	pxor	%xmm1,%xmm3
+	jnz	.L001enc_loop
+	movdqa	96(%ebp),%xmm4
+	movdqa	112(%ebp),%xmm0
+.byte	102,15,56,0,226
+	pxor	%xmm5,%xmm4
+.byte	102,15,56,0,195
+	movdqa	64(%ebx,%ecx,1),%xmm1
+	pxor	%xmm4,%xmm0
+.byte	102,15,56,0,193
+	ret
+.size	_vpaes_encrypt_core,.-_vpaes_encrypt_core
+.type	_vpaes_decrypt_core,@function
+.align	16
+_vpaes_decrypt_core:
+	movl	240(%edx),%eax
+	leal	608(%ebp),%ebx
+	movdqa	%xmm6,%xmm1
+	movdqa	-64(%ebx),%xmm2
+	pandn	%xmm0,%xmm1
+	movl	%eax,%ecx
+	psrld	$4,%xmm1
+	movdqu	(%edx),%xmm5
+	shll	$4,%ecx
+	pand	%xmm6,%xmm0
+.byte	102,15,56,0,208
+	movdqa	-48(%ebx),%xmm0
+	xorl	$48,%ecx
+.byte	102,15,56,0,193
+	andl	$48,%ecx
+	pxor	%xmm5,%xmm2
+	movdqa	176(%ebp),%xmm5
+	pxor	%xmm2,%xmm0
+	addl	$16,%edx
+	leal	-352(%ebx,%ecx,1),%ecx
+	jmp	.L002dec_entry
+.align	16
+.L003dec_loop:
+	movdqa	-32(%ebx),%xmm4
+.byte	102,15,56,0,226
+	pxor	%xmm0,%xmm4
+	movdqa	-16(%ebx),%xmm0
+.byte	102,15,56,0,195
+	pxor	%xmm4,%xmm0
+	addl	$16,%edx
+.byte	102,15,56,0,197
+	movdqa	(%ebx),%xmm4
+.byte	102,15,56,0,226
+	pxor	%xmm0,%xmm4
+	movdqa	16(%ebx),%xmm0
+.byte	102,15,56,0,195
+	pxor	%xmm4,%xmm0
+	subl	$1,%eax
+.byte	102,15,56,0,197
+	movdqa	32(%ebx),%xmm4
+.byte	102,15,56,0,226
+	pxor	%xmm0,%xmm4
+	movdqa	48(%ebx),%xmm0
+.byte	102,15,56,0,195
+	pxor	%xmm4,%xmm0
+.byte	102,15,56,0,197
+	movdqa	64(%ebx),%xmm4
+.byte	102,15,56,0,226
+	pxor	%xmm0,%xmm4
+	movdqa	80(%ebx),%xmm0
+.byte	102,15,56,0,195
+	pxor	%xmm4,%xmm0
+.byte	102,15,58,15,237,12
+.L002dec_entry:
+	movdqa	%xmm6,%xmm1
+	pandn	%xmm0,%xmm1
+	psrld	$4,%xmm1
+	pand	%xmm6,%xmm0
+	movdqa	-32(%ebp),%xmm2
+.byte	102,15,56,0,208
+	pxor	%xmm1,%xmm0
+	movdqa	%xmm7,%xmm3
+.byte	102,15,56,0,217
+	pxor	%xmm2,%xmm3
+	movdqa	%xmm7,%xmm4
+.byte	102,15,56,0,224
+	pxor	%xmm2,%xmm4
+	movdqa	%xmm7,%xmm2
+.byte	102,15,56,0,211
+	pxor	%xmm0,%xmm2
+	movdqa	%xmm7,%xmm3
+.byte	102,15,56,0,220
+	pxor	%xmm1,%xmm3
+	movdqu	(%edx),%xmm0
+	jnz	.L003dec_loop
+	movdqa	96(%ebx),%xmm4
+.byte	102,15,56,0,226
+	pxor	%xmm0,%xmm4
+	movdqa	112(%ebx),%xmm0
+	movdqa	(%ecx),%xmm2
+.byte	102,15,56,0,195
+	pxor	%xmm4,%xmm0
+.byte	102,15,56,0,194
+	ret
+.size	_vpaes_decrypt_core,.-_vpaes_decrypt_core
+.type	_vpaes_schedule_core,@function
+.align	16
+_vpaes_schedule_core:
+	addl	(%esp),%ebp
+	movdqu	(%esi),%xmm0
+	movdqa	320(%ebp),%xmm2
+	movdqa	%xmm0,%xmm3
+	leal	(%ebp),%ebx
+	movdqa	%xmm2,4(%esp)
+	call	_vpaes_schedule_transform
+	movdqa	%xmm0,%xmm7
+	testl	%edi,%edi
+	jnz	.L004schedule_am_decrypting
+	movdqu	%xmm0,(%edx)
+	jmp	.L005schedule_go
+.L004schedule_am_decrypting:
+	movdqa	256(%ebp,%ecx,1),%xmm1
+.byte	102,15,56,0,217
+	movdqu	%xmm3,(%edx)
+	xorl	$48,%ecx
+.L005schedule_go:
+	cmpl	$192,%eax
+	ja	.L006schedule_256
+	je	.L007schedule_192
+.L008schedule_128:
+	movl	$10,%eax
+.L009loop_schedule_128:
+	call	_vpaes_schedule_round
+	decl	%eax
+	jz	.L010schedule_mangle_last
+	call	_vpaes_schedule_mangle
+	jmp	.L009loop_schedule_128
+.align	16
+.L007schedule_192:
+	movdqu	8(%esi),%xmm0
+	call	_vpaes_schedule_transform
+	movdqa	%xmm0,%xmm6
+	pxor	%xmm4,%xmm4
+	movhlps	%xmm4,%xmm6
+	movl	$4,%eax
+.L011loop_schedule_192:
+	call	_vpaes_schedule_round
+.byte	102,15,58,15,198,8
+	call	_vpaes_schedule_mangle
+	call	_vpaes_schedule_192_smear
+	call	_vpaes_schedule_mangle
+	call	_vpaes_schedule_round
+	decl	%eax
+	jz	.L010schedule_mangle_last
+	call	_vpaes_schedule_mangle
+	call	_vpaes_schedule_192_smear
+	jmp	.L011loop_schedule_192
+.align	16
+.L006schedule_256:
+	movdqu	16(%esi),%xmm0
+	call	_vpaes_schedule_transform
+	movl	$7,%eax
+.L012loop_schedule_256:
+	call	_vpaes_schedule_mangle
+	movdqa	%xmm0,%xmm6
+	call	_vpaes_schedule_round
+	decl	%eax
+	jz	.L010schedule_mangle_last
+	call	_vpaes_schedule_mangle
+	pshufd	$255,%xmm0,%xmm0
+	movdqa	%xmm7,20(%esp)
+	movdqa	%xmm6,%xmm7
+	call	.L_vpaes_schedule_low_round
+	movdqa	20(%esp),%xmm7
+	jmp	.L012loop_schedule_256
+.align	16
+.L010schedule_mangle_last:
+	leal	384(%ebp),%ebx
+	testl	%edi,%edi
+	jnz	.L013schedule_mangle_last_dec
+	movdqa	256(%ebp,%ecx,1),%xmm1
+.byte	102,15,56,0,193
+	leal	352(%ebp),%ebx
+	addl	$32,%edx
+.L013schedule_mangle_last_dec:
+	addl	$-16,%edx
+	pxor	336(%ebp),%xmm0
+	call	_vpaes_schedule_transform
+	movdqu	%xmm0,(%edx)
+	pxor	%xmm0,%xmm0
+	pxor	%xmm1,%xmm1
+	pxor	%xmm2,%xmm2
+	pxor	%xmm3,%xmm3
+	pxor	%xmm4,%xmm4
+	pxor	%xmm5,%xmm5
+	pxor	%xmm6,%xmm6
+	pxor	%xmm7,%xmm7
+	ret
+.size	_vpaes_schedule_core,.-_vpaes_schedule_core
+.type	_vpaes_schedule_192_smear,@function
+.align	16
+_vpaes_schedule_192_smear:
+	pshufd	$128,%xmm6,%xmm0
+	pxor	%xmm0,%xmm6
+	pshufd	$254,%xmm7,%xmm0
+	pxor	%xmm0,%xmm6
+	movdqa	%xmm6,%xmm0
+	pxor	%xmm1,%xmm1
+	movhlps	%xmm1,%xmm6
+	ret
+.size	_vpaes_schedule_192_smear,.-_vpaes_schedule_192_smear
+.type	_vpaes_schedule_round,@function
+.align	16
+_vpaes_schedule_round:
+	movdqa	8(%esp),%xmm2
+	pxor	%xmm1,%xmm1
+.byte	102,15,58,15,202,15
+.byte	102,15,58,15,210,15
+	pxor	%xmm1,%xmm7
+	pshufd	$255,%xmm0,%xmm0
+.byte	102,15,58,15,192,1
+	movdqa	%xmm2,8(%esp)
+.L_vpaes_schedule_low_round:
+	movdqa	%xmm7,%xmm1
+	pslldq	$4,%xmm7
+	pxor	%xmm1,%xmm7
+	movdqa	%xmm7,%xmm1
+	pslldq	$8,%xmm7
+	pxor	%xmm1,%xmm7
+	pxor	336(%ebp),%xmm7
+	movdqa	-16(%ebp),%xmm4
+	movdqa	-48(%ebp),%xmm5
+	movdqa	%xmm4,%xmm1
+	pandn	%xmm0,%xmm1
+	psrld	$4,%xmm1
+	pand	%xmm4,%xmm0
+	movdqa	-32(%ebp),%xmm2
+.byte	102,15,56,0,208
+	pxor	%xmm1,%xmm0
+	movdqa	%xmm5,%xmm3
+.byte	102,15,56,0,217
+	pxor	%xmm2,%xmm3
+	movdqa	%xmm5,%xmm4
+.byte	102,15,56,0,224
+	pxor	%xmm2,%xmm4
+	movdqa	%xmm5,%xmm2
+.byte	102,15,56,0,211
+	pxor	%xmm0,%xmm2
+	movdqa	%xmm5,%xmm3
+.byte	102,15,56,0,220
+	pxor	%xmm1,%xmm3
+	movdqa	32(%ebp),%xmm4
+.byte	102,15,56,0,226
+	movdqa	48(%ebp),%xmm0
+.byte	102,15,56,0,195
+	pxor	%xmm4,%xmm0
+	pxor	%xmm7,%xmm0
+	movdqa	%xmm0,%xmm7
+	ret
+.size	_vpaes_schedule_round,.-_vpaes_schedule_round
+.type	_vpaes_schedule_transform,@function
+.align	16
+_vpaes_schedule_transform:
+	movdqa	-16(%ebp),%xmm2
+	movdqa	%xmm2,%xmm1
+	pandn	%xmm0,%xmm1
+	psrld	$4,%xmm1
+	pand	%xmm2,%xmm0
+	movdqa	(%ebx),%xmm2
+.byte	102,15,56,0,208
+	movdqa	16(%ebx),%xmm0
+.byte	102,15,56,0,193
+	pxor	%xmm2,%xmm0
+	ret
+.size	_vpaes_schedule_transform,.-_vpaes_schedule_transform
+.type	_vpaes_schedule_mangle,@function
+.align	16
+_vpaes_schedule_mangle:
+	movdqa	%xmm0,%xmm4
+	movdqa	128(%ebp),%xmm5
+	testl	%edi,%edi
+	jnz	.L014schedule_mangle_dec
+	addl	$16,%edx
+	pxor	336(%ebp),%xmm4
+.byte	102,15,56,0,229
+	movdqa	%xmm4,%xmm3
+.byte	102,15,56,0,229
+	pxor	%xmm4,%xmm3
+.byte	102,15,56,0,229
+	pxor	%xmm4,%xmm3
+	jmp	.L015schedule_mangle_both
+.align	16
+.L014schedule_mangle_dec:
+	movdqa	-16(%ebp),%xmm2
+	leal	416(%ebp),%esi
+	movdqa	%xmm2,%xmm1
+	pandn	%xmm4,%xmm1
+	psrld	$4,%xmm1
+	pand	%xmm2,%xmm4
+	movdqa	(%esi),%xmm2
+.byte	102,15,56,0,212
+	movdqa	16(%esi),%xmm3
+.byte	102,15,56,0,217
+	pxor	%xmm2,%xmm3
+.byte	102,15,56,0,221
+	movdqa	32(%esi),%xmm2
+.byte	102,15,56,0,212
+	pxor	%xmm3,%xmm2
+	movdqa	48(%esi),%xmm3
+.byte	102,15,56,0,217
+	pxor	%xmm2,%xmm3
+.byte	102,15,56,0,221
+	movdqa	64(%esi),%xmm2
+.byte	102,15,56,0,212
+	pxor	%xmm3,%xmm2
+	movdqa	80(%esi),%xmm3
+.byte	102,15,56,0,217
+	pxor	%xmm2,%xmm3
+.byte	102,15,56,0,221
+	movdqa	96(%esi),%xmm2
+.byte	102,15,56,0,212
+	pxor	%xmm3,%xmm2
+	movdqa	112(%esi),%xmm3
+.byte	102,15,56,0,217
+	pxor	%xmm2,%xmm3
+	addl	$-16,%edx
+.L015schedule_mangle_both:
+	movdqa	256(%ebp,%ecx,1),%xmm1
+.byte	102,15,56,0,217
+	addl	$-16,%ecx
+	andl	$48,%ecx
+	movdqu	%xmm3,(%edx)
+	ret
+.size	_vpaes_schedule_mangle,.-_vpaes_schedule_mangle
+.globl	vpaes_set_encrypt_key
+.type	vpaes_set_encrypt_key,@function
+.align	16
+vpaes_set_encrypt_key:
+.L_vpaes_set_encrypt_key_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	leal	-56(%esp),%ebx
+	movl	24(%esp),%eax
+	andl	$-16,%ebx
+	movl	28(%esp),%edx
+	xchgl	%esp,%ebx
+	movl	%ebx,48(%esp)
+	movl	%eax,%ebx
+	shrl	$5,%ebx
+	addl	$5,%ebx
+	movl	%ebx,240(%edx)
+	movl	$48,%ecx
+	movl	$0,%edi
+	leal	.L_vpaes_consts+0x30-.L016pic_point,%ebp
+	call	_vpaes_schedule_core
+.L016pic_point:
+	movl	48(%esp),%esp
+	xorl	%eax,%eax
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	vpaes_set_encrypt_key,.-.L_vpaes_set_encrypt_key_begin
+.globl	vpaes_set_decrypt_key
+.type	vpaes_set_decrypt_key,@function
+.align	16
+vpaes_set_decrypt_key:
+.L_vpaes_set_decrypt_key_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	leal	-56(%esp),%ebx
+	movl	24(%esp),%eax
+	andl	$-16,%ebx
+	movl	28(%esp),%edx
+	xchgl	%esp,%ebx
+	movl	%ebx,48(%esp)
+	movl	%eax,%ebx
+	shrl	$5,%ebx
+	addl	$5,%ebx
+	movl	%ebx,240(%edx)
+	shll	$4,%ebx
+	leal	16(%edx,%ebx,1),%edx
+	movl	$1,%edi
+	movl	%eax,%ecx
+	shrl	$1,%ecx
+	andl	$32,%ecx
+	xorl	$32,%ecx
+	leal	.L_vpaes_consts+0x30-.L017pic_point,%ebp
+	call	_vpaes_schedule_core
+.L017pic_point:
+	movl	48(%esp),%esp
+	xorl	%eax,%eax
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	vpaes_set_decrypt_key,.-.L_vpaes_set_decrypt_key_begin
+.globl	vpaes_encrypt
+.type	vpaes_encrypt,@function
+.align	16
+vpaes_encrypt:
+.L_vpaes_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	leal	.L_vpaes_consts+0x30-.L018pic_point,%ebp
+	call	_vpaes_preheat
+.L018pic_point:
+	movl	20(%esp),%esi
+	leal	-56(%esp),%ebx
+	movl	24(%esp),%edi
+	andl	$-16,%ebx
+	movl	28(%esp),%edx
+	xchgl	%esp,%ebx
+	movl	%ebx,48(%esp)
+	movdqu	(%esi),%xmm0
+	call	_vpaes_encrypt_core
+	movdqu	%xmm0,(%edi)
+	movl	48(%esp),%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	vpaes_encrypt,.-.L_vpaes_encrypt_begin
+.globl	vpaes_decrypt
+.type	vpaes_decrypt,@function
+.align	16
+vpaes_decrypt:
+.L_vpaes_decrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	leal	.L_vpaes_consts+0x30-.L019pic_point,%ebp
+	call	_vpaes_preheat
+.L019pic_point:
+	movl	20(%esp),%esi
+	leal	-56(%esp),%ebx
+	movl	24(%esp),%edi
+	andl	$-16,%ebx
+	movl	28(%esp),%edx
+	xchgl	%esp,%ebx
+	movl	%ebx,48(%esp)
+	movdqu	(%esi),%xmm0
+	call	_vpaes_decrypt_core
+	movdqu	%xmm0,(%edi)
+	movl	48(%esp),%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	vpaes_decrypt,.-.L_vpaes_decrypt_begin
+.globl	vpaes_cbc_encrypt
+.type	vpaes_cbc_encrypt,@function
+.align	16
+vpaes_cbc_encrypt:
+.L_vpaes_cbc_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%eax
+	movl	32(%esp),%edx
+	subl	$16,%eax
+	jc	.L020cbc_abort
+	leal	-56(%esp),%ebx
+	movl	36(%esp),%ebp
+	andl	$-16,%ebx
+	movl	40(%esp),%ecx
+	xchgl	%esp,%ebx
+	movdqu	(%ebp),%xmm1
+	subl	%esi,%edi
+	movl	%ebx,48(%esp)
+	movl	%edi,(%esp)
+	movl	%edx,4(%esp)
+	movl	%ebp,8(%esp)
+	movl	%eax,%edi
+	leal	.L_vpaes_consts+0x30-.L021pic_point,%ebp
+	call	_vpaes_preheat
+.L021pic_point:
+	cmpl	$0,%ecx
+	je	.L022cbc_dec_loop
+	jmp	.L023cbc_enc_loop
+.align	16
+.L023cbc_enc_loop:
+	movdqu	(%esi),%xmm0
+	pxor	%xmm1,%xmm0
+	call	_vpaes_encrypt_core
+	movl	(%esp),%ebx
+	movl	4(%esp),%edx
+	movdqa	%xmm0,%xmm1
+	movdqu	%xmm0,(%ebx,%esi,1)
+	leal	16(%esi),%esi
+	subl	$16,%edi
+	jnc	.L023cbc_enc_loop
+	jmp	.L024cbc_done
+.align	16
+.L022cbc_dec_loop:
+	movdqu	(%esi),%xmm0
+	movdqa	%xmm1,16(%esp)
+	movdqa	%xmm0,32(%esp)
+	call	_vpaes_decrypt_core
+	movl	(%esp),%ebx
+	movl	4(%esp),%edx
+	pxor	16(%esp),%xmm0
+	movdqa	32(%esp),%xmm1
+	movdqu	%xmm0,(%ebx,%esi,1)
+	leal	16(%esi),%esi
+	subl	$16,%edi
+	jnc	.L022cbc_dec_loop
+.L024cbc_done:
+	movl	8(%esp),%ebx
+	movl	48(%esp),%esp
+	movdqu	%xmm1,(%ebx)
+.L020cbc_abort:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	vpaes_cbc_encrypt,.-.L_vpaes_cbc_encrypt_begin
+#else
+.file	"vpaes-x86.S"
+.text
+.align	64
+.L_vpaes_consts:
+.long	218628480,235210255,168496130,67568393
+.long	252381056,17041926,33884169,51187212
+.long	252645135,252645135,252645135,252645135
+.long	1512730624,3266504856,1377990664,3401244816
+.long	830229760,1275146365,2969422977,3447763452
+.long	3411033600,2979783055,338359620,2782886510
+.long	4209124096,907596821,221174255,1006095553
+.long	191964160,3799684038,3164090317,1589111125
+.long	182528256,1777043520,2877432650,3265356744
+.long	1874708224,3503451415,3305285752,363511674
+.long	1606117888,3487855781,1093350906,2384367825
+.long	197121,67569157,134941193,202313229
+.long	67569157,134941193,202313229,197121
+.long	134941193,202313229,197121,67569157
+.long	202313229,197121,67569157,134941193
+.long	33619971,100992007,168364043,235736079
+.long	235736079,33619971,100992007,168364043
+.long	168364043,235736079,33619971,100992007
+.long	100992007,168364043,235736079,33619971
+.long	50462976,117835012,185207048,252579084
+.long	252314880,51251460,117574920,184942860
+.long	184682752,252054788,50987272,118359308
+.long	118099200,185467140,251790600,50727180
+.long	2946363062,528716217,1300004225,1881839624
+.long	1532713819,1532713819,1532713819,1532713819
+.long	3602276352,4288629033,3737020424,4153884961
+.long	1354558464,32357713,2958822624,3775749553
+.long	1201988352,132424512,1572796698,503232858
+.long	2213177600,1597421020,4103937655,675398315
+.long	2749646592,4273543773,1511898873,121693092
+.long	3040248576,1103263732,2871565598,1608280554
+.long	2236667136,2588920351,482954393,64377734
+.long	3069987328,291237287,2117370568,3650299247
+.long	533321216,3573750986,2572112006,1401264716
+.long	1339849704,2721158661,548607111,3445553514
+.long	2128193280,3054596040,2183486460,1257083700
+.long	655635200,1165381986,3923443150,2344132524
+.long	190078720,256924420,290342170,357187870
+.long	1610966272,2263057382,4103205268,309794674
+.long	2592527872,2233205587,1335446729,3402964816
+.long	3973531904,3225098121,3002836325,1918774430
+.long	3870401024,2102906079,2284471353,4117666579
+.long	617007872,1021508343,366931923,691083277
+.long	2528395776,3491914898,2968704004,1613121270
+.long	3445188352,3247741094,844474987,4093578302
+.long	651481088,1190302358,1689581232,574775300
+.long	4289380608,206939853,2555985458,2489840491
+.long	2130264064,327674451,3566485037,3349835193
+.long	2470714624,316102159,3636825756,3393945945
+.byte	86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105
+.byte	111,110,32,65,69,83,32,102,111,114,32,120,56,54,47,83
+.byte	83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117
+.byte	114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105
+.byte	118,101,114,115,105,116,121,41,0
+.align	64
+.type	_vpaes_preheat,@function
+.align	16
+_vpaes_preheat:
+	addl	(%esp),%ebp
+	movdqa	-48(%ebp),%xmm7
+	movdqa	-16(%ebp),%xmm6
+	ret
+.size	_vpaes_preheat,.-_vpaes_preheat
+.type	_vpaes_encrypt_core,@function
+.align	16
+_vpaes_encrypt_core:
+	movl	$16,%ecx
+	movl	240(%edx),%eax
+	movdqa	%xmm6,%xmm1
+	movdqa	(%ebp),%xmm2
+	pandn	%xmm0,%xmm1
+	movdqu	(%edx),%xmm5
+	psrld	$4,%xmm1
+	pand	%xmm6,%xmm0
+.byte	102,15,56,0,208
+	movdqa	16(%ebp),%xmm0
+.byte	102,15,56,0,193
+	pxor	%xmm5,%xmm2
+	pxor	%xmm2,%xmm0
+	addl	$16,%edx
+	leal	192(%ebp),%ebx
+	jmp	.L000enc_entry
+.align	16
+.L001enc_loop:
+	movdqa	32(%ebp),%xmm4
+.byte	102,15,56,0,226
+	pxor	%xmm5,%xmm4
+	movdqa	48(%ebp),%xmm0
+.byte	102,15,56,0,195
+	pxor	%xmm4,%xmm0
+	movdqa	64(%ebp),%xmm5
+.byte	102,15,56,0,234
+	movdqa	-64(%ebx,%ecx,1),%xmm1
+	movdqa	80(%ebp),%xmm2
+.byte	102,15,56,0,211
+	pxor	%xmm5,%xmm2
+	movdqa	(%ebx,%ecx,1),%xmm4
+	movdqa	%xmm0,%xmm3
+.byte	102,15,56,0,193
+	addl	$16,%edx
+	pxor	%xmm2,%xmm0
+.byte	102,15,56,0,220
+	addl	$16,%ecx
+	pxor	%xmm0,%xmm3
+.byte	102,15,56,0,193
+	andl	$48,%ecx
+	pxor	%xmm3,%xmm0
+	subl	$1,%eax
+.L000enc_entry:
+	movdqa	%xmm6,%xmm1
+	pandn	%xmm0,%xmm1
+	psrld	$4,%xmm1
+	pand	%xmm6,%xmm0
+	movdqa	-32(%ebp),%xmm5
+.byte	102,15,56,0,232
+	pxor	%xmm1,%xmm0
+	movdqa	%xmm7,%xmm3
+.byte	102,15,56,0,217
+	pxor	%xmm5,%xmm3
+	movdqa	%xmm7,%xmm4
+.byte	102,15,56,0,224
+	pxor	%xmm5,%xmm4
+	movdqa	%xmm7,%xmm2
+.byte	102,15,56,0,211
+	pxor	%xmm0,%xmm2
+	movdqa	%xmm7,%xmm3
+	movdqu	(%edx),%xmm5
+.byte	102,15,56,0,220
+	pxor	%xmm1,%xmm3
+	jnz	.L001enc_loop
+	movdqa	96(%ebp),%xmm4
+	movdqa	112(%ebp),%xmm0
+.byte	102,15,56,0,226
+	pxor	%xmm5,%xmm4
+.byte	102,15,56,0,195
+	movdqa	64(%ebx,%ecx,1),%xmm1
+	pxor	%xmm4,%xmm0
+.byte	102,15,56,0,193
+	ret
+.size	_vpaes_encrypt_core,.-_vpaes_encrypt_core
+.type	_vpaes_decrypt_core,@function
+.align	16
+_vpaes_decrypt_core:
+	movl	240(%edx),%eax
+	leal	608(%ebp),%ebx
+	movdqa	%xmm6,%xmm1
+	movdqa	-64(%ebx),%xmm2
+	pandn	%xmm0,%xmm1
+	movl	%eax,%ecx
+	psrld	$4,%xmm1
+	movdqu	(%edx),%xmm5
+	shll	$4,%ecx
+	pand	%xmm6,%xmm0
+.byte	102,15,56,0,208
+	movdqa	-48(%ebx),%xmm0
+	xorl	$48,%ecx
+.byte	102,15,56,0,193
+	andl	$48,%ecx
+	pxor	%xmm5,%xmm2
+	movdqa	176(%ebp),%xmm5
+	pxor	%xmm2,%xmm0
+	addl	$16,%edx
+	leal	-352(%ebx,%ecx,1),%ecx
+	jmp	.L002dec_entry
+.align	16
+.L003dec_loop:
+	movdqa	-32(%ebx),%xmm4
+.byte	102,15,56,0,226
+	pxor	%xmm0,%xmm4
+	movdqa	-16(%ebx),%xmm0
+.byte	102,15,56,0,195
+	pxor	%xmm4,%xmm0
+	addl	$16,%edx
+.byte	102,15,56,0,197
+	movdqa	(%ebx),%xmm4
+.byte	102,15,56,0,226
+	pxor	%xmm0,%xmm4
+	movdqa	16(%ebx),%xmm0
+.byte	102,15,56,0,195
+	pxor	%xmm4,%xmm0
+	subl	$1,%eax
+.byte	102,15,56,0,197
+	movdqa	32(%ebx),%xmm4
+.byte	102,15,56,0,226
+	pxor	%xmm0,%xmm4
+	movdqa	48(%ebx),%xmm0
+.byte	102,15,56,0,195
+	pxor	%xmm4,%xmm0
+.byte	102,15,56,0,197
+	movdqa	64(%ebx),%xmm4
+.byte	102,15,56,0,226
+	pxor	%xmm0,%xmm4
+	movdqa	80(%ebx),%xmm0
+.byte	102,15,56,0,195
+	pxor	%xmm4,%xmm0
+.byte	102,15,58,15,237,12
+.L002dec_entry:
+	movdqa	%xmm6,%xmm1
+	pandn	%xmm0,%xmm1
+	psrld	$4,%xmm1
+	pand	%xmm6,%xmm0
+	movdqa	-32(%ebp),%xmm2
+.byte	102,15,56,0,208
+	pxor	%xmm1,%xmm0
+	movdqa	%xmm7,%xmm3
+.byte	102,15,56,0,217
+	pxor	%xmm2,%xmm3
+	movdqa	%xmm7,%xmm4
+.byte	102,15,56,0,224
+	pxor	%xmm2,%xmm4
+	movdqa	%xmm7,%xmm2
+.byte	102,15,56,0,211
+	pxor	%xmm0,%xmm2
+	movdqa	%xmm7,%xmm3
+.byte	102,15,56,0,220
+	pxor	%xmm1,%xmm3
+	movdqu	(%edx),%xmm0
+	jnz	.L003dec_loop
+	movdqa	96(%ebx),%xmm4
+.byte	102,15,56,0,226
+	pxor	%xmm0,%xmm4
+	movdqa	112(%ebx),%xmm0
+	movdqa	(%ecx),%xmm2
+.byte	102,15,56,0,195
+	pxor	%xmm4,%xmm0
+.byte	102,15,56,0,194
+	ret
+.size	_vpaes_decrypt_core,.-_vpaes_decrypt_core
+.type	_vpaes_schedule_core,@function
+.align	16
+_vpaes_schedule_core:
+	addl	(%esp),%ebp
+	movdqu	(%esi),%xmm0
+	movdqa	320(%ebp),%xmm2
+	movdqa	%xmm0,%xmm3
+	leal	(%ebp),%ebx
+	movdqa	%xmm2,4(%esp)
+	call	_vpaes_schedule_transform
+	movdqa	%xmm0,%xmm7
+	testl	%edi,%edi
+	jnz	.L004schedule_am_decrypting
+	movdqu	%xmm0,(%edx)
+	jmp	.L005schedule_go
+.L004schedule_am_decrypting:
+	movdqa	256(%ebp,%ecx,1),%xmm1
+.byte	102,15,56,0,217
+	movdqu	%xmm3,(%edx)
+	xorl	$48,%ecx
+.L005schedule_go:
+	cmpl	$192,%eax
+	ja	.L006schedule_256
+	je	.L007schedule_192
+.L008schedule_128:
+	movl	$10,%eax
+.L009loop_schedule_128:
+	call	_vpaes_schedule_round
+	decl	%eax
+	jz	.L010schedule_mangle_last
+	call	_vpaes_schedule_mangle
+	jmp	.L009loop_schedule_128
+.align	16
+.L007schedule_192:
+	movdqu	8(%esi),%xmm0
+	call	_vpaes_schedule_transform
+	movdqa	%xmm0,%xmm6
+	pxor	%xmm4,%xmm4
+	movhlps	%xmm4,%xmm6
+	movl	$4,%eax
+.L011loop_schedule_192:
+	call	_vpaes_schedule_round
+.byte	102,15,58,15,198,8
+	call	_vpaes_schedule_mangle
+	call	_vpaes_schedule_192_smear
+	call	_vpaes_schedule_mangle
+	call	_vpaes_schedule_round
+	decl	%eax
+	jz	.L010schedule_mangle_last
+	call	_vpaes_schedule_mangle
+	call	_vpaes_schedule_192_smear
+	jmp	.L011loop_schedule_192
+.align	16
+.L006schedule_256:
+	movdqu	16(%esi),%xmm0
+	call	_vpaes_schedule_transform
+	movl	$7,%eax
+.L012loop_schedule_256:
+	call	_vpaes_schedule_mangle
+	movdqa	%xmm0,%xmm6
+	call	_vpaes_schedule_round
+	decl	%eax
+	jz	.L010schedule_mangle_last
+	call	_vpaes_schedule_mangle
+	pshufd	$255,%xmm0,%xmm0
+	movdqa	%xmm7,20(%esp)
+	movdqa	%xmm6,%xmm7
+	call	.L_vpaes_schedule_low_round
+	movdqa	20(%esp),%xmm7
+	jmp	.L012loop_schedule_256
+.align	16
+.L010schedule_mangle_last:
+	leal	384(%ebp),%ebx
+	testl	%edi,%edi
+	jnz	.L013schedule_mangle_last_dec
+	movdqa	256(%ebp,%ecx,1),%xmm1
+.byte	102,15,56,0,193
+	leal	352(%ebp),%ebx
+	addl	$32,%edx
+.L013schedule_mangle_last_dec:
+	addl	$-16,%edx
+	pxor	336(%ebp),%xmm0
+	call	_vpaes_schedule_transform
+	movdqu	%xmm0,(%edx)
+	pxor	%xmm0,%xmm0
+	pxor	%xmm1,%xmm1
+	pxor	%xmm2,%xmm2
+	pxor	%xmm3,%xmm3
+	pxor	%xmm4,%xmm4
+	pxor	%xmm5,%xmm5
+	pxor	%xmm6,%xmm6
+	pxor	%xmm7,%xmm7
+	ret
+.size	_vpaes_schedule_core,.-_vpaes_schedule_core
+.type	_vpaes_schedule_192_smear,@function
+.align	16
+_vpaes_schedule_192_smear:
+	pshufd	$128,%xmm6,%xmm0
+	pxor	%xmm0,%xmm6
+	pshufd	$254,%xmm7,%xmm0
+	pxor	%xmm0,%xmm6
+	movdqa	%xmm6,%xmm0
+	pxor	%xmm1,%xmm1
+	movhlps	%xmm1,%xmm6
+	ret
+.size	_vpaes_schedule_192_smear,.-_vpaes_schedule_192_smear
+.type	_vpaes_schedule_round,@function
+.align	16
+_vpaes_schedule_round:
+	movdqa	8(%esp),%xmm2
+	pxor	%xmm1,%xmm1
+.byte	102,15,58,15,202,15
+.byte	102,15,58,15,210,15
+	pxor	%xmm1,%xmm7
+	pshufd	$255,%xmm0,%xmm0
+.byte	102,15,58,15,192,1
+	movdqa	%xmm2,8(%esp)
+.L_vpaes_schedule_low_round:
+	movdqa	%xmm7,%xmm1
+	pslldq	$4,%xmm7
+	pxor	%xmm1,%xmm7
+	movdqa	%xmm7,%xmm1
+	pslldq	$8,%xmm7
+	pxor	%xmm1,%xmm7
+	pxor	336(%ebp),%xmm7
+	movdqa	-16(%ebp),%xmm4
+	movdqa	-48(%ebp),%xmm5
+	movdqa	%xmm4,%xmm1
+	pandn	%xmm0,%xmm1
+	psrld	$4,%xmm1
+	pand	%xmm4,%xmm0
+	movdqa	-32(%ebp),%xmm2
+.byte	102,15,56,0,208
+	pxor	%xmm1,%xmm0
+	movdqa	%xmm5,%xmm3
+.byte	102,15,56,0,217
+	pxor	%xmm2,%xmm3
+	movdqa	%xmm5,%xmm4
+.byte	102,15,56,0,224
+	pxor	%xmm2,%xmm4
+	movdqa	%xmm5,%xmm2
+.byte	102,15,56,0,211
+	pxor	%xmm0,%xmm2
+	movdqa	%xmm5,%xmm3
+.byte	102,15,56,0,220
+	pxor	%xmm1,%xmm3
+	movdqa	32(%ebp),%xmm4
+.byte	102,15,56,0,226
+	movdqa	48(%ebp),%xmm0
+.byte	102,15,56,0,195
+	pxor	%xmm4,%xmm0
+	pxor	%xmm7,%xmm0
+	movdqa	%xmm0,%xmm7
+	ret
+.size	_vpaes_schedule_round,.-_vpaes_schedule_round
+.type	_vpaes_schedule_transform,@function
+.align	16
+_vpaes_schedule_transform:
+	movdqa	-16(%ebp),%xmm2
+	movdqa	%xmm2,%xmm1
+	pandn	%xmm0,%xmm1
+	psrld	$4,%xmm1
+	pand	%xmm2,%xmm0
+	movdqa	(%ebx),%xmm2
+.byte	102,15,56,0,208
+	movdqa	16(%ebx),%xmm0
+.byte	102,15,56,0,193
+	pxor	%xmm2,%xmm0
+	ret
+.size	_vpaes_schedule_transform,.-_vpaes_schedule_transform
+.type	_vpaes_schedule_mangle,@function
+.align	16
+_vpaes_schedule_mangle:
+	movdqa	%xmm0,%xmm4
+	movdqa	128(%ebp),%xmm5
+	testl	%edi,%edi
+	jnz	.L014schedule_mangle_dec
+	addl	$16,%edx
+	pxor	336(%ebp),%xmm4
+.byte	102,15,56,0,229
+	movdqa	%xmm4,%xmm3
+.byte	102,15,56,0,229
+	pxor	%xmm4,%xmm3
+.byte	102,15,56,0,229
+	pxor	%xmm4,%xmm3
+	jmp	.L015schedule_mangle_both
+.align	16
+.L014schedule_mangle_dec:
+	movdqa	-16(%ebp),%xmm2
+	leal	416(%ebp),%esi
+	movdqa	%xmm2,%xmm1
+	pandn	%xmm4,%xmm1
+	psrld	$4,%xmm1
+	pand	%xmm2,%xmm4
+	movdqa	(%esi),%xmm2
+.byte	102,15,56,0,212
+	movdqa	16(%esi),%xmm3
+.byte	102,15,56,0,217
+	pxor	%xmm2,%xmm3
+.byte	102,15,56,0,221
+	movdqa	32(%esi),%xmm2
+.byte	102,15,56,0,212
+	pxor	%xmm3,%xmm2
+	movdqa	48(%esi),%xmm3
+.byte	102,15,56,0,217
+	pxor	%xmm2,%xmm3
+.byte	102,15,56,0,221
+	movdqa	64(%esi),%xmm2
+.byte	102,15,56,0,212
+	pxor	%xmm3,%xmm2
+	movdqa	80(%esi),%xmm3
+.byte	102,15,56,0,217
+	pxor	%xmm2,%xmm3
+.byte	102,15,56,0,221
+	movdqa	96(%esi),%xmm2
+.byte	102,15,56,0,212
+	pxor	%xmm3,%xmm2
+	movdqa	112(%esi),%xmm3
+.byte	102,15,56,0,217
+	pxor	%xmm2,%xmm3
+	addl	$-16,%edx
+.L015schedule_mangle_both:
+	movdqa	256(%ebp,%ecx,1),%xmm1
+.byte	102,15,56,0,217
+	addl	$-16,%ecx
+	andl	$48,%ecx
+	movdqu	%xmm3,(%edx)
+	ret
+.size	_vpaes_schedule_mangle,.-_vpaes_schedule_mangle
+.globl	vpaes_set_encrypt_key
+.type	vpaes_set_encrypt_key,@function
+.align	16
+vpaes_set_encrypt_key:
+.L_vpaes_set_encrypt_key_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	leal	-56(%esp),%ebx
+	movl	24(%esp),%eax
+	andl	$-16,%ebx
+	movl	28(%esp),%edx
+	xchgl	%esp,%ebx
+	movl	%ebx,48(%esp)
+	movl	%eax,%ebx
+	shrl	$5,%ebx
+	addl	$5,%ebx
+	movl	%ebx,240(%edx)
+	movl	$48,%ecx
+	movl	$0,%edi
+	leal	.L_vpaes_consts+0x30-.L016pic_point,%ebp
+	call	_vpaes_schedule_core
+.L016pic_point:
+	movl	48(%esp),%esp
+	xorl	%eax,%eax
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	vpaes_set_encrypt_key,.-.L_vpaes_set_encrypt_key_begin
+.globl	vpaes_set_decrypt_key
+.type	vpaes_set_decrypt_key,@function
+.align	16
+vpaes_set_decrypt_key:
+.L_vpaes_set_decrypt_key_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	leal	-56(%esp),%ebx
+	movl	24(%esp),%eax
+	andl	$-16,%ebx
+	movl	28(%esp),%edx
+	xchgl	%esp,%ebx
+	movl	%ebx,48(%esp)
+	movl	%eax,%ebx
+	shrl	$5,%ebx
+	addl	$5,%ebx
+	movl	%ebx,240(%edx)
+	shll	$4,%ebx
+	leal	16(%edx,%ebx,1),%edx
+	movl	$1,%edi
+	movl	%eax,%ecx
+	shrl	$1,%ecx
+	andl	$32,%ecx
+	xorl	$32,%ecx
+	leal	.L_vpaes_consts+0x30-.L017pic_point,%ebp
+	call	_vpaes_schedule_core
+.L017pic_point:
+	movl	48(%esp),%esp
+	xorl	%eax,%eax
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	vpaes_set_decrypt_key,.-.L_vpaes_set_decrypt_key_begin
+.globl	vpaes_encrypt
+.type	vpaes_encrypt,@function
+.align	16
+vpaes_encrypt:
+.L_vpaes_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	leal	.L_vpaes_consts+0x30-.L018pic_point,%ebp
+	call	_vpaes_preheat
+.L018pic_point:
+	movl	20(%esp),%esi
+	leal	-56(%esp),%ebx
+	movl	24(%esp),%edi
+	andl	$-16,%ebx
+	movl	28(%esp),%edx
+	xchgl	%esp,%ebx
+	movl	%ebx,48(%esp)
+	movdqu	(%esi),%xmm0
+	call	_vpaes_encrypt_core
+	movdqu	%xmm0,(%edi)
+	movl	48(%esp),%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	vpaes_encrypt,.-.L_vpaes_encrypt_begin
+.globl	vpaes_decrypt
+.type	vpaes_decrypt,@function
+.align	16
+vpaes_decrypt:
+.L_vpaes_decrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	leal	.L_vpaes_consts+0x30-.L019pic_point,%ebp
+	call	_vpaes_preheat
+.L019pic_point:
+	movl	20(%esp),%esi
+	leal	-56(%esp),%ebx
+	movl	24(%esp),%edi
+	andl	$-16,%ebx
+	movl	28(%esp),%edx
+	xchgl	%esp,%ebx
+	movl	%ebx,48(%esp)
+	movdqu	(%esi),%xmm0
+	call	_vpaes_decrypt_core
+	movdqu	%xmm0,(%edi)
+	movl	48(%esp),%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	vpaes_decrypt,.-.L_vpaes_decrypt_begin
+.globl	vpaes_cbc_encrypt
+.type	vpaes_cbc_encrypt,@function
+.align	16
+vpaes_cbc_encrypt:
+.L_vpaes_cbc_encrypt_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%eax
+	movl	32(%esp),%edx
+	subl	$16,%eax
+	jc	.L020cbc_abort
+	leal	-56(%esp),%ebx
+	movl	36(%esp),%ebp
+	andl	$-16,%ebx
+	movl	40(%esp),%ecx
+	xchgl	%esp,%ebx
+	movdqu	(%ebp),%xmm1
+	subl	%esi,%edi
+	movl	%ebx,48(%esp)
+	movl	%edi,(%esp)
+	movl	%edx,4(%esp)
+	movl	%ebp,8(%esp)
+	movl	%eax,%edi
+	leal	.L_vpaes_consts+0x30-.L021pic_point,%ebp
+	call	_vpaes_preheat
+.L021pic_point:
+	cmpl	$0,%ecx
+	je	.L022cbc_dec_loop
+	jmp	.L023cbc_enc_loop
+.align	16
+.L023cbc_enc_loop:
+	movdqu	(%esi),%xmm0
+	pxor	%xmm1,%xmm0
+	call	_vpaes_encrypt_core
+	movl	(%esp),%ebx
+	movl	4(%esp),%edx
+	movdqa	%xmm0,%xmm1
+	movdqu	%xmm0,(%ebx,%esi,1)
+	leal	16(%esi),%esi
+	subl	$16,%edi
+	jnc	.L023cbc_enc_loop
+	jmp	.L024cbc_done
+.align	16
+.L022cbc_dec_loop:
+	movdqu	(%esi),%xmm0
+	movdqa	%xmm1,16(%esp)
+	movdqa	%xmm0,32(%esp)
+	call	_vpaes_decrypt_core
+	movl	(%esp),%ebx
+	movl	4(%esp),%edx
+	pxor	16(%esp),%xmm0
+	movdqa	32(%esp),%xmm1
+	movdqu	%xmm0,(%ebx,%esi,1)
+	leal	16(%esi),%esi
+	subl	$16,%edi
+	jnc	.L022cbc_dec_loop
+.L024cbc_done:
+	movl	8(%esp),%ebx
+	movl	48(%esp),%esp
+	movdqu	%xmm1,(%ebx)
+.L020cbc_abort:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	vpaes_cbc_encrypt,.-.L_vpaes_cbc_encrypt_begin
+#endif


Property changes on: trunk/secure/lib/libcrypto/i386/vpaes-x86.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Deleted: trunk/secure/lib/libcrypto/i386/vpaes-x86.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/vpaes-x86.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/vpaes-x86.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,662 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/vpaes-x86.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"vpaes-x86.s"
-.text
-.align	64
-.L_vpaes_consts:
-.long	218628480,235210255,168496130,67568393
-.long	252381056,17041926,33884169,51187212
-.long	252645135,252645135,252645135,252645135
-.long	1512730624,3266504856,1377990664,3401244816
-.long	830229760,1275146365,2969422977,3447763452
-.long	3411033600,2979783055,338359620,2782886510
-.long	4209124096,907596821,221174255,1006095553
-.long	191964160,3799684038,3164090317,1589111125
-.long	182528256,1777043520,2877432650,3265356744
-.long	1874708224,3503451415,3305285752,363511674
-.long	1606117888,3487855781,1093350906,2384367825
-.long	197121,67569157,134941193,202313229
-.long	67569157,134941193,202313229,197121
-.long	134941193,202313229,197121,67569157
-.long	202313229,197121,67569157,134941193
-.long	33619971,100992007,168364043,235736079
-.long	235736079,33619971,100992007,168364043
-.long	168364043,235736079,33619971,100992007
-.long	100992007,168364043,235736079,33619971
-.long	50462976,117835012,185207048,252579084
-.long	252314880,51251460,117574920,184942860
-.long	184682752,252054788,50987272,118359308
-.long	118099200,185467140,251790600,50727180
-.long	2946363062,528716217,1300004225,1881839624
-.long	1532713819,1532713819,1532713819,1532713819
-.long	3602276352,4288629033,3737020424,4153884961
-.long	1354558464,32357713,2958822624,3775749553
-.long	1201988352,132424512,1572796698,503232858
-.long	2213177600,1597421020,4103937655,675398315
-.long	2749646592,4273543773,1511898873,121693092
-.long	3040248576,1103263732,2871565598,1608280554
-.long	2236667136,2588920351,482954393,64377734
-.long	3069987328,291237287,2117370568,3650299247
-.long	533321216,3573750986,2572112006,1401264716
-.long	1339849704,2721158661,548607111,3445553514
-.long	2128193280,3054596040,2183486460,1257083700
-.long	655635200,1165381986,3923443150,2344132524
-.long	190078720,256924420,290342170,357187870
-.long	1610966272,2263057382,4103205268,309794674
-.long	2592527872,2233205587,1335446729,3402964816
-.long	3973531904,3225098121,3002836325,1918774430
-.long	3870401024,2102906079,2284471353,4117666579
-.long	617007872,1021508343,366931923,691083277
-.long	2528395776,3491914898,2968704004,1613121270
-.long	3445188352,3247741094,844474987,4093578302
-.long	651481088,1190302358,1689581232,574775300
-.long	4289380608,206939853,2555985458,2489840491
-.long	2130264064,327674451,3566485037,3349835193
-.long	2470714624,316102159,3636825756,3393945945
-.byte	86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105
-.byte	111,110,32,65,69,83,32,102,111,114,32,120,56,54,47,83
-.byte	83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117
-.byte	114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105
-.byte	118,101,114,115,105,116,121,41,0
-.align	64
-.type	_vpaes_preheat,@function
-.align	16
-_vpaes_preheat:
-	addl	(%esp),%ebp
-	movdqa	-48(%ebp),%xmm7
-	movdqa	-16(%ebp),%xmm6
-	ret
-.size	_vpaes_preheat,.-_vpaes_preheat
-.type	_vpaes_encrypt_core,@function
-.align	16
-_vpaes_encrypt_core:
-	movl	$16,%ecx
-	movl	240(%edx),%eax
-	movdqa	%xmm6,%xmm1
-	movdqa	(%ebp),%xmm2
-	pandn	%xmm0,%xmm1
-	movdqu	(%edx),%xmm5
-	psrld	$4,%xmm1
-	pand	%xmm6,%xmm0
-.byte	102,15,56,0,208
-	movdqa	16(%ebp),%xmm0
-.byte	102,15,56,0,193
-	pxor	%xmm5,%xmm2
-	pxor	%xmm2,%xmm0
-	addl	$16,%edx
-	leal	192(%ebp),%ebx
-	jmp	.L000enc_entry
-.align	16
-.L001enc_loop:
-	movdqa	32(%ebp),%xmm4
-.byte	102,15,56,0,226
-	pxor	%xmm5,%xmm4
-	movdqa	48(%ebp),%xmm0
-.byte	102,15,56,0,195
-	pxor	%xmm4,%xmm0
-	movdqa	64(%ebp),%xmm5
-.byte	102,15,56,0,234
-	movdqa	-64(%ebx,%ecx,1),%xmm1
-	movdqa	80(%ebp),%xmm2
-.byte	102,15,56,0,211
-	pxor	%xmm5,%xmm2
-	movdqa	(%ebx,%ecx,1),%xmm4
-	movdqa	%xmm0,%xmm3
-.byte	102,15,56,0,193
-	addl	$16,%edx
-	pxor	%xmm2,%xmm0
-.byte	102,15,56,0,220
-	addl	$16,%ecx
-	pxor	%xmm0,%xmm3
-.byte	102,15,56,0,193
-	andl	$48,%ecx
-	pxor	%xmm3,%xmm0
-	subl	$1,%eax
-.L000enc_entry:
-	movdqa	%xmm6,%xmm1
-	pandn	%xmm0,%xmm1
-	psrld	$4,%xmm1
-	pand	%xmm6,%xmm0
-	movdqa	-32(%ebp),%xmm5
-.byte	102,15,56,0,232
-	pxor	%xmm1,%xmm0
-	movdqa	%xmm7,%xmm3
-.byte	102,15,56,0,217
-	pxor	%xmm5,%xmm3
-	movdqa	%xmm7,%xmm4
-.byte	102,15,56,0,224
-	pxor	%xmm5,%xmm4
-	movdqa	%xmm7,%xmm2
-.byte	102,15,56,0,211
-	pxor	%xmm0,%xmm2
-	movdqa	%xmm7,%xmm3
-	movdqu	(%edx),%xmm5
-.byte	102,15,56,0,220
-	pxor	%xmm1,%xmm3
-	jnz	.L001enc_loop
-	movdqa	96(%ebp),%xmm4
-	movdqa	112(%ebp),%xmm0
-.byte	102,15,56,0,226
-	pxor	%xmm5,%xmm4
-.byte	102,15,56,0,195
-	movdqa	64(%ebx,%ecx,1),%xmm1
-	pxor	%xmm4,%xmm0
-.byte	102,15,56,0,193
-	ret
-.size	_vpaes_encrypt_core,.-_vpaes_encrypt_core
-.type	_vpaes_decrypt_core,@function
-.align	16
-_vpaes_decrypt_core:
-	movl	240(%edx),%eax
-	leal	608(%ebp),%ebx
-	movdqa	%xmm6,%xmm1
-	movdqa	-64(%ebx),%xmm2
-	pandn	%xmm0,%xmm1
-	movl	%eax,%ecx
-	psrld	$4,%xmm1
-	movdqu	(%edx),%xmm5
-	shll	$4,%ecx
-	pand	%xmm6,%xmm0
-.byte	102,15,56,0,208
-	movdqa	-48(%ebx),%xmm0
-	xorl	$48,%ecx
-.byte	102,15,56,0,193
-	andl	$48,%ecx
-	pxor	%xmm5,%xmm2
-	movdqa	176(%ebp),%xmm5
-	pxor	%xmm2,%xmm0
-	addl	$16,%edx
-	leal	-352(%ebx,%ecx,1),%ecx
-	jmp	.L002dec_entry
-.align	16
-.L003dec_loop:
-	movdqa	-32(%ebx),%xmm4
-.byte	102,15,56,0,226
-	pxor	%xmm0,%xmm4
-	movdqa	-16(%ebx),%xmm0
-.byte	102,15,56,0,195
-	pxor	%xmm4,%xmm0
-	addl	$16,%edx
-.byte	102,15,56,0,197
-	movdqa	(%ebx),%xmm4
-.byte	102,15,56,0,226
-	pxor	%xmm0,%xmm4
-	movdqa	16(%ebx),%xmm0
-.byte	102,15,56,0,195
-	pxor	%xmm4,%xmm0
-	subl	$1,%eax
-.byte	102,15,56,0,197
-	movdqa	32(%ebx),%xmm4
-.byte	102,15,56,0,226
-	pxor	%xmm0,%xmm4
-	movdqa	48(%ebx),%xmm0
-.byte	102,15,56,0,195
-	pxor	%xmm4,%xmm0
-.byte	102,15,56,0,197
-	movdqa	64(%ebx),%xmm4
-.byte	102,15,56,0,226
-	pxor	%xmm0,%xmm4
-	movdqa	80(%ebx),%xmm0
-.byte	102,15,56,0,195
-	pxor	%xmm4,%xmm0
-.byte	102,15,58,15,237,12
-.L002dec_entry:
-	movdqa	%xmm6,%xmm1
-	pandn	%xmm0,%xmm1
-	psrld	$4,%xmm1
-	pand	%xmm6,%xmm0
-	movdqa	-32(%ebp),%xmm2
-.byte	102,15,56,0,208
-	pxor	%xmm1,%xmm0
-	movdqa	%xmm7,%xmm3
-.byte	102,15,56,0,217
-	pxor	%xmm2,%xmm3
-	movdqa	%xmm7,%xmm4
-.byte	102,15,56,0,224
-	pxor	%xmm2,%xmm4
-	movdqa	%xmm7,%xmm2
-.byte	102,15,56,0,211
-	pxor	%xmm0,%xmm2
-	movdqa	%xmm7,%xmm3
-.byte	102,15,56,0,220
-	pxor	%xmm1,%xmm3
-	movdqu	(%edx),%xmm0
-	jnz	.L003dec_loop
-	movdqa	96(%ebx),%xmm4
-.byte	102,15,56,0,226
-	pxor	%xmm0,%xmm4
-	movdqa	112(%ebx),%xmm0
-	movdqa	(%ecx),%xmm2
-.byte	102,15,56,0,195
-	pxor	%xmm4,%xmm0
-.byte	102,15,56,0,194
-	ret
-.size	_vpaes_decrypt_core,.-_vpaes_decrypt_core
-.type	_vpaes_schedule_core,@function
-.align	16
-_vpaes_schedule_core:
-	addl	(%esp),%ebp
-	movdqu	(%esi),%xmm0
-	movdqa	320(%ebp),%xmm2
-	movdqa	%xmm0,%xmm3
-	leal	(%ebp),%ebx
-	movdqa	%xmm2,4(%esp)
-	call	_vpaes_schedule_transform
-	movdqa	%xmm0,%xmm7
-	testl	%edi,%edi
-	jnz	.L004schedule_am_decrypting
-	movdqu	%xmm0,(%edx)
-	jmp	.L005schedule_go
-.L004schedule_am_decrypting:
-	movdqa	256(%ebp,%ecx,1),%xmm1
-.byte	102,15,56,0,217
-	movdqu	%xmm3,(%edx)
-	xorl	$48,%ecx
-.L005schedule_go:
-	cmpl	$192,%eax
-	ja	.L006schedule_256
-	je	.L007schedule_192
-.L008schedule_128:
-	movl	$10,%eax
-.L009loop_schedule_128:
-	call	_vpaes_schedule_round
-	decl	%eax
-	jz	.L010schedule_mangle_last
-	call	_vpaes_schedule_mangle
-	jmp	.L009loop_schedule_128
-.align	16
-.L007schedule_192:
-	movdqu	8(%esi),%xmm0
-	call	_vpaes_schedule_transform
-	movdqa	%xmm0,%xmm6
-	pxor	%xmm4,%xmm4
-	movhlps	%xmm4,%xmm6
-	movl	$4,%eax
-.L011loop_schedule_192:
-	call	_vpaes_schedule_round
-.byte	102,15,58,15,198,8
-	call	_vpaes_schedule_mangle
-	call	_vpaes_schedule_192_smear
-	call	_vpaes_schedule_mangle
-	call	_vpaes_schedule_round
-	decl	%eax
-	jz	.L010schedule_mangle_last
-	call	_vpaes_schedule_mangle
-	call	_vpaes_schedule_192_smear
-	jmp	.L011loop_schedule_192
-.align	16
-.L006schedule_256:
-	movdqu	16(%esi),%xmm0
-	call	_vpaes_schedule_transform
-	movl	$7,%eax
-.L012loop_schedule_256:
-	call	_vpaes_schedule_mangle
-	movdqa	%xmm0,%xmm6
-	call	_vpaes_schedule_round
-	decl	%eax
-	jz	.L010schedule_mangle_last
-	call	_vpaes_schedule_mangle
-	pshufd	$255,%xmm0,%xmm0
-	movdqa	%xmm7,20(%esp)
-	movdqa	%xmm6,%xmm7
-	call	.L_vpaes_schedule_low_round
-	movdqa	20(%esp),%xmm7
-	jmp	.L012loop_schedule_256
-.align	16
-.L010schedule_mangle_last:
-	leal	384(%ebp),%ebx
-	testl	%edi,%edi
-	jnz	.L013schedule_mangle_last_dec
-	movdqa	256(%ebp,%ecx,1),%xmm1
-.byte	102,15,56,0,193
-	leal	352(%ebp),%ebx
-	addl	$32,%edx
-.L013schedule_mangle_last_dec:
-	addl	$-16,%edx
-	pxor	336(%ebp),%xmm0
-	call	_vpaes_schedule_transform
-	movdqu	%xmm0,(%edx)
-	pxor	%xmm0,%xmm0
-	pxor	%xmm1,%xmm1
-	pxor	%xmm2,%xmm2
-	pxor	%xmm3,%xmm3
-	pxor	%xmm4,%xmm4
-	pxor	%xmm5,%xmm5
-	pxor	%xmm6,%xmm6
-	pxor	%xmm7,%xmm7
-	ret
-.size	_vpaes_schedule_core,.-_vpaes_schedule_core
-.type	_vpaes_schedule_192_smear,@function
-.align	16
-_vpaes_schedule_192_smear:
-	pshufd	$128,%xmm6,%xmm0
-	pxor	%xmm0,%xmm6
-	pshufd	$254,%xmm7,%xmm0
-	pxor	%xmm0,%xmm6
-	movdqa	%xmm6,%xmm0
-	pxor	%xmm1,%xmm1
-	movhlps	%xmm1,%xmm6
-	ret
-.size	_vpaes_schedule_192_smear,.-_vpaes_schedule_192_smear
-.type	_vpaes_schedule_round,@function
-.align	16
-_vpaes_schedule_round:
-	movdqa	8(%esp),%xmm2
-	pxor	%xmm1,%xmm1
-.byte	102,15,58,15,202,15
-.byte	102,15,58,15,210,15
-	pxor	%xmm1,%xmm7
-	pshufd	$255,%xmm0,%xmm0
-.byte	102,15,58,15,192,1
-	movdqa	%xmm2,8(%esp)
-.L_vpaes_schedule_low_round:
-	movdqa	%xmm7,%xmm1
-	pslldq	$4,%xmm7
-	pxor	%xmm1,%xmm7
-	movdqa	%xmm7,%xmm1
-	pslldq	$8,%xmm7
-	pxor	%xmm1,%xmm7
-	pxor	336(%ebp),%xmm7
-	movdqa	-16(%ebp),%xmm4
-	movdqa	-48(%ebp),%xmm5
-	movdqa	%xmm4,%xmm1
-	pandn	%xmm0,%xmm1
-	psrld	$4,%xmm1
-	pand	%xmm4,%xmm0
-	movdqa	-32(%ebp),%xmm2
-.byte	102,15,56,0,208
-	pxor	%xmm1,%xmm0
-	movdqa	%xmm5,%xmm3
-.byte	102,15,56,0,217
-	pxor	%xmm2,%xmm3
-	movdqa	%xmm5,%xmm4
-.byte	102,15,56,0,224
-	pxor	%xmm2,%xmm4
-	movdqa	%xmm5,%xmm2
-.byte	102,15,56,0,211
-	pxor	%xmm0,%xmm2
-	movdqa	%xmm5,%xmm3
-.byte	102,15,56,0,220
-	pxor	%xmm1,%xmm3
-	movdqa	32(%ebp),%xmm4
-.byte	102,15,56,0,226
-	movdqa	48(%ebp),%xmm0
-.byte	102,15,56,0,195
-	pxor	%xmm4,%xmm0
-	pxor	%xmm7,%xmm0
-	movdqa	%xmm0,%xmm7
-	ret
-.size	_vpaes_schedule_round,.-_vpaes_schedule_round
-.type	_vpaes_schedule_transform,@function
-.align	16
-_vpaes_schedule_transform:
-	movdqa	-16(%ebp),%xmm2
-	movdqa	%xmm2,%xmm1
-	pandn	%xmm0,%xmm1
-	psrld	$4,%xmm1
-	pand	%xmm2,%xmm0
-	movdqa	(%ebx),%xmm2
-.byte	102,15,56,0,208
-	movdqa	16(%ebx),%xmm0
-.byte	102,15,56,0,193
-	pxor	%xmm2,%xmm0
-	ret
-.size	_vpaes_schedule_transform,.-_vpaes_schedule_transform
-.type	_vpaes_schedule_mangle,@function
-.align	16
-_vpaes_schedule_mangle:
-	movdqa	%xmm0,%xmm4
-	movdqa	128(%ebp),%xmm5
-	testl	%edi,%edi
-	jnz	.L014schedule_mangle_dec
-	addl	$16,%edx
-	pxor	336(%ebp),%xmm4
-.byte	102,15,56,0,229
-	movdqa	%xmm4,%xmm3
-.byte	102,15,56,0,229
-	pxor	%xmm4,%xmm3
-.byte	102,15,56,0,229
-	pxor	%xmm4,%xmm3
-	jmp	.L015schedule_mangle_both
-.align	16
-.L014schedule_mangle_dec:
-	movdqa	-16(%ebp),%xmm2
-	leal	416(%ebp),%esi
-	movdqa	%xmm2,%xmm1
-	pandn	%xmm4,%xmm1
-	psrld	$4,%xmm1
-	pand	%xmm2,%xmm4
-	movdqa	(%esi),%xmm2
-.byte	102,15,56,0,212
-	movdqa	16(%esi),%xmm3
-.byte	102,15,56,0,217
-	pxor	%xmm2,%xmm3
-.byte	102,15,56,0,221
-	movdqa	32(%esi),%xmm2
-.byte	102,15,56,0,212
-	pxor	%xmm3,%xmm2
-	movdqa	48(%esi),%xmm3
-.byte	102,15,56,0,217
-	pxor	%xmm2,%xmm3
-.byte	102,15,56,0,221
-	movdqa	64(%esi),%xmm2
-.byte	102,15,56,0,212
-	pxor	%xmm3,%xmm2
-	movdqa	80(%esi),%xmm3
-.byte	102,15,56,0,217
-	pxor	%xmm2,%xmm3
-.byte	102,15,56,0,221
-	movdqa	96(%esi),%xmm2
-.byte	102,15,56,0,212
-	pxor	%xmm3,%xmm2
-	movdqa	112(%esi),%xmm3
-.byte	102,15,56,0,217
-	pxor	%xmm2,%xmm3
-	addl	$-16,%edx
-.L015schedule_mangle_both:
-	movdqa	256(%ebp,%ecx,1),%xmm1
-.byte	102,15,56,0,217
-	addl	$-16,%ecx
-	andl	$48,%ecx
-	movdqu	%xmm3,(%edx)
-	ret
-.size	_vpaes_schedule_mangle,.-_vpaes_schedule_mangle
-.globl	vpaes_set_encrypt_key
-.type	vpaes_set_encrypt_key,@function
-.align	16
-vpaes_set_encrypt_key:
-.L_vpaes_set_encrypt_key_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%esi
-	leal	-56(%esp),%ebx
-	movl	24(%esp),%eax
-	andl	$-16,%ebx
-	movl	28(%esp),%edx
-	xchgl	%esp,%ebx
-	movl	%ebx,48(%esp)
-	movl	%eax,%ebx
-	shrl	$5,%ebx
-	addl	$5,%ebx
-	movl	%ebx,240(%edx)
-	movl	$48,%ecx
-	movl	$0,%edi
-	leal	.L_vpaes_consts+0x30-.L016pic_point,%ebp
-	call	_vpaes_schedule_core
-.L016pic_point:
-	movl	48(%esp),%esp
-	xorl	%eax,%eax
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	vpaes_set_encrypt_key,.-.L_vpaes_set_encrypt_key_begin
-.globl	vpaes_set_decrypt_key
-.type	vpaes_set_decrypt_key,@function
-.align	16
-vpaes_set_decrypt_key:
-.L_vpaes_set_decrypt_key_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%esi
-	leal	-56(%esp),%ebx
-	movl	24(%esp),%eax
-	andl	$-16,%ebx
-	movl	28(%esp),%edx
-	xchgl	%esp,%ebx
-	movl	%ebx,48(%esp)
-	movl	%eax,%ebx
-	shrl	$5,%ebx
-	addl	$5,%ebx
-	movl	%ebx,240(%edx)
-	shll	$4,%ebx
-	leal	16(%edx,%ebx,1),%edx
-	movl	$1,%edi
-	movl	%eax,%ecx
-	shrl	$1,%ecx
-	andl	$32,%ecx
-	xorl	$32,%ecx
-	leal	.L_vpaes_consts+0x30-.L017pic_point,%ebp
-	call	_vpaes_schedule_core
-.L017pic_point:
-	movl	48(%esp),%esp
-	xorl	%eax,%eax
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	vpaes_set_decrypt_key,.-.L_vpaes_set_decrypt_key_begin
-.globl	vpaes_encrypt
-.type	vpaes_encrypt,@function
-.align	16
-vpaes_encrypt:
-.L_vpaes_encrypt_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	leal	.L_vpaes_consts+0x30-.L018pic_point,%ebp
-	call	_vpaes_preheat
-.L018pic_point:
-	movl	20(%esp),%esi
-	leal	-56(%esp),%ebx
-	movl	24(%esp),%edi
-	andl	$-16,%ebx
-	movl	28(%esp),%edx
-	xchgl	%esp,%ebx
-	movl	%ebx,48(%esp)
-	movdqu	(%esi),%xmm0
-	call	_vpaes_encrypt_core
-	movdqu	%xmm0,(%edi)
-	movl	48(%esp),%esp
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	vpaes_encrypt,.-.L_vpaes_encrypt_begin
-.globl	vpaes_decrypt
-.type	vpaes_decrypt,@function
-.align	16
-vpaes_decrypt:
-.L_vpaes_decrypt_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	leal	.L_vpaes_consts+0x30-.L019pic_point,%ebp
-	call	_vpaes_preheat
-.L019pic_point:
-	movl	20(%esp),%esi
-	leal	-56(%esp),%ebx
-	movl	24(%esp),%edi
-	andl	$-16,%ebx
-	movl	28(%esp),%edx
-	xchgl	%esp,%ebx
-	movl	%ebx,48(%esp)
-	movdqu	(%esi),%xmm0
-	call	_vpaes_decrypt_core
-	movdqu	%xmm0,(%edi)
-	movl	48(%esp),%esp
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	vpaes_decrypt,.-.L_vpaes_decrypt_begin
-.globl	vpaes_cbc_encrypt
-.type	vpaes_cbc_encrypt,@function
-.align	16
-vpaes_cbc_encrypt:
-.L_vpaes_cbc_encrypt_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%esi
-	movl	24(%esp),%edi
-	movl	28(%esp),%eax
-	movl	32(%esp),%edx
-	subl	$16,%eax
-	jc	.L020cbc_abort
-	leal	-56(%esp),%ebx
-	movl	36(%esp),%ebp
-	andl	$-16,%ebx
-	movl	40(%esp),%ecx
-	xchgl	%esp,%ebx
-	movdqu	(%ebp),%xmm1
-	subl	%esi,%edi
-	movl	%ebx,48(%esp)
-	movl	%edi,(%esp)
-	movl	%edx,4(%esp)
-	movl	%ebp,8(%esp)
-	movl	%eax,%edi
-	leal	.L_vpaes_consts+0x30-.L021pic_point,%ebp
-	call	_vpaes_preheat
-.L021pic_point:
-	cmpl	$0,%ecx
-	je	.L022cbc_dec_loop
-	jmp	.L023cbc_enc_loop
-.align	16
-.L023cbc_enc_loop:
-	movdqu	(%esi),%xmm0
-	pxor	%xmm1,%xmm0
-	call	_vpaes_encrypt_core
-	movl	(%esp),%ebx
-	movl	4(%esp),%edx
-	movdqa	%xmm0,%xmm1
-	movdqu	%xmm0,(%ebx,%esi,1)
-	leal	16(%esi),%esi
-	subl	$16,%edi
-	jnc	.L023cbc_enc_loop
-	jmp	.L024cbc_done
-.align	16
-.L022cbc_dec_loop:
-	movdqu	(%esi),%xmm0
-	movdqa	%xmm1,16(%esp)
-	movdqa	%xmm0,32(%esp)
-	call	_vpaes_decrypt_core
-	movl	(%esp),%ebx
-	movl	4(%esp),%edx
-	pxor	16(%esp),%xmm0
-	movdqa	32(%esp),%xmm1
-	movdqu	%xmm0,(%ebx,%esi,1)
-	leal	16(%esi),%esi
-	subl	$16,%edi
-	jnc	.L022cbc_dec_loop
-.L024cbc_done:
-	movl	8(%esp),%ebx
-	movl	48(%esp),%esp
-	movdqu	%xmm1,(%ebx)
-.L020cbc_abort:
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	vpaes_cbc_encrypt,.-.L_vpaes_cbc_encrypt_begin
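
For readers tracing the removed hunk: vpaes_cbc_encrypt above is a plain CBC
driver around the _vpaes_encrypt_core/_vpaes_decrypt_core transforms. A
minimal C sketch of the same loop structure follows; block_encrypt and
block_decrypt are illustrative stand-ins for those core labels, not the
library's API, and the byte-wise XORs mirror the pxor %xmm1,%xmm0 and
pxor 16(%esp),%xmm0 steps in the assembly.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

void block_encrypt(uint8_t x[16], const void *key);  /* assumed helper */
void block_decrypt(uint8_t x[16], const void *key);  /* assumed helper */

static void cbc_sketch(const uint8_t *in, uint8_t *out, size_t len,
                       const void *key, uint8_t iv[16], int enc)
{
    if (len < 16)                   /* "subl $16,%eax; jc .L020cbc_abort" */
        return;
    for (; len >= 16; len -= 16, in += 16, out += 16) {
        uint8_t x[16], c[16];
        memcpy(x, in, 16);
        if (enc) {                                 /* .L023cbc_enc_loop */
            for (int i = 0; i < 16; i++)
                x[i] ^= iv[i];
            block_encrypt(x, key);
            memcpy(iv, x, 16);      /* ciphertext becomes the next IV */
        } else {                                   /* .L022cbc_dec_loop */
            memcpy(c, x, 16);       /* saved at 32(%esp) before the core */
            block_decrypt(x, key);
            for (int i = 0; i < 16; i++)
                x[i] ^= iv[i];
            memcpy(iv, c, 16);      /* old ciphertext becomes the next IV */
        }
        memcpy(out, x, 16);
    }
    /* .L024cbc_done stores the running IV back through 8(%esp) */
}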

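Before the wp-mmx.S hunk itself: whirlpool_block_mmx below is the Whirlpool
compression function over 64-byte blocks. From the argument loads at
20/24/28(%esp), the ten-round loop (cmpl $10,%esi), the round-constant fetch
at 4096(%ebp,%esi,8), and the feed-forward in .L004roundsdone, its contract
can be sketched as below. Note that each 8-byte row of .L001table is emitted
twice: the leal (%ecx,%ecx,1) doubled index selects a 16-byte slot, which
appears to let the movq/pxor reads at byte offsets 0..7 pick up rotated views
of a single 64-bit entry without separate tables. wp_round and wp_rc are
assumed names for the table-driven round and the ten constant rows stored
after the doubled entries; this is a hedged reconstruction, not the file's
API.

#include <stddef.h>
#include <stdint.h>

#define WP_ROUNDS 10                       /* matches "cmpl $10,%esi" */

extern const uint64_t wp_rc[WP_ROUNDS];    /* assumed: rows after the table */
void wp_round(uint64_t dst[8], const uint64_t src[8]);   /* assumed helper */

void whirlpool_block_sketch(uint64_t H[8], const uint64_t *inp, size_t n)
{
    while (n--) {                  /* .L002outerloop, one 64-byte block */
        uint64_t K[8], S[8], T[8];
        for (int i = 0; i < 8; i++) {
            K[i] = H[i];           /* round key = chaining value */
            S[i] = K[i] ^ inp[i];  /* state = key ^ message block */
        }
        for (int r = 0; r < WP_ROUNDS; r++) {
            wp_round(T, K);        /* key schedule: round + constant lane 0 */
            T[0] ^= wp_rc[r];
            for (int i = 0; i < 8; i++) K[i] = T[i];
            wp_round(T, S);        /* data path: round, then key add */
            for (int i = 0; i < 8; i++) S[i] = T[i] ^ K[i];
        }
        for (int i = 0; i < 8; i++)  /* Miyaguchi-Preneel feed-forward */
            H[i] ^= S[i] ^ inp[i];
        inp += 8;                    /* leal 64(%edi),%edi */
    }
}
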
Added: trunk/secure/lib/libcrypto/i386/wp-mmx.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/wp-mmx.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/wp-mmx.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,2216 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/wp-mmx.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from wp-mmx.pl.
+#ifdef PIC
+.file	"wp-mmx.S"
+.text
+.globl	whirlpool_block_mmx
+.type	whirlpool_block_mmx,@function
+.align	16
+whirlpool_block_mmx:
+.L_whirlpool_block_mmx_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%ebp
+	movl	%esp,%eax
+	subl	$148,%esp
+	andl	$-64,%esp
+	leal	128(%esp),%ebx
+	movl	%esi,(%ebx)
+	movl	%edi,4(%ebx)
+	movl	%ebp,8(%ebx)
+	movl	%eax,16(%ebx)
+	call	.L000pic_point
+.L000pic_point:
+	popl	%ebp
+	leal	.L001table-.L000pic_point(%ebp),%ebp
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+	movq	(%esi),%mm0
+	movq	8(%esi),%mm1
+	movq	16(%esi),%mm2
+	movq	24(%esi),%mm3
+	movq	32(%esi),%mm4
+	movq	40(%esi),%mm5
+	movq	48(%esi),%mm6
+	movq	56(%esi),%mm7
+.L002outerloop:
+	movq	%mm0,(%esp)
+	movq	%mm1,8(%esp)
+	movq	%mm2,16(%esp)
+	movq	%mm3,24(%esp)
+	movq	%mm4,32(%esp)
+	movq	%mm5,40(%esp)
+	movq	%mm6,48(%esp)
+	movq	%mm7,56(%esp)
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm1
+	pxor	16(%edi),%mm2
+	pxor	24(%edi),%mm3
+	pxor	32(%edi),%mm4
+	pxor	40(%edi),%mm5
+	pxor	48(%edi),%mm6
+	pxor	56(%edi),%mm7
+	movq	%mm0,64(%esp)
+	movq	%mm1,72(%esp)
+	movq	%mm2,80(%esp)
+	movq	%mm3,88(%esp)
+	movq	%mm4,96(%esp)
+	movq	%mm5,104(%esp)
+	movq	%mm6,112(%esp)
+	movq	%mm7,120(%esp)
+	xorl	%esi,%esi
+	movl	%esi,12(%ebx)
+.align	16
+.L003round:
+	movq	4096(%ebp,%esi,8),%mm0
+	movl	(%esp),%eax
+	movl	4(%esp),%ebx
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm0
+	movq	7(%ebp,%edi,8),%mm1
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	8(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	movq	6(%ebp,%esi,8),%mm2
+	movq	5(%ebp,%edi,8),%mm3
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	movq	4(%ebp,%esi,8),%mm4
+	movq	3(%ebp,%edi,8),%mm5
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	12(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	movq	2(%ebp,%esi,8),%mm6
+	movq	1(%ebp,%edi,8),%mm7
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm1
+	pxor	7(%ebp,%edi,8),%mm2
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	16(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm3
+	pxor	5(%ebp,%edi,8),%mm4
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm5
+	pxor	3(%ebp,%edi,8),%mm6
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	20(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm7
+	pxor	1(%ebp,%edi,8),%mm0
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm2
+	pxor	7(%ebp,%edi,8),%mm3
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	24(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm4
+	pxor	5(%ebp,%edi,8),%mm5
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm6
+	pxor	3(%ebp,%edi,8),%mm7
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	28(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm0
+	pxor	1(%ebp,%edi,8),%mm1
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm3
+	pxor	7(%ebp,%edi,8),%mm4
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	32(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm5
+	pxor	5(%ebp,%edi,8),%mm6
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm7
+	pxor	3(%ebp,%edi,8),%mm0
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	36(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm1
+	pxor	1(%ebp,%edi,8),%mm2
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm4
+	pxor	7(%ebp,%edi,8),%mm5
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	40(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm6
+	pxor	5(%ebp,%edi,8),%mm7
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm0
+	pxor	3(%ebp,%edi,8),%mm1
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	44(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm2
+	pxor	1(%ebp,%edi,8),%mm3
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm5
+	pxor	7(%ebp,%edi,8),%mm6
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	48(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm7
+	pxor	5(%ebp,%edi,8),%mm0
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm1
+	pxor	3(%ebp,%edi,8),%mm2
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	52(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm3
+	pxor	1(%ebp,%edi,8),%mm4
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm6
+	pxor	7(%ebp,%edi,8),%mm7
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	56(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm0
+	pxor	5(%ebp,%edi,8),%mm1
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm2
+	pxor	3(%ebp,%edi,8),%mm3
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	60(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm4
+	pxor	1(%ebp,%edi,8),%mm5
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm7
+	pxor	7(%ebp,%edi,8),%mm0
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	64(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm1
+	pxor	5(%ebp,%edi,8),%mm2
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm3
+	pxor	3(%ebp,%edi,8),%mm4
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	68(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm5
+	pxor	1(%ebp,%edi,8),%mm6
+	movq	%mm0,(%esp)
+	movq	%mm1,8(%esp)
+	movq	%mm2,16(%esp)
+	movq	%mm3,24(%esp)
+	movq	%mm4,32(%esp)
+	movq	%mm5,40(%esp)
+	movq	%mm6,48(%esp)
+	movq	%mm7,56(%esp)
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm0
+	pxor	7(%ebp,%edi,8),%mm1
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	72(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm2
+	pxor	5(%ebp,%edi,8),%mm3
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm4
+	pxor	3(%ebp,%edi,8),%mm5
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	76(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm6
+	pxor	1(%ebp,%edi,8),%mm7
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm1
+	pxor	7(%ebp,%edi,8),%mm2
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	80(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm3
+	pxor	5(%ebp,%edi,8),%mm4
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm5
+	pxor	3(%ebp,%edi,8),%mm6
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	84(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm7
+	pxor	1(%ebp,%edi,8),%mm0
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm2
+	pxor	7(%ebp,%edi,8),%mm3
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	88(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm4
+	pxor	5(%ebp,%edi,8),%mm5
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm6
+	pxor	3(%ebp,%edi,8),%mm7
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	92(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm0
+	pxor	1(%ebp,%edi,8),%mm1
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm3
+	pxor	7(%ebp,%edi,8),%mm4
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	96(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm5
+	pxor	5(%ebp,%edi,8),%mm6
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm7
+	pxor	3(%ebp,%edi,8),%mm0
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	100(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm1
+	pxor	1(%ebp,%edi,8),%mm2
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm4
+	pxor	7(%ebp,%edi,8),%mm5
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	104(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm6
+	pxor	5(%ebp,%edi,8),%mm7
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm0
+	pxor	3(%ebp,%edi,8),%mm1
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	108(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm2
+	pxor	1(%ebp,%edi,8),%mm3
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm5
+	pxor	7(%ebp,%edi,8),%mm6
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	112(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm7
+	pxor	5(%ebp,%edi,8),%mm0
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm1
+	pxor	3(%ebp,%edi,8),%mm2
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	116(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm3
+	pxor	1(%ebp,%edi,8),%mm4
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm6
+	pxor	7(%ebp,%edi,8),%mm7
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	120(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm0
+	pxor	5(%ebp,%edi,8),%mm1
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm2
+	pxor	3(%ebp,%edi,8),%mm3
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	124(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm4
+	pxor	1(%ebp,%edi,8),%mm5
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm7
+	pxor	7(%ebp,%edi,8),%mm0
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm1
+	pxor	5(%ebp,%edi,8),%mm2
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm3
+	pxor	3(%ebp,%edi,8),%mm4
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm5
+	pxor	1(%ebp,%edi,8),%mm6
+	leal	128(%esp),%ebx
+	movl	12(%ebx),%esi
+	addl	$1,%esi
+	cmpl	$10,%esi
+	je	.L004roundsdone
+	movl	%esi,12(%ebx)
+	movq	%mm0,64(%esp)
+	movq	%mm1,72(%esp)
+	movq	%mm2,80(%esp)
+	movq	%mm3,88(%esp)
+	movq	%mm4,96(%esp)
+	movq	%mm5,104(%esp)
+	movq	%mm6,112(%esp)
+	movq	%mm7,120(%esp)
+	jmp	.L003round
+.align	16
+.L004roundsdone:
+	movl	(%ebx),%esi
+	movl	4(%ebx),%edi
+	movl	8(%ebx),%eax
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm1
+	pxor	16(%edi),%mm2
+	pxor	24(%edi),%mm3
+	pxor	32(%edi),%mm4
+	pxor	40(%edi),%mm5
+	pxor	48(%edi),%mm6
+	pxor	56(%edi),%mm7
+	pxor	(%esi),%mm0
+	pxor	8(%esi),%mm1
+	pxor	16(%esi),%mm2
+	pxor	24(%esi),%mm3
+	pxor	32(%esi),%mm4
+	pxor	40(%esi),%mm5
+	pxor	48(%esi),%mm6
+	pxor	56(%esi),%mm7
+	movq	%mm0,(%esi)
+	movq	%mm1,8(%esi)
+	movq	%mm2,16(%esi)
+	movq	%mm3,24(%esi)
+	movq	%mm4,32(%esi)
+	movq	%mm5,40(%esi)
+	movq	%mm6,48(%esi)
+	movq	%mm7,56(%esi)
+	leal	64(%edi),%edi
+	subl	$1,%eax
+	jz	.L005alldone
+	movl	%edi,4(%ebx)
+	movl	%eax,8(%ebx)
+	jmp	.L002outerloop
+.L005alldone:
+	emms
+	movl	16(%ebx),%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	64
+.L001table:
+.byte	24,24,96,24,192,120,48,216
+.byte	24,24,96,24,192,120,48,216
+.byte	35,35,140,35,5,175,70,38
+.byte	35,35,140,35,5,175,70,38
+.byte	198,198,63,198,126,249,145,184
+.byte	198,198,63,198,126,249,145,184
+.byte	232,232,135,232,19,111,205,251
+.byte	232,232,135,232,19,111,205,251
+.byte	135,135,38,135,76,161,19,203
+.byte	135,135,38,135,76,161,19,203
+.byte	184,184,218,184,169,98,109,17
+.byte	184,184,218,184,169,98,109,17
+.byte	1,1,4,1,8,5,2,9
+.byte	1,1,4,1,8,5,2,9
+.byte	79,79,33,79,66,110,158,13
+.byte	79,79,33,79,66,110,158,13
+.byte	54,54,216,54,173,238,108,155
+.byte	54,54,216,54,173,238,108,155
+.byte	166,166,162,166,89,4,81,255
+.byte	166,166,162,166,89,4,81,255
+.byte	210,210,111,210,222,189,185,12
+.byte	210,210,111,210,222,189,185,12
+.byte	245,245,243,245,251,6,247,14
+.byte	245,245,243,245,251,6,247,14
+.byte	121,121,249,121,239,128,242,150
+.byte	121,121,249,121,239,128,242,150
+.byte	111,111,161,111,95,206,222,48
+.byte	111,111,161,111,95,206,222,48
+.byte	145,145,126,145,252,239,63,109
+.byte	145,145,126,145,252,239,63,109
+.byte	82,82,85,82,170,7,164,248
+.byte	82,82,85,82,170,7,164,248
+.byte	96,96,157,96,39,253,192,71
+.byte	96,96,157,96,39,253,192,71
+.byte	188,188,202,188,137,118,101,53
+.byte	188,188,202,188,137,118,101,53
+.byte	155,155,86,155,172,205,43,55
+.byte	155,155,86,155,172,205,43,55
+.byte	142,142,2,142,4,140,1,138
+.byte	142,142,2,142,4,140,1,138
+.byte	163,163,182,163,113,21,91,210
+.byte	163,163,182,163,113,21,91,210
+.byte	12,12,48,12,96,60,24,108
+.byte	12,12,48,12,96,60,24,108
+.byte	123,123,241,123,255,138,246,132
+.byte	123,123,241,123,255,138,246,132
+.byte	53,53,212,53,181,225,106,128
+.byte	53,53,212,53,181,225,106,128
+.byte	29,29,116,29,232,105,58,245
+.byte	29,29,116,29,232,105,58,245
+.byte	224,224,167,224,83,71,221,179
+.byte	224,224,167,224,83,71,221,179
+.byte	215,215,123,215,246,172,179,33
+.byte	215,215,123,215,246,172,179,33
+.byte	194,194,47,194,94,237,153,156
+.byte	194,194,47,194,94,237,153,156
+.byte	46,46,184,46,109,150,92,67
+.byte	46,46,184,46,109,150,92,67
+.byte	75,75,49,75,98,122,150,41
+.byte	75,75,49,75,98,122,150,41
+.byte	254,254,223,254,163,33,225,93
+.byte	254,254,223,254,163,33,225,93
+.byte	87,87,65,87,130,22,174,213
+.byte	87,87,65,87,130,22,174,213
+.byte	21,21,84,21,168,65,42,189
+.byte	21,21,84,21,168,65,42,189
+.byte	119,119,193,119,159,182,238,232
+.byte	119,119,193,119,159,182,238,232
+.byte	55,55,220,55,165,235,110,146
+.byte	55,55,220,55,165,235,110,146
+.byte	229,229,179,229,123,86,215,158
+.byte	229,229,179,229,123,86,215,158
+.byte	159,159,70,159,140,217,35,19
+.byte	159,159,70,159,140,217,35,19
+.byte	240,240,231,240,211,23,253,35
+.byte	240,240,231,240,211,23,253,35
+.byte	74,74,53,74,106,127,148,32
+.byte	74,74,53,74,106,127,148,32
+.byte	218,218,79,218,158,149,169,68
+.byte	218,218,79,218,158,149,169,68
+.byte	88,88,125,88,250,37,176,162
+.byte	88,88,125,88,250,37,176,162
+.byte	201,201,3,201,6,202,143,207
+.byte	201,201,3,201,6,202,143,207
+.byte	41,41,164,41,85,141,82,124
+.byte	41,41,164,41,85,141,82,124
+.byte	10,10,40,10,80,34,20,90
+.byte	10,10,40,10,80,34,20,90
+.byte	177,177,254,177,225,79,127,80
+.byte	177,177,254,177,225,79,127,80
+.byte	160,160,186,160,105,26,93,201
+.byte	160,160,186,160,105,26,93,201
+.byte	107,107,177,107,127,218,214,20
+.byte	107,107,177,107,127,218,214,20
+.byte	133,133,46,133,92,171,23,217
+.byte	133,133,46,133,92,171,23,217
+.byte	189,189,206,189,129,115,103,60
+.byte	189,189,206,189,129,115,103,60
+.byte	93,93,105,93,210,52,186,143
+.byte	93,93,105,93,210,52,186,143
+.byte	16,16,64,16,128,80,32,144
+.byte	16,16,64,16,128,80,32,144
+.byte	244,244,247,244,243,3,245,7
+.byte	244,244,247,244,243,3,245,7
+.byte	203,203,11,203,22,192,139,221
+.byte	203,203,11,203,22,192,139,221
+.byte	62,62,248,62,237,198,124,211
+.byte	62,62,248,62,237,198,124,211
+.byte	5,5,20,5,40,17,10,45
+.byte	5,5,20,5,40,17,10,45
+.byte	103,103,129,103,31,230,206,120
+.byte	103,103,129,103,31,230,206,120
+.byte	228,228,183,228,115,83,213,151
+.byte	228,228,183,228,115,83,213,151
+.byte	39,39,156,39,37,187,78,2
+.byte	39,39,156,39,37,187,78,2
+.byte	65,65,25,65,50,88,130,115
+.byte	65,65,25,65,50,88,130,115
+.byte	139,139,22,139,44,157,11,167
+.byte	139,139,22,139,44,157,11,167
+.byte	167,167,166,167,81,1,83,246
+.byte	167,167,166,167,81,1,83,246
+.byte	125,125,233,125,207,148,250,178
+.byte	125,125,233,125,207,148,250,178
+.byte	149,149,110,149,220,251,55,73
+.byte	149,149,110,149,220,251,55,73
+.byte	216,216,71,216,142,159,173,86
+.byte	216,216,71,216,142,159,173,86
+.byte	251,251,203,251,139,48,235,112
+.byte	251,251,203,251,139,48,235,112
+.byte	238,238,159,238,35,113,193,205
+.byte	238,238,159,238,35,113,193,205
+.byte	124,124,237,124,199,145,248,187
+.byte	124,124,237,124,199,145,248,187
+.byte	102,102,133,102,23,227,204,113
+.byte	102,102,133,102,23,227,204,113
+.byte	221,221,83,221,166,142,167,123
+.byte	221,221,83,221,166,142,167,123
+.byte	23,23,92,23,184,75,46,175
+.byte	23,23,92,23,184,75,46,175
+.byte	71,71,1,71,2,70,142,69
+.byte	71,71,1,71,2,70,142,69
+.byte	158,158,66,158,132,220,33,26
+.byte	158,158,66,158,132,220,33,26
+.byte	202,202,15,202,30,197,137,212
+.byte	202,202,15,202,30,197,137,212
+.byte	45,45,180,45,117,153,90,88
+.byte	45,45,180,45,117,153,90,88
+.byte	191,191,198,191,145,121,99,46
+.byte	191,191,198,191,145,121,99,46
+.byte	7,7,28,7,56,27,14,63
+.byte	7,7,28,7,56,27,14,63
+.byte	173,173,142,173,1,35,71,172
+.byte	173,173,142,173,1,35,71,172
+.byte	90,90,117,90,234,47,180,176
+.byte	90,90,117,90,234,47,180,176
+.byte	131,131,54,131,108,181,27,239
+.byte	131,131,54,131,108,181,27,239
+.byte	51,51,204,51,133,255,102,182
+.byte	51,51,204,51,133,255,102,182
+.byte	99,99,145,99,63,242,198,92
+.byte	99,99,145,99,63,242,198,92
+.byte	2,2,8,2,16,10,4,18
+.byte	2,2,8,2,16,10,4,18
+.byte	170,170,146,170,57,56,73,147
+.byte	170,170,146,170,57,56,73,147
+.byte	113,113,217,113,175,168,226,222
+.byte	113,113,217,113,175,168,226,222
+.byte	200,200,7,200,14,207,141,198
+.byte	200,200,7,200,14,207,141,198
+.byte	25,25,100,25,200,125,50,209
+.byte	25,25,100,25,200,125,50,209
+.byte	73,73,57,73,114,112,146,59
+.byte	73,73,57,73,114,112,146,59
+.byte	217,217,67,217,134,154,175,95
+.byte	217,217,67,217,134,154,175,95
+.byte	242,242,239,242,195,29,249,49
+.byte	242,242,239,242,195,29,249,49
+.byte	227,227,171,227,75,72,219,168
+.byte	227,227,171,227,75,72,219,168
+.byte	91,91,113,91,226,42,182,185
+.byte	91,91,113,91,226,42,182,185
+.byte	136,136,26,136,52,146,13,188
+.byte	136,136,26,136,52,146,13,188
+.byte	154,154,82,154,164,200,41,62
+.byte	154,154,82,154,164,200,41,62
+.byte	38,38,152,38,45,190,76,11
+.byte	38,38,152,38,45,190,76,11
+.byte	50,50,200,50,141,250,100,191
+.byte	50,50,200,50,141,250,100,191
+.byte	176,176,250,176,233,74,125,89
+.byte	176,176,250,176,233,74,125,89
+.byte	233,233,131,233,27,106,207,242
+.byte	233,233,131,233,27,106,207,242
+.byte	15,15,60,15,120,51,30,119
+.byte	15,15,60,15,120,51,30,119
+.byte	213,213,115,213,230,166,183,51
+.byte	213,213,115,213,230,166,183,51
+.byte	128,128,58,128,116,186,29,244
+.byte	128,128,58,128,116,186,29,244
+.byte	190,190,194,190,153,124,97,39
+.byte	190,190,194,190,153,124,97,39
+.byte	205,205,19,205,38,222,135,235
+.byte	205,205,19,205,38,222,135,235
+.byte	52,52,208,52,189,228,104,137
+.byte	52,52,208,52,189,228,104,137
+.byte	72,72,61,72,122,117,144,50
+.byte	72,72,61,72,122,117,144,50
+.byte	255,255,219,255,171,36,227,84
+.byte	255,255,219,255,171,36,227,84
+.byte	122,122,245,122,247,143,244,141
+.byte	122,122,245,122,247,143,244,141
+.byte	144,144,122,144,244,234,61,100
+.byte	144,144,122,144,244,234,61,100
+.byte	95,95,97,95,194,62,190,157
+.byte	95,95,97,95,194,62,190,157
+.byte	32,32,128,32,29,160,64,61
+.byte	32,32,128,32,29,160,64,61
+.byte	104,104,189,104,103,213,208,15
+.byte	104,104,189,104,103,213,208,15
+.byte	26,26,104,26,208,114,52,202
+.byte	26,26,104,26,208,114,52,202
+.byte	174,174,130,174,25,44,65,183
+.byte	174,174,130,174,25,44,65,183
+.byte	180,180,234,180,201,94,117,125
+.byte	180,180,234,180,201,94,117,125
+.byte	84,84,77,84,154,25,168,206
+.byte	84,84,77,84,154,25,168,206
+.byte	147,147,118,147,236,229,59,127
+.byte	147,147,118,147,236,229,59,127
+.byte	34,34,136,34,13,170,68,47
+.byte	34,34,136,34,13,170,68,47
+.byte	100,100,141,100,7,233,200,99
+.byte	100,100,141,100,7,233,200,99
+.byte	241,241,227,241,219,18,255,42
+.byte	241,241,227,241,219,18,255,42
+.byte	115,115,209,115,191,162,230,204
+.byte	115,115,209,115,191,162,230,204
+.byte	18,18,72,18,144,90,36,130
+.byte	18,18,72,18,144,90,36,130
+.byte	64,64,29,64,58,93,128,122
+.byte	64,64,29,64,58,93,128,122
+.byte	8,8,32,8,64,40,16,72
+.byte	8,8,32,8,64,40,16,72
+.byte	195,195,43,195,86,232,155,149
+.byte	195,195,43,195,86,232,155,149
+.byte	236,236,151,236,51,123,197,223
+.byte	236,236,151,236,51,123,197,223
+.byte	219,219,75,219,150,144,171,77
+.byte	219,219,75,219,150,144,171,77
+.byte	161,161,190,161,97,31,95,192
+.byte	161,161,190,161,97,31,95,192
+.byte	141,141,14,141,28,131,7,145
+.byte	141,141,14,141,28,131,7,145
+.byte	61,61,244,61,245,201,122,200
+.byte	61,61,244,61,245,201,122,200
+.byte	151,151,102,151,204,241,51,91
+.byte	151,151,102,151,204,241,51,91
+.byte	0,0,0,0,0,0,0,0
+.byte	0,0,0,0,0,0,0,0
+.byte	207,207,27,207,54,212,131,249
+.byte	207,207,27,207,54,212,131,249
+.byte	43,43,172,43,69,135,86,110
+.byte	43,43,172,43,69,135,86,110
+.byte	118,118,197,118,151,179,236,225
+.byte	118,118,197,118,151,179,236,225
+.byte	130,130,50,130,100,176,25,230
+.byte	130,130,50,130,100,176,25,230
+.byte	214,214,127,214,254,169,177,40
+.byte	214,214,127,214,254,169,177,40
+.byte	27,27,108,27,216,119,54,195
+.byte	27,27,108,27,216,119,54,195
+.byte	181,181,238,181,193,91,119,116
+.byte	181,181,238,181,193,91,119,116
+.byte	175,175,134,175,17,41,67,190
+.byte	175,175,134,175,17,41,67,190
+.byte	106,106,181,106,119,223,212,29
+.byte	106,106,181,106,119,223,212,29
+.byte	80,80,93,80,186,13,160,234
+.byte	80,80,93,80,186,13,160,234
+.byte	69,69,9,69,18,76,138,87
+.byte	69,69,9,69,18,76,138,87
+.byte	243,243,235,243,203,24,251,56
+.byte	243,243,235,243,203,24,251,56
+.byte	48,48,192,48,157,240,96,173
+.byte	48,48,192,48,157,240,96,173
+.byte	239,239,155,239,43,116,195,196
+.byte	239,239,155,239,43,116,195,196
+.byte	63,63,252,63,229,195,126,218
+.byte	63,63,252,63,229,195,126,218
+.byte	85,85,73,85,146,28,170,199
+.byte	85,85,73,85,146,28,170,199
+.byte	162,162,178,162,121,16,89,219
+.byte	162,162,178,162,121,16,89,219
+.byte	234,234,143,234,3,101,201,233
+.byte	234,234,143,234,3,101,201,233
+.byte	101,101,137,101,15,236,202,106
+.byte	101,101,137,101,15,236,202,106
+.byte	186,186,210,186,185,104,105,3
+.byte	186,186,210,186,185,104,105,3
+.byte	47,47,188,47,101,147,94,74
+.byte	47,47,188,47,101,147,94,74
+.byte	192,192,39,192,78,231,157,142
+.byte	192,192,39,192,78,231,157,142
+.byte	222,222,95,222,190,129,161,96
+.byte	222,222,95,222,190,129,161,96
+.byte	28,28,112,28,224,108,56,252
+.byte	28,28,112,28,224,108,56,252
+.byte	253,253,211,253,187,46,231,70
+.byte	253,253,211,253,187,46,231,70
+.byte	77,77,41,77,82,100,154,31
+.byte	77,77,41,77,82,100,154,31
+.byte	146,146,114,146,228,224,57,118
+.byte	146,146,114,146,228,224,57,118
+.byte	117,117,201,117,143,188,234,250
+.byte	117,117,201,117,143,188,234,250
+.byte	6,6,24,6,48,30,12,54
+.byte	6,6,24,6,48,30,12,54
+.byte	138,138,18,138,36,152,9,174
+.byte	138,138,18,138,36,152,9,174
+.byte	178,178,242,178,249,64,121,75
+.byte	178,178,242,178,249,64,121,75
+.byte	230,230,191,230,99,89,209,133
+.byte	230,230,191,230,99,89,209,133
+.byte	14,14,56,14,112,54,28,126
+.byte	14,14,56,14,112,54,28,126
+.byte	31,31,124,31,248,99,62,231
+.byte	31,31,124,31,248,99,62,231
+.byte	98,98,149,98,55,247,196,85
+.byte	98,98,149,98,55,247,196,85
+.byte	212,212,119,212,238,163,181,58
+.byte	212,212,119,212,238,163,181,58
+.byte	168,168,154,168,41,50,77,129
+.byte	168,168,154,168,41,50,77,129
+.byte	150,150,98,150,196,244,49,82
+.byte	150,150,98,150,196,244,49,82
+.byte	249,249,195,249,155,58,239,98
+.byte	249,249,195,249,155,58,239,98
+.byte	197,197,51,197,102,246,151,163
+.byte	197,197,51,197,102,246,151,163
+.byte	37,37,148,37,53,177,74,16
+.byte	37,37,148,37,53,177,74,16
+.byte	89,89,121,89,242,32,178,171
+.byte	89,89,121,89,242,32,178,171
+.byte	132,132,42,132,84,174,21,208
+.byte	132,132,42,132,84,174,21,208
+.byte	114,114,213,114,183,167,228,197
+.byte	114,114,213,114,183,167,228,197
+.byte	57,57,228,57,213,221,114,236
+.byte	57,57,228,57,213,221,114,236
+.byte	76,76,45,76,90,97,152,22
+.byte	76,76,45,76,90,97,152,22
+.byte	94,94,101,94,202,59,188,148
+.byte	94,94,101,94,202,59,188,148
+.byte	120,120,253,120,231,133,240,159
+.byte	120,120,253,120,231,133,240,159
+.byte	56,56,224,56,221,216,112,229
+.byte	56,56,224,56,221,216,112,229
+.byte	140,140,10,140,20,134,5,152
+.byte	140,140,10,140,20,134,5,152
+.byte	209,209,99,209,198,178,191,23
+.byte	209,209,99,209,198,178,191,23
+.byte	165,165,174,165,65,11,87,228
+.byte	165,165,174,165,65,11,87,228
+.byte	226,226,175,226,67,77,217,161
+.byte	226,226,175,226,67,77,217,161
+.byte	97,97,153,97,47,248,194,78
+.byte	97,97,153,97,47,248,194,78
+.byte	179,179,246,179,241,69,123,66
+.byte	179,179,246,179,241,69,123,66
+.byte	33,33,132,33,21,165,66,52
+.byte	33,33,132,33,21,165,66,52
+.byte	156,156,74,156,148,214,37,8
+.byte	156,156,74,156,148,214,37,8
+.byte	30,30,120,30,240,102,60,238
+.byte	30,30,120,30,240,102,60,238
+.byte	67,67,17,67,34,82,134,97
+.byte	67,67,17,67,34,82,134,97
+.byte	199,199,59,199,118,252,147,177
+.byte	199,199,59,199,118,252,147,177
+.byte	252,252,215,252,179,43,229,79
+.byte	252,252,215,252,179,43,229,79
+.byte	4,4,16,4,32,20,8,36
+.byte	4,4,16,4,32,20,8,36
+.byte	81,81,89,81,178,8,162,227
+.byte	81,81,89,81,178,8,162,227
+.byte	153,153,94,153,188,199,47,37
+.byte	153,153,94,153,188,199,47,37
+.byte	109,109,169,109,79,196,218,34
+.byte	109,109,169,109,79,196,218,34
+.byte	13,13,52,13,104,57,26,101
+.byte	13,13,52,13,104,57,26,101
+.byte	250,250,207,250,131,53,233,121
+.byte	250,250,207,250,131,53,233,121
+.byte	223,223,91,223,182,132,163,105
+.byte	223,223,91,223,182,132,163,105
+.byte	126,126,229,126,215,155,252,169
+.byte	126,126,229,126,215,155,252,169
+.byte	36,36,144,36,61,180,72,25
+.byte	36,36,144,36,61,180,72,25
+.byte	59,59,236,59,197,215,118,254
+.byte	59,59,236,59,197,215,118,254
+.byte	171,171,150,171,49,61,75,154
+.byte	171,171,150,171,49,61,75,154
+.byte	206,206,31,206,62,209,129,240
+.byte	206,206,31,206,62,209,129,240
+.byte	17,17,68,17,136,85,34,153
+.byte	17,17,68,17,136,85,34,153
+.byte	143,143,6,143,12,137,3,131
+.byte	143,143,6,143,12,137,3,131
+.byte	78,78,37,78,74,107,156,4
+.byte	78,78,37,78,74,107,156,4
+.byte	183,183,230,183,209,81,115,102
+.byte	183,183,230,183,209,81,115,102
+.byte	235,235,139,235,11,96,203,224
+.byte	235,235,139,235,11,96,203,224
+.byte	60,60,240,60,253,204,120,193
+.byte	60,60,240,60,253,204,120,193
+.byte	129,129,62,129,124,191,31,253
+.byte	129,129,62,129,124,191,31,253
+.byte	148,148,106,148,212,254,53,64
+.byte	148,148,106,148,212,254,53,64
+.byte	247,247,251,247,235,12,243,28
+.byte	247,247,251,247,235,12,243,28
+.byte	185,185,222,185,161,103,111,24
+.byte	185,185,222,185,161,103,111,24
+.byte	19,19,76,19,152,95,38,139
+.byte	19,19,76,19,152,95,38,139
+.byte	44,44,176,44,125,156,88,81
+.byte	44,44,176,44,125,156,88,81
+.byte	211,211,107,211,214,184,187,5
+.byte	211,211,107,211,214,184,187,5
+.byte	231,231,187,231,107,92,211,140
+.byte	231,231,187,231,107,92,211,140
+.byte	110,110,165,110,87,203,220,57
+.byte	110,110,165,110,87,203,220,57
+.byte	196,196,55,196,110,243,149,170
+.byte	196,196,55,196,110,243,149,170
+.byte	3,3,12,3,24,15,6,27
+.byte	3,3,12,3,24,15,6,27
+.byte	86,86,69,86,138,19,172,220
+.byte	86,86,69,86,138,19,172,220
+.byte	68,68,13,68,26,73,136,94
+.byte	68,68,13,68,26,73,136,94
+.byte	127,127,225,127,223,158,254,160
+.byte	127,127,225,127,223,158,254,160
+.byte	169,169,158,169,33,55,79,136
+.byte	169,169,158,169,33,55,79,136
+.byte	42,42,168,42,77,130,84,103
+.byte	42,42,168,42,77,130,84,103
+.byte	187,187,214,187,177,109,107,10
+.byte	187,187,214,187,177,109,107,10
+.byte	193,193,35,193,70,226,159,135
+.byte	193,193,35,193,70,226,159,135
+.byte	83,83,81,83,162,2,166,241
+.byte	83,83,81,83,162,2,166,241
+.byte	220,220,87,220,174,139,165,114
+.byte	220,220,87,220,174,139,165,114
+.byte	11,11,44,11,88,39,22,83
+.byte	11,11,44,11,88,39,22,83
+.byte	157,157,78,157,156,211,39,1
+.byte	157,157,78,157,156,211,39,1
+.byte	108,108,173,108,71,193,216,43
+.byte	108,108,173,108,71,193,216,43
+.byte	49,49,196,49,149,245,98,164
+.byte	49,49,196,49,149,245,98,164
+.byte	116,116,205,116,135,185,232,243
+.byte	116,116,205,116,135,185,232,243
+.byte	246,246,255,246,227,9,241,21
+.byte	246,246,255,246,227,9,241,21
+.byte	70,70,5,70,10,67,140,76
+.byte	70,70,5,70,10,67,140,76
+.byte	172,172,138,172,9,38,69,165
+.byte	172,172,138,172,9,38,69,165
+.byte	137,137,30,137,60,151,15,181
+.byte	137,137,30,137,60,151,15,181
+.byte	20,20,80,20,160,68,40,180
+.byte	20,20,80,20,160,68,40,180
+.byte	225,225,163,225,91,66,223,186
+.byte	225,225,163,225,91,66,223,186
+.byte	22,22,88,22,176,78,44,166
+.byte	22,22,88,22,176,78,44,166
+.byte	58,58,232,58,205,210,116,247
+.byte	58,58,232,58,205,210,116,247
+.byte	105,105,185,105,111,208,210,6
+.byte	105,105,185,105,111,208,210,6
+.byte	9,9,36,9,72,45,18,65
+.byte	9,9,36,9,72,45,18,65
+.byte	112,112,221,112,167,173,224,215
+.byte	112,112,221,112,167,173,224,215
+.byte	182,182,226,182,217,84,113,111
+.byte	182,182,226,182,217,84,113,111
+.byte	208,208,103,208,206,183,189,30
+.byte	208,208,103,208,206,183,189,30
+.byte	237,237,147,237,59,126,199,214
+.byte	237,237,147,237,59,126,199,214
+.byte	204,204,23,204,46,219,133,226
+.byte	204,204,23,204,46,219,133,226
+.byte	66,66,21,66,42,87,132,104
+.byte	66,66,21,66,42,87,132,104
+.byte	152,152,90,152,180,194,45,44
+.byte	152,152,90,152,180,194,45,44
+.byte	164,164,170,164,73,14,85,237
+.byte	164,164,170,164,73,14,85,237
+.byte	40,40,160,40,93,136,80,117
+.byte	40,40,160,40,93,136,80,117
+.byte	92,92,109,92,218,49,184,134
+.byte	92,92,109,92,218,49,184,134
+.byte	248,248,199,248,147,63,237,107
+.byte	248,248,199,248,147,63,237,107
+.byte	134,134,34,134,68,164,17,194
+.byte	134,134,34,134,68,164,17,194
+.byte	24,35,198,232,135,184,1,79
+.byte	54,166,210,245,121,111,145,82
+.byte	96,188,155,142,163,12,123,53
+.byte	29,224,215,194,46,75,254,87
+.byte	21,119,55,229,159,240,74,218
+.byte	88,201,41,10,177,160,107,133
+.byte	189,93,16,244,203,62,5,103
+.byte	228,39,65,139,167,125,149,216
+.byte	251,238,124,102,221,23,71,158
+.byte	202,45,191,7,173,90,131,51
+.size	whirlpool_block_mmx,.-.L_whirlpool_block_mmx_begin
+#else
+.file	"wp-mmx.S"
+.text
+.globl	whirlpool_block_mmx
+.type	whirlpool_block_mmx,@function
+.align	16
+whirlpool_block_mmx:
+.L_whirlpool_block_mmx_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	20(%esp),%esi
+	movl	24(%esp),%edi
+	movl	28(%esp),%ebp
+	movl	%esp,%eax
+	subl	$148,%esp
+	andl	$-64,%esp
+	leal	128(%esp),%ebx
+	movl	%esi,(%ebx)
+	movl	%edi,4(%ebx)
+	movl	%ebp,8(%ebx)
+	movl	%eax,16(%ebx)
+	call	.L000pic_point
+.L000pic_point:
+	popl	%ebp
+	leal	.L001table-.L000pic_point(%ebp),%ebp
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+	movq	(%esi),%mm0
+	movq	8(%esi),%mm1
+	movq	16(%esi),%mm2
+	movq	24(%esi),%mm3
+	movq	32(%esi),%mm4
+	movq	40(%esi),%mm5
+	movq	48(%esi),%mm6
+	movq	56(%esi),%mm7
+.L002outerloop:
+	movq	%mm0,(%esp)
+	movq	%mm1,8(%esp)
+	movq	%mm2,16(%esp)
+	movq	%mm3,24(%esp)
+	movq	%mm4,32(%esp)
+	movq	%mm5,40(%esp)
+	movq	%mm6,48(%esp)
+	movq	%mm7,56(%esp)
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm1
+	pxor	16(%edi),%mm2
+	pxor	24(%edi),%mm3
+	pxor	32(%edi),%mm4
+	pxor	40(%edi),%mm5
+	pxor	48(%edi),%mm6
+	pxor	56(%edi),%mm7
+	movq	%mm0,64(%esp)
+	movq	%mm1,72(%esp)
+	movq	%mm2,80(%esp)
+	movq	%mm3,88(%esp)
+	movq	%mm4,96(%esp)
+	movq	%mm5,104(%esp)
+	movq	%mm6,112(%esp)
+	movq	%mm7,120(%esp)
+	xorl	%esi,%esi
+	movl	%esi,12(%ebx)
+.align	16
+.L003round:
+	movq	4096(%ebp,%esi,8),%mm0
+	movl	(%esp),%eax
+	movl	4(%esp),%ebx
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm0
+	movq	7(%ebp,%edi,8),%mm1
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	8(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	movq	6(%ebp,%esi,8),%mm2
+	movq	5(%ebp,%edi,8),%mm3
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	movq	4(%ebp,%esi,8),%mm4
+	movq	3(%ebp,%edi,8),%mm5
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	12(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	movq	2(%ebp,%esi,8),%mm6
+	movq	1(%ebp,%edi,8),%mm7
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm1
+	pxor	7(%ebp,%edi,8),%mm2
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	16(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm3
+	pxor	5(%ebp,%edi,8),%mm4
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm5
+	pxor	3(%ebp,%edi,8),%mm6
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	20(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm7
+	pxor	1(%ebp,%edi,8),%mm0
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm2
+	pxor	7(%ebp,%edi,8),%mm3
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	24(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm4
+	pxor	5(%ebp,%edi,8),%mm5
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm6
+	pxor	3(%ebp,%edi,8),%mm7
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	28(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm0
+	pxor	1(%ebp,%edi,8),%mm1
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm3
+	pxor	7(%ebp,%edi,8),%mm4
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	32(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm5
+	pxor	5(%ebp,%edi,8),%mm6
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm7
+	pxor	3(%ebp,%edi,8),%mm0
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	36(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm1
+	pxor	1(%ebp,%edi,8),%mm2
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm4
+	pxor	7(%ebp,%edi,8),%mm5
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	40(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm6
+	pxor	5(%ebp,%edi,8),%mm7
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm0
+	pxor	3(%ebp,%edi,8),%mm1
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	44(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm2
+	pxor	1(%ebp,%edi,8),%mm3
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm5
+	pxor	7(%ebp,%edi,8),%mm6
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	48(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm7
+	pxor	5(%ebp,%edi,8),%mm0
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm1
+	pxor	3(%ebp,%edi,8),%mm2
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	52(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm3
+	pxor	1(%ebp,%edi,8),%mm4
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm6
+	pxor	7(%ebp,%edi,8),%mm7
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	56(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm0
+	pxor	5(%ebp,%edi,8),%mm1
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm2
+	pxor	3(%ebp,%edi,8),%mm3
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	60(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm4
+	pxor	1(%ebp,%edi,8),%mm5
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm7
+	pxor	7(%ebp,%edi,8),%mm0
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	64(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm1
+	pxor	5(%ebp,%edi,8),%mm2
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm3
+	pxor	3(%ebp,%edi,8),%mm4
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	68(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm5
+	pxor	1(%ebp,%edi,8),%mm6
+	movq	%mm0,(%esp)
+	movq	%mm1,8(%esp)
+	movq	%mm2,16(%esp)
+	movq	%mm3,24(%esp)
+	movq	%mm4,32(%esp)
+	movq	%mm5,40(%esp)
+	movq	%mm6,48(%esp)
+	movq	%mm7,56(%esp)
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm0
+	pxor	7(%ebp,%edi,8),%mm1
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	72(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm2
+	pxor	5(%ebp,%edi,8),%mm3
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm4
+	pxor	3(%ebp,%edi,8),%mm5
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	76(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm6
+	pxor	1(%ebp,%edi,8),%mm7
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm1
+	pxor	7(%ebp,%edi,8),%mm2
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	80(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm3
+	pxor	5(%ebp,%edi,8),%mm4
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm5
+	pxor	3(%ebp,%edi,8),%mm6
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	84(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm7
+	pxor	1(%ebp,%edi,8),%mm0
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm2
+	pxor	7(%ebp,%edi,8),%mm3
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	88(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm4
+	pxor	5(%ebp,%edi,8),%mm5
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm6
+	pxor	3(%ebp,%edi,8),%mm7
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	92(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm0
+	pxor	1(%ebp,%edi,8),%mm1
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm3
+	pxor	7(%ebp,%edi,8),%mm4
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	96(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm5
+	pxor	5(%ebp,%edi,8),%mm6
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm7
+	pxor	3(%ebp,%edi,8),%mm0
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	100(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm1
+	pxor	1(%ebp,%edi,8),%mm2
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm4
+	pxor	7(%ebp,%edi,8),%mm5
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	104(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm6
+	pxor	5(%ebp,%edi,8),%mm7
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm0
+	pxor	3(%ebp,%edi,8),%mm1
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	108(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm2
+	pxor	1(%ebp,%edi,8),%mm3
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm5
+	pxor	7(%ebp,%edi,8),%mm6
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	112(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm7
+	pxor	5(%ebp,%edi,8),%mm0
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm1
+	pxor	3(%ebp,%edi,8),%mm2
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	116(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm3
+	pxor	1(%ebp,%edi,8),%mm4
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm6
+	pxor	7(%ebp,%edi,8),%mm7
+	movb	%al,%cl
+	movb	%ah,%dl
+	movl	120(%esp),%eax
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm0
+	pxor	5(%ebp,%edi,8),%mm1
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm2
+	pxor	3(%ebp,%edi,8),%mm3
+	movb	%bl,%cl
+	movb	%bh,%dl
+	movl	124(%esp),%ebx
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm4
+	pxor	1(%ebp,%edi,8),%mm5
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%eax
+	pxor	(%ebp,%esi,8),%mm7
+	pxor	7(%ebp,%edi,8),%mm0
+	movb	%al,%cl
+	movb	%ah,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	6(%ebp,%esi,8),%mm1
+	pxor	5(%ebp,%edi,8),%mm2
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	shrl	$16,%ebx
+	pxor	4(%ebp,%esi,8),%mm3
+	pxor	3(%ebp,%edi,8),%mm4
+	movb	%bl,%cl
+	movb	%bh,%dl
+	leal	(%ecx,%ecx,1),%esi
+	leal	(%edx,%edx,1),%edi
+	pxor	2(%ebp,%esi,8),%mm5
+	pxor	1(%ebp,%edi,8),%mm6
+	leal	128(%esp),%ebx
+	movl	12(%ebx),%esi
+	addl	$1,%esi
+	cmpl	$10,%esi
+	je	.L004roundsdone
+	movl	%esi,12(%ebx)
+	movq	%mm0,64(%esp)
+	movq	%mm1,72(%esp)
+	movq	%mm2,80(%esp)
+	movq	%mm3,88(%esp)
+	movq	%mm4,96(%esp)
+	movq	%mm5,104(%esp)
+	movq	%mm6,112(%esp)
+	movq	%mm7,120(%esp)
+	jmp	.L003round
+.align	16
+.L004roundsdone:
+	movl	(%ebx),%esi
+	movl	4(%ebx),%edi
+	movl	8(%ebx),%eax
+	pxor	(%edi),%mm0
+	pxor	8(%edi),%mm1
+	pxor	16(%edi),%mm2
+	pxor	24(%edi),%mm3
+	pxor	32(%edi),%mm4
+	pxor	40(%edi),%mm5
+	pxor	48(%edi),%mm6
+	pxor	56(%edi),%mm7
+	pxor	(%esi),%mm0
+	pxor	8(%esi),%mm1
+	pxor	16(%esi),%mm2
+	pxor	24(%esi),%mm3
+	pxor	32(%esi),%mm4
+	pxor	40(%esi),%mm5
+	pxor	48(%esi),%mm6
+	pxor	56(%esi),%mm7
+	movq	%mm0,(%esi)
+	movq	%mm1,8(%esi)
+	movq	%mm2,16(%esi)
+	movq	%mm3,24(%esi)
+	movq	%mm4,32(%esi)
+	movq	%mm5,40(%esi)
+	movq	%mm6,48(%esi)
+	movq	%mm7,56(%esi)
+	leal	64(%edi),%edi
+	subl	$1,%eax
+	jz	.L005alldone
+	movl	%edi,4(%ebx)
+	movl	%eax,8(%ebx)
+	jmp	.L002outerloop
+.L005alldone:
+	emms
+	movl	16(%ebx),%esp
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.align	64
+.L001table:
+.byte	24,24,96,24,192,120,48,216
+.byte	24,24,96,24,192,120,48,216
+.byte	35,35,140,35,5,175,70,38
+.byte	35,35,140,35,5,175,70,38
+.byte	198,198,63,198,126,249,145,184
+.byte	198,198,63,198,126,249,145,184
+.byte	232,232,135,232,19,111,205,251
+.byte	232,232,135,232,19,111,205,251
+.byte	135,135,38,135,76,161,19,203
+.byte	135,135,38,135,76,161,19,203
+.byte	184,184,218,184,169,98,109,17
+.byte	184,184,218,184,169,98,109,17
+.byte	1,1,4,1,8,5,2,9
+.byte	1,1,4,1,8,5,2,9
+.byte	79,79,33,79,66,110,158,13
+.byte	79,79,33,79,66,110,158,13
+.byte	54,54,216,54,173,238,108,155
+.byte	54,54,216,54,173,238,108,155
+.byte	166,166,162,166,89,4,81,255
+.byte	166,166,162,166,89,4,81,255
+.byte	210,210,111,210,222,189,185,12
+.byte	210,210,111,210,222,189,185,12
+.byte	245,245,243,245,251,6,247,14
+.byte	245,245,243,245,251,6,247,14
+.byte	121,121,249,121,239,128,242,150
+.byte	121,121,249,121,239,128,242,150
+.byte	111,111,161,111,95,206,222,48
+.byte	111,111,161,111,95,206,222,48
+.byte	145,145,126,145,252,239,63,109
+.byte	145,145,126,145,252,239,63,109
+.byte	82,82,85,82,170,7,164,248
+.byte	82,82,85,82,170,7,164,248
+.byte	96,96,157,96,39,253,192,71
+.byte	96,96,157,96,39,253,192,71
+.byte	188,188,202,188,137,118,101,53
+.byte	188,188,202,188,137,118,101,53
+.byte	155,155,86,155,172,205,43,55
+.byte	155,155,86,155,172,205,43,55
+.byte	142,142,2,142,4,140,1,138
+.byte	142,142,2,142,4,140,1,138
+.byte	163,163,182,163,113,21,91,210
+.byte	163,163,182,163,113,21,91,210
+.byte	12,12,48,12,96,60,24,108
+.byte	12,12,48,12,96,60,24,108
+.byte	123,123,241,123,255,138,246,132
+.byte	123,123,241,123,255,138,246,132
+.byte	53,53,212,53,181,225,106,128
+.byte	53,53,212,53,181,225,106,128
+.byte	29,29,116,29,232,105,58,245
+.byte	29,29,116,29,232,105,58,245
+.byte	224,224,167,224,83,71,221,179
+.byte	224,224,167,224,83,71,221,179
+.byte	215,215,123,215,246,172,179,33
+.byte	215,215,123,215,246,172,179,33
+.byte	194,194,47,194,94,237,153,156
+.byte	194,194,47,194,94,237,153,156
+.byte	46,46,184,46,109,150,92,67
+.byte	46,46,184,46,109,150,92,67
+.byte	75,75,49,75,98,122,150,41
+.byte	75,75,49,75,98,122,150,41
+.byte	254,254,223,254,163,33,225,93
+.byte	254,254,223,254,163,33,225,93
+.byte	87,87,65,87,130,22,174,213
+.byte	87,87,65,87,130,22,174,213
+.byte	21,21,84,21,168,65,42,189
+.byte	21,21,84,21,168,65,42,189
+.byte	119,119,193,119,159,182,238,232
+.byte	119,119,193,119,159,182,238,232
+.byte	55,55,220,55,165,235,110,146
+.byte	55,55,220,55,165,235,110,146
+.byte	229,229,179,229,123,86,215,158
+.byte	229,229,179,229,123,86,215,158
+.byte	159,159,70,159,140,217,35,19
+.byte	159,159,70,159,140,217,35,19
+.byte	240,240,231,240,211,23,253,35
+.byte	240,240,231,240,211,23,253,35
+.byte	74,74,53,74,106,127,148,32
+.byte	74,74,53,74,106,127,148,32
+.byte	218,218,79,218,158,149,169,68
+.byte	218,218,79,218,158,149,169,68
+.byte	88,88,125,88,250,37,176,162
+.byte	88,88,125,88,250,37,176,162
+.byte	201,201,3,201,6,202,143,207
+.byte	201,201,3,201,6,202,143,207
+.byte	41,41,164,41,85,141,82,124
+.byte	41,41,164,41,85,141,82,124
+.byte	10,10,40,10,80,34,20,90
+.byte	10,10,40,10,80,34,20,90
+.byte	177,177,254,177,225,79,127,80
+.byte	177,177,254,177,225,79,127,80
+.byte	160,160,186,160,105,26,93,201
+.byte	160,160,186,160,105,26,93,201
+.byte	107,107,177,107,127,218,214,20
+.byte	107,107,177,107,127,218,214,20
+.byte	133,133,46,133,92,171,23,217
+.byte	133,133,46,133,92,171,23,217
+.byte	189,189,206,189,129,115,103,60
+.byte	189,189,206,189,129,115,103,60
+.byte	93,93,105,93,210,52,186,143
+.byte	93,93,105,93,210,52,186,143
+.byte	16,16,64,16,128,80,32,144
+.byte	16,16,64,16,128,80,32,144
+.byte	244,244,247,244,243,3,245,7
+.byte	244,244,247,244,243,3,245,7
+.byte	203,203,11,203,22,192,139,221
+.byte	203,203,11,203,22,192,139,221
+.byte	62,62,248,62,237,198,124,211
+.byte	62,62,248,62,237,198,124,211
+.byte	5,5,20,5,40,17,10,45
+.byte	5,5,20,5,40,17,10,45
+.byte	103,103,129,103,31,230,206,120
+.byte	103,103,129,103,31,230,206,120
+.byte	228,228,183,228,115,83,213,151
+.byte	228,228,183,228,115,83,213,151
+.byte	39,39,156,39,37,187,78,2
+.byte	39,39,156,39,37,187,78,2
+.byte	65,65,25,65,50,88,130,115
+.byte	65,65,25,65,50,88,130,115
+.byte	139,139,22,139,44,157,11,167
+.byte	139,139,22,139,44,157,11,167
+.byte	167,167,166,167,81,1,83,246
+.byte	167,167,166,167,81,1,83,246
+.byte	125,125,233,125,207,148,250,178
+.byte	125,125,233,125,207,148,250,178
+.byte	149,149,110,149,220,251,55,73
+.byte	149,149,110,149,220,251,55,73
+.byte	216,216,71,216,142,159,173,86
+.byte	216,216,71,216,142,159,173,86
+.byte	251,251,203,251,139,48,235,112
+.byte	251,251,203,251,139,48,235,112
+.byte	238,238,159,238,35,113,193,205
+.byte	238,238,159,238,35,113,193,205
+.byte	124,124,237,124,199,145,248,187
+.byte	124,124,237,124,199,145,248,187
+.byte	102,102,133,102,23,227,204,113
+.byte	102,102,133,102,23,227,204,113
+.byte	221,221,83,221,166,142,167,123
+.byte	221,221,83,221,166,142,167,123
+.byte	23,23,92,23,184,75,46,175
+.byte	23,23,92,23,184,75,46,175
+.byte	71,71,1,71,2,70,142,69
+.byte	71,71,1,71,2,70,142,69
+.byte	158,158,66,158,132,220,33,26
+.byte	158,158,66,158,132,220,33,26
+.byte	202,202,15,202,30,197,137,212
+.byte	202,202,15,202,30,197,137,212
+.byte	45,45,180,45,117,153,90,88
+.byte	45,45,180,45,117,153,90,88
+.byte	191,191,198,191,145,121,99,46
+.byte	191,191,198,191,145,121,99,46
+.byte	7,7,28,7,56,27,14,63
+.byte	7,7,28,7,56,27,14,63
+.byte	173,173,142,173,1,35,71,172
+.byte	173,173,142,173,1,35,71,172
+.byte	90,90,117,90,234,47,180,176
+.byte	90,90,117,90,234,47,180,176
+.byte	131,131,54,131,108,181,27,239
+.byte	131,131,54,131,108,181,27,239
+.byte	51,51,204,51,133,255,102,182
+.byte	51,51,204,51,133,255,102,182
+.byte	99,99,145,99,63,242,198,92
+.byte	99,99,145,99,63,242,198,92
+.byte	2,2,8,2,16,10,4,18
+.byte	2,2,8,2,16,10,4,18
+.byte	170,170,146,170,57,56,73,147
+.byte	170,170,146,170,57,56,73,147
+.byte	113,113,217,113,175,168,226,222
+.byte	113,113,217,113,175,168,226,222
+.byte	200,200,7,200,14,207,141,198
+.byte	200,200,7,200,14,207,141,198
+.byte	25,25,100,25,200,125,50,209
+.byte	25,25,100,25,200,125,50,209
+.byte	73,73,57,73,114,112,146,59
+.byte	73,73,57,73,114,112,146,59
+.byte	217,217,67,217,134,154,175,95
+.byte	217,217,67,217,134,154,175,95
+.byte	242,242,239,242,195,29,249,49
+.byte	242,242,239,242,195,29,249,49
+.byte	227,227,171,227,75,72,219,168
+.byte	227,227,171,227,75,72,219,168
+.byte	91,91,113,91,226,42,182,185
+.byte	91,91,113,91,226,42,182,185
+.byte	136,136,26,136,52,146,13,188
+.byte	136,136,26,136,52,146,13,188
+.byte	154,154,82,154,164,200,41,62
+.byte	154,154,82,154,164,200,41,62
+.byte	38,38,152,38,45,190,76,11
+.byte	38,38,152,38,45,190,76,11
+.byte	50,50,200,50,141,250,100,191
+.byte	50,50,200,50,141,250,100,191
+.byte	176,176,250,176,233,74,125,89
+.byte	176,176,250,176,233,74,125,89
+.byte	233,233,131,233,27,106,207,242
+.byte	233,233,131,233,27,106,207,242
+.byte	15,15,60,15,120,51,30,119
+.byte	15,15,60,15,120,51,30,119
+.byte	213,213,115,213,230,166,183,51
+.byte	213,213,115,213,230,166,183,51
+.byte	128,128,58,128,116,186,29,244
+.byte	128,128,58,128,116,186,29,244
+.byte	190,190,194,190,153,124,97,39
+.byte	190,190,194,190,153,124,97,39
+.byte	205,205,19,205,38,222,135,235
+.byte	205,205,19,205,38,222,135,235
+.byte	52,52,208,52,189,228,104,137
+.byte	52,52,208,52,189,228,104,137
+.byte	72,72,61,72,122,117,144,50
+.byte	72,72,61,72,122,117,144,50
+.byte	255,255,219,255,171,36,227,84
+.byte	255,255,219,255,171,36,227,84
+.byte	122,122,245,122,247,143,244,141
+.byte	122,122,245,122,247,143,244,141
+.byte	144,144,122,144,244,234,61,100
+.byte	144,144,122,144,244,234,61,100
+.byte	95,95,97,95,194,62,190,157
+.byte	95,95,97,95,194,62,190,157
+.byte	32,32,128,32,29,160,64,61
+.byte	32,32,128,32,29,160,64,61
+.byte	104,104,189,104,103,213,208,15
+.byte	104,104,189,104,103,213,208,15
+.byte	26,26,104,26,208,114,52,202
+.byte	26,26,104,26,208,114,52,202
+.byte	174,174,130,174,25,44,65,183
+.byte	174,174,130,174,25,44,65,183
+.byte	180,180,234,180,201,94,117,125
+.byte	180,180,234,180,201,94,117,125
+.byte	84,84,77,84,154,25,168,206
+.byte	84,84,77,84,154,25,168,206
+.byte	147,147,118,147,236,229,59,127
+.byte	147,147,118,147,236,229,59,127
+.byte	34,34,136,34,13,170,68,47
+.byte	34,34,136,34,13,170,68,47
+.byte	100,100,141,100,7,233,200,99
+.byte	100,100,141,100,7,233,200,99
+.byte	241,241,227,241,219,18,255,42
+.byte	241,241,227,241,219,18,255,42
+.byte	115,115,209,115,191,162,230,204
+.byte	115,115,209,115,191,162,230,204
+.byte	18,18,72,18,144,90,36,130
+.byte	18,18,72,18,144,90,36,130
+.byte	64,64,29,64,58,93,128,122
+.byte	64,64,29,64,58,93,128,122
+.byte	8,8,32,8,64,40,16,72
+.byte	8,8,32,8,64,40,16,72
+.byte	195,195,43,195,86,232,155,149
+.byte	195,195,43,195,86,232,155,149
+.byte	236,236,151,236,51,123,197,223
+.byte	236,236,151,236,51,123,197,223
+.byte	219,219,75,219,150,144,171,77
+.byte	219,219,75,219,150,144,171,77
+.byte	161,161,190,161,97,31,95,192
+.byte	161,161,190,161,97,31,95,192
+.byte	141,141,14,141,28,131,7,145
+.byte	141,141,14,141,28,131,7,145
+.byte	61,61,244,61,245,201,122,200
+.byte	61,61,244,61,245,201,122,200
+.byte	151,151,102,151,204,241,51,91
+.byte	151,151,102,151,204,241,51,91
+.byte	0,0,0,0,0,0,0,0
+.byte	0,0,0,0,0,0,0,0
+.byte	207,207,27,207,54,212,131,249
+.byte	207,207,27,207,54,212,131,249
+.byte	43,43,172,43,69,135,86,110
+.byte	43,43,172,43,69,135,86,110
+.byte	118,118,197,118,151,179,236,225
+.byte	118,118,197,118,151,179,236,225
+.byte	130,130,50,130,100,176,25,230
+.byte	130,130,50,130,100,176,25,230
+.byte	214,214,127,214,254,169,177,40
+.byte	214,214,127,214,254,169,177,40
+.byte	27,27,108,27,216,119,54,195
+.byte	27,27,108,27,216,119,54,195
+.byte	181,181,238,181,193,91,119,116
+.byte	181,181,238,181,193,91,119,116
+.byte	175,175,134,175,17,41,67,190
+.byte	175,175,134,175,17,41,67,190
+.byte	106,106,181,106,119,223,212,29
+.byte	106,106,181,106,119,223,212,29
+.byte	80,80,93,80,186,13,160,234
+.byte	80,80,93,80,186,13,160,234
+.byte	69,69,9,69,18,76,138,87
+.byte	69,69,9,69,18,76,138,87
+.byte	243,243,235,243,203,24,251,56
+.byte	243,243,235,243,203,24,251,56
+.byte	48,48,192,48,157,240,96,173
+.byte	48,48,192,48,157,240,96,173
+.byte	239,239,155,239,43,116,195,196
+.byte	239,239,155,239,43,116,195,196
+.byte	63,63,252,63,229,195,126,218
+.byte	63,63,252,63,229,195,126,218
+.byte	85,85,73,85,146,28,170,199
+.byte	85,85,73,85,146,28,170,199
+.byte	162,162,178,162,121,16,89,219
+.byte	162,162,178,162,121,16,89,219
+.byte	234,234,143,234,3,101,201,233
+.byte	234,234,143,234,3,101,201,233
+.byte	101,101,137,101,15,236,202,106
+.byte	101,101,137,101,15,236,202,106
+.byte	186,186,210,186,185,104,105,3
+.byte	186,186,210,186,185,104,105,3
+.byte	47,47,188,47,101,147,94,74
+.byte	47,47,188,47,101,147,94,74
+.byte	192,192,39,192,78,231,157,142
+.byte	192,192,39,192,78,231,157,142
+.byte	222,222,95,222,190,129,161,96
+.byte	222,222,95,222,190,129,161,96
+.byte	28,28,112,28,224,108,56,252
+.byte	28,28,112,28,224,108,56,252
+.byte	253,253,211,253,187,46,231,70
+.byte	253,253,211,253,187,46,231,70
+.byte	77,77,41,77,82,100,154,31
+.byte	77,77,41,77,82,100,154,31
+.byte	146,146,114,146,228,224,57,118
+.byte	146,146,114,146,228,224,57,118
+.byte	117,117,201,117,143,188,234,250
+.byte	117,117,201,117,143,188,234,250
+.byte	6,6,24,6,48,30,12,54
+.byte	6,6,24,6,48,30,12,54
+.byte	138,138,18,138,36,152,9,174
+.byte	138,138,18,138,36,152,9,174
+.byte	178,178,242,178,249,64,121,75
+.byte	178,178,242,178,249,64,121,75
+.byte	230,230,191,230,99,89,209,133
+.byte	230,230,191,230,99,89,209,133
+.byte	14,14,56,14,112,54,28,126
+.byte	14,14,56,14,112,54,28,126
+.byte	31,31,124,31,248,99,62,231
+.byte	31,31,124,31,248,99,62,231
+.byte	98,98,149,98,55,247,196,85
+.byte	98,98,149,98,55,247,196,85
+.byte	212,212,119,212,238,163,181,58
+.byte	212,212,119,212,238,163,181,58
+.byte	168,168,154,168,41,50,77,129
+.byte	168,168,154,168,41,50,77,129
+.byte	150,150,98,150,196,244,49,82
+.byte	150,150,98,150,196,244,49,82
+.byte	249,249,195,249,155,58,239,98
+.byte	249,249,195,249,155,58,239,98
+.byte	197,197,51,197,102,246,151,163
+.byte	197,197,51,197,102,246,151,163
+.byte	37,37,148,37,53,177,74,16
+.byte	37,37,148,37,53,177,74,16
+.byte	89,89,121,89,242,32,178,171
+.byte	89,89,121,89,242,32,178,171
+.byte	132,132,42,132,84,174,21,208
+.byte	132,132,42,132,84,174,21,208
+.byte	114,114,213,114,183,167,228,197
+.byte	114,114,213,114,183,167,228,197
+.byte	57,57,228,57,213,221,114,236
+.byte	57,57,228,57,213,221,114,236
+.byte	76,76,45,76,90,97,152,22
+.byte	76,76,45,76,90,97,152,22
+.byte	94,94,101,94,202,59,188,148
+.byte	94,94,101,94,202,59,188,148
+.byte	120,120,253,120,231,133,240,159
+.byte	120,120,253,120,231,133,240,159
+.byte	56,56,224,56,221,216,112,229
+.byte	56,56,224,56,221,216,112,229
+.byte	140,140,10,140,20,134,5,152
+.byte	140,140,10,140,20,134,5,152
+.byte	209,209,99,209,198,178,191,23
+.byte	209,209,99,209,198,178,191,23
+.byte	165,165,174,165,65,11,87,228
+.byte	165,165,174,165,65,11,87,228
+.byte	226,226,175,226,67,77,217,161
+.byte	226,226,175,226,67,77,217,161
+.byte	97,97,153,97,47,248,194,78
+.byte	97,97,153,97,47,248,194,78
+.byte	179,179,246,179,241,69,123,66
+.byte	179,179,246,179,241,69,123,66
+.byte	33,33,132,33,21,165,66,52
+.byte	33,33,132,33,21,165,66,52
+.byte	156,156,74,156,148,214,37,8
+.byte	156,156,74,156,148,214,37,8
+.byte	30,30,120,30,240,102,60,238
+.byte	30,30,120,30,240,102,60,238
+.byte	67,67,17,67,34,82,134,97
+.byte	67,67,17,67,34,82,134,97
+.byte	199,199,59,199,118,252,147,177
+.byte	199,199,59,199,118,252,147,177
+.byte	252,252,215,252,179,43,229,79
+.byte	252,252,215,252,179,43,229,79
+.byte	4,4,16,4,32,20,8,36
+.byte	4,4,16,4,32,20,8,36
+.byte	81,81,89,81,178,8,162,227
+.byte	81,81,89,81,178,8,162,227
+.byte	153,153,94,153,188,199,47,37
+.byte	153,153,94,153,188,199,47,37
+.byte	109,109,169,109,79,196,218,34
+.byte	109,109,169,109,79,196,218,34
+.byte	13,13,52,13,104,57,26,101
+.byte	13,13,52,13,104,57,26,101
+.byte	250,250,207,250,131,53,233,121
+.byte	250,250,207,250,131,53,233,121
+.byte	223,223,91,223,182,132,163,105
+.byte	223,223,91,223,182,132,163,105
+.byte	126,126,229,126,215,155,252,169
+.byte	126,126,229,126,215,155,252,169
+.byte	36,36,144,36,61,180,72,25
+.byte	36,36,144,36,61,180,72,25
+.byte	59,59,236,59,197,215,118,254
+.byte	59,59,236,59,197,215,118,254
+.byte	171,171,150,171,49,61,75,154
+.byte	171,171,150,171,49,61,75,154
+.byte	206,206,31,206,62,209,129,240
+.byte	206,206,31,206,62,209,129,240
+.byte	17,17,68,17,136,85,34,153
+.byte	17,17,68,17,136,85,34,153
+.byte	143,143,6,143,12,137,3,131
+.byte	143,143,6,143,12,137,3,131
+.byte	78,78,37,78,74,107,156,4
+.byte	78,78,37,78,74,107,156,4
+.byte	183,183,230,183,209,81,115,102
+.byte	183,183,230,183,209,81,115,102
+.byte	235,235,139,235,11,96,203,224
+.byte	235,235,139,235,11,96,203,224
+.byte	60,60,240,60,253,204,120,193
+.byte	60,60,240,60,253,204,120,193
+.byte	129,129,62,129,124,191,31,253
+.byte	129,129,62,129,124,191,31,253
+.byte	148,148,106,148,212,254,53,64
+.byte	148,148,106,148,212,254,53,64
+.byte	247,247,251,247,235,12,243,28
+.byte	247,247,251,247,235,12,243,28
+.byte	185,185,222,185,161,103,111,24
+.byte	185,185,222,185,161,103,111,24
+.byte	19,19,76,19,152,95,38,139
+.byte	19,19,76,19,152,95,38,139
+.byte	44,44,176,44,125,156,88,81
+.byte	44,44,176,44,125,156,88,81
+.byte	211,211,107,211,214,184,187,5
+.byte	211,211,107,211,214,184,187,5
+.byte	231,231,187,231,107,92,211,140
+.byte	231,231,187,231,107,92,211,140
+.byte	110,110,165,110,87,203,220,57
+.byte	110,110,165,110,87,203,220,57
+.byte	196,196,55,196,110,243,149,170
+.byte	196,196,55,196,110,243,149,170
+.byte	3,3,12,3,24,15,6,27
+.byte	3,3,12,3,24,15,6,27
+.byte	86,86,69,86,138,19,172,220
+.byte	86,86,69,86,138,19,172,220
+.byte	68,68,13,68,26,73,136,94
+.byte	68,68,13,68,26,73,136,94
+.byte	127,127,225,127,223,158,254,160
+.byte	127,127,225,127,223,158,254,160
+.byte	169,169,158,169,33,55,79,136
+.byte	169,169,158,169,33,55,79,136
+.byte	42,42,168,42,77,130,84,103
+.byte	42,42,168,42,77,130,84,103
+.byte	187,187,214,187,177,109,107,10
+.byte	187,187,214,187,177,109,107,10
+.byte	193,193,35,193,70,226,159,135
+.byte	193,193,35,193,70,226,159,135
+.byte	83,83,81,83,162,2,166,241
+.byte	83,83,81,83,162,2,166,241
+.byte	220,220,87,220,174,139,165,114
+.byte	220,220,87,220,174,139,165,114
+.byte	11,11,44,11,88,39,22,83
+.byte	11,11,44,11,88,39,22,83
+.byte	157,157,78,157,156,211,39,1
+.byte	157,157,78,157,156,211,39,1
+.byte	108,108,173,108,71,193,216,43
+.byte	108,108,173,108,71,193,216,43
+.byte	49,49,196,49,149,245,98,164
+.byte	49,49,196,49,149,245,98,164
+.byte	116,116,205,116,135,185,232,243
+.byte	116,116,205,116,135,185,232,243
+.byte	246,246,255,246,227,9,241,21
+.byte	246,246,255,246,227,9,241,21
+.byte	70,70,5,70,10,67,140,76
+.byte	70,70,5,70,10,67,140,76
+.byte	172,172,138,172,9,38,69,165
+.byte	172,172,138,172,9,38,69,165
+.byte	137,137,30,137,60,151,15,181
+.byte	137,137,30,137,60,151,15,181
+.byte	20,20,80,20,160,68,40,180
+.byte	20,20,80,20,160,68,40,180
+.byte	225,225,163,225,91,66,223,186
+.byte	225,225,163,225,91,66,223,186
+.byte	22,22,88,22,176,78,44,166
+.byte	22,22,88,22,176,78,44,166
+.byte	58,58,232,58,205,210,116,247
+.byte	58,58,232,58,205,210,116,247
+.byte	105,105,185,105,111,208,210,6
+.byte	105,105,185,105,111,208,210,6
+.byte	9,9,36,9,72,45,18,65
+.byte	9,9,36,9,72,45,18,65
+.byte	112,112,221,112,167,173,224,215
+.byte	112,112,221,112,167,173,224,215
+.byte	182,182,226,182,217,84,113,111
+.byte	182,182,226,182,217,84,113,111
+.byte	208,208,103,208,206,183,189,30
+.byte	208,208,103,208,206,183,189,30
+.byte	237,237,147,237,59,126,199,214
+.byte	237,237,147,237,59,126,199,214
+.byte	204,204,23,204,46,219,133,226
+.byte	204,204,23,204,46,219,133,226
+.byte	66,66,21,66,42,87,132,104
+.byte	66,66,21,66,42,87,132,104
+.byte	152,152,90,152,180,194,45,44
+.byte	152,152,90,152,180,194,45,44
+.byte	164,164,170,164,73,14,85,237
+.byte	164,164,170,164,73,14,85,237
+.byte	40,40,160,40,93,136,80,117
+.byte	40,40,160,40,93,136,80,117
+.byte	92,92,109,92,218,49,184,134
+.byte	92,92,109,92,218,49,184,134
+.byte	248,248,199,248,147,63,237,107
+.byte	248,248,199,248,147,63,237,107
+.byte	134,134,34,134,68,164,17,194
+.byte	134,134,34,134,68,164,17,194
+.byte	24,35,198,232,135,184,1,79
+.byte	54,166,210,245,121,111,145,82
+.byte	96,188,155,142,163,12,123,53
+.byte	29,224,215,194,46,75,254,87
+.byte	21,119,55,229,159,240,74,218
+.byte	88,201,41,10,177,160,107,133
+.byte	189,93,16,244,203,62,5,103
+.byte	228,39,65,139,167,125,149,216
+.byte	251,238,124,102,221,23,71,158
+.byte	202,45,191,7,173,90,131,51
+.size	whirlpool_block_mmx,.-.L_whirlpool_block_mmx_begin
+#endif


Property changes on: trunk/secure/lib/libcrypto/i386/wp-mmx.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
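
Note: the doubled .byte rows above are deliberate. whirlpool_block_mmx
keeps one 256-entry table of 8-byte circulant rows and stores each row
twice, so the misaligned loads in the round loop (movq 7(%ebp,%edi,8)
and friends, with the byte index doubled via leal (%ecx,%ecx,1)) pick up
a byte-rotated copy of the row for free; the eight rotations of one row
stand in for eight separate tables, and the ten single 8-byte rows after
the doubled block are the round constants fetched from offset 4096. A
minimal C sketch of the same round structure, assuming a base table C0
holding the first copy of each doubled row, with all helper names
invented for illustration:

	#include <stdint.h>

	static inline uint64_t rotr64(uint64_t w, unsigned r)
	{
		return (w >> r) | (w << ((64 - r) & 63));
	}

	/* One table-driven round: lane i of the new state XORs in one
	 * table row per input byte, rotated by the byte's position.
	 * Byte numbering follows the little-endian table layout. */
	static void wp_round(uint64_t out[8], const uint64_t in[8],
	                     const uint64_t C0[256])
	{
		for (int i = 0; i < 8; i++) {
			uint64_t L = 0;
			for (int j = 0; j < 8; j++) {
				uint8_t b = (uint8_t)(in[(i + 8 - j) & 7] >> (8 * j));
				L ^= rotr64(C0[b], (unsigned)(8 * j));
			}
			out[i] = L;
		}
	}

The assembly trades 2KB of extra table for not having to shift or
rotate anything inside the loop.
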
Deleted: trunk/secure/lib/libcrypto/i386/wp-mmx.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/wp-mmx.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/wp-mmx.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,1106 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/wp-mmx.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"wp-mmx.s"
-.text
-.globl	whirlpool_block_mmx
-.type	whirlpool_block_mmx,@function
-.align	16
-whirlpool_block_mmx:
-.L_whirlpool_block_mmx_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	20(%esp),%esi
-	movl	24(%esp),%edi
-	movl	28(%esp),%ebp
-	movl	%esp,%eax
-	subl	$148,%esp
-	andl	$-64,%esp
-	leal	128(%esp),%ebx
-	movl	%esi,(%ebx)
-	movl	%edi,4(%ebx)
-	movl	%ebp,8(%ebx)
-	movl	%eax,16(%ebx)
-	call	.L000pic_point
-.L000pic_point:
-	popl	%ebp
-	leal	.L001table-.L000pic_point(%ebp),%ebp
-	xorl	%ecx,%ecx
-	xorl	%edx,%edx
-	movq	(%esi),%mm0
-	movq	8(%esi),%mm1
-	movq	16(%esi),%mm2
-	movq	24(%esi),%mm3
-	movq	32(%esi),%mm4
-	movq	40(%esi),%mm5
-	movq	48(%esi),%mm6
-	movq	56(%esi),%mm7
-.L002outerloop:
-	movq	%mm0,(%esp)
-	movq	%mm1,8(%esp)
-	movq	%mm2,16(%esp)
-	movq	%mm3,24(%esp)
-	movq	%mm4,32(%esp)
-	movq	%mm5,40(%esp)
-	movq	%mm6,48(%esp)
-	movq	%mm7,56(%esp)
-	pxor	(%edi),%mm0
-	pxor	8(%edi),%mm1
-	pxor	16(%edi),%mm2
-	pxor	24(%edi),%mm3
-	pxor	32(%edi),%mm4
-	pxor	40(%edi),%mm5
-	pxor	48(%edi),%mm6
-	pxor	56(%edi),%mm7
-	movq	%mm0,64(%esp)
-	movq	%mm1,72(%esp)
-	movq	%mm2,80(%esp)
-	movq	%mm3,88(%esp)
-	movq	%mm4,96(%esp)
-	movq	%mm5,104(%esp)
-	movq	%mm6,112(%esp)
-	movq	%mm7,120(%esp)
-	xorl	%esi,%esi
-	movl	%esi,12(%ebx)
-.align	16
-.L003round:
-	movq	4096(%ebp,%esi,8),%mm0
-	movl	(%esp),%eax
-	movl	4(%esp),%ebx
-	movb	%al,%cl
-	movb	%ah,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%eax
-	pxor	(%ebp,%esi,8),%mm0
-	movq	7(%ebp,%edi,8),%mm1
-	movb	%al,%cl
-	movb	%ah,%dl
-	movl	8(%esp),%eax
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	movq	6(%ebp,%esi,8),%mm2
-	movq	5(%ebp,%edi,8),%mm3
-	movb	%bl,%cl
-	movb	%bh,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%ebx
-	movq	4(%ebp,%esi,8),%mm4
-	movq	3(%ebp,%edi,8),%mm5
-	movb	%bl,%cl
-	movb	%bh,%dl
-	movl	12(%esp),%ebx
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	movq	2(%ebp,%esi,8),%mm6
-	movq	1(%ebp,%edi,8),%mm7
-	movb	%al,%cl
-	movb	%ah,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%eax
-	pxor	(%ebp,%esi,8),%mm1
-	pxor	7(%ebp,%edi,8),%mm2
-	movb	%al,%cl
-	movb	%ah,%dl
-	movl	16(%esp),%eax
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	6(%ebp,%esi,8),%mm3
-	pxor	5(%ebp,%edi,8),%mm4
-	movb	%bl,%cl
-	movb	%bh,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%ebx
-	pxor	4(%ebp,%esi,8),%mm5
-	pxor	3(%ebp,%edi,8),%mm6
-	movb	%bl,%cl
-	movb	%bh,%dl
-	movl	20(%esp),%ebx
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	2(%ebp,%esi,8),%mm7
-	pxor	1(%ebp,%edi,8),%mm0
-	movb	%al,%cl
-	movb	%ah,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%eax
-	pxor	(%ebp,%esi,8),%mm2
-	pxor	7(%ebp,%edi,8),%mm3
-	movb	%al,%cl
-	movb	%ah,%dl
-	movl	24(%esp),%eax
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	6(%ebp,%esi,8),%mm4
-	pxor	5(%ebp,%edi,8),%mm5
-	movb	%bl,%cl
-	movb	%bh,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%ebx
-	pxor	4(%ebp,%esi,8),%mm6
-	pxor	3(%ebp,%edi,8),%mm7
-	movb	%bl,%cl
-	movb	%bh,%dl
-	movl	28(%esp),%ebx
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	2(%ebp,%esi,8),%mm0
-	pxor	1(%ebp,%edi,8),%mm1
-	movb	%al,%cl
-	movb	%ah,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%eax
-	pxor	(%ebp,%esi,8),%mm3
-	pxor	7(%ebp,%edi,8),%mm4
-	movb	%al,%cl
-	movb	%ah,%dl
-	movl	32(%esp),%eax
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	6(%ebp,%esi,8),%mm5
-	pxor	5(%ebp,%edi,8),%mm6
-	movb	%bl,%cl
-	movb	%bh,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%ebx
-	pxor	4(%ebp,%esi,8),%mm7
-	pxor	3(%ebp,%edi,8),%mm0
-	movb	%bl,%cl
-	movb	%bh,%dl
-	movl	36(%esp),%ebx
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	2(%ebp,%esi,8),%mm1
-	pxor	1(%ebp,%edi,8),%mm2
-	movb	%al,%cl
-	movb	%ah,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%eax
-	pxor	(%ebp,%esi,8),%mm4
-	pxor	7(%ebp,%edi,8),%mm5
-	movb	%al,%cl
-	movb	%ah,%dl
-	movl	40(%esp),%eax
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	6(%ebp,%esi,8),%mm6
-	pxor	5(%ebp,%edi,8),%mm7
-	movb	%bl,%cl
-	movb	%bh,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%ebx
-	pxor	4(%ebp,%esi,8),%mm0
-	pxor	3(%ebp,%edi,8),%mm1
-	movb	%bl,%cl
-	movb	%bh,%dl
-	movl	44(%esp),%ebx
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	2(%ebp,%esi,8),%mm2
-	pxor	1(%ebp,%edi,8),%mm3
-	movb	%al,%cl
-	movb	%ah,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%eax
-	pxor	(%ebp,%esi,8),%mm5
-	pxor	7(%ebp,%edi,8),%mm6
-	movb	%al,%cl
-	movb	%ah,%dl
-	movl	48(%esp),%eax
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	6(%ebp,%esi,8),%mm7
-	pxor	5(%ebp,%edi,8),%mm0
-	movb	%bl,%cl
-	movb	%bh,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%ebx
-	pxor	4(%ebp,%esi,8),%mm1
-	pxor	3(%ebp,%edi,8),%mm2
-	movb	%bl,%cl
-	movb	%bh,%dl
-	movl	52(%esp),%ebx
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	2(%ebp,%esi,8),%mm3
-	pxor	1(%ebp,%edi,8),%mm4
-	movb	%al,%cl
-	movb	%ah,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%eax
-	pxor	(%ebp,%esi,8),%mm6
-	pxor	7(%ebp,%edi,8),%mm7
-	movb	%al,%cl
-	movb	%ah,%dl
-	movl	56(%esp),%eax
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	6(%ebp,%esi,8),%mm0
-	pxor	5(%ebp,%edi,8),%mm1
-	movb	%bl,%cl
-	movb	%bh,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%ebx
-	pxor	4(%ebp,%esi,8),%mm2
-	pxor	3(%ebp,%edi,8),%mm3
-	movb	%bl,%cl
-	movb	%bh,%dl
-	movl	60(%esp),%ebx
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	2(%ebp,%esi,8),%mm4
-	pxor	1(%ebp,%edi,8),%mm5
-	movb	%al,%cl
-	movb	%ah,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%eax
-	pxor	(%ebp,%esi,8),%mm7
-	pxor	7(%ebp,%edi,8),%mm0
-	movb	%al,%cl
-	movb	%ah,%dl
-	movl	64(%esp),%eax
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	6(%ebp,%esi,8),%mm1
-	pxor	5(%ebp,%edi,8),%mm2
-	movb	%bl,%cl
-	movb	%bh,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%ebx
-	pxor	4(%ebp,%esi,8),%mm3
-	pxor	3(%ebp,%edi,8),%mm4
-	movb	%bl,%cl
-	movb	%bh,%dl
-	movl	68(%esp),%ebx
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	2(%ebp,%esi,8),%mm5
-	pxor	1(%ebp,%edi,8),%mm6
-	movq	%mm0,(%esp)
-	movq	%mm1,8(%esp)
-	movq	%mm2,16(%esp)
-	movq	%mm3,24(%esp)
-	movq	%mm4,32(%esp)
-	movq	%mm5,40(%esp)
-	movq	%mm6,48(%esp)
-	movq	%mm7,56(%esp)
-	movb	%al,%cl
-	movb	%ah,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%eax
-	pxor	(%ebp,%esi,8),%mm0
-	pxor	7(%ebp,%edi,8),%mm1
-	movb	%al,%cl
-	movb	%ah,%dl
-	movl	72(%esp),%eax
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	6(%ebp,%esi,8),%mm2
-	pxor	5(%ebp,%edi,8),%mm3
-	movb	%bl,%cl
-	movb	%bh,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%ebx
-	pxor	4(%ebp,%esi,8),%mm4
-	pxor	3(%ebp,%edi,8),%mm5
-	movb	%bl,%cl
-	movb	%bh,%dl
-	movl	76(%esp),%ebx
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	2(%ebp,%esi,8),%mm6
-	pxor	1(%ebp,%edi,8),%mm7
-	movb	%al,%cl
-	movb	%ah,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%eax
-	pxor	(%ebp,%esi,8),%mm1
-	pxor	7(%ebp,%edi,8),%mm2
-	movb	%al,%cl
-	movb	%ah,%dl
-	movl	80(%esp),%eax
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	6(%ebp,%esi,8),%mm3
-	pxor	5(%ebp,%edi,8),%mm4
-	movb	%bl,%cl
-	movb	%bh,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%ebx
-	pxor	4(%ebp,%esi,8),%mm5
-	pxor	3(%ebp,%edi,8),%mm6
-	movb	%bl,%cl
-	movb	%bh,%dl
-	movl	84(%esp),%ebx
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	2(%ebp,%esi,8),%mm7
-	pxor	1(%ebp,%edi,8),%mm0
-	movb	%al,%cl
-	movb	%ah,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%eax
-	pxor	(%ebp,%esi,8),%mm2
-	pxor	7(%ebp,%edi,8),%mm3
-	movb	%al,%cl
-	movb	%ah,%dl
-	movl	88(%esp),%eax
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	6(%ebp,%esi,8),%mm4
-	pxor	5(%ebp,%edi,8),%mm5
-	movb	%bl,%cl
-	movb	%bh,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%ebx
-	pxor	4(%ebp,%esi,8),%mm6
-	pxor	3(%ebp,%edi,8),%mm7
-	movb	%bl,%cl
-	movb	%bh,%dl
-	movl	92(%esp),%ebx
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	2(%ebp,%esi,8),%mm0
-	pxor	1(%ebp,%edi,8),%mm1
-	movb	%al,%cl
-	movb	%ah,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%eax
-	pxor	(%ebp,%esi,8),%mm3
-	pxor	7(%ebp,%edi,8),%mm4
-	movb	%al,%cl
-	movb	%ah,%dl
-	movl	96(%esp),%eax
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	6(%ebp,%esi,8),%mm5
-	pxor	5(%ebp,%edi,8),%mm6
-	movb	%bl,%cl
-	movb	%bh,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%ebx
-	pxor	4(%ebp,%esi,8),%mm7
-	pxor	3(%ebp,%edi,8),%mm0
-	movb	%bl,%cl
-	movb	%bh,%dl
-	movl	100(%esp),%ebx
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	2(%ebp,%esi,8),%mm1
-	pxor	1(%ebp,%edi,8),%mm2
-	movb	%al,%cl
-	movb	%ah,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%eax
-	pxor	(%ebp,%esi,8),%mm4
-	pxor	7(%ebp,%edi,8),%mm5
-	movb	%al,%cl
-	movb	%ah,%dl
-	movl	104(%esp),%eax
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	6(%ebp,%esi,8),%mm6
-	pxor	5(%ebp,%edi,8),%mm7
-	movb	%bl,%cl
-	movb	%bh,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%ebx
-	pxor	4(%ebp,%esi,8),%mm0
-	pxor	3(%ebp,%edi,8),%mm1
-	movb	%bl,%cl
-	movb	%bh,%dl
-	movl	108(%esp),%ebx
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	2(%ebp,%esi,8),%mm2
-	pxor	1(%ebp,%edi,8),%mm3
-	movb	%al,%cl
-	movb	%ah,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%eax
-	pxor	(%ebp,%esi,8),%mm5
-	pxor	7(%ebp,%edi,8),%mm6
-	movb	%al,%cl
-	movb	%ah,%dl
-	movl	112(%esp),%eax
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	6(%ebp,%esi,8),%mm7
-	pxor	5(%ebp,%edi,8),%mm0
-	movb	%bl,%cl
-	movb	%bh,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%ebx
-	pxor	4(%ebp,%esi,8),%mm1
-	pxor	3(%ebp,%edi,8),%mm2
-	movb	%bl,%cl
-	movb	%bh,%dl
-	movl	116(%esp),%ebx
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	2(%ebp,%esi,8),%mm3
-	pxor	1(%ebp,%edi,8),%mm4
-	movb	%al,%cl
-	movb	%ah,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%eax
-	pxor	(%ebp,%esi,8),%mm6
-	pxor	7(%ebp,%edi,8),%mm7
-	movb	%al,%cl
-	movb	%ah,%dl
-	movl	120(%esp),%eax
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	6(%ebp,%esi,8),%mm0
-	pxor	5(%ebp,%edi,8),%mm1
-	movb	%bl,%cl
-	movb	%bh,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%ebx
-	pxor	4(%ebp,%esi,8),%mm2
-	pxor	3(%ebp,%edi,8),%mm3
-	movb	%bl,%cl
-	movb	%bh,%dl
-	movl	124(%esp),%ebx
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	2(%ebp,%esi,8),%mm4
-	pxor	1(%ebp,%edi,8),%mm5
-	movb	%al,%cl
-	movb	%ah,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%eax
-	pxor	(%ebp,%esi,8),%mm7
-	pxor	7(%ebp,%edi,8),%mm0
-	movb	%al,%cl
-	movb	%ah,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	6(%ebp,%esi,8),%mm1
-	pxor	5(%ebp,%edi,8),%mm2
-	movb	%bl,%cl
-	movb	%bh,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	shrl	$16,%ebx
-	pxor	4(%ebp,%esi,8),%mm3
-	pxor	3(%ebp,%edi,8),%mm4
-	movb	%bl,%cl
-	movb	%bh,%dl
-	leal	(%ecx,%ecx,1),%esi
-	leal	(%edx,%edx,1),%edi
-	pxor	2(%ebp,%esi,8),%mm5
-	pxor	1(%ebp,%edi,8),%mm6
-	leal	128(%esp),%ebx
-	movl	12(%ebx),%esi
-	addl	$1,%esi
-	cmpl	$10,%esi
-	je	.L004roundsdone
-	movl	%esi,12(%ebx)
-	movq	%mm0,64(%esp)
-	movq	%mm1,72(%esp)
-	movq	%mm2,80(%esp)
-	movq	%mm3,88(%esp)
-	movq	%mm4,96(%esp)
-	movq	%mm5,104(%esp)
-	movq	%mm6,112(%esp)
-	movq	%mm7,120(%esp)
-	jmp	.L003round
-.align	16
-.L004roundsdone:
-	movl	(%ebx),%esi
-	movl	4(%ebx),%edi
-	movl	8(%ebx),%eax
-	pxor	(%edi),%mm0
-	pxor	8(%edi),%mm1
-	pxor	16(%edi),%mm2
-	pxor	24(%edi),%mm3
-	pxor	32(%edi),%mm4
-	pxor	40(%edi),%mm5
-	pxor	48(%edi),%mm6
-	pxor	56(%edi),%mm7
-	pxor	(%esi),%mm0
-	pxor	8(%esi),%mm1
-	pxor	16(%esi),%mm2
-	pxor	24(%esi),%mm3
-	pxor	32(%esi),%mm4
-	pxor	40(%esi),%mm5
-	pxor	48(%esi),%mm6
-	pxor	56(%esi),%mm7
-	movq	%mm0,(%esi)
-	movq	%mm1,8(%esi)
-	movq	%mm2,16(%esi)
-	movq	%mm3,24(%esi)
-	movq	%mm4,32(%esi)
-	movq	%mm5,40(%esi)
-	movq	%mm6,48(%esi)
-	movq	%mm7,56(%esi)
-	leal	64(%edi),%edi
-	subl	$1,%eax
-	jz	.L005alldone
-	movl	%edi,4(%ebx)
-	movl	%eax,8(%ebx)
-	jmp	.L002outerloop
-.L005alldone:
-	emms
-	movl	16(%ebx),%esp
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.align	64
-.L001table:
-.byte	24,24,96,24,192,120,48,216
-.byte	24,24,96,24,192,120,48,216
-.byte	35,35,140,35,5,175,70,38
-.byte	35,35,140,35,5,175,70,38
-.byte	198,198,63,198,126,249,145,184
-.byte	198,198,63,198,126,249,145,184
-.byte	232,232,135,232,19,111,205,251
-.byte	232,232,135,232,19,111,205,251
-.byte	135,135,38,135,76,161,19,203
-.byte	135,135,38,135,76,161,19,203
-.byte	184,184,218,184,169,98,109,17
-.byte	184,184,218,184,169,98,109,17
-.byte	1,1,4,1,8,5,2,9
-.byte	1,1,4,1,8,5,2,9
-.byte	79,79,33,79,66,110,158,13
-.byte	79,79,33,79,66,110,158,13
-.byte	54,54,216,54,173,238,108,155
-.byte	54,54,216,54,173,238,108,155
-.byte	166,166,162,166,89,4,81,255
-.byte	166,166,162,166,89,4,81,255
-.byte	210,210,111,210,222,189,185,12
-.byte	210,210,111,210,222,189,185,12
-.byte	245,245,243,245,251,6,247,14
-.byte	245,245,243,245,251,6,247,14
-.byte	121,121,249,121,239,128,242,150
-.byte	121,121,249,121,239,128,242,150
-.byte	111,111,161,111,95,206,222,48
-.byte	111,111,161,111,95,206,222,48
-.byte	145,145,126,145,252,239,63,109
-.byte	145,145,126,145,252,239,63,109
-.byte	82,82,85,82,170,7,164,248
-.byte	82,82,85,82,170,7,164,248
-.byte	96,96,157,96,39,253,192,71
-.byte	96,96,157,96,39,253,192,71
-.byte	188,188,202,188,137,118,101,53
-.byte	188,188,202,188,137,118,101,53
-.byte	155,155,86,155,172,205,43,55
-.byte	155,155,86,155,172,205,43,55
-.byte	142,142,2,142,4,140,1,138
-.byte	142,142,2,142,4,140,1,138
-.byte	163,163,182,163,113,21,91,210
-.byte	163,163,182,163,113,21,91,210
-.byte	12,12,48,12,96,60,24,108
-.byte	12,12,48,12,96,60,24,108
-.byte	123,123,241,123,255,138,246,132
-.byte	123,123,241,123,255,138,246,132
-.byte	53,53,212,53,181,225,106,128
-.byte	53,53,212,53,181,225,106,128
-.byte	29,29,116,29,232,105,58,245
-.byte	29,29,116,29,232,105,58,245
-.byte	224,224,167,224,83,71,221,179
-.byte	224,224,167,224,83,71,221,179
-.byte	215,215,123,215,246,172,179,33
-.byte	215,215,123,215,246,172,179,33
-.byte	194,194,47,194,94,237,153,156
-.byte	194,194,47,194,94,237,153,156
-.byte	46,46,184,46,109,150,92,67
-.byte	46,46,184,46,109,150,92,67
-.byte	75,75,49,75,98,122,150,41
-.byte	75,75,49,75,98,122,150,41
-.byte	254,254,223,254,163,33,225,93
-.byte	254,254,223,254,163,33,225,93
-.byte	87,87,65,87,130,22,174,213
-.byte	87,87,65,87,130,22,174,213
-.byte	21,21,84,21,168,65,42,189
-.byte	21,21,84,21,168,65,42,189
-.byte	119,119,193,119,159,182,238,232
-.byte	119,119,193,119,159,182,238,232
-.byte	55,55,220,55,165,235,110,146
-.byte	55,55,220,55,165,235,110,146
-.byte	229,229,179,229,123,86,215,158
-.byte	229,229,179,229,123,86,215,158
-.byte	159,159,70,159,140,217,35,19
-.byte	159,159,70,159,140,217,35,19
-.byte	240,240,231,240,211,23,253,35
-.byte	240,240,231,240,211,23,253,35
-.byte	74,74,53,74,106,127,148,32
-.byte	74,74,53,74,106,127,148,32
-.byte	218,218,79,218,158,149,169,68
-.byte	218,218,79,218,158,149,169,68
-.byte	88,88,125,88,250,37,176,162
-.byte	88,88,125,88,250,37,176,162
-.byte	201,201,3,201,6,202,143,207
-.byte	201,201,3,201,6,202,143,207
-.byte	41,41,164,41,85,141,82,124
-.byte	41,41,164,41,85,141,82,124
-.byte	10,10,40,10,80,34,20,90
-.byte	10,10,40,10,80,34,20,90
-.byte	177,177,254,177,225,79,127,80
-.byte	177,177,254,177,225,79,127,80
-.byte	160,160,186,160,105,26,93,201
-.byte	160,160,186,160,105,26,93,201
-.byte	107,107,177,107,127,218,214,20
-.byte	107,107,177,107,127,218,214,20
-.byte	133,133,46,133,92,171,23,217
-.byte	133,133,46,133,92,171,23,217
-.byte	189,189,206,189,129,115,103,60
-.byte	189,189,206,189,129,115,103,60
-.byte	93,93,105,93,210,52,186,143
-.byte	93,93,105,93,210,52,186,143
-.byte	16,16,64,16,128,80,32,144
-.byte	16,16,64,16,128,80,32,144
-.byte	244,244,247,244,243,3,245,7
-.byte	244,244,247,244,243,3,245,7
-.byte	203,203,11,203,22,192,139,221
-.byte	203,203,11,203,22,192,139,221
-.byte	62,62,248,62,237,198,124,211
-.byte	62,62,248,62,237,198,124,211
-.byte	5,5,20,5,40,17,10,45
-.byte	5,5,20,5,40,17,10,45
-.byte	103,103,129,103,31,230,206,120
-.byte	103,103,129,103,31,230,206,120
-.byte	228,228,183,228,115,83,213,151
-.byte	228,228,183,228,115,83,213,151
-.byte	39,39,156,39,37,187,78,2
-.byte	39,39,156,39,37,187,78,2
-.byte	65,65,25,65,50,88,130,115
-.byte	65,65,25,65,50,88,130,115
-.byte	139,139,22,139,44,157,11,167
-.byte	139,139,22,139,44,157,11,167
-.byte	167,167,166,167,81,1,83,246
-.byte	167,167,166,167,81,1,83,246
-.byte	125,125,233,125,207,148,250,178
-.byte	125,125,233,125,207,148,250,178
-.byte	149,149,110,149,220,251,55,73
-.byte	149,149,110,149,220,251,55,73
-.byte	216,216,71,216,142,159,173,86
-.byte	216,216,71,216,142,159,173,86
-.byte	251,251,203,251,139,48,235,112
-.byte	251,251,203,251,139,48,235,112
-.byte	238,238,159,238,35,113,193,205
-.byte	238,238,159,238,35,113,193,205
-.byte	124,124,237,124,199,145,248,187
-.byte	124,124,237,124,199,145,248,187
-.byte	102,102,133,102,23,227,204,113
-.byte	102,102,133,102,23,227,204,113
-.byte	221,221,83,221,166,142,167,123
-.byte	221,221,83,221,166,142,167,123
-.byte	23,23,92,23,184,75,46,175
-.byte	23,23,92,23,184,75,46,175
-.byte	71,71,1,71,2,70,142,69
-.byte	71,71,1,71,2,70,142,69
-.byte	158,158,66,158,132,220,33,26
-.byte	158,158,66,158,132,220,33,26
-.byte	202,202,15,202,30,197,137,212
-.byte	202,202,15,202,30,197,137,212
-.byte	45,45,180,45,117,153,90,88
-.byte	45,45,180,45,117,153,90,88
-.byte	191,191,198,191,145,121,99,46
-.byte	191,191,198,191,145,121,99,46
-.byte	7,7,28,7,56,27,14,63
-.byte	7,7,28,7,56,27,14,63
-.byte	173,173,142,173,1,35,71,172
-.byte	173,173,142,173,1,35,71,172
-.byte	90,90,117,90,234,47,180,176
-.byte	90,90,117,90,234,47,180,176
-.byte	131,131,54,131,108,181,27,239
-.byte	131,131,54,131,108,181,27,239
-.byte	51,51,204,51,133,255,102,182
-.byte	51,51,204,51,133,255,102,182
-.byte	99,99,145,99,63,242,198,92
-.byte	99,99,145,99,63,242,198,92
-.byte	2,2,8,2,16,10,4,18
-.byte	2,2,8,2,16,10,4,18
-.byte	170,170,146,170,57,56,73,147
-.byte	170,170,146,170,57,56,73,147
-.byte	113,113,217,113,175,168,226,222
-.byte	113,113,217,113,175,168,226,222
-.byte	200,200,7,200,14,207,141,198
-.byte	200,200,7,200,14,207,141,198
-.byte	25,25,100,25,200,125,50,209
-.byte	25,25,100,25,200,125,50,209
-.byte	73,73,57,73,114,112,146,59
-.byte	73,73,57,73,114,112,146,59
-.byte	217,217,67,217,134,154,175,95
-.byte	217,217,67,217,134,154,175,95
-.byte	242,242,239,242,195,29,249,49
-.byte	242,242,239,242,195,29,249,49
-.byte	227,227,171,227,75,72,219,168
-.byte	227,227,171,227,75,72,219,168
-.byte	91,91,113,91,226,42,182,185
-.byte	91,91,113,91,226,42,182,185
-.byte	136,136,26,136,52,146,13,188
-.byte	136,136,26,136,52,146,13,188
-.byte	154,154,82,154,164,200,41,62
-.byte	154,154,82,154,164,200,41,62
-.byte	38,38,152,38,45,190,76,11
-.byte	38,38,152,38,45,190,76,11
-.byte	50,50,200,50,141,250,100,191
-.byte	50,50,200,50,141,250,100,191
-.byte	176,176,250,176,233,74,125,89
-.byte	176,176,250,176,233,74,125,89
-.byte	233,233,131,233,27,106,207,242
-.byte	233,233,131,233,27,106,207,242
-.byte	15,15,60,15,120,51,30,119
-.byte	15,15,60,15,120,51,30,119
-.byte	213,213,115,213,230,166,183,51
-.byte	213,213,115,213,230,166,183,51
-.byte	128,128,58,128,116,186,29,244
-.byte	128,128,58,128,116,186,29,244
-.byte	190,190,194,190,153,124,97,39
-.byte	190,190,194,190,153,124,97,39
-.byte	205,205,19,205,38,222,135,235
-.byte	205,205,19,205,38,222,135,235
-.byte	52,52,208,52,189,228,104,137
-.byte	52,52,208,52,189,228,104,137
-.byte	72,72,61,72,122,117,144,50
-.byte	72,72,61,72,122,117,144,50
-.byte	255,255,219,255,171,36,227,84
-.byte	255,255,219,255,171,36,227,84
-.byte	122,122,245,122,247,143,244,141
-.byte	122,122,245,122,247,143,244,141
-.byte	144,144,122,144,244,234,61,100
-.byte	144,144,122,144,244,234,61,100
-.byte	95,95,97,95,194,62,190,157
-.byte	95,95,97,95,194,62,190,157
-.byte	32,32,128,32,29,160,64,61
-.byte	32,32,128,32,29,160,64,61
-.byte	104,104,189,104,103,213,208,15
-.byte	104,104,189,104,103,213,208,15
-.byte	26,26,104,26,208,114,52,202
-.byte	26,26,104,26,208,114,52,202
-.byte	174,174,130,174,25,44,65,183
-.byte	174,174,130,174,25,44,65,183
-.byte	180,180,234,180,201,94,117,125
-.byte	180,180,234,180,201,94,117,125
-.byte	84,84,77,84,154,25,168,206
-.byte	84,84,77,84,154,25,168,206
-.byte	147,147,118,147,236,229,59,127
-.byte	147,147,118,147,236,229,59,127
-.byte	34,34,136,34,13,170,68,47
-.byte	34,34,136,34,13,170,68,47
-.byte	100,100,141,100,7,233,200,99
-.byte	100,100,141,100,7,233,200,99
-.byte	241,241,227,241,219,18,255,42
-.byte	241,241,227,241,219,18,255,42
-.byte	115,115,209,115,191,162,230,204
-.byte	115,115,209,115,191,162,230,204
-.byte	18,18,72,18,144,90,36,130
-.byte	18,18,72,18,144,90,36,130
-.byte	64,64,29,64,58,93,128,122
-.byte	64,64,29,64,58,93,128,122
-.byte	8,8,32,8,64,40,16,72
-.byte	8,8,32,8,64,40,16,72
-.byte	195,195,43,195,86,232,155,149
-.byte	195,195,43,195,86,232,155,149
-.byte	236,236,151,236,51,123,197,223
-.byte	236,236,151,236,51,123,197,223
-.byte	219,219,75,219,150,144,171,77
-.byte	219,219,75,219,150,144,171,77
-.byte	161,161,190,161,97,31,95,192
-.byte	161,161,190,161,97,31,95,192
-.byte	141,141,14,141,28,131,7,145
-.byte	141,141,14,141,28,131,7,145
-.byte	61,61,244,61,245,201,122,200
-.byte	61,61,244,61,245,201,122,200
-.byte	151,151,102,151,204,241,51,91
-.byte	151,151,102,151,204,241,51,91
-.byte	0,0,0,0,0,0,0,0
-.byte	0,0,0,0,0,0,0,0
-.byte	207,207,27,207,54,212,131,249
-.byte	207,207,27,207,54,212,131,249
-.byte	43,43,172,43,69,135,86,110
-.byte	43,43,172,43,69,135,86,110
-.byte	118,118,197,118,151,179,236,225
-.byte	118,118,197,118,151,179,236,225
-.byte	130,130,50,130,100,176,25,230
-.byte	130,130,50,130,100,176,25,230
-.byte	214,214,127,214,254,169,177,40
-.byte	214,214,127,214,254,169,177,40
-.byte	27,27,108,27,216,119,54,195
-.byte	27,27,108,27,216,119,54,195
-.byte	181,181,238,181,193,91,119,116
-.byte	181,181,238,181,193,91,119,116
-.byte	175,175,134,175,17,41,67,190
-.byte	175,175,134,175,17,41,67,190
-.byte	106,106,181,106,119,223,212,29
-.byte	106,106,181,106,119,223,212,29
-.byte	80,80,93,80,186,13,160,234
-.byte	80,80,93,80,186,13,160,234
-.byte	69,69,9,69,18,76,138,87
-.byte	69,69,9,69,18,76,138,87
-.byte	243,243,235,243,203,24,251,56
-.byte	243,243,235,243,203,24,251,56
-.byte	48,48,192,48,157,240,96,173
-.byte	48,48,192,48,157,240,96,173
-.byte	239,239,155,239,43,116,195,196
-.byte	239,239,155,239,43,116,195,196
-.byte	63,63,252,63,229,195,126,218
-.byte	63,63,252,63,229,195,126,218
-.byte	85,85,73,85,146,28,170,199
-.byte	85,85,73,85,146,28,170,199
-.byte	162,162,178,162,121,16,89,219
-.byte	162,162,178,162,121,16,89,219
-.byte	234,234,143,234,3,101,201,233
-.byte	234,234,143,234,3,101,201,233
-.byte	101,101,137,101,15,236,202,106
-.byte	101,101,137,101,15,236,202,106
-.byte	186,186,210,186,185,104,105,3
-.byte	186,186,210,186,185,104,105,3
-.byte	47,47,188,47,101,147,94,74
-.byte	47,47,188,47,101,147,94,74
-.byte	192,192,39,192,78,231,157,142
-.byte	192,192,39,192,78,231,157,142
-.byte	222,222,95,222,190,129,161,96
-.byte	222,222,95,222,190,129,161,96
-.byte	28,28,112,28,224,108,56,252
-.byte	28,28,112,28,224,108,56,252
-.byte	253,253,211,253,187,46,231,70
-.byte	253,253,211,253,187,46,231,70
-.byte	77,77,41,77,82,100,154,31
-.byte	77,77,41,77,82,100,154,31
-.byte	146,146,114,146,228,224,57,118
-.byte	146,146,114,146,228,224,57,118
-.byte	117,117,201,117,143,188,234,250
-.byte	117,117,201,117,143,188,234,250
-.byte	6,6,24,6,48,30,12,54
-.byte	6,6,24,6,48,30,12,54
-.byte	138,138,18,138,36,152,9,174
-.byte	138,138,18,138,36,152,9,174
-.byte	178,178,242,178,249,64,121,75
-.byte	178,178,242,178,249,64,121,75
-.byte	230,230,191,230,99,89,209,133
-.byte	230,230,191,230,99,89,209,133
-.byte	14,14,56,14,112,54,28,126
-.byte	14,14,56,14,112,54,28,126
-.byte	31,31,124,31,248,99,62,231
-.byte	31,31,124,31,248,99,62,231
-.byte	98,98,149,98,55,247,196,85
-.byte	98,98,149,98,55,247,196,85
-.byte	212,212,119,212,238,163,181,58
-.byte	212,212,119,212,238,163,181,58
-.byte	168,168,154,168,41,50,77,129
-.byte	168,168,154,168,41,50,77,129
-.byte	150,150,98,150,196,244,49,82
-.byte	150,150,98,150,196,244,49,82
-.byte	249,249,195,249,155,58,239,98
-.byte	249,249,195,249,155,58,239,98
-.byte	197,197,51,197,102,246,151,163
-.byte	197,197,51,197,102,246,151,163
-.byte	37,37,148,37,53,177,74,16
-.byte	37,37,148,37,53,177,74,16
-.byte	89,89,121,89,242,32,178,171
-.byte	89,89,121,89,242,32,178,171
-.byte	132,132,42,132,84,174,21,208
-.byte	132,132,42,132,84,174,21,208
-.byte	114,114,213,114,183,167,228,197
-.byte	114,114,213,114,183,167,228,197
-.byte	57,57,228,57,213,221,114,236
-.byte	57,57,228,57,213,221,114,236
-.byte	76,76,45,76,90,97,152,22
-.byte	76,76,45,76,90,97,152,22
-.byte	94,94,101,94,202,59,188,148
-.byte	94,94,101,94,202,59,188,148
-.byte	120,120,253,120,231,133,240,159
-.byte	120,120,253,120,231,133,240,159
-.byte	56,56,224,56,221,216,112,229
-.byte	56,56,224,56,221,216,112,229
-.byte	140,140,10,140,20,134,5,152
-.byte	140,140,10,140,20,134,5,152
-.byte	209,209,99,209,198,178,191,23
-.byte	209,209,99,209,198,178,191,23
-.byte	165,165,174,165,65,11,87,228
-.byte	165,165,174,165,65,11,87,228
-.byte	226,226,175,226,67,77,217,161
-.byte	226,226,175,226,67,77,217,161
-.byte	97,97,153,97,47,248,194,78
-.byte	97,97,153,97,47,248,194,78
-.byte	179,179,246,179,241,69,123,66
-.byte	179,179,246,179,241,69,123,66
-.byte	33,33,132,33,21,165,66,52
-.byte	33,33,132,33,21,165,66,52
-.byte	156,156,74,156,148,214,37,8
-.byte	156,156,74,156,148,214,37,8
-.byte	30,30,120,30,240,102,60,238
-.byte	30,30,120,30,240,102,60,238
-.byte	67,67,17,67,34,82,134,97
-.byte	67,67,17,67,34,82,134,97
-.byte	199,199,59,199,118,252,147,177
-.byte	199,199,59,199,118,252,147,177
-.byte	252,252,215,252,179,43,229,79
-.byte	252,252,215,252,179,43,229,79
-.byte	4,4,16,4,32,20,8,36
-.byte	4,4,16,4,32,20,8,36
-.byte	81,81,89,81,178,8,162,227
-.byte	81,81,89,81,178,8,162,227
-.byte	153,153,94,153,188,199,47,37
-.byte	153,153,94,153,188,199,47,37
-.byte	109,109,169,109,79,196,218,34
-.byte	109,109,169,109,79,196,218,34
-.byte	13,13,52,13,104,57,26,101
-.byte	13,13,52,13,104,57,26,101
-.byte	250,250,207,250,131,53,233,121
-.byte	250,250,207,250,131,53,233,121
-.byte	223,223,91,223,182,132,163,105
-.byte	223,223,91,223,182,132,163,105
-.byte	126,126,229,126,215,155,252,169
-.byte	126,126,229,126,215,155,252,169
-.byte	36,36,144,36,61,180,72,25
-.byte	36,36,144,36,61,180,72,25
-.byte	59,59,236,59,197,215,118,254
-.byte	59,59,236,59,197,215,118,254
-.byte	171,171,150,171,49,61,75,154
-.byte	171,171,150,171,49,61,75,154
-.byte	206,206,31,206,62,209,129,240
-.byte	206,206,31,206,62,209,129,240
-.byte	17,17,68,17,136,85,34,153
-.byte	17,17,68,17,136,85,34,153
-.byte	143,143,6,143,12,137,3,131
-.byte	143,143,6,143,12,137,3,131
-.byte	78,78,37,78,74,107,156,4
-.byte	78,78,37,78,74,107,156,4
-.byte	183,183,230,183,209,81,115,102
-.byte	183,183,230,183,209,81,115,102
-.byte	235,235,139,235,11,96,203,224
-.byte	235,235,139,235,11,96,203,224
-.byte	60,60,240,60,253,204,120,193
-.byte	60,60,240,60,253,204,120,193
-.byte	129,129,62,129,124,191,31,253
-.byte	129,129,62,129,124,191,31,253
-.byte	148,148,106,148,212,254,53,64
-.byte	148,148,106,148,212,254,53,64
-.byte	247,247,251,247,235,12,243,28
-.byte	247,247,251,247,235,12,243,28
-.byte	185,185,222,185,161,103,111,24
-.byte	185,185,222,185,161,103,111,24
-.byte	19,19,76,19,152,95,38,139
-.byte	19,19,76,19,152,95,38,139
-.byte	44,44,176,44,125,156,88,81
-.byte	44,44,176,44,125,156,88,81
-.byte	211,211,107,211,214,184,187,5
-.byte	211,211,107,211,214,184,187,5
-.byte	231,231,187,231,107,92,211,140
-.byte	231,231,187,231,107,92,211,140
-.byte	110,110,165,110,87,203,220,57
-.byte	110,110,165,110,87,203,220,57
-.byte	196,196,55,196,110,243,149,170
-.byte	196,196,55,196,110,243,149,170
-.byte	3,3,12,3,24,15,6,27
-.byte	3,3,12,3,24,15,6,27
-.byte	86,86,69,86,138,19,172,220
-.byte	86,86,69,86,138,19,172,220
-.byte	68,68,13,68,26,73,136,94
-.byte	68,68,13,68,26,73,136,94
-.byte	127,127,225,127,223,158,254,160
-.byte	127,127,225,127,223,158,254,160
-.byte	169,169,158,169,33,55,79,136
-.byte	169,169,158,169,33,55,79,136
-.byte	42,42,168,42,77,130,84,103
-.byte	42,42,168,42,77,130,84,103
-.byte	187,187,214,187,177,109,107,10
-.byte	187,187,214,187,177,109,107,10
-.byte	193,193,35,193,70,226,159,135
-.byte	193,193,35,193,70,226,159,135
-.byte	83,83,81,83,162,2,166,241
-.byte	83,83,81,83,162,2,166,241
-.byte	220,220,87,220,174,139,165,114
-.byte	220,220,87,220,174,139,165,114
-.byte	11,11,44,11,88,39,22,83
-.byte	11,11,44,11,88,39,22,83
-.byte	157,157,78,157,156,211,39,1
-.byte	157,157,78,157,156,211,39,1
-.byte	108,108,173,108,71,193,216,43
-.byte	108,108,173,108,71,193,216,43
-.byte	49,49,196,49,149,245,98,164
-.byte	49,49,196,49,149,245,98,164
-.byte	116,116,205,116,135,185,232,243
-.byte	116,116,205,116,135,185,232,243
-.byte	246,246,255,246,227,9,241,21
-.byte	246,246,255,246,227,9,241,21
-.byte	70,70,5,70,10,67,140,76
-.byte	70,70,5,70,10,67,140,76
-.byte	172,172,138,172,9,38,69,165
-.byte	172,172,138,172,9,38,69,165
-.byte	137,137,30,137,60,151,15,181
-.byte	137,137,30,137,60,151,15,181
-.byte	20,20,80,20,160,68,40,180
-.byte	20,20,80,20,160,68,40,180
-.byte	225,225,163,225,91,66,223,186
-.byte	225,225,163,225,91,66,223,186
-.byte	22,22,88,22,176,78,44,166
-.byte	22,22,88,22,176,78,44,166
-.byte	58,58,232,58,205,210,116,247
-.byte	58,58,232,58,205,210,116,247
-.byte	105,105,185,105,111,208,210,6
-.byte	105,105,185,105,111,208,210,6
-.byte	9,9,36,9,72,45,18,65
-.byte	9,9,36,9,72,45,18,65
-.byte	112,112,221,112,167,173,224,215
-.byte	112,112,221,112,167,173,224,215
-.byte	182,182,226,182,217,84,113,111
-.byte	182,182,226,182,217,84,113,111
-.byte	208,208,103,208,206,183,189,30
-.byte	208,208,103,208,206,183,189,30
-.byte	237,237,147,237,59,126,199,214
-.byte	237,237,147,237,59,126,199,214
-.byte	204,204,23,204,46,219,133,226
-.byte	204,204,23,204,46,219,133,226
-.byte	66,66,21,66,42,87,132,104
-.byte	66,66,21,66,42,87,132,104
-.byte	152,152,90,152,180,194,45,44
-.byte	152,152,90,152,180,194,45,44
-.byte	164,164,170,164,73,14,85,237
-.byte	164,164,170,164,73,14,85,237
-.byte	40,40,160,40,93,136,80,117
-.byte	40,40,160,40,93,136,80,117
-.byte	92,92,109,92,218,49,184,134
-.byte	92,92,109,92,218,49,184,134
-.byte	248,248,199,248,147,63,237,107
-.byte	248,248,199,248,147,63,237,107
-.byte	134,134,34,134,68,164,17,194
-.byte	134,134,34,134,68,164,17,194
-.byte	24,35,198,232,135,184,1,79
-.byte	54,166,210,245,121,111,145,82
-.byte	96,188,155,142,163,12,123,53
-.byte	29,224,215,194,46,75,254,87
-.byte	21,119,55,229,159,240,74,218
-.byte	88,201,41,10,177,160,107,133
-.byte	189,93,16,244,203,62,5,103
-.byte	228,39,65,139,167,125,149,216
-.byte	251,238,124,102,221,23,71,158
-.byte	202,45,191,7,173,90,131,51
-.size	whirlpool_block_mmx,.-.L_whirlpool_block_mmx_begin

Added: trunk/secure/lib/libcrypto/i386/x86-gf2m.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/x86-gf2m.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/x86-gf2m.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,696 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/x86-gf2m.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from x86-gf2m.pl.
+#ifdef PIC
+.file	"x86-gf2m.S"
+.text
+.type	_mul_1x1_mmx,@function
+.align	16
+_mul_1x1_mmx:
+	subl	$36,%esp
+	movl	%eax,%ecx
+	leal	(%eax,%eax,1),%edx
+	andl	$1073741823,%ecx
+	leal	(%edx,%edx,1),%ebp
+	movl	$0,(%esp)
+	andl	$2147483647,%edx
+	movd	%eax,%mm2
+	movd	%ebx,%mm3
+	movl	%ecx,4(%esp)
+	xorl	%edx,%ecx
+	pxor	%mm5,%mm5
+	pxor	%mm4,%mm4
+	movl	%edx,8(%esp)
+	xorl	%ebp,%edx
+	movl	%ecx,12(%esp)
+	pcmpgtd	%mm2,%mm5
+	paddd	%mm2,%mm2
+	xorl	%edx,%ecx
+	movl	%ebp,16(%esp)
+	xorl	%edx,%ebp
+	pand	%mm3,%mm5
+	pcmpgtd	%mm2,%mm4
+	movl	%ecx,20(%esp)
+	xorl	%ecx,%ebp
+	psllq	$31,%mm5
+	pand	%mm3,%mm4
+	movl	%edx,24(%esp)
+	movl	$7,%esi
+	movl	%ebp,28(%esp)
+	movl	%esi,%ebp
+	andl	%ebx,%esi
+	shrl	$3,%ebx
+	movl	%ebp,%edi
+	psllq	$30,%mm4
+	andl	%ebx,%edi
+	shrl	$3,%ebx
+	movd	(%esp,%esi,4),%mm0
+	movl	%ebp,%esi
+	andl	%ebx,%esi
+	shrl	$3,%ebx
+	movd	(%esp,%edi,4),%mm2
+	movl	%ebp,%edi
+	psllq	$3,%mm2
+	andl	%ebx,%edi
+	shrl	$3,%ebx
+	pxor	%mm2,%mm0
+	movd	(%esp,%esi,4),%mm1
+	movl	%ebp,%esi
+	psllq	$6,%mm1
+	andl	%ebx,%esi
+	shrl	$3,%ebx
+	pxor	%mm1,%mm0
+	movd	(%esp,%edi,4),%mm2
+	movl	%ebp,%edi
+	psllq	$9,%mm2
+	andl	%ebx,%edi
+	shrl	$3,%ebx
+	pxor	%mm2,%mm0
+	movd	(%esp,%esi,4),%mm1
+	movl	%ebp,%esi
+	psllq	$12,%mm1
+	andl	%ebx,%esi
+	shrl	$3,%ebx
+	pxor	%mm1,%mm0
+	movd	(%esp,%edi,4),%mm2
+	movl	%ebp,%edi
+	psllq	$15,%mm2
+	andl	%ebx,%edi
+	shrl	$3,%ebx
+	pxor	%mm2,%mm0
+	movd	(%esp,%esi,4),%mm1
+	movl	%ebp,%esi
+	psllq	$18,%mm1
+	andl	%ebx,%esi
+	shrl	$3,%ebx
+	pxor	%mm1,%mm0
+	movd	(%esp,%edi,4),%mm2
+	movl	%ebp,%edi
+	psllq	$21,%mm2
+	andl	%ebx,%edi
+	shrl	$3,%ebx
+	pxor	%mm2,%mm0
+	movd	(%esp,%esi,4),%mm1
+	movl	%ebp,%esi
+	psllq	$24,%mm1
+	andl	%ebx,%esi
+	shrl	$3,%ebx
+	pxor	%mm1,%mm0
+	movd	(%esp,%edi,4),%mm2
+	pxor	%mm4,%mm0
+	psllq	$27,%mm2
+	pxor	%mm2,%mm0
+	movd	(%esp,%esi,4),%mm1
+	pxor	%mm5,%mm0
+	psllq	$30,%mm1
+	addl	$36,%esp
+	pxor	%mm1,%mm0
+	ret
+.size	_mul_1x1_mmx,.-_mul_1x1_mmx
+.type	_mul_1x1_ialu,@function
+.align	16
+_mul_1x1_ialu:
+	subl	$36,%esp
+	movl	%eax,%ecx
+	leal	(%eax,%eax,1),%edx
+	leal	(,%eax,4),%ebp
+	andl	$1073741823,%ecx
+	leal	(%eax,%eax,1),%edi
+	sarl	$31,%eax
+	movl	$0,(%esp)
+	andl	$2147483647,%edx
+	movl	%ecx,4(%esp)
+	xorl	%edx,%ecx
+	movl	%edx,8(%esp)
+	xorl	%ebp,%edx
+	movl	%ecx,12(%esp)
+	xorl	%edx,%ecx
+	movl	%ebp,16(%esp)
+	xorl	%edx,%ebp
+	movl	%ecx,20(%esp)
+	xorl	%ecx,%ebp
+	sarl	$31,%edi
+	andl	%ebx,%eax
+	movl	%edx,24(%esp)
+	andl	%ebx,%edi
+	movl	%ebp,28(%esp)
+	movl	%eax,%edx
+	shll	$31,%eax
+	movl	%edi,%ecx
+	shrl	$1,%edx
+	movl	$7,%esi
+	shll	$30,%edi
+	andl	%ebx,%esi
+	shrl	$2,%ecx
+	xorl	%edi,%eax
+	shrl	$3,%ebx
+	movl	$7,%edi
+	andl	%ebx,%edi
+	shrl	$3,%ebx
+	xorl	%ecx,%edx
+	xorl	(%esp,%esi,4),%eax
+	movl	$7,%esi
+	andl	%ebx,%esi
+	shrl	$3,%ebx
+	movl	(%esp,%edi,4),%ebp
+	movl	$7,%edi
+	movl	%ebp,%ecx
+	shll	$3,%ebp
+	andl	%ebx,%edi
+	shrl	$29,%ecx
+	xorl	%ebp,%eax
+	shrl	$3,%ebx
+	xorl	%ecx,%edx
+	movl	(%esp,%esi,4),%ecx
+	movl	$7,%esi
+	movl	%ecx,%ebp
+	shll	$6,%ecx
+	andl	%ebx,%esi
+	shrl	$26,%ebp
+	xorl	%ecx,%eax
+	shrl	$3,%ebx
+	xorl	%ebp,%edx
+	movl	(%esp,%edi,4),%ebp
+	movl	$7,%edi
+	movl	%ebp,%ecx
+	shll	$9,%ebp
+	andl	%ebx,%edi
+	shrl	$23,%ecx
+	xorl	%ebp,%eax
+	shrl	$3,%ebx
+	xorl	%ecx,%edx
+	movl	(%esp,%esi,4),%ecx
+	movl	$7,%esi
+	movl	%ecx,%ebp
+	shll	$12,%ecx
+	andl	%ebx,%esi
+	shrl	$20,%ebp
+	xorl	%ecx,%eax
+	shrl	$3,%ebx
+	xorl	%ebp,%edx
+	movl	(%esp,%edi,4),%ebp
+	movl	$7,%edi
+	movl	%ebp,%ecx
+	shll	$15,%ebp
+	andl	%ebx,%edi
+	shrl	$17,%ecx
+	xorl	%ebp,%eax
+	shrl	$3,%ebx
+	xorl	%ecx,%edx
+	movl	(%esp,%esi,4),%ecx
+	movl	$7,%esi
+	movl	%ecx,%ebp
+	shll	$18,%ecx
+	andl	%ebx,%esi
+	shrl	$14,%ebp
+	xorl	%ecx,%eax
+	shrl	$3,%ebx
+	xorl	%ebp,%edx
+	movl	(%esp,%edi,4),%ebp
+	movl	$7,%edi
+	movl	%ebp,%ecx
+	shll	$21,%ebp
+	andl	%ebx,%edi
+	shrl	$11,%ecx
+	xorl	%ebp,%eax
+	shrl	$3,%ebx
+	xorl	%ecx,%edx
+	movl	(%esp,%esi,4),%ecx
+	movl	$7,%esi
+	movl	%ecx,%ebp
+	shll	$24,%ecx
+	andl	%ebx,%esi
+	shrl	$8,%ebp
+	xorl	%ecx,%eax
+	shrl	$3,%ebx
+	xorl	%ebp,%edx
+	movl	(%esp,%edi,4),%ebp
+	movl	%ebp,%ecx
+	shll	$27,%ebp
+	movl	(%esp,%esi,4),%edi
+	shrl	$5,%ecx
+	movl	%edi,%esi
+	xorl	%ebp,%eax
+	shll	$30,%edi
+	xorl	%ecx,%edx
+	shrl	$2,%esi
+	xorl	%edi,%eax
+	xorl	%esi,%edx
+	addl	$36,%esp
+	ret
+.size	_mul_1x1_ialu,.-_mul_1x1_ialu
+.globl	bn_GF2m_mul_2x2
+.type	bn_GF2m_mul_2x2,@function
+.align	16
+bn_GF2m_mul_2x2:
+.L_bn_GF2m_mul_2x2_begin:
+	call	.L000PIC_me_up
+.L000PIC_me_up:
+	popl	%edx
+	leal	_GLOBAL_OFFSET_TABLE_+[.-.L000PIC_me_up](%edx),%edx
+	movl	OPENSSL_ia32cap_P@GOT(%edx),%edx
+	movl	(%edx),%eax
+	movl	4(%edx),%edx
+	testl	$8388608,%eax
+	jz	.L001ialu
+	testl	$16777216,%eax
+	jz	.L002mmx
+	testl	$2,%edx
+	jz	.L002mmx
+	movups	8(%esp),%xmm0
+	shufps	$177,%xmm0,%xmm0
+.byte	102,15,58,68,192,1
+	movl	4(%esp),%eax
+	movups	%xmm0,(%eax)
+	ret
+.align	16
+.L002mmx:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	24(%esp),%eax
+	movl	32(%esp),%ebx
+	call	_mul_1x1_mmx
+	movq	%mm0,%mm7
+	movl	28(%esp),%eax
+	movl	36(%esp),%ebx
+	call	_mul_1x1_mmx
+	movq	%mm0,%mm6
+	movl	24(%esp),%eax
+	movl	32(%esp),%ebx
+	xorl	28(%esp),%eax
+	xorl	36(%esp),%ebx
+	call	_mul_1x1_mmx
+	pxor	%mm7,%mm0
+	movl	20(%esp),%eax
+	pxor	%mm6,%mm0
+	movq	%mm0,%mm2
+	psllq	$32,%mm0
+	popl	%edi
+	psrlq	$32,%mm2
+	popl	%esi
+	pxor	%mm6,%mm0
+	popl	%ebx
+	pxor	%mm7,%mm2
+	movq	%mm0,(%eax)
+	popl	%ebp
+	movq	%mm2,8(%eax)
+	emms
+	ret
+.align	16
+.L001ialu:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	subl	$20,%esp
+	movl	44(%esp),%eax
+	movl	52(%esp),%ebx
+	call	_mul_1x1_ialu
+	movl	%eax,8(%esp)
+	movl	%edx,12(%esp)
+	movl	48(%esp),%eax
+	movl	56(%esp),%ebx
+	call	_mul_1x1_ialu
+	movl	%eax,(%esp)
+	movl	%edx,4(%esp)
+	movl	44(%esp),%eax
+	movl	52(%esp),%ebx
+	xorl	48(%esp),%eax
+	xorl	56(%esp),%ebx
+	call	_mul_1x1_ialu
+	movl	40(%esp),%ebp
+	movl	(%esp),%ebx
+	movl	4(%esp),%ecx
+	movl	8(%esp),%edi
+	movl	12(%esp),%esi
+	xorl	%edx,%eax
+	xorl	%ecx,%edx
+	xorl	%ebx,%eax
+	movl	%ebx,(%ebp)
+	xorl	%edi,%edx
+	movl	%esi,12(%ebp)
+	xorl	%esi,%eax
+	addl	$20,%esp
+	xorl	%esi,%edx
+	popl	%edi
+	xorl	%edx,%eax
+	popl	%esi
+	movl	%edx,8(%ebp)
+	popl	%ebx
+	movl	%eax,4(%ebp)
+	popl	%ebp
+	ret
+.size	bn_GF2m_mul_2x2,.-.L_bn_GF2m_mul_2x2_begin
+.byte	71,70,40,50,94,109,41,32,77,117,108,116,105,112,108,105
+.byte	99,97,116,105,111,110,32,102,111,114,32,120,56,54,44,32
+.byte	67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+.byte	112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+.byte	62,0
+.comm	OPENSSL_ia32cap_P,8,4
+#else
+.file	"x86-gf2m.S"
+.text
+.type	_mul_1x1_mmx,@function
+.align	16
+_mul_1x1_mmx:
+	subl	$36,%esp
+	movl	%eax,%ecx
+	leal	(%eax,%eax,1),%edx
+	andl	$1073741823,%ecx
+	leal	(%edx,%edx,1),%ebp
+	movl	$0,(%esp)
+	andl	$2147483647,%edx
+	movd	%eax,%mm2
+	movd	%ebx,%mm3
+	movl	%ecx,4(%esp)
+	xorl	%edx,%ecx
+	pxor	%mm5,%mm5
+	pxor	%mm4,%mm4
+	movl	%edx,8(%esp)
+	xorl	%ebp,%edx
+	movl	%ecx,12(%esp)
+	pcmpgtd	%mm2,%mm5
+	paddd	%mm2,%mm2
+	xorl	%edx,%ecx
+	movl	%ebp,16(%esp)
+	xorl	%edx,%ebp
+	pand	%mm3,%mm5
+	pcmpgtd	%mm2,%mm4
+	movl	%ecx,20(%esp)
+	xorl	%ecx,%ebp
+	psllq	$31,%mm5
+	pand	%mm3,%mm4
+	movl	%edx,24(%esp)
+	movl	$7,%esi
+	movl	%ebp,28(%esp)
+	movl	%esi,%ebp
+	andl	%ebx,%esi
+	shrl	$3,%ebx
+	movl	%ebp,%edi
+	psllq	$30,%mm4
+	andl	%ebx,%edi
+	shrl	$3,%ebx
+	movd	(%esp,%esi,4),%mm0
+	movl	%ebp,%esi
+	andl	%ebx,%esi
+	shrl	$3,%ebx
+	movd	(%esp,%edi,4),%mm2
+	movl	%ebp,%edi
+	psllq	$3,%mm2
+	andl	%ebx,%edi
+	shrl	$3,%ebx
+	pxor	%mm2,%mm0
+	movd	(%esp,%esi,4),%mm1
+	movl	%ebp,%esi
+	psllq	$6,%mm1
+	andl	%ebx,%esi
+	shrl	$3,%ebx
+	pxor	%mm1,%mm0
+	movd	(%esp,%edi,4),%mm2
+	movl	%ebp,%edi
+	psllq	$9,%mm2
+	andl	%ebx,%edi
+	shrl	$3,%ebx
+	pxor	%mm2,%mm0
+	movd	(%esp,%esi,4),%mm1
+	movl	%ebp,%esi
+	psllq	$12,%mm1
+	andl	%ebx,%esi
+	shrl	$3,%ebx
+	pxor	%mm1,%mm0
+	movd	(%esp,%edi,4),%mm2
+	movl	%ebp,%edi
+	psllq	$15,%mm2
+	andl	%ebx,%edi
+	shrl	$3,%ebx
+	pxor	%mm2,%mm0
+	movd	(%esp,%esi,4),%mm1
+	movl	%ebp,%esi
+	psllq	$18,%mm1
+	andl	%ebx,%esi
+	shrl	$3,%ebx
+	pxor	%mm1,%mm0
+	movd	(%esp,%edi,4),%mm2
+	movl	%ebp,%edi
+	psllq	$21,%mm2
+	andl	%ebx,%edi
+	shrl	$3,%ebx
+	pxor	%mm2,%mm0
+	movd	(%esp,%esi,4),%mm1
+	movl	%ebp,%esi
+	psllq	$24,%mm1
+	andl	%ebx,%esi
+	shrl	$3,%ebx
+	pxor	%mm1,%mm0
+	movd	(%esp,%edi,4),%mm2
+	pxor	%mm4,%mm0
+	psllq	$27,%mm2
+	pxor	%mm2,%mm0
+	movd	(%esp,%esi,4),%mm1
+	pxor	%mm5,%mm0
+	psllq	$30,%mm1
+	addl	$36,%esp
+	pxor	%mm1,%mm0
+	ret
+.size	_mul_1x1_mmx,.-_mul_1x1_mmx
+.type	_mul_1x1_ialu,@function
+.align	16
+_mul_1x1_ialu:
+	subl	$36,%esp
+	movl	%eax,%ecx
+	leal	(%eax,%eax,1),%edx
+	leal	(,%eax,4),%ebp
+	andl	$1073741823,%ecx
+	leal	(%eax,%eax,1),%edi
+	sarl	$31,%eax
+	movl	$0,(%esp)
+	andl	$2147483647,%edx
+	movl	%ecx,4(%esp)
+	xorl	%edx,%ecx
+	movl	%edx,8(%esp)
+	xorl	%ebp,%edx
+	movl	%ecx,12(%esp)
+	xorl	%edx,%ecx
+	movl	%ebp,16(%esp)
+	xorl	%edx,%ebp
+	movl	%ecx,20(%esp)
+	xorl	%ecx,%ebp
+	sarl	$31,%edi
+	andl	%ebx,%eax
+	movl	%edx,24(%esp)
+	andl	%ebx,%edi
+	movl	%ebp,28(%esp)
+	movl	%eax,%edx
+	shll	$31,%eax
+	movl	%edi,%ecx
+	shrl	$1,%edx
+	movl	$7,%esi
+	shll	$30,%edi
+	andl	%ebx,%esi
+	shrl	$2,%ecx
+	xorl	%edi,%eax
+	shrl	$3,%ebx
+	movl	$7,%edi
+	andl	%ebx,%edi
+	shrl	$3,%ebx
+	xorl	%ecx,%edx
+	xorl	(%esp,%esi,4),%eax
+	movl	$7,%esi
+	andl	%ebx,%esi
+	shrl	$3,%ebx
+	movl	(%esp,%edi,4),%ebp
+	movl	$7,%edi
+	movl	%ebp,%ecx
+	shll	$3,%ebp
+	andl	%ebx,%edi
+	shrl	$29,%ecx
+	xorl	%ebp,%eax
+	shrl	$3,%ebx
+	xorl	%ecx,%edx
+	movl	(%esp,%esi,4),%ecx
+	movl	$7,%esi
+	movl	%ecx,%ebp
+	shll	$6,%ecx
+	andl	%ebx,%esi
+	shrl	$26,%ebp
+	xorl	%ecx,%eax
+	shrl	$3,%ebx
+	xorl	%ebp,%edx
+	movl	(%esp,%edi,4),%ebp
+	movl	$7,%edi
+	movl	%ebp,%ecx
+	shll	$9,%ebp
+	andl	%ebx,%edi
+	shrl	$23,%ecx
+	xorl	%ebp,%eax
+	shrl	$3,%ebx
+	xorl	%ecx,%edx
+	movl	(%esp,%esi,4),%ecx
+	movl	$7,%esi
+	movl	%ecx,%ebp
+	shll	$12,%ecx
+	andl	%ebx,%esi
+	shrl	$20,%ebp
+	xorl	%ecx,%eax
+	shrl	$3,%ebx
+	xorl	%ebp,%edx
+	movl	(%esp,%edi,4),%ebp
+	movl	$7,%edi
+	movl	%ebp,%ecx
+	shll	$15,%ebp
+	andl	%ebx,%edi
+	shrl	$17,%ecx
+	xorl	%ebp,%eax
+	shrl	$3,%ebx
+	xorl	%ecx,%edx
+	movl	(%esp,%esi,4),%ecx
+	movl	$7,%esi
+	movl	%ecx,%ebp
+	shll	$18,%ecx
+	andl	%ebx,%esi
+	shrl	$14,%ebp
+	xorl	%ecx,%eax
+	shrl	$3,%ebx
+	xorl	%ebp,%edx
+	movl	(%esp,%edi,4),%ebp
+	movl	$7,%edi
+	movl	%ebp,%ecx
+	shll	$21,%ebp
+	andl	%ebx,%edi
+	shrl	$11,%ecx
+	xorl	%ebp,%eax
+	shrl	$3,%ebx
+	xorl	%ecx,%edx
+	movl	(%esp,%esi,4),%ecx
+	movl	$7,%esi
+	movl	%ecx,%ebp
+	shll	$24,%ecx
+	andl	%ebx,%esi
+	shrl	$8,%ebp
+	xorl	%ecx,%eax
+	shrl	$3,%ebx
+	xorl	%ebp,%edx
+	movl	(%esp,%edi,4),%ebp
+	movl	%ebp,%ecx
+	shll	$27,%ebp
+	movl	(%esp,%esi,4),%edi
+	shrl	$5,%ecx
+	movl	%edi,%esi
+	xorl	%ebp,%eax
+	shll	$30,%edi
+	xorl	%ecx,%edx
+	shrl	$2,%esi
+	xorl	%edi,%eax
+	xorl	%esi,%edx
+	addl	$36,%esp
+	ret
+.size	_mul_1x1_ialu,.-_mul_1x1_ialu
+.globl	bn_GF2m_mul_2x2
+.type	bn_GF2m_mul_2x2,@function
+.align	16
+bn_GF2m_mul_2x2:
+.L_bn_GF2m_mul_2x2_begin:
+	leal	OPENSSL_ia32cap_P,%edx
+	movl	(%edx),%eax
+	movl	4(%edx),%edx
+	testl	$8388608,%eax
+	jz	.L000ialu
+	testl	$16777216,%eax
+	jz	.L001mmx
+	testl	$2,%edx
+	jz	.L001mmx
+	movups	8(%esp),%xmm0
+	shufps	$177,%xmm0,%xmm0
+.byte	102,15,58,68,192,1
+	movl	4(%esp),%eax
+	movups	%xmm0,(%eax)
+	ret
+.align	16
+.L001mmx:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	24(%esp),%eax
+	movl	32(%esp),%ebx
+	call	_mul_1x1_mmx
+	movq	%mm0,%mm7
+	movl	28(%esp),%eax
+	movl	36(%esp),%ebx
+	call	_mul_1x1_mmx
+	movq	%mm0,%mm6
+	movl	24(%esp),%eax
+	movl	32(%esp),%ebx
+	xorl	28(%esp),%eax
+	xorl	36(%esp),%ebx
+	call	_mul_1x1_mmx
+	pxor	%mm7,%mm0
+	movl	20(%esp),%eax
+	pxor	%mm6,%mm0
+	movq	%mm0,%mm2
+	psllq	$32,%mm0
+	popl	%edi
+	psrlq	$32,%mm2
+	popl	%esi
+	pxor	%mm6,%mm0
+	popl	%ebx
+	pxor	%mm7,%mm2
+	movq	%mm0,(%eax)
+	popl	%ebp
+	movq	%mm2,8(%eax)
+	emms
+	ret
+.align	16
+.L000ialu:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	subl	$20,%esp
+	movl	44(%esp),%eax
+	movl	52(%esp),%ebx
+	call	_mul_1x1_ialu
+	movl	%eax,8(%esp)
+	movl	%edx,12(%esp)
+	movl	48(%esp),%eax
+	movl	56(%esp),%ebx
+	call	_mul_1x1_ialu
+	movl	%eax,(%esp)
+	movl	%edx,4(%esp)
+	movl	44(%esp),%eax
+	movl	52(%esp),%ebx
+	xorl	48(%esp),%eax
+	xorl	56(%esp),%ebx
+	call	_mul_1x1_ialu
+	movl	40(%esp),%ebp
+	movl	(%esp),%ebx
+	movl	4(%esp),%ecx
+	movl	8(%esp),%edi
+	movl	12(%esp),%esi
+	xorl	%edx,%eax
+	xorl	%ecx,%edx
+	xorl	%ebx,%eax
+	movl	%ebx,(%ebp)
+	xorl	%edi,%edx
+	movl	%esi,12(%ebp)
+	xorl	%esi,%eax
+	addl	$20,%esp
+	xorl	%esi,%edx
+	popl	%edi
+	xorl	%edx,%eax
+	popl	%esi
+	movl	%edx,8(%ebp)
+	popl	%ebx
+	movl	%eax,4(%ebp)
+	popl	%ebp
+	ret
+.size	bn_GF2m_mul_2x2,.-.L_bn_GF2m_mul_2x2_begin
+.byte	71,70,40,50,94,109,41,32,77,117,108,116,105,112,108,105
+.byte	99,97,116,105,111,110,32,102,111,114,32,120,56,54,44,32
+.byte	67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+.byte	112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+.byte	62,0
+.comm	OPENSSL_ia32cap_P,8,4
+#endif


Property changes on: trunk/secure/lib/libcrypto/i386/x86-gf2m.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
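
Note: both copies of _mul_1x1_mmx/_mul_1x1_ialu above implement a
32x32->64-bit carry-less (GF(2)[x]) multiply with a 3-bit window: the
products of a with every polynomial of degree <= 2 are tabulated on the
stack, and b is consumed three bits at a time (the movl $7 / andl %ebx /
shrl $3,%ebx sequences). Because the table entries are only 32 bits
wide, the top two bits of a are folded in separately with sign-mask
arithmetic (sarl $31 in the integer path, pcmpgtd in the MMX path).
bn_GF2m_mul_2x2 dispatches on OPENSSL_ia32cap_P: with FXSR and PCLMULQDQ
available it runs the hand-encoded .byte 102,15,58,68,192,1, which is
pclmulqdq $1,%xmm0,%xmm0 spelled out in bytes, presumably because
assemblers of the day lacked the mnemonic; with MMX only it takes the
MMX path, otherwise the plain-integer one. The #ifdef PIC copy differs
only in locating OPENSSL_ia32cap_P through the GOT, which is the point
of replacing the old .s files with preprocessed .S ones. A compact C
sketch of the windowed multiply, with illustrative names and a 64-bit
table so no top-bit fix-up is needed:

	#include <stdint.h>

	/* Carry-less 32x32 multiply via 3-bit windows (sketch). */
	static uint64_t clmul32(uint32_t a, uint32_t b)
	{
		uint64_t t[8];	/* t[k] = a(x) * k(x) over GF(2) */
		t[0] = 0;
		t[1] = a;
		t[2] = (uint64_t)a << 1;
		t[3] = t[2] ^ t[1];
		t[4] = (uint64_t)a << 2;
		t[5] = t[4] ^ t[1];
		t[6] = t[4] ^ t[2];
		t[7] = t[6] ^ t[1];
		uint64_t r = 0;
		for (unsigned i = 0; i < 32; i += 3)	/* 11 slices of b */
			r ^= t[(b >> i) & 7] << i;
		return r;
	}
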
Deleted: trunk/secure/lib/libcrypto/i386/x86-gf2m.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/x86-gf2m.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/x86-gf2m.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,344 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/x86-gf2m.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"x86-gf2m.s"
-.text
-.type	_mul_1x1_mmx,@function
-.align	16
-_mul_1x1_mmx:
-	subl	$36,%esp
-	movl	%eax,%ecx
-	leal	(%eax,%eax,1),%edx
-	andl	$1073741823,%ecx
-	leal	(%edx,%edx,1),%ebp
-	movl	$0,(%esp)
-	andl	$2147483647,%edx
-	movd	%eax,%mm2
-	movd	%ebx,%mm3
-	movl	%ecx,4(%esp)
-	xorl	%edx,%ecx
-	pxor	%mm5,%mm5
-	pxor	%mm4,%mm4
-	movl	%edx,8(%esp)
-	xorl	%ebp,%edx
-	movl	%ecx,12(%esp)
-	pcmpgtd	%mm2,%mm5
-	paddd	%mm2,%mm2
-	xorl	%edx,%ecx
-	movl	%ebp,16(%esp)
-	xorl	%edx,%ebp
-	pand	%mm3,%mm5
-	pcmpgtd	%mm2,%mm4
-	movl	%ecx,20(%esp)
-	xorl	%ecx,%ebp
-	psllq	$31,%mm5
-	pand	%mm3,%mm4
-	movl	%edx,24(%esp)
-	movl	$7,%esi
-	movl	%ebp,28(%esp)
-	movl	%esi,%ebp
-	andl	%ebx,%esi
-	shrl	$3,%ebx
-	movl	%ebp,%edi
-	psllq	$30,%mm4
-	andl	%ebx,%edi
-	shrl	$3,%ebx
-	movd	(%esp,%esi,4),%mm0
-	movl	%ebp,%esi
-	andl	%ebx,%esi
-	shrl	$3,%ebx
-	movd	(%esp,%edi,4),%mm2
-	movl	%ebp,%edi
-	psllq	$3,%mm2
-	andl	%ebx,%edi
-	shrl	$3,%ebx
-	pxor	%mm2,%mm0
-	movd	(%esp,%esi,4),%mm1
-	movl	%ebp,%esi
-	psllq	$6,%mm1
-	andl	%ebx,%esi
-	shrl	$3,%ebx
-	pxor	%mm1,%mm0
-	movd	(%esp,%edi,4),%mm2
-	movl	%ebp,%edi
-	psllq	$9,%mm2
-	andl	%ebx,%edi
-	shrl	$3,%ebx
-	pxor	%mm2,%mm0
-	movd	(%esp,%esi,4),%mm1
-	movl	%ebp,%esi
-	psllq	$12,%mm1
-	andl	%ebx,%esi
-	shrl	$3,%ebx
-	pxor	%mm1,%mm0
-	movd	(%esp,%edi,4),%mm2
-	movl	%ebp,%edi
-	psllq	$15,%mm2
-	andl	%ebx,%edi
-	shrl	$3,%ebx
-	pxor	%mm2,%mm0
-	movd	(%esp,%esi,4),%mm1
-	movl	%ebp,%esi
-	psllq	$18,%mm1
-	andl	%ebx,%esi
-	shrl	$3,%ebx
-	pxor	%mm1,%mm0
-	movd	(%esp,%edi,4),%mm2
-	movl	%ebp,%edi
-	psllq	$21,%mm2
-	andl	%ebx,%edi
-	shrl	$3,%ebx
-	pxor	%mm2,%mm0
-	movd	(%esp,%esi,4),%mm1
-	movl	%ebp,%esi
-	psllq	$24,%mm1
-	andl	%ebx,%esi
-	shrl	$3,%ebx
-	pxor	%mm1,%mm0
-	movd	(%esp,%edi,4),%mm2
-	pxor	%mm4,%mm0
-	psllq	$27,%mm2
-	pxor	%mm2,%mm0
-	movd	(%esp,%esi,4),%mm1
-	pxor	%mm5,%mm0
-	psllq	$30,%mm1
-	addl	$36,%esp
-	pxor	%mm1,%mm0
-	ret
-.size	_mul_1x1_mmx,.-_mul_1x1_mmx
-.type	_mul_1x1_ialu,@function
-.align	16
-_mul_1x1_ialu:
-	subl	$36,%esp
-	movl	%eax,%ecx
-	leal	(%eax,%eax,1),%edx
-	leal	(,%eax,4),%ebp
-	andl	$1073741823,%ecx
-	leal	(%eax,%eax,1),%edi
-	sarl	$31,%eax
-	movl	$0,(%esp)
-	andl	$2147483647,%edx
-	movl	%ecx,4(%esp)
-	xorl	%edx,%ecx
-	movl	%edx,8(%esp)
-	xorl	%ebp,%edx
-	movl	%ecx,12(%esp)
-	xorl	%edx,%ecx
-	movl	%ebp,16(%esp)
-	xorl	%edx,%ebp
-	movl	%ecx,20(%esp)
-	xorl	%ecx,%ebp
-	sarl	$31,%edi
-	andl	%ebx,%eax
-	movl	%edx,24(%esp)
-	andl	%ebx,%edi
-	movl	%ebp,28(%esp)
-	movl	%eax,%edx
-	shll	$31,%eax
-	movl	%edi,%ecx
-	shrl	$1,%edx
-	movl	$7,%esi
-	shll	$30,%edi
-	andl	%ebx,%esi
-	shrl	$2,%ecx
-	xorl	%edi,%eax
-	shrl	$3,%ebx
-	movl	$7,%edi
-	andl	%ebx,%edi
-	shrl	$3,%ebx
-	xorl	%ecx,%edx
-	xorl	(%esp,%esi,4),%eax
-	movl	$7,%esi
-	andl	%ebx,%esi
-	shrl	$3,%ebx
-	movl	(%esp,%edi,4),%ebp
-	movl	$7,%edi
-	movl	%ebp,%ecx
-	shll	$3,%ebp
-	andl	%ebx,%edi
-	shrl	$29,%ecx
-	xorl	%ebp,%eax
-	shrl	$3,%ebx
-	xorl	%ecx,%edx
-	movl	(%esp,%esi,4),%ecx
-	movl	$7,%esi
-	movl	%ecx,%ebp
-	shll	$6,%ecx
-	andl	%ebx,%esi
-	shrl	$26,%ebp
-	xorl	%ecx,%eax
-	shrl	$3,%ebx
-	xorl	%ebp,%edx
-	movl	(%esp,%edi,4),%ebp
-	movl	$7,%edi
-	movl	%ebp,%ecx
-	shll	$9,%ebp
-	andl	%ebx,%edi
-	shrl	$23,%ecx
-	xorl	%ebp,%eax
-	shrl	$3,%ebx
-	xorl	%ecx,%edx
-	movl	(%esp,%esi,4),%ecx
-	movl	$7,%esi
-	movl	%ecx,%ebp
-	shll	$12,%ecx
-	andl	%ebx,%esi
-	shrl	$20,%ebp
-	xorl	%ecx,%eax
-	shrl	$3,%ebx
-	xorl	%ebp,%edx
-	movl	(%esp,%edi,4),%ebp
-	movl	$7,%edi
-	movl	%ebp,%ecx
-	shll	$15,%ebp
-	andl	%ebx,%edi
-	shrl	$17,%ecx
-	xorl	%ebp,%eax
-	shrl	$3,%ebx
-	xorl	%ecx,%edx
-	movl	(%esp,%esi,4),%ecx
-	movl	$7,%esi
-	movl	%ecx,%ebp
-	shll	$18,%ecx
-	andl	%ebx,%esi
-	shrl	$14,%ebp
-	xorl	%ecx,%eax
-	shrl	$3,%ebx
-	xorl	%ebp,%edx
-	movl	(%esp,%edi,4),%ebp
-	movl	$7,%edi
-	movl	%ebp,%ecx
-	shll	$21,%ebp
-	andl	%ebx,%edi
-	shrl	$11,%ecx
-	xorl	%ebp,%eax
-	shrl	$3,%ebx
-	xorl	%ecx,%edx
-	movl	(%esp,%esi,4),%ecx
-	movl	$7,%esi
-	movl	%ecx,%ebp
-	shll	$24,%ecx
-	andl	%ebx,%esi
-	shrl	$8,%ebp
-	xorl	%ecx,%eax
-	shrl	$3,%ebx
-	xorl	%ebp,%edx
-	movl	(%esp,%edi,4),%ebp
-	movl	%ebp,%ecx
-	shll	$27,%ebp
-	movl	(%esp,%esi,4),%edi
-	shrl	$5,%ecx
-	movl	%edi,%esi
-	xorl	%ebp,%eax
-	shll	$30,%edi
-	xorl	%ecx,%edx
-	shrl	$2,%esi
-	xorl	%edi,%eax
-	xorl	%esi,%edx
-	addl	$36,%esp
-	ret
-.size	_mul_1x1_ialu,.-_mul_1x1_ialu
-.globl	bn_GF2m_mul_2x2
-.type	bn_GF2m_mul_2x2,@function
-.align	16
-bn_GF2m_mul_2x2:
-.L_bn_GF2m_mul_2x2_begin:
-	leal	OPENSSL_ia32cap_P,%edx
-	movl	(%edx),%eax
-	movl	4(%edx),%edx
-	testl	$8388608,%eax
-	jz	.L000ialu
-	testl	$16777216,%eax
-	jz	.L001mmx
-	testl	$2,%edx
-	jz	.L001mmx
-	movups	8(%esp),%xmm0
-	shufps	$177,%xmm0,%xmm0
-.byte	102,15,58,68,192,1
-	movl	4(%esp),%eax
-	movups	%xmm0,(%eax)
-	ret
-.align	16
-.L001mmx:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	movl	24(%esp),%eax
-	movl	32(%esp),%ebx
-	call	_mul_1x1_mmx
-	movq	%mm0,%mm7
-	movl	28(%esp),%eax
-	movl	36(%esp),%ebx
-	call	_mul_1x1_mmx
-	movq	%mm0,%mm6
-	movl	24(%esp),%eax
-	movl	32(%esp),%ebx
-	xorl	28(%esp),%eax
-	xorl	36(%esp),%ebx
-	call	_mul_1x1_mmx
-	pxor	%mm7,%mm0
-	movl	20(%esp),%eax
-	pxor	%mm6,%mm0
-	movq	%mm0,%mm2
-	psllq	$32,%mm0
-	popl	%edi
-	psrlq	$32,%mm2
-	popl	%esi
-	pxor	%mm6,%mm0
-	popl	%ebx
-	pxor	%mm7,%mm2
-	movq	%mm0,(%eax)
-	popl	%ebp
-	movq	%mm2,8(%eax)
-	emms
-	ret
-.align	16
-.L000ialu:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	subl	$20,%esp
-	movl	44(%esp),%eax
-	movl	52(%esp),%ebx
-	call	_mul_1x1_ialu
-	movl	%eax,8(%esp)
-	movl	%edx,12(%esp)
-	movl	48(%esp),%eax
-	movl	56(%esp),%ebx
-	call	_mul_1x1_ialu
-	movl	%eax,(%esp)
-	movl	%edx,4(%esp)
-	movl	44(%esp),%eax
-	movl	52(%esp),%ebx
-	xorl	48(%esp),%eax
-	xorl	56(%esp),%ebx
-	call	_mul_1x1_ialu
-	movl	40(%esp),%ebp
-	movl	(%esp),%ebx
-	movl	4(%esp),%ecx
-	movl	8(%esp),%edi
-	movl	12(%esp),%esi
-	xorl	%edx,%eax
-	xorl	%ecx,%edx
-	xorl	%ebx,%eax
-	movl	%ebx,(%ebp)
-	xorl	%edi,%edx
-	movl	%esi,12(%ebp)
-	xorl	%esi,%eax
-	addl	$20,%esp
-	xorl	%esi,%edx
-	popl	%edi
-	xorl	%edx,%eax
-	popl	%esi
-	movl	%edx,8(%ebp)
-	popl	%ebx
-	movl	%eax,4(%ebp)
-	popl	%ebp
-	ret
-.size	bn_GF2m_mul_2x2,.-.L_bn_GF2m_mul_2x2_begin
-.byte	71,70,40,50,94,109,41,32,77,117,108,116,105,112,108,105
-.byte	99,97,116,105,111,110,32,102,111,114,32,120,56,54,44,32
-.byte	67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
-.byte	112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
-.byte	62,0
-.comm	OPENSSL_ia32cap_P,8,4

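Side note on the x86-gf2m listing removed above (and its .S replacement earlier in this revision): bn_GF2m_mul_2x2 forms the 128-bit product of two 64-bit binary polynomials Karatsuba-style from three 32x32 carry-less products, dispatching on OPENSSL_ia32cap_P bits between a PCLMULQDQ path (the .byte 102,15,58,68,192,1 sequence), the MMX routine, and the integer-ALU routine that builds a 3-bit window table on the stack. A minimal C model of the same arithmetic, with plain shift-and-xor standing in for the windowed 1x1 routine (function names here are illustrative, not OpenSSL's):

  #include <stdint.h>

  /* Carry-less 32x32 -> 64 multiply over GF(2)[x]; a simple stand-in
   * for _mul_1x1_ialu, which gets the same result from a window table
   * plus fixups for the top bits of a. */
  static uint64_t gf2m_mul_1x1(uint32_t a, uint32_t b)
  {
      uint64_t acc = 0;
      for (int i = 0; i < 32; i++)
          if (b & (1u << i))
              acc ^= (uint64_t)a << i;
      return acc;
  }

  /* Karatsuba combination used by bn_GF2m_mul_2x2: three 1x1 products
   * give the full product of a1:a0 and b1:b0. */
  static void gf2m_mul_2x2(uint32_t r[4], uint32_t a1, uint32_t a0,
                           uint32_t b1, uint32_t b0)
  {
      uint64_t hi  = gf2m_mul_1x1(a1, b1);
      uint64_t lo  = gf2m_mul_1x1(a0, b0);
      uint64_t mid = gf2m_mul_1x1(a0 ^ a1, b0 ^ b1) ^ hi ^ lo;
      r[0] = (uint32_t)lo;
      r[1] = (uint32_t)(lo >> 32) ^ (uint32_t)mid;
      r[2] = (uint32_t)hi ^ (uint32_t)(mid >> 32);
      r[3] = (uint32_t)(hi >> 32);
  }
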
Added: trunk/secure/lib/libcrypto/i386/x86-mont.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/x86-mont.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/x86-mont.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,938 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/x86-mont.S 299966 2016-05-16 19:30:27Z jkim $
+# Do not modify. This file is auto-generated from x86-mont.pl.
+#ifdef PIC
+.file	"x86-mont.S"
+.text
+.globl	bn_mul_mont
+.type	bn_mul_mont,@function
+.align	16
+bn_mul_mont:
+.L_bn_mul_mont_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	xorl	%eax,%eax
+	movl	40(%esp),%edi
+	cmpl	$4,%edi
+	jl	.L000just_leave
+	leal	20(%esp),%esi
+	leal	24(%esp),%edx
+	movl	%esp,%ebp
+	addl	$2,%edi
+	negl	%edi
+	leal	-32(%esp,%edi,4),%esp
+	negl	%edi
+	movl	%esp,%eax
+	subl	%edx,%eax
+	andl	$2047,%eax
+	subl	%eax,%esp
+	xorl	%esp,%edx
+	andl	$2048,%edx
+	xorl	$2048,%edx
+	subl	%edx,%esp
+	andl	$-64,%esp
+	movl	%ebp,%eax
+	subl	%esp,%eax
+	andl	$-4096,%eax
+.L001page_walk:
+	movl	(%esp,%eax,1),%edx
+	subl	$4096,%eax
+.byte	46
+	jnc	.L001page_walk
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	16(%esi),%esi
+	movl	(%esi),%esi
+	movl	%eax,4(%esp)
+	movl	%ebx,8(%esp)
+	movl	%ecx,12(%esp)
+	movl	%edx,16(%esp)
+	movl	%esi,20(%esp)
+	leal	-3(%edi),%ebx
+	movl	%ebp,24(%esp)
+	call	.L002PIC_me_up
+.L002PIC_me_up:
+	popl	%eax
+	leal	_GLOBAL_OFFSET_TABLE_+[.-.L002PIC_me_up](%eax),%eax
+	movl	OPENSSL_ia32cap_P@GOT(%eax),%eax
+	btl	$26,(%eax)
+	jnc	.L003non_sse2
+	movl	$-1,%eax
+	movd	%eax,%mm7
+	movl	8(%esp),%esi
+	movl	12(%esp),%edi
+	movl	16(%esp),%ebp
+	xorl	%edx,%edx
+	xorl	%ecx,%ecx
+	movd	(%edi),%mm4
+	movd	(%esi),%mm5
+	movd	(%ebp),%mm3
+	pmuludq	%mm4,%mm5
+	movq	%mm5,%mm2
+	movq	%mm5,%mm0
+	pand	%mm7,%mm0
+	pmuludq	20(%esp),%mm5
+	pmuludq	%mm5,%mm3
+	paddq	%mm0,%mm3
+	movd	4(%ebp),%mm1
+	movd	4(%esi),%mm0
+	psrlq	$32,%mm2
+	psrlq	$32,%mm3
+	incl	%ecx
+.align	16
+.L0041st:
+	pmuludq	%mm4,%mm0
+	pmuludq	%mm5,%mm1
+	paddq	%mm0,%mm2
+	paddq	%mm1,%mm3
+	movq	%mm2,%mm0
+	pand	%mm7,%mm0
+	movd	4(%ebp,%ecx,4),%mm1
+	paddq	%mm0,%mm3
+	movd	4(%esi,%ecx,4),%mm0
+	psrlq	$32,%mm2
+	movd	%mm3,28(%esp,%ecx,4)
+	psrlq	$32,%mm3
+	leal	1(%ecx),%ecx
+	cmpl	%ebx,%ecx
+	jl	.L0041st
+	pmuludq	%mm4,%mm0
+	pmuludq	%mm5,%mm1
+	paddq	%mm0,%mm2
+	paddq	%mm1,%mm3
+	movq	%mm2,%mm0
+	pand	%mm7,%mm0
+	paddq	%mm0,%mm3
+	movd	%mm3,28(%esp,%ecx,4)
+	psrlq	$32,%mm2
+	psrlq	$32,%mm3
+	paddq	%mm2,%mm3
+	movq	%mm3,32(%esp,%ebx,4)
+	incl	%edx
+.L005outer:
+	xorl	%ecx,%ecx
+	movd	(%edi,%edx,4),%mm4
+	movd	(%esi),%mm5
+	movd	32(%esp),%mm6
+	movd	(%ebp),%mm3
+	pmuludq	%mm4,%mm5
+	paddq	%mm6,%mm5
+	movq	%mm5,%mm0
+	movq	%mm5,%mm2
+	pand	%mm7,%mm0
+	pmuludq	20(%esp),%mm5
+	pmuludq	%mm5,%mm3
+	paddq	%mm0,%mm3
+	movd	36(%esp),%mm6
+	movd	4(%ebp),%mm1
+	movd	4(%esi),%mm0
+	psrlq	$32,%mm2
+	psrlq	$32,%mm3
+	paddq	%mm6,%mm2
+	incl	%ecx
+	decl	%ebx
+.L006inner:
+	pmuludq	%mm4,%mm0
+	pmuludq	%mm5,%mm1
+	paddq	%mm0,%mm2
+	paddq	%mm1,%mm3
+	movq	%mm2,%mm0
+	movd	36(%esp,%ecx,4),%mm6
+	pand	%mm7,%mm0
+	movd	4(%ebp,%ecx,4),%mm1
+	paddq	%mm0,%mm3
+	movd	4(%esi,%ecx,4),%mm0
+	psrlq	$32,%mm2
+	movd	%mm3,28(%esp,%ecx,4)
+	psrlq	$32,%mm3
+	paddq	%mm6,%mm2
+	decl	%ebx
+	leal	1(%ecx),%ecx
+	jnz	.L006inner
+	movl	%ecx,%ebx
+	pmuludq	%mm4,%mm0
+	pmuludq	%mm5,%mm1
+	paddq	%mm0,%mm2
+	paddq	%mm1,%mm3
+	movq	%mm2,%mm0
+	pand	%mm7,%mm0
+	paddq	%mm0,%mm3
+	movd	%mm3,28(%esp,%ecx,4)
+	psrlq	$32,%mm2
+	psrlq	$32,%mm3
+	movd	36(%esp,%ebx,4),%mm6
+	paddq	%mm2,%mm3
+	paddq	%mm6,%mm3
+	movq	%mm3,32(%esp,%ebx,4)
+	leal	1(%edx),%edx
+	cmpl	%ebx,%edx
+	jle	.L005outer
+	emms
+	jmp	.L007common_tail
+.align	16
+.L003non_sse2:
+	movl	8(%esp),%esi
+	leal	1(%ebx),%ebp
+	movl	12(%esp),%edi
+	xorl	%ecx,%ecx
+	movl	%esi,%edx
+	andl	$1,%ebp
+	subl	%edi,%edx
+	leal	4(%edi,%ebx,4),%eax
+	orl	%edx,%ebp
+	movl	(%edi),%edi
+	jz	.L008bn_sqr_mont
+	movl	%eax,28(%esp)
+	movl	(%esi),%eax
+	xorl	%edx,%edx
+.align	16
+.L009mull:
+	movl	%edx,%ebp
+	mull	%edi
+	addl	%eax,%ebp
+	leal	1(%ecx),%ecx
+	adcl	$0,%edx
+	movl	(%esi,%ecx,4),%eax
+	cmpl	%ebx,%ecx
+	movl	%ebp,28(%esp,%ecx,4)
+	jl	.L009mull
+	movl	%edx,%ebp
+	mull	%edi
+	movl	20(%esp),%edi
+	addl	%ebp,%eax
+	movl	16(%esp),%esi
+	adcl	$0,%edx
+	imull	32(%esp),%edi
+	movl	%eax,32(%esp,%ebx,4)
+	xorl	%ecx,%ecx
+	movl	%edx,36(%esp,%ebx,4)
+	movl	%ecx,40(%esp,%ebx,4)
+	movl	(%esi),%eax
+	mull	%edi
+	addl	32(%esp),%eax
+	movl	4(%esi),%eax
+	adcl	$0,%edx
+	incl	%ecx
+	jmp	.L0102ndmadd
+.align	16
+.L0111stmadd:
+	movl	%edx,%ebp
+	mull	%edi
+	addl	32(%esp,%ecx,4),%ebp
+	leal	1(%ecx),%ecx
+	adcl	$0,%edx
+	addl	%eax,%ebp
+	movl	(%esi,%ecx,4),%eax
+	adcl	$0,%edx
+	cmpl	%ebx,%ecx
+	movl	%ebp,28(%esp,%ecx,4)
+	jl	.L0111stmadd
+	movl	%edx,%ebp
+	mull	%edi
+	addl	32(%esp,%ebx,4),%eax
+	movl	20(%esp),%edi
+	adcl	$0,%edx
+	movl	16(%esp),%esi
+	addl	%eax,%ebp
+	adcl	$0,%edx
+	imull	32(%esp),%edi
+	xorl	%ecx,%ecx
+	addl	36(%esp,%ebx,4),%edx
+	movl	%ebp,32(%esp,%ebx,4)
+	adcl	$0,%ecx
+	movl	(%esi),%eax
+	movl	%edx,36(%esp,%ebx,4)
+	movl	%ecx,40(%esp,%ebx,4)
+	mull	%edi
+	addl	32(%esp),%eax
+	movl	4(%esi),%eax
+	adcl	$0,%edx
+	movl	$1,%ecx
+.align	16
+.L0102ndmadd:
+	movl	%edx,%ebp
+	mull	%edi
+	addl	32(%esp,%ecx,4),%ebp
+	leal	1(%ecx),%ecx
+	adcl	$0,%edx
+	addl	%eax,%ebp
+	movl	(%esi,%ecx,4),%eax
+	adcl	$0,%edx
+	cmpl	%ebx,%ecx
+	movl	%ebp,24(%esp,%ecx,4)
+	jl	.L0102ndmadd
+	movl	%edx,%ebp
+	mull	%edi
+	addl	32(%esp,%ebx,4),%ebp
+	adcl	$0,%edx
+	addl	%eax,%ebp
+	adcl	$0,%edx
+	movl	%ebp,28(%esp,%ebx,4)
+	xorl	%eax,%eax
+	movl	12(%esp),%ecx
+	addl	36(%esp,%ebx,4),%edx
+	adcl	40(%esp,%ebx,4),%eax
+	leal	4(%ecx),%ecx
+	movl	%edx,32(%esp,%ebx,4)
+	cmpl	28(%esp),%ecx
+	movl	%eax,36(%esp,%ebx,4)
+	je	.L007common_tail
+	movl	(%ecx),%edi
+	movl	8(%esp),%esi
+	movl	%ecx,12(%esp)
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+	movl	(%esi),%eax
+	jmp	.L0111stmadd
+.align	16
+.L008bn_sqr_mont:
+	movl	%ebx,(%esp)
+	movl	%ecx,12(%esp)
+	movl	%edi,%eax
+	mull	%edi
+	movl	%eax,32(%esp)
+	movl	%edx,%ebx
+	shrl	$1,%edx
+	andl	$1,%ebx
+	incl	%ecx
+.align	16
+.L012sqr:
+	movl	(%esi,%ecx,4),%eax
+	movl	%edx,%ebp
+	mull	%edi
+	addl	%ebp,%eax
+	leal	1(%ecx),%ecx
+	adcl	$0,%edx
+	leal	(%ebx,%eax,2),%ebp
+	shrl	$31,%eax
+	cmpl	(%esp),%ecx
+	movl	%eax,%ebx
+	movl	%ebp,28(%esp,%ecx,4)
+	jl	.L012sqr
+	movl	(%esi,%ecx,4),%eax
+	movl	%edx,%ebp
+	mull	%edi
+	addl	%ebp,%eax
+	movl	20(%esp),%edi
+	adcl	$0,%edx
+	movl	16(%esp),%esi
+	leal	(%ebx,%eax,2),%ebp
+	imull	32(%esp),%edi
+	shrl	$31,%eax
+	movl	%ebp,32(%esp,%ecx,4)
+	leal	(%eax,%edx,2),%ebp
+	movl	(%esi),%eax
+	shrl	$31,%edx
+	movl	%ebp,36(%esp,%ecx,4)
+	movl	%edx,40(%esp,%ecx,4)
+	mull	%edi
+	addl	32(%esp),%eax
+	movl	%ecx,%ebx
+	adcl	$0,%edx
+	movl	4(%esi),%eax
+	movl	$1,%ecx
+.align	16
+.L0133rdmadd:
+	movl	%edx,%ebp
+	mull	%edi
+	addl	32(%esp,%ecx,4),%ebp
+	adcl	$0,%edx
+	addl	%eax,%ebp
+	movl	4(%esi,%ecx,4),%eax
+	adcl	$0,%edx
+	movl	%ebp,28(%esp,%ecx,4)
+	movl	%edx,%ebp
+	mull	%edi
+	addl	36(%esp,%ecx,4),%ebp
+	leal	2(%ecx),%ecx
+	adcl	$0,%edx
+	addl	%eax,%ebp
+	movl	(%esi,%ecx,4),%eax
+	adcl	$0,%edx
+	cmpl	%ebx,%ecx
+	movl	%ebp,24(%esp,%ecx,4)
+	jl	.L0133rdmadd
+	movl	%edx,%ebp
+	mull	%edi
+	addl	32(%esp,%ebx,4),%ebp
+	adcl	$0,%edx
+	addl	%eax,%ebp
+	adcl	$0,%edx
+	movl	%ebp,28(%esp,%ebx,4)
+	movl	12(%esp),%ecx
+	xorl	%eax,%eax
+	movl	8(%esp),%esi
+	addl	36(%esp,%ebx,4),%edx
+	adcl	40(%esp,%ebx,4),%eax
+	movl	%edx,32(%esp,%ebx,4)
+	cmpl	%ebx,%ecx
+	movl	%eax,36(%esp,%ebx,4)
+	je	.L007common_tail
+	movl	4(%esi,%ecx,4),%edi
+	leal	1(%ecx),%ecx
+	movl	%edi,%eax
+	movl	%ecx,12(%esp)
+	mull	%edi
+	addl	32(%esp,%ecx,4),%eax
+	adcl	$0,%edx
+	movl	%eax,32(%esp,%ecx,4)
+	xorl	%ebp,%ebp
+	cmpl	%ebx,%ecx
+	leal	1(%ecx),%ecx
+	je	.L014sqrlast
+	movl	%edx,%ebx
+	shrl	$1,%edx
+	andl	$1,%ebx
+.align	16
+.L015sqradd:
+	movl	(%esi,%ecx,4),%eax
+	movl	%edx,%ebp
+	mull	%edi
+	addl	%ebp,%eax
+	leal	(%eax,%eax,1),%ebp
+	adcl	$0,%edx
+	shrl	$31,%eax
+	addl	32(%esp,%ecx,4),%ebp
+	leal	1(%ecx),%ecx
+	adcl	$0,%eax
+	addl	%ebx,%ebp
+	adcl	$0,%eax
+	cmpl	(%esp),%ecx
+	movl	%ebp,28(%esp,%ecx,4)
+	movl	%eax,%ebx
+	jle	.L015sqradd
+	movl	%edx,%ebp
+	addl	%edx,%edx
+	shrl	$31,%ebp
+	addl	%ebx,%edx
+	adcl	$0,%ebp
+.L014sqrlast:
+	movl	20(%esp),%edi
+	movl	16(%esp),%esi
+	imull	32(%esp),%edi
+	addl	32(%esp,%ecx,4),%edx
+	movl	(%esi),%eax
+	adcl	$0,%ebp
+	movl	%edx,32(%esp,%ecx,4)
+	movl	%ebp,36(%esp,%ecx,4)
+	mull	%edi
+	addl	32(%esp),%eax
+	leal	-1(%ecx),%ebx
+	adcl	$0,%edx
+	movl	$1,%ecx
+	movl	4(%esi),%eax
+	jmp	.L0133rdmadd
+.align	16
+.L007common_tail:
+	movl	16(%esp),%ebp
+	movl	4(%esp),%edi
+	leal	32(%esp),%esi
+	movl	(%esi),%eax
+	movl	%ebx,%ecx
+	xorl	%edx,%edx
+.align	16
+.L016sub:
+	sbbl	(%ebp,%edx,4),%eax
+	movl	%eax,(%edi,%edx,4)
+	decl	%ecx
+	movl	4(%esi,%edx,4),%eax
+	leal	1(%edx),%edx
+	jge	.L016sub
+	sbbl	$0,%eax
+	andl	%eax,%esi
+	notl	%eax
+	movl	%edi,%ebp
+	andl	%eax,%ebp
+	orl	%ebp,%esi
+.align	16
+.L017copy:
+	movl	(%esi,%ebx,4),%eax
+	movl	%eax,(%edi,%ebx,4)
+	movl	%ecx,32(%esp,%ebx,4)
+	decl	%ebx
+	jge	.L017copy
+	movl	24(%esp),%esp
+	movl	$1,%eax
+.L000just_leave:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	bn_mul_mont,.-.L_bn_mul_mont_begin
+.byte	77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105
+.byte	112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56
+.byte	54,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121
+.byte	32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46
+.byte	111,114,103,62,0
+.comm	OPENSSL_ia32cap_P,8,4
+#else
+.file	"x86-mont.S"
+.text
+.globl	bn_mul_mont
+.type	bn_mul_mont,@function
+.align	16
+bn_mul_mont:
+.L_bn_mul_mont_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	xorl	%eax,%eax
+	movl	40(%esp),%edi
+	cmpl	$4,%edi
+	jl	.L000just_leave
+	leal	20(%esp),%esi
+	leal	24(%esp),%edx
+	movl	%esp,%ebp
+	addl	$2,%edi
+	negl	%edi
+	leal	-32(%esp,%edi,4),%esp
+	negl	%edi
+	movl	%esp,%eax
+	subl	%edx,%eax
+	andl	$2047,%eax
+	subl	%eax,%esp
+	xorl	%esp,%edx
+	andl	$2048,%edx
+	xorl	$2048,%edx
+	subl	%edx,%esp
+	andl	$-64,%esp
+	movl	%ebp,%eax
+	subl	%esp,%eax
+	andl	$-4096,%eax
+.L001page_walk:
+	movl	(%esp,%eax,1),%edx
+	subl	$4096,%eax
+.byte	46
+	jnc	.L001page_walk
+	movl	(%esi),%eax
+	movl	4(%esi),%ebx
+	movl	8(%esi),%ecx
+	movl	12(%esi),%edx
+	movl	16(%esi),%esi
+	movl	(%esi),%esi
+	movl	%eax,4(%esp)
+	movl	%ebx,8(%esp)
+	movl	%ecx,12(%esp)
+	movl	%edx,16(%esp)
+	movl	%esi,20(%esp)
+	leal	-3(%edi),%ebx
+	movl	%ebp,24(%esp)
+	leal	OPENSSL_ia32cap_P,%eax
+	btl	$26,(%eax)
+	jnc	.L002non_sse2
+	movl	$-1,%eax
+	movd	%eax,%mm7
+	movl	8(%esp),%esi
+	movl	12(%esp),%edi
+	movl	16(%esp),%ebp
+	xorl	%edx,%edx
+	xorl	%ecx,%ecx
+	movd	(%edi),%mm4
+	movd	(%esi),%mm5
+	movd	(%ebp),%mm3
+	pmuludq	%mm4,%mm5
+	movq	%mm5,%mm2
+	movq	%mm5,%mm0
+	pand	%mm7,%mm0
+	pmuludq	20(%esp),%mm5
+	pmuludq	%mm5,%mm3
+	paddq	%mm0,%mm3
+	movd	4(%ebp),%mm1
+	movd	4(%esi),%mm0
+	psrlq	$32,%mm2
+	psrlq	$32,%mm3
+	incl	%ecx
+.align	16
+.L0031st:
+	pmuludq	%mm4,%mm0
+	pmuludq	%mm5,%mm1
+	paddq	%mm0,%mm2
+	paddq	%mm1,%mm3
+	movq	%mm2,%mm0
+	pand	%mm7,%mm0
+	movd	4(%ebp,%ecx,4),%mm1
+	paddq	%mm0,%mm3
+	movd	4(%esi,%ecx,4),%mm0
+	psrlq	$32,%mm2
+	movd	%mm3,28(%esp,%ecx,4)
+	psrlq	$32,%mm3
+	leal	1(%ecx),%ecx
+	cmpl	%ebx,%ecx
+	jl	.L0031st
+	pmuludq	%mm4,%mm0
+	pmuludq	%mm5,%mm1
+	paddq	%mm0,%mm2
+	paddq	%mm1,%mm3
+	movq	%mm2,%mm0
+	pand	%mm7,%mm0
+	paddq	%mm0,%mm3
+	movd	%mm3,28(%esp,%ecx,4)
+	psrlq	$32,%mm2
+	psrlq	$32,%mm3
+	paddq	%mm2,%mm3
+	movq	%mm3,32(%esp,%ebx,4)
+	incl	%edx
+.L004outer:
+	xorl	%ecx,%ecx
+	movd	(%edi,%edx,4),%mm4
+	movd	(%esi),%mm5
+	movd	32(%esp),%mm6
+	movd	(%ebp),%mm3
+	pmuludq	%mm4,%mm5
+	paddq	%mm6,%mm5
+	movq	%mm5,%mm0
+	movq	%mm5,%mm2
+	pand	%mm7,%mm0
+	pmuludq	20(%esp),%mm5
+	pmuludq	%mm5,%mm3
+	paddq	%mm0,%mm3
+	movd	36(%esp),%mm6
+	movd	4(%ebp),%mm1
+	movd	4(%esi),%mm0
+	psrlq	$32,%mm2
+	psrlq	$32,%mm3
+	paddq	%mm6,%mm2
+	incl	%ecx
+	decl	%ebx
+.L005inner:
+	pmuludq	%mm4,%mm0
+	pmuludq	%mm5,%mm1
+	paddq	%mm0,%mm2
+	paddq	%mm1,%mm3
+	movq	%mm2,%mm0
+	movd	36(%esp,%ecx,4),%mm6
+	pand	%mm7,%mm0
+	movd	4(%ebp,%ecx,4),%mm1
+	paddq	%mm0,%mm3
+	movd	4(%esi,%ecx,4),%mm0
+	psrlq	$32,%mm2
+	movd	%mm3,28(%esp,%ecx,4)
+	psrlq	$32,%mm3
+	paddq	%mm6,%mm2
+	decl	%ebx
+	leal	1(%ecx),%ecx
+	jnz	.L005inner
+	movl	%ecx,%ebx
+	pmuludq	%mm4,%mm0
+	pmuludq	%mm5,%mm1
+	paddq	%mm0,%mm2
+	paddq	%mm1,%mm3
+	movq	%mm2,%mm0
+	pand	%mm7,%mm0
+	paddq	%mm0,%mm3
+	movd	%mm3,28(%esp,%ecx,4)
+	psrlq	$32,%mm2
+	psrlq	$32,%mm3
+	movd	36(%esp,%ebx,4),%mm6
+	paddq	%mm2,%mm3
+	paddq	%mm6,%mm3
+	movq	%mm3,32(%esp,%ebx,4)
+	leal	1(%edx),%edx
+	cmpl	%ebx,%edx
+	jle	.L004outer
+	emms
+	jmp	.L006common_tail
+.align	16
+.L002non_sse2:
+	movl	8(%esp),%esi
+	leal	1(%ebx),%ebp
+	movl	12(%esp),%edi
+	xorl	%ecx,%ecx
+	movl	%esi,%edx
+	andl	$1,%ebp
+	subl	%edi,%edx
+	leal	4(%edi,%ebx,4),%eax
+	orl	%edx,%ebp
+	movl	(%edi),%edi
+	jz	.L007bn_sqr_mont
+	movl	%eax,28(%esp)
+	movl	(%esi),%eax
+	xorl	%edx,%edx
+.align	16
+.L008mull:
+	movl	%edx,%ebp
+	mull	%edi
+	addl	%eax,%ebp
+	leal	1(%ecx),%ecx
+	adcl	$0,%edx
+	movl	(%esi,%ecx,4),%eax
+	cmpl	%ebx,%ecx
+	movl	%ebp,28(%esp,%ecx,4)
+	jl	.L008mull
+	movl	%edx,%ebp
+	mull	%edi
+	movl	20(%esp),%edi
+	addl	%ebp,%eax
+	movl	16(%esp),%esi
+	adcl	$0,%edx
+	imull	32(%esp),%edi
+	movl	%eax,32(%esp,%ebx,4)
+	xorl	%ecx,%ecx
+	movl	%edx,36(%esp,%ebx,4)
+	movl	%ecx,40(%esp,%ebx,4)
+	movl	(%esi),%eax
+	mull	%edi
+	addl	32(%esp),%eax
+	movl	4(%esi),%eax
+	adcl	$0,%edx
+	incl	%ecx
+	jmp	.L0092ndmadd
+.align	16
+.L0101stmadd:
+	movl	%edx,%ebp
+	mull	%edi
+	addl	32(%esp,%ecx,4),%ebp
+	leal	1(%ecx),%ecx
+	adcl	$0,%edx
+	addl	%eax,%ebp
+	movl	(%esi,%ecx,4),%eax
+	adcl	$0,%edx
+	cmpl	%ebx,%ecx
+	movl	%ebp,28(%esp,%ecx,4)
+	jl	.L0101stmadd
+	movl	%edx,%ebp
+	mull	%edi
+	addl	32(%esp,%ebx,4),%eax
+	movl	20(%esp),%edi
+	adcl	$0,%edx
+	movl	16(%esp),%esi
+	addl	%eax,%ebp
+	adcl	$0,%edx
+	imull	32(%esp),%edi
+	xorl	%ecx,%ecx
+	addl	36(%esp,%ebx,4),%edx
+	movl	%ebp,32(%esp,%ebx,4)
+	adcl	$0,%ecx
+	movl	(%esi),%eax
+	movl	%edx,36(%esp,%ebx,4)
+	movl	%ecx,40(%esp,%ebx,4)
+	mull	%edi
+	addl	32(%esp),%eax
+	movl	4(%esi),%eax
+	adcl	$0,%edx
+	movl	$1,%ecx
+.align	16
+.L0092ndmadd:
+	movl	%edx,%ebp
+	mull	%edi
+	addl	32(%esp,%ecx,4),%ebp
+	leal	1(%ecx),%ecx
+	adcl	$0,%edx
+	addl	%eax,%ebp
+	movl	(%esi,%ecx,4),%eax
+	adcl	$0,%edx
+	cmpl	%ebx,%ecx
+	movl	%ebp,24(%esp,%ecx,4)
+	jl	.L0092ndmadd
+	movl	%edx,%ebp
+	mull	%edi
+	addl	32(%esp,%ebx,4),%ebp
+	adcl	$0,%edx
+	addl	%eax,%ebp
+	adcl	$0,%edx
+	movl	%ebp,28(%esp,%ebx,4)
+	xorl	%eax,%eax
+	movl	12(%esp),%ecx
+	addl	36(%esp,%ebx,4),%edx
+	adcl	40(%esp,%ebx,4),%eax
+	leal	4(%ecx),%ecx
+	movl	%edx,32(%esp,%ebx,4)
+	cmpl	28(%esp),%ecx
+	movl	%eax,36(%esp,%ebx,4)
+	je	.L006common_tail
+	movl	(%ecx),%edi
+	movl	8(%esp),%esi
+	movl	%ecx,12(%esp)
+	xorl	%ecx,%ecx
+	xorl	%edx,%edx
+	movl	(%esi),%eax
+	jmp	.L0101stmadd
+.align	16
+.L007bn_sqr_mont:
+	movl	%ebx,(%esp)
+	movl	%ecx,12(%esp)
+	movl	%edi,%eax
+	mull	%edi
+	movl	%eax,32(%esp)
+	movl	%edx,%ebx
+	shrl	$1,%edx
+	andl	$1,%ebx
+	incl	%ecx
+.align	16
+.L011sqr:
+	movl	(%esi,%ecx,4),%eax
+	movl	%edx,%ebp
+	mull	%edi
+	addl	%ebp,%eax
+	leal	1(%ecx),%ecx
+	adcl	$0,%edx
+	leal	(%ebx,%eax,2),%ebp
+	shrl	$31,%eax
+	cmpl	(%esp),%ecx
+	movl	%eax,%ebx
+	movl	%ebp,28(%esp,%ecx,4)
+	jl	.L011sqr
+	movl	(%esi,%ecx,4),%eax
+	movl	%edx,%ebp
+	mull	%edi
+	addl	%ebp,%eax
+	movl	20(%esp),%edi
+	adcl	$0,%edx
+	movl	16(%esp),%esi
+	leal	(%ebx,%eax,2),%ebp
+	imull	32(%esp),%edi
+	shrl	$31,%eax
+	movl	%ebp,32(%esp,%ecx,4)
+	leal	(%eax,%edx,2),%ebp
+	movl	(%esi),%eax
+	shrl	$31,%edx
+	movl	%ebp,36(%esp,%ecx,4)
+	movl	%edx,40(%esp,%ecx,4)
+	mull	%edi
+	addl	32(%esp),%eax
+	movl	%ecx,%ebx
+	adcl	$0,%edx
+	movl	4(%esi),%eax
+	movl	$1,%ecx
+.align	16
+.L0123rdmadd:
+	movl	%edx,%ebp
+	mull	%edi
+	addl	32(%esp,%ecx,4),%ebp
+	adcl	$0,%edx
+	addl	%eax,%ebp
+	movl	4(%esi,%ecx,4),%eax
+	adcl	$0,%edx
+	movl	%ebp,28(%esp,%ecx,4)
+	movl	%edx,%ebp
+	mull	%edi
+	addl	36(%esp,%ecx,4),%ebp
+	leal	2(%ecx),%ecx
+	adcl	$0,%edx
+	addl	%eax,%ebp
+	movl	(%esi,%ecx,4),%eax
+	adcl	$0,%edx
+	cmpl	%ebx,%ecx
+	movl	%ebp,24(%esp,%ecx,4)
+	jl	.L0123rdmadd
+	movl	%edx,%ebp
+	mull	%edi
+	addl	32(%esp,%ebx,4),%ebp
+	adcl	$0,%edx
+	addl	%eax,%ebp
+	adcl	$0,%edx
+	movl	%ebp,28(%esp,%ebx,4)
+	movl	12(%esp),%ecx
+	xorl	%eax,%eax
+	movl	8(%esp),%esi
+	addl	36(%esp,%ebx,4),%edx
+	adcl	40(%esp,%ebx,4),%eax
+	movl	%edx,32(%esp,%ebx,4)
+	cmpl	%ebx,%ecx
+	movl	%eax,36(%esp,%ebx,4)
+	je	.L006common_tail
+	movl	4(%esi,%ecx,4),%edi
+	leal	1(%ecx),%ecx
+	movl	%edi,%eax
+	movl	%ecx,12(%esp)
+	mull	%edi
+	addl	32(%esp,%ecx,4),%eax
+	adcl	$0,%edx
+	movl	%eax,32(%esp,%ecx,4)
+	xorl	%ebp,%ebp
+	cmpl	%ebx,%ecx
+	leal	1(%ecx),%ecx
+	je	.L013sqrlast
+	movl	%edx,%ebx
+	shrl	$1,%edx
+	andl	$1,%ebx
+.align	16
+.L014sqradd:
+	movl	(%esi,%ecx,4),%eax
+	movl	%edx,%ebp
+	mull	%edi
+	addl	%ebp,%eax
+	leal	(%eax,%eax,1),%ebp
+	adcl	$0,%edx
+	shrl	$31,%eax
+	addl	32(%esp,%ecx,4),%ebp
+	leal	1(%ecx),%ecx
+	adcl	$0,%eax
+	addl	%ebx,%ebp
+	adcl	$0,%eax
+	cmpl	(%esp),%ecx
+	movl	%ebp,28(%esp,%ecx,4)
+	movl	%eax,%ebx
+	jle	.L014sqradd
+	movl	%edx,%ebp
+	addl	%edx,%edx
+	shrl	$31,%ebp
+	addl	%ebx,%edx
+	adcl	$0,%ebp
+.L013sqrlast:
+	movl	20(%esp),%edi
+	movl	16(%esp),%esi
+	imull	32(%esp),%edi
+	addl	32(%esp,%ecx,4),%edx
+	movl	(%esi),%eax
+	adcl	$0,%ebp
+	movl	%edx,32(%esp,%ecx,4)
+	movl	%ebp,36(%esp,%ecx,4)
+	mull	%edi
+	addl	32(%esp),%eax
+	leal	-1(%ecx),%ebx
+	adcl	$0,%edx
+	movl	$1,%ecx
+	movl	4(%esi),%eax
+	jmp	.L0123rdmadd
+.align	16
+.L006common_tail:
+	movl	16(%esp),%ebp
+	movl	4(%esp),%edi
+	leal	32(%esp),%esi
+	movl	(%esi),%eax
+	movl	%ebx,%ecx
+	xorl	%edx,%edx
+.align	16
+.L015sub:
+	sbbl	(%ebp,%edx,4),%eax
+	movl	%eax,(%edi,%edx,4)
+	decl	%ecx
+	movl	4(%esi,%edx,4),%eax
+	leal	1(%edx),%edx
+	jge	.L015sub
+	sbbl	$0,%eax
+	andl	%eax,%esi
+	notl	%eax
+	movl	%edi,%ebp
+	andl	%eax,%ebp
+	orl	%ebp,%esi
+.align	16
+.L016copy:
+	movl	(%esi,%ebx,4),%eax
+	movl	%eax,(%edi,%ebx,4)
+	movl	%ecx,32(%esp,%ebx,4)
+	decl	%ebx
+	jge	.L016copy
+	movl	24(%esp),%esp
+	movl	$1,%eax
+.L000just_leave:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	bn_mul_mont,.-.L_bn_mul_mont_begin
+.byte	77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105
+.byte	112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56
+.byte	54,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121
+.byte	32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46
+.byte	111,114,103,62,0
+.comm	OPENSSL_ia32cap_P,8,4
+#endif


Property changes on: trunk/secure/lib/libcrypto/i386/x86-mont.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
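
One functional difference between the new x86-mont.S above and the old x86-mont.s deleted below: after relocating %esp for its scratch area, bn_mul_mont now probes that area one page at a time (.L001page_walk) before using it, so guard-page stack growth sees sequential touches rather than one large jump. A rough C model of the probe, assuming 4 KiB pages (illustrative only):

  #include <stddef.h>

  /* Touch one byte in every page from the old stack pointer down to
   * the new one, highest offset first, as .L001page_walk does. */
  static void page_walk(volatile char *new_sp, size_t span)
  {
      for (size_t off = span & ~(size_t)4095; ; off -= 4096) {
          (void)new_sp[off];            /* probe one byte per page */
          if (off < 4096)
              break;
      }
  }
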
Deleted: trunk/secure/lib/libcrypto/i386/x86-mont.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/x86-mont.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/x86-mont.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,457 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/x86-mont.s 238405 2012-07-12 19:30:53Z jkim $
-.file	"x86-mont.s"
-.text
-.globl	bn_mul_mont
-.type	bn_mul_mont,@function
-.align	16
-bn_mul_mont:
-.L_bn_mul_mont_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	xorl	%eax,%eax
-	movl	40(%esp),%edi
-	cmpl	$4,%edi
-	jl	.L000just_leave
-	leal	20(%esp),%esi
-	leal	24(%esp),%edx
-	movl	%esp,%ebp
-	addl	$2,%edi
-	negl	%edi
-	leal	-32(%esp,%edi,4),%esp
-	negl	%edi
-	movl	%esp,%eax
-	subl	%edx,%eax
-	andl	$2047,%eax
-	subl	%eax,%esp
-	xorl	%esp,%edx
-	andl	$2048,%edx
-	xorl	$2048,%edx
-	subl	%edx,%esp
-	andl	$-64,%esp
-	movl	(%esi),%eax
-	movl	4(%esi),%ebx
-	movl	8(%esi),%ecx
-	movl	12(%esi),%edx
-	movl	16(%esi),%esi
-	movl	(%esi),%esi
-	movl	%eax,4(%esp)
-	movl	%ebx,8(%esp)
-	movl	%ecx,12(%esp)
-	movl	%edx,16(%esp)
-	movl	%esi,20(%esp)
-	leal	-3(%edi),%ebx
-	movl	%ebp,24(%esp)
-	leal	OPENSSL_ia32cap_P,%eax
-	btl	$26,(%eax)
-	jnc	.L001non_sse2
-	movl	$-1,%eax
-	movd	%eax,%mm7
-	movl	8(%esp),%esi
-	movl	12(%esp),%edi
-	movl	16(%esp),%ebp
-	xorl	%edx,%edx
-	xorl	%ecx,%ecx
-	movd	(%edi),%mm4
-	movd	(%esi),%mm5
-	movd	(%ebp),%mm3
-	pmuludq	%mm4,%mm5
-	movq	%mm5,%mm2
-	movq	%mm5,%mm0
-	pand	%mm7,%mm0
-	pmuludq	20(%esp),%mm5
-	pmuludq	%mm5,%mm3
-	paddq	%mm0,%mm3
-	movd	4(%ebp),%mm1
-	movd	4(%esi),%mm0
-	psrlq	$32,%mm2
-	psrlq	$32,%mm3
-	incl	%ecx
-.align	16
-.L0021st:
-	pmuludq	%mm4,%mm0
-	pmuludq	%mm5,%mm1
-	paddq	%mm0,%mm2
-	paddq	%mm1,%mm3
-	movq	%mm2,%mm0
-	pand	%mm7,%mm0
-	movd	4(%ebp,%ecx,4),%mm1
-	paddq	%mm0,%mm3
-	movd	4(%esi,%ecx,4),%mm0
-	psrlq	$32,%mm2
-	movd	%mm3,28(%esp,%ecx,4)
-	psrlq	$32,%mm3
-	leal	1(%ecx),%ecx
-	cmpl	%ebx,%ecx
-	jl	.L0021st
-	pmuludq	%mm4,%mm0
-	pmuludq	%mm5,%mm1
-	paddq	%mm0,%mm2
-	paddq	%mm1,%mm3
-	movq	%mm2,%mm0
-	pand	%mm7,%mm0
-	paddq	%mm0,%mm3
-	movd	%mm3,28(%esp,%ecx,4)
-	psrlq	$32,%mm2
-	psrlq	$32,%mm3
-	paddq	%mm2,%mm3
-	movq	%mm3,32(%esp,%ebx,4)
-	incl	%edx
-.L003outer:
-	xorl	%ecx,%ecx
-	movd	(%edi,%edx,4),%mm4
-	movd	(%esi),%mm5
-	movd	32(%esp),%mm6
-	movd	(%ebp),%mm3
-	pmuludq	%mm4,%mm5
-	paddq	%mm6,%mm5
-	movq	%mm5,%mm0
-	movq	%mm5,%mm2
-	pand	%mm7,%mm0
-	pmuludq	20(%esp),%mm5
-	pmuludq	%mm5,%mm3
-	paddq	%mm0,%mm3
-	movd	36(%esp),%mm6
-	movd	4(%ebp),%mm1
-	movd	4(%esi),%mm0
-	psrlq	$32,%mm2
-	psrlq	$32,%mm3
-	paddq	%mm6,%mm2
-	incl	%ecx
-	decl	%ebx
-.L004inner:
-	pmuludq	%mm4,%mm0
-	pmuludq	%mm5,%mm1
-	paddq	%mm0,%mm2
-	paddq	%mm1,%mm3
-	movq	%mm2,%mm0
-	movd	36(%esp,%ecx,4),%mm6
-	pand	%mm7,%mm0
-	movd	4(%ebp,%ecx,4),%mm1
-	paddq	%mm0,%mm3
-	movd	4(%esi,%ecx,4),%mm0
-	psrlq	$32,%mm2
-	movd	%mm3,28(%esp,%ecx,4)
-	psrlq	$32,%mm3
-	paddq	%mm6,%mm2
-	decl	%ebx
-	leal	1(%ecx),%ecx
-	jnz	.L004inner
-	movl	%ecx,%ebx
-	pmuludq	%mm4,%mm0
-	pmuludq	%mm5,%mm1
-	paddq	%mm0,%mm2
-	paddq	%mm1,%mm3
-	movq	%mm2,%mm0
-	pand	%mm7,%mm0
-	paddq	%mm0,%mm3
-	movd	%mm3,28(%esp,%ecx,4)
-	psrlq	$32,%mm2
-	psrlq	$32,%mm3
-	movd	36(%esp,%ebx,4),%mm6
-	paddq	%mm2,%mm3
-	paddq	%mm6,%mm3
-	movq	%mm3,32(%esp,%ebx,4)
-	leal	1(%edx),%edx
-	cmpl	%ebx,%edx
-	jle	.L003outer
-	emms
-	jmp	.L005common_tail
-.align	16
-.L001non_sse2:
-	movl	8(%esp),%esi
-	leal	1(%ebx),%ebp
-	movl	12(%esp),%edi
-	xorl	%ecx,%ecx
-	movl	%esi,%edx
-	andl	$1,%ebp
-	subl	%edi,%edx
-	leal	4(%edi,%ebx,4),%eax
-	orl	%edx,%ebp
-	movl	(%edi),%edi
-	jz	.L006bn_sqr_mont
-	movl	%eax,28(%esp)
-	movl	(%esi),%eax
-	xorl	%edx,%edx
-.align	16
-.L007mull:
-	movl	%edx,%ebp
-	mull	%edi
-	addl	%eax,%ebp
-	leal	1(%ecx),%ecx
-	adcl	$0,%edx
-	movl	(%esi,%ecx,4),%eax
-	cmpl	%ebx,%ecx
-	movl	%ebp,28(%esp,%ecx,4)
-	jl	.L007mull
-	movl	%edx,%ebp
-	mull	%edi
-	movl	20(%esp),%edi
-	addl	%ebp,%eax
-	movl	16(%esp),%esi
-	adcl	$0,%edx
-	imull	32(%esp),%edi
-	movl	%eax,32(%esp,%ebx,4)
-	xorl	%ecx,%ecx
-	movl	%edx,36(%esp,%ebx,4)
-	movl	%ecx,40(%esp,%ebx,4)
-	movl	(%esi),%eax
-	mull	%edi
-	addl	32(%esp),%eax
-	movl	4(%esi),%eax
-	adcl	$0,%edx
-	incl	%ecx
-	jmp	.L0082ndmadd
-.align	16
-.L0091stmadd:
-	movl	%edx,%ebp
-	mull	%edi
-	addl	32(%esp,%ecx,4),%ebp
-	leal	1(%ecx),%ecx
-	adcl	$0,%edx
-	addl	%eax,%ebp
-	movl	(%esi,%ecx,4),%eax
-	adcl	$0,%edx
-	cmpl	%ebx,%ecx
-	movl	%ebp,28(%esp,%ecx,4)
-	jl	.L0091stmadd
-	movl	%edx,%ebp
-	mull	%edi
-	addl	32(%esp,%ebx,4),%eax
-	movl	20(%esp),%edi
-	adcl	$0,%edx
-	movl	16(%esp),%esi
-	addl	%eax,%ebp
-	adcl	$0,%edx
-	imull	32(%esp),%edi
-	xorl	%ecx,%ecx
-	addl	36(%esp,%ebx,4),%edx
-	movl	%ebp,32(%esp,%ebx,4)
-	adcl	$0,%ecx
-	movl	(%esi),%eax
-	movl	%edx,36(%esp,%ebx,4)
-	movl	%ecx,40(%esp,%ebx,4)
-	mull	%edi
-	addl	32(%esp),%eax
-	movl	4(%esi),%eax
-	adcl	$0,%edx
-	movl	$1,%ecx
-.align	16
-.L0082ndmadd:
-	movl	%edx,%ebp
-	mull	%edi
-	addl	32(%esp,%ecx,4),%ebp
-	leal	1(%ecx),%ecx
-	adcl	$0,%edx
-	addl	%eax,%ebp
-	movl	(%esi,%ecx,4),%eax
-	adcl	$0,%edx
-	cmpl	%ebx,%ecx
-	movl	%ebp,24(%esp,%ecx,4)
-	jl	.L0082ndmadd
-	movl	%edx,%ebp
-	mull	%edi
-	addl	32(%esp,%ebx,4),%ebp
-	adcl	$0,%edx
-	addl	%eax,%ebp
-	adcl	$0,%edx
-	movl	%ebp,28(%esp,%ebx,4)
-	xorl	%eax,%eax
-	movl	12(%esp),%ecx
-	addl	36(%esp,%ebx,4),%edx
-	adcl	40(%esp,%ebx,4),%eax
-	leal	4(%ecx),%ecx
-	movl	%edx,32(%esp,%ebx,4)
-	cmpl	28(%esp),%ecx
-	movl	%eax,36(%esp,%ebx,4)
-	je	.L005common_tail
-	movl	(%ecx),%edi
-	movl	8(%esp),%esi
-	movl	%ecx,12(%esp)
-	xorl	%ecx,%ecx
-	xorl	%edx,%edx
-	movl	(%esi),%eax
-	jmp	.L0091stmadd
-.align	16
-.L006bn_sqr_mont:
-	movl	%ebx,(%esp)
-	movl	%ecx,12(%esp)
-	movl	%edi,%eax
-	mull	%edi
-	movl	%eax,32(%esp)
-	movl	%edx,%ebx
-	shrl	$1,%edx
-	andl	$1,%ebx
-	incl	%ecx
-.align	16
-.L010sqr:
-	movl	(%esi,%ecx,4),%eax
-	movl	%edx,%ebp
-	mull	%edi
-	addl	%ebp,%eax
-	leal	1(%ecx),%ecx
-	adcl	$0,%edx
-	leal	(%ebx,%eax,2),%ebp
-	shrl	$31,%eax
-	cmpl	(%esp),%ecx
-	movl	%eax,%ebx
-	movl	%ebp,28(%esp,%ecx,4)
-	jl	.L010sqr
-	movl	(%esi,%ecx,4),%eax
-	movl	%edx,%ebp
-	mull	%edi
-	addl	%ebp,%eax
-	movl	20(%esp),%edi
-	adcl	$0,%edx
-	movl	16(%esp),%esi
-	leal	(%ebx,%eax,2),%ebp
-	imull	32(%esp),%edi
-	shrl	$31,%eax
-	movl	%ebp,32(%esp,%ecx,4)
-	leal	(%eax,%edx,2),%ebp
-	movl	(%esi),%eax
-	shrl	$31,%edx
-	movl	%ebp,36(%esp,%ecx,4)
-	movl	%edx,40(%esp,%ecx,4)
-	mull	%edi
-	addl	32(%esp),%eax
-	movl	%ecx,%ebx
-	adcl	$0,%edx
-	movl	4(%esi),%eax
-	movl	$1,%ecx
-.align	16
-.L0113rdmadd:
-	movl	%edx,%ebp
-	mull	%edi
-	addl	32(%esp,%ecx,4),%ebp
-	adcl	$0,%edx
-	addl	%eax,%ebp
-	movl	4(%esi,%ecx,4),%eax
-	adcl	$0,%edx
-	movl	%ebp,28(%esp,%ecx,4)
-	movl	%edx,%ebp
-	mull	%edi
-	addl	36(%esp,%ecx,4),%ebp
-	leal	2(%ecx),%ecx
-	adcl	$0,%edx
-	addl	%eax,%ebp
-	movl	(%esi,%ecx,4),%eax
-	adcl	$0,%edx
-	cmpl	%ebx,%ecx
-	movl	%ebp,24(%esp,%ecx,4)
-	jl	.L0113rdmadd
-	movl	%edx,%ebp
-	mull	%edi
-	addl	32(%esp,%ebx,4),%ebp
-	adcl	$0,%edx
-	addl	%eax,%ebp
-	adcl	$0,%edx
-	movl	%ebp,28(%esp,%ebx,4)
-	movl	12(%esp),%ecx
-	xorl	%eax,%eax
-	movl	8(%esp),%esi
-	addl	36(%esp,%ebx,4),%edx
-	adcl	40(%esp,%ebx,4),%eax
-	movl	%edx,32(%esp,%ebx,4)
-	cmpl	%ebx,%ecx
-	movl	%eax,36(%esp,%ebx,4)
-	je	.L005common_tail
-	movl	4(%esi,%ecx,4),%edi
-	leal	1(%ecx),%ecx
-	movl	%edi,%eax
-	movl	%ecx,12(%esp)
-	mull	%edi
-	addl	32(%esp,%ecx,4),%eax
-	adcl	$0,%edx
-	movl	%eax,32(%esp,%ecx,4)
-	xorl	%ebp,%ebp
-	cmpl	%ebx,%ecx
-	leal	1(%ecx),%ecx
-	je	.L012sqrlast
-	movl	%edx,%ebx
-	shrl	$1,%edx
-	andl	$1,%ebx
-.align	16
-.L013sqradd:
-	movl	(%esi,%ecx,4),%eax
-	movl	%edx,%ebp
-	mull	%edi
-	addl	%ebp,%eax
-	leal	(%eax,%eax,1),%ebp
-	adcl	$0,%edx
-	shrl	$31,%eax
-	addl	32(%esp,%ecx,4),%ebp
-	leal	1(%ecx),%ecx
-	adcl	$0,%eax
-	addl	%ebx,%ebp
-	adcl	$0,%eax
-	cmpl	(%esp),%ecx
-	movl	%ebp,28(%esp,%ecx,4)
-	movl	%eax,%ebx
-	jle	.L013sqradd
-	movl	%edx,%ebp
-	addl	%edx,%edx
-	shrl	$31,%ebp
-	addl	%ebx,%edx
-	adcl	$0,%ebp
-.L012sqrlast:
-	movl	20(%esp),%edi
-	movl	16(%esp),%esi
-	imull	32(%esp),%edi
-	addl	32(%esp,%ecx,4),%edx
-	movl	(%esi),%eax
-	adcl	$0,%ebp
-	movl	%edx,32(%esp,%ecx,4)
-	movl	%ebp,36(%esp,%ecx,4)
-	mull	%edi
-	addl	32(%esp),%eax
-	leal	-1(%ecx),%ebx
-	adcl	$0,%edx
-	movl	$1,%ecx
-	movl	4(%esi),%eax
-	jmp	.L0113rdmadd
-.align	16
-.L005common_tail:
-	movl	16(%esp),%ebp
-	movl	4(%esp),%edi
-	leal	32(%esp),%esi
-	movl	(%esi),%eax
-	movl	%ebx,%ecx
-	xorl	%edx,%edx
-.align	16
-.L014sub:
-	sbbl	(%ebp,%edx,4),%eax
-	movl	%eax,(%edi,%edx,4)
-	decl	%ecx
-	movl	4(%esi,%edx,4),%eax
-	leal	1(%edx),%edx
-	jge	.L014sub
-	sbbl	$0,%eax
-	andl	%eax,%esi
-	notl	%eax
-	movl	%edi,%ebp
-	andl	%eax,%ebp
-	orl	%ebp,%esi
-.align	16
-.L015copy:
-	movl	(%esi,%ebx,4),%eax
-	movl	%eax,(%edi,%ebx,4)
-	movl	%ecx,32(%esp,%ebx,4)
-	decl	%ebx
-	jge	.L015copy
-	movl	24(%esp),%esp
-	movl	$1,%eax
-.L000just_leave:
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	bn_mul_mont,.-.L_bn_mul_mont_begin
-.byte	77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105
-.byte	112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56
-.byte	54,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121
-.byte	32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46
-.byte	111,114,103,62,0
-.comm	OPENSSL_ia32cap_P,8,4

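For reference, both x86-mont listings compute the word-serial (CIOS) Montgomery product r = a*b*2^(-32*num) mod n; bn_mul_mont's fifth argument points at n0 = -n[0]^(-1) mod 2^32, and bit 26 of OPENSSL_ia32cap_P (SSE2) selects the PMULUDQ path. A C sketch of the same arithmetic, with illustrative names (the assembly keeps t[] in its over-aligned stack scratch area):

  #include <stddef.h>
  #include <stdint.h>

  static void mont_mul(uint32_t *r, const uint32_t *a,
                       const uint32_t *b, const uint32_t *n,
                       uint32_t n0, size_t num)
  {
      uint32_t t[num + 2];                   /* C99 VLA, sketch only */
      for (size_t i = 0; i < num + 2; i++)
          t[i] = 0;

      for (size_t i = 0; i < num; i++) {
          uint64_t c = 0;
          for (size_t j = 0; j < num; j++) { /* t += b[i] * a */
              c += (uint64_t)a[j] * b[i] + t[j];
              t[j] = (uint32_t)c;
              c >>= 32;
          }
          c += t[num];
          t[num] = (uint32_t)c;
          t[num + 1] = (uint32_t)(c >> 32);

          uint32_t m = t[0] * n0;            /* makes t[0] vanish */
          c = ((uint64_t)m * n[0] + t[0]) >> 32;
          for (size_t j = 1; j < num; j++) { /* t = (t + m*n)/2^32 */
              c += (uint64_t)m * n[j] + t[j];
              t[j - 1] = (uint32_t)c;
              c >>= 32;
          }
          c += t[num];
          t[num - 1] = (uint32_t)c;
          t[num] = t[num + 1] + (uint32_t)(c >> 32);
      }

      /* Conditional final subtraction, as in the .L016sub/.L017copy
       * tail: keep t - n unless the subtraction borrows past t[num]. */
      uint64_t v, borrow = 0;
      for (size_t j = 0; j < num; j++) {
          v = (uint64_t)t[j] - n[j] - borrow;
          r[j] = (uint32_t)v;
          borrow = v >> 63;
      }
      if (t[num] < borrow)                   /* t < n: keep t */
          for (size_t j = 0; j < num; j++)
              r[j] = t[j];
  }
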
Added: trunk/secure/lib/libcrypto/i386/x86cpuid.S
===================================================================
--- trunk/secure/lib/libcrypto/i386/x86cpuid.S	                        (rev 0)
+++ trunk/secure/lib/libcrypto/i386/x86cpuid.S	2018-07-08 16:31:10 UTC (rev 11612)
@@ -0,0 +1,690 @@
+/* $MidnightBSD$ */
+# $FreeBSD: stable/10/secure/lib/libcrypto/i386/x86cpuid.S 299983 2016-05-16 22:42:09Z jkim $
+# Do not modify. This file is auto-generated from x86cpuid.pl.
+#ifdef PIC
+.file	"x86cpuid.S"
+.text
+.globl	OPENSSL_ia32_cpuid
+.type	OPENSSL_ia32_cpuid,@function
+.align	16
+OPENSSL_ia32_cpuid:
+.L_OPENSSL_ia32_cpuid_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	xorl	%edx,%edx
+	pushfl
+	popl	%eax
+	movl	%eax,%ecx
+	xorl	$2097152,%eax
+	pushl	%eax
+	popfl
+	pushfl
+	popl	%eax
+	xorl	%eax,%ecx
+	xorl	%eax,%eax
+	btl	$21,%ecx
+	jnc	.L000nocpuid
+	.byte	0x0f,0xa2
+	movl	%eax,%edi
+	xorl	%eax,%eax
+	cmpl	$1970169159,%ebx
+	setne	%al
+	movl	%eax,%ebp
+	cmpl	$1231384169,%edx
+	setne	%al
+	orl	%eax,%ebp
+	cmpl	$1818588270,%ecx
+	setne	%al
+	orl	%eax,%ebp
+	jz	.L001intel
+	cmpl	$1752462657,%ebx
+	setne	%al
+	movl	%eax,%esi
+	cmpl	$1769238117,%edx
+	setne	%al
+	orl	%eax,%esi
+	cmpl	$1145913699,%ecx
+	setne	%al
+	orl	%eax,%esi
+	jnz	.L001intel
+	movl	$2147483648,%eax
+	.byte	0x0f,0xa2
+	cmpl	$2147483649,%eax
+	jb	.L001intel
+	movl	%eax,%esi
+	movl	$2147483649,%eax
+	.byte	0x0f,0xa2
+	orl	%ecx,%ebp
+	andl	$2049,%ebp
+	cmpl	$2147483656,%esi
+	jb	.L001intel
+	movl	$2147483656,%eax
+	.byte	0x0f,0xa2
+	movzbl	%cl,%esi
+	incl	%esi
+	movl	$1,%eax
+	xorl	%ecx,%ecx
+	.byte	0x0f,0xa2
+	btl	$28,%edx
+	jnc	.L002generic
+	shrl	$16,%ebx
+	andl	$255,%ebx
+	cmpl	%esi,%ebx
+	ja	.L002generic
+	andl	$4026531839,%edx
+	jmp	.L002generic
+.L001intel:
+	cmpl	$4,%edi
+	movl	$-1,%edi
+	jb	.L003nocacheinfo
+	movl	$4,%eax
+	movl	$0,%ecx
+	.byte	0x0f,0xa2
+	movl	%eax,%edi
+	shrl	$14,%edi
+	andl	$4095,%edi
+.L003nocacheinfo:
+	movl	$1,%eax
+	xorl	%ecx,%ecx
+	.byte	0x0f,0xa2
+	andl	$3220176895,%edx
+	cmpl	$0,%ebp
+	jne	.L004notintel
+	orl	$1073741824,%edx
+	andb	$15,%ah
+	cmpb	$15,%ah
+	jne	.L004notintel
+	orl	$1048576,%edx
+.L004notintel:
+	btl	$28,%edx
+	jnc	.L002generic
+	andl	$4026531839,%edx
+	cmpl	$0,%edi
+	je	.L002generic
+	orl	$268435456,%edx
+	shrl	$16,%ebx
+	cmpb	$1,%bl
+	ja	.L002generic
+	andl	$4026531839,%edx
+.L002generic:
+	andl	$2048,%ebp
+	andl	$4294965247,%ecx
+	movl	%edx,%esi
+	orl	%ecx,%ebp
+	btl	$27,%ecx
+	jnc	.L005clear_avx
+	xorl	%ecx,%ecx
+.byte	15,1,208
+	andl	$6,%eax
+	cmpl	$6,%eax
+	je	.L006done
+	cmpl	$2,%eax
+	je	.L005clear_avx
+.L007clear_xmm:
+	andl	$4261412861,%ebp
+	andl	$4278190079,%esi
+.L005clear_avx:
+	andl	$4026525695,%ebp
+.L006done:
+	movl	%esi,%eax
+	movl	%ebp,%edx
+.L000nocpuid:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	OPENSSL_ia32_cpuid,.-.L_OPENSSL_ia32_cpuid_begin
+.globl	OPENSSL_rdtsc
+.type	OPENSSL_rdtsc,@function
+.align	16
+OPENSSL_rdtsc:
+.L_OPENSSL_rdtsc_begin:
+	xorl	%eax,%eax
+	xorl	%edx,%edx
+	call	.L008PIC_me_up
+.L008PIC_me_up:
+	popl	%ecx
+	leal	_GLOBAL_OFFSET_TABLE_+[.-.L008PIC_me_up](%ecx),%ecx
+	movl	OPENSSL_ia32cap_P@GOT(%ecx),%ecx
+	btl	$4,(%ecx)
+	jnc	.L009notsc
+	.byte	0x0f,0x31
+.L009notsc:
+	ret
+.size	OPENSSL_rdtsc,.-.L_OPENSSL_rdtsc_begin
+.globl	OPENSSL_instrument_halt
+.type	OPENSSL_instrument_halt,@function
+.align	16
+OPENSSL_instrument_halt:
+.L_OPENSSL_instrument_halt_begin:
+	call	.L010PIC_me_up
+.L010PIC_me_up:
+	popl	%ecx
+	leal	_GLOBAL_OFFSET_TABLE_+[.-.L010PIC_me_up](%ecx),%ecx
+	movl	OPENSSL_ia32cap_P@GOT(%ecx),%ecx
+	btl	$4,(%ecx)
+	jnc	.L011nohalt
+.long	2421723150
+	andl	$3,%eax
+	jnz	.L011nohalt
+	pushfl
+	popl	%eax
+	btl	$9,%eax
+	jnc	.L011nohalt
+	.byte	0x0f,0x31
+	pushl	%edx
+	pushl	%eax
+	hlt
+	.byte	0x0f,0x31
+	subl	(%esp),%eax
+	sbbl	4(%esp),%edx
+	addl	$8,%esp
+	ret
+.L011nohalt:
+	xorl	%eax,%eax
+	xorl	%edx,%edx
+	ret
+.size	OPENSSL_instrument_halt,.-.L_OPENSSL_instrument_halt_begin
+.globl	OPENSSL_far_spin
+.type	OPENSSL_far_spin,@function
+.align	16
+OPENSSL_far_spin:
+.L_OPENSSL_far_spin_begin:
+	pushfl
+	popl	%eax
+	btl	$9,%eax
+	jnc	.L012nospin
+	movl	4(%esp),%eax
+	movl	8(%esp),%ecx
+.long	2430111262
+	xorl	%eax,%eax
+	movl	(%ecx),%edx
+	jmp	.L013spin
+.align	16
+.L013spin:
+	incl	%eax
+	cmpl	(%ecx),%edx
+	je	.L013spin
+.long	529567888
+	ret
+.L012nospin:
+	xorl	%eax,%eax
+	xorl	%edx,%edx
+	ret
+.size	OPENSSL_far_spin,.-.L_OPENSSL_far_spin_begin
+.globl	OPENSSL_wipe_cpu
+.type	OPENSSL_wipe_cpu,@function
+.align	16
+OPENSSL_wipe_cpu:
+.L_OPENSSL_wipe_cpu_begin:
+	xorl	%eax,%eax
+	xorl	%edx,%edx
+	call	.L014PIC_me_up
+.L014PIC_me_up:
+	popl	%ecx
+	leal	_GLOBAL_OFFSET_TABLE_+[.-.L014PIC_me_up](%ecx),%ecx
+	movl	OPENSSL_ia32cap_P@GOT(%ecx),%ecx
+	movl	(%ecx),%ecx
+	btl	$1,(%ecx)
+	jnc	.L015no_x87
+	andl	$83886080,%ecx
+	cmpl	$83886080,%ecx
+	jne	.L016no_sse2
+	pxor	%xmm0,%xmm0
+	pxor	%xmm1,%xmm1
+	pxor	%xmm2,%xmm2
+	pxor	%xmm3,%xmm3
+	pxor	%xmm4,%xmm4
+	pxor	%xmm5,%xmm5
+	pxor	%xmm6,%xmm6
+	pxor	%xmm7,%xmm7
+.L016no_sse2:
+.long	4007259865,4007259865,4007259865,4007259865,2430851995
+.L015no_x87:
+	leal	4(%esp),%eax
+	ret
+.size	OPENSSL_wipe_cpu,.-.L_OPENSSL_wipe_cpu_begin
+.globl	OPENSSL_atomic_add
+.type	OPENSSL_atomic_add,@function
+.align	16
+OPENSSL_atomic_add:
+.L_OPENSSL_atomic_add_begin:
+	movl	4(%esp),%edx
+	movl	8(%esp),%ecx
+	pushl	%ebx
+	nop
+	movl	(%edx),%eax
+.L017spin:
+	leal	(%eax,%ecx,1),%ebx
+	nop
+.long	447811568
+	jne	.L017spin
+	movl	%ebx,%eax
+	popl	%ebx
+	ret
+.size	OPENSSL_atomic_add,.-.L_OPENSSL_atomic_add_begin
+.globl	OPENSSL_indirect_call
+.type	OPENSSL_indirect_call,@function
+.align	16
+OPENSSL_indirect_call:
+.L_OPENSSL_indirect_call_begin:
+	pushl	%ebp
+	movl	%esp,%ebp
+	subl	$28,%esp
+	movl	12(%ebp),%ecx
+	movl	%ecx,(%esp)
+	movl	16(%ebp),%edx
+	movl	%edx,4(%esp)
+	movl	20(%ebp),%eax
+	movl	%eax,8(%esp)
+	movl	24(%ebp),%eax
+	movl	%eax,12(%esp)
+	movl	28(%ebp),%eax
+	movl	%eax,16(%esp)
+	movl	32(%ebp),%eax
+	movl	%eax,20(%esp)
+	movl	36(%ebp),%eax
+	movl	%eax,24(%esp)
+	call	*8(%ebp)
+	movl	%ebp,%esp
+	popl	%ebp
+	ret
+.size	OPENSSL_indirect_call,.-.L_OPENSSL_indirect_call_begin
+.globl	OPENSSL_cleanse
+.type	OPENSSL_cleanse,@function
+.align	16
+OPENSSL_cleanse:
+.L_OPENSSL_cleanse_begin:
+	movl	4(%esp),%edx
+	movl	8(%esp),%ecx
+	xorl	%eax,%eax
+	cmpl	$7,%ecx
+	jae	.L018lot
+	cmpl	$0,%ecx
+	je	.L019ret
+.L020little:
+	movb	%al,(%edx)
+	subl	$1,%ecx
+	leal	1(%edx),%edx
+	jnz	.L020little
+.L019ret:
+	ret
+.align	16
+.L018lot:
+	testl	$3,%edx
+	jz	.L021aligned
+	movb	%al,(%edx)
+	leal	-1(%ecx),%ecx
+	leal	1(%edx),%edx
+	jmp	.L018lot
+.L021aligned:
+	movl	%eax,(%edx)
+	leal	-4(%ecx),%ecx
+	testl	$-4,%ecx
+	leal	4(%edx),%edx
+	jnz	.L021aligned
+	cmpl	$0,%ecx
+	jne	.L020little
+	ret
+.size	OPENSSL_cleanse,.-.L_OPENSSL_cleanse_begin
+.globl	OPENSSL_ia32_rdrand
+.type	OPENSSL_ia32_rdrand,@function
+.align	16
+OPENSSL_ia32_rdrand:
+.L_OPENSSL_ia32_rdrand_begin:
+	movl	$8,%ecx
+.L022loop:
+.byte	15,199,240
+	jc	.L023break
+	loop	.L022loop
+.L023break:
+	cmpl	$0,%eax
+	cmovel	%ecx,%eax
+	ret
+.size	OPENSSL_ia32_rdrand,.-.L_OPENSSL_ia32_rdrand_begin
+.hidden	OPENSSL_cpuid_setup
+.hidden	OPENSSL_ia32cap_P
+.comm	OPENSSL_ia32cap_P,8,4
+.section	.init
+	call	OPENSSL_cpuid_setup
+#else
+.file	"x86cpuid.S"
+.text
+.globl	OPENSSL_ia32_cpuid
+.type	OPENSSL_ia32_cpuid,@function
+.align	16
+OPENSSL_ia32_cpuid:
+.L_OPENSSL_ia32_cpuid_begin:
+	pushl	%ebp
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	xorl	%edx,%edx
+	pushfl
+	popl	%eax
+	movl	%eax,%ecx
+	xorl	$2097152,%eax
+	pushl	%eax
+	popfl
+	pushfl
+	popl	%eax
+	xorl	%eax,%ecx
+	xorl	%eax,%eax
+	btl	$21,%ecx
+	jnc	.L000nocpuid
+	.byte	0x0f,0xa2
+	movl	%eax,%edi
+	xorl	%eax,%eax
+	cmpl	$1970169159,%ebx
+	setne	%al
+	movl	%eax,%ebp
+	cmpl	$1231384169,%edx
+	setne	%al
+	orl	%eax,%ebp
+	cmpl	$1818588270,%ecx
+	setne	%al
+	orl	%eax,%ebp
+	jz	.L001intel
+	cmpl	$1752462657,%ebx
+	setne	%al
+	movl	%eax,%esi
+	cmpl	$1769238117,%edx
+	setne	%al
+	orl	%eax,%esi
+	cmpl	$1145913699,%ecx
+	setne	%al
+	orl	%eax,%esi
+	jnz	.L001intel
+	movl	$2147483648,%eax
+	.byte	0x0f,0xa2
+	cmpl	$2147483649,%eax
+	jb	.L001intel
+	movl	%eax,%esi
+	movl	$2147483649,%eax
+	.byte	0x0f,0xa2
+	orl	%ecx,%ebp
+	andl	$2049,%ebp
+	cmpl	$2147483656,%esi
+	jb	.L001intel
+	movl	$2147483656,%eax
+	.byte	0x0f,0xa2
+	movzbl	%cl,%esi
+	incl	%esi
+	movl	$1,%eax
+	xorl	%ecx,%ecx
+	.byte	0x0f,0xa2
+	btl	$28,%edx
+	jnc	.L002generic
+	shrl	$16,%ebx
+	andl	$255,%ebx
+	cmpl	%esi,%ebx
+	ja	.L002generic
+	andl	$4026531839,%edx
+	jmp	.L002generic
+.L001intel:
+	cmpl	$4,%edi
+	movl	$-1,%edi
+	jb	.L003nocacheinfo
+	movl	$4,%eax
+	movl	$0,%ecx
+	.byte	0x0f,0xa2
+	movl	%eax,%edi
+	shrl	$14,%edi
+	andl	$4095,%edi
+.L003nocacheinfo:
+	movl	$1,%eax
+	xorl	%ecx,%ecx
+	.byte	0x0f,0xa2
+	andl	$3220176895,%edx
+	cmpl	$0,%ebp
+	jne	.L004notintel
+	orl	$1073741824,%edx
+	andb	$15,%ah
+	cmpb	$15,%ah
+	jne	.L004notintel
+	orl	$1048576,%edx
+.L004notintel:
+	btl	$28,%edx
+	jnc	.L002generic
+	andl	$4026531839,%edx
+	cmpl	$0,%edi
+	je	.L002generic
+	orl	$268435456,%edx
+	shrl	$16,%ebx
+	cmpb	$1,%bl
+	ja	.L002generic
+	andl	$4026531839,%edx
+.L002generic:
+	andl	$2048,%ebp
+	andl	$4294965247,%ecx
+	movl	%edx,%esi
+	orl	%ecx,%ebp
+	btl	$27,%ecx
+	jnc	.L005clear_avx
+	xorl	%ecx,%ecx
+.byte	15,1,208
+	andl	$6,%eax
+	cmpl	$6,%eax
+	je	.L006done
+	cmpl	$2,%eax
+	je	.L005clear_avx
+.L007clear_xmm:
+	andl	$4261412861,%ebp
+	andl	$4278190079,%esi
+.L005clear_avx:
+	andl	$4026525695,%ebp
+.L006done:
+	movl	%esi,%eax
+	movl	%ebp,%edx
+.L000nocpuid:
+	popl	%edi
+	popl	%esi
+	popl	%ebx
+	popl	%ebp
+	ret
+.size	OPENSSL_ia32_cpuid,.-.L_OPENSSL_ia32_cpuid_begin
+.globl	OPENSSL_rdtsc
+.type	OPENSSL_rdtsc,@function
+.align	16
+OPENSSL_rdtsc:
+.L_OPENSSL_rdtsc_begin:
+	xorl	%eax,%eax
+	xorl	%edx,%edx
+	leal	OPENSSL_ia32cap_P,%ecx
+	btl	$4,(%ecx)
+	jnc	.L008notsc
+	.byte	0x0f,0x31
+.L008notsc:
+	ret
+.size	OPENSSL_rdtsc,.-.L_OPENSSL_rdtsc_begin
+.globl	OPENSSL_instrument_halt
+.type	OPENSSL_instrument_halt,@function
+.align	16
+OPENSSL_instrument_halt:
+.L_OPENSSL_instrument_halt_begin:
+	leal	OPENSSL_ia32cap_P,%ecx
+	btl	$4,(%ecx)
+	jnc	.L009nohalt
+.long	2421723150
+	andl	$3,%eax
+	jnz	.L009nohalt
+	pushfl
+	popl	%eax
+	btl	$9,%eax
+	jnc	.L009nohalt
+	.byte	0x0f,0x31
+	pushl	%edx
+	pushl	%eax
+	hlt
+	.byte	0x0f,0x31
+	subl	(%esp),%eax
+	sbbl	4(%esp),%edx
+	addl	$8,%esp
+	ret
+.L009nohalt:
+	xorl	%eax,%eax
+	xorl	%edx,%edx
+	ret
+.size	OPENSSL_instrument_halt,.-.L_OPENSSL_instrument_halt_begin
+.globl	OPENSSL_far_spin
+.type	OPENSSL_far_spin,@function
+.align	16
+OPENSSL_far_spin:
+.L_OPENSSL_far_spin_begin:
+	pushfl
+	popl	%eax
+	btl	$9,%eax
+	jnc	.L010nospin
+	movl	4(%esp),%eax
+	movl	8(%esp),%ecx
+.long	2430111262
+	xorl	%eax,%eax
+	movl	(%ecx),%edx
+	jmp	.L011spin
+.align	16
+.L011spin:
+	incl	%eax
+	cmpl	(%ecx),%edx
+	je	.L011spin
+.long	529567888
+	ret
+.L010nospin:
+	xorl	%eax,%eax
+	xorl	%edx,%edx
+	ret
+.size	OPENSSL_far_spin,.-.L_OPENSSL_far_spin_begin
+.globl	OPENSSL_wipe_cpu
+.type	OPENSSL_wipe_cpu,@function
+.align	16
+OPENSSL_wipe_cpu:
+.L_OPENSSL_wipe_cpu_begin:
+	xorl	%eax,%eax
+	xorl	%edx,%edx
+	leal	OPENSSL_ia32cap_P,%ecx
+	movl	(%ecx),%ecx
+	btl	$1,(%ecx)
+	jnc	.L012no_x87
+	andl	$83886080,%ecx
+	cmpl	$83886080,%ecx
+	jne	.L013no_sse2
+	pxor	%xmm0,%xmm0
+	pxor	%xmm1,%xmm1
+	pxor	%xmm2,%xmm2
+	pxor	%xmm3,%xmm3
+	pxor	%xmm4,%xmm4
+	pxor	%xmm5,%xmm5
+	pxor	%xmm6,%xmm6
+	pxor	%xmm7,%xmm7
+.L013no_sse2:
+.long	4007259865,4007259865,4007259865,4007259865,2430851995
+.L012no_x87:
+	leal	4(%esp),%eax
+	ret
+.size	OPENSSL_wipe_cpu,.-.L_OPENSSL_wipe_cpu_begin
+.globl	OPENSSL_atomic_add
+.type	OPENSSL_atomic_add,@function
+.align	16
+OPENSSL_atomic_add:
+.L_OPENSSL_atomic_add_begin:
+	movl	4(%esp),%edx
+	movl	8(%esp),%ecx
+	pushl	%ebx
+	nop
+	movl	(%edx),%eax
+.L014spin:
+	leal	(%eax,%ecx,1),%ebx
+	nop
+.long	447811568
+	jne	.L014spin
+	movl	%ebx,%eax
+	popl	%ebx
+	ret
+.size	OPENSSL_atomic_add,.-.L_OPENSSL_atomic_add_begin
+.globl	OPENSSL_indirect_call
+.type	OPENSSL_indirect_call,@function
+.align	16
+OPENSSL_indirect_call:
+.L_OPENSSL_indirect_call_begin:
+	pushl	%ebp
+	movl	%esp,%ebp
+	subl	$28,%esp
+	movl	12(%ebp),%ecx
+	movl	%ecx,(%esp)
+	movl	16(%ebp),%edx
+	movl	%edx,4(%esp)
+	movl	20(%ebp),%eax
+	movl	%eax,8(%esp)
+	movl	24(%ebp),%eax
+	movl	%eax,12(%esp)
+	movl	28(%ebp),%eax
+	movl	%eax,16(%esp)
+	movl	32(%ebp),%eax
+	movl	%eax,20(%esp)
+	movl	36(%ebp),%eax
+	movl	%eax,24(%esp)
+	call	*8(%ebp)
+	movl	%ebp,%esp
+	popl	%ebp
+	ret
+.size	OPENSSL_indirect_call,.-.L_OPENSSL_indirect_call_begin
+.globl	OPENSSL_cleanse
+.type	OPENSSL_cleanse,@function
+.align	16
+OPENSSL_cleanse:
+.L_OPENSSL_cleanse_begin:
+	movl	4(%esp),%edx
+	movl	8(%esp),%ecx
+	xorl	%eax,%eax
+	cmpl	$7,%ecx
+	jae	.L015lot
+	cmpl	$0,%ecx
+	je	.L016ret
+.L017little:
+	movb	%al,(%edx)
+	subl	$1,%ecx
+	leal	1(%edx),%edx
+	jnz	.L017little
+.L016ret:
+	ret
+.align	16
+.L015lot:
+	testl	$3,%edx
+	jz	.L018aligned
+	movb	%al,(%edx)
+	leal	-1(%ecx),%ecx
+	leal	1(%edx),%edx
+	jmp	.L015lot
+.L018aligned:
+	movl	%eax,(%edx)
+	leal	-4(%ecx),%ecx
+	testl	$-4,%ecx
+	leal	4(%edx),%edx
+	jnz	.L018aligned
+	cmpl	$0,%ecx
+	jne	.L017little
+	ret
+.size	OPENSSL_cleanse,.-.L_OPENSSL_cleanse_begin
+.globl	OPENSSL_ia32_rdrand
+.type	OPENSSL_ia32_rdrand,@function
+.align	16
+OPENSSL_ia32_rdrand:
+.L_OPENSSL_ia32_rdrand_begin:
+	movl	$8,%ecx
+.L019loop:
+.byte	15,199,240
+	jc	.L020break
+	loop	.L019loop
+.L020break:
+	cmpl	$0,%eax
+	cmovel	%ecx,%eax
+	ret
+.size	OPENSSL_ia32_rdrand,.-.L_OPENSSL_ia32_rdrand_begin
+.hidden	OPENSSL_cpuid_setup
+.hidden	OPENSSL_ia32cap_P
+.comm	OPENSSL_ia32cap_P,8,4
+.section	.init
+	call	OPENSSL_cpuid_setup
+#endif


Property changes on: trunk/secure/lib/libcrypto/i386/x86cpuid.S
___________________________________________________________________
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:keywords
## -0,0 +1 ##
+MidnightBSD=%H
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
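
Both x86cpuid variants above carry OPENSSL_cleanse, which zeroes a buffer byte by byte until the pointer is 4-byte aligned and then a word at a time; keeping it in assembly also stops a compiler from discarding the "dead" stores. A C model of the control flow, using the PIC half's labels and an illustrative name:

  #include <stddef.h>
  #include <stdint.h>

  static void cleanse_model(void *ptr, size_t len)
  {
      volatile uint8_t *p = ptr;
      if (len >= 7) {                          /* .L018lot */
          while ((uintptr_t)p & 3) {           /* align first */
              *p++ = 0;
              len--;
          }
          while (len >= 4) {                   /* .L021aligned */
              *(volatile uint32_t *)p = 0;
              p += 4;
              len -= 4;
          }
      }
      while (len--)                            /* .L020little */
          *p++ = 0;
  }
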
Deleted: trunk/secure/lib/libcrypto/i386/x86cpuid.s
===================================================================
--- trunk/secure/lib/libcrypto/i386/x86cpuid.s	2018-07-08 16:29:52 UTC (rev 11611)
+++ trunk/secure/lib/libcrypto/i386/x86cpuid.s	2018-07-08 16:31:10 UTC (rev 11612)
@@ -1,335 +0,0 @@
-	# $FreeBSD: stable/10/secure/lib/libcrypto/i386/x86cpuid.s 264331 2014-04-10 22:39:27Z jkim $
-.file	"x86cpuid.s"
-.text
-.globl	OPENSSL_ia32_cpuid
-.type	OPENSSL_ia32_cpuid,@function
-.align	16
-OPENSSL_ia32_cpuid:
-.L_OPENSSL_ia32_cpuid_begin:
-	pushl	%ebp
-	pushl	%ebx
-	pushl	%esi
-	pushl	%edi
-	xorl	%edx,%edx
-	pushfl
-	popl	%eax
-	movl	%eax,%ecx
-	xorl	$2097152,%eax
-	pushl	%eax
-	popfl
-	pushfl
-	popl	%eax
-	xorl	%eax,%ecx
-	xorl	%eax,%eax
-	btl	$21,%ecx
-	jnc	.L000nocpuid
-	.byte	0x0f,0xa2
-	movl	%eax,%edi
-	xorl	%eax,%eax
-	cmpl	$1970169159,%ebx
-	setne	%al
-	movl	%eax,%ebp
-	cmpl	$1231384169,%edx
-	setne	%al
-	orl	%eax,%ebp
-	cmpl	$1818588270,%ecx
-	setne	%al
-	orl	%eax,%ebp
-	jz	.L001intel
-	cmpl	$1752462657,%ebx
-	setne	%al
-	movl	%eax,%esi
-	cmpl	$1769238117,%edx
-	setne	%al
-	orl	%eax,%esi
-	cmpl	$1145913699,%ecx
-	setne	%al
-	orl	%eax,%esi
-	jnz	.L001intel
-	movl	$2147483648,%eax
-	.byte	0x0f,0xa2
-	cmpl	$2147483649,%eax
-	jb	.L001intel
-	movl	%eax,%esi
-	movl	$2147483649,%eax
-	.byte	0x0f,0xa2
-	orl	%ecx,%ebp
-	andl	$2049,%ebp
-	cmpl	$2147483656,%esi
-	jb	.L001intel
-	movl	$2147483656,%eax
-	.byte	0x0f,0xa2
-	movzbl	%cl,%esi
-	incl	%esi
-	movl	$1,%eax
-	xorl	%ecx,%ecx
-	.byte	0x0f,0xa2
-	btl	$28,%edx
-	jnc	.L002generic
-	shrl	$16,%ebx
-	andl	$255,%ebx
-	cmpl	%esi,%ebx
-	ja	.L002generic
-	andl	$4026531839,%edx
-	jmp	.L002generic
-.L001intel:
-	cmpl	$4,%edi
-	movl	$-1,%edi
-	jb	.L003nocacheinfo
-	movl	$4,%eax
-	movl	$0,%ecx
-	.byte	0x0f,0xa2
-	movl	%eax,%edi
-	shrl	$14,%edi
-	andl	$4095,%edi
-.L003nocacheinfo:
-	movl	$1,%eax
-	xorl	%ecx,%ecx
-	.byte	0x0f,0xa2
-	andl	$3220176895,%edx
-	cmpl	$0,%ebp
-	jne	.L004notintel
-	orl	$1073741824,%edx
-	andb	$15,%ah
-	cmpb	$15,%ah
-	jne	.L004notintel
-	orl	$1048576,%edx
-.L004notintel:
-	btl	$28,%edx
-	jnc	.L002generic
-	andl	$4026531839,%edx
-	cmpl	$0,%edi
-	je	.L002generic
-	orl	$268435456,%edx
-	shrl	$16,%ebx
-	cmpb	$1,%bl
-	ja	.L002generic
-	andl	$4026531839,%edx
-.L002generic:
-	andl	$2048,%ebp
-	andl	$4294965247,%ecx
-	movl	%edx,%esi
-	orl	%ecx,%ebp
-	btl	$27,%ecx
-	jnc	.L005clear_avx
-	xorl	%ecx,%ecx
-.byte	15,1,208
-	andl	$6,%eax
-	cmpl	$6,%eax
-	je	.L006done
-	cmpl	$2,%eax
-	je	.L005clear_avx
-.L007clear_xmm:
-	andl	$4261412861,%ebp
-	andl	$4278190079,%esi
-.L005clear_avx:
-	andl	$4026525695,%ebp
-.L006done:
-	movl	%esi,%eax
-	movl	%ebp,%edx
-.L000nocpuid:
-	popl	%edi
-	popl	%esi
-	popl	%ebx
-	popl	%ebp
-	ret
-.size	OPENSSL_ia32_cpuid,.-.L_OPENSSL_ia32_cpuid_begin
-.globl	OPENSSL_rdtsc
-.type	OPENSSL_rdtsc,@function
-.align	16
-OPENSSL_rdtsc:
-.L_OPENSSL_rdtsc_begin:
-	xorl	%eax,%eax
-	xorl	%edx,%edx
-	leal	OPENSSL_ia32cap_P,%ecx
-	btl	$4,(%ecx)
-	jnc	.L008notsc
-	.byte	0x0f,0x31
-.L008notsc:
-	ret
-.size	OPENSSL_rdtsc,.-.L_OPENSSL_rdtsc_begin
-.globl	OPENSSL_instrument_halt
-.type	OPENSSL_instrument_halt,@function
-.align	16
-OPENSSL_instrument_halt:
-.L_OPENSSL_instrument_halt_begin:
-	leal	OPENSSL_ia32cap_P,%ecx
-	btl	$4,(%ecx)
-	jnc	.L009nohalt
-.long	2421723150
-	andl	$3,%eax
-	jnz	.L009nohalt
-	pushfl
-	popl	%eax
-	btl	$9,%eax
-	jnc	.L009nohalt
-	.byte	0x0f,0x31
-	pushl	%edx
-	pushl	%eax
-	hlt
-	.byte	0x0f,0x31
-	subl	(%esp),%eax
-	sbbl	4(%esp),%edx
-	addl	$8,%esp
-	ret
-.L009nohalt:
-	xorl	%eax,%eax
-	xorl	%edx,%edx
-	ret
-.size	OPENSSL_instrument_halt,.-.L_OPENSSL_instrument_halt_begin
-.globl	OPENSSL_far_spin
-.type	OPENSSL_far_spin,@function
-.align	16
-OPENSSL_far_spin:
-.L_OPENSSL_far_spin_begin:
-	pushfl
-	popl	%eax
-	btl	$9,%eax
-	jnc	.L010nospin
-	movl	4(%esp),%eax
-	movl	8(%esp),%ecx
-.long	2430111262
-	xorl	%eax,%eax
-	movl	(%ecx),%edx
-	jmp	.L011spin
-.align	16
-.L011spin:
-	incl	%eax
-	cmpl	(%ecx),%edx
-	je	.L011spin
-.long	529567888
-	ret
-.L010nospin:
-	xorl	%eax,%eax
-	xorl	%edx,%edx
-	ret
-.size	OPENSSL_far_spin,.-.L_OPENSSL_far_spin_begin
-.globl	OPENSSL_wipe_cpu
-.type	OPENSSL_wipe_cpu,@function
-.align	16
-OPENSSL_wipe_cpu:
-.L_OPENSSL_wipe_cpu_begin:
-	xorl	%eax,%eax
-	xorl	%edx,%edx
-	leal	OPENSSL_ia32cap_P,%ecx
-	movl	(%ecx),%ecx
-	btl	$1,(%ecx)
-	jnc	.L012no_x87
-	andl	$83886080,%ecx
-	cmpl	$83886080,%ecx
-	jne	.L013no_sse2
-	pxor	%xmm0,%xmm0
-	pxor	%xmm1,%xmm1
-	pxor	%xmm2,%xmm2
-	pxor	%xmm3,%xmm3
-	pxor	%xmm4,%xmm4
-	pxor	%xmm5,%xmm5
-	pxor	%xmm6,%xmm6
-	pxor	%xmm7,%xmm7
-.L013no_sse2:
-.long	4007259865,4007259865,4007259865,4007259865,2430851995
-.L012no_x87:
-	leal	4(%esp),%eax
-	ret
-.size	OPENSSL_wipe_cpu,.-.L_OPENSSL_wipe_cpu_begin
-.globl	OPENSSL_atomic_add
-.type	OPENSSL_atomic_add,@function
-.align	16
-OPENSSL_atomic_add:
-.L_OPENSSL_atomic_add_begin:
-	movl	4(%esp),%edx
-	movl	8(%esp),%ecx
-	pushl	%ebx
-	nop
-	movl	(%edx),%eax
-.L014spin:
-	leal	(%eax,%ecx,1),%ebx
-	nop
-.long	447811568
-	jne	.L014spin
-	movl	%ebx,%eax
-	popl	%ebx
-	ret
-.size	OPENSSL_atomic_add,.-.L_OPENSSL_atomic_add_begin
-.globl	OPENSSL_indirect_call
-.type	OPENSSL_indirect_call,@function
-.align	16
-OPENSSL_indirect_call:
-.L_OPENSSL_indirect_call_begin:
-	pushl	%ebp
-	movl	%esp,%ebp
-	subl	$28,%esp
-	movl	12(%ebp),%ecx
-	movl	%ecx,(%esp)
-	movl	16(%ebp),%edx
-	movl	%edx,4(%esp)
-	movl	20(%ebp),%eax
-	movl	%eax,8(%esp)
-	movl	24(%ebp),%eax
-	movl	%eax,12(%esp)
-	movl	28(%ebp),%eax
-	movl	%eax,16(%esp)
-	movl	32(%ebp),%eax
-	movl	%eax,20(%esp)
-	movl	36(%ebp),%eax
-	movl	%eax,24(%esp)
-	call	*8(%ebp)
-	movl	%ebp,%esp
-	popl	%ebp
-	ret
-.size	OPENSSL_indirect_call,.-.L_OPENSSL_indirect_call_begin
-.globl	OPENSSL_cleanse
-.type	OPENSSL_cleanse,@function
-.align	16
-OPENSSL_cleanse:
-.L_OPENSSL_cleanse_begin:
-	movl	4(%esp),%edx
-	movl	8(%esp),%ecx
-	xorl	%eax,%eax
-	cmpl	$7,%ecx
-	jae	.L015lot
-	cmpl	$0,%ecx
-	je	.L016ret
-.L017little:
-	movb	%al,(%edx)
-	subl	$1,%ecx
-	leal	1(%edx),%edx
-	jnz	.L017little
-.L016ret:
-	ret
-.align	16
-.L015lot:
-	testl	$3,%edx
-	jz	.L018aligned
-	movb	%al,(%edx)
-	leal	-1(%ecx),%ecx
-	leal	1(%edx),%edx
-	jmp	.L015lot
-.L018aligned:
-	movl	%eax,(%edx)
-	leal	-4(%ecx),%ecx
-	testl	$-4,%ecx
-	leal	4(%edx),%edx
-	jnz	.L018aligned
-	cmpl	$0,%ecx
-	jne	.L017little
-	ret
-.size	OPENSSL_cleanse,.-.L_OPENSSL_cleanse_begin
-.globl	OPENSSL_ia32_rdrand
-.type	OPENSSL_ia32_rdrand,@function
-.align	16
-OPENSSL_ia32_rdrand:
-.L_OPENSSL_ia32_rdrand_begin:
-	movl	$8,%ecx
-.L019loop:
-.byte	15,199,240
-	jc	.L020break
-	loop	.L019loop
-.L020break:
-	cmpl	$0,%eax
-	cmovel	%ecx,%eax
-	ret
-.size	OPENSSL_ia32_rdrand,.-.L_OPENSSL_ia32_rdrand_begin
-.comm	OPENSSL_ia32cap_P,8,4
-.section	.init
-	call	OPENSSL_cpuid_setup

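Running through all of these listings is one dispatch convention: OPENSSL_ia32_cpuid fills the two-word capability vector OPENSSL_ia32cap_P (wired up at load time by the call to OPENSSL_cpuid_setup in .section .init), and every routine that uses an optional instruction first tests one of its bits, e.g. bit 4 (TSC) in OPENSSL_rdtsc and bit 26 (SSE2) in bn_mul_mont. A C model of the test pattern (the helper name is illustrative; the symbol is real):

  #include <stdint.h>

  extern uint32_t OPENSSL_ia32cap_P[2];  /* .comm OPENSSL_ia32cap_P,8,4 */

  /* Mirror of the btl tests: word = bit/32, then test bit%32. */
  static int ia32cap_bit(int bit)
  {
      return (OPENSSL_ia32cap_P[bit >> 5] >> (bit & 31)) & 1;
  }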

