	/* Don't even think of reading this code */
	/* It was automatically generated by des-som2.pl */
	/* Which is a perl program used to generate the x86 assembler for */
	/* any of elf, a.out, Win32, or Solaris */
	/* It can be found in SSLeay 0.6.5+ or in libdes 3.26+ */
	/* eric <eay@mincom.oz.au> */
	/* The inner loop instruction sequence and the IP/FP modifications */
	/* are from Svend Olaf Mikkelsen <svolaf@inet.uni-c.dk> */

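	/* Note: ALIGN, TYPE() and SIZE() below are not assembler directives;
	   they are presumably macros supplied by the surrounding build (for
	   example via the C preprocessor) and expanded differently for the
	   elf, a.out, Win32 and Solaris output formats named above. */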
	.file	"dx86xxxx.s"
	.version	"01.01"
gcc2_compiled.:
.text
	.align ALIGN
.globl des_encrypt
	TYPE(des_encrypt,@function)
des_encrypt:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi


	/* Load the 2 words */
	movl	20(%esp),	%esi
	xorl	%ecx,		%ecx
	movl	(%esi),		%eax
	movl	28(%esp),	%ebx
	movl	4(%esi),	%edi

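	/* With the return address and the four saved registers on the stack,
	   20(%esp) appears to be the pointer to the two 32-bit data halves,
	   24(%esp) the key schedule and 28(%esp) the encrypt/decrypt flag. */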
	/* IP */
	roll	$4,		%eax
	movl	%eax,		%esi
	xorl	%edi,		%eax
	andl	$0xf0f0f0f0,	%eax
	xorl	%eax,		%esi
	xorl	%eax,		%edi

	roll	$20,		%edi
	movl	%edi,		%eax
	xorl	%esi,		%edi
	andl	$0xfff0000f,	%edi
	xorl	%edi,		%eax
	xorl	%edi,		%esi

	roll	$14,		%eax
	movl	%eax,		%edi
	xorl	%esi,		%eax
	andl	$0x33333333,	%eax
	xorl	%eax,		%edi
	xorl	%eax,		%esi

	roll	$22,		%esi
	movl	%esi,		%eax
	xorl	%edi,		%esi
	andl	$0x03fc03fc,	%esi
	xorl	%esi,		%eax
	xorl	%esi,		%edi

	roll	$9,		%eax
	movl	%eax,		%esi
	xorl	%edi,		%eax
	andl	$0xaaaaaaaa,	%eax
	xorl	%eax,		%esi
	xorl	%eax,		%edi

	roll	$1,		%edi
	cmpl	$0,		%ebx
	movl	24(%esp),	%ebp
	je	.L000start_decrypt

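	/* The five blocks above implement the DES initial permutation (IP)
	   with the usual rotate/mask/xor bit-swap trick: each block exchanges
	   one group of bit positions between the two halves using a single
	   AND mask.  A zero encrypt flag branches to .L000start_decrypt,
	   which runs the same rounds with the key schedule in reverse order. */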
	/* Round 0 */
	movl	(%ebp),		%eax
	xorl	%ebx,		%ebx
	movl	4(%ebp),	%edx
	xorl	%esi,		%eax
	xorl	%esi,		%edx
	andl	$0xfcfcfcfc,	%eax
	andl	$0xcfcfcfcf,	%edx
	movb	%al,		%bl
	movb	%ah,		%cl
	rorl	$4,		%edx
	movl	      des_SPtrans(%ebx),%ebp
	movb	%dl,		%bl
	xorl	%ebp,		%edi
	movl	0x200+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%edi
	movb	%dh,		%cl
	shrl	$16,		%eax
	movl	0x100+des_SPtrans(%ebx),%ebp
	xorl	%ebp,		%edi
	movb	%ah,		%bl
	shrl	$16,		%edx
	movl	0x300+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%edi
	movl	24(%esp),	%ebp
	movb	%dh,		%cl
	andl	$0xff,		%eax
	andl	$0xff,		%edx
	movl	0x600+des_SPtrans(%ebx),%ebx
	xorl	%ebx,		%edi
	movl	0x700+des_SPtrans(%ecx),%ebx
	xorl	%ebx,		%edi
	movl	0x400+des_SPtrans(%eax),%ebx
	xorl	%ebx,		%edi
	movl	0x500+des_SPtrans(%edx),%ebx
	xorl	%ebx,		%edi

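	/* Every round below has the same shape as Round 0: xor the two 32-bit
	   round-key words into one half, mask with 0xfcfcfcfc/0xcfcfcfcf to
	   isolate the S-box input bits, pull them out a byte at a time, look
	   up the eight combined S/P tables at des_SPtrans+0x000..0x700 and
	   xor the results into the other half.  %ebp is used as scratch and
	   reloaded from 24(%esp) each round; successive rounds alternate
	   which of %esi/%edi is updated and advance the key schedule by
	   eight bytes. */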
| 	/* Round 1 */
 | |
| 	movl	8(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	12(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 2 */
 | |
| 	movl	16(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	20(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 3 */
 | |
| 	movl	24(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	28(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 4 */
 | |
| 	movl	32(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	36(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 5 */
 | |
| 	movl	40(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	44(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 6 */
 | |
| 	movl	48(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	52(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 7 */
 | |
| 	movl	56(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	60(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 8 */
 | |
| 	movl	64(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	68(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 9 */
 | |
| 	movl	72(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	76(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 10 */
 | |
| 	movl	80(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	84(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 11 */
 | |
| 	movl	88(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	92(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 12 */
 | |
| 	movl	96(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	100(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 13 */
 | |
| 	movl	104(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	108(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 14 */
 | |
| 	movl	112(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	116(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 15 */
 | |
| 	movl	120(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	124(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
	jmp	.L001end
.align ALIGN
.L000start_decrypt:

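	/* Decryption path: the same sixteen rounds, but with the key schedule
	   walked from offset 120 down to 0. */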
| 	/* Round 15 */
 | |
| 	movl	120(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	124(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 14 */
 | |
| 	movl	112(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	116(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 13 */
 | |
| 	movl	104(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	108(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 12 */
 | |
| 	movl	96(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	100(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 11 */
 | |
| 	movl	88(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	92(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 10 */
 | |
| 	movl	80(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	84(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 9 */
 | |
| 	movl	72(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	76(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 8 */
 | |
| 	movl	64(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	68(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 7 */
 | |
| 	movl	56(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	60(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 6 */
 | |
| 	movl	48(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	52(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 5 */
 | |
| 	movl	40(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	44(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 4 */
 | |
| 	movl	32(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	36(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 3 */
 | |
| 	movl	24(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	28(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 2 */
 | |
| 	movl	16(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	20(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 1 */
 | |
| 	movl	8(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	12(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 0 */
 | |
| 	movl	(%ebp),		%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	4(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
.align ALIGN
.L001end:

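	/* Final permutation (FP): the inverse of the IP swaps above, again
	   done with rotate/mask/xor pairs, after which the two halves are
	   stored back through the data pointer reloaded from 20(%esp). */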
	/* FP */
	movl	20(%esp),	%edx
	rorl	$1,		%esi
	movl	%edi,		%eax
	xorl	%esi,		%edi
	andl	$0xaaaaaaaa,	%edi
	xorl	%edi,		%eax
	xorl	%edi,		%esi

	roll	$23,		%eax
	movl	%eax,		%edi
	xorl	%esi,		%eax
	andl	$0x03fc03fc,	%eax
	xorl	%eax,		%edi
	xorl	%eax,		%esi

	roll	$10,		%edi
	movl	%edi,		%eax
	xorl	%esi,		%edi
	andl	$0x33333333,	%edi
	xorl	%edi,		%eax
	xorl	%edi,		%esi

	roll	$18,		%esi
	movl	%esi,		%edi
	xorl	%eax,		%esi
	andl	$0xfff0000f,	%esi
	xorl	%esi,		%edi
	xorl	%esi,		%eax

	roll	$12,		%edi
	movl	%edi,		%esi
	xorl	%eax,		%edi
	andl	$0xf0f0f0f0,	%edi
	xorl	%edi,		%esi
	xorl	%edi,		%eax

	rorl	$4,		%eax
	movl	%eax,		(%edx)
	movl	%esi,		4(%edx)
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.des_encrypt_end:
	SIZE(des_encrypt,.des_encrypt_end-des_encrypt)
.ident	"desasm.pl"
.text
	.align ALIGN
.globl des_encrypt2
	TYPE(des_encrypt2,@function)
des_encrypt2:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi


	/* Load the 2 words */
	movl	20(%esp),	%eax
	xorl	%ecx,		%ecx
	movl	(%eax),		%esi
	movl	28(%esp),	%ebx
	roll	$3,		%esi
	movl	4(%eax),	%edi
	roll	$3,		%edi
	cmpl	$0,		%ebx
	movl	24(%esp),	%ebp
	je	.L002start_decrypt

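	/* des_encrypt2 omits the IP/FP permutations (presumably so callers
	   such as the triple-DES routines can apply them once around several
	   passes) and instead keeps the two halves rotated left by 3 bits
	   while running the same sixteen rounds as des_encrypt. */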
| 	/* Round 0 */
 | |
| 	movl	(%ebp),		%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	4(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 1 */
 | |
| 	movl	8(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	12(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 2 */
 | |
| 	movl	16(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	20(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 3 */
 | |
| 	movl	24(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	28(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 4 */
 | |
| 	movl	32(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	36(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 5 */
 | |
| 	movl	40(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	44(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 6 */
 | |
| 	movl	48(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	52(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 7 */
 | |
| 	movl	56(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	60(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 8 */
 | |
| 	movl	64(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	68(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 9 */
 | |
| 	movl	72(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	76(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 10 */
 | |
| 	movl	80(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	84(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 11 */
 | |
| 	movl	88(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	92(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 12 */
 | |
| 	movl	96(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	100(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 13 */
 | |
| 	movl	104(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	108(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 
 | |
| 	/* Round 14 */
 | |
| 	movl	112(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	116(%ebp),	%edx
 | |
| 	xorl	%esi,		%eax
 | |
| 	xorl	%esi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%edi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%edi
 | |
| 
 | |
| 	/* Round 15 */
 | |
| 	movl	120(%ebp),	%eax
 | |
| 	xorl	%ebx,		%ebx
 | |
| 	movl	124(%ebp),	%edx
 | |
| 	xorl	%edi,		%eax
 | |
| 	xorl	%edi,		%edx
 | |
| 	andl	$0xfcfcfcfc,	%eax
 | |
| 	andl	$0xcfcfcfcf,	%edx
 | |
| 	movb	%al,		%bl
 | |
| 	movb	%ah,		%cl
 | |
| 	rorl	$4,		%edx
 | |
| 	movl	      des_SPtrans(%ebx),%ebp
 | |
| 	movb	%dl,		%bl
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	0x200+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%dh,		%cl
 | |
| 	shrl	$16,		%eax
 | |
| 	movl	0x100+des_SPtrans(%ebx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movb	%ah,		%bl
 | |
| 	shrl	$16,		%edx
 | |
| 	movl	0x300+des_SPtrans(%ecx),%ebp
 | |
| 	xorl	%ebp,		%esi
 | |
| 	movl	24(%esp),	%ebp
 | |
| 	movb	%dh,		%cl
 | |
| 	andl	$0xff,		%eax
 | |
| 	andl	$0xff,		%edx
 | |
| 	movl	0x600+des_SPtrans(%ebx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x700+des_SPtrans(%ecx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x400+des_SPtrans(%eax),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	movl	0x500+des_SPtrans(%edx),%ebx
 | |
| 	xorl	%ebx,		%esi
 | |
| 	jmp	.L003end
 | |
| .align ALIGN
 | |
| .L002start_decrypt:

	/* Round 15 */
	movl	120(%ebp),	%eax
	xorl	%ebx,		%ebx
	movl	124(%ebp),	%edx
	xorl	%esi,		%eax
	xorl	%esi,		%edx
	andl	$0xfcfcfcfc,	%eax
	andl	$0xcfcfcfcf,	%edx
	movb	%al,		%bl
	movb	%ah,		%cl
	rorl	$4,		%edx
	movl	      des_SPtrans(%ebx),%ebp
	movb	%dl,		%bl
	xorl	%ebp,		%edi
	movl	0x200+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%edi
	movb	%dh,		%cl
	shrl	$16,		%eax
	movl	0x100+des_SPtrans(%ebx),%ebp
	xorl	%ebp,		%edi
	movb	%ah,		%bl
	shrl	$16,		%edx
	movl	0x300+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%edi
	movl	24(%esp),	%ebp
	movb	%dh,		%cl
	andl	$0xff,		%eax
	andl	$0xff,		%edx
	movl	0x600+des_SPtrans(%ebx),%ebx
	xorl	%ebx,		%edi
	movl	0x700+des_SPtrans(%ecx),%ebx
	xorl	%ebx,		%edi
	movl	0x400+des_SPtrans(%eax),%ebx
	xorl	%ebx,		%edi
	movl	0x500+des_SPtrans(%edx),%ebx
	xorl	%ebx,		%edi

	/* Round 14 */
	movl	112(%ebp),	%eax
	xorl	%ebx,		%ebx
	movl	116(%ebp),	%edx
	xorl	%edi,		%eax
	xorl	%edi,		%edx
	andl	$0xfcfcfcfc,	%eax
	andl	$0xcfcfcfcf,	%edx
	movb	%al,		%bl
	movb	%ah,		%cl
	rorl	$4,		%edx
	movl	      des_SPtrans(%ebx),%ebp
	movb	%dl,		%bl
	xorl	%ebp,		%esi
	movl	0x200+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%esi
	movb	%dh,		%cl
	shrl	$16,		%eax
	movl	0x100+des_SPtrans(%ebx),%ebp
	xorl	%ebp,		%esi
	movb	%ah,		%bl
	shrl	$16,		%edx
	movl	0x300+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%esi
	movl	24(%esp),	%ebp
	movb	%dh,		%cl
	andl	$0xff,		%eax
	andl	$0xff,		%edx
	movl	0x600+des_SPtrans(%ebx),%ebx
	xorl	%ebx,		%esi
	movl	0x700+des_SPtrans(%ecx),%ebx
	xorl	%ebx,		%esi
	movl	0x400+des_SPtrans(%eax),%ebx
	xorl	%ebx,		%esi
	movl	0x500+des_SPtrans(%edx),%ebx
	xorl	%ebx,		%esi

	/* Round 13 */
	movl	104(%ebp),	%eax
	xorl	%ebx,		%ebx
	movl	108(%ebp),	%edx
	xorl	%esi,		%eax
	xorl	%esi,		%edx
	andl	$0xfcfcfcfc,	%eax
	andl	$0xcfcfcfcf,	%edx
	movb	%al,		%bl
	movb	%ah,		%cl
	rorl	$4,		%edx
	movl	      des_SPtrans(%ebx),%ebp
	movb	%dl,		%bl
	xorl	%ebp,		%edi
	movl	0x200+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%edi
	movb	%dh,		%cl
	shrl	$16,		%eax
	movl	0x100+des_SPtrans(%ebx),%ebp
	xorl	%ebp,		%edi
	movb	%ah,		%bl
	shrl	$16,		%edx
	movl	0x300+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%edi
	movl	24(%esp),	%ebp
	movb	%dh,		%cl
	andl	$0xff,		%eax
	andl	$0xff,		%edx
	movl	0x600+des_SPtrans(%ebx),%ebx
	xorl	%ebx,		%edi
	movl	0x700+des_SPtrans(%ecx),%ebx
	xorl	%ebx,		%edi
	movl	0x400+des_SPtrans(%eax),%ebx
	xorl	%ebx,		%edi
	movl	0x500+des_SPtrans(%edx),%ebx
	xorl	%ebx,		%edi

	/* Round 12 */
	movl	96(%ebp),	%eax
	xorl	%ebx,		%ebx
	movl	100(%ebp),	%edx
	xorl	%edi,		%eax
	xorl	%edi,		%edx
	andl	$0xfcfcfcfc,	%eax
	andl	$0xcfcfcfcf,	%edx
	movb	%al,		%bl
	movb	%ah,		%cl
	rorl	$4,		%edx
	movl	      des_SPtrans(%ebx),%ebp
	movb	%dl,		%bl
	xorl	%ebp,		%esi
	movl	0x200+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%esi
	movb	%dh,		%cl
	shrl	$16,		%eax
	movl	0x100+des_SPtrans(%ebx),%ebp
	xorl	%ebp,		%esi
	movb	%ah,		%bl
	shrl	$16,		%edx
	movl	0x300+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%esi
	movl	24(%esp),	%ebp
	movb	%dh,		%cl
	andl	$0xff,		%eax
	andl	$0xff,		%edx
	movl	0x600+des_SPtrans(%ebx),%ebx
	xorl	%ebx,		%esi
	movl	0x700+des_SPtrans(%ecx),%ebx
	xorl	%ebx,		%esi
	movl	0x400+des_SPtrans(%eax),%ebx
	xorl	%ebx,		%esi
	movl	0x500+des_SPtrans(%edx),%ebx
	xorl	%ebx,		%esi

	/* Round 11 */
	movl	88(%ebp),	%eax
	xorl	%ebx,		%ebx
	movl	92(%ebp),	%edx
	xorl	%esi,		%eax
	xorl	%esi,		%edx
	andl	$0xfcfcfcfc,	%eax
	andl	$0xcfcfcfcf,	%edx
	movb	%al,		%bl
	movb	%ah,		%cl
	rorl	$4,		%edx
	movl	      des_SPtrans(%ebx),%ebp
	movb	%dl,		%bl
	xorl	%ebp,		%edi
	movl	0x200+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%edi
	movb	%dh,		%cl
	shrl	$16,		%eax
	movl	0x100+des_SPtrans(%ebx),%ebp
	xorl	%ebp,		%edi
	movb	%ah,		%bl
	shrl	$16,		%edx
	movl	0x300+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%edi
	movl	24(%esp),	%ebp
	movb	%dh,		%cl
	andl	$0xff,		%eax
	andl	$0xff,		%edx
	movl	0x600+des_SPtrans(%ebx),%ebx
	xorl	%ebx,		%edi
	movl	0x700+des_SPtrans(%ecx),%ebx
	xorl	%ebx,		%edi
	movl	0x400+des_SPtrans(%eax),%ebx
	xorl	%ebx,		%edi
	movl	0x500+des_SPtrans(%edx),%ebx
	xorl	%ebx,		%edi

	/* Round 10 */
	movl	80(%ebp),	%eax
	xorl	%ebx,		%ebx
	movl	84(%ebp),	%edx
	xorl	%edi,		%eax
	xorl	%edi,		%edx
	andl	$0xfcfcfcfc,	%eax
	andl	$0xcfcfcfcf,	%edx
	movb	%al,		%bl
	movb	%ah,		%cl
	rorl	$4,		%edx
	movl	      des_SPtrans(%ebx),%ebp
	movb	%dl,		%bl
	xorl	%ebp,		%esi
	movl	0x200+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%esi
	movb	%dh,		%cl
	shrl	$16,		%eax
	movl	0x100+des_SPtrans(%ebx),%ebp
	xorl	%ebp,		%esi
	movb	%ah,		%bl
	shrl	$16,		%edx
	movl	0x300+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%esi
	movl	24(%esp),	%ebp
	movb	%dh,		%cl
	andl	$0xff,		%eax
	andl	$0xff,		%edx
	movl	0x600+des_SPtrans(%ebx),%ebx
	xorl	%ebx,		%esi
	movl	0x700+des_SPtrans(%ecx),%ebx
	xorl	%ebx,		%esi
	movl	0x400+des_SPtrans(%eax),%ebx
	xorl	%ebx,		%esi
	movl	0x500+des_SPtrans(%edx),%ebx
	xorl	%ebx,		%esi

	/* Round 9 */
	movl	72(%ebp),	%eax
	xorl	%ebx,		%ebx
	movl	76(%ebp),	%edx
	xorl	%esi,		%eax
	xorl	%esi,		%edx
	andl	$0xfcfcfcfc,	%eax
	andl	$0xcfcfcfcf,	%edx
	movb	%al,		%bl
	movb	%ah,		%cl
	rorl	$4,		%edx
	movl	      des_SPtrans(%ebx),%ebp
	movb	%dl,		%bl
	xorl	%ebp,		%edi
	movl	0x200+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%edi
	movb	%dh,		%cl
	shrl	$16,		%eax
	movl	0x100+des_SPtrans(%ebx),%ebp
	xorl	%ebp,		%edi
	movb	%ah,		%bl
	shrl	$16,		%edx
	movl	0x300+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%edi
	movl	24(%esp),	%ebp
	movb	%dh,		%cl
	andl	$0xff,		%eax
	andl	$0xff,		%edx
	movl	0x600+des_SPtrans(%ebx),%ebx
	xorl	%ebx,		%edi
	movl	0x700+des_SPtrans(%ecx),%ebx
	xorl	%ebx,		%edi
	movl	0x400+des_SPtrans(%eax),%ebx
	xorl	%ebx,		%edi
	movl	0x500+des_SPtrans(%edx),%ebx
	xorl	%ebx,		%edi

	/* Round 8 */
	movl	64(%ebp),	%eax
	xorl	%ebx,		%ebx
	movl	68(%ebp),	%edx
	xorl	%edi,		%eax
	xorl	%edi,		%edx
	andl	$0xfcfcfcfc,	%eax
	andl	$0xcfcfcfcf,	%edx
	movb	%al,		%bl
	movb	%ah,		%cl
	rorl	$4,		%edx
	movl	      des_SPtrans(%ebx),%ebp
	movb	%dl,		%bl
	xorl	%ebp,		%esi
	movl	0x200+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%esi
	movb	%dh,		%cl
	shrl	$16,		%eax
	movl	0x100+des_SPtrans(%ebx),%ebp
	xorl	%ebp,		%esi
	movb	%ah,		%bl
	shrl	$16,		%edx
	movl	0x300+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%esi
	movl	24(%esp),	%ebp
	movb	%dh,		%cl
	andl	$0xff,		%eax
	andl	$0xff,		%edx
	movl	0x600+des_SPtrans(%ebx),%ebx
	xorl	%ebx,		%esi
	movl	0x700+des_SPtrans(%ecx),%ebx
	xorl	%ebx,		%esi
	movl	0x400+des_SPtrans(%eax),%ebx
	xorl	%ebx,		%esi
	movl	0x500+des_SPtrans(%edx),%ebx
	xorl	%ebx,		%esi

	/* Round 7 */
	movl	56(%ebp),	%eax
	xorl	%ebx,		%ebx
	movl	60(%ebp),	%edx
	xorl	%esi,		%eax
	xorl	%esi,		%edx
	andl	$0xfcfcfcfc,	%eax
	andl	$0xcfcfcfcf,	%edx
	movb	%al,		%bl
	movb	%ah,		%cl
	rorl	$4,		%edx
	movl	      des_SPtrans(%ebx),%ebp
	movb	%dl,		%bl
	xorl	%ebp,		%edi
	movl	0x200+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%edi
	movb	%dh,		%cl
	shrl	$16,		%eax
	movl	0x100+des_SPtrans(%ebx),%ebp
	xorl	%ebp,		%edi
	movb	%ah,		%bl
	shrl	$16,		%edx
	movl	0x300+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%edi
	movl	24(%esp),	%ebp
	movb	%dh,		%cl
	andl	$0xff,		%eax
	andl	$0xff,		%edx
	movl	0x600+des_SPtrans(%ebx),%ebx
	xorl	%ebx,		%edi
	movl	0x700+des_SPtrans(%ecx),%ebx
	xorl	%ebx,		%edi
	movl	0x400+des_SPtrans(%eax),%ebx
	xorl	%ebx,		%edi
	movl	0x500+des_SPtrans(%edx),%ebx
	xorl	%ebx,		%edi

	/* Round 6 */
	movl	48(%ebp),	%eax
	xorl	%ebx,		%ebx
	movl	52(%ebp),	%edx
	xorl	%edi,		%eax
	xorl	%edi,		%edx
	andl	$0xfcfcfcfc,	%eax
	andl	$0xcfcfcfcf,	%edx
	movb	%al,		%bl
	movb	%ah,		%cl
	rorl	$4,		%edx
	movl	      des_SPtrans(%ebx),%ebp
	movb	%dl,		%bl
	xorl	%ebp,		%esi
	movl	0x200+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%esi
	movb	%dh,		%cl
	shrl	$16,		%eax
	movl	0x100+des_SPtrans(%ebx),%ebp
	xorl	%ebp,		%esi
	movb	%ah,		%bl
	shrl	$16,		%edx
	movl	0x300+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%esi
	movl	24(%esp),	%ebp
	movb	%dh,		%cl
	andl	$0xff,		%eax
	andl	$0xff,		%edx
	movl	0x600+des_SPtrans(%ebx),%ebx
	xorl	%ebx,		%esi
	movl	0x700+des_SPtrans(%ecx),%ebx
	xorl	%ebx,		%esi
	movl	0x400+des_SPtrans(%eax),%ebx
	xorl	%ebx,		%esi
	movl	0x500+des_SPtrans(%edx),%ebx
	xorl	%ebx,		%esi

	/* Round 5 */
	movl	40(%ebp),	%eax
	xorl	%ebx,		%ebx
	movl	44(%ebp),	%edx
	xorl	%esi,		%eax
	xorl	%esi,		%edx
	andl	$0xfcfcfcfc,	%eax
	andl	$0xcfcfcfcf,	%edx
	movb	%al,		%bl
	movb	%ah,		%cl
	rorl	$4,		%edx
	movl	      des_SPtrans(%ebx),%ebp
	movb	%dl,		%bl
	xorl	%ebp,		%edi
	movl	0x200+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%edi
	movb	%dh,		%cl
	shrl	$16,		%eax
	movl	0x100+des_SPtrans(%ebx),%ebp
	xorl	%ebp,		%edi
	movb	%ah,		%bl
	shrl	$16,		%edx
	movl	0x300+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%edi
	movl	24(%esp),	%ebp
	movb	%dh,		%cl
	andl	$0xff,		%eax
	andl	$0xff,		%edx
	movl	0x600+des_SPtrans(%ebx),%ebx
	xorl	%ebx,		%edi
	movl	0x700+des_SPtrans(%ecx),%ebx
	xorl	%ebx,		%edi
	movl	0x400+des_SPtrans(%eax),%ebx
	xorl	%ebx,		%edi
	movl	0x500+des_SPtrans(%edx),%ebx
	xorl	%ebx,		%edi

	/* Round 4 */
	movl	32(%ebp),	%eax
	xorl	%ebx,		%ebx
	movl	36(%ebp),	%edx
	xorl	%edi,		%eax
	xorl	%edi,		%edx
	andl	$0xfcfcfcfc,	%eax
	andl	$0xcfcfcfcf,	%edx
	movb	%al,		%bl
	movb	%ah,		%cl
	rorl	$4,		%edx
	movl	      des_SPtrans(%ebx),%ebp
	movb	%dl,		%bl
	xorl	%ebp,		%esi
	movl	0x200+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%esi
	movb	%dh,		%cl
	shrl	$16,		%eax
	movl	0x100+des_SPtrans(%ebx),%ebp
	xorl	%ebp,		%esi
	movb	%ah,		%bl
	shrl	$16,		%edx
	movl	0x300+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%esi
	movl	24(%esp),	%ebp
	movb	%dh,		%cl
	andl	$0xff,		%eax
	andl	$0xff,		%edx
	movl	0x600+des_SPtrans(%ebx),%ebx
	xorl	%ebx,		%esi
	movl	0x700+des_SPtrans(%ecx),%ebx
	xorl	%ebx,		%esi
	movl	0x400+des_SPtrans(%eax),%ebx
	xorl	%ebx,		%esi
	movl	0x500+des_SPtrans(%edx),%ebx
	xorl	%ebx,		%esi

	/* Round 3 */
	movl	24(%ebp),	%eax
	xorl	%ebx,		%ebx
	movl	28(%ebp),	%edx
	xorl	%esi,		%eax
	xorl	%esi,		%edx
	andl	$0xfcfcfcfc,	%eax
	andl	$0xcfcfcfcf,	%edx
	movb	%al,		%bl
	movb	%ah,		%cl
	rorl	$4,		%edx
	movl	      des_SPtrans(%ebx),%ebp
	movb	%dl,		%bl
	xorl	%ebp,		%edi
	movl	0x200+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%edi
	movb	%dh,		%cl
	shrl	$16,		%eax
	movl	0x100+des_SPtrans(%ebx),%ebp
	xorl	%ebp,		%edi
	movb	%ah,		%bl
	shrl	$16,		%edx
	movl	0x300+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%edi
	movl	24(%esp),	%ebp
	movb	%dh,		%cl
	andl	$0xff,		%eax
	andl	$0xff,		%edx
	movl	0x600+des_SPtrans(%ebx),%ebx
	xorl	%ebx,		%edi
	movl	0x700+des_SPtrans(%ecx),%ebx
	xorl	%ebx,		%edi
	movl	0x400+des_SPtrans(%eax),%ebx
	xorl	%ebx,		%edi
	movl	0x500+des_SPtrans(%edx),%ebx
	xorl	%ebx,		%edi

	/* Round 2 */
	movl	16(%ebp),	%eax
	xorl	%ebx,		%ebx
	movl	20(%ebp),	%edx
	xorl	%edi,		%eax
	xorl	%edi,		%edx
	andl	$0xfcfcfcfc,	%eax
	andl	$0xcfcfcfcf,	%edx
	movb	%al,		%bl
	movb	%ah,		%cl
	rorl	$4,		%edx
	movl	      des_SPtrans(%ebx),%ebp
	movb	%dl,		%bl
	xorl	%ebp,		%esi
	movl	0x200+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%esi
	movb	%dh,		%cl
	shrl	$16,		%eax
	movl	0x100+des_SPtrans(%ebx),%ebp
	xorl	%ebp,		%esi
	movb	%ah,		%bl
	shrl	$16,		%edx
	movl	0x300+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%esi
	movl	24(%esp),	%ebp
	movb	%dh,		%cl
	andl	$0xff,		%eax
	andl	$0xff,		%edx
	movl	0x600+des_SPtrans(%ebx),%ebx
	xorl	%ebx,		%esi
	movl	0x700+des_SPtrans(%ecx),%ebx
	xorl	%ebx,		%esi
	movl	0x400+des_SPtrans(%eax),%ebx
	xorl	%ebx,		%esi
	movl	0x500+des_SPtrans(%edx),%ebx
	xorl	%ebx,		%esi

	/* Round 1 */
	movl	8(%ebp),	%eax
	xorl	%ebx,		%ebx
	movl	12(%ebp),	%edx
	xorl	%esi,		%eax
	xorl	%esi,		%edx
	andl	$0xfcfcfcfc,	%eax
	andl	$0xcfcfcfcf,	%edx
	movb	%al,		%bl
	movb	%ah,		%cl
	rorl	$4,		%edx
	movl	      des_SPtrans(%ebx),%ebp
	movb	%dl,		%bl
	xorl	%ebp,		%edi
	movl	0x200+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%edi
	movb	%dh,		%cl
	shrl	$16,		%eax
	movl	0x100+des_SPtrans(%ebx),%ebp
	xorl	%ebp,		%edi
	movb	%ah,		%bl
	shrl	$16,		%edx
	movl	0x300+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%edi
	movl	24(%esp),	%ebp
	movb	%dh,		%cl
	andl	$0xff,		%eax
	andl	$0xff,		%edx
	movl	0x600+des_SPtrans(%ebx),%ebx
	xorl	%ebx,		%edi
	movl	0x700+des_SPtrans(%ecx),%ebx
	xorl	%ebx,		%edi
	movl	0x400+des_SPtrans(%eax),%ebx
	xorl	%ebx,		%edi
	movl	0x500+des_SPtrans(%edx),%ebx
	xorl	%ebx,		%edi

	/* Round 0 */
	movl	(%ebp),		%eax
	xorl	%ebx,		%ebx
	movl	4(%ebp),	%edx
	xorl	%edi,		%eax
	xorl	%edi,		%edx
	andl	$0xfcfcfcfc,	%eax
	andl	$0xcfcfcfcf,	%edx
	movb	%al,		%bl
	movb	%ah,		%cl
	rorl	$4,		%edx
	movl	      des_SPtrans(%ebx),%ebp
	movb	%dl,		%bl
	xorl	%ebp,		%esi
	movl	0x200+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%esi
	movb	%dh,		%cl
	shrl	$16,		%eax
	movl	0x100+des_SPtrans(%ebx),%ebp
	xorl	%ebp,		%esi
	movb	%ah,		%bl
	shrl	$16,		%edx
	movl	0x300+des_SPtrans(%ecx),%ebp
	xorl	%ebp,		%esi
	movl	24(%esp),	%ebp
	movb	%dh,		%cl
	andl	$0xff,		%eax
	andl	$0xff,		%edx
	movl	0x600+des_SPtrans(%ebx),%ebx
	xorl	%ebx,		%esi
	movl	0x700+des_SPtrans(%ecx),%ebx
	xorl	%ebx,		%esi
	movl	0x400+des_SPtrans(%eax),%ebx
	xorl	%ebx,		%esi
	movl	0x500+des_SPtrans(%edx),%ebx
	xorl	%ebx,		%esi
.align ALIGN
.L003end:

	/* Fixup */
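	/* Rotate both halves right by 3 and store them back through the */
	/* block pointer passed as the first argument */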
	rorl	$3,		%edi
	movl	20(%esp),	%eax
	rorl	$3,		%esi
	movl	%edi,		(%eax)
	movl	%esi,		4(%eax)
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.des_encrypt2_end:
	SIZE(des_encrypt2,.des_encrypt2_end-des_encrypt2)
.ident	"desasm.pl"
.text
	.align ALIGN
.globl des_encrypt3
	TYPE(des_encrypt3,@function)
des_encrypt3:
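	/* Triple-DES (EDE) on one block: apply IP, call des_encrypt2 three times */
	/* (encrypt with the first key schedule, decrypt with the second, encrypt */
	/* with the third), then apply FP */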
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi


	/* Load the data words */
	movl	20(%esp),	%ebx
	movl	(%ebx),		%edi
	movl	4(%ebx),	%esi

	/* IP */
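	/* The initial permutation is computed as five swap-with-mask steps */
	/* on the two data halves */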
	roll	$4,		%edi
	movl	%edi,		%edx
	xorl	%esi,		%edi
	andl	$0xf0f0f0f0,	%edi
	xorl	%edi,		%edx
	xorl	%edi,		%esi

	roll	$20,		%esi
	movl	%esi,		%edi
	xorl	%edx,		%esi
	andl	$0xfff0000f,	%esi
	xorl	%esi,		%edi
	xorl	%esi,		%edx

	roll	$14,		%edi
	movl	%edi,		%esi
	xorl	%edx,		%edi
	andl	$0x33333333,	%edi
	xorl	%edi,		%esi
	xorl	%edi,		%edx

	roll	$22,		%edx
	movl	%edx,		%edi
	xorl	%esi,		%edx
	andl	$0x03fc03fc,	%edx
	xorl	%edx,		%edi
	xorl	%edx,		%esi

	roll	$9,		%edi
	movl	%edi,		%edx
	xorl	%esi,		%edi
	andl	$0xaaaaaaaa,	%edi
	xorl	%edi,		%edx
	xorl	%edi,		%esi

	rorl	$3,		%edx
	rorl	$2,		%esi
	movl	%esi,		4(%ebx)
	movl	24(%esp),	%eax
	movl	%edx,		(%ebx)
	movl	28(%esp),	%edi
	movl	32(%esp),	%esi
	pushl	$1
	pushl	%eax
	pushl	%ebx
	call	des_encrypt2
	pushl	$0
	pushl	%edi
	pushl	%ebx
	call	des_encrypt2
	pushl	$1
	pushl	%esi
	pushl	%ebx
	call	des_encrypt2
	movl	(%ebx),		%edi
	addl	$36,		%esp
	movl	4(%ebx),	%esi

	/* FP */
	roll	$2,		%esi
	roll	$3,		%edi
	movl	%edi,		%eax
	xorl	%esi,		%edi
	andl	$0xaaaaaaaa,	%edi
	xorl	%edi,		%eax
	xorl	%edi,		%esi

	roll	$23,		%eax
	movl	%eax,		%edi
	xorl	%esi,		%eax
	andl	$0x03fc03fc,	%eax
	xorl	%eax,		%edi
	xorl	%eax,		%esi

	roll	$10,		%edi
	movl	%edi,		%eax
	xorl	%esi,		%edi
	andl	$0x33333333,	%edi
	xorl	%edi,		%eax
	xorl	%edi,		%esi

	roll	$18,		%esi
	movl	%esi,		%edi
	xorl	%eax,		%esi
	andl	$0xfff0000f,	%esi
	xorl	%esi,		%edi
	xorl	%esi,		%eax

	roll	$12,		%edi
	movl	%edi,		%esi
	xorl	%eax,		%edi
	andl	$0xf0f0f0f0,	%edi
	xorl	%edi,		%esi
	xorl	%edi,		%eax

	rorl	$4,		%eax
	movl	%eax,		(%ebx)
	movl	%esi,		4(%ebx)
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.des_encrypt3_end:
	SIZE(des_encrypt3,.des_encrypt3_end-des_encrypt3)
.ident	"desasm.pl"
.text
	.align ALIGN
.globl des_decrypt3
	TYPE(des_decrypt3,@function)
des_decrypt3:
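	/* Inverse of des_encrypt3: apply IP, call des_encrypt2 three times */
	/* (decrypt with the third key schedule, encrypt with the second, */
	/* decrypt with the first), then apply FP */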
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi


	/* Load the data words */
	movl	20(%esp),	%ebx
	movl	(%ebx),		%edi
	movl	4(%ebx),	%esi

	/* IP */
	roll	$4,		%edi
	movl	%edi,		%edx
	xorl	%esi,		%edi
	andl	$0xf0f0f0f0,	%edi
	xorl	%edi,		%edx
	xorl	%edi,		%esi

	roll	$20,		%esi
	movl	%esi,		%edi
	xorl	%edx,		%esi
	andl	$0xfff0000f,	%esi
	xorl	%esi,		%edi
	xorl	%esi,		%edx

	roll	$14,		%edi
	movl	%edi,		%esi
	xorl	%edx,		%edi
	andl	$0x33333333,	%edi
	xorl	%edi,		%esi
	xorl	%edi,		%edx

	roll	$22,		%edx
	movl	%edx,		%edi
	xorl	%esi,		%edx
	andl	$0x03fc03fc,	%edx
	xorl	%edx,		%edi
	xorl	%edx,		%esi

	roll	$9,		%edi
	movl	%edi,		%edx
	xorl	%esi,		%edi
	andl	$0xaaaaaaaa,	%edi
	xorl	%edi,		%edx
	xorl	%edi,		%esi

	rorl	$3,		%edx
	rorl	$2,		%esi
	movl	%esi,		4(%ebx)
	movl	24(%esp),	%esi
	movl	%edx,		(%ebx)
	movl	28(%esp),	%edi
	movl	32(%esp),	%eax
	pushl	$0
	pushl	%eax
	pushl	%ebx
	call	des_encrypt2
	pushl	$1
	pushl	%edi
	pushl	%ebx
	call	des_encrypt2
	pushl	$0
	pushl	%esi
	pushl	%ebx
	call	des_encrypt2
	movl	(%ebx),		%edi
	addl	$36,		%esp
	movl	4(%ebx),	%esi

	/* FP */
	roll	$2,		%esi
	roll	$3,		%edi
	movl	%edi,		%eax
	xorl	%esi,		%edi
	andl	$0xaaaaaaaa,	%edi
	xorl	%edi,		%eax
	xorl	%edi,		%esi

	roll	$23,		%eax
	movl	%eax,		%edi
	xorl	%esi,		%eax
	andl	$0x03fc03fc,	%eax
	xorl	%eax,		%edi
	xorl	%eax,		%esi

	roll	$10,		%edi
	movl	%edi,		%eax
	xorl	%esi,		%edi
	andl	$0x33333333,	%edi
	xorl	%edi,		%eax
	xorl	%edi,		%esi

	roll	$18,		%esi
	movl	%esi,		%edi
	xorl	%eax,		%esi
	andl	$0xfff0000f,	%esi
	xorl	%esi,		%edi
	xorl	%esi,		%eax

	roll	$12,		%edi
	movl	%edi,		%esi
	xorl	%eax,		%edi
	andl	$0xf0f0f0f0,	%edi
	xorl	%edi,		%esi
	xorl	%edi,		%eax

	rorl	$4,		%eax
	movl	%eax,		(%ebx)
	movl	%esi,		4(%ebx)
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.des_decrypt3_end:
	SIZE(des_decrypt3,.des_decrypt3_end-des_decrypt3)
.ident	"desasm.pl"