| /* Don't even think of reading this code */ |
| /* It was automatically generated by bf586.pl */ |
 | /* Which is a Perl program used to generate the x86 assembler for */ |
| /* any of elf, a.out, Win32, or Solaris */ |
| /* It can be found in SSLeay 0.7.0+ */ |
| /* eric <eay@cryptsoft.com> */ |
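 | /* */ |
 | /* Inferred C-level interface -- a sketch based on the stack offsets */ |
 | /* used below; the exact SSLeay prototype may differ: */ |
 | /* */ |
 | /*   void BF_encrypt(unsigned long data[2],  20(%esp): the two halves */ |
 | /*                   BF_KEY *key,            24(%esp): P array + S-boxes */ |
 | /*                   int encrypt);           28(%esp): zero means decrypt */ |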
| |
| .file "bfx86xxxx.s" |
| .version "01.01" |
| gcc2_compiled.: |
| .text |
| .align ALIGN |
| .globl BF_encrypt |
| TYPE(BF_encrypt,@function) |
| BF_encrypt: |
| pushl %ebp |
| pushl %ebx |
| pushl %esi |
| pushl %edi |
| |
| |
 | /* Load the two 32-bit halves of the data block */ |
| movl 20(%esp), %eax |
| movl (%eax), %ecx |
| movl 4(%eax), %edx |
| |
 | /* Load the key schedule pointer (P array plus S-boxes) and the */ |
 | /* encrypt flag; a zero flag selects the decrypt path */ |
| movl 24(%esp), %edi |
| xorl %eax, %eax |
| xorl %ebx, %ebx |
| movl 28(%esp), %ebp |
| cmpl $0, %ebp |
| je .L000start_decrypt |
| xorl (%edi), %ecx |
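 | |
 | /* Each of the 16 rounds below evaluates the Blowfish F function on */ |
 | /* one half of the block: */ |
 | /*   F(x) = ((S0[x>>24] + S1[(x>>16)&0xff]) ^ S2[(x>>8)&0xff]) + S3[x&0xff] */ |
 | /* The schedule at %edi holds P[0..17] (byte offsets 0..68) followed */ |
 | /* by the four 256-entry S-boxes at byte offsets 72, 1096, 2120 and */ |
 | /* 3144.  The paired rorl $16 instructions expose all four bytes of a */ |
 | /* half via %ch/%cl (or %dh/%dl) without extra shifts; the xorl just */ |
 | /* above is the input whitening, L ^= P[0]. */ |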
| |
| /* Round 0 */ |
| rorl $16, %ecx |
| movl 4(%edi), %esi |
| movb %ch, %al |
| movb %cl, %bl |
| rorl $16, %ecx |
| xorl %esi, %edx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %ch, %al |
| movb %cl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %edx |
| |
| /* Round 1 */ |
| rorl $16, %edx |
| movl 8(%edi), %esi |
| movb %dh, %al |
| movb %dl, %bl |
| rorl $16, %edx |
| xorl %esi, %ecx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %dh, %al |
| movb %dl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %ecx |
| |
| /* Round 2 */ |
| rorl $16, %ecx |
| movl 12(%edi), %esi |
| movb %ch, %al |
| movb %cl, %bl |
| rorl $16, %ecx |
| xorl %esi, %edx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %ch, %al |
| movb %cl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %edx |
| |
| /* Round 3 */ |
| rorl $16, %edx |
| movl 16(%edi), %esi |
| movb %dh, %al |
| movb %dl, %bl |
| rorl $16, %edx |
| xorl %esi, %ecx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %dh, %al |
| movb %dl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %ecx |
| |
| /* Round 4 */ |
| rorl $16, %ecx |
| movl 20(%edi), %esi |
| movb %ch, %al |
| movb %cl, %bl |
| rorl $16, %ecx |
| xorl %esi, %edx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %ch, %al |
| movb %cl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %edx |
| |
| /* Round 5 */ |
| rorl $16, %edx |
| movl 24(%edi), %esi |
| movb %dh, %al |
| movb %dl, %bl |
| rorl $16, %edx |
| xorl %esi, %ecx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %dh, %al |
| movb %dl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %ecx |
| |
| /* Round 6 */ |
| rorl $16, %ecx |
| movl 28(%edi), %esi |
| movb %ch, %al |
| movb %cl, %bl |
| rorl $16, %ecx |
| xorl %esi, %edx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %ch, %al |
| movb %cl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %edx |
| |
| /* Round 7 */ |
| rorl $16, %edx |
| movl 32(%edi), %esi |
| movb %dh, %al |
| movb %dl, %bl |
| rorl $16, %edx |
| xorl %esi, %ecx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %dh, %al |
| movb %dl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %ecx |
| |
| /* Round 8 */ |
| rorl $16, %ecx |
| movl 36(%edi), %esi |
| movb %ch, %al |
| movb %cl, %bl |
| rorl $16, %ecx |
| xorl %esi, %edx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %ch, %al |
| movb %cl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %edx |
| |
| /* Round 9 */ |
| rorl $16, %edx |
| movl 40(%edi), %esi |
| movb %dh, %al |
| movb %dl, %bl |
| rorl $16, %edx |
| xorl %esi, %ecx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %dh, %al |
| movb %dl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %ecx |
| |
| /* Round 10 */ |
| rorl $16, %ecx |
| movl 44(%edi), %esi |
| movb %ch, %al |
| movb %cl, %bl |
| rorl $16, %ecx |
| xorl %esi, %edx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %ch, %al |
| movb %cl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %edx |
| |
| /* Round 11 */ |
| rorl $16, %edx |
| movl 48(%edi), %esi |
| movb %dh, %al |
| movb %dl, %bl |
| rorl $16, %edx |
| xorl %esi, %ecx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %dh, %al |
| movb %dl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %ecx |
| |
| /* Round 12 */ |
| rorl $16, %ecx |
| movl 52(%edi), %esi |
| movb %ch, %al |
| movb %cl, %bl |
| rorl $16, %ecx |
| xorl %esi, %edx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %ch, %al |
| movb %cl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %edx |
| |
| /* Round 13 */ |
| rorl $16, %edx |
| movl 56(%edi), %esi |
| movb %dh, %al |
| movb %dl, %bl |
| rorl $16, %edx |
| xorl %esi, %ecx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %dh, %al |
| movb %dl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %ecx |
| |
| /* Round 14 */ |
| rorl $16, %ecx |
| movl 60(%edi), %esi |
| movb %ch, %al |
| movb %cl, %bl |
| rorl $16, %ecx |
| xorl %esi, %edx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %ch, %al |
| movb %cl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %edx |
| |
| /* Round 15 */ |
| rorl $16, %edx |
| movl 64(%edi), %esi |
| movb %dh, %al |
| movb %dl, %bl |
| rorl $16, %edx |
| xorl %esi, %ecx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %dh, %al |
| movb %dl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %ecx |
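 | |
 | /* Final whitening and swap: %edx picks up P[17] (offset 68) and the */ |
 | /* halves are written back in swapped order. */ |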
| xorl 68(%edi), %edx |
| movl 20(%esp), %eax |
| movl %edx, (%eax) |
| movl %ecx, 4(%eax) |
| popl %edi |
| popl %esi |
| popl %ebx |
| popl %ebp |
| ret |
| .align ALIGN |
| .L000start_decrypt: |
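 | /* Decryption applies the same rounds with the subkeys in reverse */ |
 | /* order: P[17] first, then P[16] down to P[1], with P[0] folded */ |
 | /* into the final store below. */ |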
| xorl 68(%edi), %ecx |
| |
| /* Round 16 */ |
| rorl $16, %ecx |
| movl 64(%edi), %esi |
| movb %ch, %al |
| movb %cl, %bl |
| rorl $16, %ecx |
| xorl %esi, %edx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %ch, %al |
| movb %cl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %edx |
| |
| /* Round 15 */ |
| rorl $16, %edx |
| movl 60(%edi), %esi |
| movb %dh, %al |
| movb %dl, %bl |
| rorl $16, %edx |
| xorl %esi, %ecx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %dh, %al |
| movb %dl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %ecx |
| |
| /* Round 14 */ |
| rorl $16, %ecx |
| movl 56(%edi), %esi |
| movb %ch, %al |
| movb %cl, %bl |
| rorl $16, %ecx |
| xorl %esi, %edx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %ch, %al |
| movb %cl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %edx |
| |
| /* Round 13 */ |
| rorl $16, %edx |
| movl 52(%edi), %esi |
| movb %dh, %al |
| movb %dl, %bl |
| rorl $16, %edx |
| xorl %esi, %ecx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %dh, %al |
| movb %dl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %ecx |
| |
| /* Round 12 */ |
| rorl $16, %ecx |
| movl 48(%edi), %esi |
| movb %ch, %al |
| movb %cl, %bl |
| rorl $16, %ecx |
| xorl %esi, %edx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %ch, %al |
| movb %cl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %edx |
| |
| /* Round 11 */ |
| rorl $16, %edx |
| movl 44(%edi), %esi |
| movb %dh, %al |
| movb %dl, %bl |
| rorl $16, %edx |
| xorl %esi, %ecx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %dh, %al |
| movb %dl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %ecx |
| |
| /* Round 10 */ |
| rorl $16, %ecx |
| movl 40(%edi), %esi |
| movb %ch, %al |
| movb %cl, %bl |
| rorl $16, %ecx |
| xorl %esi, %edx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %ch, %al |
| movb %cl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %edx |
| |
| /* Round 9 */ |
| rorl $16, %edx |
| movl 36(%edi), %esi |
| movb %dh, %al |
| movb %dl, %bl |
| rorl $16, %edx |
| xorl %esi, %ecx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %dh, %al |
| movb %dl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %ecx |
| |
| /* Round 8 */ |
| rorl $16, %ecx |
| movl 32(%edi), %esi |
| movb %ch, %al |
| movb %cl, %bl |
| rorl $16, %ecx |
| xorl %esi, %edx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %ch, %al |
| movb %cl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %edx |
| |
| /* Round 7 */ |
| rorl $16, %edx |
| movl 28(%edi), %esi |
| movb %dh, %al |
| movb %dl, %bl |
| rorl $16, %edx |
| xorl %esi, %ecx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %dh, %al |
| movb %dl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %ecx |
| |
| /* Round 6 */ |
| rorl $16, %ecx |
| movl 24(%edi), %esi |
| movb %ch, %al |
| movb %cl, %bl |
| rorl $16, %ecx |
| xorl %esi, %edx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %ch, %al |
| movb %cl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %edx |
| |
| /* Round 5 */ |
| rorl $16, %edx |
| movl 20(%edi), %esi |
| movb %dh, %al |
| movb %dl, %bl |
| rorl $16, %edx |
| xorl %esi, %ecx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %dh, %al |
| movb %dl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %ecx |
| |
| /* Round 4 */ |
| rorl $16, %ecx |
| movl 16(%edi), %esi |
| movb %ch, %al |
| movb %cl, %bl |
| rorl $16, %ecx |
| xorl %esi, %edx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %ch, %al |
| movb %cl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %edx |
| |
| /* Round 3 */ |
| rorl $16, %edx |
| movl 12(%edi), %esi |
| movb %dh, %al |
| movb %dl, %bl |
| rorl $16, %edx |
| xorl %esi, %ecx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %dh, %al |
| movb %dl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %ecx |
| |
| /* Round 2 */ |
| rorl $16, %ecx |
| movl 8(%edi), %esi |
| movb %ch, %al |
| movb %cl, %bl |
| rorl $16, %ecx |
| xorl %esi, %edx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %ch, %al |
| movb %cl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %edx |
| |
| /* Round 1 */ |
| rorl $16, %edx |
| movl 4(%edi), %esi |
| movb %dh, %al |
| movb %dl, %bl |
| rorl $16, %edx |
| xorl %esi, %ecx |
| movl 72(%edi,%eax,4),%esi |
| movl 1096(%edi,%ebx,4),%ebp |
| movb %dh, %al |
| movb %dl, %bl |
| addl %ebp, %esi |
| movl 2120(%edi,%eax,4),%eax |
| xorl %eax, %esi |
| movl 3144(%edi,%ebx,4),%ebp |
| addl %ebp, %esi |
| xorl %eax, %eax |
| xorl %esi, %ecx |
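 | |
 | /* Final whitening and swap for the decrypt path: %edx picks up P[0] */ |
 | /* and the halves are written back in swapped order. */ |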
| xorl (%edi), %edx |
| movl 20(%esp), %eax |
| movl %edx, (%eax) |
| movl %ecx, 4(%eax) |
| popl %edi |
| popl %esi |
| popl %ebx |
| popl %ebp |
| ret |
| .BF_encrypt_end: |
| SIZE(BF_encrypt,.BF_encrypt_end-BF_encrypt) |
 | .ident "bf586.pl" |