bx86-cpp.s
	/* Don't even think of reading this code */
	/* It was automatically generated by bf586.pl */
	/* Which is a perl program used to generate the x86 assembler for */
	/* any of elf, a.out, Win32, or Solaris */
	/* It can be found in SSLeay 0.7.0+ */
	/* eric <eay@cryptsoft.com> */
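
	/* A rough sketch, inferred from the code below rather than part */
	/* of the original file: this is the Blowfish block transform.   */
	/* The key schedule passed as the second argument holds the 18   */
	/* subkey words P[0..17] at offset 0, followed by four 256-entry */
	/* S-boxes S0..S3 at byte offsets 72, 1096, 2120 and 3144 (the   */
	/* displacements used in the table lookups below).  Each round   */
	/* evaluates the standard Blowfish F function                    */
	/*                                                               */
	/*     F(x) = ((S0[x >> 24] + S1[(x >> 16) & 0xff])             */
	/*             ^ S2[(x >> 8) & 0xff]) + S3[x & 0xff] (mod 2^32) */
	/*                                                               */
	/* and applies R ^= P[i]; R ^= F(L); with the two halves of the */
	/* block swapping roles from round to round.                     */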

	.file	"bfx86xxxx.s"
	.version	"01.01"
gcc2_compiled.:
.text
	.align ALIGN
.globl BF_encrypt
	TYPE(BF_encrypt,@function)
BF_encrypt:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
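	/* With the return address and the four saved registers above,  */
	/* the arguments now sit at 20(%esp) (pointer to the two data   */
	/* words), 24(%esp) (key schedule) and 28(%esp) (encrypt flag). */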


	/* Load the two 32-bit words of the data block */
	movl	20(%esp),	%eax
	movl	(%eax),		%ecx
	movl	4(%eax),	%edx

	/* Pointer to the key schedule (P array and S-boxes) and the enc flag */
	movl	24(%esp),	%edi
	xorl	%eax,		%eax
	xorl	%ebx,		%ebx
	movl	28(%esp),	%ebp
	cmpl	$0,		%ebp
	je	.L000start_decrypt
	xorl	(%edi),		%ecx	/* L ^= P[0] */

	/* Round 0: the 15 rounds that follow repeat this pattern with */
	/* the next subkey and with %ecx and %edx swapping roles */
	rorl	$16,		%ecx	/* bring the top two bytes of L into %ch:%cl */
	movl	4(%edi),	%esi	/* %esi = P[1] */
	movb	%ch,		%al	/* a = byte 3 of L */
	movb	%cl,		%bl	/* b = byte 2 of L */
	rorl	$16,		%ecx	/* restore L */
	xorl	%esi,		%edx	/* R ^= P[1] */
	movl	72(%edi,%eax,4),%esi	/* %esi = S0[a] */
	movl	1096(%edi,%ebx,4),%ebp	/* %ebp = S1[b] */
	movb	%ch,		%al	/* c = byte 1 of L */
	movb	%cl,		%bl	/* d = byte 0 of L */
	addl	%ebp,		%esi	/* S0[a] + S1[b] */
	movl	2120(%edi,%eax,4),%eax	/* %eax = S2[c] */
	xorl	%eax,		%esi	/* (S0[a] + S1[b]) ^ S2[c] */
	movl	3144(%edi,%ebx,4),%ebp	/* %ebp = S3[d] */
	addl	%ebp,		%esi	/* %esi = F(L) */
	xorl	%eax,		%eax	/* clear %eax so the byte moves stay zero-extended */
	xorl	%esi,		%edx	/* R ^= F(L) */

	/* Round 1 */
	rorl	$16,		%edx
	movl	8(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 2 */
	rorl	$16,		%ecx
	movl	12(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 3 */
	rorl	$16,		%edx
	movl	16(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 4 */
	rorl	$16,		%ecx
	movl	20(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 5 */
	rorl	$16,		%edx
	movl	24(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 6 */
	rorl	$16,		%ecx
	movl	28(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 7 */
	rorl	$16,		%edx
	movl	32(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 8 */
	rorl	$16,		%ecx
	movl	36(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 9 */
	rorl	$16,		%edx
	movl	40(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 10 */
	rorl	$16,		%ecx
	movl	44(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 11 */
	rorl	$16,		%edx
	movl	48(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 12 */
	rorl	$16,		%ecx
	movl	52(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 13 */
	rorl	$16,		%edx
	movl	56(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 14 */
	rorl	$16,		%ecx
	movl	60(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 15 */
	rorl	$16,		%edx
	movl	64(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx
	xorl	68(%edi),	%edx	/* R ^= P[17] */
	movl	20(%esp),	%eax	/* reload the data pointer */
	movl	%edx,		(%eax)	/* store the halves swapped */
	movl	%ecx,		4(%eax)
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
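	/* Decryption below mirrors the code above: the same F function, */
	/* but the subkeys P[17]..P[0] are applied in reverse order.     */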
.align ALIGN
.L000start_decrypt:
	xorl	68(%edi),	%ecx	/* L ^= P[17] */

	/* Round 16 */
	rorl	$16,		%ecx
	movl	64(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 15 */
	rorl	$16,		%edx
	movl	60(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 14 */
	rorl	$16,		%ecx
	movl	56(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 13 */
	rorl	$16,		%edx
	movl	52(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 12 */
	rorl	$16,		%ecx
	movl	48(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 11 */
	rorl	$16,		%edx
	movl	44(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 10 */
	rorl	$16,		%ecx
	movl	40(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 9 */
	rorl	$16,		%edx
	movl	36(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 8 */
	rorl	$16,		%ecx
	movl	32(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 7 */
	rorl	$16,		%edx
	movl	28(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 6 */
	rorl	$16,		%ecx
	movl	24(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 5 */
	rorl	$16,		%edx
	movl	20(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 4 */
	rorl	$16,		%ecx
	movl	16(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 3 */
	rorl	$16,		%edx
	movl	12(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 2 */
	rorl	$16,		%ecx
	movl	8(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 1 */
	rorl	$16,		%edx
	movl	4(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx
	xorl	(%edi),		%edx	/* R ^= P[0] */
	movl	20(%esp),	%eax	/* reload the data pointer */
	movl	%edx,		(%eax)	/* store the halves swapped */
	movl	%ecx,		4(%eax)
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.BF_encrypt_end:
	SIZE(BF_encrypt,.BF_encrypt_end-BF_encrypt)
.ident	"desasm.pl"
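
	/* Note (an assumption, not part of the original file): this    */
	/* source is meant to be run through the C preprocessor, hence  */
	/* the "-cpp" in the file name, which supplies the ALIGN, TYPE  */
	/* and SIZE macros for each output format.  For an ELF target   */
	/* they could plausibly be defined along these lines:           */
	/*                                                              */
	/*     #define ALIGN     16                                     */
	/*     #define TYPE(a,b) .type a,b                              */
	/*     #define SIZE(a,b) .size a,b                              */
	/*                                                              */
	/* while an a.out target, which lacks .type/.size, would define */
	/* TYPE and SIZE to expand to nothing.                          */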
