asm/co86bsdi.o: asm/co86unix.cpp
$(CPP) -DBSDI asm/co86unix.cpp | sed 's/ :/:/' | as -o asm/co86bsdi.o
-asm/bn86unix.cpp:
+asm/bn86unix.cpp: asm/bn-586.pl
(cd asm; perl bn-586.pl cpp >bn86unix.cpp )
-asm/co86unix.cpp:
+asm/co86unix.cpp: asm/co-586.pl
(cd asm; perl co-586.pl cpp >co86unix.cpp )
# MIPS 64 bit assembler
push(@INC,"perlasm","../../perlasm");
require "x86asm.pl";
-&asm_init($ARGV[0],"bn-586.pl");
+&asm_init($ARGV[0],$0);
&bn_mul_add_words("bn_mul_add_words");
&bn_mul_words("bn_mul_words");
+++ /dev/null
-/* Run the C pre-processor over this file with one of the following defined
- * ELF - elf object files,
- * OUT - a.out object files,
- * BSDI - BSDI style a.out object files
- * SOL - Solaris style elf
- */
-
-#define TYPE(a,b) .type a,b
-#define SIZE(a,b) .size a,b
-
-#if defined(OUT) || defined(BSDI)
-#define bn_mul_add_words _bn_mul_add_words
-#define bn_mul_words _bn_mul_words
-#define bn_sqr_words _bn_sqr_words
-#define bn_div_words _bn_div_words
-#define bn_add_words _bn_add_words
-#define bn_sub_words _bn_sub_words
-#define bn_mul_comba8 _bn_mul_comba8
-#define bn_mul_comba4 _bn_mul_comba4
-#define bn_sqr_comba8 _bn_sqr_comba8
-#define bn_sqr_comba4 _bn_sqr_comba4
-
-#endif
-
-#ifdef OUT
-#define OK 1
-#define ALIGN 4
-#endif
-
-#ifdef BSDI
-#define OK 1
-#define ALIGN 4
-#undef SIZE
-#undef TYPE
-#define SIZE(a,b)
-#define TYPE(a,b)
-#endif
-
-#if defined(ELF) || defined(SOL)
-#define OK 1
-#define ALIGN 16
-#endif
-
-#ifndef OK
-You need to define one of
-ELF - elf systems - linux-elf, NetBSD and DG-UX
-OUT - a.out systems - linux-a.out and FreeBSD
-SOL - solaris systems, which are elf with strange comment lines
-BSDI - a.out with a very primitive version of as.
-#endif
-
-/* Let the Assembler begin :-) */
- /* Don't even think of reading this code */
- /* It was automatically generated by bn-586.pl */
- /* Which is a perl program used to generate the x86 assembler for */
- /* any of elf, a.out, BSDI,Win32, or Solaris */
- /* eric <eay@cryptsoft.com> */
-
- .file "bn-586.s"
- .version "01.01"
-gcc2_compiled.:
-.text
- .align ALIGN
-.globl bn_mul_add_words
- TYPE(bn_mul_add_words,@function)
-bn_mul_add_words:
- pushl %ebp
- pushl %ebx
- pushl %esi
- pushl %edi
-
-
- xorl %esi, %esi
- movl 20(%esp), %edi
- movl 28(%esp), %ecx
- movl 24(%esp), %ebx
- andl $4294967288, %ecx
- movl 32(%esp), %ebp
- pushl %ecx
- jz .L000maw_finish
-.L001maw_loop:
- movl %ecx, (%esp)
- /* Round 0 */
- movl (%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl (%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, (%edi)
- movl %edx, %esi
- /* Round 4 */
- movl 4(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 4(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 4(%edi)
- movl %edx, %esi
- /* Round 8 */
- movl 8(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 8(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 8(%edi)
- movl %edx, %esi
- /* Round 12 */
- movl 12(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 12(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 12(%edi)
- movl %edx, %esi
- /* Round 16 */
- movl 16(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 16(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 16(%edi)
- movl %edx, %esi
- /* Round 20 */
- movl 20(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 20(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 20(%edi)
- movl %edx, %esi
- /* Round 24 */
- movl 24(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 24(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 24(%edi)
- movl %edx, %esi
- /* Round 28 */
- movl 28(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 28(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 28(%edi)
- movl %edx, %esi
-
- movl (%esp), %ecx
- addl $32, %ebx
- addl $32, %edi
- subl $8, %ecx
- jnz .L001maw_loop
-.L000maw_finish:
- movl 32(%esp), %ecx
- andl $7, %ecx
- jnz .L002maw_finish2
- jmp .L003maw_end
-.align ALIGN
-.L002maw_finish2:
- /* Tail Round 0 */
- movl (%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl (%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- decl %ecx
- movl %eax, (%edi)
- movl %edx, %esi
- jz .L003maw_end
- /* Tail Round 1 */
- movl 4(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 4(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- decl %ecx
- movl %eax, 4(%edi)
- movl %edx, %esi
- jz .L003maw_end
- /* Tail Round 2 */
- movl 8(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 8(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- decl %ecx
- movl %eax, 8(%edi)
- movl %edx, %esi
- jz .L003maw_end
- /* Tail Round 3 */
- movl 12(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 12(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- decl %ecx
- movl %eax, 12(%edi)
- movl %edx, %esi
- jz .L003maw_end
- /* Tail Round 4 */
- movl 16(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 16(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- decl %ecx
- movl %eax, 16(%edi)
- movl %edx, %esi
- jz .L003maw_end
- /* Tail Round 5 */
- movl 20(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 20(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- decl %ecx
- movl %eax, 20(%edi)
- movl %edx, %esi
- jz .L003maw_end
- /* Tail Round 6 */
- movl 24(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 24(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 24(%edi)
- movl %edx, %esi
-.L003maw_end:
- movl %esi, %eax
- popl %ecx
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.bn_mul_add_words_end:
- SIZE(bn_mul_add_words,.bn_mul_add_words_end-bn_mul_add_words)
-.ident "bn_mul_add_words"
-.text
- .align ALIGN
-.globl bn_mul_words
- TYPE(bn_mul_words,@function)
-bn_mul_words:
- pushl %ebp
- pushl %ebx
- pushl %esi
- pushl %edi
-
-
- xorl %esi, %esi
- movl 20(%esp), %edi
- movl 24(%esp), %ebx
- movl 28(%esp), %ebp
- movl 32(%esp), %ecx
- andl $4294967288, %ebp
- jz .L004mw_finish
-.L005mw_loop:
- /* Round 0 */
- movl (%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, (%edi)
- movl %edx, %esi
- /* Round 4 */
- movl 4(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 4(%edi)
- movl %edx, %esi
- /* Round 8 */
- movl 8(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 8(%edi)
- movl %edx, %esi
- /* Round 12 */
- movl 12(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 12(%edi)
- movl %edx, %esi
- /* Round 16 */
- movl 16(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 16(%edi)
- movl %edx, %esi
- /* Round 20 */
- movl 20(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 20(%edi)
- movl %edx, %esi
- /* Round 24 */
- movl 24(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 24(%edi)
- movl %edx, %esi
- /* Round 28 */
- movl 28(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 28(%edi)
- movl %edx, %esi
-
- addl $32, %ebx
- addl $32, %edi
- subl $8, %ebp
- jz .L004mw_finish
- jmp .L005mw_loop
-.L004mw_finish:
- movl 28(%esp), %ebp
- andl $7, %ebp
- jnz .L006mw_finish2
- jmp .L007mw_end
-.align ALIGN
-.L006mw_finish2:
- /* Tail Round 0 */
- movl (%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, (%edi)
- movl %edx, %esi
- decl %ebp
- jz .L007mw_end
- /* Tail Round 1 */
- movl 4(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 4(%edi)
- movl %edx, %esi
- decl %ebp
- jz .L007mw_end
- /* Tail Round 2 */
- movl 8(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 8(%edi)
- movl %edx, %esi
- decl %ebp
- jz .L007mw_end
- /* Tail Round 3 */
- movl 12(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 12(%edi)
- movl %edx, %esi
- decl %ebp
- jz .L007mw_end
- /* Tail Round 4 */
- movl 16(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 16(%edi)
- movl %edx, %esi
- decl %ebp
- jz .L007mw_end
- /* Tail Round 5 */
- movl 20(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 20(%edi)
- movl %edx, %esi
- decl %ebp
- jz .L007mw_end
- /* Tail Round 6 */
- movl 24(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 24(%edi)
- movl %edx, %esi
-.L007mw_end:
- movl %esi, %eax
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.bn_mul_words_end:
- SIZE(bn_mul_words,.bn_mul_words_end-bn_mul_words)
-.ident "bn_mul_words"
-.text
- .align ALIGN
-.globl bn_sqr_words
- TYPE(bn_sqr_words,@function)
-bn_sqr_words:
- pushl %ebp
- pushl %ebx
- pushl %esi
- pushl %edi
-
-
- movl 20(%esp), %esi
- movl 24(%esp), %edi
- movl 28(%esp), %ebx
- andl $4294967288, %ebx
- jz .L008sw_finish
-.L009sw_loop:
- /* Round 0 */
- movl (%edi), %eax
- mull %eax
- movl %eax, (%esi)
- movl %edx, 4(%esi)
- /* Round 4 */
- movl 4(%edi), %eax
- mull %eax
- movl %eax, 8(%esi)
- movl %edx, 12(%esi)
- /* Round 8 */
- movl 8(%edi), %eax
- mull %eax
- movl %eax, 16(%esi)
- movl %edx, 20(%esi)
- /* Round 12 */
- movl 12(%edi), %eax
- mull %eax
- movl %eax, 24(%esi)
- movl %edx, 28(%esi)
- /* Round 16 */
- movl 16(%edi), %eax
- mull %eax
- movl %eax, 32(%esi)
- movl %edx, 36(%esi)
- /* Round 20 */
- movl 20(%edi), %eax
- mull %eax
- movl %eax, 40(%esi)
- movl %edx, 44(%esi)
- /* Round 24 */
- movl 24(%edi), %eax
- mull %eax
- movl %eax, 48(%esi)
- movl %edx, 52(%esi)
- /* Round 28 */
- movl 28(%edi), %eax
- mull %eax
- movl %eax, 56(%esi)
- movl %edx, 60(%esi)
-
- addl $32, %edi
- addl $64, %esi
- subl $8, %ebx
- jnz .L009sw_loop
-.L008sw_finish:
- movl 28(%esp), %ebx
- andl $7, %ebx
- jz .L010sw_end
- /* Tail Round 0 */
- movl (%edi), %eax
- mull %eax
- movl %eax, (%esi)
- decl %ebx
- movl %edx, 4(%esi)
- jz .L010sw_end
- /* Tail Round 1 */
- movl 4(%edi), %eax
- mull %eax
- movl %eax, 8(%esi)
- decl %ebx
- movl %edx, 12(%esi)
- jz .L010sw_end
- /* Tail Round 2 */
- movl 8(%edi), %eax
- mull %eax
- movl %eax, 16(%esi)
- decl %ebx
- movl %edx, 20(%esi)
- jz .L010sw_end
- /* Tail Round 3 */
- movl 12(%edi), %eax
- mull %eax
- movl %eax, 24(%esi)
- decl %ebx
- movl %edx, 28(%esi)
- jz .L010sw_end
- /* Tail Round 4 */
- movl 16(%edi), %eax
- mull %eax
- movl %eax, 32(%esi)
- decl %ebx
- movl %edx, 36(%esi)
- jz .L010sw_end
- /* Tail Round 5 */
- movl 20(%edi), %eax
- mull %eax
- movl %eax, 40(%esi)
- decl %ebx
- movl %edx, 44(%esi)
- jz .L010sw_end
- /* Tail Round 6 */
- movl 24(%edi), %eax
- mull %eax
- movl %eax, 48(%esi)
- movl %edx, 52(%esi)
-.L010sw_end:
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.bn_sqr_words_end:
- SIZE(bn_sqr_words,.bn_sqr_words_end-bn_sqr_words)
-.ident "bn_sqr_words"
-.text
- .align ALIGN
-.globl bn_div_words
- TYPE(bn_div_words,@function)
-bn_div_words:
- pushl %ebp
- pushl %ebx
- pushl %esi
- pushl %edi
-
- movl 20(%esp), %edx
- movl 24(%esp), %eax
- movl 28(%esp), %ebx
- divl %ebx
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.bn_div_words_end:
- SIZE(bn_div_words,.bn_div_words_end-bn_div_words)
-.ident "bn_div_words"
-.text
- .align ALIGN
-.globl bn_add_words
- TYPE(bn_add_words,@function)
-bn_add_words:
- pushl %ebp
- pushl %ebx
- pushl %esi
- pushl %edi
-
-
- movl 20(%esp), %ebx
- movl 24(%esp), %esi
- movl 28(%esp), %edi
- movl 32(%esp), %ebp
- xorl %eax, %eax
- andl $4294967288, %ebp
- jz .L011aw_finish
-.L012aw_loop:
- /* Round 0 */
- movl (%esi), %ecx
- movl (%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- movl %ecx, (%ebx)
- /* Round 1 */
- movl 4(%esi), %ecx
- movl 4(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 4(%ebx)
- /* Round 2 */
- movl 8(%esi), %ecx
- movl 8(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 8(%ebx)
- /* Round 3 */
- movl 12(%esi), %ecx
- movl 12(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 12(%ebx)
- /* Round 4 */
- movl 16(%esi), %ecx
- movl 16(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 16(%ebx)
- /* Round 5 */
- movl 20(%esi), %ecx
- movl 20(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 20(%ebx)
- /* Round 6 */
- movl 24(%esi), %ecx
- movl 24(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 24(%ebx)
- /* Round 7 */
- movl 28(%esi), %ecx
- movl 28(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 28(%ebx)
-
- addl $32, %esi
- addl $32, %edi
- addl $32, %ebx
- subl $8, %ebp
- jnz .L012aw_loop
-.L011aw_finish:
- movl 32(%esp), %ebp
- andl $7, %ebp
- jz .L013aw_end
- /* Tail Round 0 */
- movl (%esi), %ecx
- movl (%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, (%ebx)
- jz .L013aw_end
- /* Tail Round 1 */
- movl 4(%esi), %ecx
- movl 4(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 4(%ebx)
- jz .L013aw_end
- /* Tail Round 2 */
- movl 8(%esi), %ecx
- movl 8(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 8(%ebx)
- jz .L013aw_end
- /* Tail Round 3 */
- movl 12(%esi), %ecx
- movl 12(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 12(%ebx)
- jz .L013aw_end
- /* Tail Round 4 */
- movl 16(%esi), %ecx
- movl 16(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 16(%ebx)
- jz .L013aw_end
- /* Tail Round 5 */
- movl 20(%esi), %ecx
- movl 20(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 20(%ebx)
- jz .L013aw_end
- /* Tail Round 6 */
- movl 24(%esi), %ecx
- movl 24(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 24(%ebx)
-.L013aw_end:
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.bn_add_words_end:
- SIZE(bn_add_words,.bn_add_words_end-bn_add_words)
-.ident "bn_add_words"
-.text
- .align ALIGN
-.globl bn_sub_words
- TYPE(bn_sub_words,@function)
-bn_sub_words:
- pushl %ebp
- pushl %ebx
- pushl %esi
- pushl %edi
-
-
- movl 20(%esp), %ebx
- movl 24(%esp), %esi
- movl 28(%esp), %edi
- movl 32(%esp), %ebp
- xorl %eax, %eax
- andl $4294967288, %ebp
- jz .L014aw_finish
-.L015aw_loop:
- /* Round 0 */
- movl (%esi), %ecx
- movl (%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- movl %ecx, (%ebx)
- /* Round 1 */
- movl 4(%esi), %ecx
- movl 4(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 4(%ebx)
- /* Round 2 */
- movl 8(%esi), %ecx
- movl 8(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 8(%ebx)
- /* Round 3 */
- movl 12(%esi), %ecx
- movl 12(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 12(%ebx)
- /* Round 4 */
- movl 16(%esi), %ecx
- movl 16(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 16(%ebx)
- /* Round 5 */
- movl 20(%esi), %ecx
- movl 20(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 20(%ebx)
- /* Round 6 */
- movl 24(%esi), %ecx
- movl 24(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 24(%ebx)
- /* Round 7 */
- movl 28(%esi), %ecx
- movl 28(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 28(%ebx)
-
- addl $32, %esi
- addl $32, %edi
- addl $32, %ebx
- subl $8, %ebp
- jnz .L015aw_loop
-.L014aw_finish:
- movl 32(%esp), %ebp
- andl $7, %ebp
- jz .L016aw_end
- /* Tail Round 0 */
- movl (%esi), %ecx
- movl (%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, (%ebx)
- jz .L016aw_end
- /* Tail Round 1 */
- movl 4(%esi), %ecx
- movl 4(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 4(%ebx)
- jz .L016aw_end
- /* Tail Round 2 */
- movl 8(%esi), %ecx
- movl 8(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 8(%ebx)
- jz .L016aw_end
- /* Tail Round 3 */
- movl 12(%esi), %ecx
- movl 12(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 12(%ebx)
- jz .L016aw_end
- /* Tail Round 4 */
- movl 16(%esi), %ecx
- movl 16(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 16(%ebx)
- jz .L016aw_end
- /* Tail Round 5 */
- movl 20(%esi), %ecx
- movl 20(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 20(%ebx)
- jz .L016aw_end
- /* Tail Round 6 */
- movl 24(%esi), %ecx
- movl 24(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 24(%ebx)
-.L016aw_end:
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.bn_sub_words_end:
- SIZE(bn_sub_words,.bn_sub_words_end-bn_sub_words)
-.ident "bn_sub_words"
-.text
- .align ALIGN
-.globl bn_mul_comba8
- TYPE(bn_mul_comba8,@function)
-bn_mul_comba8:
- pushl %esi
- movl 12(%esp), %esi
- pushl %edi
- movl 20(%esp), %edi
- pushl %ebp
- pushl %ebx
- xorl %ebx, %ebx
- movl (%esi), %eax
- xorl %ecx, %ecx
- movl (%edi), %edx
- /* ################## Calculate word 0 */
- xorl %ebp, %ebp
- /* mul a[0]*b[0] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl (%edi), %edx
- adcl $0, %ebp
- movl %ebx, (%eax)
- movl 4(%esi), %eax
- /* saved r[0] */
- /* ################## Calculate word 1 */
- xorl %ebx, %ebx
- /* mul a[1]*b[0] */
- mull %edx
- addl %eax, %ecx
- movl (%esi), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- /* mul a[0]*b[1] */
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl (%edi), %edx
- adcl $0, %ebx
- movl %ecx, 4(%eax)
- movl 8(%esi), %eax
- /* saved r[1] */
- /* ################## Calculate word 2 */
- xorl %ecx, %ecx
- /* mul a[2]*b[0] */
- mull %edx
- addl %eax, %ebp
- movl 4(%esi), %eax
- adcl %edx, %ebx
- movl 4(%edi), %edx
- adcl $0, %ecx
- /* mul a[1]*b[1] */
- mull %edx
- addl %eax, %ebp
- movl (%esi), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- /* mul a[0]*b[2] */
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl (%edi), %edx
- adcl $0, %ecx
- movl %ebp, 8(%eax)
- movl 12(%esi), %eax
- /* saved r[2] */
- /* ################## Calculate word 3 */
- xorl %ebp, %ebp
- /* mul a[3]*b[0] */
- mull %edx
- addl %eax, %ebx
- movl 8(%esi), %eax
- adcl %edx, %ecx
- movl 4(%edi), %edx
- adcl $0, %ebp
- /* mul a[2]*b[1] */
- mull %edx
- addl %eax, %ebx
- movl 4(%esi), %eax
- adcl %edx, %ecx
- movl 8(%edi), %edx
- adcl $0, %ebp
- /* mul a[1]*b[2] */
- mull %edx
- addl %eax, %ebx
- movl (%esi), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- /* mul a[0]*b[3] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl (%edi), %edx
- adcl $0, %ebp
- movl %ebx, 12(%eax)
- movl 16(%esi), %eax
- /* saved r[3] */
- /* ################## Calculate word 4 */
- xorl %ebx, %ebx
- /* mul a[4]*b[0] */
- mull %edx
- addl %eax, %ecx
- movl 12(%esi), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- /* mul a[3]*b[1] */
- mull %edx
- addl %eax, %ecx
- movl 8(%esi), %eax
- adcl %edx, %ebp
- movl 8(%edi), %edx
- adcl $0, %ebx
- /* mul a[2]*b[2] */
- mull %edx
- addl %eax, %ecx
- movl 4(%esi), %eax
- adcl %edx, %ebp
- movl 12(%edi), %edx
- adcl $0, %ebx
- /* mul a[1]*b[3] */
- mull %edx
- addl %eax, %ecx
- movl (%esi), %eax
- adcl %edx, %ebp
- movl 16(%edi), %edx
- adcl $0, %ebx
- /* mul a[0]*b[4] */
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl (%edi), %edx
- adcl $0, %ebx
- movl %ecx, 16(%eax)
- movl 20(%esi), %eax
- /* saved r[4] */
- /* ################## Calculate word 5 */
- xorl %ecx, %ecx
- /* mul a[5]*b[0] */
- mull %edx
- addl %eax, %ebp
- movl 16(%esi), %eax
- adcl %edx, %ebx
- movl 4(%edi), %edx
- adcl $0, %ecx
- /* mul a[4]*b[1] */
- mull %edx
- addl %eax, %ebp
- movl 12(%esi), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- /* mul a[3]*b[2] */
- mull %edx
- addl %eax, %ebp
- movl 8(%esi), %eax
- adcl %edx, %ebx
- movl 12(%edi), %edx
- adcl $0, %ecx
- /* mul a[2]*b[3] */
- mull %edx
- addl %eax, %ebp
- movl 4(%esi), %eax
- adcl %edx, %ebx
- movl 16(%edi), %edx
- adcl $0, %ecx
- /* mul a[1]*b[4] */
- mull %edx
- addl %eax, %ebp
- movl (%esi), %eax
- adcl %edx, %ebx
- movl 20(%edi), %edx
- adcl $0, %ecx
- /* mul a[0]*b[5] */
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl (%edi), %edx
- adcl $0, %ecx
- movl %ebp, 20(%eax)
- movl 24(%esi), %eax
- /* saved r[5] */
- /* ################## Calculate word 6 */
- xorl %ebp, %ebp
- /* mul a[6]*b[0] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esi), %eax
- adcl %edx, %ecx
- movl 4(%edi), %edx
- adcl $0, %ebp
- /* mul a[5]*b[1] */
- mull %edx
- addl %eax, %ebx
- movl 16(%esi), %eax
- adcl %edx, %ecx
- movl 8(%edi), %edx
- adcl $0, %ebp
- /* mul a[4]*b[2] */
- mull %edx
- addl %eax, %ebx
- movl 12(%esi), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- /* mul a[3]*b[3] */
- mull %edx
- addl %eax, %ebx
- movl 8(%esi), %eax
- adcl %edx, %ecx
- movl 16(%edi), %edx
- adcl $0, %ebp
- /* mul a[2]*b[4] */
- mull %edx
- addl %eax, %ebx
- movl 4(%esi), %eax
- adcl %edx, %ecx
- movl 20(%edi), %edx
- adcl $0, %ebp
- /* mul a[1]*b[5] */
- mull %edx
- addl %eax, %ebx
- movl (%esi), %eax
- adcl %edx, %ecx
- movl 24(%edi), %edx
- adcl $0, %ebp
- /* mul a[0]*b[6] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl (%edi), %edx
- adcl $0, %ebp
- movl %ebx, 24(%eax)
- movl 28(%esi), %eax
- /* saved r[6] */
- /* ################## Calculate word 7 */
- xorl %ebx, %ebx
- /* mul a[7]*b[0] */
- mull %edx
- addl %eax, %ecx
- movl 24(%esi), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- /* mul a[6]*b[1] */
- mull %edx
- addl %eax, %ecx
- movl 20(%esi), %eax
- adcl %edx, %ebp
- movl 8(%edi), %edx
- adcl $0, %ebx
- /* mul a[5]*b[2] */
- mull %edx
- addl %eax, %ecx
- movl 16(%esi), %eax
- adcl %edx, %ebp
- movl 12(%edi), %edx
- adcl $0, %ebx
- /* mul a[4]*b[3] */
- mull %edx
- addl %eax, %ecx
- movl 12(%esi), %eax
- adcl %edx, %ebp
- movl 16(%edi), %edx
- adcl $0, %ebx
- /* mul a[3]*b[4] */
- mull %edx
- addl %eax, %ecx
- movl 8(%esi), %eax
- adcl %edx, %ebp
- movl 20(%edi), %edx
- adcl $0, %ebx
- /* mul a[2]*b[5] */
- mull %edx
- addl %eax, %ecx
- movl 4(%esi), %eax
- adcl %edx, %ebp
- movl 24(%edi), %edx
- adcl $0, %ebx
- /* mul a[1]*b[6] */
- mull %edx
- addl %eax, %ecx
- movl (%esi), %eax
- adcl %edx, %ebp
- movl 28(%edi), %edx
- adcl $0, %ebx
- /* mul a[0]*b[7] */
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- movl %ecx, 28(%eax)
- movl 28(%esi), %eax
- /* saved r[7] */
- /* ################## Calculate word 8 */
- xorl %ecx, %ecx
- /* mul a[7]*b[1] */
- mull %edx
- addl %eax, %ebp
- movl 24(%esi), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- /* mul a[6]*b[2] */
- mull %edx
- addl %eax, %ebp
- movl 20(%esi), %eax
- adcl %edx, %ebx
- movl 12(%edi), %edx
- adcl $0, %ecx
- /* mul a[5]*b[3] */
- mull %edx
- addl %eax, %ebp
- movl 16(%esi), %eax
- adcl %edx, %ebx
- movl 16(%edi), %edx
- adcl $0, %ecx
- /* mul a[4]*b[4] */
- mull %edx
- addl %eax, %ebp
- movl 12(%esi), %eax
- adcl %edx, %ebx
- movl 20(%edi), %edx
- adcl $0, %ecx
- /* mul a[3]*b[5] */
- mull %edx
- addl %eax, %ebp
- movl 8(%esi), %eax
- adcl %edx, %ebx
- movl 24(%edi), %edx
- adcl $0, %ecx
- /* mul a[2]*b[6] */
- mull %edx
- addl %eax, %ebp
- movl 4(%esi), %eax
- adcl %edx, %ebx
- movl 28(%edi), %edx
- adcl $0, %ecx
- /* mul a[1]*b[7] */
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- movl %ebp, 32(%eax)
- movl 28(%esi), %eax
- /* saved r[8] */
- /* ################## Calculate word 9 */
- xorl %ebp, %ebp
- /* mul a[7]*b[2] */
- mull %edx
- addl %eax, %ebx
- movl 24(%esi), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- /* mul a[6]*b[3] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esi), %eax
- adcl %edx, %ecx
- movl 16(%edi), %edx
- adcl $0, %ebp
- /* mul a[5]*b[4] */
- mull %edx
- addl %eax, %ebx
- movl 16(%esi), %eax
- adcl %edx, %ecx
- movl 20(%edi), %edx
- adcl $0, %ebp
- /* mul a[4]*b[5] */
- mull %edx
- addl %eax, %ebx
- movl 12(%esi), %eax
- adcl %edx, %ecx
- movl 24(%edi), %edx
- adcl $0, %ebp
- /* mul a[3]*b[6] */
- mull %edx
- addl %eax, %ebx
- movl 8(%esi), %eax
- adcl %edx, %ecx
- movl 28(%edi), %edx
- adcl $0, %ebp
- /* mul a[2]*b[7] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- movl %ebx, 36(%eax)
- movl 28(%esi), %eax
- /* saved r[9] */
- /* ################## Calculate word 10 */
- xorl %ebx, %ebx
- /* mul a[7]*b[3] */
- mull %edx
- addl %eax, %ecx
- movl 24(%esi), %eax
- adcl %edx, %ebp
- movl 16(%edi), %edx
- adcl $0, %ebx
- /* mul a[6]*b[4] */
- mull %edx
- addl %eax, %ecx
- movl 20(%esi), %eax
- adcl %edx, %ebp
- movl 20(%edi), %edx
- adcl $0, %ebx
- /* mul a[5]*b[5] */
- mull %edx
- addl %eax, %ecx
- movl 16(%esi), %eax
- adcl %edx, %ebp
- movl 24(%edi), %edx
- adcl $0, %ebx
- /* mul a[4]*b[6] */
- mull %edx
- addl %eax, %ecx
- movl 12(%esi), %eax
- adcl %edx, %ebp
- movl 28(%edi), %edx
- adcl $0, %ebx
- /* mul a[3]*b[7] */
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl 16(%edi), %edx
- adcl $0, %ebx
- movl %ecx, 40(%eax)
- movl 28(%esi), %eax
- /* saved r[10] */
- /* ################## Calculate word 11 */
- xorl %ecx, %ecx
- /* mul a[7]*b[4] */
- mull %edx
- addl %eax, %ebp
- movl 24(%esi), %eax
- adcl %edx, %ebx
- movl 20(%edi), %edx
- adcl $0, %ecx
- /* mul a[6]*b[5] */
- mull %edx
- addl %eax, %ebp
- movl 20(%esi), %eax
- adcl %edx, %ebx
- movl 24(%edi), %edx
- adcl $0, %ecx
- /* mul a[5]*b[6] */
- mull %edx
- addl %eax, %ebp
- movl 16(%esi), %eax
- adcl %edx, %ebx
- movl 28(%edi), %edx
- adcl $0, %ecx
- /* mul a[4]*b[7] */
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl 20(%edi), %edx
- adcl $0, %ecx
- movl %ebp, 44(%eax)
- movl 28(%esi), %eax
- /* saved r[11] */
- /* ################## Calculate word 12 */
- xorl %ebp, %ebp
- /* mul a[7]*b[5] */
- mull %edx
- addl %eax, %ebx
- movl 24(%esi), %eax
- adcl %edx, %ecx
- movl 24(%edi), %edx
- adcl $0, %ebp
- /* mul a[6]*b[6] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esi), %eax
- adcl %edx, %ecx
- movl 28(%edi), %edx
- adcl $0, %ebp
- /* mul a[5]*b[7] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl 24(%edi), %edx
- adcl $0, %ebp
- movl %ebx, 48(%eax)
- movl 28(%esi), %eax
- /* saved r[12] */
- /* ################## Calculate word 13 */
- xorl %ebx, %ebx
- /* mul a[7]*b[6] */
- mull %edx
- addl %eax, %ecx
- movl 24(%esi), %eax
- adcl %edx, %ebp
- movl 28(%edi), %edx
- adcl $0, %ebx
- /* mul a[6]*b[7] */
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl 28(%edi), %edx
- adcl $0, %ebx
- movl %ecx, 52(%eax)
- movl 28(%esi), %eax
- /* saved r[13] */
- /* ################## Calculate word 14 */
- xorl %ecx, %ecx
- /* mul a[7]*b[7] */
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- adcl $0, %ecx
- movl %ebp, 56(%eax)
- /* saved r[14] */
- /* save r[15] */
- movl %ebx, 60(%eax)
- popl %ebx
- popl %ebp
- popl %edi
- popl %esi
- ret
-.bn_mul_comba8_end:
- SIZE(bn_mul_comba8,.bn_mul_comba8_end-bn_mul_comba8)
-.ident "desasm.pl"
-.text
- .align ALIGN
-.globl bn_mul_comba4
- TYPE(bn_mul_comba4,@function)
-bn_mul_comba4:
- pushl %esi
- movl 12(%esp), %esi
- pushl %edi
- movl 20(%esp), %edi
- pushl %ebp
- pushl %ebx
- xorl %ebx, %ebx
- movl (%esi), %eax
- xorl %ecx, %ecx
- movl (%edi), %edx
- /* ################## Calculate word 0 */
- xorl %ebp, %ebp
- /* mul a[0]*b[0] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl (%edi), %edx
- adcl $0, %ebp
- movl %ebx, (%eax)
- movl 4(%esi), %eax
- /* saved r[0] */
- /* ################## Calculate word 1 */
- xorl %ebx, %ebx
- /* mul a[1]*b[0] */
- mull %edx
- addl %eax, %ecx
- movl (%esi), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- /* mul a[0]*b[1] */
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl (%edi), %edx
- adcl $0, %ebx
- movl %ecx, 4(%eax)
- movl 8(%esi), %eax
- /* saved r[1] */
- /* ################## Calculate word 2 */
- xorl %ecx, %ecx
- /* mul a[2]*b[0] */
- mull %edx
- addl %eax, %ebp
- movl 4(%esi), %eax
- adcl %edx, %ebx
- movl 4(%edi), %edx
- adcl $0, %ecx
- /* mul a[1]*b[1] */
- mull %edx
- addl %eax, %ebp
- movl (%esi), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- /* mul a[0]*b[2] */
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl (%edi), %edx
- adcl $0, %ecx
- movl %ebp, 8(%eax)
- movl 12(%esi), %eax
- /* saved r[2] */
- /* ################## Calculate word 3 */
- xorl %ebp, %ebp
- /* mul a[3]*b[0] */
- mull %edx
- addl %eax, %ebx
- movl 8(%esi), %eax
- adcl %edx, %ecx
- movl 4(%edi), %edx
- adcl $0, %ebp
- /* mul a[2]*b[1] */
- mull %edx
- addl %eax, %ebx
- movl 4(%esi), %eax
- adcl %edx, %ecx
- movl 8(%edi), %edx
- adcl $0, %ebp
- /* mul a[1]*b[2] */
- mull %edx
- addl %eax, %ebx
- movl (%esi), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- /* mul a[0]*b[3] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl 4(%edi), %edx
- adcl $0, %ebp
- movl %ebx, 12(%eax)
- movl 12(%esi), %eax
- /* saved r[3] */
- /* ################## Calculate word 4 */
- xorl %ebx, %ebx
- /* mul a[3]*b[1] */
- mull %edx
- addl %eax, %ecx
- movl 8(%esi), %eax
- adcl %edx, %ebp
- movl 8(%edi), %edx
- adcl $0, %ebx
- /* mul a[2]*b[2] */
- mull %edx
- addl %eax, %ecx
- movl 4(%esi), %eax
- adcl %edx, %ebp
- movl 12(%edi), %edx
- adcl $0, %ebx
- /* mul a[1]*b[3] */
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl 8(%edi), %edx
- adcl $0, %ebx
- movl %ecx, 16(%eax)
- movl 12(%esi), %eax
- /* saved r[4] */
- /* ################## Calculate word 5 */
- xorl %ecx, %ecx
- /* mul a[3]*b[2] */
- mull %edx
- addl %eax, %ebp
- movl 8(%esi), %eax
- adcl %edx, %ebx
- movl 12(%edi), %edx
- adcl $0, %ecx
- /* mul a[2]*b[3] */
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl 12(%edi), %edx
- adcl $0, %ecx
- movl %ebp, 20(%eax)
- movl 12(%esi), %eax
- /* saved r[5] */
- /* ################## Calculate word 6 */
- xorl %ebp, %ebp
- /* mul a[3]*b[3] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- adcl $0, %ebp
- movl %ebx, 24(%eax)
- /* saved r[6] */
- /* save r[7] */
- movl %ecx, 28(%eax)
- popl %ebx
- popl %ebp
- popl %edi
- popl %esi
- ret
-.bn_mul_comba4_end:
- SIZE(bn_mul_comba4,.bn_mul_comba4_end-bn_mul_comba4)
-.ident "desasm.pl"
-.text
- .align ALIGN
-.globl bn_sqr_comba8
- TYPE(bn_sqr_comba8,@function)
-bn_sqr_comba8:
- pushl %esi
- pushl %edi
- pushl %ebp
- pushl %ebx
- movl 20(%esp), %edi
- movl 24(%esp), %esi
- xorl %ebx, %ebx
- xorl %ecx, %ecx
- movl (%esi), %eax
- /* ############### Calculate word 0 */
- xorl %ebp, %ebp
- /* sqr a[0]*a[0] */
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- movl (%esi), %edx
- adcl $0, %ebp
- movl %ebx, (%edi)
- movl 4(%esi), %eax
- /* saved r[0] */
- /* ############### Calculate word 1 */
- xorl %ebx, %ebx
- /* sqr a[1]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %eax
- adcl $0, %ebx
- movl %ecx, 4(%edi)
- movl (%esi), %edx
- /* saved r[1] */
- /* ############### Calculate word 2 */
- xorl %ecx, %ecx
- /* sqr a[2]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 4(%esi), %eax
- adcl $0, %ecx
- /* sqr a[1]*a[1] */
- mull %eax
- addl %eax, %ebp
- adcl %edx, %ebx
- movl (%esi), %edx
- adcl $0, %ecx
- movl %ebp, 8(%edi)
- movl 12(%esi), %eax
- /* saved r[2] */
- /* ############### Calculate word 3 */
- xorl %ebp, %ebp
- /* sqr a[3]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 8(%esi), %eax
- adcl $0, %ebp
- movl 4(%esi), %edx
- /* sqr a[2]*a[1] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 16(%esi), %eax
- adcl $0, %ebp
- movl %ebx, 12(%edi)
- movl (%esi), %edx
- /* saved r[3] */
- /* ############### Calculate word 4 */
- xorl %ebx, %ebx
- /* sqr a[4]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 12(%esi), %eax
- adcl $0, %ebx
- movl 4(%esi), %edx
- /* sqr a[3]*a[1] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %eax
- adcl $0, %ebx
- /* sqr a[2]*a[2] */
- mull %eax
- addl %eax, %ecx
- adcl %edx, %ebp
- movl (%esi), %edx
- adcl $0, %ebx
- movl %ecx, 16(%edi)
- movl 20(%esi), %eax
- /* saved r[4] */
- /* ############### Calculate word 5 */
- xorl %ecx, %ecx
- /* sqr a[5]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 16(%esi), %eax
- adcl $0, %ecx
- movl 4(%esi), %edx
- /* sqr a[4]*a[1] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 12(%esi), %eax
- adcl $0, %ecx
- movl 8(%esi), %edx
- /* sqr a[3]*a[2] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 24(%esi), %eax
- adcl $0, %ecx
- movl %ebp, 20(%edi)
- movl (%esi), %edx
- /* saved r[5] */
- /* ############### Calculate word 6 */
- xorl %ebp, %ebp
- /* sqr a[6]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 20(%esi), %eax
- adcl $0, %ebp
- movl 4(%esi), %edx
- /* sqr a[5]*a[1] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 16(%esi), %eax
- adcl $0, %ebp
- movl 8(%esi), %edx
- /* sqr a[4]*a[2] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 12(%esi), %eax
- adcl $0, %ebp
- /* sqr a[3]*a[3] */
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- movl (%esi), %edx
- adcl $0, %ebp
- movl %ebx, 24(%edi)
- movl 28(%esi), %eax
- /* saved r[6] */
- /* ############### Calculate word 7 */
- xorl %ebx, %ebx
- /* sqr a[7]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 24(%esi), %eax
- adcl $0, %ebx
- movl 4(%esi), %edx
- /* sqr a[6]*a[1] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 20(%esi), %eax
- adcl $0, %ebx
- movl 8(%esi), %edx
- /* sqr a[5]*a[2] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 16(%esi), %eax
- adcl $0, %ebx
- movl 12(%esi), %edx
- /* sqr a[4]*a[3] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 28(%esi), %eax
- adcl $0, %ebx
- movl %ecx, 28(%edi)
- movl 4(%esi), %edx
- /* saved r[7] */
- /* ############### Calculate word 8 */
- xorl %ecx, %ecx
- /* sqr a[7]*a[1] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 24(%esi), %eax
- adcl $0, %ecx
- movl 8(%esi), %edx
- /* sqr a[6]*a[2] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 20(%esi), %eax
- adcl $0, %ecx
- movl 12(%esi), %edx
- /* sqr a[5]*a[3] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 16(%esi), %eax
- adcl $0, %ecx
- /* sqr a[4]*a[4] */
- mull %eax
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 8(%esi), %edx
- adcl $0, %ecx
- movl %ebp, 32(%edi)
- movl 28(%esi), %eax
- /* saved r[8] */
- /* ############### Calculate word 9 */
- xorl %ebp, %ebp
- /* sqr a[7]*a[2] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 24(%esi), %eax
- adcl $0, %ebp
- movl 12(%esi), %edx
- /* sqr a[6]*a[3] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 20(%esi), %eax
- adcl $0, %ebp
- movl 16(%esi), %edx
- /* sqr a[5]*a[4] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 28(%esi), %eax
- adcl $0, %ebp
- movl %ebx, 36(%edi)
- movl 12(%esi), %edx
- /* saved r[9] */
- /* ############### Calculate word 10 */
- xorl %ebx, %ebx
- /* sqr a[7]*a[3] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 24(%esi), %eax
- adcl $0, %ebx
- movl 16(%esi), %edx
- /* sqr a[6]*a[4] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 20(%esi), %eax
- adcl $0, %ebx
- /* sqr a[5]*a[5] */
- mull %eax
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 16(%esi), %edx
- adcl $0, %ebx
- movl %ecx, 40(%edi)
- movl 28(%esi), %eax
- /* saved r[10] */
- /* ############### Calculate word 11 */
- xorl %ecx, %ecx
- /* sqr a[7]*a[4] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 24(%esi), %eax
- adcl $0, %ecx
- movl 20(%esi), %edx
- /* sqr a[6]*a[5] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 28(%esi), %eax
- adcl $0, %ecx
- movl %ebp, 44(%edi)
- movl 20(%esi), %edx
- /* saved r[11] */
- /* ############### Calculate word 12 */
- xorl %ebp, %ebp
- /* sqr a[7]*a[5] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 24(%esi), %eax
- adcl $0, %ebp
- /* sqr a[6]*a[6] */
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 24(%esi), %edx
- adcl $0, %ebp
- movl %ebx, 48(%edi)
- movl 28(%esi), %eax
- /* saved r[12] */
- /* ############### Calculate word 13 */
- xorl %ebx, %ebx
- /* sqr a[7]*a[6] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 28(%esi), %eax
- adcl $0, %ebx
- movl %ecx, 52(%edi)
- /* saved r[13] */
- /* ############### Calculate word 14 */
- xorl %ecx, %ecx
- /* sqr a[7]*a[7] */
- mull %eax
- addl %eax, %ebp
- adcl %edx, %ebx
- adcl $0, %ecx
- movl %ebp, 56(%edi)
- /* saved r[14] */
- movl %ebx, 60(%edi)
- popl %ebx
- popl %ebp
- popl %edi
- popl %esi
- ret
-.bn_sqr_comba8_end:
- SIZE(bn_sqr_comba8,.bn_sqr_comba8_end-bn_sqr_comba8)
-.ident "desasm.pl"
-.text
- .align ALIGN
-.globl bn_sqr_comba4
- TYPE(bn_sqr_comba4,@function)
-bn_sqr_comba4:
- pushl %esi
- pushl %edi
- pushl %ebp
- pushl %ebx
- movl 20(%esp), %edi
- movl 24(%esp), %esi
- xorl %ebx, %ebx
- xorl %ecx, %ecx
- movl (%esi), %eax
- /* ############### Calculate word 0 */
- xorl %ebp, %ebp
- /* sqr a[0]*a[0] */
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- movl (%esi), %edx
- adcl $0, %ebp
- movl %ebx, (%edi)
- movl 4(%esi), %eax
- /* saved r[0] */
- /* ############### Calculate word 1 */
- xorl %ebx, %ebx
- /* sqr a[1]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %eax
- adcl $0, %ebx
- movl %ecx, 4(%edi)
- movl (%esi), %edx
- /* saved r[1] */
- /* ############### Calculate word 2 */
- xorl %ecx, %ecx
- /* sqr a[2]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 4(%esi), %eax
- adcl $0, %ecx
- /* sqr a[1]*a[1] */
- mull %eax
- addl %eax, %ebp
- adcl %edx, %ebx
- movl (%esi), %edx
- adcl $0, %ecx
- movl %ebp, 8(%edi)
- movl 12(%esi), %eax
- /* saved r[2] */
- /* ############### Calculate word 3 */
- xorl %ebp, %ebp
- /* sqr a[3]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 8(%esi), %eax
- adcl $0, %ebp
- movl 4(%esi), %edx
- /* sqr a[2]*a[1] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 12(%esi), %eax
- adcl $0, %ebp
- movl %ebx, 12(%edi)
- movl 4(%esi), %edx
- /* saved r[3] */
- /* ############### Calculate word 4 */
- xorl %ebx, %ebx
- /* sqr a[3]*a[1] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %eax
- adcl $0, %ebx
- /* sqr a[2]*a[2] */
- mull %eax
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %edx
- adcl $0, %ebx
- movl %ecx, 16(%edi)
- movl 12(%esi), %eax
- /* saved r[4] */
- /* ############### Calculate word 5 */
- xorl %ecx, %ecx
- /* sqr a[3]*a[2] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 12(%esi), %eax
- adcl $0, %ecx
- movl %ebp, 20(%edi)
- /* saved r[5] */
- /* ############### Calculate word 6 */
- xorl %ebp, %ebp
- /* sqr a[3]*a[3] */
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- adcl $0, %ebp
- movl %ebx, 24(%edi)
- /* saved r[6] */
- movl %ecx, 28(%edi)
- popl %ebx
- popl %ebp
- popl %edi
- popl %esi
- ret
-.bn_sqr_comba4_end:
- SIZE(bn_sqr_comba4,.bn_sqr_comba4_end-bn_sqr_comba4)
-.ident "desasm.pl"
require "alpha/sqr_c8.pl";
require "alpha/div.pl";
-&asm_init($ARGV[0],"bn-586.pl");
+&asm_init($ARGV[0],$0);
&bn_mul_words("bn_mul_words");
&bn_sqr_words("bn_sqr_words");
push(@INC,"perlasm","../../perlasm");
require "x86asm.pl";
-&asm_init($ARGV[0],"bn-586.pl");
+&asm_init($ARGV[0],$0);
&bn_mul_comba("bn_mul_comba8",8);
&bn_mul_comba("bn_mul_comba4",4);
push(@INC,"perlasm","../../perlasm");
require "alpha.pl";
-&asm_init($ARGV[0],"bn-586.pl");
+&asm_init($ARGV[0],$0);
print &bn_sub_words("bn_sub_words");
+++ /dev/null
-/* Run the C pre-processor over this file with one of the following defined
- * ELF - elf object files,
- * OUT - a.out object files,
- * BSDI - BSDI style a.out object files
- * SOL - Solaris style elf
- */
-
-#define TYPE(a,b) .type a,b
-#define SIZE(a,b) .size a,b
-
-#if defined(OUT) || defined(BSDI)
-#define bn_mul_comba8 _bn_mul_comba8
-#define bn_mul_comba4 _bn_mul_comba4
-#define bn_sqr_comba8 _bn_sqr_comba8
-#define bn_sqr_comba4 _bn_sqr_comba4
-
-#endif
-
-#ifdef OUT
-#define OK 1
-#define ALIGN 4
-#endif
-
-#ifdef BSDI
-#define OK 1
-#define ALIGN 4
-#undef SIZE
-#undef TYPE
-#define SIZE(a,b)
-#define TYPE(a,b)
-#endif
-
-#if defined(ELF) || defined(SOL)
-#define OK 1
-#define ALIGN 16
-#endif
-
-#ifndef OK
-You need to define one of
-ELF - elf systems - linux-elf, NetBSD and DG-UX
-OUT - a.out systems - linux-a.out and FreeBSD
-SOL - solaris systems, which are elf with strange comment lines
-BSDI - a.out with a very primitive version of as.
-#endif
-
-/* Let the Assembler begin :-) */
- /* Don't even think of reading this code */
- /* It was automatically generated by bn-586.pl */
- /* Which is a perl program used to generate the x86 assembler for */
- /* any of elf, a.out, BSDI,Win32, or Solaris */
- /* eric <eay@cryptsoft.com> */
-
- .file "bn-586.s"
- .version "01.01"
-gcc2_compiled.:
-.text
- .align ALIGN
-.globl bn_mul_comba8
- TYPE(bn_mul_comba8,@function)
-bn_mul_comba8:
- pushl %esi
- movl 12(%esp), %esi
- pushl %edi
- movl 20(%esp), %edi
- pushl %ebp
- pushl %ebx
- xorl %ebx, %ebx
- movl (%esi), %eax
- xorl %ecx, %ecx
- movl (%edi), %edx
- /* ################## Calculate word 0 */
- xorl %ebp, %ebp
- /* mul a[0]*b[0] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl (%edi), %edx
- adcl $0, %ebp
- movl %ebx, (%eax)
- movl 4(%esi), %eax
- /* saved r[0] */
- /* ################## Calculate word 1 */
- xorl %ebx, %ebx
- /* mul a[1]*b[0] */
- mull %edx
- addl %eax, %ecx
- movl (%esi), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- /* mul a[0]*b[1] */
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl (%edi), %edx
- adcl $0, %ebx
- movl %ecx, 4(%eax)
- movl 8(%esi), %eax
- /* saved r[1] */
- /* ################## Calculate word 2 */
- xorl %ecx, %ecx
- /* mul a[2]*b[0] */
- mull %edx
- addl %eax, %ebp
- movl 4(%esi), %eax
- adcl %edx, %ebx
- movl 4(%edi), %edx
- adcl $0, %ecx
- /* mul a[1]*b[1] */
- mull %edx
- addl %eax, %ebp
- movl (%esi), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- /* mul a[0]*b[2] */
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl (%edi), %edx
- adcl $0, %ecx
- movl %ebp, 8(%eax)
- movl 12(%esi), %eax
- /* saved r[2] */
- /* ################## Calculate word 3 */
- xorl %ebp, %ebp
- /* mul a[3]*b[0] */
- mull %edx
- addl %eax, %ebx
- movl 8(%esi), %eax
- adcl %edx, %ecx
- movl 4(%edi), %edx
- adcl $0, %ebp
- /* mul a[2]*b[1] */
- mull %edx
- addl %eax, %ebx
- movl 4(%esi), %eax
- adcl %edx, %ecx
- movl 8(%edi), %edx
- adcl $0, %ebp
- /* mul a[1]*b[2] */
- mull %edx
- addl %eax, %ebx
- movl (%esi), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- /* mul a[0]*b[3] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl (%edi), %edx
- adcl $0, %ebp
- movl %ebx, 12(%eax)
- movl 16(%esi), %eax
- /* saved r[3] */
- /* ################## Calculate word 4 */
- xorl %ebx, %ebx
- /* mul a[4]*b[0] */
- mull %edx
- addl %eax, %ecx
- movl 12(%esi), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- /* mul a[3]*b[1] */
- mull %edx
- addl %eax, %ecx
- movl 8(%esi), %eax
- adcl %edx, %ebp
- movl 8(%edi), %edx
- adcl $0, %ebx
- /* mul a[2]*b[2] */
- mull %edx
- addl %eax, %ecx
- movl 4(%esi), %eax
- adcl %edx, %ebp
- movl 12(%edi), %edx
- adcl $0, %ebx
- /* mul a[1]*b[3] */
- mull %edx
- addl %eax, %ecx
- movl (%esi), %eax
- adcl %edx, %ebp
- movl 16(%edi), %edx
- adcl $0, %ebx
- /* mul a[0]*b[4] */
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl (%edi), %edx
- adcl $0, %ebx
- movl %ecx, 16(%eax)
- movl 20(%esi), %eax
- /* saved r[4] */
- /* ################## Calculate word 5 */
- xorl %ecx, %ecx
- /* mul a[5]*b[0] */
- mull %edx
- addl %eax, %ebp
- movl 16(%esi), %eax
- adcl %edx, %ebx
- movl 4(%edi), %edx
- adcl $0, %ecx
- /* mul a[4]*b[1] */
- mull %edx
- addl %eax, %ebp
- movl 12(%esi), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- /* mul a[3]*b[2] */
- mull %edx
- addl %eax, %ebp
- movl 8(%esi), %eax
- adcl %edx, %ebx
- movl 12(%edi), %edx
- adcl $0, %ecx
- /* mul a[2]*b[3] */
- mull %edx
- addl %eax, %ebp
- movl 4(%esi), %eax
- adcl %edx, %ebx
- movl 16(%edi), %edx
- adcl $0, %ecx
- /* mul a[1]*b[4] */
- mull %edx
- addl %eax, %ebp
- movl (%esi), %eax
- adcl %edx, %ebx
- movl 20(%edi), %edx
- adcl $0, %ecx
- /* mul a[0]*b[5] */
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl (%edi), %edx
- adcl $0, %ecx
- movl %ebp, 20(%eax)
- movl 24(%esi), %eax
- /* saved r[5] */
- /* ################## Calculate word 6 */
- xorl %ebp, %ebp
- /* mul a[6]*b[0] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esi), %eax
- adcl %edx, %ecx
- movl 4(%edi), %edx
- adcl $0, %ebp
- /* mul a[5]*b[1] */
- mull %edx
- addl %eax, %ebx
- movl 16(%esi), %eax
- adcl %edx, %ecx
- movl 8(%edi), %edx
- adcl $0, %ebp
- /* mul a[4]*b[2] */
- mull %edx
- addl %eax, %ebx
- movl 12(%esi), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- /* mul a[3]*b[3] */
- mull %edx
- addl %eax, %ebx
- movl 8(%esi), %eax
- adcl %edx, %ecx
- movl 16(%edi), %edx
- adcl $0, %ebp
- /* mul a[2]*b[4] */
- mull %edx
- addl %eax, %ebx
- movl 4(%esi), %eax
- adcl %edx, %ecx
- movl 20(%edi), %edx
- adcl $0, %ebp
- /* mul a[1]*b[5] */
- mull %edx
- addl %eax, %ebx
- movl (%esi), %eax
- adcl %edx, %ecx
- movl 24(%edi), %edx
- adcl $0, %ebp
- /* mul a[0]*b[6] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl (%edi), %edx
- adcl $0, %ebp
- movl %ebx, 24(%eax)
- movl 28(%esi), %eax
- /* saved r[6] */
- /* ################## Calculate word 7 */
- xorl %ebx, %ebx
- /* mul a[7]*b[0] */
- mull %edx
- addl %eax, %ecx
- movl 24(%esi), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- /* mul a[6]*b[1] */
- mull %edx
- addl %eax, %ecx
- movl 20(%esi), %eax
- adcl %edx, %ebp
- movl 8(%edi), %edx
- adcl $0, %ebx
- /* mul a[5]*b[2] */
- mull %edx
- addl %eax, %ecx
- movl 16(%esi), %eax
- adcl %edx, %ebp
- movl 12(%edi), %edx
- adcl $0, %ebx
- /* mul a[4]*b[3] */
- mull %edx
- addl %eax, %ecx
- movl 12(%esi), %eax
- adcl %edx, %ebp
- movl 16(%edi), %edx
- adcl $0, %ebx
- /* mul a[3]*b[4] */
- mull %edx
- addl %eax, %ecx
- movl 8(%esi), %eax
- adcl %edx, %ebp
- movl 20(%edi), %edx
- adcl $0, %ebx
- /* mul a[2]*b[5] */
- mull %edx
- addl %eax, %ecx
- movl 4(%esi), %eax
- adcl %edx, %ebp
- movl 24(%edi), %edx
- adcl $0, %ebx
- /* mul a[1]*b[6] */
- mull %edx
- addl %eax, %ecx
- movl (%esi), %eax
- adcl %edx, %ebp
- movl 28(%edi), %edx
- adcl $0, %ebx
- /* mul a[0]*b[7] */
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- movl %ecx, 28(%eax)
- movl 28(%esi), %eax
- /* saved r[7] */
- /* ################## Calculate word 8 */
- xorl %ecx, %ecx
- /* mul a[7]*b[1] */
- mull %edx
- addl %eax, %ebp
- movl 24(%esi), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- /* mul a[6]*b[2] */
- mull %edx
- addl %eax, %ebp
- movl 20(%esi), %eax
- adcl %edx, %ebx
- movl 12(%edi), %edx
- adcl $0, %ecx
- /* mul a[5]*b[3] */
- mull %edx
- addl %eax, %ebp
- movl 16(%esi), %eax
- adcl %edx, %ebx
- movl 16(%edi), %edx
- adcl $0, %ecx
- /* mul a[4]*b[4] */
- mull %edx
- addl %eax, %ebp
- movl 12(%esi), %eax
- adcl %edx, %ebx
- movl 20(%edi), %edx
- adcl $0, %ecx
- /* mul a[3]*b[5] */
- mull %edx
- addl %eax, %ebp
- movl 8(%esi), %eax
- adcl %edx, %ebx
- movl 24(%edi), %edx
- adcl $0, %ecx
- /* mul a[2]*b[6] */
- mull %edx
- addl %eax, %ebp
- movl 4(%esi), %eax
- adcl %edx, %ebx
- movl 28(%edi), %edx
- adcl $0, %ecx
- /* mul a[1]*b[7] */
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- movl %ebp, 32(%eax)
- movl 28(%esi), %eax
- /* saved r[8] */
- /* ################## Calculate word 9 */
- xorl %ebp, %ebp
- /* mul a[7]*b[2] */
- mull %edx
- addl %eax, %ebx
- movl 24(%esi), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- /* mul a[6]*b[3] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esi), %eax
- adcl %edx, %ecx
- movl 16(%edi), %edx
- adcl $0, %ebp
- /* mul a[5]*b[4] */
- mull %edx
- addl %eax, %ebx
- movl 16(%esi), %eax
- adcl %edx, %ecx
- movl 20(%edi), %edx
- adcl $0, %ebp
- /* mul a[4]*b[5] */
- mull %edx
- addl %eax, %ebx
- movl 12(%esi), %eax
- adcl %edx, %ecx
- movl 24(%edi), %edx
- adcl $0, %ebp
- /* mul a[3]*b[6] */
- mull %edx
- addl %eax, %ebx
- movl 8(%esi), %eax
- adcl %edx, %ecx
- movl 28(%edi), %edx
- adcl $0, %ebp
- /* mul a[2]*b[7] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- movl %ebx, 36(%eax)
- movl 28(%esi), %eax
- /* saved r[9] */
- /* ################## Calculate word 10 */
- xorl %ebx, %ebx
- /* mul a[7]*b[3] */
- mull %edx
- addl %eax, %ecx
- movl 24(%esi), %eax
- adcl %edx, %ebp
- movl 16(%edi), %edx
- adcl $0, %ebx
- /* mul a[6]*b[4] */
- mull %edx
- addl %eax, %ecx
- movl 20(%esi), %eax
- adcl %edx, %ebp
- movl 20(%edi), %edx
- adcl $0, %ebx
- /* mul a[5]*b[5] */
- mull %edx
- addl %eax, %ecx
- movl 16(%esi), %eax
- adcl %edx, %ebp
- movl 24(%edi), %edx
- adcl $0, %ebx
- /* mul a[4]*b[6] */
- mull %edx
- addl %eax, %ecx
- movl 12(%esi), %eax
- adcl %edx, %ebp
- movl 28(%edi), %edx
- adcl $0, %ebx
- /* mul a[3]*b[7] */
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl 16(%edi), %edx
- adcl $0, %ebx
- movl %ecx, 40(%eax)
- movl 28(%esi), %eax
- /* saved r[10] */
- /* ################## Calculate word 11 */
- xorl %ecx, %ecx
- /* mul a[7]*b[4] */
- mull %edx
- addl %eax, %ebp
- movl 24(%esi), %eax
- adcl %edx, %ebx
- movl 20(%edi), %edx
- adcl $0, %ecx
- /* mul a[6]*b[5] */
- mull %edx
- addl %eax, %ebp
- movl 20(%esi), %eax
- adcl %edx, %ebx
- movl 24(%edi), %edx
- adcl $0, %ecx
- /* mul a[5]*b[6] */
- mull %edx
- addl %eax, %ebp
- movl 16(%esi), %eax
- adcl %edx, %ebx
- movl 28(%edi), %edx
- adcl $0, %ecx
- /* mul a[4]*b[7] */
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl 20(%edi), %edx
- adcl $0, %ecx
- movl %ebp, 44(%eax)
- movl 28(%esi), %eax
- /* saved r[11] */
- /* ################## Calculate word 12 */
- xorl %ebp, %ebp
- /* mul a[7]*b[5] */
- mull %edx
- addl %eax, %ebx
- movl 24(%esi), %eax
- adcl %edx, %ecx
- movl 24(%edi), %edx
- adcl $0, %ebp
- /* mul a[6]*b[6] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esi), %eax
- adcl %edx, %ecx
- movl 28(%edi), %edx
- adcl $0, %ebp
- /* mul a[5]*b[7] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl 24(%edi), %edx
- adcl $0, %ebp
- movl %ebx, 48(%eax)
- movl 28(%esi), %eax
- /* saved r[12] */
- /* ################## Calculate word 13 */
- xorl %ebx, %ebx
- /* mul a[7]*b[6] */
- mull %edx
- addl %eax, %ecx
- movl 24(%esi), %eax
- adcl %edx, %ebp
- movl 28(%edi), %edx
- adcl $0, %ebx
- /* mul a[6]*b[7] */
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl 28(%edi), %edx
- adcl $0, %ebx
- movl %ecx, 52(%eax)
- movl 28(%esi), %eax
- /* saved r[13] */
- /* ################## Calculate word 14 */
- xorl %ecx, %ecx
- /* mul a[7]*b[7] */
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- adcl $0, %ecx
- movl %ebp, 56(%eax)
- /* saved r[14] */
- /* save r[15] */
- movl %ebx, 60(%eax)
- popl %ebx
- popl %ebp
- popl %edi
- popl %esi
- ret
-.bn_mul_comba8_end:
- SIZE(bn_mul_comba8,.bn_mul_comba8_end-bn_mul_comba8)
-.ident "desasm.pl"
-.text
- .align ALIGN
-.globl bn_mul_comba4
- TYPE(bn_mul_comba4,@function)
-bn_mul_comba4:
- pushl %esi
- movl 12(%esp), %esi
- pushl %edi
- movl 20(%esp), %edi
- pushl %ebp
- pushl %ebx
- xorl %ebx, %ebx
- movl (%esi), %eax
- xorl %ecx, %ecx
- movl (%edi), %edx
- /* ################## Calculate word 0 */
- xorl %ebp, %ebp
- /* mul a[0]*b[0] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl (%edi), %edx
- adcl $0, %ebp
- movl %ebx, (%eax)
- movl 4(%esi), %eax
- /* saved r[0] */
- /* ################## Calculate word 1 */
- xorl %ebx, %ebx
- /* mul a[1]*b[0] */
- mull %edx
- addl %eax, %ecx
- movl (%esi), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- /* mul a[0]*b[1] */
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl (%edi), %edx
- adcl $0, %ebx
- movl %ecx, 4(%eax)
- movl 8(%esi), %eax
- /* saved r[1] */
- /* ################## Calculate word 2 */
- xorl %ecx, %ecx
- /* mul a[2]*b[0] */
- mull %edx
- addl %eax, %ebp
- movl 4(%esi), %eax
- adcl %edx, %ebx
- movl 4(%edi), %edx
- adcl $0, %ecx
- /* mul a[1]*b[1] */
- mull %edx
- addl %eax, %ebp
- movl (%esi), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- /* mul a[0]*b[2] */
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl (%edi), %edx
- adcl $0, %ecx
- movl %ebp, 8(%eax)
- movl 12(%esi), %eax
- /* saved r[2] */
- /* ################## Calculate word 3 */
- xorl %ebp, %ebp
- /* mul a[3]*b[0] */
- mull %edx
- addl %eax, %ebx
- movl 8(%esi), %eax
- adcl %edx, %ecx
- movl 4(%edi), %edx
- adcl $0, %ebp
- /* mul a[2]*b[1] */
- mull %edx
- addl %eax, %ebx
- movl 4(%esi), %eax
- adcl %edx, %ecx
- movl 8(%edi), %edx
- adcl $0, %ebp
- /* mul a[1]*b[2] */
- mull %edx
- addl %eax, %ebx
- movl (%esi), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- /* mul a[0]*b[3] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl 4(%edi), %edx
- adcl $0, %ebp
- movl %ebx, 12(%eax)
- movl 12(%esi), %eax
- /* saved r[3] */
- /* ################## Calculate word 4 */
- xorl %ebx, %ebx
- /* mul a[3]*b[1] */
- mull %edx
- addl %eax, %ecx
- movl 8(%esi), %eax
- adcl %edx, %ebp
- movl 8(%edi), %edx
- adcl $0, %ebx
- /* mul a[2]*b[2] */
- mull %edx
- addl %eax, %ecx
- movl 4(%esi), %eax
- adcl %edx, %ebp
- movl 12(%edi), %edx
- adcl $0, %ebx
- /* mul a[1]*b[3] */
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl 8(%edi), %edx
- adcl $0, %ebx
- movl %ecx, 16(%eax)
- movl 12(%esi), %eax
- /* saved r[4] */
- /* ################## Calculate word 5 */
- xorl %ecx, %ecx
- /* mul a[3]*b[2] */
- mull %edx
- addl %eax, %ebp
- movl 8(%esi), %eax
- adcl %edx, %ebx
- movl 12(%edi), %edx
- adcl $0, %ecx
- /* mul a[2]*b[3] */
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl 12(%edi), %edx
- adcl $0, %ecx
- movl %ebp, 20(%eax)
- movl 12(%esi), %eax
- /* saved r[5] */
- /* ################## Calculate word 6 */
- xorl %ebp, %ebp
- /* mul a[3]*b[3] */
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- adcl $0, %ebp
- movl %ebx, 24(%eax)
- /* saved r[6] */
- /* save r[7] */
- movl %ecx, 28(%eax)
- popl %ebx
- popl %ebp
- popl %edi
- popl %esi
- ret
-.bn_mul_comba4_end:
- SIZE(bn_mul_comba4,.bn_mul_comba4_end-bn_mul_comba4)
-.ident "desasm.pl"
-.text
- .align ALIGN
-.globl bn_sqr_comba8
- TYPE(bn_sqr_comba8,@function)
-bn_sqr_comba8:
- pushl %esi
- pushl %edi
- pushl %ebp
- pushl %ebx
- movl 20(%esp), %edi
- movl 24(%esp), %esi
- xorl %ebx, %ebx
- xorl %ecx, %ecx
- movl (%esi), %eax
- /* ############### Calculate word 0 */
- xorl %ebp, %ebp
- /* sqr a[0]*a[0] */
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- movl (%esi), %edx
- adcl $0, %ebp
- movl %ebx, (%edi)
- movl 4(%esi), %eax
- /* saved r[0] */
- /* ############### Calculate word 1 */
- xorl %ebx, %ebx
- /* sqr a[1]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %eax
- adcl $0, %ebx
- movl %ecx, 4(%edi)
- movl (%esi), %edx
- /* saved r[1] */
- /* ############### Calculate word 2 */
- xorl %ecx, %ecx
- /* sqr a[2]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 4(%esi), %eax
- adcl $0, %ecx
- /* sqr a[1]*a[1] */
- mull %eax
- addl %eax, %ebp
- adcl %edx, %ebx
- movl (%esi), %edx
- adcl $0, %ecx
- movl %ebp, 8(%edi)
- movl 12(%esi), %eax
- /* saved r[2] */
- /* ############### Calculate word 3 */
- xorl %ebp, %ebp
- /* sqr a[3]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 8(%esi), %eax
- adcl $0, %ebp
- movl 4(%esi), %edx
- /* sqr a[2]*a[1] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 16(%esi), %eax
- adcl $0, %ebp
- movl %ebx, 12(%edi)
- movl (%esi), %edx
- /* saved r[3] */
- /* ############### Calculate word 4 */
- xorl %ebx, %ebx
- /* sqr a[4]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 12(%esi), %eax
- adcl $0, %ebx
- movl 4(%esi), %edx
- /* sqr a[3]*a[1] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %eax
- adcl $0, %ebx
- /* sqr a[2]*a[2] */
- mull %eax
- addl %eax, %ecx
- adcl %edx, %ebp
- movl (%esi), %edx
- adcl $0, %ebx
- movl %ecx, 16(%edi)
- movl 20(%esi), %eax
- /* saved r[4] */
- /* ############### Calculate word 5 */
- xorl %ecx, %ecx
- /* sqr a[5]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 16(%esi), %eax
- adcl $0, %ecx
- movl 4(%esi), %edx
- /* sqr a[4]*a[1] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 12(%esi), %eax
- adcl $0, %ecx
- movl 8(%esi), %edx
- /* sqr a[3]*a[2] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 24(%esi), %eax
- adcl $0, %ecx
- movl %ebp, 20(%edi)
- movl (%esi), %edx
- /* saved r[5] */
- /* ############### Calculate word 6 */
- xorl %ebp, %ebp
- /* sqr a[6]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 20(%esi), %eax
- adcl $0, %ebp
- movl 4(%esi), %edx
- /* sqr a[5]*a[1] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 16(%esi), %eax
- adcl $0, %ebp
- movl 8(%esi), %edx
- /* sqr a[4]*a[2] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 12(%esi), %eax
- adcl $0, %ebp
- /* sqr a[3]*a[3] */
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- movl (%esi), %edx
- adcl $0, %ebp
- movl %ebx, 24(%edi)
- movl 28(%esi), %eax
- /* saved r[6] */
- /* ############### Calculate word 7 */
- xorl %ebx, %ebx
- /* sqr a[7]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 24(%esi), %eax
- adcl $0, %ebx
- movl 4(%esi), %edx
- /* sqr a[6]*a[1] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 20(%esi), %eax
- adcl $0, %ebx
- movl 8(%esi), %edx
- /* sqr a[5]*a[2] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 16(%esi), %eax
- adcl $0, %ebx
- movl 12(%esi), %edx
- /* sqr a[4]*a[3] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 28(%esi), %eax
- adcl $0, %ebx
- movl %ecx, 28(%edi)
- movl 4(%esi), %edx
- /* saved r[7] */
- /* ############### Calculate word 8 */
- xorl %ecx, %ecx
- /* sqr a[7]*a[1] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 24(%esi), %eax
- adcl $0, %ecx
- movl 8(%esi), %edx
- /* sqr a[6]*a[2] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 20(%esi), %eax
- adcl $0, %ecx
- movl 12(%esi), %edx
- /* sqr a[5]*a[3] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 16(%esi), %eax
- adcl $0, %ecx
- /* sqr a[4]*a[4] */
- mull %eax
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 8(%esi), %edx
- adcl $0, %ecx
- movl %ebp, 32(%edi)
- movl 28(%esi), %eax
- /* saved r[8] */
- /* ############### Calculate word 9 */
- xorl %ebp, %ebp
- /* sqr a[7]*a[2] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 24(%esi), %eax
- adcl $0, %ebp
- movl 12(%esi), %edx
- /* sqr a[6]*a[3] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 20(%esi), %eax
- adcl $0, %ebp
- movl 16(%esi), %edx
- /* sqr a[5]*a[4] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 28(%esi), %eax
- adcl $0, %ebp
- movl %ebx, 36(%edi)
- movl 12(%esi), %edx
- /* saved r[9] */
- /* ############### Calculate word 10 */
- xorl %ebx, %ebx
- /* sqr a[7]*a[3] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 24(%esi), %eax
- adcl $0, %ebx
- movl 16(%esi), %edx
- /* sqr a[6]*a[4] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 20(%esi), %eax
- adcl $0, %ebx
- /* sqr a[5]*a[5] */
- mull %eax
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 16(%esi), %edx
- adcl $0, %ebx
- movl %ecx, 40(%edi)
- movl 28(%esi), %eax
- /* saved r[10] */
- /* ############### Calculate word 11 */
- xorl %ecx, %ecx
- /* sqr a[7]*a[4] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 24(%esi), %eax
- adcl $0, %ecx
- movl 20(%esi), %edx
- /* sqr a[6]*a[5] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 28(%esi), %eax
- adcl $0, %ecx
- movl %ebp, 44(%edi)
- movl 20(%esi), %edx
- /* saved r[11] */
- /* ############### Calculate word 12 */
- xorl %ebp, %ebp
- /* sqr a[7]*a[5] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 24(%esi), %eax
- adcl $0, %ebp
- /* sqr a[6]*a[6] */
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 24(%esi), %edx
- adcl $0, %ebp
- movl %ebx, 48(%edi)
- movl 28(%esi), %eax
- /* saved r[12] */
- /* ############### Calculate word 13 */
- xorl %ebx, %ebx
- /* sqr a[7]*a[6] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 28(%esi), %eax
- adcl $0, %ebx
- movl %ecx, 52(%edi)
- /* saved r[13] */
- /* ############### Calculate word 14 */
- xorl %ecx, %ecx
- /* sqr a[7]*a[7] */
- mull %eax
- addl %eax, %ebp
- adcl %edx, %ebx
- adcl $0, %ecx
- movl %ebp, 56(%edi)
- /* saved r[14] */
- movl %ebx, 60(%edi)
- popl %ebx
- popl %ebp
- popl %edi
- popl %esi
- ret
-.bn_sqr_comba8_end:
- SIZE(bn_sqr_comba8,.bn_sqr_comba8_end-bn_sqr_comba8)
-.ident "desasm.pl"
-.text
- .align ALIGN
-.globl bn_sqr_comba4
- TYPE(bn_sqr_comba4,@function)
-bn_sqr_comba4:
- pushl %esi
- pushl %edi
- pushl %ebp
- pushl %ebx
- movl 20(%esp), %edi
- movl 24(%esp), %esi
- xorl %ebx, %ebx
- xorl %ecx, %ecx
- movl (%esi), %eax
- /* ############### Calculate word 0 */
- xorl %ebp, %ebp
- /* sqr a[0]*a[0] */
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- movl (%esi), %edx
- adcl $0, %ebp
- movl %ebx, (%edi)
- movl 4(%esi), %eax
- /* saved r[0] */
- /* ############### Calculate word 1 */
- xorl %ebx, %ebx
- /* sqr a[1]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %eax
- adcl $0, %ebx
- movl %ecx, 4(%edi)
- movl (%esi), %edx
- /* saved r[1] */
- /* ############### Calculate word 2 */
- xorl %ecx, %ecx
- /* sqr a[2]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 4(%esi), %eax
- adcl $0, %ecx
- /* sqr a[1]*a[1] */
- mull %eax
- addl %eax, %ebp
- adcl %edx, %ebx
- movl (%esi), %edx
- adcl $0, %ecx
- movl %ebp, 8(%edi)
- movl 12(%esi), %eax
- /* saved r[2] */
- /* ############### Calculate word 3 */
- xorl %ebp, %ebp
- /* sqr a[3]*a[0] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 8(%esi), %eax
- adcl $0, %ebp
- movl 4(%esi), %edx
- /* sqr a[2]*a[1] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 12(%esi), %eax
- adcl $0, %ebp
- movl %ebx, 12(%edi)
- movl 4(%esi), %edx
- /* saved r[3] */
- /* ############### Calculate word 4 */
- xorl %ebx, %ebx
- /* sqr a[3]*a[1] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %eax
- adcl $0, %ebx
- /* sqr a[2]*a[2] */
- mull %eax
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %edx
- adcl $0, %ebx
- movl %ecx, 16(%edi)
- movl 12(%esi), %eax
- /* saved r[4] */
- /* ############### Calculate word 5 */
- xorl %ecx, %ecx
- /* sqr a[3]*a[2] */
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 12(%esi), %eax
- adcl $0, %ecx
- movl %ebp, 20(%edi)
- /* saved r[5] */
- /* ############### Calculate word 6 */
- xorl %ebp, %ebp
- /* sqr a[3]*a[3] */
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- adcl $0, %ebp
- movl %ebx, 24(%edi)
- /* saved r[6] */
- movl %ecx, 28(%edi)
- popl %ebx
- popl %ebp
- popl %edi
- popl %esi
- ret
-.bn_sqr_comba4_end:
- SIZE(bn_sqr_comba4,.bn_sqr_comba4_end-bn_sqr_comba4)
-.ident "desasm.pl"
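The four comba routines removed above (bn_mul_comba8, bn_mul_comba4, bn_sqr_comba8, bn_sqr_comba4) all follow the same column-wise scheme: each output word is the sum of the partial products a[i]*b[j] whose indices add up to that word's position, accumulated in three carry words that rotate through ebx/ecx/ebp. As a reference only, here is a minimal C sketch of that scheme for the 4x4 case; BN_ULONG, mul_add_c and mul_comba4 are illustrative names and are not taken from the deleted files.

#include <stdint.h>

typedef uint32_t BN_ULONG;

/* Fold a*b into the three accumulator words c0:c1:c2, mirroring the
 * mull / addl / adcl / adcl sequence used for every partial product. */
static void mul_add_c(BN_ULONG a, BN_ULONG b,
                      BN_ULONG *c0, BN_ULONG *c1, BN_ULONG *c2)
{
    uint64_t t = (uint64_t)a * b;      /* mull: edx:eax = a*b           */
    BN_ULONG lo = (BN_ULONG)t;
    BN_ULONG hi = (BN_ULONG)(t >> 32);

    *c0 += lo;                         /* addl %eax, c0                 */
    hi  += (*c0 < lo);                 /* carry out of the low word     */
    *c1 += hi;                         /* adcl %edx, c1                 */
    *c2 += (*c1 < hi);                 /* adcl $0, c2                   */
}

/* r[0..7] = a[0..3] * b[0..3], one result word per column. */
void mul_comba4(BN_ULONG *r, const BN_ULONG *a, const BN_ULONG *b)
{
    BN_ULONG c0 = 0, c1 = 0, c2 = 0;

    for (int k = 0; k < 7; k++) {
        for (int i = 0; i < 4; i++) {
            int j = k - i;
            if (j >= 0 && j < 4)
                mul_add_c(a[i], b[j], &c0, &c1, &c2);
        }
        r[k] = c0;                     /* the "saved r[k]" stores       */
        c0 = c1; c1 = c2; c2 = 0;      /* rotate the accumulators       */
    }
    r[7] = c0;                         /* final high word               */
}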
+++ /dev/null
- # Don't even think of reading this code
- # It was automatically generated by bn-586.pl
-	# Which is a perl program used to generate the x86 assembler for
-	# any of elf, a.out, BSDI, Win32, or Solaris
- # eric <eay@cryptsoft.com>
-
- .file "bn-586.s"
- .version "01.01"
-gcc2_compiled.:
-.text
- .align 16
-.globl bn_mul_comba8
- .type bn_mul_comba8,@function
-bn_mul_comba8:
- pushl %esi
- movl 12(%esp), %esi
- pushl %edi
- movl 20(%esp), %edi
- pushl %ebp
- pushl %ebx
- xorl %ebx, %ebx
- movl (%esi), %eax
- xorl %ecx, %ecx
- movl (%edi), %edx
- # ################## Calculate word 0
- xorl %ebp, %ebp
- # mul a[0]*b[0]
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl (%edi), %edx
- adcl $0, %ebp
- movl %ebx, (%eax)
- movl 4(%esi), %eax
- # saved r[0]
- # ################## Calculate word 1
- xorl %ebx, %ebx
- # mul a[1]*b[0]
- mull %edx
- addl %eax, %ecx
- movl (%esi), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- # mul a[0]*b[1]
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl (%edi), %edx
- adcl $0, %ebx
- movl %ecx, 4(%eax)
- movl 8(%esi), %eax
- # saved r[1]
- # ################## Calculate word 2
- xorl %ecx, %ecx
- # mul a[2]*b[0]
- mull %edx
- addl %eax, %ebp
- movl 4(%esi), %eax
- adcl %edx, %ebx
- movl 4(%edi), %edx
- adcl $0, %ecx
- # mul a[1]*b[1]
- mull %edx
- addl %eax, %ebp
- movl (%esi), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- # mul a[0]*b[2]
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl (%edi), %edx
- adcl $0, %ecx
- movl %ebp, 8(%eax)
- movl 12(%esi), %eax
- # saved r[2]
- # ################## Calculate word 3
- xorl %ebp, %ebp
- # mul a[3]*b[0]
- mull %edx
- addl %eax, %ebx
- movl 8(%esi), %eax
- adcl %edx, %ecx
- movl 4(%edi), %edx
- adcl $0, %ebp
- # mul a[2]*b[1]
- mull %edx
- addl %eax, %ebx
- movl 4(%esi), %eax
- adcl %edx, %ecx
- movl 8(%edi), %edx
- adcl $0, %ebp
- # mul a[1]*b[2]
- mull %edx
- addl %eax, %ebx
- movl (%esi), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- # mul a[0]*b[3]
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl (%edi), %edx
- adcl $0, %ebp
- movl %ebx, 12(%eax)
- movl 16(%esi), %eax
- # saved r[3]
- # ################## Calculate word 4
- xorl %ebx, %ebx
- # mul a[4]*b[0]
- mull %edx
- addl %eax, %ecx
- movl 12(%esi), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- # mul a[3]*b[1]
- mull %edx
- addl %eax, %ecx
- movl 8(%esi), %eax
- adcl %edx, %ebp
- movl 8(%edi), %edx
- adcl $0, %ebx
- # mul a[2]*b[2]
- mull %edx
- addl %eax, %ecx
- movl 4(%esi), %eax
- adcl %edx, %ebp
- movl 12(%edi), %edx
- adcl $0, %ebx
- # mul a[1]*b[3]
- mull %edx
- addl %eax, %ecx
- movl (%esi), %eax
- adcl %edx, %ebp
- movl 16(%edi), %edx
- adcl $0, %ebx
- # mul a[0]*b[4]
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl (%edi), %edx
- adcl $0, %ebx
- movl %ecx, 16(%eax)
- movl 20(%esi), %eax
- # saved r[4]
- # ################## Calculate word 5
- xorl %ecx, %ecx
- # mul a[5]*b[0]
- mull %edx
- addl %eax, %ebp
- movl 16(%esi), %eax
- adcl %edx, %ebx
- movl 4(%edi), %edx
- adcl $0, %ecx
- # mul a[4]*b[1]
- mull %edx
- addl %eax, %ebp
- movl 12(%esi), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- # mul a[3]*b[2]
- mull %edx
- addl %eax, %ebp
- movl 8(%esi), %eax
- adcl %edx, %ebx
- movl 12(%edi), %edx
- adcl $0, %ecx
- # mul a[2]*b[3]
- mull %edx
- addl %eax, %ebp
- movl 4(%esi), %eax
- adcl %edx, %ebx
- movl 16(%edi), %edx
- adcl $0, %ecx
- # mul a[1]*b[4]
- mull %edx
- addl %eax, %ebp
- movl (%esi), %eax
- adcl %edx, %ebx
- movl 20(%edi), %edx
- adcl $0, %ecx
- # mul a[0]*b[5]
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl (%edi), %edx
- adcl $0, %ecx
- movl %ebp, 20(%eax)
- movl 24(%esi), %eax
- # saved r[5]
- # ################## Calculate word 6
- xorl %ebp, %ebp
- # mul a[6]*b[0]
- mull %edx
- addl %eax, %ebx
- movl 20(%esi), %eax
- adcl %edx, %ecx
- movl 4(%edi), %edx
- adcl $0, %ebp
- # mul a[5]*b[1]
- mull %edx
- addl %eax, %ebx
- movl 16(%esi), %eax
- adcl %edx, %ecx
- movl 8(%edi), %edx
- adcl $0, %ebp
- # mul a[4]*b[2]
- mull %edx
- addl %eax, %ebx
- movl 12(%esi), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- # mul a[3]*b[3]
- mull %edx
- addl %eax, %ebx
- movl 8(%esi), %eax
- adcl %edx, %ecx
- movl 16(%edi), %edx
- adcl $0, %ebp
- # mul a[2]*b[4]
- mull %edx
- addl %eax, %ebx
- movl 4(%esi), %eax
- adcl %edx, %ecx
- movl 20(%edi), %edx
- adcl $0, %ebp
- # mul a[1]*b[5]
- mull %edx
- addl %eax, %ebx
- movl (%esi), %eax
- adcl %edx, %ecx
- movl 24(%edi), %edx
- adcl $0, %ebp
- # mul a[0]*b[6]
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl (%edi), %edx
- adcl $0, %ebp
- movl %ebx, 24(%eax)
- movl 28(%esi), %eax
- # saved r[6]
- # ################## Calculate word 7
- xorl %ebx, %ebx
- # mul a[7]*b[0]
- mull %edx
- addl %eax, %ecx
- movl 24(%esi), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- # mul a[6]*b[1]
- mull %edx
- addl %eax, %ecx
- movl 20(%esi), %eax
- adcl %edx, %ebp
- movl 8(%edi), %edx
- adcl $0, %ebx
- # mul a[5]*b[2]
- mull %edx
- addl %eax, %ecx
- movl 16(%esi), %eax
- adcl %edx, %ebp
- movl 12(%edi), %edx
- adcl $0, %ebx
- # mul a[4]*b[3]
- mull %edx
- addl %eax, %ecx
- movl 12(%esi), %eax
- adcl %edx, %ebp
- movl 16(%edi), %edx
- adcl $0, %ebx
- # mul a[3]*b[4]
- mull %edx
- addl %eax, %ecx
- movl 8(%esi), %eax
- adcl %edx, %ebp
- movl 20(%edi), %edx
- adcl $0, %ebx
- # mul a[2]*b[5]
- mull %edx
- addl %eax, %ecx
- movl 4(%esi), %eax
- adcl %edx, %ebp
- movl 24(%edi), %edx
- adcl $0, %ebx
- # mul a[1]*b[6]
- mull %edx
- addl %eax, %ecx
- movl (%esi), %eax
- adcl %edx, %ebp
- movl 28(%edi), %edx
- adcl $0, %ebx
- # mul a[0]*b[7]
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- movl %ecx, 28(%eax)
- movl 28(%esi), %eax
- # saved r[7]
- # ################## Calculate word 8
- xorl %ecx, %ecx
- # mul a[7]*b[1]
- mull %edx
- addl %eax, %ebp
- movl 24(%esi), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- # mul a[6]*b[2]
- mull %edx
- addl %eax, %ebp
- movl 20(%esi), %eax
- adcl %edx, %ebx
- movl 12(%edi), %edx
- adcl $0, %ecx
- # mul a[5]*b[3]
- mull %edx
- addl %eax, %ebp
- movl 16(%esi), %eax
- adcl %edx, %ebx
- movl 16(%edi), %edx
- adcl $0, %ecx
- # mul a[4]*b[4]
- mull %edx
- addl %eax, %ebp
- movl 12(%esi), %eax
- adcl %edx, %ebx
- movl 20(%edi), %edx
- adcl $0, %ecx
- # mul a[3]*b[5]
- mull %edx
- addl %eax, %ebp
- movl 8(%esi), %eax
- adcl %edx, %ebx
- movl 24(%edi), %edx
- adcl $0, %ecx
- # mul a[2]*b[6]
- mull %edx
- addl %eax, %ebp
- movl 4(%esi), %eax
- adcl %edx, %ebx
- movl 28(%edi), %edx
- adcl $0, %ecx
- # mul a[1]*b[7]
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- movl %ebp, 32(%eax)
- movl 28(%esi), %eax
- # saved r[8]
- # ################## Calculate word 9
- xorl %ebp, %ebp
- # mul a[7]*b[2]
- mull %edx
- addl %eax, %ebx
- movl 24(%esi), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- # mul a[6]*b[3]
- mull %edx
- addl %eax, %ebx
- movl 20(%esi), %eax
- adcl %edx, %ecx
- movl 16(%edi), %edx
- adcl $0, %ebp
- # mul a[5]*b[4]
- mull %edx
- addl %eax, %ebx
- movl 16(%esi), %eax
- adcl %edx, %ecx
- movl 20(%edi), %edx
- adcl $0, %ebp
- # mul a[4]*b[5]
- mull %edx
- addl %eax, %ebx
- movl 12(%esi), %eax
- adcl %edx, %ecx
- movl 24(%edi), %edx
- adcl $0, %ebp
- # mul a[3]*b[6]
- mull %edx
- addl %eax, %ebx
- movl 8(%esi), %eax
- adcl %edx, %ecx
- movl 28(%edi), %edx
- adcl $0, %ebp
- # mul a[2]*b[7]
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- movl %ebx, 36(%eax)
- movl 28(%esi), %eax
- # saved r[9]
- # ################## Calculate word 10
- xorl %ebx, %ebx
- # mul a[7]*b[3]
- mull %edx
- addl %eax, %ecx
- movl 24(%esi), %eax
- adcl %edx, %ebp
- movl 16(%edi), %edx
- adcl $0, %ebx
- # mul a[6]*b[4]
- mull %edx
- addl %eax, %ecx
- movl 20(%esi), %eax
- adcl %edx, %ebp
- movl 20(%edi), %edx
- adcl $0, %ebx
- # mul a[5]*b[5]
- mull %edx
- addl %eax, %ecx
- movl 16(%esi), %eax
- adcl %edx, %ebp
- movl 24(%edi), %edx
- adcl $0, %ebx
- # mul a[4]*b[6]
- mull %edx
- addl %eax, %ecx
- movl 12(%esi), %eax
- adcl %edx, %ebp
- movl 28(%edi), %edx
- adcl $0, %ebx
- # mul a[3]*b[7]
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl 16(%edi), %edx
- adcl $0, %ebx
- movl %ecx, 40(%eax)
- movl 28(%esi), %eax
- # saved r[10]
- # ################## Calculate word 11
- xorl %ecx, %ecx
- # mul a[7]*b[4]
- mull %edx
- addl %eax, %ebp
- movl 24(%esi), %eax
- adcl %edx, %ebx
- movl 20(%edi), %edx
- adcl $0, %ecx
- # mul a[6]*b[5]
- mull %edx
- addl %eax, %ebp
- movl 20(%esi), %eax
- adcl %edx, %ebx
- movl 24(%edi), %edx
- adcl $0, %ecx
- # mul a[5]*b[6]
- mull %edx
- addl %eax, %ebp
- movl 16(%esi), %eax
- adcl %edx, %ebx
- movl 28(%edi), %edx
- adcl $0, %ecx
- # mul a[4]*b[7]
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl 20(%edi), %edx
- adcl $0, %ecx
- movl %ebp, 44(%eax)
- movl 28(%esi), %eax
- # saved r[11]
- # ################## Calculate word 12
- xorl %ebp, %ebp
- # mul a[7]*b[5]
- mull %edx
- addl %eax, %ebx
- movl 24(%esi), %eax
- adcl %edx, %ecx
- movl 24(%edi), %edx
- adcl $0, %ebp
- # mul a[6]*b[6]
- mull %edx
- addl %eax, %ebx
- movl 20(%esi), %eax
- adcl %edx, %ecx
- movl 28(%edi), %edx
- adcl $0, %ebp
- # mul a[5]*b[7]
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl 24(%edi), %edx
- adcl $0, %ebp
- movl %ebx, 48(%eax)
- movl 28(%esi), %eax
- # saved r[12]
- # ################## Calculate word 13
- xorl %ebx, %ebx
- # mul a[7]*b[6]
- mull %edx
- addl %eax, %ecx
- movl 24(%esi), %eax
- adcl %edx, %ebp
- movl 28(%edi), %edx
- adcl $0, %ebx
- # mul a[6]*b[7]
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl 28(%edi), %edx
- adcl $0, %ebx
- movl %ecx, 52(%eax)
- movl 28(%esi), %eax
- # saved r[13]
- # ################## Calculate word 14
- xorl %ecx, %ecx
- # mul a[7]*b[7]
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- adcl $0, %ecx
- movl %ebp, 56(%eax)
- # saved r[14]
- # save r[15]
- movl %ebx, 60(%eax)
- popl %ebx
- popl %ebp
- popl %edi
- popl %esi
- ret
-.bn_mul_comba8_end:
- .size bn_mul_comba8,.bn_mul_comba8_end-bn_mul_comba8
-.ident "desasm.pl"
-.text
- .align 16
-.globl bn_mul_comba4
- .type bn_mul_comba4,@function
-bn_mul_comba4:
- pushl %esi
- movl 12(%esp), %esi
- pushl %edi
- movl 20(%esp), %edi
- pushl %ebp
- pushl %ebx
- xorl %ebx, %ebx
- movl (%esi), %eax
- xorl %ecx, %ecx
- movl (%edi), %edx
- # ################## Calculate word 0
- xorl %ebp, %ebp
- # mul a[0]*b[0]
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl (%edi), %edx
- adcl $0, %ebp
- movl %ebx, (%eax)
- movl 4(%esi), %eax
- # saved r[0]
- # ################## Calculate word 1
- xorl %ebx, %ebx
- # mul a[1]*b[0]
- mull %edx
- addl %eax, %ecx
- movl (%esi), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- # mul a[0]*b[1]
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl (%edi), %edx
- adcl $0, %ebx
- movl %ecx, 4(%eax)
- movl 8(%esi), %eax
- # saved r[1]
- # ################## Calculate word 2
- xorl %ecx, %ecx
- # mul a[2]*b[0]
- mull %edx
- addl %eax, %ebp
- movl 4(%esi), %eax
- adcl %edx, %ebx
- movl 4(%edi), %edx
- adcl $0, %ecx
- # mul a[1]*b[1]
- mull %edx
- addl %eax, %ebp
- movl (%esi), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- # mul a[0]*b[2]
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl (%edi), %edx
- adcl $0, %ecx
- movl %ebp, 8(%eax)
- movl 12(%esi), %eax
- # saved r[2]
- # ################## Calculate word 3
- xorl %ebp, %ebp
- # mul a[3]*b[0]
- mull %edx
- addl %eax, %ebx
- movl 8(%esi), %eax
- adcl %edx, %ecx
- movl 4(%edi), %edx
- adcl $0, %ebp
- # mul a[2]*b[1]
- mull %edx
- addl %eax, %ebx
- movl 4(%esi), %eax
- adcl %edx, %ecx
- movl 8(%edi), %edx
- adcl $0, %ebp
- # mul a[1]*b[2]
- mull %edx
- addl %eax, %ebx
- movl (%esi), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- # mul a[0]*b[3]
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl 4(%edi), %edx
- adcl $0, %ebp
- movl %ebx, 12(%eax)
- movl 12(%esi), %eax
- # saved r[3]
- # ################## Calculate word 4
- xorl %ebx, %ebx
- # mul a[3]*b[1]
- mull %edx
- addl %eax, %ecx
- movl 8(%esi), %eax
- adcl %edx, %ebp
- movl 8(%edi), %edx
- adcl $0, %ebx
- # mul a[2]*b[2]
- mull %edx
- addl %eax, %ecx
- movl 4(%esi), %eax
- adcl %edx, %ebp
- movl 12(%edi), %edx
- adcl $0, %ebx
- # mul a[1]*b[3]
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl 8(%edi), %edx
- adcl $0, %ebx
- movl %ecx, 16(%eax)
- movl 12(%esi), %eax
- # saved r[4]
- # ################## Calculate word 5
- xorl %ecx, %ecx
- # mul a[3]*b[2]
- mull %edx
- addl %eax, %ebp
- movl 8(%esi), %eax
- adcl %edx, %ebx
- movl 12(%edi), %edx
- adcl $0, %ecx
- # mul a[2]*b[3]
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl 12(%edi), %edx
- adcl $0, %ecx
- movl %ebp, 20(%eax)
- movl 12(%esi), %eax
- # saved r[5]
- # ################## Calculate word 6
- xorl %ebp, %ebp
- # mul a[3]*b[3]
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- adcl $0, %ebp
- movl %ebx, 24(%eax)
- # saved r[6]
- # save r[7]
- movl %ecx, 28(%eax)
- popl %ebx
- popl %ebp
- popl %edi
- popl %esi
- ret
-.bn_mul_comba4_end:
- .size bn_mul_comba4,.bn_mul_comba4_end-bn_mul_comba4
-.ident "desasm.pl"
-.text
- .align 16
-.globl bn_sqr_comba8
- .type bn_sqr_comba8,@function
-bn_sqr_comba8:
- pushl %esi
- pushl %edi
- pushl %ebp
- pushl %ebx
- movl 20(%esp), %edi
- movl 24(%esp), %esi
- xorl %ebx, %ebx
- xorl %ecx, %ecx
- movl (%esi), %eax
- # ############### Calculate word 0
- xorl %ebp, %ebp
- # sqr a[0]*a[0]
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- movl (%esi), %edx
- adcl $0, %ebp
- movl %ebx, (%edi)
- movl 4(%esi), %eax
- # saved r[0]
- # ############### Calculate word 1
- xorl %ebx, %ebx
- # sqr a[1]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %eax
- adcl $0, %ebx
- movl %ecx, 4(%edi)
- movl (%esi), %edx
- # saved r[1]
- # ############### Calculate word 2
- xorl %ecx, %ecx
- # sqr a[2]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 4(%esi), %eax
- adcl $0, %ecx
- # sqr a[1]*a[1]
- mull %eax
- addl %eax, %ebp
- adcl %edx, %ebx
- movl (%esi), %edx
- adcl $0, %ecx
- movl %ebp, 8(%edi)
- movl 12(%esi), %eax
- # saved r[2]
- # ############### Calculate word 3
- xorl %ebp, %ebp
- # sqr a[3]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 8(%esi), %eax
- adcl $0, %ebp
- movl 4(%esi), %edx
- # sqr a[2]*a[1]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 16(%esi), %eax
- adcl $0, %ebp
- movl %ebx, 12(%edi)
- movl (%esi), %edx
- # saved r[3]
- # ############### Calculate word 4
- xorl %ebx, %ebx
- # sqr a[4]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 12(%esi), %eax
- adcl $0, %ebx
- movl 4(%esi), %edx
- # sqr a[3]*a[1]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %eax
- adcl $0, %ebx
- # sqr a[2]*a[2]
- mull %eax
- addl %eax, %ecx
- adcl %edx, %ebp
- movl (%esi), %edx
- adcl $0, %ebx
- movl %ecx, 16(%edi)
- movl 20(%esi), %eax
- # saved r[4]
- # ############### Calculate word 5
- xorl %ecx, %ecx
- # sqr a[5]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 16(%esi), %eax
- adcl $0, %ecx
- movl 4(%esi), %edx
- # sqr a[4]*a[1]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 12(%esi), %eax
- adcl $0, %ecx
- movl 8(%esi), %edx
- # sqr a[3]*a[2]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 24(%esi), %eax
- adcl $0, %ecx
- movl %ebp, 20(%edi)
- movl (%esi), %edx
- # saved r[5]
- # ############### Calculate word 6
- xorl %ebp, %ebp
- # sqr a[6]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 20(%esi), %eax
- adcl $0, %ebp
- movl 4(%esi), %edx
- # sqr a[5]*a[1]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 16(%esi), %eax
- adcl $0, %ebp
- movl 8(%esi), %edx
- # sqr a[4]*a[2]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 12(%esi), %eax
- adcl $0, %ebp
- # sqr a[3]*a[3]
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- movl (%esi), %edx
- adcl $0, %ebp
- movl %ebx, 24(%edi)
- movl 28(%esi), %eax
- # saved r[6]
- # ############### Calculate word 7
- xorl %ebx, %ebx
- # sqr a[7]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 24(%esi), %eax
- adcl $0, %ebx
- movl 4(%esi), %edx
- # sqr a[6]*a[1]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 20(%esi), %eax
- adcl $0, %ebx
- movl 8(%esi), %edx
- # sqr a[5]*a[2]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 16(%esi), %eax
- adcl $0, %ebx
- movl 12(%esi), %edx
- # sqr a[4]*a[3]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 28(%esi), %eax
- adcl $0, %ebx
- movl %ecx, 28(%edi)
- movl 4(%esi), %edx
- # saved r[7]
- # ############### Calculate word 8
- xorl %ecx, %ecx
- # sqr a[7]*a[1]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 24(%esi), %eax
- adcl $0, %ecx
- movl 8(%esi), %edx
- # sqr a[6]*a[2]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 20(%esi), %eax
- adcl $0, %ecx
- movl 12(%esi), %edx
- # sqr a[5]*a[3]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 16(%esi), %eax
- adcl $0, %ecx
- # sqr a[4]*a[4]
- mull %eax
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 8(%esi), %edx
- adcl $0, %ecx
- movl %ebp, 32(%edi)
- movl 28(%esi), %eax
- # saved r[8]
- # ############### Calculate word 9
- xorl %ebp, %ebp
- # sqr a[7]*a[2]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 24(%esi), %eax
- adcl $0, %ebp
- movl 12(%esi), %edx
- # sqr a[6]*a[3]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 20(%esi), %eax
- adcl $0, %ebp
- movl 16(%esi), %edx
- # sqr a[5]*a[4]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 28(%esi), %eax
- adcl $0, %ebp
- movl %ebx, 36(%edi)
- movl 12(%esi), %edx
- # saved r[9]
- # ############### Calculate word 10
- xorl %ebx, %ebx
- # sqr a[7]*a[3]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 24(%esi), %eax
- adcl $0, %ebx
- movl 16(%esi), %edx
- # sqr a[6]*a[4]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 20(%esi), %eax
- adcl $0, %ebx
- # sqr a[5]*a[5]
- mull %eax
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 16(%esi), %edx
- adcl $0, %ebx
- movl %ecx, 40(%edi)
- movl 28(%esi), %eax
- # saved r[10]
- # ############### Calculate word 11
- xorl %ecx, %ecx
- # sqr a[7]*a[4]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 24(%esi), %eax
- adcl $0, %ecx
- movl 20(%esi), %edx
- # sqr a[6]*a[5]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 28(%esi), %eax
- adcl $0, %ecx
- movl %ebp, 44(%edi)
- movl 20(%esi), %edx
- # saved r[11]
- # ############### Calculate word 12
- xorl %ebp, %ebp
- # sqr a[7]*a[5]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 24(%esi), %eax
- adcl $0, %ebp
- # sqr a[6]*a[6]
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 24(%esi), %edx
- adcl $0, %ebp
- movl %ebx, 48(%edi)
- movl 28(%esi), %eax
- # saved r[12]
- # ############### Calculate word 13
- xorl %ebx, %ebx
- # sqr a[7]*a[6]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 28(%esi), %eax
- adcl $0, %ebx
- movl %ecx, 52(%edi)
- # saved r[13]
- # ############### Calculate word 14
- xorl %ecx, %ecx
- # sqr a[7]*a[7]
- mull %eax
- addl %eax, %ebp
- adcl %edx, %ebx
- adcl $0, %ecx
- movl %ebp, 56(%edi)
- # saved r[14]
- movl %ebx, 60(%edi)
- popl %ebx
- popl %ebp
- popl %edi
- popl %esi
- ret
-.bn_sqr_comba8_end:
- .size bn_sqr_comba8,.bn_sqr_comba8_end-bn_sqr_comba8
-.ident "desasm.pl"
-.text
- .align 16
-.globl bn_sqr_comba4
- .type bn_sqr_comba4,@function
-bn_sqr_comba4:
- pushl %esi
- pushl %edi
- pushl %ebp
- pushl %ebx
- movl 20(%esp), %edi
- movl 24(%esp), %esi
- xorl %ebx, %ebx
- xorl %ecx, %ecx
- movl (%esi), %eax
- # ############### Calculate word 0
- xorl %ebp, %ebp
- # sqr a[0]*a[0]
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- movl (%esi), %edx
- adcl $0, %ebp
- movl %ebx, (%edi)
- movl 4(%esi), %eax
- # saved r[0]
- # ############### Calculate word 1
- xorl %ebx, %ebx
- # sqr a[1]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %eax
- adcl $0, %ebx
- movl %ecx, 4(%edi)
- movl (%esi), %edx
- # saved r[1]
- # ############### Calculate word 2
- xorl %ecx, %ecx
- # sqr a[2]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 4(%esi), %eax
- adcl $0, %ecx
- # sqr a[1]*a[1]
- mull %eax
- addl %eax, %ebp
- adcl %edx, %ebx
- movl (%esi), %edx
- adcl $0, %ecx
- movl %ebp, 8(%edi)
- movl 12(%esi), %eax
- # saved r[2]
- # ############### Calculate word 3
- xorl %ebp, %ebp
- # sqr a[3]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 8(%esi), %eax
- adcl $0, %ebp
- movl 4(%esi), %edx
- # sqr a[2]*a[1]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 12(%esi), %eax
- adcl $0, %ebp
- movl %ebx, 12(%edi)
- movl 4(%esi), %edx
- # saved r[3]
- # ############### Calculate word 4
- xorl %ebx, %ebx
- # sqr a[3]*a[1]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %eax
- adcl $0, %ebx
- # sqr a[2]*a[2]
- mull %eax
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %edx
- adcl $0, %ebx
- movl %ecx, 16(%edi)
- movl 12(%esi), %eax
- # saved r[4]
- # ############### Calculate word 5
- xorl %ecx, %ecx
- # sqr a[3]*a[2]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 12(%esi), %eax
- adcl $0, %ecx
- movl %ebp, 20(%edi)
- # saved r[5]
- # ############### Calculate word 6
- xorl %ebp, %ebp
- # sqr a[3]*a[3]
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- adcl $0, %ebp
- movl %ebx, 24(%edi)
- # saved r[6]
- movl %ecx, 28(%edi)
- popl %ebx
- popl %ebp
- popl %edi
- popl %esi
- ret
-.bn_sqr_comba4_end:
- .size bn_sqr_comba4,.bn_sqr_comba4_end-bn_sqr_comba4
-.ident "desasm.pl"
+++ /dev/null
- .text
- .align 3
- .globl bn_sqr_comba8
- .ent bn_sqr_comba8
-bn_sqr_comba8:
-bn_sqr_comba8..ng:
- .frame $30,0,$26,0
- .prologue 0
-
- ldq $0, 0($17)
- ldq $1, 8($17)
- ldq $2, 16($17)
- ldq $3, 24($17)
- ldq $4, 32($17)
- ldq $5, 40($17)
- ldq $6, 48($17)
- ldq $7, 56($17)
- bis $31, $31, $23
- mulq $0, $0, $8
- umulh $0, $0, $22
- stq $8, 0($16)
- bis $31, $31, $8
- mulq $1, $0, $24
- umulh $1, $0, $25
- cmplt $24, $31, $27
- cmplt $25, $31, $28
- addq $24, $24, $24
- addq $25, $25, $25
- addq $25, $27, $25
- addq $8, $28, $8
- addq $22, $24, $22
- addq $23, $25, $23
- cmpult $22, $24, $21
- cmpult $23, $25, $20
- addq $23, $21, $23
- addq $8, $20, $8
- stq $22, 8($16)
- bis $31, $31, $22
- mulq $1, $1, $19
- umulh $1, $1, $18
- addq $23, $19, $23
- addq $8, $18, $8
- cmpult $23, $19, $17
- cmpult $8, $18, $27
- addq $8, $17, $8
- addq $22, $27, $22
- mulq $2, $0, $28
- umulh $2, $0, $24
- cmplt $28, $31, $25
- cmplt $24, $31, $21
- addq $28, $28, $28
- addq $24, $24, $24
- addq $24, $25, $24
- addq $22, $21, $22
- addq $23, $28, $23
- addq $8, $24, $8
- cmpult $23, $28, $20
- cmpult $8, $24, $19
- addq $8, $20, $8
- addq $22, $19, $22
- stq $23, 16($16)
- bis $31, $31, $23
- mulq $2, $1, $18
- umulh $2, $1, $17
- cmplt $18, $31, $27
- cmplt $17, $31, $25
- addq $18, $18, $18
- addq $17, $17, $17
- addq $17, $27, $17
- addq $23, $25, $23
- addq $8, $18, $8
- addq $22, $17, $22
- cmpult $8, $18, $21
- cmpult $22, $17, $28
- addq $22, $21, $22
- addq $23, $28, $23
- mulq $3, $0, $24
- umulh $3, $0, $20
- cmplt $24, $31, $19
- cmplt $20, $31, $27
- addq $24, $24, $24
- addq $20, $20, $20
- addq $20, $19, $20
- addq $23, $27, $23
- addq $8, $24, $8
- addq $22, $20, $22
- cmpult $8, $24, $25
- cmpult $22, $20, $18
- addq $22, $25, $22
- addq $23, $18, $23
- stq $8, 24($16)
- bis $31, $31, $8
- mulq $2, $2, $17
- umulh $2, $2, $21
- addq $22, $17, $22
- addq $23, $21, $23
- cmpult $22, $17, $28
- cmpult $23, $21, $19
- addq $23, $28, $23
- addq $8, $19, $8
- mulq $3, $1, $27
- umulh $3, $1, $24
- cmplt $27, $31, $20
- cmplt $24, $31, $25
- addq $27, $27, $27
- addq $24, $24, $24
- addq $24, $20, $24
- addq $8, $25, $8
- addq $22, $27, $22
- addq $23, $24, $23
- cmpult $22, $27, $18
- cmpult $23, $24, $17
- addq $23, $18, $23
- addq $8, $17, $8
- mulq $4, $0, $21
- umulh $4, $0, $28
- cmplt $21, $31, $19
- cmplt $28, $31, $20
- addq $21, $21, $21
- addq $28, $28, $28
- addq $28, $19, $28
- addq $8, $20, $8
- addq $22, $21, $22
- addq $23, $28, $23
- cmpult $22, $21, $25
- cmpult $23, $28, $27
- addq $23, $25, $23
- addq $8, $27, $8
- stq $22, 32($16)
- bis $31, $31, $22
- mulq $3, $2, $24
- umulh $3, $2, $18
- cmplt $24, $31, $17
- cmplt $18, $31, $19
- addq $24, $24, $24
- addq $18, $18, $18
- addq $18, $17, $18
- addq $22, $19, $22
- addq $23, $24, $23
- addq $8, $18, $8
- cmpult $23, $24, $20
- cmpult $8, $18, $21
- addq $8, $20, $8
- addq $22, $21, $22
- mulq $4, $1, $28
- umulh $4, $1, $25
- cmplt $28, $31, $27
- cmplt $25, $31, $17
- addq $28, $28, $28
- addq $25, $25, $25
- addq $25, $27, $25
- addq $22, $17, $22
- addq $23, $28, $23
- addq $8, $25, $8
- cmpult $23, $28, $19
- cmpult $8, $25, $24
- addq $8, $19, $8
- addq $22, $24, $22
- mulq $5, $0, $18
- umulh $5, $0, $20
- cmplt $18, $31, $21
- cmplt $20, $31, $27
- addq $18, $18, $18
- addq $20, $20, $20
- addq $20, $21, $20
- addq $22, $27, $22
- addq $23, $18, $23
- addq $8, $20, $8
- cmpult $23, $18, $17
- cmpult $8, $20, $28
- addq $8, $17, $8
- addq $22, $28, $22
- stq $23, 40($16)
- bis $31, $31, $23
- mulq $3, $3, $25
- umulh $3, $3, $19
- addq $8, $25, $8
- addq $22, $19, $22
- cmpult $8, $25, $24
- cmpult $22, $19, $21
- addq $22, $24, $22
- addq $23, $21, $23
- mulq $4, $2, $27
- umulh $4, $2, $18
- cmplt $27, $31, $20
- cmplt $18, $31, $17
- addq $27, $27, $27
- addq $18, $18, $18
- addq $18, $20, $18
- addq $23, $17, $23
- addq $8, $27, $8
- addq $22, $18, $22
- cmpult $8, $27, $28
- cmpult $22, $18, $25
- addq $22, $28, $22
- addq $23, $25, $23
- mulq $5, $1, $19
- umulh $5, $1, $24
- cmplt $19, $31, $21
- cmplt $24, $31, $20
- addq $19, $19, $19
- addq $24, $24, $24
- addq $24, $21, $24
- addq $23, $20, $23
- addq $8, $19, $8
- addq $22, $24, $22
- cmpult $8, $19, $17
- cmpult $22, $24, $27
- addq $22, $17, $22
- addq $23, $27, $23
- mulq $6, $0, $18
- umulh $6, $0, $28
- cmplt $18, $31, $25
- cmplt $28, $31, $21
- addq $18, $18, $18
- addq $28, $28, $28
- addq $28, $25, $28
- addq $23, $21, $23
- addq $8, $18, $8
- addq $22, $28, $22
- cmpult $8, $18, $20
- cmpult $22, $28, $19
- addq $22, $20, $22
- addq $23, $19, $23
- stq $8, 48($16)
- bis $31, $31, $8
- mulq $4, $3, $24
- umulh $4, $3, $17
- cmplt $24, $31, $27
- cmplt $17, $31, $25
- addq $24, $24, $24
- addq $17, $17, $17
- addq $17, $27, $17
- addq $8, $25, $8
- addq $22, $24, $22
- addq $23, $17, $23
- cmpult $22, $24, $21
- cmpult $23, $17, $18
- addq $23, $21, $23
- addq $8, $18, $8
- mulq $5, $2, $28
- umulh $5, $2, $20
- cmplt $28, $31, $19
- cmplt $20, $31, $27
- addq $28, $28, $28
- addq $20, $20, $20
- addq $20, $19, $20
- addq $8, $27, $8
- addq $22, $28, $22
- addq $23, $20, $23
- cmpult $22, $28, $25
- cmpult $23, $20, $24
- addq $23, $25, $23
- addq $8, $24, $8
- mulq $6, $1, $17
- umulh $6, $1, $21
- cmplt $17, $31, $18
- cmplt $21, $31, $19
- addq $17, $17, $17
- addq $21, $21, $21
- addq $21, $18, $21
- addq $8, $19, $8
- addq $22, $17, $22
- addq $23, $21, $23
- cmpult $22, $17, $27
- cmpult $23, $21, $28
- addq $23, $27, $23
- addq $8, $28, $8
- mulq $7, $0, $20
- umulh $7, $0, $25
- cmplt $20, $31, $24
- cmplt $25, $31, $18
- addq $20, $20, $20
- addq $25, $25, $25
- addq $25, $24, $25
- addq $8, $18, $8
- addq $22, $20, $22
- addq $23, $25, $23
- cmpult $22, $20, $19
- cmpult $23, $25, $17
- addq $23, $19, $23
- addq $8, $17, $8
- stq $22, 56($16)
- bis $31, $31, $22
- mulq $4, $4, $21
- umulh $4, $4, $27
- addq $23, $21, $23
- addq $8, $27, $8
- cmpult $23, $21, $28
- cmpult $8, $27, $24
- addq $8, $28, $8
- addq $22, $24, $22
- mulq $5, $3, $18
- umulh $5, $3, $20
- cmplt $18, $31, $25
- cmplt $20, $31, $19
- addq $18, $18, $18
- addq $20, $20, $20
- addq $20, $25, $20
- addq $22, $19, $22
- addq $23, $18, $23
- addq $8, $20, $8
- cmpult $23, $18, $17
- cmpult $8, $20, $21
- addq $8, $17, $8
- addq $22, $21, $22
- mulq $6, $2, $27
- umulh $6, $2, $28
- cmplt $27, $31, $24
- cmplt $28, $31, $25
- addq $27, $27, $27
- addq $28, $28, $28
- addq $28, $24, $28
- addq $22, $25, $22
- addq $23, $27, $23
- addq $8, $28, $8
- cmpult $23, $27, $19
- cmpult $8, $28, $18
- addq $8, $19, $8
- addq $22, $18, $22
- mulq $7, $1, $20
- umulh $7, $1, $17
- cmplt $20, $31, $21
- cmplt $17, $31, $24
- addq $20, $20, $20
- addq $17, $17, $17
- addq $17, $21, $17
- addq $22, $24, $22
- addq $23, $20, $23
- addq $8, $17, $8
- cmpult $23, $20, $25
- cmpult $8, $17, $27
- addq $8, $25, $8
- addq $22, $27, $22
- stq $23, 64($16)
- bis $31, $31, $23
- mulq $5, $4, $28
- umulh $5, $4, $19
- cmplt $28, $31, $18
- cmplt $19, $31, $21
- addq $28, $28, $28
- addq $19, $19, $19
- addq $19, $18, $19
- addq $23, $21, $23
- addq $8, $28, $8
- addq $22, $19, $22
- cmpult $8, $28, $24
- cmpult $22, $19, $20
- addq $22, $24, $22
- addq $23, $20, $23
- mulq $6, $3, $17
- umulh $6, $3, $25
- cmplt $17, $31, $27
- cmplt $25, $31, $18
- addq $17, $17, $17
- addq $25, $25, $25
- addq $25, $27, $25
- addq $23, $18, $23
- addq $8, $17, $8
- addq $22, $25, $22
- cmpult $8, $17, $21
- cmpult $22, $25, $28
- addq $22, $21, $22
- addq $23, $28, $23
- mulq $7, $2, $19
- umulh $7, $2, $24
- cmplt $19, $31, $20
- cmplt $24, $31, $27
- addq $19, $19, $19
- addq $24, $24, $24
- addq $24, $20, $24
- addq $23, $27, $23
- addq $8, $19, $8
- addq $22, $24, $22
- cmpult $8, $19, $18
- cmpult $22, $24, $17
- addq $22, $18, $22
- addq $23, $17, $23
- stq $8, 72($16)
- bis $31, $31, $8
- mulq $5, $5, $25
- umulh $5, $5, $21
- addq $22, $25, $22
- addq $23, $21, $23
- cmpult $22, $25, $28
- cmpult $23, $21, $20
- addq $23, $28, $23
- addq $8, $20, $8
- mulq $6, $4, $27
- umulh $6, $4, $19
- cmplt $27, $31, $24
- cmplt $19, $31, $18
- addq $27, $27, $27
- addq $19, $19, $19
- addq $19, $24, $19
- addq $8, $18, $8
- addq $22, $27, $22
- addq $23, $19, $23
- cmpult $22, $27, $17
- cmpult $23, $19, $25
- addq $23, $17, $23
- addq $8, $25, $8
- mulq $7, $3, $21
- umulh $7, $3, $28
- cmplt $21, $31, $20
- cmplt $28, $31, $24
- addq $21, $21, $21
- addq $28, $28, $28
- addq $28, $20, $28
- addq $8, $24, $8
- addq $22, $21, $22
- addq $23, $28, $23
- cmpult $22, $21, $18
- cmpult $23, $28, $27
- addq $23, $18, $23
- addq $8, $27, $8
- stq $22, 80($16)
- bis $31, $31, $22
- mulq $6, $5, $19
- umulh $6, $5, $17
- cmplt $19, $31, $25
- cmplt $17, $31, $20
- addq $19, $19, $19
- addq $17, $17, $17
- addq $17, $25, $17
- addq $22, $20, $22
- addq $23, $19, $23
- addq $8, $17, $8
- cmpult $23, $19, $24
- cmpult $8, $17, $21
- addq $8, $24, $8
- addq $22, $21, $22
- mulq $7, $4, $28
- umulh $7, $4, $18
- cmplt $28, $31, $27
- cmplt $18, $31, $25
- addq $28, $28, $28
- addq $18, $18, $18
- addq $18, $27, $18
- addq $22, $25, $22
- addq $23, $28, $23
- addq $8, $18, $8
- cmpult $23, $28, $20
- cmpult $8, $18, $19
- addq $8, $20, $8
- addq $22, $19, $22
- stq $23, 88($16)
- bis $31, $31, $23
- mulq $6, $6, $17
- umulh $6, $6, $24
- addq $8, $17, $8
- addq $22, $24, $22
- cmpult $8, $17, $21
- cmpult $22, $24, $27
- addq $22, $21, $22
- addq $23, $27, $23
- mulq $7, $5, $25
- umulh $7, $5, $28
- cmplt $25, $31, $18
- cmplt $28, $31, $20
- addq $25, $25, $25
- addq $28, $28, $28
- addq $28, $18, $28
- addq $23, $20, $23
- addq $8, $25, $8
- addq $22, $28, $22
- cmpult $8, $25, $19
- cmpult $22, $28, $17
- addq $22, $19, $22
- addq $23, $17, $23
- stq $8, 96($16)
- bis $31, $31, $8
- mulq $7, $6, $24
- umulh $7, $6, $21
- cmplt $24, $31, $27
- cmplt $21, $31, $18
- addq $24, $24, $24
- addq $21, $21, $21
- addq $21, $27, $21
- addq $8, $18, $8
- addq $22, $24, $22
- addq $23, $21, $23
- cmpult $22, $24, $20
- cmpult $23, $21, $25
- addq $23, $20, $23
- addq $8, $25, $8
- stq $22, 104($16)
- bis $31, $31, $22
- mulq $7, $7, $28
- umulh $7, $7, $19
- addq $23, $28, $23
- addq $8, $19, $8
- cmpult $23, $28, $17
- cmpult $8, $19, $27
- addq $8, $17, $8
- addq $22, $27, $22
- stq $23, 112($16)
- stq $8, 120($16)
- ret $31,($26),1
- .end bn_sqr_comba8
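The Alpha routine above works on 64-bit words, so every partial product needs mulq and umulh to obtain the two halves of the 128-bit result, cmplt to save the top bit that is shifted out when an off-diagonal term a[i]*a[j] is doubled, and cmpult to recover the carry of each accumulation. A rough C equivalent of that recurring pattern is sketched below; the function name and the use of unsigned __int128 (a GCC/Clang extension) are assumptions for illustration and do not come from the deleted file.

#include <stdint.h>

/* Fold 2*ai*aj (an off-diagonal squaring term) into c0:c1:c2. */
static void sqr_add_c2(uint64_t ai, uint64_t aj,
                       uint64_t *c0, uint64_t *c1, uint64_t *c2)
{
    unsigned __int128 p = (unsigned __int128)ai * aj;
    uint64_t lo = (uint64_t)p;           /* mulq  ai, aj, lo             */
    uint64_t hi = (uint64_t)(p >> 64);   /* umulh ai, aj, hi             */

    uint64_t lo_top = lo >> 63;          /* cmplt lo, 0: bit lost by 2*lo */
    uint64_t hi_top = hi >> 63;          /* cmplt hi, 0: bit lost by 2*hi */
    lo += lo;                            /* addq lo, lo, lo              */
    hi  = hi + hi + lo_top;              /* addq hi, hi, hi; addq lo_top */
    *c2 += hi_top;                       /* top bit goes straight to c2  */

    *c0 += lo;
    uint64_t carry0 = (*c0 < lo);        /* cmpult c0, lo                */
    *c1 += hi;
    uint64_t carry1 = (*c1 < hi);        /* cmpult c1, hi                */
    *c1 += carry0;
    carry1 += (*c1 < carry0);            /* extra check keeps the sketch exact */
    *c2 += carry1;
}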
+++ /dev/null
-int abc(a,b,c,d,e,f,g,h,i,j)
-unsigned long a,b,c,d,e,f,g,h,i,j;
- {
- gg(g);
- if (g)
- gg(h);
- gg(i);
- }
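The next file removed below is the plain ELF build of the word-level primitives. Its largest routine, bn_mul_add_words, computes r[i] = r[i] + a[i]*w with carry propagation, unrolled eight rounds per loop iteration. A minimal C sketch of that operation follows; the names are illustrative and not taken from the file.

#include <stdint.h>

typedef uint32_t BN_ULONG;

/* r[i] += a[i] * w for num words; returns the final carry word. */
BN_ULONG mul_add_words(BN_ULONG *r, const BN_ULONG *a, int num, BN_ULONG w)
{
    BN_ULONG carry = 0;

    for (int i = 0; i < num; i++) {
        uint64_t t = (uint64_t)a[i] * w;  /* mull %ebp: edx:eax = a[i]*w   */
        t += carry;                       /* addl %esi, %eax; adcl $0,%edx */
        t += r[i];                        /* add in the existing r[i] word */
        r[i]  = (BN_ULONG)t;              /* movl %eax, (%edi)             */
        carry = (BN_ULONG)(t >> 32);      /* movl %edx, %esi               */
    }
    return carry;                         /* movl %esi, %eax               */
}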
+++ /dev/null
- # Don't even think of reading this code
- # It was automatically generated by bn-586.pl
-	# Which is a perl program used to generate the x86 assembler for
-	# any of elf, a.out, BSDI, Win32, or Solaris
- # eric <eay@cryptsoft.com>
-
- .file "bn-586.s"
- .version "01.01"
-gcc2_compiled.:
-.text
- .align 16
-.globl bn_mul_add_words
- .type bn_mul_add_words,@function
-bn_mul_add_words:
- pushl %ebp
- pushl %ebx
- pushl %esi
- pushl %edi
-
-
- xorl %esi, %esi
- movl 20(%esp), %edi
- movl 28(%esp), %ecx
- movl 24(%esp), %ebx
- andl $4294967288, %ecx
- movl 32(%esp), %ebp
- pushl %ecx
- jz .L000maw_finish
-.L001maw_loop:
- movl %ecx, (%esp)
- # Round 0
- movl (%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl (%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, (%edi)
- movl %edx, %esi
- # Round 4
- movl 4(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 4(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 4(%edi)
- movl %edx, %esi
- # Round 8
- movl 8(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 8(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 8(%edi)
- movl %edx, %esi
- # Round 12
- movl 12(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 12(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 12(%edi)
- movl %edx, %esi
- # Round 16
- movl 16(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 16(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 16(%edi)
- movl %edx, %esi
- # Round 20
- movl 20(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 20(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 20(%edi)
- movl %edx, %esi
- # Round 24
- movl 24(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 24(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 24(%edi)
- movl %edx, %esi
- # Round 28
- movl 28(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 28(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 28(%edi)
- movl %edx, %esi
-
- movl (%esp), %ecx
- addl $32, %ebx
- addl $32, %edi
- subl $8, %ecx
- jnz .L001maw_loop
-.L000maw_finish:
- movl 32(%esp), %ecx
- andl $7, %ecx
- jnz .L002maw_finish2
- jmp .L003maw_end
-.align 16
-.L002maw_finish2:
- # Tail Round 0
- movl (%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl (%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- decl %ecx
- movl %eax, (%edi)
- movl %edx, %esi
- jz .L003maw_end
- # Tail Round 1
- movl 4(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 4(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- decl %ecx
- movl %eax, 4(%edi)
- movl %edx, %esi
- jz .L003maw_end
- # Tail Round 2
- movl 8(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 8(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- decl %ecx
- movl %eax, 8(%edi)
- movl %edx, %esi
- jz .L003maw_end
- # Tail Round 3
- movl 12(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 12(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- decl %ecx
- movl %eax, 12(%edi)
- movl %edx, %esi
- jz .L003maw_end
- # Tail Round 4
- movl 16(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 16(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- decl %ecx
- movl %eax, 16(%edi)
- movl %edx, %esi
- jz .L003maw_end
- # Tail Round 5
- movl 20(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 20(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- decl %ecx
- movl %eax, 20(%edi)
- movl %edx, %esi
- jz .L003maw_end
- # Tail Round 6
- movl 24(%ebx), %eax
- mull %ebp
- addl %esi, %eax
- movl 24(%edi), %esi
- adcl $0, %edx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 24(%edi)
- movl %edx, %esi
-.L003maw_end:
- movl %esi, %eax
- popl %ecx
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.bn_mul_add_words_end:
- .size bn_mul_add_words,.bn_mul_add_words_end-bn_mul_add_words
-.ident "bn_mul_add_words"
-.text
- .align 16
-.globl bn_mul_words
- .type bn_mul_words,@function
-bn_mul_words:
- pushl %ebp
- pushl %ebx
- pushl %esi
- pushl %edi
-
-
- xorl %esi, %esi
- movl 20(%esp), %edi
- movl 24(%esp), %ebx
- movl 28(%esp), %ebp
- movl 32(%esp), %ecx
- andl $4294967288, %ebp
- jz .L004mw_finish
-.L005mw_loop:
- # Round 0
- movl (%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, (%edi)
- movl %edx, %esi
- # Round 4
- movl 4(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 4(%edi)
- movl %edx, %esi
- # Round 8
- movl 8(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 8(%edi)
- movl %edx, %esi
- # Round 12
- movl 12(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 12(%edi)
- movl %edx, %esi
- # Round 16
- movl 16(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 16(%edi)
- movl %edx, %esi
- # Round 20
- movl 20(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 20(%edi)
- movl %edx, %esi
- # Round 24
- movl 24(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 24(%edi)
- movl %edx, %esi
- # Round 28
- movl 28(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 28(%edi)
- movl %edx, %esi
-
- addl $32, %ebx
- addl $32, %edi
- subl $8, %ebp
- jz .L004mw_finish
- jmp .L005mw_loop
-.L004mw_finish:
- movl 28(%esp), %ebp
- andl $7, %ebp
- jnz .L006mw_finish2
- jmp .L007mw_end
-.align 16
-.L006mw_finish2:
- # Tail Round 0
- movl (%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, (%edi)
- movl %edx, %esi
- decl %ebp
- jz .L007mw_end
- # Tail Round 1
- movl 4(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 4(%edi)
- movl %edx, %esi
- decl %ebp
- jz .L007mw_end
- # Tail Round 2
- movl 8(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 8(%edi)
- movl %edx, %esi
- decl %ebp
- jz .L007mw_end
- # Tail Round 3
- movl 12(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 12(%edi)
- movl %edx, %esi
- decl %ebp
- jz .L007mw_end
- # Tail Round 4
- movl 16(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 16(%edi)
- movl %edx, %esi
- decl %ebp
- jz .L007mw_end
- # Tail Round 5
- movl 20(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 20(%edi)
- movl %edx, %esi
- decl %ebp
- jz .L007mw_end
- # Tail Round 6
- movl 24(%ebx), %eax
- mull %ecx
- addl %esi, %eax
- adcl $0, %edx
- movl %eax, 24(%edi)
- movl %edx, %esi
-.L007mw_end:
- movl %esi, %eax
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.bn_mul_words_end:
- .size bn_mul_words,.bn_mul_words_end-bn_mul_words
-.ident "bn_mul_words"
-.text
- .align 16
-.globl bn_sqr_words
- .type bn_sqr_words,@function
-bn_sqr_words:
- pushl %ebp
- pushl %ebx
- pushl %esi
- pushl %edi
-
-
- movl 20(%esp), %esi
- movl 24(%esp), %edi
- movl 28(%esp), %ebx
- andl $4294967288, %ebx
- jz .L008sw_finish
-.L009sw_loop:
- # Round 0
- movl (%edi), %eax
- mull %eax
- movl %eax, (%esi)
- movl %edx, 4(%esi)
- # Round 4
- movl 4(%edi), %eax
- mull %eax
- movl %eax, 8(%esi)
- movl %edx, 12(%esi)
- # Round 8
- movl 8(%edi), %eax
- mull %eax
- movl %eax, 16(%esi)
- movl %edx, 20(%esi)
- # Round 12
- movl 12(%edi), %eax
- mull %eax
- movl %eax, 24(%esi)
- movl %edx, 28(%esi)
- # Round 16
- movl 16(%edi), %eax
- mull %eax
- movl %eax, 32(%esi)
- movl %edx, 36(%esi)
- # Round 20
- movl 20(%edi), %eax
- mull %eax
- movl %eax, 40(%esi)
- movl %edx, 44(%esi)
- # Round 24
- movl 24(%edi), %eax
- mull %eax
- movl %eax, 48(%esi)
- movl %edx, 52(%esi)
- # Round 28
- movl 28(%edi), %eax
- mull %eax
- movl %eax, 56(%esi)
- movl %edx, 60(%esi)
-
- addl $32, %edi
- addl $64, %esi
- subl $8, %ebx
- jnz .L009sw_loop
-.L008sw_finish:
- movl 28(%esp), %ebx
- andl $7, %ebx
- jz .L010sw_end
- # Tail Round 0
- movl (%edi), %eax
- mull %eax
- movl %eax, (%esi)
- decl %ebx
- movl %edx, 4(%esi)
- jz .L010sw_end
- # Tail Round 1
- movl 4(%edi), %eax
- mull %eax
- movl %eax, 8(%esi)
- decl %ebx
- movl %edx, 12(%esi)
- jz .L010sw_end
- # Tail Round 2
- movl 8(%edi), %eax
- mull %eax
- movl %eax, 16(%esi)
- decl %ebx
- movl %edx, 20(%esi)
- jz .L010sw_end
- # Tail Round 3
- movl 12(%edi), %eax
- mull %eax
- movl %eax, 24(%esi)
- decl %ebx
- movl %edx, 28(%esi)
- jz .L010sw_end
- # Tail Round 4
- movl 16(%edi), %eax
- mull %eax
- movl %eax, 32(%esi)
- decl %ebx
- movl %edx, 36(%esi)
- jz .L010sw_end
- # Tail Round 5
- movl 20(%edi), %eax
- mull %eax
- movl %eax, 40(%esi)
- decl %ebx
- movl %edx, 44(%esi)
- jz .L010sw_end
- # Tail Round 6
- movl 24(%edi), %eax
- mull %eax
- movl %eax, 48(%esi)
- movl %edx, 52(%esi)
-.L010sw_end:
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.bn_sqr_words_end:
- .size bn_sqr_words,.bn_sqr_words_end-bn_sqr_words
-.ident "bn_sqr_words"
-.text
- .align 16
-.globl bn_div64
- .type bn_div64,@function
-bn_div64:
- pushl %ebp
- pushl %ebx
- pushl %esi
- pushl %edi
-
- movl 20(%esp), %edx
- movl 24(%esp), %eax
- movl 28(%esp), %ebx
- divl %ebx
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.bn_div64_end:
- .size bn_div64,.bn_div64_end-bn_div64
-.ident "bn_div64"
-.text
- .align 16
-.globl bn_add_words
- .type bn_add_words,@function
-bn_add_words:
- pushl %ebp
- pushl %ebx
- pushl %esi
- pushl %edi
-
-
- movl 20(%esp), %ebx
- movl 24(%esp), %esi
- movl 28(%esp), %edi
- movl 32(%esp), %ebp
- xorl %eax, %eax
- andl $4294967288, %ebp
- jz .L011aw_finish
-.L012aw_loop:
- # Round 0
- movl (%esi), %ecx
- movl (%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- movl %ecx, (%ebx)
- # Round 1
- movl 4(%esi), %ecx
- movl 4(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 4(%ebx)
- # Round 2
- movl 8(%esi), %ecx
- movl 8(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 8(%ebx)
- # Round 3
- movl 12(%esi), %ecx
- movl 12(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 12(%ebx)
- # Round 4
- movl 16(%esi), %ecx
- movl 16(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 16(%ebx)
- # Round 5
- movl 20(%esi), %ecx
- movl 20(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 20(%ebx)
- # Round 6
- movl 24(%esi), %ecx
- movl 24(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 24(%ebx)
- # Round 7
- movl 28(%esi), %ecx
- movl 28(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 28(%ebx)
-
- addl $32, %esi
- addl $32, %edi
- addl $32, %ebx
- subl $8, %ebp
- jnz .L012aw_loop
-.L011aw_finish:
- movl 32(%esp), %ebp
- andl $7, %ebp
- jz .L013aw_end
- # Tail Round 0
- movl (%esi), %ecx
- movl (%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, (%ebx)
- jz .L013aw_end
- # Tail Round 1
- movl 4(%esi), %ecx
- movl 4(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 4(%ebx)
- jz .L013aw_end
- # Tail Round 2
- movl 8(%esi), %ecx
- movl 8(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 8(%ebx)
- jz .L013aw_end
- # Tail Round 3
- movl 12(%esi), %ecx
- movl 12(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 12(%ebx)
- jz .L013aw_end
- # Tail Round 4
- movl 16(%esi), %ecx
- movl 16(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 16(%ebx)
- jz .L013aw_end
- # Tail Round 5
- movl 20(%esi), %ecx
- movl 20(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 20(%ebx)
- jz .L013aw_end
- # Tail Round 6
- movl 24(%esi), %ecx
- movl 24(%edi), %edx
- addl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- addl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 24(%ebx)
-.L013aw_end:
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.bn_add_words_end:
- .size bn_add_words,.bn_add_words_end-bn_add_words
-.ident "bn_add_words"
-.text
- .align 16
-.globl bn_sub_words
- .type bn_sub_words,@function
-bn_sub_words:
- pushl %ebp
- pushl %ebx
- pushl %esi
- pushl %edi
-
-
- movl 20(%esp), %ebx
- movl 24(%esp), %esi
- movl 28(%esp), %edi
- movl 32(%esp), %ebp
- xorl %eax, %eax
- andl $4294967288, %ebp
- jz .L014aw_finish
-.L015aw_loop:
- # Round 0
- movl (%esi), %ecx
- movl (%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- movl %ecx, (%ebx)
- # Round 1
- movl 4(%esi), %ecx
- movl 4(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 4(%ebx)
- # Round 2
- movl 8(%esi), %ecx
- movl 8(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 8(%ebx)
- # Round 3
- movl 12(%esi), %ecx
- movl 12(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 12(%ebx)
- # Round 4
- movl 16(%esi), %ecx
- movl 16(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 16(%ebx)
- # Round 5
- movl 20(%esi), %ecx
- movl 20(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 20(%ebx)
- # Round 6
- movl 24(%esi), %ecx
- movl 24(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 24(%ebx)
- # Round 7
- movl 28(%esi), %ecx
- movl 28(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 28(%ebx)
-
- addl $32, %esi
- addl $32, %edi
- addl $32, %ebx
- subl $8, %ebp
- jnz .L015aw_loop
-.L014aw_finish:
- movl 32(%esp), %ebp
- andl $7, %ebp
- jz .L016aw_end
- # Tail Round 0
- movl (%esi), %ecx
- movl (%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, (%ebx)
- jz .L016aw_end
- # Tail Round 1
- movl 4(%esi), %ecx
- movl 4(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 4(%ebx)
- jz .L016aw_end
- # Tail Round 2
- movl 8(%esi), %ecx
- movl 8(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 8(%ebx)
- jz .L016aw_end
- # Tail Round 3
- movl 12(%esi), %ecx
- movl 12(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 12(%ebx)
- jz .L016aw_end
- # Tail Round 4
- movl 16(%esi), %ecx
- movl 16(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 16(%ebx)
- jz .L016aw_end
- # Tail Round 5
- movl 20(%esi), %ecx
- movl 20(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- decl %ebp
- movl %ecx, 20(%ebx)
- jz .L016aw_end
- # Tail Round 6
- movl 24(%esi), %ecx
- movl 24(%edi), %edx
- subl %eax, %ecx
- movl $0, %eax
- adcl %eax, %eax
- subl %edx, %ecx
- adcl $0, %eax
- movl %ecx, 24(%ebx)
-.L016aw_end:
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.bn_sub_words_end:
- .size bn_sub_words,.bn_sub_words_end-bn_sub_words
-.ident "bn_sub_words"
-.text
- .align 16
-.globl bn_mul_comba8
- .type bn_mul_comba8,@function
-bn_mul_comba8:
- pushl %esi
- movl 12(%esp), %esi
- pushl %edi
- movl 20(%esp), %edi
- pushl %ebp
- pushl %ebx
- xorl %ebx, %ebx
- movl (%esi), %eax
- xorl %ecx, %ecx
- movl (%edi), %edx
- # ################## Calculate word 0
- xorl %ebp, %ebp
- # mul a[0]*b[0]
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl (%edi), %edx
- adcl $0, %ebp
- movl %ebx, (%eax)
- movl 4(%esi), %eax
- # saved r[0]
- # ################## Calculate word 1
- xorl %ebx, %ebx
- # mul a[1]*b[0]
- mull %edx
- addl %eax, %ecx
- movl (%esi), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- # mul a[0]*b[1]
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl (%edi), %edx
- adcl $0, %ebx
- movl %ecx, 4(%eax)
- movl 8(%esi), %eax
- # saved r[1]
- # ################## Calculate word 2
- xorl %ecx, %ecx
- # mul a[2]*b[0]
- mull %edx
- addl %eax, %ebp
- movl 4(%esi), %eax
- adcl %edx, %ebx
- movl 4(%edi), %edx
- adcl $0, %ecx
- # mul a[1]*b[1]
- mull %edx
- addl %eax, %ebp
- movl (%esi), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- # mul a[0]*b[2]
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl (%edi), %edx
- adcl $0, %ecx
- movl %ebp, 8(%eax)
- movl 12(%esi), %eax
- # saved r[2]
- # ################## Calculate word 3
- xorl %ebp, %ebp
- # mul a[3]*b[0]
- mull %edx
- addl %eax, %ebx
- movl 8(%esi), %eax
- adcl %edx, %ecx
- movl 4(%edi), %edx
- adcl $0, %ebp
- # mul a[2]*b[1]
- mull %edx
- addl %eax, %ebx
- movl 4(%esi), %eax
- adcl %edx, %ecx
- movl 8(%edi), %edx
- adcl $0, %ebp
- # mul a[1]*b[2]
- mull %edx
- addl %eax, %ebx
- movl (%esi), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- # mul a[0]*b[3]
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl (%edi), %edx
- adcl $0, %ebp
- movl %ebx, 12(%eax)
- movl 16(%esi), %eax
- # saved r[3]
- # ################## Calculate word 4
- xorl %ebx, %ebx
- # mul a[4]*b[0]
- mull %edx
- addl %eax, %ecx
- movl 12(%esi), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- # mul a[3]*b[1]
- mull %edx
- addl %eax, %ecx
- movl 8(%esi), %eax
- adcl %edx, %ebp
- movl 8(%edi), %edx
- adcl $0, %ebx
- # mul a[2]*b[2]
- mull %edx
- addl %eax, %ecx
- movl 4(%esi), %eax
- adcl %edx, %ebp
- movl 12(%edi), %edx
- adcl $0, %ebx
- # mul a[1]*b[3]
- mull %edx
- addl %eax, %ecx
- movl (%esi), %eax
- adcl %edx, %ebp
- movl 16(%edi), %edx
- adcl $0, %ebx
- # mul a[0]*b[4]
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl (%edi), %edx
- adcl $0, %ebx
- movl %ecx, 16(%eax)
- movl 20(%esi), %eax
- # saved r[4]
- # ################## Calculate word 5
- xorl %ecx, %ecx
- # mul a[5]*b[0]
- mull %edx
- addl %eax, %ebp
- movl 16(%esi), %eax
- adcl %edx, %ebx
- movl 4(%edi), %edx
- adcl $0, %ecx
- # mul a[4]*b[1]
- mull %edx
- addl %eax, %ebp
- movl 12(%esi), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- # mul a[3]*b[2]
- mull %edx
- addl %eax, %ebp
- movl 8(%esi), %eax
- adcl %edx, %ebx
- movl 12(%edi), %edx
- adcl $0, %ecx
- # mul a[2]*b[3]
- mull %edx
- addl %eax, %ebp
- movl 4(%esi), %eax
- adcl %edx, %ebx
- movl 16(%edi), %edx
- adcl $0, %ecx
- # mul a[1]*b[4]
- mull %edx
- addl %eax, %ebp
- movl (%esi), %eax
- adcl %edx, %ebx
- movl 20(%edi), %edx
- adcl $0, %ecx
- # mul a[0]*b[5]
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl (%edi), %edx
- adcl $0, %ecx
- movl %ebp, 20(%eax)
- movl 24(%esi), %eax
- # saved r[5]
- # ################## Calculate word 6
- xorl %ebp, %ebp
- # mul a[6]*b[0]
- mull %edx
- addl %eax, %ebx
- movl 20(%esi), %eax
- adcl %edx, %ecx
- movl 4(%edi), %edx
- adcl $0, %ebp
- # mul a[5]*b[1]
- mull %edx
- addl %eax, %ebx
- movl 16(%esi), %eax
- adcl %edx, %ecx
- movl 8(%edi), %edx
- adcl $0, %ebp
- # mul a[4]*b[2]
- mull %edx
- addl %eax, %ebx
- movl 12(%esi), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- # mul a[3]*b[3]
- mull %edx
- addl %eax, %ebx
- movl 8(%esi), %eax
- adcl %edx, %ecx
- movl 16(%edi), %edx
- adcl $0, %ebp
- # mul a[2]*b[4]
- mull %edx
- addl %eax, %ebx
- movl 4(%esi), %eax
- adcl %edx, %ecx
- movl 20(%edi), %edx
- adcl $0, %ebp
- # mul a[1]*b[5]
- mull %edx
- addl %eax, %ebx
- movl (%esi), %eax
- adcl %edx, %ecx
- movl 24(%edi), %edx
- adcl $0, %ebp
- # mul a[0]*b[6]
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl (%edi), %edx
- adcl $0, %ebp
- movl %ebx, 24(%eax)
- movl 28(%esi), %eax
- # saved r[6]
- # ################## Calculate word 7
- xorl %ebx, %ebx
- # mul a[7]*b[0]
- mull %edx
- addl %eax, %ecx
- movl 24(%esi), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- # mul a[6]*b[1]
- mull %edx
- addl %eax, %ecx
- movl 20(%esi), %eax
- adcl %edx, %ebp
- movl 8(%edi), %edx
- adcl $0, %ebx
- # mul a[5]*b[2]
- mull %edx
- addl %eax, %ecx
- movl 16(%esi), %eax
- adcl %edx, %ebp
- movl 12(%edi), %edx
- adcl $0, %ebx
- # mul a[4]*b[3]
- mull %edx
- addl %eax, %ecx
- movl 12(%esi), %eax
- adcl %edx, %ebp
- movl 16(%edi), %edx
- adcl $0, %ebx
- # mul a[3]*b[4]
- mull %edx
- addl %eax, %ecx
- movl 8(%esi), %eax
- adcl %edx, %ebp
- movl 20(%edi), %edx
- adcl $0, %ebx
- # mul a[2]*b[5]
- mull %edx
- addl %eax, %ecx
- movl 4(%esi), %eax
- adcl %edx, %ebp
- movl 24(%edi), %edx
- adcl $0, %ebx
- # mul a[1]*b[6]
- mull %edx
- addl %eax, %ecx
- movl (%esi), %eax
- adcl %edx, %ebp
- movl 28(%edi), %edx
- adcl $0, %ebx
- # mul a[0]*b[7]
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- movl %ecx, 28(%eax)
- movl 28(%esi), %eax
- # saved r[7]
- # ################## Calculate word 8
- xorl %ecx, %ecx
- # mul a[7]*b[1]
- mull %edx
- addl %eax, %ebp
- movl 24(%esi), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- # mul a[6]*b[2]
- mull %edx
- addl %eax, %ebp
- movl 20(%esi), %eax
- adcl %edx, %ebx
- movl 12(%edi), %edx
- adcl $0, %ecx
- # mul a[5]*b[3]
- mull %edx
- addl %eax, %ebp
- movl 16(%esi), %eax
- adcl %edx, %ebx
- movl 16(%edi), %edx
- adcl $0, %ecx
- # mul a[4]*b[4]
- mull %edx
- addl %eax, %ebp
- movl 12(%esi), %eax
- adcl %edx, %ebx
- movl 20(%edi), %edx
- adcl $0, %ecx
- # mul a[3]*b[5]
- mull %edx
- addl %eax, %ebp
- movl 8(%esi), %eax
- adcl %edx, %ebx
- movl 24(%edi), %edx
- adcl $0, %ecx
- # mul a[2]*b[6]
- mull %edx
- addl %eax, %ebp
- movl 4(%esi), %eax
- adcl %edx, %ebx
- movl 28(%edi), %edx
- adcl $0, %ecx
- # mul a[1]*b[7]
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- movl %ebp, 32(%eax)
- movl 28(%esi), %eax
- # saved r[8]
- # ################## Calculate word 9
- xorl %ebp, %ebp
- # mul a[7]*b[2]
- mull %edx
- addl %eax, %ebx
- movl 24(%esi), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- # mul a[6]*b[3]
- mull %edx
- addl %eax, %ebx
- movl 20(%esi), %eax
- adcl %edx, %ecx
- movl 16(%edi), %edx
- adcl $0, %ebp
- # mul a[5]*b[4]
- mull %edx
- addl %eax, %ebx
- movl 16(%esi), %eax
- adcl %edx, %ecx
- movl 20(%edi), %edx
- adcl $0, %ebp
- # mul a[4]*b[5]
- mull %edx
- addl %eax, %ebx
- movl 12(%esi), %eax
- adcl %edx, %ecx
- movl 24(%edi), %edx
- adcl $0, %ebp
- # mul a[3]*b[6]
- mull %edx
- addl %eax, %ebx
- movl 8(%esi), %eax
- adcl %edx, %ecx
- movl 28(%edi), %edx
- adcl $0, %ebp
- # mul a[2]*b[7]
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- movl %ebx, 36(%eax)
- movl 28(%esi), %eax
- # saved r[9]
- # ################## Calculate word 10
- xorl %ebx, %ebx
- # mul a[7]*b[3]
- mull %edx
- addl %eax, %ecx
- movl 24(%esi), %eax
- adcl %edx, %ebp
- movl 16(%edi), %edx
- adcl $0, %ebx
- # mul a[6]*b[4]
- mull %edx
- addl %eax, %ecx
- movl 20(%esi), %eax
- adcl %edx, %ebp
- movl 20(%edi), %edx
- adcl $0, %ebx
- # mul a[5]*b[5]
- mull %edx
- addl %eax, %ecx
- movl 16(%esi), %eax
- adcl %edx, %ebp
- movl 24(%edi), %edx
- adcl $0, %ebx
- # mul a[4]*b[6]
- mull %edx
- addl %eax, %ecx
- movl 12(%esi), %eax
- adcl %edx, %ebp
- movl 28(%edi), %edx
- adcl $0, %ebx
- # mul a[3]*b[7]
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl 16(%edi), %edx
- adcl $0, %ebx
- movl %ecx, 40(%eax)
- movl 28(%esi), %eax
- # saved r[10]
- # ################## Calculate word 11
- xorl %ecx, %ecx
- # mul a[7]*b[4]
- mull %edx
- addl %eax, %ebp
- movl 24(%esi), %eax
- adcl %edx, %ebx
- movl 20(%edi), %edx
- adcl $0, %ecx
- # mul a[6]*b[5]
- mull %edx
- addl %eax, %ebp
- movl 20(%esi), %eax
- adcl %edx, %ebx
- movl 24(%edi), %edx
- adcl $0, %ecx
- # mul a[5]*b[6]
- mull %edx
- addl %eax, %ebp
- movl 16(%esi), %eax
- adcl %edx, %ebx
- movl 28(%edi), %edx
- adcl $0, %ecx
- # mul a[4]*b[7]
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl 20(%edi), %edx
- adcl $0, %ecx
- movl %ebp, 44(%eax)
- movl 28(%esi), %eax
- # saved r[11]
- # ################## Calculate word 12
- xorl %ebp, %ebp
- # mul a[7]*b[5]
- mull %edx
- addl %eax, %ebx
- movl 24(%esi), %eax
- adcl %edx, %ecx
- movl 24(%edi), %edx
- adcl $0, %ebp
- # mul a[6]*b[6]
- mull %edx
- addl %eax, %ebx
- movl 20(%esi), %eax
- adcl %edx, %ecx
- movl 28(%edi), %edx
- adcl $0, %ebp
- # mul a[5]*b[7]
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl 24(%edi), %edx
- adcl $0, %ebp
- movl %ebx, 48(%eax)
- movl 28(%esi), %eax
- # saved r[12]
- # ################## Calculate word 13
- xorl %ebx, %ebx
- # mul a[7]*b[6]
- mull %edx
- addl %eax, %ecx
- movl 24(%esi), %eax
- adcl %edx, %ebp
- movl 28(%edi), %edx
- adcl $0, %ebx
- # mul a[6]*b[7]
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl 28(%edi), %edx
- adcl $0, %ebx
- movl %ecx, 52(%eax)
- movl 28(%esi), %eax
- # saved r[13]
- # ################## Calculate word 14
- xorl %ecx, %ecx
- # mul a[7]*b[7]
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- adcl $0, %ecx
- movl %ebp, 56(%eax)
- # saved r[14]
- # save r[15]
- movl %ebx, 60(%eax)
- popl %ebx
- popl %ebp
- popl %edi
- popl %esi
- ret
-.bn_mul_comba8_end:
- .size bn_mul_comba8,.bn_mul_comba8_end-bn_mul_comba8
-.ident "desasm.pl"
-.text
- .align 16
-.globl bn_mul_comba4
- .type bn_mul_comba4,@function
-bn_mul_comba4:
- pushl %esi
- movl 12(%esp), %esi
- pushl %edi
- movl 20(%esp), %edi
- pushl %ebp
- pushl %ebx
- xorl %ebx, %ebx
- movl (%esi), %eax
- xorl %ecx, %ecx
- movl (%edi), %edx
- # ################## Calculate word 0
- xorl %ebp, %ebp
- # mul a[0]*b[0]
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl (%edi), %edx
- adcl $0, %ebp
- movl %ebx, (%eax)
- movl 4(%esi), %eax
- # saved r[0]
- # ################## Calculate word 1
- xorl %ebx, %ebx
- # mul a[1]*b[0]
- mull %edx
- addl %eax, %ecx
- movl (%esi), %eax
- adcl %edx, %ebp
- movl 4(%edi), %edx
- adcl $0, %ebx
- # mul a[0]*b[1]
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl (%edi), %edx
- adcl $0, %ebx
- movl %ecx, 4(%eax)
- movl 8(%esi), %eax
- # saved r[1]
- # ################## Calculate word 2
- xorl %ecx, %ecx
- # mul a[2]*b[0]
- mull %edx
- addl %eax, %ebp
- movl 4(%esi), %eax
- adcl %edx, %ebx
- movl 4(%edi), %edx
- adcl $0, %ecx
- # mul a[1]*b[1]
- mull %edx
- addl %eax, %ebp
- movl (%esi), %eax
- adcl %edx, %ebx
- movl 8(%edi), %edx
- adcl $0, %ecx
- # mul a[0]*b[2]
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl (%edi), %edx
- adcl $0, %ecx
- movl %ebp, 8(%eax)
- movl 12(%esi), %eax
- # saved r[2]
- # ################## Calculate word 3
- xorl %ebp, %ebp
- # mul a[3]*b[0]
- mull %edx
- addl %eax, %ebx
- movl 8(%esi), %eax
- adcl %edx, %ecx
- movl 4(%edi), %edx
- adcl $0, %ebp
- # mul a[2]*b[1]
- mull %edx
- addl %eax, %ebx
- movl 4(%esi), %eax
- adcl %edx, %ecx
- movl 8(%edi), %edx
- adcl $0, %ebp
- # mul a[1]*b[2]
- mull %edx
- addl %eax, %ebx
- movl (%esi), %eax
- adcl %edx, %ecx
- movl 12(%edi), %edx
- adcl $0, %ebp
- # mul a[0]*b[3]
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- movl 4(%edi), %edx
- adcl $0, %ebp
- movl %ebx, 12(%eax)
- movl 12(%esi), %eax
- # saved r[3]
- # ################## Calculate word 4
- xorl %ebx, %ebx
- # mul a[3]*b[1]
- mull %edx
- addl %eax, %ecx
- movl 8(%esi), %eax
- adcl %edx, %ebp
- movl 8(%edi), %edx
- adcl $0, %ebx
- # mul a[2]*b[2]
- mull %edx
- addl %eax, %ecx
- movl 4(%esi), %eax
- adcl %edx, %ebp
- movl 12(%edi), %edx
- adcl $0, %ebx
- # mul a[1]*b[3]
- mull %edx
- addl %eax, %ecx
- movl 20(%esp), %eax
- adcl %edx, %ebp
- movl 8(%edi), %edx
- adcl $0, %ebx
- movl %ecx, 16(%eax)
- movl 12(%esi), %eax
- # saved r[4]
- # ################## Calculate word 5
- xorl %ecx, %ecx
- # mul a[3]*b[2]
- mull %edx
- addl %eax, %ebp
- movl 8(%esi), %eax
- adcl %edx, %ebx
- movl 12(%edi), %edx
- adcl $0, %ecx
- # mul a[2]*b[3]
- mull %edx
- addl %eax, %ebp
- movl 20(%esp), %eax
- adcl %edx, %ebx
- movl 12(%edi), %edx
- adcl $0, %ecx
- movl %ebp, 20(%eax)
- movl 12(%esi), %eax
- # saved r[5]
- # ################## Calculate word 6
- xorl %ebp, %ebp
- # mul a[3]*b[3]
- mull %edx
- addl %eax, %ebx
- movl 20(%esp), %eax
- adcl %edx, %ecx
- adcl $0, %ebp
- movl %ebx, 24(%eax)
- # saved r[6]
- # save r[7]
- movl %ecx, 28(%eax)
- popl %ebx
- popl %ebp
- popl %edi
- popl %esi
- ret
-.bn_mul_comba4_end:
- .size bn_mul_comba4,.bn_mul_comba4_end-bn_mul_comba4
-.ident "desasm.pl"
-.text
- .align 16
-.globl bn_sqr_comba8
- .type bn_sqr_comba8,@function
-bn_sqr_comba8:
- pushl %esi
- pushl %edi
- pushl %ebp
- pushl %ebx
- movl 20(%esp), %edi
- movl 24(%esp), %esi
- xorl %ebx, %ebx
- xorl %ecx, %ecx
- movl (%esi), %eax
- # ############### Calculate word 0
- xorl %ebp, %ebp
- # sqr a[0]*a[0]
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- movl (%esi), %edx
- adcl $0, %ebp
- movl %ebx, (%edi)
- movl 4(%esi), %eax
- # saved r[0]
- # ############### Calculate word 1
- xorl %ebx, %ebx
- # sqr a[1]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %eax
- adcl $0, %ebx
- movl %ecx, 4(%edi)
- movl (%esi), %edx
- # saved r[1]
- # ############### Calculate word 2
- xorl %ecx, %ecx
- # sqr a[2]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 4(%esi), %eax
- adcl $0, %ecx
- # sqr a[1]*a[1]
- mull %eax
- addl %eax, %ebp
- adcl %edx, %ebx
- movl (%esi), %edx
- adcl $0, %ecx
- movl %ebp, 8(%edi)
- movl 12(%esi), %eax
- # saved r[2]
- # ############### Calculate word 3
- xorl %ebp, %ebp
- # sqr a[3]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 8(%esi), %eax
- adcl $0, %ebp
- movl 4(%esi), %edx
- # sqr a[2]*a[1]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 16(%esi), %eax
- adcl $0, %ebp
- movl %ebx, 12(%edi)
- movl (%esi), %edx
- # saved r[3]
- # ############### Calculate word 4
- xorl %ebx, %ebx
- # sqr a[4]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 12(%esi), %eax
- adcl $0, %ebx
- movl 4(%esi), %edx
- # sqr a[3]*a[1]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %eax
- adcl $0, %ebx
- # sqr a[2]*a[2]
- mull %eax
- addl %eax, %ecx
- adcl %edx, %ebp
- movl (%esi), %edx
- adcl $0, %ebx
- movl %ecx, 16(%edi)
- movl 20(%esi), %eax
- # saved r[4]
- # ############### Calculate word 5
- xorl %ecx, %ecx
- # sqr a[5]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 16(%esi), %eax
- adcl $0, %ecx
- movl 4(%esi), %edx
- # sqr a[4]*a[1]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 12(%esi), %eax
- adcl $0, %ecx
- movl 8(%esi), %edx
- # sqr a[3]*a[2]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 24(%esi), %eax
- adcl $0, %ecx
- movl %ebp, 20(%edi)
- movl (%esi), %edx
- # saved r[5]
- # ############### Calculate word 6
- xorl %ebp, %ebp
- # sqr a[6]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 20(%esi), %eax
- adcl $0, %ebp
- movl 4(%esi), %edx
- # sqr a[5]*a[1]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 16(%esi), %eax
- adcl $0, %ebp
- movl 8(%esi), %edx
- # sqr a[4]*a[2]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 12(%esi), %eax
- adcl $0, %ebp
- # sqr a[3]*a[3]
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- movl (%esi), %edx
- adcl $0, %ebp
- movl %ebx, 24(%edi)
- movl 28(%esi), %eax
- # saved r[6]
- # ############### Calculate word 7
- xorl %ebx, %ebx
- # sqr a[7]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 24(%esi), %eax
- adcl $0, %ebx
- movl 4(%esi), %edx
- # sqr a[6]*a[1]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 20(%esi), %eax
- adcl $0, %ebx
- movl 8(%esi), %edx
- # sqr a[5]*a[2]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 16(%esi), %eax
- adcl $0, %ebx
- movl 12(%esi), %edx
- # sqr a[4]*a[3]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 28(%esi), %eax
- adcl $0, %ebx
- movl %ecx, 28(%edi)
- movl 4(%esi), %edx
- # saved r[7]
- # ############### Calculate word 8
- xorl %ecx, %ecx
- # sqr a[7]*a[1]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 24(%esi), %eax
- adcl $0, %ecx
- movl 8(%esi), %edx
- # sqr a[6]*a[2]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 20(%esi), %eax
- adcl $0, %ecx
- movl 12(%esi), %edx
- # sqr a[5]*a[3]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 16(%esi), %eax
- adcl $0, %ecx
- # sqr a[4]*a[4]
- mull %eax
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 8(%esi), %edx
- adcl $0, %ecx
- movl %ebp, 32(%edi)
- movl 28(%esi), %eax
- # saved r[8]
- # ############### Calculate word 9
- xorl %ebp, %ebp
- # sqr a[7]*a[2]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 24(%esi), %eax
- adcl $0, %ebp
- movl 12(%esi), %edx
- # sqr a[6]*a[3]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 20(%esi), %eax
- adcl $0, %ebp
- movl 16(%esi), %edx
- # sqr a[5]*a[4]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 28(%esi), %eax
- adcl $0, %ebp
- movl %ebx, 36(%edi)
- movl 12(%esi), %edx
- # saved r[9]
- # ############### Calculate word 10
- xorl %ebx, %ebx
- # sqr a[7]*a[3]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 24(%esi), %eax
- adcl $0, %ebx
- movl 16(%esi), %edx
- # sqr a[6]*a[4]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 20(%esi), %eax
- adcl $0, %ebx
- # sqr a[5]*a[5]
- mull %eax
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 16(%esi), %edx
- adcl $0, %ebx
- movl %ecx, 40(%edi)
- movl 28(%esi), %eax
- # saved r[10]
- # ############### Calculate word 11
- xorl %ecx, %ecx
- # sqr a[7]*a[4]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 24(%esi), %eax
- adcl $0, %ecx
- movl 20(%esi), %edx
- # sqr a[6]*a[5]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 28(%esi), %eax
- adcl $0, %ecx
- movl %ebp, 44(%edi)
- movl 20(%esi), %edx
- # saved r[11]
- # ############### Calculate word 12
- xorl %ebp, %ebp
- # sqr a[7]*a[5]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 24(%esi), %eax
- adcl $0, %ebp
- # sqr a[6]*a[6]
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 24(%esi), %edx
- adcl $0, %ebp
- movl %ebx, 48(%edi)
- movl 28(%esi), %eax
- # saved r[12]
- # ############### Calculate word 13
- xorl %ebx, %ebx
- # sqr a[7]*a[6]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 28(%esi), %eax
- adcl $0, %ebx
- movl %ecx, 52(%edi)
- # saved r[13]
- # ############### Calculate word 14
- xorl %ecx, %ecx
- # sqr a[7]*a[7]
- mull %eax
- addl %eax, %ebp
- adcl %edx, %ebx
- adcl $0, %ecx
- movl %ebp, 56(%edi)
- # saved r[14]
- movl %ebx, 60(%edi)
- popl %ebx
- popl %ebp
- popl %edi
- popl %esi
- ret
-.bn_sqr_comba8_end:
- .size bn_sqr_comba8,.bn_sqr_comba8_end-bn_sqr_comba8
-.ident "desasm.pl"
-.text
- .align 16
-.globl bn_sqr_comba4
- .type bn_sqr_comba4,@function
-bn_sqr_comba4:
- pushl %esi
- pushl %edi
- pushl %ebp
- pushl %ebx
- movl 20(%esp), %edi
- movl 24(%esp), %esi
- xorl %ebx, %ebx
- xorl %ecx, %ecx
- movl (%esi), %eax
- # ############### Calculate word 0
- xorl %ebp, %ebp
- # sqr a[0]*a[0]
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- movl (%esi), %edx
- adcl $0, %ebp
- movl %ebx, (%edi)
- movl 4(%esi), %eax
- # saved r[0]
- # ############### Calculate word 1
- xorl %ebx, %ebx
- # sqr a[1]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %eax
- adcl $0, %ebx
- movl %ecx, 4(%edi)
- movl (%esi), %edx
- # saved r[1]
- # ############### Calculate word 2
- xorl %ecx, %ecx
- # sqr a[2]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 4(%esi), %eax
- adcl $0, %ecx
- # sqr a[1]*a[1]
- mull %eax
- addl %eax, %ebp
- adcl %edx, %ebx
- movl (%esi), %edx
- adcl $0, %ecx
- movl %ebp, 8(%edi)
- movl 12(%esi), %eax
- # saved r[2]
- # ############### Calculate word 3
- xorl %ebp, %ebp
- # sqr a[3]*a[0]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 8(%esi), %eax
- adcl $0, %ebp
- movl 4(%esi), %edx
- # sqr a[2]*a[1]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebp
- addl %eax, %ebx
- adcl %edx, %ecx
- movl 12(%esi), %eax
- adcl $0, %ebp
- movl %ebx, 12(%edi)
- movl 4(%esi), %edx
- # saved r[3]
- # ############### Calculate word 4
- xorl %ebx, %ebx
- # sqr a[3]*a[1]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ebx
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %eax
- adcl $0, %ebx
- # sqr a[2]*a[2]
- mull %eax
- addl %eax, %ecx
- adcl %edx, %ebp
- movl 8(%esi), %edx
- adcl $0, %ebx
- movl %ecx, 16(%edi)
- movl 12(%esi), %eax
- # saved r[4]
- # ############### Calculate word 5
- xorl %ecx, %ecx
- # sqr a[3]*a[2]
- mull %edx
- addl %eax, %eax
- adcl %edx, %edx
- adcl $0, %ecx
- addl %eax, %ebp
- adcl %edx, %ebx
- movl 12(%esi), %eax
- adcl $0, %ecx
- movl %ebp, 20(%edi)
- # saved r[5]
- # ############### Calculate word 6
- xorl %ebp, %ebp
- # sqr a[3]*a[3]
- mull %eax
- addl %eax, %ebx
- adcl %edx, %ecx
- adcl $0, %ebp
- movl %ebx, 24(%edi)
- # saved r[6]
- movl %ecx, 28(%edi)
- popl %ebx
- popl %ebp
- popl %edi
- popl %esi
- ret
-.bn_sqr_comba4_end:
- .size bn_sqr_comba4,.bn_sqr_comba4_end-bn_sqr_comba4
-.ident "desasm.pl"
+++ /dev/null
- # Don't even think of reading this code
- # It was automatically generated by bn-586.pl
-	# Which is a perl program used to generate the alpha assembler.
- # eric <eay@cryptsoft.com>
-
-	# DEC Alpha assembler
-	# Generated from perl scripts contained in SSLeay
- .file 1 "bn-586.s"
- .set noat
- .text
- .align 3
- .globl bn_mul_words
- .ent bn_mul_words
-bn_mul_words:
-bn_mul_words..ng:
- .frame $30,0,$26,0
- .prologue 0
-
- subq $18, 4, $18
- bis $31, $31, $0
- br $100
- blt $18, $100
- ldq $1, 0($17)
- ldq $2, 0($16)
-$101:
- ldq $3, 0($17)
- mulq $3, $19, $4
- addq $17, 8, $17
- umulh $3, $19, $5
- addq $4, $0, $4
- addq $16, 8, $16
- subq $18, 1, $18
- cmpult $4, $0, $0
- stq $4, -8($16)
- addq $5, $0, $0
- bgt $18, $101
- ret $31,($26),1
-$100:
- addq $18, 4, $18
- bgt $18, $101
-$102:
- ret $31,($26),1
- .end bn_mul_words
- .text
- .align 3
- .globl bn_sqr_words
- .ent bn_sqr_words
-bn_sqr_words:
-bn_sqr_words..ng:
- .frame $30,0,$26,0
- .prologue 0
-
- subq $18, 4, $18
- bis $31, $31, $0
- br $103
- blt $18, $103
- ldq $1, 0($17)
- ldq $2, 0($16)
-$104:
- ldq $3, 0($17)
- mulq $3, $3, $4
- addq $17, 8, $17
- addq $16, 16, $16
- subq $18, 1, $18
- umulh $3, $3, $5
- stq $4, -16($16)
- stq $5, -8($16)
- bgt $18, $104
- ret $31,($26),1
-$103:
- addq $18, 4, $18
- bgt $18, $104
-$105:
- ret $31,($26),1
- .end bn_sqr_words
- .text
- .align 3
- .globl bn_mul_add_words
- .ent bn_mul_add_words
-bn_mul_add_words:
-bn_mul_add_words..ng:
- .frame $30,0,$26,0
- .prologue 0
-
- subq $18, 4, $18
- bis $31, $31, $0
- br $106
- blt $18, $106
- ldq $1, 0($17)
- ldq $2, 0($16)
-$107:
- ldq $3, 0($17)
- ldq $4, 0($16)
- mulq $3, $19, $5
- subq $18, 1, $18
- addq $17, 8, $17
- umulh $3, $19, $6
- addq $4, $5, $4
- addq $16, 8, $16
- cmpult $4, $5, $7
- addq $4, $0, $4
- addq $6, $7, $6
- cmpult $4, $0, $0
- stq $4, -8($16)
- addq $6, $0, $0
- bgt $18, $107
- ret $31,($26),1
-$106:
- addq $18, 4, $18
- bgt $18, $107
-$108:
- ret $31,($26),1
- .end bn_mul_add_words
- .text
- .align 3
- .globl bn_add_words
- .ent bn_add_words
-bn_add_words:
-bn_add_words..ng:
- .frame $30,0,$26,0
- .prologue 0
-
- subq $19, 4, $19
- bis $31, $31, $0
- br $109
- blt $19, $109
- ldq $1, 0($17)
- ldq $2, 0($18)
-$110:
- ldq $3, 8($17)
- ldq $4, 8($18)
- ldq $5, 16($17)
- ldq $6, 16($18)
- ldq $7, 24($17)
- ldq $8, 24($18)
- addq $1, $2, $22
- cmpult $22, $2, $23
- addq $22, $0, $22
- cmpult $22, $0, $0
- addq $0, $23, $0
- addq $3, $4, $25
- cmpult $25, $4, $24
- addq $25, $0, $25
- cmpult $25, $0, $0
- addq $0, $24, $0
- addq $5, $6, $28
- cmpult $28, $6, $27
- addq $28, $0, $28
- cmpult $28, $0, $0
- addq $0, $27, $0
- addq $7, $8, $20
- cmpult $20, $8, $21
- addq $20, $0, $20
- cmpult $20, $0, $0
- addq $0, $21, $0
-	stq $22, 0($16)
-	stq $25, 8($16)
-	stq $28, 16($16)
-	stq $20, 24($16)
- subq $19, 4, $19
- addq $17, 32, $17
- addq $18, 32, $18
- addq $16, 32, $16
- blt $19, $109
- ldq $1, 0($17)
- ldq $2, 0($18)
- br $110
-$111:
- ldq $1, 0($17)
- ldq $2, 0($18)
- addq $1, $2, $3
- cmpult $3, $2, $23
- addq $3, $0, $3
- cmpult $3, $0, $0
- addq $0, $23, $0
- stq $3, 0($16)
- addq $17, 8, $17
- addq $18, 8, $18
- addq $16, 8, $16
- subq $19, 1, $19
- bgt $19, $111
- ret $31,($26),1
-$109:
- addq $19, 4, $19
- bgt $19, $111
-$112:
- ret $31,($26),1
- .end bn_add_words
- .text
- .align 3
- .globl bn_sub_words
- .ent bn_sub_words
-bn_sub_words:
-bn_sub_words..ng:
- .frame $30,0,$26,0
- .prologue 0
-
- subq $19, 4, $19
- bis $31, $31, $0
- blt $19, $113
- ldq $1, 0($17)
- ldq $2, 0($18)
-$114:
- ldq $3, 8($17)
- cmpult $1, $2, $4
- ldq $5, 8($18)
- subq $1, $2, $1
- ldq $6, 16($17)
- cmpult $1, $0, $2
- ldq $7, 16($18)
- subq $1, $0, $23
- ldq $8, 24($17)
- addq $2, $4, $0
- cmpult $3, $5, $24
- subq $3, $5, $3
- ldq $22, 24($18)
- cmpult $3, $0, $5
- subq $3, $0, $25
- addq $5, $24, $0
- cmpult $6, $7, $27
- subq $6, $7, $6
- stq $23, 0($16)
- cmpult $6, $0, $7
- subq $6, $0, $28
- addq $7, $27, $0
- cmpult $8, $22, $21
- subq $8, $22, $8
- stq $25, 8($16)
- cmpult $8, $0, $22
- subq $8, $0, $20
- addq $22, $21, $0
- stq $28, 16($16)
- subq $19, 4, $19
- stq $20, 24($16)
- addq $17, 32, $17
- addq $18, 32, $18
- addq $16, 32, $16
- blt $19, $113
- ldq $1, 0($17)
- ldq $2, 0($18)
- br $114
-$115:
- ldq $1, 0($17)
- ldq $2, 0($18)
- cmpult $1, $2, $27
- subq $1, $2, $1
- cmpult $1, $0, $2
- subq $1, $0, $1
- stq $1, 0($16)
- addq $2, $27, $0
- addq $17, 8, $17
- addq $18, 8, $18
- addq $16, 8, $16
- subq $19, 1, $19
- bgt $19, $115
- ret $31,($26),1
-$113:
- addq $19, 4, $19
- bgt $19, $115
-$116:
- ret $31,($26),1
- .end bn_sub_words
- #
- # What follows was taken directly from the C compiler with a few
-	# hacks to redo the labels.
- #
-.text
- .align 3
- .globl bn_div64
- .ent bn_div64
-bn_div64:
- ldgp $29,0($27)
-bn_div64..ng:
- lda $30,-48($30)
- .frame $30,48,$26,0
- stq $26,0($30)
- stq $9,8($30)
- stq $10,16($30)
- stq $11,24($30)
- stq $12,32($30)
- stq $13,40($30)
- .mask 0x4003e00,-48
- .prologue 1
- bis $16,$16,$9
- bis $17,$17,$10
- bis $18,$18,$11
- bis $31,$31,$13
- bis $31,2,$12
- bne $11,$9119
- lda $0,-1
- br $31,$9136
- .align 4
-$9119:
- bis $11,$11,$16
- jsr $26,BN_num_bits_word
- ldgp $29,0($26)
- subq $0,64,$1
- beq $1,$9120
- bis $31,1,$1
- sll $1,$0,$1
- cmpule $9,$1,$1
- bne $1,$9120
- # lda $16,_IO_stderr_
- # lda $17,$C32
- # bis $0,$0,$18
- # jsr $26,fprintf
- # ldgp $29,0($26)
- jsr $26,abort
- ldgp $29,0($26)
- .align 4
-$9120:
- bis $31,64,$3
- cmpult $9,$11,$2
- subq $3,$0,$1
- addl $1,$31,$0
- subq $9,$11,$1
- cmoveq $2,$1,$9
- beq $0,$9122
- zapnot $0,15,$2
- subq $3,$0,$1
- sll $11,$2,$11
- sll $9,$2,$3
- srl $10,$1,$1
- sll $10,$2,$10
- bis $3,$1,$9
-$9122:
- srl $11,32,$5
- zapnot $11,15,$6
- lda $7,-1
- .align 5
-$9123:
- srl $9,32,$1
- subq $1,$5,$1
- bne $1,$9126
- zapnot $7,15,$27
- br $31,$9127
- .align 4
-$9126:
- bis $9,$9,$24
- bis $5,$5,$25
- divqu $24,$25,$27
-$9127:
- srl $10,32,$4
- .align 5
-$9128:
- mulq $27,$5,$1
- subq $9,$1,$3
- zapnot $3,240,$1
- bne $1,$9129
- mulq $6,$27,$2
- sll $3,32,$1
- addq $1,$4,$1
- cmpule $2,$1,$2
- bne $2,$9129
- subq $27,1,$27
- br $31,$9128
- .align 4
-$9129:
- mulq $27,$6,$1
- mulq $27,$5,$4
- srl $1,32,$3
- sll $1,32,$1
- addq $4,$3,$4
- cmpult $10,$1,$2
- subq $10,$1,$10
- addq $2,$4,$2
- cmpult $9,$2,$1
- bis $2,$2,$4
- beq $1,$9134
- addq $9,$11,$9
- subq $27,1,$27
-$9134:
- subl $12,1,$12
- subq $9,$4,$9
- beq $12,$9124
- sll $27,32,$13
- sll $9,32,$2
- srl $10,32,$1
- sll $10,32,$10
- bis $2,$1,$9
- br $31,$9123
- .align 4
-$9124:
- bis $13,$27,$0
-$9136:
- ldq $26,0($30)
- ldq $9,8($30)
- ldq $10,16($30)
- ldq $11,24($30)
- ldq $12,32($30)
- ldq $13,40($30)
- addq $30,48,$30
- ret $31,($26),1
- .end bn_div64
- .text
- .align 3
- .globl bn_mul_comba8
- .ent bn_mul_comba8
-bn_mul_comba8:
-bn_mul_comba8..ng:
- .frame $30,0,$26,0
- .prologue 0
-
- subq $30, 16, $30
- ldq $0, 0($17)
- ldq $1, 0($18)
- stq $9, 0($30)
- stq $10, 8($30)
- ldq $2, 8($17)
- ldq $3, 8($18)
- ldq $4, 16($17)
- ldq $5, 16($18)
- ldq $6, 24($17)
- ldq $7, 24($18)
- ldq $8, 8($17)
- ldq $22, 8($18)
- ldq $23, 8($17)
- ldq $24, 8($18)
- ldq $25, 8($17)
- ldq $27, 8($18)
- ldq $28, 8($17)
- ldq $21, 8($18)
- bis $31, $31, $9
- mulq $0, $1, $20
- umulh $0, $1, $19
- stq $20, 0($16)
- bis $31, $31, $10
- mulq $0, $3, $17
- umulh $0, $3, $18
- addq $19, $17, $19
- cmpult $19, $17, $20
- addq $20, $18, $18
- addq $9, $18, $9
- cmpult $9, $18, $17
- addq $10, $17, $10
- mulq $2, $1, $20
- umulh $2, $1, $18
- addq $19, $20, $19
- cmpult $19, $20, $17
- addq $17, $18, $18
- addq $9, $18, $9
- cmpult $9, $18, $20
- addq $10, $20, $10
- stq $19, 8($16)
- bis $31, $31, $17
- mulq $0, $5, $18
- umulh $0, $5, $20
- addq $9, $18, $9
- cmpult $9, $18, $19
- addq $19, $20, $20
- addq $10, $20, $10
- cmpult $10, $20, $18
- addq $17, $18, $17
- mulq $2, $3, $19
- umulh $2, $3, $20
- addq $9, $19, $9
- cmpult $9, $19, $18
- addq $18, $20, $20
- addq $10, $20, $10
- cmpult $10, $20, $19
- addq $17, $19, $17
- mulq $4, $1, $18
- umulh $4, $1, $20
- addq $9, $18, $9
- cmpult $9, $18, $19
- addq $19, $20, $20
- addq $10, $20, $10
- cmpult $10, $20, $18
- addq $17, $18, $17
- stq $9, 16($16)
- bis $31, $31, $19
- mulq $0, $7, $20
- umulh $0, $7, $18
- addq $10, $20, $10
- cmpult $10, $20, $9
- addq $9, $18, $18
- addq $17, $18, $17
- cmpult $17, $18, $20
- addq $19, $20, $19
- mulq $2, $5, $9
- umulh $2, $5, $18
- addq $10, $9, $10
- cmpult $10, $9, $20
- addq $20, $18, $18
- addq $17, $18, $17
- cmpult $17, $18, $9
- addq $19, $9, $19
- mulq $4, $3, $20
- umulh $4, $3, $18
- addq $10, $20, $10
- cmpult $10, $20, $9
- addq $9, $18, $18
- addq $17, $18, $17
- cmpult $17, $18, $20
- addq $19, $20, $19
- mulq $6, $1, $9
- umulh $6, $1, $18
- addq $10, $9, $10
- cmpult $10, $9, $20
- addq $20, $18, $18
- addq $17, $18, $17
- cmpult $17, $18, $9
- addq $19, $9, $19
- stq $10, 24($16)
- bis $31, $31, $20
- mulq $0, $22, $18
- umulh $0, $22, $9
- addq $17, $18, $17
- cmpult $17, $18, $10
- addq $10, $9, $9
- addq $19, $9, $19
- cmpult $19, $9, $18
- addq $20, $18, $20
- mulq $2, $7, $10
- umulh $2, $7, $9
- addq $17, $10, $17
- cmpult $17, $10, $18
- addq $18, $9, $9
- addq $19, $9, $19
- cmpult $19, $9, $10
- addq $20, $10, $20
- mulq $4, $5, $18
- umulh $4, $5, $9
- addq $17, $18, $17
- cmpult $17, $18, $10
- addq $10, $9, $9
- addq $19, $9, $19
- cmpult $19, $9, $18
- addq $20, $18, $20
- mulq $6, $3, $10
- umulh $6, $3, $9
- addq $17, $10, $17
- cmpult $17, $10, $18
- addq $18, $9, $9
- addq $19, $9, $19
- cmpult $19, $9, $10
- addq $20, $10, $20
- mulq $8, $1, $18
- umulh $8, $1, $9
- addq $17, $18, $17
- cmpult $17, $18, $10
- addq $10, $9, $9
- addq $19, $9, $19
- cmpult $19, $9, $18
- addq $20, $18, $20
- stq $17, 32($16)
- bis $31, $31, $10
- mulq $0, $24, $9
- umulh $0, $24, $18
- addq $19, $9, $19
- cmpult $19, $9, $17
- addq $17, $18, $18
- addq $20, $18, $20
- cmpult $20, $18, $9
- addq $10, $9, $10
- mulq $2, $22, $17
- umulh $2, $22, $18
- addq $19, $17, $19
- cmpult $19, $17, $9
- addq $9, $18, $18
- addq $20, $18, $20
- cmpult $20, $18, $17
- addq $10, $17, $10
- mulq $4, $7, $9
- umulh $4, $7, $18
- addq $19, $9, $19
- cmpult $19, $9, $17
- addq $17, $18, $18
- addq $20, $18, $20
- cmpult $20, $18, $9
- addq $10, $9, $10
- mulq $6, $5, $17
- umulh $6, $5, $18
- addq $19, $17, $19
- cmpult $19, $17, $9
- addq $9, $18, $18
- addq $20, $18, $20
- cmpult $20, $18, $17
- addq $10, $17, $10
- mulq $8, $3, $9
- umulh $8, $3, $18
- addq $19, $9, $19
- cmpult $19, $9, $17
- addq $17, $18, $18
- addq $20, $18, $20
- cmpult $20, $18, $9
- addq $10, $9, $10
- mulq $23, $1, $17
- umulh $23, $1, $18
- addq $19, $17, $19
- cmpult $19, $17, $9
- addq $9, $18, $18
- addq $20, $18, $20
- cmpult $20, $18, $17
- addq $10, $17, $10
- stq $19, 40($16)
- bis $31, $31, $9
- mulq $0, $27, $18
- umulh $0, $27, $17
- addq $20, $18, $20
- cmpult $20, $18, $19
- addq $19, $17, $17
- addq $10, $17, $10
- cmpult $10, $17, $18
- addq $9, $18, $9
- mulq $2, $24, $19
- umulh $2, $24, $17
- addq $20, $19, $20
- cmpult $20, $19, $18
- addq $18, $17, $17
- addq $10, $17, $10
- cmpult $10, $17, $19
- addq $9, $19, $9
- mulq $4, $22, $18
- umulh $4, $22, $17
- addq $20, $18, $20
- cmpult $20, $18, $19
- addq $19, $17, $17
- addq $10, $17, $10
- cmpult $10, $17, $18
- addq $9, $18, $9
- mulq $6, $7, $19
- umulh $6, $7, $17
- addq $20, $19, $20
- cmpult $20, $19, $18
- addq $18, $17, $17
- addq $10, $17, $10
- cmpult $10, $17, $19
- addq $9, $19, $9
- mulq $8, $5, $18
- umulh $8, $5, $17
- addq $20, $18, $20
- cmpult $20, $18, $19
- addq $19, $17, $17
- addq $10, $17, $10
- cmpult $10, $17, $18
- addq $9, $18, $9
- mulq $23, $3, $19
- umulh $23, $3, $17
- addq $20, $19, $20
- cmpult $20, $19, $18
- addq $18, $17, $17
- addq $10, $17, $10
- cmpult $10, $17, $19
- addq $9, $19, $9
- mulq $25, $1, $18
- umulh $25, $1, $17
- addq $20, $18, $20
- cmpult $20, $18, $19
- addq $19, $17, $17
- addq $10, $17, $10
- cmpult $10, $17, $18
- addq $9, $18, $9
- stq $20, 48($16)
- bis $31, $31, $19
- mulq $0, $21, $17
- umulh $0, $21, $18
- addq $10, $17, $10
- cmpult $10, $17, $20
- addq $20, $18, $18
- addq $9, $18, $9
- cmpult $9, $18, $17
- addq $19, $17, $19
- mulq $2, $27, $20
- umulh $2, $27, $18
- addq $10, $20, $10
- cmpult $10, $20, $17
- addq $17, $18, $18
- addq $9, $18, $9
- cmpult $9, $18, $0
- addq $19, $0, $19
- mulq $4, $24, $20
- umulh $4, $24, $17
- addq $10, $20, $10
- cmpult $10, $20, $18
- addq $18, $17, $17
- addq $9, $17, $9
- cmpult $9, $17, $0
- addq $19, $0, $19
- mulq $6, $22, $20
- umulh $6, $22, $18
- addq $10, $20, $10
- cmpult $10, $20, $17
- addq $17, $18, $18
- addq $9, $18, $9
- cmpult $9, $18, $0
- addq $19, $0, $19
- mulq $8, $7, $20
- umulh $8, $7, $17
- addq $10, $20, $10
- cmpult $10, $20, $18
- addq $18, $17, $17
- addq $9, $17, $9
- cmpult $9, $17, $0
- addq $19, $0, $19
- mulq $23, $5, $20
- umulh $23, $5, $18
- addq $10, $20, $10
- cmpult $10, $20, $17
- addq $17, $18, $18
- addq $9, $18, $9
- cmpult $9, $18, $0
- addq $19, $0, $19
- mulq $25, $3, $20
- umulh $25, $3, $17
- addq $10, $20, $10
- cmpult $10, $20, $18
- addq $18, $17, $17
- addq $9, $17, $9
- cmpult $9, $17, $0
- addq $19, $0, $19
- mulq $28, $1, $20
- umulh $28, $1, $18
- addq $10, $20, $10
- cmpult $10, $20, $17
- addq $17, $18, $18
- addq $9, $18, $9
- cmpult $9, $18, $0
- addq $19, $0, $19
- stq $10, 56($16)
- bis $31, $31, $20
- mulq $2, $21, $17
- umulh $2, $21, $18
- addq $9, $17, $9
- cmpult $9, $17, $0
- addq $0, $18, $18
- addq $19, $18, $19
- cmpult $19, $18, $1
- addq $20, $1, $20
- mulq $4, $27, $10
- umulh $4, $27, $17
- addq $9, $10, $9
- cmpult $9, $10, $0
- addq $0, $17, $17
- addq $19, $17, $19
- cmpult $19, $17, $18
- addq $20, $18, $20
- mulq $6, $24, $1
- umulh $6, $24, $2
- addq $9, $1, $9
- cmpult $9, $1, $10
- addq $10, $2, $2
- addq $19, $2, $19
- cmpult $19, $2, $0
- addq $20, $0, $20
- mulq $8, $22, $17
- umulh $8, $22, $18
- addq $9, $17, $9
- cmpult $9, $17, $1
- addq $1, $18, $18
- addq $19, $18, $19
- cmpult $19, $18, $10
- addq $20, $10, $20
- mulq $23, $7, $2
- umulh $23, $7, $0
- addq $9, $2, $9
- cmpult $9, $2, $17
- addq $17, $0, $0
- addq $19, $0, $19
- cmpult $19, $0, $1
- addq $20, $1, $20
- mulq $25, $5, $18
- umulh $25, $5, $10
- addq $9, $18, $9
- cmpult $9, $18, $2
- addq $2, $10, $10
- addq $19, $10, $19
- cmpult $19, $10, $17
- addq $20, $17, $20
- mulq $28, $3, $0
- umulh $28, $3, $1
- addq $9, $0, $9
- cmpult $9, $0, $18
- addq $18, $1, $1
- addq $19, $1, $19
- cmpult $19, $1, $2
- addq $20, $2, $20
- stq $9, 64($16)
- bis $31, $31, $10
- mulq $4, $21, $17
- umulh $4, $21, $0
- addq $19, $17, $19
- cmpult $19, $17, $18
- addq $18, $0, $0
- addq $20, $0, $20
- cmpult $20, $0, $1
- addq $10, $1, $10
- mulq $6, $27, $2
- umulh $6, $27, $3
- addq $19, $2, $19
- cmpult $19, $2, $9
- addq $9, $3, $3
- addq $20, $3, $20
- cmpult $20, $3, $17
- addq $10, $17, $10
- mulq $8, $24, $18
- umulh $8, $24, $0
- addq $19, $18, $19
- cmpult $19, $18, $1
- addq $1, $0, $0
- addq $20, $0, $20
- cmpult $20, $0, $4
- addq $10, $4, $10
- mulq $23, $22, $2
- umulh $23, $22, $9
- addq $19, $2, $19
- cmpult $19, $2, $3
- addq $3, $9, $9
- addq $20, $9, $20
- cmpult $20, $9, $17
- addq $10, $17, $10
- mulq $25, $7, $18
- umulh $25, $7, $1
- addq $19, $18, $19
- cmpult $19, $18, $0
- addq $0, $1, $1
- addq $20, $1, $20
- cmpult $20, $1, $4
- addq $10, $4, $10
- mulq $28, $5, $2
- umulh $28, $5, $3
- addq $19, $2, $19
- cmpult $19, $2, $9
- addq $9, $3, $3
- addq $20, $3, $20
- cmpult $20, $3, $17
- addq $10, $17, $10
- stq $19, 72($16)
- bis $31, $31, $18
- mulq $6, $21, $0
- umulh $6, $21, $1
- addq $20, $0, $20
- cmpult $20, $0, $4
- addq $4, $1, $1
- addq $10, $1, $10
- cmpult $10, $1, $2
- addq $18, $2, $18
- mulq $8, $27, $9
- umulh $8, $27, $3
- addq $20, $9, $20
- cmpult $20, $9, $17
- addq $17, $3, $3
- addq $10, $3, $10
- cmpult $10, $3, $5
- addq $18, $5, $18
- mulq $23, $24, $19
- umulh $23, $24, $0
- addq $20, $19, $20
- cmpult $20, $19, $4
- addq $4, $0, $0
- addq $10, $0, $10
- cmpult $10, $0, $1
- addq $18, $1, $18
- mulq $25, $22, $2
- umulh $25, $22, $6
- addq $20, $2, $20
- cmpult $20, $2, $9
- addq $9, $6, $6
- addq $10, $6, $10
- cmpult $10, $6, $17
- addq $18, $17, $18
- mulq $28, $7, $3
- umulh $28, $7, $5
- addq $20, $3, $20
- cmpult $20, $3, $19
- addq $19, $5, $5
- addq $10, $5, $10
- cmpult $10, $5, $4
- addq $18, $4, $18
- stq $20, 80($16)
- bis $31, $31, $0
- mulq $8, $21, $1
- umulh $8, $21, $2
- addq $10, $1, $10
- cmpult $10, $1, $9
- addq $9, $2, $2
- addq $18, $2, $18
- cmpult $18, $2, $6
- addq $0, $6, $0
- mulq $23, $27, $17
- umulh $23, $27, $3
- addq $10, $17, $10
- cmpult $10, $17, $19
- addq $19, $3, $3
- addq $18, $3, $18
- cmpult $18, $3, $5
- addq $0, $5, $0
- mulq $25, $24, $4
- umulh $25, $24, $7
- addq $10, $4, $10
- cmpult $10, $4, $20
- addq $20, $7, $7
- addq $18, $7, $18
- cmpult $18, $7, $1
- addq $0, $1, $0
- mulq $28, $22, $9
- umulh $28, $22, $2
- addq $10, $9, $10
- cmpult $10, $9, $6
- addq $6, $2, $2
- addq $18, $2, $18
- cmpult $18, $2, $8
- addq $0, $8, $0
- stq $10, 88($16)
- bis $31, $31, $17
- mulq $23, $21, $19
- umulh $23, $21, $3
- addq $18, $19, $18
- cmpult $18, $19, $5
- addq $5, $3, $3
- addq $0, $3, $0
- cmpult $0, $3, $4
- addq $17, $4, $17
- mulq $25, $27, $20
- umulh $25, $27, $7
- addq $18, $20, $18
- cmpult $18, $20, $1
- addq $1, $7, $7
- addq $0, $7, $0
- cmpult $0, $7, $9
- addq $17, $9, $17
- mulq $28, $24, $6
- umulh $28, $24, $2
- addq $18, $6, $18
- cmpult $18, $6, $8
- addq $8, $2, $2
- addq $0, $2, $0
- cmpult $0, $2, $22
- addq $17, $22, $17
- stq $18, 96($16)
- bis $31, $31, $10
- mulq $25, $21, $19
- umulh $25, $21, $5
- addq $0, $19, $0
- cmpult $0, $19, $3
- addq $3, $5, $5
- addq $17, $5, $17
- cmpult $17, $5, $4
- addq $10, $4, $10
- mulq $28, $27, $23
- umulh $28, $27, $20
- addq $0, $23, $0
- cmpult $0, $23, $1
- addq $1, $20, $20
- addq $17, $20, $17
- cmpult $17, $20, $7
- addq $10, $7, $10
- stq $0, 104($16)
- bis $31, $31, $9
- mulq $28, $21, $6
- umulh $28, $21, $8
- addq $17, $6, $17
- cmpult $17, $6, $2
- addq $2, $8, $8
- addq $10, $8, $10
- cmpult $10, $8, $22
- addq $9, $22, $9
- stq $17, 112($16)
- stq $10, 120($16)
- ldq $9, 0($30)
- ldq $10, 8($30)
- addq $30, 16, $30
- ret $31,($26),1
- .end bn_mul_comba8
- .text
- .align 3
- .globl bn_mul_comba4
- .ent bn_mul_comba4
-bn_mul_comba4:
-bn_mul_comba4..ng:
- .frame $30,0,$26,0
- .prologue 0
-
- ldq $0, 0($17)
- ldq $1, 0($18)
- ldq $2, 8($17)
- ldq $3, 8($18)
- mulq $0, $1, $4
- ldq $5, 16($17)
- ldq $6, 16($18)
- umulh $0, $1, $7
- ldq $8, 24($17)
- ldq $22, 24($18)
- mulq $0, $3, $23
- stq $4, 0($16)
- bis $31, $31, $24
- mulq $2, $1, $28
- bis $31, $31, $25
-	bis $31, $31, $27
- addq $24, $7, $24
- umulh $0, $3, $21
- cmpult $24, $7, $20
- addq $24, $23, $24
- addq $25, $20, $25
- umulh $2, $1, $19
- cmpult $24, $23, $17
- addq $24, $28, $24
- addq $27, $17, $27
- mulq $0, $6, $18
- cmpult $24, $28, $4
- addq $25, $4, $25
- stq $24, 8($16)
- addq $25, $27, $24
- bis $31, $31, $25
- addq $24, $21, $24
- bis $31, $31, $27
- mulq $2, $3, $7
- cmpult $24, $21, $20
- addq $24, $19, $24
- addq $25, $20, $25
- mulq $5, $1, $23
- cmpult $24, $19, $17
- addq $24, $7, $24
- addq $27, $17, $27
- umulh $0, $6, $28
- cmpult $24, $18, $4
- addq $24, $7, $24
- addq $25, $4, $25
- umulh $2, $3, $21
- cmpult $24, $7, $20
- addq $24, $23, $24
- addq $27, $20, $27
- umulh $5, $1, $19
- cmpult $24, $23, $17
- addq $25, $17, $25
- stq $24, 16($16)
- addq $25, $27, $24
- bis $31, $31, $25
- addq $24, $28, $24
- bis $31, $31, $27
- mulq $0, $22, $18
- cmpult $24, $28, $4
- addq $24, $21, $24
- addq $25, $4, $25
- mulq $2, $6, $7
- cmpult $24, $21, $20
- addq $24, $19, $24
- addq $25, $20, $25
- mulq $5, $3, $23
- cmpult $24, $19, $17
- addq $24, $18, $24
- addq $25, $17, $25
- mulq $8, $1, $28
- cmpult $24, $18, $4
- addq $24, $7, $24
- addq $25, $4, $25
- umulh $0, $22, $21
- cmpult $24, $7, $20
- addq $24, $23, $24
- addq $25, $20, $25
- umulh $2, $6, $19
- cmpult $24, $23, $17
- addq $24, $28, $24
- addq $25, $17, $25
- umulh $5, $3, $18
- cmpult $24, $28, $4
- addq $25, $4, $25
- stq $24, 24($16)
- addq $25, $27, $24
- bis $31, $31, $25
- addq $24, $21, $24
- bis $31, $31, $27
- umulh $8, $1, $7
- cmpult $24, $21, $20
- addq $24, $19, $24
- addq $25, $20, $25
- mulq $2, $22, $23
- cmpult $24, $19, $17
- addq $24, $18, $24
- addq $25, $17, $25
- mulq $5, $6, $28
- cmpult $24, $18, $4
- addq $24, $7, $24
- addq $25, $4, $25
- mulq $8, $3, $21
- cmpult $24, $7, $20
- addq $24, $23, $24
- addq $25, $20, $25
- umulh $2, $22, $19
- cmpult $24, $23, $17
- addq $24, $28, $24
- addq $25, $17, $25
- umulh $5, $6, $18
- cmpult $24, $28, $4
- addq $24, $21, $24
- addq $25, $4, $25
- umulh $8, $3, $7
- cmpult $24, $21, $20
- addq $25, $20, $25
- stq $24, 32($16)
- addq $25, $27, $24
- bis $31, $31, $25
- addq $24, $19, $24
- bis $31, $31, $27
- mulq $5, $22, $23
- cmpult $24, $19, $17
- addq $24, $18, $24
- addq $25, $17, $25
- mulq $8, $6, $28
- cmpult $24, $18, $4
- addq $24, $7, $24
- addq $25, $4, $25
- umulh $5, $22, $21
- cmpult $24, $7, $20
- addq $24, $23, $24
- addq $25, $20, $25
- umulh $8, $6, $19
- cmpult $24, $23, $17
- addq $24, $28, $24
- addq $25, $17, $25
- mulq $8, $22, $18
- cmpult $24, $28, $4
- addq $25, $4, $25
- stq $24, 40($16)
- addq $25, $27, $24
- bis $31, $31, $25
- addq $24, $21, $24
- bis $31, $31, $27
- umulh $8, $22, $7
- cmpult $24, $21, $20
- addq $24, $19, $24
- addq $25, $20, $25
- cmpult $24, $19, $23
- addq $24, $18, $24
- addq $25, $23, $25
- cmpult $24, $18, $17
- addq $25, $17, $25
- stq $24, 48($16)
- addq $25, $27, $24
- addq $24, $7, $24
- stq $24, 56($16)
- ret $31,($26),1
- .end bn_mul_comba4
- .text
- .align 3
- .globl bn_sqr_comba4
- .ent bn_sqr_comba4
-bn_sqr_comba4:
-bn_sqr_comba4..ng:
- .frame $30,0,$26,0
- .prologue 0
-
- ldq $0, 0($17)
- ldq $1, 8($17)
- ldq $2, 16($17)
- ldq $3, 24($17)
- bis $31, $31, $6
- mulq $0, $0, $4
- umulh $0, $0, $5
- stq $4, 0($16)
- bis $31, $31, $4
- mulq $0, $1, $7
- umulh $0, $1, $8
- cmplt $7, $31, $22
- cmplt $8, $31, $23
- addq $7, $7, $7
- addq $8, $8, $8
- addq $8, $22, $8
- addq $4, $23, $4
- addq $5, $7, $5
- addq $6, $8, $6
- cmpult $5, $7, $24
- cmpult $6, $8, $25
- addq $6, $24, $6
- addq $4, $25, $4
- stq $5, 8($16)
- bis $31, $31, $5
- mulq $1, $1, $27
- umulh $1, $1, $28
- addq $6, $27, $6
- addq $4, $28, $4
- cmpult $6, $27, $21
- cmpult $4, $28, $20
- addq $4, $21, $4
- addq $5, $20, $5
- mulq $2, $0, $19
- umulh $2, $0, $18
- cmplt $19, $31, $17
- cmplt $18, $31, $22
- addq $19, $19, $19
- addq $18, $18, $18
- addq $18, $17, $18
- addq $5, $22, $5
- addq $6, $19, $6
- addq $4, $18, $4
- cmpult $6, $19, $23
- cmpult $4, $18, $7
- addq $4, $23, $4
- addq $5, $7, $5
- stq $6, 16($16)
- bis $31, $31, $6
- mulq $3, $0, $8
- umulh $3, $0, $24
- cmplt $8, $31, $25
- cmplt $24, $31, $27
- addq $8, $8, $8
- addq $24, $24, $24
- addq $24, $25, $24
- addq $6, $27, $6
- addq $4, $8, $4
- addq $5, $24, $5
- cmpult $4, $8, $28
- cmpult $5, $24, $21
- addq $5, $28, $5
- addq $6, $21, $6
- mulq $2, $1, $20
- umulh $2, $1, $17
- cmplt $20, $31, $22
- cmplt $17, $31, $19
- addq $20, $20, $20
- addq $17, $17, $17
- addq $17, $22, $17
- addq $6, $19, $6
- addq $4, $20, $4
- addq $5, $17, $5
- cmpult $4, $20, $18
- cmpult $5, $17, $23
- addq $5, $18, $5
- addq $6, $23, $6
- stq $4, 24($16)
- bis $31, $31, $4
- mulq $2, $2, $7
- umulh $2, $2, $25
- addq $5, $7, $5
- addq $6, $25, $6
- cmpult $5, $7, $27
- cmpult $6, $25, $8
- addq $6, $27, $6
- addq $4, $8, $4
- mulq $3, $1, $24
- umulh $3, $1, $28
- cmplt $24, $31, $21
- cmplt $28, $31, $22
- addq $24, $24, $24
- addq $28, $28, $28
- addq $28, $21, $28
- addq $4, $22, $4
- addq $5, $24, $5
- addq $6, $28, $6
- cmpult $5, $24, $19
- cmpult $6, $28, $20
- addq $6, $19, $6
- addq $4, $20, $4
- stq $5, 32($16)
- bis $31, $31, $5
- mulq $3, $2, $17
- umulh $3, $2, $18
- cmplt $17, $31, $23
- cmplt $18, $31, $7
- addq $17, $17, $17
- addq $18, $18, $18
- addq $18, $23, $18
- addq $5, $7, $5
- addq $6, $17, $6
- addq $4, $18, $4
- cmpult $6, $17, $25
- cmpult $4, $18, $27
- addq $4, $25, $4
- addq $5, $27, $5
- stq $6, 40($16)
- bis $31, $31, $6
- mulq $3, $3, $8
- umulh $3, $3, $21
- addq $4, $8, $4
- addq $5, $21, $5
- cmpult $4, $8, $22
- cmpult $5, $21, $24
- addq $5, $22, $5
- addq $6, $24, $6
- stq $4, 48($16)
- stq $5, 56($16)
- ret $31,($26),1
- .end bn_sqr_comba4
- .text
- .align 3
- .globl bn_sqr_comba8
- .ent bn_sqr_comba8
-bn_sqr_comba8:
-bn_sqr_comba8..ng:
- .frame $30,0,$26,0
- .prologue 0
-
- ldq $0, 0($17)
- ldq $1, 8($17)
- ldq $2, 16($17)
- ldq $3, 24($17)
- ldq $4, 32($17)
- ldq $5, 40($17)
- ldq $6, 48($17)
- ldq $7, 56($17)
- bis $31, $31, $23
- mulq $0, $0, $8
- umulh $0, $0, $22
- stq $8, 0($16)
- bis $31, $31, $8
- mulq $1, $0, $24
- umulh $1, $0, $25
- cmplt $24, $31, $27
- cmplt $25, $31, $28
- addq $24, $24, $24
- addq $25, $25, $25
- addq $25, $27, $25
- addq $8, $28, $8
- addq $22, $24, $22
- addq $23, $25, $23
- cmpult $22, $24, $21
- cmpult $23, $25, $20
- addq $23, $21, $23
- addq $8, $20, $8
- stq $22, 8($16)
- bis $31, $31, $22
- mulq $1, $1, $19
- umulh $1, $1, $18
- addq $23, $19, $23
- addq $8, $18, $8
- cmpult $23, $19, $17
- cmpult $8, $18, $27
- addq $8, $17, $8
- addq $22, $27, $22
- mulq $2, $0, $28
- umulh $2, $0, $24
- cmplt $28, $31, $25
- cmplt $24, $31, $21
- addq $28, $28, $28
- addq $24, $24, $24
- addq $24, $25, $24
- addq $22, $21, $22
- addq $23, $28, $23
- addq $8, $24, $8
- cmpult $23, $28, $20
- cmpult $8, $24, $19
- addq $8, $20, $8
- addq $22, $19, $22
- stq $23, 16($16)
- bis $31, $31, $23
- mulq $2, $1, $18
- umulh $2, $1, $17
- cmplt $18, $31, $27
- cmplt $17, $31, $25
- addq $18, $18, $18
- addq $17, $17, $17
- addq $17, $27, $17
- addq $23, $25, $23
- addq $8, $18, $8
- addq $22, $17, $22
- cmpult $8, $18, $21
- cmpult $22, $17, $28
- addq $22, $21, $22
- addq $23, $28, $23
- mulq $3, $0, $24
- umulh $3, $0, $20
- cmplt $24, $31, $19
- cmplt $20, $31, $27
- addq $24, $24, $24
- addq $20, $20, $20
- addq $20, $19, $20
- addq $23, $27, $23
- addq $8, $24, $8
- addq $22, $20, $22
- cmpult $8, $24, $25
- cmpult $22, $20, $18
- addq $22, $25, $22
- addq $23, $18, $23
- stq $8, 24($16)
- bis $31, $31, $8
- mulq $2, $2, $17
- umulh $2, $2, $21
- addq $22, $17, $22
- addq $23, $21, $23
- cmpult $22, $17, $28
- cmpult $23, $21, $19
- addq $23, $28, $23
- addq $8, $19, $8
- mulq $3, $1, $27
- umulh $3, $1, $24
- cmplt $27, $31, $20
- cmplt $24, $31, $25
- addq $27, $27, $27
- addq $24, $24, $24
- addq $24, $20, $24
- addq $8, $25, $8
- addq $22, $27, $22
- addq $23, $24, $23
- cmpult $22, $27, $18
- cmpult $23, $24, $17
- addq $23, $18, $23
- addq $8, $17, $8
- mulq $4, $0, $21
- umulh $4, $0, $28
- cmplt $21, $31, $19
- cmplt $28, $31, $20
- addq $21, $21, $21
- addq $28, $28, $28
- addq $28, $19, $28
- addq $8, $20, $8
- addq $22, $21, $22
- addq $23, $28, $23
- cmpult $22, $21, $25
- cmpult $23, $28, $27
- addq $23, $25, $23
- addq $8, $27, $8
- stq $22, 32($16)
- bis $31, $31, $22
- mulq $3, $2, $24
- umulh $3, $2, $18
- cmplt $24, $31, $17
- cmplt $18, $31, $19
- addq $24, $24, $24
- addq $18, $18, $18
- addq $18, $17, $18
- addq $22, $19, $22
- addq $23, $24, $23
- addq $8, $18, $8
- cmpult $23, $24, $20
- cmpult $8, $18, $21
- addq $8, $20, $8
- addq $22, $21, $22
- mulq $4, $1, $28
- umulh $4, $1, $25
- cmplt $28, $31, $27
- cmplt $25, $31, $17
- addq $28, $28, $28
- addq $25, $25, $25
- addq $25, $27, $25
- addq $22, $17, $22
- addq $23, $28, $23
- addq $8, $25, $8
- cmpult $23, $28, $19
- cmpult $8, $25, $24
- addq $8, $19, $8
- addq $22, $24, $22
- mulq $5, $0, $18
- umulh $5, $0, $20
- cmplt $18, $31, $21
- cmplt $20, $31, $27
- addq $18, $18, $18
- addq $20, $20, $20
- addq $20, $21, $20
- addq $22, $27, $22
- addq $23, $18, $23
- addq $8, $20, $8
- cmpult $23, $18, $17
- cmpult $8, $20, $28
- addq $8, $17, $8
- addq $22, $28, $22
- stq $23, 40($16)
- bis $31, $31, $23
- mulq $3, $3, $25
- umulh $3, $3, $19
- addq $8, $25, $8
- addq $22, $19, $22
- cmpult $8, $25, $24
- cmpult $22, $19, $21
- addq $22, $24, $22
- addq $23, $21, $23
- mulq $4, $2, $27
- umulh $4, $2, $18
- cmplt $27, $31, $20
- cmplt $18, $31, $17
- addq $27, $27, $27
- addq $18, $18, $18
- addq $18, $20, $18
- addq $23, $17, $23
- addq $8, $27, $8
- addq $22, $18, $22
- cmpult $8, $27, $28
- cmpult $22, $18, $25
- addq $22, $28, $22
- addq $23, $25, $23
- mulq $5, $1, $19
- umulh $5, $1, $24
- cmplt $19, $31, $21
- cmplt $24, $31, $20
- addq $19, $19, $19
- addq $24, $24, $24
- addq $24, $21, $24
- addq $23, $20, $23
- addq $8, $19, $8
- addq $22, $24, $22
- cmpult $8, $19, $17
- cmpult $22, $24, $27
- addq $22, $17, $22
- addq $23, $27, $23
- mulq $6, $0, $18
- umulh $6, $0, $28
- cmplt $18, $31, $25
- cmplt $28, $31, $21
- addq $18, $18, $18
- addq $28, $28, $28
- addq $28, $25, $28
- addq $23, $21, $23
- addq $8, $18, $8
- addq $22, $28, $22
- cmpult $8, $18, $20
- cmpult $22, $28, $19
- addq $22, $20, $22
- addq $23, $19, $23
- stq $8, 48($16)
- bis $31, $31, $8
- mulq $4, $3, $24
- umulh $4, $3, $17
- cmplt $24, $31, $27
- cmplt $17, $31, $25
- addq $24, $24, $24
- addq $17, $17, $17
- addq $17, $27, $17
- addq $8, $25, $8
- addq $22, $24, $22
- addq $23, $17, $23
- cmpult $22, $24, $21
- cmpult $23, $17, $18
- addq $23, $21, $23
- addq $8, $18, $8
- mulq $5, $2, $28
- umulh $5, $2, $20
- cmplt $28, $31, $19
- cmplt $20, $31, $27
- addq $28, $28, $28
- addq $20, $20, $20
- addq $20, $19, $20
- addq $8, $27, $8
- addq $22, $28, $22
- addq $23, $20, $23
- cmpult $22, $28, $25
- cmpult $23, $20, $24
- addq $23, $25, $23
- addq $8, $24, $8
- mulq $6, $1, $17
- umulh $6, $1, $21
- cmplt $17, $31, $18
- cmplt $21, $31, $19
- addq $17, $17, $17
- addq $21, $21, $21
- addq $21, $18, $21
- addq $8, $19, $8
- addq $22, $17, $22
- addq $23, $21, $23
- cmpult $22, $17, $27
- cmpult $23, $21, $28
- addq $23, $27, $23
- addq $8, $28, $8
- mulq $7, $0, $20
- umulh $7, $0, $25
- cmplt $20, $31, $24
- cmplt $25, $31, $18
- addq $20, $20, $20
- addq $25, $25, $25
- addq $25, $24, $25
- addq $8, $18, $8
- addq $22, $20, $22
- addq $23, $25, $23
- cmpult $22, $20, $19
- cmpult $23, $25, $17
- addq $23, $19, $23
- addq $8, $17, $8
- stq $22, 56($16)
- bis $31, $31, $22
- mulq $4, $4, $21
- umulh $4, $4, $27
- addq $23, $21, $23
- addq $8, $27, $8
- cmpult $23, $21, $28
- cmpult $8, $27, $24
- addq $8, $28, $8
- addq $22, $24, $22
- mulq $5, $3, $18
- umulh $5, $3, $20
- cmplt $18, $31, $25
- cmplt $20, $31, $19
- addq $18, $18, $18
- addq $20, $20, $20
- addq $20, $25, $20
- addq $22, $19, $22
- addq $23, $18, $23
- addq $8, $20, $8
- cmpult $23, $18, $17
- cmpult $8, $20, $21
- addq $8, $17, $8
- addq $22, $21, $22
- mulq $6, $2, $27
- umulh $6, $2, $28
- cmplt $27, $31, $24
- cmplt $28, $31, $25
- addq $27, $27, $27
- addq $28, $28, $28
- addq $28, $24, $28
- addq $22, $25, $22
- addq $23, $27, $23
- addq $8, $28, $8
- cmpult $23, $27, $19
- cmpult $8, $28, $18
- addq $8, $19, $8
- addq $22, $18, $22
- mulq $7, $1, $20
- umulh $7, $1, $17
- cmplt $20, $31, $21
- cmplt $17, $31, $24
- addq $20, $20, $20
- addq $17, $17, $17
- addq $17, $21, $17
- addq $22, $24, $22
- addq $23, $20, $23
- addq $8, $17, $8
- cmpult $23, $20, $25
- cmpult $8, $17, $27
- addq $8, $25, $8
- addq $22, $27, $22
- stq $23, 64($16)
- bis $31, $31, $23
- mulq $5, $4, $28
- umulh $5, $4, $19
- cmplt $28, $31, $18
- cmplt $19, $31, $21
- addq $28, $28, $28
- addq $19, $19, $19
- addq $19, $18, $19
- addq $23, $21, $23
- addq $8, $28, $8
- addq $22, $19, $22
- cmpult $8, $28, $24
- cmpult $22, $19, $20
- addq $22, $24, $22
- addq $23, $20, $23
- mulq $6, $3, $17
- umulh $6, $3, $25
- cmplt $17, $31, $27
- cmplt $25, $31, $18
- addq $17, $17, $17
- addq $25, $25, $25
- addq $25, $27, $25
- addq $23, $18, $23
- addq $8, $17, $8
- addq $22, $25, $22
- cmpult $8, $17, $21
- cmpult $22, $25, $28
- addq $22, $21, $22
- addq $23, $28, $23
- mulq $7, $2, $19
- umulh $7, $2, $24
- cmplt $19, $31, $20
- cmplt $24, $31, $27
- addq $19, $19, $19
- addq $24, $24, $24
- addq $24, $20, $24
- addq $23, $27, $23
- addq $8, $19, $8
- addq $22, $24, $22
- cmpult $8, $19, $18
- cmpult $22, $24, $17
- addq $22, $18, $22
- addq $23, $17, $23
- stq $8, 72($16)
- bis $31, $31, $8
- mulq $5, $5, $25
- umulh $5, $5, $21
- addq $22, $25, $22
- addq $23, $21, $23
- cmpult $22, $25, $28
- cmpult $23, $21, $20
- addq $23, $28, $23
- addq $8, $20, $8
- mulq $6, $4, $27
- umulh $6, $4, $19
- cmplt $27, $31, $24
- cmplt $19, $31, $18
- addq $27, $27, $27
- addq $19, $19, $19
- addq $19, $24, $19
- addq $8, $18, $8
- addq $22, $27, $22
- addq $23, $19, $23
- cmpult $22, $27, $17
- cmpult $23, $19, $25
- addq $23, $17, $23
- addq $8, $25, $8
- mulq $7, $3, $21
- umulh $7, $3, $28
- cmplt $21, $31, $20
- cmplt $28, $31, $24
- addq $21, $21, $21
- addq $28, $28, $28
- addq $28, $20, $28
- addq $8, $24, $8
- addq $22, $21, $22
- addq $23, $28, $23
- cmpult $22, $21, $18
- cmpult $23, $28, $27
- addq $23, $18, $23
- addq $8, $27, $8
- stq $22, 80($16)
- bis $31, $31, $22
- mulq $6, $5, $19
- umulh $6, $5, $17
- cmplt $19, $31, $25
- cmplt $17, $31, $20
- addq $19, $19, $19
- addq $17, $17, $17
- addq $17, $25, $17
- addq $22, $20, $22
- addq $23, $19, $23
- addq $8, $17, $8
- cmpult $23, $19, $24
- cmpult $8, $17, $21
- addq $8, $24, $8
- addq $22, $21, $22
- mulq $7, $4, $28
- umulh $7, $4, $18
- cmplt $28, $31, $27
- cmplt $18, $31, $25
- addq $28, $28, $28
- addq $18, $18, $18
- addq $18, $27, $18
- addq $22, $25, $22
- addq $23, $28, $23
- addq $8, $18, $8
- cmpult $23, $28, $20
- cmpult $8, $18, $19
- addq $8, $20, $8
- addq $22, $19, $22
- stq $23, 88($16)
- bis $31, $31, $23
- mulq $6, $6, $17
- umulh $6, $6, $24
- addq $8, $17, $8
- addq $22, $24, $22
- cmpult $8, $17, $21
- cmpult $22, $24, $27
- addq $22, $21, $22
- addq $23, $27, $23
- mulq $7, $5, $25
- umulh $7, $5, $28
- cmplt $25, $31, $18
- cmplt $28, $31, $20
- addq $25, $25, $25
- addq $28, $28, $28
- addq $28, $18, $28
- addq $23, $20, $23
- addq $8, $25, $8
- addq $22, $28, $22
- cmpult $8, $25, $19
- cmpult $22, $28, $17
- addq $22, $19, $22
- addq $23, $17, $23
- stq $8, 96($16)
- bis $31, $31, $8
- mulq $7, $6, $24
- umulh $7, $6, $21
- cmplt $24, $31, $27
- cmplt $21, $31, $18
- addq $24, $24, $24
- addq $21, $21, $21
- addq $21, $27, $21
- addq $8, $18, $8
- addq $22, $24, $22
- addq $23, $21, $23
- cmpult $22, $24, $20
- cmpult $23, $21, $25
- addq $23, $20, $23
- addq $8, $25, $8
- stq $22, 104($16)
- bis $31, $31, $22
- mulq $7, $7, $28
- umulh $7, $7, $19
- addq $23, $28, $23
- addq $8, $19, $8
- cmpult $23, $28, $17
- cmpult $8, $19, $27
- addq $8, $17, $8
- addq $22, $27, $22
- stq $23, 112($16)
- stq $8, 120($16)
- ret $31,($26),1
- .end bn_sqr_comba8
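The cmplt/addq pairs that dominate the generated bn_sqr_comba8 listing above implement "double a 128-bit partial product and keep the bit that falls off the top": cmplt x, $31, t compares x with the zero register as a signed value, so t receives the top bit of x, and addq x, x, x then shifts x left by one. A minimal C sketch of the idiom (not part of the patch; BN_ULONG is assumed to be the 64-bit word used on Alpha, and the helper name is illustrative):

    typedef unsigned long BN_ULONG;           /* 64-bit word, as on Alpha */

    /* Double the product held in (hi:lo) and return the bit shifted out
     * of hi; the listing above adds that bit into its top accumulator word. */
    static BN_ULONG double_product(BN_ULONG *lo, BN_ULONG *hi)
    {
        BN_ULONG lo_top = *lo >> 63;          /* cmplt lo, $31, t      */
        BN_ULONG hi_top = *hi >> 63;          /* cmplt hi, $31, t      */
        *lo += *lo;                           /* addq  lo, lo, lo      */
        *hi += *hi;                           /* addq  hi, hi, hi      */
        *hi += lo_top;                        /* bit carried out of lo */
        return hi_top;                        /* bit carried out of hi */
    }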
+++ /dev/null
- .text
- .align 3
- .globl bn_mul_comba4
- .ent bn_mul_comba4
-bn_mul_comba4:
-bn_mul_comba4..ng:
- .frame $30,0,$26,0
- .prologue 0
-
- ldq $0, 0($17)
- ldq $1, 0($18)
- ldq $2, 8($17)
- ldq $3, 8($18)
- ldq $4, 16($17)
- ldq $5, 16($18)
- ldq $6, 24($17)
- ldq $7, 24($18)
- bis $31, $31, $23
- mulq $0, $1, $8
- umulh $0, $1, $22
- stq $8, 0($16)
- bis $31, $31, $8
- mulq $0, $3, $24
- umulh $0, $3, $25
- addq $22, $24, $22
- cmpult $22, $24, $27
- addq $27, $25, $25
- addq $23, $25, $23
- cmpult $23, $25, $28
- addq $8, $28, $8
- mulq $2, $1, $21
- umulh $2, $1, $20
- addq $22, $21, $22
- cmpult $22, $21, $19
- addq $19, $20, $20
- addq $23, $20, $23
- cmpult $23, $20, $17
- addq $8, $17, $8
- stq $22, 8($16)
- bis $31, $31, $22
- mulq $2, $3, $18
- umulh $2, $3, $24
- addq $23, $18, $23
- cmpult $23, $18, $27
- addq $27, $24, $24
- addq $8, $24, $8
- cmpult $8, $24, $25
- addq $22, $25, $22
- mulq $0, $5, $28
- umulh $0, $5, $21
- addq $23, $28, $23
- cmpult $23, $28, $19
- addq $19, $21, $21
- addq $8, $21, $8
- cmpult $8, $21, $20
- addq $22, $20, $22
- mulq $4, $1, $17
- umulh $4, $1, $18
- addq $23, $17, $23
- cmpult $23, $17, $27
- addq $27, $18, $18
- addq $8, $18, $8
- cmpult $8, $18, $24
- addq $22, $24, $22
- stq $23, 16($16)
- bis $31, $31, $23
- mulq $0, $7, $25
- umulh $0, $7, $28
- addq $8, $25, $8
- cmpult $8, $25, $19
- addq $19, $28, $28
- addq $22, $28, $22
- cmpult $22, $28, $21
- addq $23, $21, $23
- mulq $2, $5, $20
- umulh $2, $5, $17
- addq $8, $20, $8
- cmpult $8, $20, $27
- addq $27, $17, $17
- addq $22, $17, $22
- cmpult $22, $17, $18
- addq $23, $18, $23
- mulq $4, $3, $24
- umulh $4, $3, $25
- addq $8, $24, $8
- cmpult $8, $24, $19
- addq $19, $25, $25
- addq $22, $25, $22
- cmpult $22, $25, $28
- addq $23, $28, $23
- mulq $6, $1, $21
- umulh $6, $1, $0
- addq $8, $21, $8
- cmpult $8, $21, $20
- addq $20, $0, $0
- addq $22, $0, $22
- cmpult $22, $0, $27
- addq $23, $27, $23
- stq $8, 24($16)
- bis $31, $31, $8
- mulq $2, $7, $17
- umulh $2, $7, $18
- addq $22, $17, $22
- cmpult $22, $17, $24
- addq $24, $18, $18
- addq $23, $18, $23
- cmpult $23, $18, $19
- addq $8, $19, $8
- mulq $4, $5, $25
- umulh $4, $5, $28
- addq $22, $25, $22
- cmpult $22, $25, $21
- addq $21, $28, $28
- addq $23, $28, $23
- cmpult $23, $28, $20
- addq $8, $20, $8
- mulq $6, $3, $0
- umulh $6, $3, $27
- addq $22, $0, $22
- cmpult $22, $0, $1
- addq $1, $27, $27
- addq $23, $27, $23
- cmpult $23, $27, $17
- addq $8, $17, $8
- stq $22, 32($16)
- bis $31, $31, $22
- mulq $4, $7, $24
- umulh $4, $7, $18
- addq $23, $24, $23
- cmpult $23, $24, $19
- addq $19, $18, $18
- addq $8, $18, $8
- cmpult $8, $18, $2
- addq $22, $2, $22
- mulq $6, $5, $25
- umulh $6, $5, $21
- addq $23, $25, $23
- cmpult $23, $25, $28
- addq $28, $21, $21
- addq $8, $21, $8
- cmpult $8, $21, $20
- addq $22, $20, $22
- stq $23, 40($16)
- bis $31, $31, $23
- mulq $6, $7, $0
- umulh $6, $7, $1
- addq $8, $0, $8
- cmpult $8, $0, $27
- addq $27, $1, $1
- addq $22, $1, $22
- cmpult $22, $1, $17
- addq $23, $17, $23
- stq $8, 48($16)
- stq $22, 56($16)
- ret $31,($26),1
- .end bn_mul_comba4
- .text
- .align 3
- .globl bn_mul_comba8
- .ent bn_mul_comba8
-bn_mul_comba8:
-bn_mul_comba8..ng:
- .frame $30,0,$26,0
- .prologue 0
-
- stq $9, 8($30)
- stq $10, 16($30)
- ldq $0, 0($17)
- ldq $1, 0($18)
- ldq $2, 8($17)
- ldq $3, 8($18)
- ldq $4, 16($17)
- ldq $5, 16($18)
- ldq $6, 24($17)
- ldq $7, 24($18)
- ldq $8, 8($17)
- ldq $22, 8($18)
- ldq $23, 8($17)
- ldq $24, 8($18)
- ldq $25, 8($17)
- ldq $27, 8($18)
- ldq $28, 8($17)
- ldq $21, 8($18)
- bis $31, $31, $9
- mulq $0, $1, $20
- umulh $0, $1, $19
- stq $20, 0($16)
- bis $31, $31, $20
- mulq $0, $3, $10
- umulh $0, $3, $17
- addq $19, $10, $19
- cmpult $19, $10, $18
- addq $18, $17, $17
- addq $9, $17, $9
- cmpult $9, $17, $10
- addq $20, $10, $20
- mulq $2, $1, $18
- umulh $2, $1, $17
- addq $19, $18, $19
- cmpult $19, $18, $10
- addq $10, $17, $17
- addq $9, $17, $9
- cmpult $9, $17, $18
- addq $20, $18, $20
- stq $19, 8($16)
- bis $31, $31, $19
- mulq $0, $5, $10
- umulh $0, $5, $17
- addq $9, $10, $9
- cmpult $9, $10, $18
- addq $18, $17, $17
- addq $20, $17, $20
- cmpult $20, $17, $10
- addq $19, $10, $19
- mulq $2, $3, $18
- umulh $2, $3, $17
- addq $9, $18, $9
- cmpult $9, $18, $10
- addq $10, $17, $17
- addq $20, $17, $20
- cmpult $20, $17, $18
- addq $19, $18, $19
- mulq $4, $1, $10
- umulh $4, $1, $17
- addq $9, $10, $9
- cmpult $9, $10, $18
- addq $18, $17, $17
- addq $20, $17, $20
- cmpult $20, $17, $10
- addq $19, $10, $19
- stq $9, 16($16)
- bis $31, $31, $9
- mulq $0, $7, $18
- umulh $0, $7, $17
- addq $20, $18, $20
- cmpult $20, $18, $10
- addq $10, $17, $17
- addq $19, $17, $19
- cmpult $19, $17, $18
- addq $9, $18, $9
- mulq $2, $5, $10
- umulh $2, $5, $17
- addq $20, $10, $20
- cmpult $20, $10, $18
- addq $18, $17, $17
- addq $19, $17, $19
- cmpult $19, $17, $10
- addq $9, $10, $9
- mulq $4, $3, $18
- umulh $4, $3, $17
- addq $20, $18, $20
- cmpult $20, $18, $10
- addq $10, $17, $17
- addq $19, $17, $19
- cmpult $19, $17, $18
- addq $9, $18, $9
- mulq $6, $1, $10
- umulh $6, $1, $17
- addq $20, $10, $20
- cmpult $20, $10, $18
- addq $18, $17, $17
- addq $19, $17, $19
- cmpult $19, $17, $10
- addq $9, $10, $9
- stq $20, 24($16)
- bis $31, $31, $20
- mulq $0, $22, $18
- umulh $0, $22, $17
- addq $19, $18, $19
- cmpult $19, $18, $10
- addq $10, $17, $17
- addq $9, $17, $9
- cmpult $9, $17, $18
- addq $20, $18, $20
- mulq $2, $7, $10
- umulh $2, $7, $17
- addq $19, $10, $19
- cmpult $19, $10, $18
- addq $18, $17, $17
- addq $9, $17, $9
- cmpult $9, $17, $10
- addq $20, $10, $20
- mulq $4, $5, $18
- umulh $4, $5, $17
- addq $19, $18, $19
- cmpult $19, $18, $10
- addq $10, $17, $17
- addq $9, $17, $9
- cmpult $9, $17, $18
- addq $20, $18, $20
- mulq $6, $3, $10
- umulh $6, $3, $17
- addq $19, $10, $19
- cmpult $19, $10, $18
- addq $18, $17, $17
- addq $9, $17, $9
- cmpult $9, $17, $10
- addq $20, $10, $20
- mulq $8, $1, $18
- umulh $8, $1, $17
- addq $19, $18, $19
- cmpult $19, $18, $10
- addq $10, $17, $17
- addq $9, $17, $9
- cmpult $9, $17, $18
- addq $20, $18, $20
- stq $19, 32($16)
- bis $31, $31, $19
- mulq $0, $24, $10
- umulh $0, $24, $17
- addq $9, $10, $9
- cmpult $9, $10, $18
- addq $18, $17, $17
- addq $20, $17, $20
- cmpult $20, $17, $10
- addq $19, $10, $19
- mulq $2, $22, $18
- umulh $2, $22, $17
- addq $9, $18, $9
- cmpult $9, $18, $10
- addq $10, $17, $17
- addq $20, $17, $20
- cmpult $20, $17, $18
- addq $19, $18, $19
- mulq $4, $7, $10
- umulh $4, $7, $17
- addq $9, $10, $9
- cmpult $9, $10, $18
- addq $18, $17, $17
- addq $20, $17, $20
- cmpult $20, $17, $10
- addq $19, $10, $19
- mulq $6, $5, $18
- umulh $6, $5, $17
- addq $9, $18, $9
- cmpult $9, $18, $10
- addq $10, $17, $17
- addq $20, $17, $20
- cmpult $20, $17, $18
- addq $19, $18, $19
- mulq $8, $3, $10
- umulh $8, $3, $17
- addq $9, $10, $9
- cmpult $9, $10, $18
- addq $18, $17, $17
- addq $20, $17, $20
- cmpult $20, $17, $10
- addq $19, $10, $19
- mulq $23, $1, $18
- umulh $23, $1, $17
- addq $9, $18, $9
- cmpult $9, $18, $10
- addq $10, $17, $17
- addq $20, $17, $20
- cmpult $20, $17, $18
- addq $19, $18, $19
- stq $9, 40($16)
- bis $31, $31, $9
- mulq $0, $27, $10
- umulh $0, $27, $17
- addq $20, $10, $20
- cmpult $20, $10, $18
- addq $18, $17, $17
- addq $19, $17, $19
- cmpult $19, $17, $10
- addq $9, $10, $9
- mulq $2, $24, $18
- umulh $2, $24, $17
- addq $20, $18, $20
- cmpult $20, $18, $10
- addq $10, $17, $17
- addq $19, $17, $19
- cmpult $19, $17, $18
- addq $9, $18, $9
- mulq $4, $22, $10
- umulh $4, $22, $17
- addq $20, $10, $20
- cmpult $20, $10, $18
- addq $18, $17, $17
- addq $19, $17, $19
- cmpult $19, $17, $10
- addq $9, $10, $9
- mulq $6, $7, $18
- umulh $6, $7, $17
- addq $20, $18, $20
- cmpult $20, $18, $10
- addq $10, $17, $17
- addq $19, $17, $19
- cmpult $19, $17, $18
- addq $9, $18, $9
- mulq $8, $5, $10
- umulh $8, $5, $17
- addq $20, $10, $20
- cmpult $20, $10, $18
- addq $18, $17, $17
- addq $19, $17, $19
- cmpult $19, $17, $10
- addq $9, $10, $9
- mulq $23, $3, $18
- umulh $23, $3, $17
- addq $20, $18, $20
- cmpult $20, $18, $10
- addq $10, $17, $17
- addq $19, $17, $19
- cmpult $19, $17, $18
- addq $9, $18, $9
- mulq $25, $1, $10
- umulh $25, $1, $17
- addq $20, $10, $20
- cmpult $20, $10, $18
- addq $18, $17, $17
- addq $19, $17, $19
- cmpult $19, $17, $10
- addq $9, $10, $9
- stq $20, 48($16)
- bis $31, $31, $20
- mulq $0, $21, $18
- umulh $0, $21, $17
- addq $19, $18, $19
- cmpult $19, $18, $10
- addq $10, $17, $17
- addq $9, $17, $9
- cmpult $9, $17, $18
- addq $20, $18, $20
- mulq $2, $27, $10
- umulh $2, $27, $17
- addq $19, $10, $19
- cmpult $19, $10, $18
- addq $18, $17, $17
- addq $9, $17, $9
- cmpult $9, $17, $0
- addq $20, $0, $20
- mulq $4, $24, $10
- umulh $4, $24, $18
- addq $19, $10, $19
- cmpult $19, $10, $17
- addq $17, $18, $18
- addq $9, $18, $9
- cmpult $9, $18, $0
- addq $20, $0, $20
- mulq $6, $22, $10
- umulh $6, $22, $17
- addq $19, $10, $19
- cmpult $19, $10, $18
- addq $18, $17, $17
- addq $9, $17, $9
- cmpult $9, $17, $0
- addq $20, $0, $20
- mulq $8, $7, $10
- umulh $8, $7, $18
- addq $19, $10, $19
- cmpult $19, $10, $17
- addq $17, $18, $18
- addq $9, $18, $9
- cmpult $9, $18, $0
- addq $20, $0, $20
- mulq $23, $5, $10
- umulh $23, $5, $17
- addq $19, $10, $19
- cmpult $19, $10, $18
- addq $18, $17, $17
- addq $9, $17, $9
- cmpult $9, $17, $0
- addq $20, $0, $20
- mulq $25, $3, $10
- umulh $25, $3, $18
- addq $19, $10, $19
- cmpult $19, $10, $17
- addq $17, $18, $18
- addq $9, $18, $9
- cmpult $9, $18, $0
- addq $20, $0, $20
- mulq $28, $1, $10
- umulh $28, $1, $17
- addq $19, $10, $19
- cmpult $19, $10, $18
- addq $18, $17, $17
- addq $9, $17, $9
- cmpult $9, $17, $0
- addq $20, $0, $20
- stq $19, 56($16)
- bis $31, $31, $19
- mulq $2, $21, $10
- umulh $2, $21, $18
- addq $9, $10, $9
- cmpult $9, $10, $17
- addq $17, $18, $18
- addq $20, $18, $20
- cmpult $20, $18, $0
- addq $19, $0, $19
- mulq $4, $27, $1
- umulh $4, $27, $10
- addq $9, $1, $9
- cmpult $9, $1, $17
- addq $17, $10, $10
- addq $20, $10, $20
- cmpult $20, $10, $18
- addq $19, $18, $19
- mulq $6, $24, $0
- umulh $6, $24, $2
- addq $9, $0, $9
- cmpult $9, $0, $1
- addq $1, $2, $2
- addq $20, $2, $20
- cmpult $20, $2, $17
- addq $19, $17, $19
- mulq $8, $22, $10
- umulh $8, $22, $18
- addq $9, $10, $9
- cmpult $9, $10, $0
- addq $0, $18, $18
- addq $20, $18, $20
- cmpult $20, $18, $1
- addq $19, $1, $19
- mulq $23, $7, $2
- umulh $23, $7, $17
- addq $9, $2, $9
- cmpult $9, $2, $10
- addq $10, $17, $17
- addq $20, $17, $20
- cmpult $20, $17, $0
- addq $19, $0, $19
- mulq $25, $5, $18
- umulh $25, $5, $1
- addq $9, $18, $9
- cmpult $9, $18, $2
- addq $2, $1, $1
- addq $20, $1, $20
- cmpult $20, $1, $10
- addq $19, $10, $19
- mulq $28, $3, $17
- umulh $28, $3, $0
- addq $9, $17, $9
- cmpult $9, $17, $18
- addq $18, $0, $0
- addq $20, $0, $20
- cmpult $20, $0, $2
- addq $19, $2, $19
- stq $9, 64($16)
- bis $31, $31, $9
- mulq $4, $21, $1
- umulh $4, $21, $10
- addq $20, $1, $20
- cmpult $20, $1, $17
- addq $17, $10, $10
- addq $19, $10, $19
- cmpult $19, $10, $18
- addq $9, $18, $9
- mulq $6, $27, $0
- umulh $6, $27, $2
- addq $20, $0, $20
- cmpult $20, $0, $3
- addq $3, $2, $2
- addq $19, $2, $19
- cmpult $19, $2, $1
- addq $9, $1, $9
- mulq $8, $24, $17
- umulh $8, $24, $10
- addq $20, $17, $20
- cmpult $20, $17, $18
- addq $18, $10, $10
- addq $19, $10, $19
- cmpult $19, $10, $4
- addq $9, $4, $9
- mulq $23, $22, $0
- umulh $23, $22, $3
- addq $20, $0, $20
- cmpult $20, $0, $2
- addq $2, $3, $3
- addq $19, $3, $19
- cmpult $19, $3, $1
- addq $9, $1, $9
- mulq $25, $7, $17
- umulh $25, $7, $18
- addq $20, $17, $20
- cmpult $20, $17, $10
- addq $10, $18, $18
- addq $19, $18, $19
- cmpult $19, $18, $4
- addq $9, $4, $9
- mulq $28, $5, $0
- umulh $28, $5, $2
- addq $20, $0, $20
- cmpult $20, $0, $3
- addq $3, $2, $2
- addq $19, $2, $19
- cmpult $19, $2, $1
- addq $9, $1, $9
- stq $20, 72($16)
- bis $31, $31, $20
- mulq $6, $21, $17
- umulh $6, $21, $10
- addq $19, $17, $19
- cmpult $19, $17, $18
- addq $18, $10, $10
- addq $9, $10, $9
- cmpult $9, $10, $4
- addq $20, $4, $20
- mulq $8, $27, $0
- umulh $8, $27, $3
- addq $19, $0, $19
- cmpult $19, $0, $2
- addq $2, $3, $3
- addq $9, $3, $9
- cmpult $9, $3, $1
- addq $20, $1, $20
- mulq $23, $24, $5
- umulh $23, $24, $17
- addq $19, $5, $19
- cmpult $19, $5, $18
- addq $18, $17, $17
- addq $9, $17, $9
- cmpult $9, $17, $10
- addq $20, $10, $20
- mulq $25, $22, $4
- umulh $25, $22, $6
- addq $19, $4, $19
- cmpult $19, $4, $0
- addq $0, $6, $6
- addq $9, $6, $9
- cmpult $9, $6, $2
- addq $20, $2, $20
- mulq $28, $7, $3
- umulh $28, $7, $1
- addq $19, $3, $19
- cmpult $19, $3, $5
- addq $5, $1, $1
- addq $9, $1, $9
- cmpult $9, $1, $18
- addq $20, $18, $20
- stq $19, 80($16)
- bis $31, $31, $19
- mulq $8, $21, $17
- umulh $8, $21, $10
- addq $9, $17, $9
- cmpult $9, $17, $4
- addq $4, $10, $10
- addq $20, $10, $20
- cmpult $20, $10, $0
- addq $19, $0, $19
- mulq $23, $27, $6
- umulh $23, $27, $2
- addq $9, $6, $9
- cmpult $9, $6, $3
- addq $3, $2, $2
- addq $20, $2, $20
- cmpult $20, $2, $5
- addq $19, $5, $19
- mulq $25, $24, $1
- umulh $25, $24, $18
- addq $9, $1, $9
- cmpult $9, $1, $7
- addq $7, $18, $18
- addq $20, $18, $20
- cmpult $20, $18, $17
- addq $19, $17, $19
- mulq $28, $22, $4
- umulh $28, $22, $10
- addq $9, $4, $9
- cmpult $9, $4, $0
- addq $0, $10, $10
- addq $20, $10, $20
- cmpult $20, $10, $8
- addq $19, $8, $19
- stq $9, 88($16)
- bis $31, $31, $9
- mulq $23, $21, $6
- umulh $23, $21, $3
- addq $20, $6, $20
- cmpult $20, $6, $2
- addq $2, $3, $3
- addq $19, $3, $19
- cmpult $19, $3, $5
- addq $9, $5, $9
- mulq $25, $27, $1
- umulh $25, $27, $7
- addq $20, $1, $20
- cmpult $20, $1, $18
- addq $18, $7, $7
- addq $19, $7, $19
- cmpult $19, $7, $17
- addq $9, $17, $9
- mulq $28, $24, $4
- umulh $28, $24, $0
- addq $20, $4, $20
- cmpult $20, $4, $10
- addq $10, $0, $0
- addq $19, $0, $19
- cmpult $19, $0, $8
- addq $9, $8, $9
- stq $20, 96($16)
- bis $31, $31, $20
- mulq $25, $21, $22
- umulh $25, $21, $6
- addq $19, $22, $19
- cmpult $19, $22, $2
- addq $2, $6, $6
- addq $9, $6, $9
- cmpult $9, $6, $3
- addq $20, $3, $20
- mulq $28, $27, $5
- umulh $28, $27, $23
- addq $19, $5, $19
- cmpult $19, $5, $1
- addq $1, $23, $23
- addq $9, $23, $9
- cmpult $9, $23, $18
- addq $20, $18, $20
- stq $19, 104($16)
- bis $31, $31, $19
- mulq $28, $21, $7
- umulh $28, $21, $17
- addq $9, $7, $9
- cmpult $9, $7, $4
- addq $4, $17, $17
- addq $20, $17, $20
- cmpult $20, $17, $10
- addq $19, $10, $19
- stq $9, 112($16)
- stq $20, 120($16)
- ldq $9, 8($30)
- ldq $10, 16($30)
- ret $31,($26),1
- .end bn_mul_comba8
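The two deleted routines above, bn_mul_comba4 and bn_mul_comba8, are generated comba (column-wise) multipliers: every 64-bit partial product a[i]*b[j] is accumulated into a three-word carry chain, and one result word is stored per column. A minimal C sketch of the 4-word case (not part of the patch; BN_ULONG, the GCC/Clang unsigned __int128 extension, and the function names are assumptions made for illustration):

    typedef unsigned long BN_ULONG;                      /* 64-bit word */

    /* Add a*b into the three-word accumulator (c0,c1,c2). */
    static void mul_add_c(BN_ULONG a, BN_ULONG b,
                          BN_ULONG *c0, BN_ULONG *c1, BN_ULONG *c2)
    {
        unsigned __int128 t = (unsigned __int128)a * b;  /* mulq + umulh */
        BN_ULONG lo = (BN_ULONG)t, hi = (BN_ULONG)(t >> 64);

        *c0 += lo; hi += (*c0 < lo);       /* carry out of c0; hi cannot overflow */
        *c1 += hi; *c2 += (*c1 < hi);      /* carry out of c1 into c2 */
    }

    /* r[0..7] = a[0..3] * b[0..3], computed one output column at a time. */
    void mul_comba4(BN_ULONG *r, const BN_ULONG *a, const BN_ULONG *b)
    {
        BN_ULONG c0 = 0, c1 = 0, c2 = 0;

        for (int k = 0; k < 7; k++) {
            for (int i = 0; i < 4; i++)
                if (k - i >= 0 && k - i < 4)
                    mul_add_c(a[i], b[k - i], &c0, &c1, &c2);
            r[k] = c0; c0 = c1; c1 = c2; c2 = 0;   /* store the column, shift the accumulator */
        }
        r[7] = c0;                                 /* top word of the 8-word result */
    }

bn_mul_comba8 follows the same pattern over 8-word inputs, and bn_sqr_comba8 combines it with the doubling idiom sketched earlier so that each symmetric cross product is computed only once.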
require("x86/sub.pl");
require("x86/comba.pl");
-&asm_init($ARGV[0],"bn-586.pl");
+&asm_init($ARGV[0],$0);
&bn_mul_add_words("bn_mul_add_words");
&bn_mul_words("bn_mul_words");
+++ /dev/null
-begin 640 x86nt32.obj
-end
+++ /dev/null
-begin 640 x86w16.obj
-end
+++ /dev/null
-begin 640 x86w32.obj
-end