author    Parménides GV <parmegv@sdf.org>    2014-06-13 12:21:38 +0200
committer Parménides GV <parmegv@sdf.org>    2014-06-13 12:21:38 +0200
commit    d09435a9d2adb0d0cafa0b1f9f6cfe326346cbc6 (patch)
tree      946278cd836aa6af639675f4eb5ec7378d783dd8 /app/openssl/crypto/bf
parent    5d47fde84a03acb1201f8650c55dc0cd981df679 (diff)
parent    3a71bc9e4aa4296f460e2e3c55de74c9852477ad (diff)
Merge branch 'develop' into release-0.5.2
Diffstat (limited to 'app/openssl/crypto/bf')
-rw-r--r--  app/openssl/crypto/bf/asm/bf-586.S  896
-rw-r--r--  app/openssl/crypto/bf/bf_skey.c       8
-rw-r--r--  app/openssl/crypto/bf/blowfish.h      4
3 files changed, 907 insertions, 1 deletions
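
The new bf-586.S below provides i386 assembly implementations of BF_encrypt, BF_decrypt and BF_cbc_encrypt; callers keep using the portable API declared in blowfish.h. For orientation only, here is a minimal usage sketch of that API, assuming the standard OpenSSL Blowfish interface (BF_set_key, BF_cbc_encrypt, BF_ENCRYPT); the key bytes and buffers are illustrative and not part of this commit.

/*
 * Minimal usage sketch, assuming the stock <openssl/blowfish.h> API.
 * Key material and plaintext below are placeholders.
 */
#include <string.h>
#include <openssl/blowfish.h>

int main(void)
{
    unsigned char key_bytes[] = "0123456789abcdef";   /* 16-byte key          */
    unsigned char iv[8] = {0};                        /* CBC chaining value   */
    unsigned char in[16], out[16];
    BF_KEY key;

    memset(in, 'A', sizeof(in));                      /* two 8-byte blocks    */
    BF_set_key(&key, 16, key_bytes);
    /* length here is a multiple of the 8-byte block size; the jump tables in
     * the assembly only come into play for a trailing partial block. */
    BF_cbc_encrypt(in, out, sizeof(in), &key, iv, BF_ENCRYPT);
    return 0;
}
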
diff --git a/app/openssl/crypto/bf/asm/bf-586.S b/app/openssl/crypto/bf/asm/bf-586.S
new file mode 100644
index 00000000..aa718d40
--- /dev/null
+++ b/app/openssl/crypto/bf/asm/bf-586.S
@@ -0,0 +1,896 @@
+.file "bf-586.s"
+.text
+.globl BF_encrypt
+.type BF_encrypt,@function
+.align 16
+BF_encrypt:
+.L_BF_encrypt_begin:
+
+ pushl %ebp
+ pushl %ebx
+ movl 12(%esp),%ebx
+ movl 16(%esp),%ebp
+ pushl %esi
+ pushl %edi
+
+ movl (%ebx),%edi
+ movl 4(%ebx),%esi
+ xorl %eax,%eax
+ movl (%ebp),%ebx
+ xorl %ecx,%ecx
+ xorl %ebx,%edi
+
+
+ movl 4(%ebp),%edx
+ movl %edi,%ebx
+ xorl %edx,%esi
+ shrl $16,%ebx
+ movl %edi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%esi
+
+
+ movl 8(%ebp),%edx
+ movl %esi,%ebx
+ xorl %edx,%edi
+ shrl $16,%ebx
+ movl %esi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%edi
+
+
+ movl 12(%ebp),%edx
+ movl %edi,%ebx
+ xorl %edx,%esi
+ shrl $16,%ebx
+ movl %edi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%esi
+
+
+ movl 16(%ebp),%edx
+ movl %esi,%ebx
+ xorl %edx,%edi
+ shrl $16,%ebx
+ movl %esi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%edi
+
+
+ movl 20(%ebp),%edx
+ movl %edi,%ebx
+ xorl %edx,%esi
+ shrl $16,%ebx
+ movl %edi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%esi
+
+
+ movl 24(%ebp),%edx
+ movl %esi,%ebx
+ xorl %edx,%edi
+ shrl $16,%ebx
+ movl %esi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%edi
+
+
+ movl 28(%ebp),%edx
+ movl %edi,%ebx
+ xorl %edx,%esi
+ shrl $16,%ebx
+ movl %edi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%esi
+
+
+ movl 32(%ebp),%edx
+ movl %esi,%ebx
+ xorl %edx,%edi
+ shrl $16,%ebx
+ movl %esi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%edi
+
+
+ movl 36(%ebp),%edx
+ movl %edi,%ebx
+ xorl %edx,%esi
+ shrl $16,%ebx
+ movl %edi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%esi
+
+
+ movl 40(%ebp),%edx
+ movl %esi,%ebx
+ xorl %edx,%edi
+ shrl $16,%ebx
+ movl %esi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%edi
+
+
+ movl 44(%ebp),%edx
+ movl %edi,%ebx
+ xorl %edx,%esi
+ shrl $16,%ebx
+ movl %edi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%esi
+
+
+ movl 48(%ebp),%edx
+ movl %esi,%ebx
+ xorl %edx,%edi
+ shrl $16,%ebx
+ movl %esi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%edi
+
+
+ movl 52(%ebp),%edx
+ movl %edi,%ebx
+ xorl %edx,%esi
+ shrl $16,%ebx
+ movl %edi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%esi
+
+
+ movl 56(%ebp),%edx
+ movl %esi,%ebx
+ xorl %edx,%edi
+ shrl $16,%ebx
+ movl %esi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%edi
+
+
+ movl 60(%ebp),%edx
+ movl %edi,%ebx
+ xorl %edx,%esi
+ shrl $16,%ebx
+ movl %edi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%esi
+
+
+ movl 64(%ebp),%edx
+ movl %esi,%ebx
+ xorl %edx,%edi
+ shrl $16,%ebx
+ movl %esi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+
+ movl 20(%esp),%eax
+ xorl %ebx,%edi
+ movl 68(%ebp),%edx
+ xorl %edx,%esi
+ movl %edi,4(%eax)
+ movl %esi,(%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size BF_encrypt,.-.L_BF_encrypt_begin
+.globl BF_decrypt
+.type BF_decrypt,@function
+.align 16
+BF_decrypt:
+.L_BF_decrypt_begin:
+
+ pushl %ebp
+ pushl %ebx
+ movl 12(%esp),%ebx
+ movl 16(%esp),%ebp
+ pushl %esi
+ pushl %edi
+
+ movl (%ebx),%edi
+ movl 4(%ebx),%esi
+ xorl %eax,%eax
+ movl 68(%ebp),%ebx
+ xorl %ecx,%ecx
+ xorl %ebx,%edi
+
+
+ movl 64(%ebp),%edx
+ movl %edi,%ebx
+ xorl %edx,%esi
+ shrl $16,%ebx
+ movl %edi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%esi
+
+
+ movl 60(%ebp),%edx
+ movl %esi,%ebx
+ xorl %edx,%edi
+ shrl $16,%ebx
+ movl %esi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%edi
+
+
+ movl 56(%ebp),%edx
+ movl %edi,%ebx
+ xorl %edx,%esi
+ shrl $16,%ebx
+ movl %edi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%esi
+
+
+ movl 52(%ebp),%edx
+ movl %esi,%ebx
+ xorl %edx,%edi
+ shrl $16,%ebx
+ movl %esi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%edi
+
+
+ movl 48(%ebp),%edx
+ movl %edi,%ebx
+ xorl %edx,%esi
+ shrl $16,%ebx
+ movl %edi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%esi
+
+
+ movl 44(%ebp),%edx
+ movl %esi,%ebx
+ xorl %edx,%edi
+ shrl $16,%ebx
+ movl %esi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%edi
+
+
+ movl 40(%ebp),%edx
+ movl %edi,%ebx
+ xorl %edx,%esi
+ shrl $16,%ebx
+ movl %edi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%esi
+
+
+ movl 36(%ebp),%edx
+ movl %esi,%ebx
+ xorl %edx,%edi
+ shrl $16,%ebx
+ movl %esi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%edi
+
+
+ movl 32(%ebp),%edx
+ movl %edi,%ebx
+ xorl %edx,%esi
+ shrl $16,%ebx
+ movl %edi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%esi
+
+
+ movl 28(%ebp),%edx
+ movl %esi,%ebx
+ xorl %edx,%edi
+ shrl $16,%ebx
+ movl %esi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%edi
+
+
+ movl 24(%ebp),%edx
+ movl %edi,%ebx
+ xorl %edx,%esi
+ shrl $16,%ebx
+ movl %edi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%esi
+
+
+ movl 20(%ebp),%edx
+ movl %esi,%ebx
+ xorl %edx,%edi
+ shrl $16,%ebx
+ movl %esi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%edi
+
+
+ movl 16(%ebp),%edx
+ movl %edi,%ebx
+ xorl %edx,%esi
+ shrl $16,%ebx
+ movl %edi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%esi
+
+
+ movl 12(%ebp),%edx
+ movl %esi,%ebx
+ xorl %edx,%edi
+ shrl $16,%ebx
+ movl %esi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%edi
+
+
+ movl 8(%ebp),%edx
+ movl %edi,%ebx
+ xorl %edx,%esi
+ shrl $16,%ebx
+ movl %edi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+ xorl %eax,%eax
+ xorl %ebx,%esi
+
+
+ movl 4(%ebp),%edx
+ movl %esi,%ebx
+ xorl %edx,%edi
+ shrl $16,%ebx
+ movl %esi,%edx
+ movb %bh,%al
+ andl $255,%ebx
+ movb %dh,%cl
+ andl $255,%edx
+ movl 72(%ebp,%eax,4),%eax
+ movl 1096(%ebp,%ebx,4),%ebx
+ addl %eax,%ebx
+ movl 2120(%ebp,%ecx,4),%eax
+ xorl %eax,%ebx
+ movl 3144(%ebp,%edx,4),%edx
+ addl %edx,%ebx
+
+ movl 20(%esp),%eax
+ xorl %ebx,%edi
+ movl (%ebp),%edx
+ xorl %edx,%esi
+ movl %edi,4(%eax)
+ movl %esi,(%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size BF_decrypt,.-.L_BF_decrypt_begin
+.globl BF_cbc_encrypt
+.type BF_cbc_encrypt,@function
+.align 16
+BF_cbc_encrypt:
+.L_BF_cbc_encrypt_begin:
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%ebp
+
+ movl 36(%esp),%ebx
+ movl (%ebx),%esi
+ movl 4(%ebx),%edi
+ pushl %edi
+ pushl %esi
+ pushl %edi
+ pushl %esi
+ movl %esp,%ebx
+ movl 36(%esp),%esi
+ movl 40(%esp),%edi
+
+ movl 56(%esp),%ecx
+
+ movl 48(%esp),%eax
+ pushl %eax
+ pushl %ebx
+ cmpl $0,%ecx
+ jz .L000decrypt
+ andl $4294967288,%ebp
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ jz .L001encrypt_finish
+.L002encrypt_loop:
+ movl (%esi),%ecx
+ movl 4(%esi),%edx
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call .L_BF_encrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz .L002encrypt_loop
+.L001encrypt_finish:
+ movl 52(%esp),%ebp
+ andl $7,%ebp
+ jz .L003finish
+ call .L004PIC_point
+.L004PIC_point:
+ popl %edx
+ leal .L005cbc_enc_jmp_table-.L004PIC_point(%edx),%ecx
+ movl (%ecx,%ebp,4),%ebp
+ addl %edx,%ebp
+ xorl %ecx,%ecx
+ xorl %edx,%edx
+ jmp *%ebp
+.L006ej7:
+ movb 6(%esi),%dh
+ shll $8,%edx
+.L007ej6:
+ movb 5(%esi),%dh
+.L008ej5:
+ movb 4(%esi),%dl
+.L009ej4:
+ movl (%esi),%ecx
+ jmp .L010ejend
+.L011ej3:
+ movb 2(%esi),%ch
+ shll $8,%ecx
+.L012ej2:
+ movb 1(%esi),%ch
+.L013ej1:
+ movb (%esi),%cl
+.L010ejend:
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call .L_BF_encrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ jmp .L003finish
+.L000decrypt:
+ andl $4294967288,%ebp
+ movl 16(%esp),%eax
+ movl 20(%esp),%ebx
+ jz .L014decrypt_finish
+.L015decrypt_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call .L_BF_decrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl 16(%esp),%ecx
+ movl 20(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %ecx,(%edi)
+ movl %edx,4(%edi)
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz .L015decrypt_loop
+.L014decrypt_finish:
+ movl 52(%esp),%ebp
+ andl $7,%ebp
+ jz .L003finish
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call .L_BF_decrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl 16(%esp),%ecx
+ movl 20(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+.L016dj7:
+ rorl $16,%edx
+ movb %dl,6(%edi)
+ shrl $16,%edx
+.L017dj6:
+ movb %dh,5(%edi)
+.L018dj5:
+ movb %dl,4(%edi)
+.L019dj4:
+ movl %ecx,(%edi)
+ jmp .L020djend
+.L021dj3:
+ rorl $16,%ecx
+ movb %cl,2(%edi)
+ shll $16,%ecx
+.L022dj2:
+ movb %ch,1(%esi)
+.L023dj1:
+ movb %cl,(%esi)
+.L020djend:
+ jmp .L003finish
+.L003finish:
+ movl 60(%esp),%ecx
+ addl $24,%esp
+ movl %eax,(%ecx)
+ movl %ebx,4(%ecx)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 64
+.L005cbc_enc_jmp_table:
+.long 0
+.long .L013ej1-.L004PIC_point
+.long .L012ej2-.L004PIC_point
+.long .L011ej3-.L004PIC_point
+.long .L009ej4-.L004PIC_point
+.long .L008ej5-.L004PIC_point
+.long .L007ej6-.L004PIC_point
+.long .L006ej7-.L004PIC_point
+.align 64
+.size BF_cbc_encrypt,.-.L_BF_cbc_encrypt_begin
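
That ends the new assembly file. For reference, these are the C prototypes the routines above implement, as declared in blowfish.h (the BF_encrypt prototype is shown verbatim in the header diff further down; the other two follow the stock OpenSSL declarations), together with the BF_KEY offsets the S-box loads rely on:

/*
 * Prototypes implemented by bf-586.S (per <openssl/blowfish.h>).
 * BF_LONG is a 32-bit unsigned type on this target.  Arguments are
 * fetched per the i386 cdecl convention: in BF_encrypt/BF_decrypt,
 * after the two initial pushes, 12(%esp) is data and 16(%esp) is key.
 */
void BF_encrypt(BF_LONG *data, const BF_KEY *key);
void BF_decrypt(BF_LONG *data, const BF_KEY *key);
void BF_cbc_encrypt(const unsigned char *in, unsigned char *out, long length,
                    const BF_KEY *schedule, unsigned char *ivec, int enc);

/*
 * BF_KEY layout the asm relies on: P[18] first, then S[4*256], so with
 * %ebp holding the key pointer:
 *   P[i]        -> 4*i(%ebp)
 *   S[0][byte]  -> 72(%ebp,%eax,4)      (72 = 18 * sizeof(BF_LONG))
 *   S[1][byte]  -> 1096(%ebp,%ebx,4)    (72 + 1024)
 *   S[2][byte]  -> 2120(%ebp,%ecx,4)    (72 + 2048)
 *   S[3][byte]  -> 3144(%ebp,%edx,4)    (72 + 3072)
 */
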
diff --git a/app/openssl/crypto/bf/bf_skey.c b/app/openssl/crypto/bf/bf_skey.c
index 3673cdee..3b0bca41 100644
--- a/app/openssl/crypto/bf/bf_skey.c
+++ b/app/openssl/crypto/bf/bf_skey.c
@@ -58,11 +58,19 @@
#include <stdio.h>
#include <string.h>
+#include <openssl/crypto.h>
#include <openssl/blowfish.h>
#include "bf_locl.h"
#include "bf_pi.h"
void BF_set_key(BF_KEY *key, int len, const unsigned char *data)
+#ifdef OPENSSL_FIPS
+ {
+ fips_cipher_abort(BLOWFISH);
+ private_BF_set_key(key, len, data);
+ }
+void private_BF_set_key(BF_KEY *key, int len, const unsigned char *data)
+#endif
{
int i;
BF_LONG *p,ri,in[2];
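
The OPENSSL_FIPS hunk above is a preprocessor trick rather than a plain wrapper: when OPENSSL_FIPS is defined, the body that immediately follows the BF_set_key header becomes a small forwarding wrapper, and the original key-schedule body is re-attached to private_BF_set_key; when it is not defined, the added lines vanish and BF_set_key keeps its original body. A sketch of what the #ifdef selects in each configuration (illustrative only):

/* With OPENSSL_FIPS defined, the source above reads as: */
void BF_set_key(BF_KEY *key, int len, const unsigned char *data)
	{
	fips_cipher_abort(BLOWFISH);          /* aborts if FIPS mode is active */
	private_BF_set_key(key, len, data);
	}
void private_BF_set_key(BF_KEY *key, int len, const unsigned char *data)
	{
	/* ... original key-schedule body ... */
	}

/* Without OPENSSL_FIPS it collapses back to the pre-patch form: */
void BF_set_key(BF_KEY *key, int len, const unsigned char *data)
	{
	/* ... original key-schedule body ... */
	}
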
diff --git a/app/openssl/crypto/bf/blowfish.h b/app/openssl/crypto/bf/blowfish.h
index b97e76f9..4b6c8920 100644
--- a/app/openssl/crypto/bf/blowfish.h
+++ b/app/openssl/crypto/bf/blowfish.h
@@ -104,7 +104,9 @@ typedef struct bf_key_st
BF_LONG S[4*256];
} BF_KEY;
-
+#ifdef OPENSSL_FIPS
+void private_BF_set_key(BF_KEY *key, int len, const unsigned char *data);
+#endif
void BF_set_key(BF_KEY *key, int len, const unsigned char *data);
void BF_encrypt(BF_LONG *data,const BF_KEY *key);