[Top][All Lists]
[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]
[SCM] GNU gnutls branch, master, updated. gnutls_3_0_4-31-g4ac9123
From: Nikos Mavrogiannopoulos
Subject: [SCM] GNU gnutls branch, master, updated. gnutls_3_0_4-31-g4ac9123
Date: Tue, 25 Oct 2011 17:38:55 +0000
This is an automated email from the git hooks/post-receive script. It was
generated because a ref change was pushed to the repository containing
the project "GNU gnutls".
http://git.savannah.gnu.org/cgit/gnutls.git/commit/?id=4ac912339d7b33b25c64116d1b6f420dcdaa4a67
The branch, master has been updated
via 4ac912339d7b33b25c64116d1b6f420dcdaa4a67 (commit)
via e3e90b119bf04d8db44ee5aada6129654eab8440 (commit)
via f2c9ee62541384a17990e8280dba52832593b5d2 (commit)
from a416d288a60adcefb359f5dbdffcb61fd735eb5a (commit)
Those revisions listed above that are new to this repository have
not appeared on any other notification email; so we list those
revisions in full, below.
- Log -----------------------------------------------------------------
commit 4ac912339d7b33b25c64116d1b6f420dcdaa4a67
Author: Nikos Mavrogiannopoulos <address@hidden>
Date: Tue Oct 25 19:40:02 2011 +0200
more files to ignore
commit e3e90b119bf04d8db44ee5aada6129654eab8440
Author: Nikos Mavrogiannopoulos <address@hidden>
Date: Tue Oct 25 19:39:09 2011 +0200
Added COFF versions of assembly files.
commit f2c9ee62541384a17990e8280dba52832593b5d2
Author: Nikos Mavrogiannopoulos <address@hidden>
Date: Tue Oct 25 18:48:24 2011 +0200
Added Jan.
-----------------------------------------------------------------------
Summary of changes:
.gitignore | 9 +
THANKS | 1 +
configure.ac | 11 ++
lib/accelerated/x86/Makefile.am | 8 +
.../appro-aes-x86.s => coff/appro-aes-x86-coff.s} | 184 +++++++++-----------
.../{asm/padlock-x86.s => coff/padlock-x86-coff.s} | 134 +++++++--------
6 files changed, 171 insertions(+), 176 deletions(-)
copy lib/accelerated/x86/{asm/appro-aes-x86.s => coff/appro-aes-x86-coff.s}
(93%)
copy lib/accelerated/x86/{asm/padlock-x86.s => coff/padlock-x86-coff.s} (81%)
diff --git a/.gitignore b/.gitignore
index 53791c4..e26f700 100644
--- a/.gitignore
+++ b/.gitignore
@@ -532,3 +532,12 @@ tests/x509paths/
tests/x509self
tests/x509sign-verify
tests/x509signself
+gl/tests/arpa/
+gl/tests/test-accept
+gl/tests/test-bind
+gl/tests/test-getpeername
+gl/tests/test-listen
+gl/tests/test-recvfrom
+gl/tests/test-sendto
+gl/tests/test-setsockopt
+gl/tests/test-shutdown
diff --git a/THANKS b/THANKS
index c2c5f47..c50bb40 100644
--- a/THANKS
+++ b/THANKS
@@ -121,6 +121,7 @@ Jonathan Nieder *jrnider [at] gmail.com*
J. Cameijo Cerdeira *cerdeira [at] co.sapo.pt*
Benjamin Hof *benjamin.hof [at] stusta.net*
Vincent Untz *vuntz [at] gnome.org*
+Jan Misiak address@hidden
----------------------------------------------------------------------
Copying and distribution of this file, with or without modification,
diff --git a/configure.ac b/configure.ac
index d558994..c881664 100644
--- a/configure.ac
+++ b/configure.ac
@@ -68,6 +68,16 @@ fi
AM_CONDITIONAL(ENABLE_CXX, test "$use_cxx" != "no")
AM_CONDITIONAL(WANT_TEST_SUITE, [test -f tests/suite/mini-eagain2.c])
+dnl Detect windows build
+case "$host" in
+ *-*-mingw32*)
+ have_win32=yes
+ ;;
+ *)
+ ;;
+esac
+
+AM_CONDITIONAL(WIN32, test "$have_win32" = yes)
dnl Hardware Acceleration
AC_ARG_ENABLE(hardware-acceleration,
@@ -75,6 +85,7 @@ AC_ARG_ENABLE(hardware-acceleration,
use_accel=$enableval, use_accel=yes)
hw_accel=none
+
if test "$use_accel" != "no"; then
case $host_cpu in
i?86 | x86_64 | amd64)
diff --git a/lib/accelerated/x86/Makefile.am b/lib/accelerated/x86/Makefile.am
index 3530998..689f977 100644
--- a/lib/accelerated/x86/Makefile.am
+++ b/lib/accelerated/x86/Makefile.am
@@ -39,9 +39,17 @@ noinst_LTLIBRARIES = libx86.la
libx86_la_SOURCES = sha-padlock.c hmac-padlock.c aes-x86.c aes-padlock.c
aes-gcm-padlock.c aes-padlock.h aes-x86.h x86.h sha-padlock.h
if ASM_X86_64
+
AM_CFLAGS += -DASM_X86_64 -DASM_X86
libx86_la_SOURCES += asm/appro-aes-x86-64.s asm/appro-aes-gcm-x86-64.s
aes-gcm-x86.c asm/padlock-x86-64.s asm/cpuid-x86-64.s
+
else
+
AM_CFLAGS += -DASM_X86_32 -DASM_X86
+if WIN32
+libx86_la_SOURCES += coff/appro-aes-x86-coff.s coff/padlock-x86-coff.s
asm/cpuid-x86.s
+else
libx86_la_SOURCES += asm/appro-aes-x86.s asm/padlock-x86.s asm/cpuid-x86.s
endif
+
+endif
diff --git a/lib/accelerated/x86/asm/appro-aes-x86.s
b/lib/accelerated/x86/coff/appro-aes-x86-coff.s
similarity index 93%
copy from lib/accelerated/x86/asm/appro-aes-x86.s
copy to lib/accelerated/x86/coff/appro-aes-x86-coff.s
index b1ce9bc..2f9a6bc 100644
--- a/lib/accelerated/x86/asm/appro-aes-x86.s
+++ b/lib/accelerated/x86/coff/appro-aes-x86-coff.s
@@ -37,10 +37,10 @@
.file "aesni-x86.s"
.text
-.globl aesni_encrypt
-.type aesni_encrypt,@function
+.globl _aesni_encrypt
+.def _aesni_encrypt; .scl 2; .type 32; .endef
.align 16
-aesni_encrypt:
+_aesni_encrypt:
.L_aesni_encrypt_begin:
movl 4(%esp),%eax
movl 12(%esp),%edx
@@ -60,11 +60,10 @@ aesni_encrypt:
.byte 102,15,56,221,209
movups %xmm2,(%eax)
ret
-.size aesni_encrypt,.-.L_aesni_encrypt_begin
-.globl aesni_decrypt
-.type aesni_decrypt,@function
+.globl _aesni_decrypt
+.def _aesni_decrypt; .scl 2; .type 32; .endef
.align 16
-aesni_decrypt:
+_aesni_decrypt:
.L_aesni_decrypt_begin:
movl 4(%esp),%eax
movl 12(%esp),%edx
@@ -84,10 +83,9 @@ aesni_decrypt:
.byte 102,15,56,223,209
movups %xmm2,(%eax)
ret
-.size aesni_decrypt,.-.L_aesni_decrypt_begin
-.type _aesni_encrypt3,@function
+.def __aesni_encrypt3; .scl 3; .type 32; .endef
.align 16
-_aesni_encrypt3:
+__aesni_encrypt3:
movups (%edx),%xmm0
shrl $1,%ecx
movups 16(%edx),%xmm1
@@ -115,10 +113,9 @@ _aesni_encrypt3:
.byte 102,15,56,221,216
.byte 102,15,56,221,224
ret
-.size _aesni_encrypt3,.-_aesni_encrypt3
-.type _aesni_decrypt3,@function
+.def __aesni_decrypt3; .scl 3; .type 32; .endef
.align 16
-_aesni_decrypt3:
+__aesni_decrypt3:
movups (%edx),%xmm0
shrl $1,%ecx
movups 16(%edx),%xmm1
@@ -146,10 +143,9 @@ _aesni_decrypt3:
.byte 102,15,56,223,216
.byte 102,15,56,223,224
ret
-.size _aesni_decrypt3,.-_aesni_decrypt3
-.type _aesni_encrypt4,@function
+.def __aesni_encrypt4; .scl 3; .type 32; .endef
.align 16
-_aesni_encrypt4:
+__aesni_encrypt4:
movups (%edx),%xmm0
movups 16(%edx),%xmm1
shrl $1,%ecx
@@ -182,10 +178,9 @@ _aesni_encrypt4:
.byte 102,15,56,221,224
.byte 102,15,56,221,232
ret
-.size _aesni_encrypt4,.-_aesni_encrypt4
-.type _aesni_decrypt4,@function
+.def __aesni_decrypt4; .scl 3; .type 32; .endef
.align 16
-_aesni_decrypt4:
+__aesni_decrypt4:
movups (%edx),%xmm0
movups 16(%edx),%xmm1
shrl $1,%ecx
@@ -218,10 +213,9 @@ _aesni_decrypt4:
.byte 102,15,56,223,224
.byte 102,15,56,223,232
ret
-.size _aesni_decrypt4,.-_aesni_decrypt4
-.type _aesni_encrypt6,@function
+.def __aesni_encrypt6; .scl 3; .type 32; .endef
.align 16
-_aesni_encrypt6:
+__aesni_encrypt6:
movups (%edx),%xmm0
shrl $1,%ecx
movups 16(%edx),%xmm1
@@ -275,10 +269,9 @@ _aesni_encrypt6:
.byte 102,15,56,221,240
.byte 102,15,56,221,248
ret
-.size _aesni_encrypt6,.-_aesni_encrypt6
-.type _aesni_decrypt6,@function
+.def __aesni_decrypt6; .scl 3; .type 32; .endef
.align 16
-_aesni_decrypt6:
+__aesni_decrypt6:
movups (%edx),%xmm0
shrl $1,%ecx
movups 16(%edx),%xmm1
@@ -332,11 +325,10 @@ _aesni_decrypt6:
.byte 102,15,56,223,240
.byte 102,15,56,223,248
ret
-.size _aesni_decrypt6,.-_aesni_decrypt6
-.globl aesni_ecb_encrypt
-.type aesni_ecb_encrypt,@function
+.globl _aesni_ecb_encrypt
+.def _aesni_ecb_encrypt; .scl 2; .type 32; .endef
.align 16
-aesni_ecb_encrypt:
+_aesni_ecb_encrypt:
.L_aesni_ecb_encrypt_begin:
pushl %ebp
pushl %ebx
@@ -382,7 +374,7 @@ aesni_ecb_encrypt:
movdqu 80(%esi),%xmm7
leal 96(%esi),%esi
.L011ecb_enc_loop6_enter:
- call _aesni_encrypt6
+ call __aesni_encrypt6
movl %ebp,%edx
movl %ebx,%ecx
subl $96,%eax
@@ -409,7 +401,7 @@ aesni_ecb_encrypt:
je .L016ecb_enc_four
movups 64(%esi),%xmm6
xorps %xmm7,%xmm7
- call _aesni_encrypt6
+ call __aesni_encrypt6
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
@@ -434,20 +426,20 @@ aesni_ecb_encrypt:
.align 16
.L014ecb_enc_two:
xorps %xmm4,%xmm4
- call _aesni_encrypt3
+ call __aesni_encrypt3
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
jmp .L008ecb_ret
.align 16
.L015ecb_enc_three:
- call _aesni_encrypt3
+ call __aesni_encrypt3
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
jmp .L008ecb_ret
.align 16
.L016ecb_enc_four:
- call _aesni_encrypt4
+ call __aesni_encrypt4
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
@@ -485,7 +477,7 @@ aesni_ecb_encrypt:
movdqu 80(%esi),%xmm7
leal 96(%esi),%esi
.L019ecb_dec_loop6_enter:
- call _aesni_decrypt6
+ call __aesni_decrypt6
movl %ebp,%edx
movl %ebx,%ecx
subl $96,%eax
@@ -512,7 +504,7 @@ aesni_ecb_encrypt:
je .L024ecb_dec_four
movups 64(%esi),%xmm6
xorps %xmm7,%xmm7
- call _aesni_decrypt6
+ call __aesni_decrypt6
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
@@ -537,20 +529,20 @@ aesni_ecb_encrypt:
.align 16
.L022ecb_dec_two:
xorps %xmm4,%xmm4
- call _aesni_decrypt3
+ call __aesni_decrypt3
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
jmp .L008ecb_ret
.align 16
.L023ecb_dec_three:
- call _aesni_decrypt3
+ call __aesni_decrypt3
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
jmp .L008ecb_ret
.align 16
.L024ecb_dec_four:
- call _aesni_decrypt4
+ call __aesni_decrypt4
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
movups %xmm4,32(%edi)
@@ -561,11 +553,10 @@ aesni_ecb_encrypt:
popl %ebx
popl %ebp
ret
-.size aesni_ecb_encrypt,.-.L_aesni_ecb_encrypt_begin
-.globl aesni_ccm64_encrypt_blocks
-.type aesni_ccm64_encrypt_blocks,@function
+.globl _aesni_ccm64_encrypt_blocks
+.def _aesni_ccm64_encrypt_blocks; .scl 2; .type 32; .endef
.align 16
-aesni_ccm64_encrypt_blocks:
+_aesni_ccm64_encrypt_blocks:
.L_aesni_ccm64_encrypt_blocks_begin:
pushl %ebp
pushl %ebx
@@ -596,9 +587,10 @@ aesni_ccm64_encrypt_blocks:
movl %ebp,28(%esp)
shrl $1,%ecx
leal (%edx),%ebp
+ movdqa (%esp),%xmm5
movdqa %xmm7,%xmm2
movl %ecx,%ebx
- movdqa (%esp),%xmm5
+.byte 102,15,56,0,253
.L026ccm64_enc_outer:
movups (%ebp),%xmm0
movl %ebx,%ecx
@@ -619,7 +611,6 @@ aesni_ccm64_encrypt_blocks:
.byte 102,15,56,220,216
movups (%edx),%xmm0
jnz .L027ccm64_enc2_loop
-.byte 102,15,56,0,253
.byte 102,15,56,220,209
.byte 102,15,56,220,217
paddq 16(%esp),%xmm7
@@ -631,7 +622,7 @@ aesni_ccm64_encrypt_blocks:
movdqa %xmm7,%xmm2
movups %xmm6,(%edi)
leal 16(%edi),%edi
-.byte 102,15,56,0,253
+.byte 102,15,56,0,213
jnz .L026ccm64_enc_outer
movl 48(%esp),%esp
movl 40(%esp),%edi
@@ -641,11 +632,10 @@ aesni_ccm64_encrypt_blocks:
popl %ebx
popl %ebp
ret
-.size aesni_ccm64_encrypt_blocks,.-.L_aesni_ccm64_encrypt_blocks_begin
-.globl aesni_ccm64_decrypt_blocks
-.type aesni_ccm64_decrypt_blocks,@function
+.globl _aesni_ccm64_decrypt_blocks
+.def _aesni_ccm64_decrypt_blocks; .scl 2; .type 32; .endef
.align 16
-aesni_ccm64_decrypt_blocks:
+_aesni_ccm64_decrypt_blocks:
.L_aesni_ccm64_decrypt_blocks_begin:
pushl %ebp
pushl %ebx
@@ -692,7 +682,6 @@ aesni_ccm64_decrypt_blocks:
.byte 102,15,56,221,209
movups (%esi),%xmm6
paddq 16(%esp),%xmm7
-.byte 102,15,56,0,253
leal 16(%esi),%esi
jmp .L029ccm64_dec_outer
.align 16
@@ -702,6 +691,7 @@ aesni_ccm64_decrypt_blocks:
movl %ebx,%ecx
movups %xmm6,(%edi)
leal 16(%edi),%edi
+.byte 102,15,56,0,213
subl $1,%eax
jz .L030ccm64_dec_break
movups (%ebp),%xmm0
@@ -726,7 +716,6 @@ aesni_ccm64_decrypt_blocks:
paddq 16(%esp),%xmm7
.byte 102,15,56,220,209
.byte 102,15,56,220,217
-.byte 102,15,56,0,253
leal 16(%esi),%esi
.byte 102,15,56,221,208
.byte 102,15,56,221,216
@@ -754,11 +743,10 @@ aesni_ccm64_decrypt_blocks:
popl %ebx
popl %ebp
ret
-.size aesni_ccm64_decrypt_blocks,.-.L_aesni_ccm64_decrypt_blocks_begin
-.globl aesni_ctr32_encrypt_blocks
-.type aesni_ctr32_encrypt_blocks,@function
+.globl _aesni_ctr32_encrypt_blocks
+.def _aesni_ctr32_encrypt_blocks; .scl 2; .type 32; .endef
.align 16
-aesni_ctr32_encrypt_blocks:
+_aesni_ctr32_encrypt_blocks:
.L_aesni_ctr32_encrypt_blocks_begin:
pushl %ebp
pushl %ebx
@@ -903,7 +891,7 @@ aesni_ctr32_encrypt_blocks:
por %xmm7,%xmm5
je .L040ctr32_four
por %xmm7,%xmm6
- call _aesni_encrypt6
+ call __aesni_encrypt6
movups (%esi),%xmm1
movups 16(%esi),%xmm0
xorps %xmm1,%xmm2
@@ -942,7 +930,7 @@ aesni_ctr32_encrypt_blocks:
jmp .L036ctr32_ret
.align 16
.L038ctr32_two:
- call _aesni_encrypt3
+ call __aesni_encrypt3
movups (%esi),%xmm5
movups 16(%esi),%xmm6
xorps %xmm5,%xmm2
@@ -952,7 +940,7 @@ aesni_ctr32_encrypt_blocks:
jmp .L036ctr32_ret
.align 16
.L039ctr32_three:
- call _aesni_encrypt3
+ call __aesni_encrypt3
movups (%esi),%xmm5
movups 16(%esi),%xmm6
xorps %xmm5,%xmm2
@@ -965,7 +953,7 @@ aesni_ctr32_encrypt_blocks:
jmp .L036ctr32_ret
.align 16
.L040ctr32_four:
- call _aesni_encrypt4
+ call __aesni_encrypt4
movups (%esi),%xmm6
movups 16(%esi),%xmm7
movups 32(%esi),%xmm1
@@ -985,11 +973,10 @@ aesni_ctr32_encrypt_blocks:
popl %ebx
popl %ebp
ret
-.size aesni_ctr32_encrypt_blocks,.-.L_aesni_ctr32_encrypt_blocks_begin
-.globl aesni_xts_encrypt
-.type aesni_xts_encrypt,@function
+.globl _aesni_xts_encrypt
+.def _aesni_xts_encrypt; .scl 2; .type 32; .endef
.align 16
-aesni_xts_encrypt:
+_aesni_xts_encrypt:
.L_aesni_xts_encrypt_begin:
pushl %ebp
pushl %ebx
@@ -1182,7 +1169,7 @@ aesni_xts_encrypt:
pxor 48(%esp),%xmm5
movdqa %xmm7,64(%esp)
pxor %xmm7,%xmm6
- call _aesni_encrypt6
+ call __aesni_encrypt6
movaps 64(%esp),%xmm1
xorps (%esp),%xmm2
xorps 16(%esp),%xmm3
@@ -1226,7 +1213,7 @@ aesni_xts_encrypt:
xorps %xmm5,%xmm2
xorps %xmm6,%xmm3
xorps %xmm4,%xmm4
- call _aesni_encrypt3
+ call __aesni_encrypt3
xorps %xmm5,%xmm2
xorps %xmm6,%xmm3
movups %xmm2,(%edi)
@@ -1244,7 +1231,7 @@ aesni_xts_encrypt:
xorps %xmm5,%xmm2
xorps %xmm6,%xmm3
xorps %xmm7,%xmm4
- call _aesni_encrypt3
+ call __aesni_encrypt3
xorps %xmm5,%xmm2
xorps %xmm6,%xmm3
xorps %xmm7,%xmm4
@@ -1266,7 +1253,7 @@ aesni_xts_encrypt:
xorps 16(%esp),%xmm3
xorps %xmm7,%xmm4
xorps %xmm6,%xmm5
- call _aesni_encrypt4
+ call __aesni_encrypt4
xorps (%esp),%xmm2
xorps 16(%esp),%xmm3
xorps %xmm7,%xmm4
@@ -1332,11 +1319,10 @@ aesni_xts_encrypt:
popl %ebx
popl %ebp
ret
-.size aesni_xts_encrypt,.-.L_aesni_xts_encrypt_begin
-.globl aesni_xts_decrypt
-.type aesni_xts_decrypt,@function
+.globl _aesni_xts_decrypt
+.def _aesni_xts_decrypt; .scl 2; .type 32; .endef
.align 16
-aesni_xts_decrypt:
+_aesni_xts_decrypt:
.L_aesni_xts_decrypt_begin:
pushl %ebp
pushl %ebx
@@ -1534,7 +1520,7 @@ aesni_xts_decrypt:
pxor 48(%esp),%xmm5
movdqa %xmm7,64(%esp)
pxor %xmm7,%xmm6
- call _aesni_decrypt6
+ call __aesni_decrypt6
movaps 64(%esp),%xmm1
xorps (%esp),%xmm2
xorps 16(%esp),%xmm3
@@ -1577,7 +1563,7 @@ aesni_xts_decrypt:
leal 32(%esi),%esi
xorps %xmm5,%xmm2
xorps %xmm6,%xmm3
- call _aesni_decrypt3
+ call __aesni_decrypt3
xorps %xmm5,%xmm2
xorps %xmm6,%xmm3
movups %xmm2,(%edi)
@@ -1595,7 +1581,7 @@ aesni_xts_decrypt:
xorps %xmm5,%xmm2
xorps %xmm6,%xmm3
xorps %xmm7,%xmm4
- call _aesni_decrypt3
+ call __aesni_decrypt3
xorps %xmm5,%xmm2
xorps %xmm6,%xmm3
xorps %xmm7,%xmm4
@@ -1617,7 +1603,7 @@ aesni_xts_decrypt:
xorps 16(%esp),%xmm3
xorps %xmm7,%xmm4
xorps %xmm6,%xmm5
- call _aesni_decrypt4
+ call __aesni_decrypt4
xorps (%esp),%xmm2
xorps 16(%esp),%xmm3
xorps %xmm7,%xmm4
@@ -1708,11 +1694,10 @@ aesni_xts_decrypt:
popl %ebx
popl %ebp
ret
-.size aesni_xts_decrypt,.-.L_aesni_xts_decrypt_begin
-.globl aesni_cbc_encrypt
-.type aesni_cbc_encrypt,@function
+.globl _aesni_cbc_encrypt
+.def _aesni_cbc_encrypt; .scl 2; .type 32; .endef
.align 16
-aesni_cbc_encrypt:
+_aesni_cbc_encrypt:
.L_aesni_cbc_encrypt_begin:
pushl %ebp
pushl %ebx
@@ -1798,7 +1783,7 @@ aesni_cbc_encrypt:
movdqu 48(%esi),%xmm5
movdqu 64(%esi),%xmm6
movdqu 80(%esi),%xmm7
- call _aesni_decrypt6
+ call __aesni_decrypt6
movups (%esi),%xmm1
movups 16(%esi),%xmm0
xorps (%esp),%xmm2
@@ -1847,7 +1832,7 @@ aesni_cbc_encrypt:
movaps %xmm7,(%esp)
movups (%esi),%xmm2
xorps %xmm7,%xmm7
- call _aesni_decrypt6
+ call __aesni_decrypt6
movups (%esi),%xmm1
movups 16(%esi),%xmm0
xorps (%esp),%xmm2
@@ -1886,7 +1871,7 @@ aesni_cbc_encrypt:
.align 16
.L081cbc_dec_two:
xorps %xmm4,%xmm4
- call _aesni_decrypt3
+ call __aesni_decrypt3
xorps %xmm7,%xmm2
xorps %xmm6,%xmm3
movups %xmm2,(%edi)
@@ -1897,7 +1882,7 @@ aesni_cbc_encrypt:
jmp .L079cbc_dec_tail_collected
.align 16
.L082cbc_dec_three:
- call _aesni_decrypt3
+ call __aesni_decrypt3
xorps %xmm7,%xmm2
xorps %xmm6,%xmm3
xorps %xmm5,%xmm4
@@ -1910,7 +1895,7 @@ aesni_cbc_encrypt:
jmp .L079cbc_dec_tail_collected
.align 16
.L083cbc_dec_four:
- call _aesni_decrypt4
+ call __aesni_decrypt4
movups 16(%esi),%xmm1
movups 32(%esi),%xmm0
xorps %xmm7,%xmm2
@@ -1946,10 +1931,9 @@ aesni_cbc_encrypt:
popl %ebx
popl %ebp
ret
-.size aesni_cbc_encrypt,.-.L_aesni_cbc_encrypt_begin
-.type _aesni_set_encrypt_key,@function
+.def __aesni_set_encrypt_key; .scl 3; .type 32; .endef
.align 16
-_aesni_set_encrypt_key:
+__aesni_set_encrypt_key:
testl %eax,%eax
jz .L086bad_pointer
testl %edx,%edx
@@ -2125,27 +2109,25 @@ _aesni_set_encrypt_key:
.L089bad_keybits:
movl $-2,%eax
ret
-.size _aesni_set_encrypt_key,.-_aesni_set_encrypt_key
-.globl aesni_set_encrypt_key
-.type aesni_set_encrypt_key,@function
+.globl _aesni_set_encrypt_key
+.def _aesni_set_encrypt_key; .scl 2; .type 32; .endef
.align 16
-aesni_set_encrypt_key:
+_aesni_set_encrypt_key:
.L_aesni_set_encrypt_key_begin:
movl 4(%esp),%eax
movl 8(%esp),%ecx
movl 12(%esp),%edx
- call _aesni_set_encrypt_key
+ call __aesni_set_encrypt_key
ret
-.size aesni_set_encrypt_key,.-.L_aesni_set_encrypt_key_begin
-.globl aesni_set_decrypt_key
-.type aesni_set_decrypt_key,@function
+.globl _aesni_set_decrypt_key
+.def _aesni_set_decrypt_key; .scl 2; .type 32; .endef
.align 16
-aesni_set_decrypt_key:
+_aesni_set_decrypt_key:
.L_aesni_set_decrypt_key_begin:
movl 4(%esp),%eax
movl 8(%esp),%ecx
movl 12(%esp),%edx
- call _aesni_set_encrypt_key
+ call __aesni_set_encrypt_key
movl 12(%esp),%edx
shll $4,%ecx
testl %eax,%eax
@@ -2174,13 +2156,7 @@ aesni_set_decrypt_key:
xorl %eax,%eax
.L100dec_key_ret:
ret
-.size aesni_set_decrypt_key,.-.L_aesni_set_decrypt_key_begin
.byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69
.byte 83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83
.byte 32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
.byte 115,108,46,111,114,103,62,0
-
-#if defined(__ELF__)
-.section .note.GNU-stack,"",%progbits
-#endif
-
diff --git a/lib/accelerated/x86/asm/padlock-x86.s
b/lib/accelerated/x86/coff/padlock-x86-coff.s
similarity index 81%
copy from lib/accelerated/x86/asm/padlock-x86.s
copy to lib/accelerated/x86/coff/padlock-x86-coff.s
index b2fca21..c1014bd 100644
--- a/lib/accelerated/x86/asm/padlock-x86.s
+++ b/lib/accelerated/x86/coff/padlock-x86-coff.s
@@ -35,12 +35,12 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-.file "padlock-x86.s"
+.file "./engines/asm/e_padlock-x86.s"
.text
-.globl padlock_capability
-.type padlock_capability,@function
+.globl _padlock_capability
+.def _padlock_capability; .scl 2; .type 32; .endef
.align 16
-padlock_capability:
+_padlock_capability:
.L_padlock_capability_begin:
pushl %ebx
pushfl
@@ -87,11 +87,10 @@ padlock_capability:
.L000noluck:
popl %ebx
ret
-.size padlock_capability,.-.L_padlock_capability_begin
-.globl padlock_key_bswap
-.type padlock_key_bswap,@function
+.globl _padlock_key_bswap
+.def _padlock_key_bswap; .scl 2; .type 32; .endef
.align 16
-padlock_key_bswap:
+_padlock_key_bswap:
.L_padlock_key_bswap_begin:
movl 4(%esp),%edx
movl 240(%edx),%ecx
@@ -103,24 +102,21 @@ padlock_key_bswap:
subl $1,%ecx
jnz .L001bswap_loop
ret
-.size padlock_key_bswap,.-.L_padlock_key_bswap_begin
-.globl padlock_verify_context
-.type padlock_verify_context,@function
+.globl _padlock_verify_context
+.def _padlock_verify_context; .scl 2; .type 32; .endef
.align 16
-padlock_verify_context:
+_padlock_verify_context:
.L_padlock_verify_context_begin:
movl 4(%esp),%edx
- leal .Lpadlock_saved_context-.L002verify_pic_point,%eax
+ leal .Lpadlock_saved_context,%eax
pushfl
- call _padlock_verify_ctx
+ call __padlock_verify_ctx
.L002verify_pic_point:
leal 4(%esp),%esp
ret
-.size padlock_verify_context,.-.L_padlock_verify_context_begin
-.type _padlock_verify_ctx,@function
+.def __padlock_verify_ctx; .scl 3; .type 32; .endef
.align 16
-_padlock_verify_ctx:
- addl (%esp),%eax
+__padlock_verify_ctx:
btl $30,4(%esp)
jnc .L003verified
cmpl (%eax),%edx
@@ -130,20 +126,18 @@ _padlock_verify_ctx:
.L003verified:
movl %edx,(%eax)
ret
-.size _padlock_verify_ctx,.-_padlock_verify_ctx
-.globl padlock_reload_key
-.type padlock_reload_key,@function
+.globl _padlock_reload_key
+.def _padlock_reload_key; .scl 2; .type 32; .endef
.align 16
-padlock_reload_key:
+_padlock_reload_key:
.L_padlock_reload_key_begin:
pushfl
popfl
ret
-.size padlock_reload_key,.-.L_padlock_reload_key_begin
-.globl padlock_aes_block
-.type padlock_aes_block,@function
+.globl _padlock_aes_block
+.def _padlock_aes_block; .scl 2; .type 32; .endef
.align 16
-padlock_aes_block:
+_padlock_aes_block:
.L_padlock_aes_block_begin:
pushl %edi
pushl %esi
@@ -159,11 +153,10 @@ padlock_aes_block:
popl %esi
popl %edi
ret
-.size padlock_aes_block,.-.L_padlock_aes_block_begin
-.globl padlock_ecb_encrypt
-.type padlock_ecb_encrypt,@function
+.globl _padlock_ecb_encrypt
+.def _padlock_ecb_encrypt; .scl 2; .type 32; .endef
.align 16
-padlock_ecb_encrypt:
+_padlock_ecb_encrypt:
.L_padlock_ecb_encrypt_begin:
pushl %ebp
pushl %ebx
@@ -177,10 +170,10 @@ padlock_ecb_encrypt:
jnz .L004ecb_abort
testl $15,%ecx
jnz .L004ecb_abort
- leal .Lpadlock_saved_context-.L005ecb_pic_point,%eax
+ leal .Lpadlock_saved_context,%eax
pushfl
cld
- call _padlock_verify_ctx
+ call __padlock_verify_ctx
.L005ecb_pic_point:
leal 16(%edx),%edx
xorl %eax,%eax
@@ -290,11 +283,10 @@ padlock_ecb_encrypt:
popl %ebx
popl %ebp
ret
-.size padlock_ecb_encrypt,.-.L_padlock_ecb_encrypt_begin
-.globl padlock_cbc_encrypt
-.type padlock_cbc_encrypt,@function
+.globl _padlock_cbc_encrypt
+.def _padlock_cbc_encrypt; .scl 2; .type 32; .endef
.align 16
-padlock_cbc_encrypt:
+_padlock_cbc_encrypt:
.L_padlock_cbc_encrypt_begin:
pushl %ebp
pushl %ebx
@@ -308,10 +300,10 @@ padlock_cbc_encrypt:
jnz .L015cbc_abort
testl $15,%ecx
jnz .L015cbc_abort
- leal .Lpadlock_saved_context-.L016cbc_pic_point,%eax
+ leal .Lpadlock_saved_context,%eax
pushfl
cld
- call _padlock_verify_ctx
+ call __padlock_verify_ctx
.L016cbc_pic_point:
leal 16(%edx),%edx
xorl %eax,%eax
@@ -425,11 +417,10 @@ padlock_cbc_encrypt:
popl %ebx
popl %ebp
ret
-.size padlock_cbc_encrypt,.-.L_padlock_cbc_encrypt_begin
-.globl padlock_xstore
-.type padlock_xstore,@function
+.globl _padlock_xstore
+.def _padlock_xstore; .scl 2; .type 32; .endef
.align 16
-padlock_xstore:
+_padlock_xstore:
.L_padlock_xstore_begin:
pushl %edi
movl 8(%esp),%edi
@@ -437,10 +428,9 @@ padlock_xstore:
.byte 15,167,192
popl %edi
ret
-.size padlock_xstore,.-.L_padlock_xstore_begin
-.type _win32_segv_handler,@function
+.def __win32_segv_handler; .scl 3; .type 32; .endef
.align 16
-_win32_segv_handler:
+__win32_segv_handler:
movl $1,%eax
movl 4(%esp),%edx
movl 12(%esp),%ecx
@@ -450,11 +440,10 @@ _win32_segv_handler:
movl $0,%eax
.L026ret:
ret
-.size _win32_segv_handler,.-_win32_segv_handler
-.globl padlock_sha1_oneshot
-.type padlock_sha1_oneshot,@function
+.globl _padlock_sha1_oneshot
+.def _padlock_sha1_oneshot; .scl 2; .type 32; .endef
.align 16
-padlock_sha1_oneshot:
+_padlock_sha1_oneshot:
.L_padlock_sha1_oneshot_begin:
pushl %edi
pushl %esi
@@ -462,6 +451,9 @@ padlock_sha1_oneshot:
movl 12(%esp),%edi
movl 16(%esp),%esi
movl 20(%esp),%ecx
+ pushl __win32_segv_handler
+.byte 100,255,48
+.byte 100,137,32
movl %esp,%edx
addl $-128,%esp
movups (%edi),%xmm0
@@ -475,17 +467,18 @@ padlock_sha1_oneshot:
movaps (%esp),%xmm0
movl 16(%esp),%eax
movl %edx,%esp
- movl 12(%esp),%edi
+.byte 100,143,5,0,0,0,0
+ leal 4(%esp),%esp
+ movl 16(%esp),%edi
movups %xmm0,(%edi)
movl %eax,16(%edi)
popl %esi
popl %edi
ret
-.size padlock_sha1_oneshot,.-.L_padlock_sha1_oneshot_begin
-.globl padlock_sha1_blocks
-.type padlock_sha1_blocks,@function
+.globl _padlock_sha1_blocks
+.def _padlock_sha1_blocks; .scl 2; .type 32; .endef
.align 16
-padlock_sha1_blocks:
+_padlock_sha1_blocks:
.L_padlock_sha1_blocks_begin:
pushl %edi
pushl %esi
@@ -511,11 +504,10 @@ padlock_sha1_blocks:
popl %esi
popl %edi
ret
-.size padlock_sha1_blocks,.-.L_padlock_sha1_blocks_begin
-.globl padlock_sha256_oneshot
-.type padlock_sha256_oneshot,@function
+.globl _padlock_sha256_oneshot
+.def _padlock_sha256_oneshot; .scl 2; .type 32; .endef
.align 16
-padlock_sha256_oneshot:
+_padlock_sha256_oneshot:
.L_padlock_sha256_oneshot_begin:
pushl %edi
pushl %esi
@@ -523,6 +515,9 @@ padlock_sha256_oneshot:
movl 12(%esp),%edi
movl 16(%esp),%esi
movl 20(%esp),%ecx
+ pushl __win32_segv_handler
+.byte 100,255,48
+.byte 100,137,32
movl %esp,%edx
addl $-128,%esp
movups (%edi),%xmm0
@@ -536,17 +531,18 @@ padlock_sha256_oneshot:
movaps (%esp),%xmm0
movaps 16(%esp),%xmm1
movl %edx,%esp
- movl 12(%esp),%edi
+.byte 100,143,5,0,0,0,0
+ leal 4(%esp),%esp
+ movl 16(%esp),%edi
movups %xmm0,(%edi)
movups %xmm1,16(%edi)
popl %esi
popl %edi
ret
-.size padlock_sha256_oneshot,.-.L_padlock_sha256_oneshot_begin
-.globl padlock_sha256_blocks
-.type padlock_sha256_blocks,@function
+.globl _padlock_sha256_blocks
+.def _padlock_sha256_blocks; .scl 2; .type 32; .endef
.align 16
-padlock_sha256_blocks:
+_padlock_sha256_blocks:
.L_padlock_sha256_blocks_begin:
pushl %edi
pushl %esi
@@ -572,11 +568,10 @@ padlock_sha256_blocks:
popl %esi
popl %edi
ret
-.size padlock_sha256_blocks,.-.L_padlock_sha256_blocks_begin
-.globl padlock_sha512_blocks
-.type padlock_sha512_blocks,@function
+.globl _padlock_sha512_blocks
+.def _padlock_sha512_blocks; .scl 2; .type 32; .endef
.align 16
-padlock_sha512_blocks:
+_padlock_sha512_blocks:
.L_padlock_sha512_blocks_begin:
pushl %edi
pushl %esi
@@ -609,7 +604,6 @@ padlock_sha512_blocks:
popl %esi
popl %edi
ret
-.size padlock_sha512_blocks,.-.L_padlock_sha512_blocks_begin
.byte 86,73,65,32,80,97,100,108,111,99,107,32,120,56,54,32
.byte 109,111,100,117,108,101,44,32,67,82,89,80,84,79,71,65
.byte 77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101
@@ -619,7 +613,3 @@ padlock_sha512_blocks:
.align 4
.Lpadlock_saved_context:
.long 0
-
-#if defined(__ELF__)
-.section .note.GNU-stack,"",%progbits
-#endif
hooks/post-receive
--
GNU gnutls
[Prev in Thread] | Current Thread | [Next in Thread]
- [SCM] GNU gnutls branch, master, updated. gnutls_3_0_4-31-g4ac9123,
Nikos Mavrogiannopoulos <=