From 5c72e5ea7a9ce343e4cd595c3fc82956843185c8 Mon Sep 17 00:00:00 2001
From: Andy Polyakov
Date: Sat, 11 Feb 2017 22:08:12 +0100
Subject: [PATCH] modes/asm/*-x86_64.pl: add CFI annotations.

Reviewed-by: Rich Salz
---
 crypto/modes/asm/aesni-gcm-x86_64.pl | 32 ++++++++++++++++++++++++++++
 crypto/modes/asm/ghash-x86_64.pl     | 29 +++++++++++++++++++++++++
 2 files changed, 61 insertions(+)

diff --git a/crypto/modes/asm/aesni-gcm-x86_64.pl b/crypto/modes/asm/aesni-gcm-x86_64.pl
index 5ad62b3979..5e69cb86fa 100644
--- a/crypto/modes/asm/aesni-gcm-x86_64.pl
+++ b/crypto/modes/asm/aesni-gcm-x86_64.pl
@@ -424,6 +424,7 @@ $code.=<<___;
 .type	aesni_gcm_decrypt,\@function,6
 .align	32
 aesni_gcm_decrypt:
+.cfi_startproc
 	xor	$ret,$ret
 
 	# We call |_aesni_ctr32_ghash_6x|, which requires at least 96 (0x60)
@@ -432,12 +433,19 @@ aesni_gcm_decrypt:
 	jb	.Lgcm_dec_abort
 
 	lea	(%rsp),%rax		# save stack pointer
+.cfi_def_cfa_register	%rax
 	push	%rbx
+.cfi_push	%rbx
 	push	%rbp
+.cfi_push	%rbp
 	push	%r12
+.cfi_push	%r12
 	push	%r13
+.cfi_push	%r13
 	push	%r14
+.cfi_push	%r14
 	push	%r15
+.cfi_push	%r15
 ___
 $code.=<<___ if ($win64);
 	lea	-0xa8(%rsp),%rsp
@@ -537,15 +545,23 @@ $code.=<<___ if ($win64);
 ___
 $code.=<<___;
 	mov	-48(%rax),%r15
+.cfi_restore	%r15
 	mov	-40(%rax),%r14
+.cfi_restore	%r14
 	mov	-32(%rax),%r13
+.cfi_restore	%r13
 	mov	-24(%rax),%r12
+.cfi_restore	%r12
 	mov	-16(%rax),%rbp
+.cfi_restore	%rbp
 	mov	-8(%rax),%rbx
+.cfi_restore	%rbx
 	lea	(%rax),%rsp		# restore %rsp
+.cfi_def_cfa_register	%rsp
 .Lgcm_dec_abort:
 	mov	$ret,%rax		# return value
 	ret
+.cfi_endproc
 .size	aesni_gcm_decrypt,.-aesni_gcm_decrypt
 ___
 
@@ -645,6 +661,7 @@ _aesni_ctr32_6x:
 .type	aesni_gcm_encrypt,\@function,6
 .align	32
 aesni_gcm_encrypt:
+.cfi_startproc
 	xor	$ret,$ret
 
 	# We call |_aesni_ctr32_6x| twice, each call consuming 96 bytes of
@@ -654,12 +671,19 @@ aesni_gcm_encrypt:
 	jb	.Lgcm_enc_abort
 
 	lea	(%rsp),%rax		# save stack pointer
+.cfi_def_cfa_register	%rax
 	push	%rbx
+.cfi_push	%rbx
 	push	%rbp
+.cfi_push	%rbp
 	push	%r12
+.cfi_push	%r12
 	push	%r13
+.cfi_push	%r13
 	push	%r14
+.cfi_push	%r14
 	push	%r15
+.cfi_push	%r15
 ___
 $code.=<<___ if ($win64);
 	lea	-0xa8(%rsp),%rsp
@@ -931,15 +955,23 @@ $code.=<<___ if ($win64);
 ___
 $code.=<<___;
 	mov	-48(%rax),%r15
+.cfi_restore	%r15
 	mov	-40(%rax),%r14
+.cfi_restore	%r14
 	mov	-32(%rax),%r13
+.cfi_restore	%r13
 	mov	-24(%rax),%r12
+.cfi_restore	%r12
 	mov	-16(%rax),%rbp
+.cfi_restore	%rbp
 	mov	-8(%rax),%rbx
+.cfi_restore	%rbx
 	lea	(%rax),%rsp		# restore %rsp
+.cfi_def_cfa_register	%rsp
 .Lgcm_enc_abort:
 	mov	$ret,%rax		# return value
 	ret
+.cfi_endproc
 .size	aesni_gcm_encrypt,.-aesni_gcm_encrypt
 ___
 
diff --git a/crypto/modes/asm/ghash-x86_64.pl b/crypto/modes/asm/ghash-x86_64.pl
index caa9ced696..817f6e59a0 100644
--- a/crypto/modes/asm/ghash-x86_64.pl
+++ b/crypto/modes/asm/ghash-x86_64.pl
@@ -236,13 +236,21 @@ $code=<<___;
 .type	gcm_gmult_4bit,\@function,2
 .align	16
 gcm_gmult_4bit:
+.cfi_startproc
 	push	%rbx
+.cfi_push	%rbx
 	push	%rbp		# %rbp and others are pushed exclusively in
+.cfi_push	%rbp
 	push	%r12		# order to reuse Win64 exception handler...
+.cfi_push	%r12
 	push	%r13
+.cfi_push	%r13
 	push	%r14
+.cfi_push	%r14
 	push	%r15
+.cfi_push	%r15
 	sub	\$280,%rsp
+.cfi_adjust_cfa_offset	280
 .Lgmult_prologue:
 
 	movzb	15($Xi),$Zlo
@@ -254,10 +262,14 @@ $code.=<<___;
 	mov	$Zhi,($Xi)
 
 	lea	280+48(%rsp),%rsi
+.cfi_def_cfa	%rsi,8
 	mov	-8(%rsi),%rbx
+.cfi_restore	%rbx
 	lea	(%rsi),%rsp
+.cfi_def_cfa_register	%rsp
 .Lgmult_epilogue:
 	ret
+.cfi_endproc
 .size	gcm_gmult_4bit,.-gcm_gmult_4bit
 ___
 
@@ -271,13 +283,21 @@ $code.=<<___;
 .type	gcm_ghash_4bit,\@function,4
 .align	16
 gcm_ghash_4bit:
+.cfi_startproc
 	push	%rbx
+.cfi_push	%rbx
 	push	%rbp
+.cfi_push	%rbp
 	push	%r12
+.cfi_push	%r12
 	push	%r13
+.cfi_push	%r13
 	push	%r14
+.cfi_push	%r14
 	push	%r15
+.cfi_push	%r15
 	sub	\$280,%rsp
+.cfi_adjust_cfa_offset	280
 .Lghash_prologue:
 	mov	$inp,%r14		# reassign couple of args
 	mov	$len,%r15
@@ -406,15 +426,24 @@ $code.=<<___;
 	mov	$Zhi,($Xi)
 
 	lea	280+48(%rsp),%rsi
+.cfi_def_cfa	%rsi,8
 	mov	-48(%rsi),%r15
+.cfi_restore	%r15
 	mov	-40(%rsi),%r14
+.cfi_restore	%r14
 	mov	-32(%rsi),%r13
+.cfi_restore	%r13
 	mov	-24(%rsi),%r12
+.cfi_restore	%r12
 	mov	-16(%rsi),%rbp
+.cfi_restore	%rbp
 	mov	-8(%rsi),%rbx
+.cfi_restore	%rbx
 	lea	0(%rsi),%rsp
+.cfi_def_cfa_register	%rsp
 .Lghash_epilogue:
 	ret
+.cfi_endproc
 .size	gcm_ghash_4bit,.-gcm_ghash_4bit
 ___
 
-- 
2.25.1
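
For reference, the directives added by this patch follow the standard DWARF call-frame-information (CFI) pattern: .cfi_startproc and .cfi_endproc bracket each function, the CFA (canonical frame address) rule is updated whenever the stack pointer moves, and each callee-saved register is recorded when it is pushed and released when it is reloaded. The .cfi_push shorthand is not a raw assembler directive; it appears to be a perlasm-level convenience that the x86_64 translator expands into equivalent standard directives. A minimal hand-written sketch of the same pattern in plain GAS syntax follows (hypothetical function name cfi_demo, not taken from the patch):

.text
.globl	cfi_demo
.type	cfi_demo,@function
cfi_demo:
.cfi_startproc				# begin unwind info; CFA = %rsp+8 at entry
	push	%rbx
.cfi_adjust_cfa_offset	8		# the push moved %rsp down by 8
.cfi_offset	%rbx,-16		# caller's %rbx is now saved at CFA-16
	sub	$40,%rsp
.cfi_adjust_cfa_offset	40		# frame allocation also shifts the CFA offset
	# ... function body ...
	add	$40,%rsp
.cfi_adjust_cfa_offset	-40
	pop	%rbx
.cfi_adjust_cfa_offset	-8
.cfi_restore	%rbx			# caller's %rbx lives in the register again
	ret
.cfi_endproc
.size	cfi_demo,.-cfi_demo

The functions above additionally re-base the CFA when a copy of the entry stack pointer is held in another register (.cfi_def_cfa_register %rax in the aesni prologues, .cfi_def_cfa %rsi,8 in the ghash epilogues) and move it back with .cfi_def_cfa_register %rsp after the lea that restores %rsp, so the unwind information stays valid while the callee-saved registers are reloaded relative to the saved pointer.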