From: Anton Korobeynikov
Date: Mon, 10 Dec 2007 15:27:07 +0000 (+0000)
Subject: And finally annotate X86-64 version of callback.
X-Git-Url: http://plrg.eecs.uci.edu/git/?a=commitdiff_plain;h=3a7bcc4d1badce527e2caae2f400c1a91abdbed8;p=oota-llvm.git

And finally annotate X86-64 version of callback.

All bad stuff from the SSE version is implicitly inherited :)

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@44794 91177308-0d34-0410-b5e6-96231b3b80d8
---

diff --git a/lib/Target/X86/X86JITInfo.cpp b/lib/Target/X86/X86JITInfo.cpp
index 01d2c5c693a..4bafdcd8e8d 100644
--- a/lib/Target/X86/X86JITInfo.cpp
+++ b/lib/Target/X86/X86JITInfo.cpp
@@ -58,17 +58,27 @@ extern "C" {
     ".align 8\n"
     ".globl " ASMPREFIX "X86CompilationCallback\n"
   ASMPREFIX "X86CompilationCallback:\n"
+    ".cfi_startproc\n"
     // Save RBP
     "pushq %rbp\n"
+    ".cfi_def_cfa_offset 16\n"
+    ".cfi_offset %rbp, -16\n"
     // Save RSP
     "movq %rsp, %rbp\n"
+    ".cfi_def_cfa_register %rbp\n"
     // Save all int arg registers
     "pushq %rdi\n"
+    ".cfi_rel_offset %rdi, 0\n"
     "pushq %rsi\n"
+    ".cfi_rel_offset %rsi, 8\n"
     "pushq %rdx\n"
+    ".cfi_rel_offset %rdx, 16\n"
     "pushq %rcx\n"
+    ".cfi_rel_offset %rcx, 24\n"
     "pushq %r8\n"
+    ".cfi_rel_offset %r8, 32\n"
     "pushq %r9\n"
+    ".cfi_rel_offset %r9, 40\n"
     // Align stack on 16-byte boundary. ESP might not be properly aligned
     // (8 byte) if this is called from an indirect stub.
     "andq $-16, %rsp\n"
@@ -97,17 +107,34 @@ extern "C" {
     "movaps (%rsp), %xmm0\n"
     // Restore RSP
     "movq %rbp, %rsp\n"
+    ".cfi_def_cfa_register %rsp\n"
     // Restore all int arg registers
     "subq $48, %rsp\n"
+    ".cfi_adjust_cfa_offset 48\n"
     "popq %r9\n"
+    ".cfi_adjust_cfa_offset -8\n"
+    ".cfi_restore %r9\n"
     "popq %r8\n"
+    ".cfi_adjust_cfa_offset -8\n"
+    ".cfi_restore %r8\n"
     "popq %rcx\n"
+    ".cfi_adjust_cfa_offset -8\n"
+    ".cfi_restore %rcx\n"
     "popq %rdx\n"
+    ".cfi_adjust_cfa_offset -8\n"
+    ".cfi_restore %rdx\n"
     "popq %rsi\n"
+    ".cfi_adjust_cfa_offset -8\n"
+    ".cfi_restore %rsi\n"
     "popq %rdi\n"
+    ".cfi_adjust_cfa_offset -8\n"
+    ".cfi_restore %rdi\n"
     // Restore RBP
     "popq %rbp\n"
-    "ret\n");
+    ".cfi_adjust_cfa_offset -8\n"
+    ".cfi_restore %rbp\n"
+    "ret\n"
+    ".cfi_endproc\n");
 #elif defined(__i386__) || defined(i386) || defined(_M_IX86)
 #ifndef _MSC_VER
   void X86CompilationCallback(void);
@@ -119,15 +146,15 @@ extern "C" {
     ".cfi_startproc\n"
     "pushl %ebp\n"
     ".cfi_def_cfa_offset 8\n"
-    ".cfi_offset ebp, -8\n"
+    ".cfi_offset %ebp, -8\n"
     "movl %esp, %ebp\n"    // Standard prologue
-    ".cfi_def_cfa_register ebp\n"
+    ".cfi_def_cfa_register %ebp\n"
     "pushl %eax\n"
-    ".cfi_rel_offset eax, 0\n"
+    ".cfi_rel_offset %eax, 0\n"
     "pushl %edx\n"         // Save EAX/EDX/ECX
-    ".cfi_rel_offset edx, 4\n"
+    ".cfi_rel_offset %edx, 4\n"
     "pushl %ecx\n"
-    ".cfi_rel_offset ecx, 8\n"
+    ".cfi_rel_offset %ecx, 8\n"
 #if defined(__APPLE__)
     "andl $-16, %esp\n"    // Align ESP on 16-byte boundary
 #endif
@@ -137,21 +164,21 @@ extern "C" {
     "movl %ebp, (%esp)\n"
     "call " ASMPREFIX "X86CompilationCallback2\n"
     "movl %ebp, %esp\n"    // Restore ESP
-    ".cfi_def_cfa_register esp\n"
+    ".cfi_def_cfa_register %esp\n"
     "subl $12, %esp\n"
     ".cfi_adjust_cfa_offset 12\n"
     "popl %ecx\n"
     ".cfi_adjust_cfa_offset -4\n"
-    ".cfi_restore ecx\n"
+    ".cfi_restore %ecx\n"
     "popl %edx\n"
     ".cfi_adjust_cfa_offset -4\n"
-    ".cfi_restore edx\n"
+    ".cfi_restore %edx\n"
     "popl %eax\n"
     ".cfi_adjust_cfa_offset -4\n"
-    ".cfi_restore eax\n"
+    ".cfi_restore %eax\n"
     "popl %ebp\n"
     ".cfi_adjust_cfa_offset -4\n"
-    ".cfi_restore ebp\n"
+    ".cfi_restore %ebp\n"
     "ret\n"
     ".cfi_endproc\n");
 
@@ -165,15 +192,15 @@ extern "C" {
     ".cfi_startproc\n"
     "pushl %ebp\n"
".cfi_def_cfa_offset 8\n" - ".cfi_offset ebp, -8\n" + ".cfi_offset %ebp, -8\n" "movl %esp, %ebp\n" // Standard prologue - ".cfi_def_cfa_register ebp\n" + ".cfi_def_cfa_register %ebp\n" "pushl %eax\n" - ".cfi_rel_offset eax, 0\n" + ".cfi_rel_offset %eax, 0\n" "pushl %edx\n" // Save EAX/EDX/ECX - ".cfi_rel_offset edx, 4\n" + ".cfi_rel_offset %edx, 4\n" "pushl %ecx\n" - ".cfi_rel_offset ecx, 8\n" + ".cfi_rel_offset %ecx, 8\n" "andl $-16, %esp\n" // Align ESP on 16-byte boundary // Save all XMM arg registers "subl $64, %esp\n" @@ -191,29 +218,29 @@ extern "C" { "call " ASMPREFIX "X86CompilationCallback2\n" "addl $16, %esp\n" "movaps 48(%esp), %xmm3\n" - ".cfi_restore xmm3\n" + ".cfi_restore %xmm3\n" "movaps 32(%esp), %xmm2\n" - ".cfi_restore xmm2\n" + ".cfi_restore %xmm2\n" "movaps 16(%esp), %xmm1\n" - ".cfi_restore xmm1\n" + ".cfi_restore %xmm1\n" "movaps (%esp), %xmm0\n" - ".cfi_restore xmm0\n" + ".cfi_restore %xmm0\n" "movl %ebp, %esp\n" // Restore ESP ".cfi_def_cfa_register esp\n" "subl $12, %esp\n" ".cfi_adjust_cfa_offset 12\n" "popl %ecx\n" ".cfi_adjust_cfa_offset -4\n" - ".cfi_restore ecx\n" + ".cfi_restore %ecx\n" "popl %edx\n" ".cfi_adjust_cfa_offset -4\n" - ".cfi_restore edx\n" + ".cfi_restore %edx\n" "popl %eax\n" ".cfi_adjust_cfa_offset -4\n" - ".cfi_restore eax\n" + ".cfi_restore %eax\n" "popl %ebp\n" ".cfi_adjust_cfa_offset -4\n" - ".cfi_restore ebp\n" + ".cfi_restore %ebp\n" "ret\n" ".cfi_endproc\n"); #else