From 5f68287605b919dd1974e29f13c266abed9ac7c5 Mon Sep 17 00:00:00 2001
From: Anton Korobeynikov
Date: Mon, 10 Dec 2007 23:04:38 +0000
Subject: [PATCH] Provide convenient way to disable CFI stuff for old/broken
 assemblers. Use it for Darwin.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@44818 91177308-0d34-0410-b5e6-96231b3b80d8
---
 lib/Target/X86/X86JITInfo.cpp | 146 ++++++++++++++++++----------------
 1 file changed, 76 insertions(+), 70 deletions(-)

diff --git a/lib/Target/X86/X86JITInfo.cpp b/lib/Target/X86/X86JITInfo.cpp
index 9aa10d58483..5f88da5f89b 100644
--- a/lib/Target/X86/X86JITInfo.cpp
+++ b/lib/Target/X86/X86JITInfo.cpp
@@ -47,6 +47,12 @@ static TargetJITInfo::JITCompilerFn JITCompilerFunction;
 #define GETASMPREFIX(X) GETASMPREFIX2(X)
 #define ASMPREFIX GETASMPREFIX(__USER_LABEL_PREFIX__)
 
+#if defined(__APPLE__)
+# define CFI(x)
+#else
+# define CFI(x) x
+#endif
+
 // Provide a wrapper for X86CompilationCallback2 that saves non-traditional
 // callee saved registers, for the fastcc calling convention.
 extern "C" {
@@ -58,27 +64,27 @@ extern "C" {
     ".align 8\n"
     ".globl " ASMPREFIX "X86CompilationCallback\n"
   ASMPREFIX "X86CompilationCallback:\n"
-//    ".cfi_startproc\n"
+    CFI(".cfi_startproc\n")
     // Save RBP
     "pushq %rbp\n"
-//    ".cfi_def_cfa_offset 16\n"
-//    ".cfi_offset %rbp, -16\n"
+    CFI(".cfi_def_cfa_offset 16\n")
+    CFI(".cfi_offset %rbp, -16\n")
     // Save RSP
     "movq %rsp, %rbp\n"
-//    ".cfi_def_cfa_register %rbp\n"
+    CFI(".cfi_def_cfa_register %rbp\n")
     // Save all int arg registers
     "pushq %rdi\n"
-//    ".cfi_rel_offset %rdi, 0\n"
+    CFI(".cfi_rel_offset %rdi, 0\n")
     "pushq %rsi\n"
-//    ".cfi_rel_offset %rsi, 8\n"
+    CFI(".cfi_rel_offset %rsi, 8\n")
     "pushq %rdx\n"
-//    ".cfi_rel_offset %rdx, 16\n"
+    CFI(".cfi_rel_offset %rdx, 16\n")
     "pushq %rcx\n"
-//    ".cfi_rel_offset %rcx, 24\n"
+    CFI(".cfi_rel_offset %rcx, 24\n")
     "pushq %r8\n"
-//    ".cfi_rel_offset %r8, 32\n"
+    CFI(".cfi_rel_offset %r8, 32\n")
     "pushq %r9\n"
-//    ".cfi_rel_offset %r9, 40\n"
+    CFI(".cfi_rel_offset %r9, 40\n")
     // Align stack on 16-byte boundary. ESP might not be properly aligned
     // (8 byte) if this is called from an indirect stub.
"andq $-16, %rsp\n" @@ -107,35 +113,35 @@ extern "C" { "movaps (%rsp), %xmm0\n" // Restore RSP "movq %rbp, %rsp\n" -// ".cfi_def_cfa_register esp\n" + CFI(".cfi_def_cfa_register esp\n") // Restore all int arg registers "subq $48, %rsp\n" -// ".cfi_adjust_cfa_offset 48\n" + CFI(".cfi_adjust_cfa_offset 48\n") "popq %r9\n" -// ".cfi_adjust_cfa_offset -8\n" -// ".cfi_restore %r9\n" + CFI(".cfi_adjust_cfa_offset -8\n") + CFI(".cfi_restore %r9\n") "popq %r8\n" -// ".cfi_adjust_cfa_offset -8\n" -// ".cfi_restore %r8\n" + CFI(".cfi_adjust_cfa_offset -8\n") + CFI(".cfi_restore %r8\n") "popq %rcx\n" -// ".cfi_adjust_cfa_offset -8\n" -// ".cfi_restore %rcx\n" + CFI(".cfi_adjust_cfa_offset -8\n") + CFI(".cfi_restore %rcx\n") "popq %rdx\n" -// ".cfi_adjust_cfa_offset -8\n" -// ".cfi_restore %rdx\n" + CFI(".cfi_adjust_cfa_offset -8\n") + CFI(".cfi_restore %rdx\n") "popq %rsi\n" -// ".cfi_adjust_cfa_offset -8\n" -// ".cfi_restore %rsi\n" + CFI(".cfi_adjust_cfa_offset -8\n") + CFI(".cfi_restore %rsi\n") "popq %rdi\n" -// ".cfi_adjust_cfa_offset -8\n" -// ".cfi_restore %rdi\n" + CFI(".cfi_adjust_cfa_offset -8\n") + CFI(".cfi_restore %rdi\n") // Restore RBP "popq %rbp\n" -// ".cfi_adjust_cfa_offset -8\n" -// ".cfi_restore %rbp\n" + CFI(".cfi_adjust_cfa_offset -8\n") + CFI(".cfi_restore %rbp\n") "ret\n" -// ".cfi_endproc\n" - ); + CFI(".cfi_endproc\n") + ); #elif defined(__i386__) || defined(i386) || defined(_M_IX86) #ifndef _MSC_VER void X86CompilationCallback(void); @@ -144,18 +150,18 @@ extern "C" { ".align 8\n" ".globl " ASMPREFIX "X86CompilationCallback\n" ASMPREFIX "X86CompilationCallback:\n" -// ".cfi_startproc\n" + CFI(".cfi_startproc\n") "pushl %ebp\n" -// ".cfi_def_cfa_offset 8\n" -// ".cfi_offset %ebp, -8\n" + CFI(".cfi_def_cfa_offset 8\n") + CFI(".cfi_offset %ebp, -8\n") "movl %esp, %ebp\n" // Standard prologue -// ".cfi_def_cfa_register %ebp\n" + CFI(".cfi_def_cfa_register %ebp\n") "pushl %eax\n" -// ".cfi_rel_offset %eax, 0\n" + CFI(".cfi_rel_offset %eax, 0\n") "pushl %edx\n" // Save EAX/EDX/ECX -// ".cfi_rel_offset %edx, 4\n" + CFI(".cfi_rel_offset %edx, 4\n") "pushl %ecx\n" -// ".cfi_rel_offset %ecx, 8\n" + CFI(".cfi_rel_offset %ecx, 8\n") #if defined(__APPLE__) "andl $-16, %esp\n" // Align ESP on 16-byte boundary #endif @@ -165,24 +171,24 @@ extern "C" { "movl %ebp, (%esp)\n" "call " ASMPREFIX "X86CompilationCallback2\n" "movl %ebp, %esp\n" // Restore ESP -// ".cfi_def_cfa_register %esp\n" + CFI(".cfi_def_cfa_register %esp\n") "subl $12, %esp\n" -// ".cfi_adjust_cfa_offset 12\n" + CFI(".cfi_adjust_cfa_offset 12\n") "popl %ecx\n" -// ".cfi_adjust_cfa_offset -4\n" -// ".cfi_restore %ecx\n" + CFI(".cfi_adjust_cfa_offset -4\n") + CFI(".cfi_restore %ecx\n") "popl %edx\n" -// ".cfi_adjust_cfa_offset -4\n" -// ".cfi_restore %edx\n" + CFI(".cfi_adjust_cfa_offset -4\n") + CFI(".cfi_restore %edx\n") "popl %eax\n" -// ".cfi_adjust_cfa_offset -4\n" -// ".cfi_restore %eax\n" + CFI(".cfi_adjust_cfa_offset -4\n") + CFI(".cfi_restore %eax\n") "popl %ebp\n" -// ".cfi_adjust_cfa_offset -4\n" -// ".cfi_restore %ebp\n" + CFI(".cfi_adjust_cfa_offset -4\n") + CFI(".cfi_restore %ebp\n") "ret\n" -// ".cfi_endproc\n" - ); + CFI(".cfi_endproc\n") + ); // Same as X86CompilationCallback but also saves XMM argument registers. 
   void X86CompilationCallback_SSE(void);
@@ -191,18 +197,18 @@ extern "C" {
     ".align 8\n"
     ".globl " ASMPREFIX "X86CompilationCallback_SSE\n"
   ASMPREFIX "X86CompilationCallback_SSE:\n"
-//    ".cfi_startproc\n"
+    CFI(".cfi_startproc\n")
     "pushl %ebp\n"
-//    ".cfi_def_cfa_offset 8\n"
-//    ".cfi_offset %ebp, -8\n"
+    CFI(".cfi_def_cfa_offset 8\n")
+    CFI(".cfi_offset %ebp, -8\n")
     "movl %esp, %ebp\n"    // Standard prologue
-//    ".cfi_def_cfa_register %ebp\n"
+    CFI(".cfi_def_cfa_register %ebp\n")
     "pushl %eax\n"
-//    ".cfi_rel_offset %eax, 0\n"
+    CFI(".cfi_rel_offset %eax, 0\n")
     "pushl %edx\n"         // Save EAX/EDX/ECX
-//    ".cfi_rel_offset %edx, 4\n"
+    CFI(".cfi_rel_offset %edx, 4\n")
     "pushl %ecx\n"
-//    ".cfi_rel_offset %ecx, 8\n"
+    CFI(".cfi_rel_offset %ecx, 8\n")
     "andl $-16, %esp\n"    // Align ESP on 16-byte boundary
     // Save all XMM arg registers
     "subl $64, %esp\n"
@@ -220,32 +226,32 @@ extern "C" {
     "call " ASMPREFIX "X86CompilationCallback2\n"
     "addl $16, %esp\n"
     "movaps 48(%esp), %xmm3\n"
-//    ".cfi_restore %xmm3\n"
+    CFI(".cfi_restore %xmm3\n")
     "movaps 32(%esp), %xmm2\n"
-//    ".cfi_restore %xmm2\n"
+    CFI(".cfi_restore %xmm2\n")
     "movaps 16(%esp), %xmm1\n"
-//    ".cfi_restore %xmm1\n"
+    CFI(".cfi_restore %xmm1\n")
     "movaps (%esp), %xmm0\n"
-//    ".cfi_restore %xmm0\n"
+    CFI(".cfi_restore %xmm0\n")
     "movl %ebp, %esp\n"    // Restore ESP
-//    ".cfi_def_cfa_register esp\n"
+    CFI(".cfi_def_cfa_register esp\n")
     "subl $12, %esp\n"
-//    ".cfi_adjust_cfa_offset 12\n"
+    CFI(".cfi_adjust_cfa_offset 12\n")
     "popl %ecx\n"
-//    ".cfi_adjust_cfa_offset -4\n"
-//    ".cfi_restore %ecx\n"
+    CFI(".cfi_adjust_cfa_offset -4\n")
+    CFI(".cfi_restore %ecx\n")
     "popl %edx\n"
-//    ".cfi_adjust_cfa_offset -4\n"
-//    ".cfi_restore %edx\n"
+    CFI(".cfi_adjust_cfa_offset -4\n")
+    CFI(".cfi_restore %edx\n")
     "popl %eax\n"
-//    ".cfi_adjust_cfa_offset -4\n"
-//    ".cfi_restore %eax\n"
+    CFI(".cfi_adjust_cfa_offset -4\n")
+    CFI(".cfi_restore %eax\n")
     "popl %ebp\n"
-//    ".cfi_adjust_cfa_offset -4\n"
-//    ".cfi_restore %ebp\n"
+    CFI(".cfi_adjust_cfa_offset -4\n")
+    CFI(".cfi_restore %ebp\n")
     "ret\n"
-//    ".cfi_endproc\n"
-  );
+    CFI(".cfi_endproc\n")
+  );
 #else
   void X86CompilationCallback2(void);
-- 
2.34.1
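
Illustration, placed below the "-- " cut line so git-am ignores it: the patch
hinges on C string-literal concatenation inside asm(). CFI(x) either passes
the ".cfi_*" directive string through to the assembler or expands to nothing,
letting the neighboring literals concatenate across the gap, so the
instruction stream is identical either way and only the unwind annotations
disappear on Darwin. A minimal standalone sketch of the pattern, assuming a
32-bit x86 target; SYM and demo_stub are hypothetical stand-ins for the
file's ASMPREFIX machinery and its real callbacks:

#if defined(__APPLE__)
# define CFI(x)            /* assumption: host assembler rejects .cfi_* */
# define SYM(s) "_" s      /* Mach-O symbols carry a leading underscore */
#else
# define CFI(x) x          /* assembler understands CFI; emit it */
# define SYM(s) s
#endif

extern "C" void demo_stub(void);
asm(
  ".text\n"
  ".globl " SYM("demo_stub") "\n"
  SYM("demo_stub") ":\n"
  CFI(".cfi_startproc\n")
  "pushl %ebp\n"
  CFI(".cfi_def_cfa_offset 8\n")      // return addr + saved EBP on stack
  CFI(".cfi_offset %ebp, -8\n")
  "movl %esp, %ebp\n"
  CFI(".cfi_def_cfa_register %ebp\n") // CFA now tracked via EBP
  "movl %ebp, %esp\n"
  CFI(".cfi_def_cfa_register %esp\n") // back to ESP-relative tracking
  "popl %ebp\n"
  CFI(".cfi_adjust_cfa_offset -4\n")
  CFI(".cfi_restore %ebp\n")
  "ret\n"
  CFI(".cfi_endproc\n")
);

With CFI defined away, the preprocessed asm() contains only the instruction
strings, so 2007-era Apple assemblers that choke on .cfi_* directives still
assemble the JIT callbacks; every other host keeps unwind information.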