Author: lattner Date: Mon Dec 10 13:10:18 2007 New Revision: 44803 URL: http://llvm.org/viewvc/llvm-project?rev=44803&view=rev Log: Disable cfi directives for now, darwin doesn't support them. These should probably be something like:
CFI(".cfi_def_cfa_offset 16\n") where CFI is defined to a noop on darwin and other platforms that don't support those directives. Modified: llvm/trunk/lib/Target/X86/X86JITInfo.cpp Modified: llvm/trunk/lib/Target/X86/X86JITInfo.cpp URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/lib/Target/X86/X86JITInfo.cpp?rev=44803&r1=44802&r2=44803&view=diff ============================================================================== --- llvm/trunk/lib/Target/X86/X86JITInfo.cpp (original) +++ llvm/trunk/lib/Target/X86/X86JITInfo.cpp Mon Dec 10 13:10:18 2007 @@ -58,27 +58,27 @@ ".align 8\n" ".globl " ASMPREFIX "X86CompilationCallback\n" ASMPREFIX "X86CompilationCallback:\n" - ".cfi_startproc\n" +// ".cfi_startproc\n" // Save RBP "pushq %rbp\n" - ".cfi_def_cfa_offset 16\n" - ".cfi_offset %rbp, -16\n" +// ".cfi_def_cfa_offset 16\n" +// ".cfi_offset %rbp, -16\n" // Save RSP "movq %rsp, %rbp\n" - ".cfi_def_cfa_register %rbp\n" +// ".cfi_def_cfa_register %rbp\n" // Save all int arg registers "pushq %rdi\n" - ".cfi_rel_offset %rdi, 0\n" +// ".cfi_rel_offset %rdi, 0\n" "pushq %rsi\n" - ".cfi_rel_offset %rsi, 8\n" +// ".cfi_rel_offset %rsi, 8\n" "pushq %rdx\n" - ".cfi_rel_offset %rdx, 16\n" +// ".cfi_rel_offset %rdx, 16\n" "pushq %rcx\n" - ".cfi_rel_offset %rcx, 24\n" +// ".cfi_rel_offset %rcx, 24\n" "pushq %r8\n" - ".cfi_rel_offset %r8, 32\n" +// ".cfi_rel_offset %r8, 32\n" "pushq %r9\n" - ".cfi_rel_offset %r9, 40\n" +// ".cfi_rel_offset %r9, 40\n" // Align stack on 16-byte boundary. ESP might not be properly aligned // (8 byte) if this is called from an indirect stub. 
"andq $-16, %rsp\n" @@ -107,34 +107,35 @@ "movaps (%rsp), %xmm0\n" // Restore RSP "movq %rbp, %rsp\n" - ".cfi_def_cfa_register esp\n" +// ".cfi_def_cfa_register esp\n" // Restore all int arg registers "subq $48, %rsp\n" - ".cfi_adjust_cfa_offset 48\n" +// ".cfi_adjust_cfa_offset 48\n" "popq %r9\n" - ".cfi_adjust_cfa_offset -8\n" - ".cfi_restore %r9\n" +// ".cfi_adjust_cfa_offset -8\n" +// ".cfi_restore %r9\n" "popq %r8\n" - ".cfi_adjust_cfa_offset -8\n" - ".cfi_restore %r8\n" +// ".cfi_adjust_cfa_offset -8\n" +// ".cfi_restore %r8\n" "popq %rcx\n" - ".cfi_adjust_cfa_offset -8\n" - ".cfi_restore %rcx\n" +// ".cfi_adjust_cfa_offset -8\n" +// ".cfi_restore %rcx\n" "popq %rdx\n" - ".cfi_adjust_cfa_offset -8\n" - ".cfi_restore %rdx\n" +// ".cfi_adjust_cfa_offset -8\n" +// ".cfi_restore %rdx\n" "popq %rsi\n" - ".cfi_adjust_cfa_offset -8\n" - ".cfi_restore %rsi\n" +// ".cfi_adjust_cfa_offset -8\n" +// ".cfi_restore %rsi\n" "popq %rdi\n" - ".cfi_adjust_cfa_offset -8\n" - ".cfi_restore %rdi\n" +// ".cfi_adjust_cfa_offset -8\n" +// ".cfi_restore %rdi\n" // Restore RBP "popq %rbp\n" - ".cfi_adjust_cfa_offset -8\n" - ".cfi_restore %rbp\n" +// ".cfi_adjust_cfa_offset -8\n" +// ".cfi_restore %rbp\n" "ret\n" - ".cfi_endproc\n"); +// ".cfi_endproc\n" + ); #elif defined(__i386__) || defined(i386) || defined(_M_IX86) #ifndef _MSC_VER void X86CompilationCallback(void); @@ -143,18 +144,18 @@ ".align 8\n" ".globl " ASMPREFIX "X86CompilationCallback\n" ASMPREFIX "X86CompilationCallback:\n" - ".cfi_startproc\n" +// ".cfi_startproc\n" "pushl %ebp\n" - ".cfi_def_cfa_offset 8\n" - ".cfi_offset %ebp, -8\n" +// ".cfi_def_cfa_offset 8\n" +// ".cfi_offset %ebp, -8\n" "movl %esp, %ebp\n" // Standard prologue - ".cfi_def_cfa_register %ebp\n" +// ".cfi_def_cfa_register %ebp\n" "pushl %eax\n" - ".cfi_rel_offset %eax, 0\n" +// ".cfi_rel_offset %eax, 0\n" "pushl %edx\n" // Save EAX/EDX/ECX - ".cfi_rel_offset %edx, 4\n" +// ".cfi_rel_offset %edx, 4\n" "pushl %ecx\n" - ".cfi_rel_offset %ecx, 8\n" +// 
".cfi_rel_offset %ecx, 8\n" #if defined(__APPLE__) "andl $-16, %esp\n" // Align ESP on 16-byte boundary #endif @@ -164,23 +165,24 @@ "movl %ebp, (%esp)\n" "call " ASMPREFIX "X86CompilationCallback2\n" "movl %ebp, %esp\n" // Restore ESP - ".cfi_def_cfa_register %esp\n" +// ".cfi_def_cfa_register %esp\n" "subl $12, %esp\n" - ".cfi_adjust_cfa_offset 12\n" +// ".cfi_adjust_cfa_offset 12\n" "popl %ecx\n" - ".cfi_adjust_cfa_offset -4\n" - ".cfi_restore %ecx\n" +// ".cfi_adjust_cfa_offset -4\n" +// ".cfi_restore %ecx\n" "popl %edx\n" - ".cfi_adjust_cfa_offset -4\n" - ".cfi_restore %edx\n" +// ".cfi_adjust_cfa_offset -4\n" +// ".cfi_restore %edx\n" "popl %eax\n" - ".cfi_adjust_cfa_offset -4\n" - ".cfi_restore %eax\n" +// ".cfi_adjust_cfa_offset -4\n" +// ".cfi_restore %eax\n" "popl %ebp\n" - ".cfi_adjust_cfa_offset -4\n" - ".cfi_restore %ebp\n" +// ".cfi_adjust_cfa_offset -4\n" +// ".cfi_restore %ebp\n" "ret\n" - ".cfi_endproc\n"); +// ".cfi_endproc\n" + ); // Same as X86CompilationCallback but also saves XMM argument registers. 
void X86CompilationCallback_SSE(void); @@ -189,18 +191,18 @@ ".align 8\n" ".globl " ASMPREFIX "X86CompilationCallback_SSE\n" ASMPREFIX "X86CompilationCallback_SSE:\n" - ".cfi_startproc\n" +// ".cfi_startproc\n" "pushl %ebp\n" - ".cfi_def_cfa_offset 8\n" - ".cfi_offset %ebp, -8\n" +// ".cfi_def_cfa_offset 8\n" +// ".cfi_offset %ebp, -8\n" "movl %esp, %ebp\n" // Standard prologue - ".cfi_def_cfa_register %ebp\n" +// ".cfi_def_cfa_register %ebp\n" "pushl %eax\n" - ".cfi_rel_offset %eax, 0\n" +// ".cfi_rel_offset %eax, 0\n" "pushl %edx\n" // Save EAX/EDX/ECX - ".cfi_rel_offset %edx, 4\n" +// ".cfi_rel_offset %edx, 4\n" "pushl %ecx\n" - ".cfi_rel_offset %ecx, 8\n" +// ".cfi_rel_offset %ecx, 8\n" "andl $-16, %esp\n" // Align ESP on 16-byte boundary // Save all XMM arg registers "subl $64, %esp\n" @@ -218,31 +220,32 @@ "call " ASMPREFIX "X86CompilationCallback2\n" "addl $16, %esp\n" "movaps 48(%esp), %xmm3\n" - ".cfi_restore %xmm3\n" +// ".cfi_restore %xmm3\n" "movaps 32(%esp), %xmm2\n" - ".cfi_restore %xmm2\n" +// ".cfi_restore %xmm2\n" "movaps 16(%esp), %xmm1\n" - ".cfi_restore %xmm1\n" +// ".cfi_restore %xmm1\n" "movaps (%esp), %xmm0\n" - ".cfi_restore %xmm0\n" +// ".cfi_restore %xmm0\n" "movl %ebp, %esp\n" // Restore ESP - ".cfi_def_cfa_register esp\n" +// ".cfi_def_cfa_register esp\n" "subl $12, %esp\n" - ".cfi_adjust_cfa_offset 12\n" +// ".cfi_adjust_cfa_offset 12\n" "popl %ecx\n" - ".cfi_adjust_cfa_offset -4\n" - ".cfi_restore %ecx\n" +// ".cfi_adjust_cfa_offset -4\n" +// ".cfi_restore %ecx\n" "popl %edx\n" - ".cfi_adjust_cfa_offset -4\n" - ".cfi_restore %edx\n" +// ".cfi_adjust_cfa_offset -4\n" +// ".cfi_restore %edx\n" "popl %eax\n" - ".cfi_adjust_cfa_offset -4\n" - ".cfi_restore %eax\n" +// ".cfi_adjust_cfa_offset -4\n" +// ".cfi_restore %eax\n" "popl %ebp\n" - ".cfi_adjust_cfa_offset -4\n" - ".cfi_restore %ebp\n" +// ".cfi_adjust_cfa_offset -4\n" +// ".cfi_restore %ebp\n" "ret\n" - ".cfi_endproc\n"); +// ".cfi_endproc\n" + ); #else void 
X86CompilationCallback2(void); _______________________________________________ llvm-commits mailing list llvm-commits@cs.uiuc.edu http://lists.cs.uiuc.edu/mailman/listinfo/llvm-commits