Prevent the fault recovery labels from cluttering the symbol table and the
disassembly, by converting them to assembler-local (.L-prefixed) labels.
Signed-off-by: Jan Beulich <jbeulich@xxxxxxxxxx>
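
For reference, the GNU assembler treats labels whose names start with .L as
local: they can be referenced within the translation unit as usual, but they
are not emitted into the object file's symbol table, so they never show up
in nm or objdump output. A minimal standalone sketch (the label names are
made up for illustration; the __ex_table usage mirrors the patch):

        .text
        .globl handler                  # global symbol: emitted into the symbol table
handler:
.Lfixup:                                # .L-prefixed: resolved by gas, then discarded
        mov     %ax,%ds
        ret
        .section __ex_table,"a"
        .long   .Lfixup,handler         # local labels may still be referenced here
        .previous

Assembling this and running nm on the object lists only "handler"; .Lfixup
never reaches the symbol table, which is the effect the renames below rely on.
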
Index: 2007-02-07/xen/arch/x86/x86_32/entry.S
===================================================================
--- 2007-02-07.orig/xen/arch/x86/x86_32/entry.S 2006-12-11 15:09:57.000000000 +0100
+++ 2007-02-07/xen/arch/x86/x86_32/entry.S 2007-02-07 16:30:09.000000000 +0100
@@ -84,10 +84,10 @@ restore_all_guest:
jmp restore_all_vm86
1:
#endif
-FLT1: mov UREGS_ds(%esp),%ds
-FLT2: mov UREGS_es(%esp),%es
-FLT3: mov UREGS_fs(%esp),%fs
-FLT4: mov UREGS_gs(%esp),%gs
+.LFT1: mov UREGS_ds(%esp),%ds
+.LFT2: mov UREGS_es(%esp),%es
+.LFT3: mov UREGS_fs(%esp),%fs
+.LFT4: mov UREGS_gs(%esp),%gs
restore_all_vm86:
popl %ebx
popl %ecx
@@ -97,9 +97,9 @@ restore_all_vm86:
popl %ebp
popl %eax
addl $4,%esp
-FLT5: iret
+.LFT5: iret
.section .fixup,"ax"
-FIX5: subl $28,%esp
+.LFX5: subl $28,%esp
pushl 28(%esp) # error_code/entry_vector
movl %eax,UREGS_eax+4(%esp)
movl %ebp,UREGS_ebp+4(%esp)
@@ -108,7 +108,7 @@ FIX5: subl $28,%esp
movl %edx,UREGS_edx+4(%esp)
movl %ecx,UREGS_ecx+4(%esp)
movl %ebx,UREGS_ebx+4(%esp)
-FIX1: SET_XEN_SEGMENTS(a)
+.LFX1: SET_XEN_SEGMENTS(a)
movl %eax,%fs
movl %eax,%gs
sti
@@ -116,11 +116,11 @@ FIX1: SET_XEN_SEGMENTS(a)
pushfl # EFLAGS
movl $__HYPERVISOR_CS,%eax
pushl %eax # CS
- movl $DBLFLT1,%eax
+ movl $.LDF1,%eax
pushl %eax # EIP
pushl %esi # error_code/entry_vector
jmp handle_exception
-DBLFLT1:GET_CURRENT(%ebx)
+.LDF1: GET_CURRENT(%ebx)
jmp test_all_events
failsafe_callback:
GET_CURRENT(%ebx)
@@ -142,14 +142,14 @@ failsafe_callback:
jmp test_all_events
.previous
.section __pre_ex_table,"a"
- .long FLT1,FIX1
- .long FLT2,FIX1
- .long FLT3,FIX1
- .long FLT4,FIX1
- .long FLT5,FIX5
+ .long .LFT1,.LFX1
+ .long .LFT2,.LFX1
+ .long .LFT3,.LFX1
+ .long .LFT4,.LFX1
+ .long .LFT5,.LFX5
.previous
.section __ex_table,"a"
- .long DBLFLT1,failsafe_callback
+ .long .LDF1,failsafe_callback
.previous
ALIGN
@@ -288,32 +288,33 @@ create_bounce_frame:
testl $(2|X86_EFLAGS_VM),%ecx
jz ring1 /* jump if returning to an existing ring-1 activation */
movl VCPU_kernel_sp(%ebx),%esi
-FLT6: mov VCPU_kernel_ss(%ebx),%gs
+.LFT6: mov VCPU_kernel_ss(%ebx),%gs
testl $X86_EFLAGS_VM,UREGS_eflags+4(%esp)
- jz nvm86_1
+ jz .Lnvm86_1
subl $16,%esi /* push ES/DS/FS/GS (VM86 stack frame) */
movl UREGS_es+4(%esp),%eax
-FLT7: movl %eax,%gs:(%esi)
+.LFT7: movl %eax,%gs:(%esi)
movl UREGS_ds+4(%esp),%eax
-FLT8: movl %eax,%gs:4(%esi)
+.LFT8: movl %eax,%gs:4(%esi)
movl UREGS_fs+4(%esp),%eax
-FLT9: movl %eax,%gs:8(%esi)
+.LFT9: movl %eax,%gs:8(%esi)
movl UREGS_gs+4(%esp),%eax
-FLT10: movl %eax,%gs:12(%esi)
-nvm86_1:subl $8,%esi /* push SS/ESP (inter-priv iret) */
+.LFT10: movl %eax,%gs:12(%esi)
+.Lnvm86_1:
+ subl $8,%esi /* push SS/ESP (inter-priv iret) */
movl UREGS_esp+4(%esp),%eax
-FLT11: movl %eax,%gs:(%esi)
+.LFT11: movl %eax,%gs:(%esi)
movl UREGS_ss+4(%esp),%eax
-FLT12: movl %eax,%gs:4(%esi)
+.LFT12: movl %eax,%gs:4(%esi)
jmp 1f
ring1: /* obtain ss/esp from oldss/oldesp -- a ring-1 activation exists */
movl UREGS_esp+4(%esp),%esi
-FLT13: mov UREGS_ss+4(%esp),%gs
+.LFT13: mov UREGS_ss+4(%esp),%gs
1: /* Construct a stack frame: EFLAGS, CS/EIP */
movb TRAPBOUNCE_flags(%edx),%cl
subl $12,%esi
movl UREGS_eip+4(%esp),%eax
-FLT14: movl %eax,%gs:(%esi)
+.LFT14: movl %eax,%gs:(%esi)
movl VCPU_vcpu_info(%ebx),%eax
pushl VCPUINFO_upcall_mask(%eax)
testb $TBF_INTERRUPT,%cl
@@ -324,49 +325,51 @@ FLT14: movl %eax,%gs:(%esi)
movw UREGS_cs+4(%esp),%ax # Bits 0-15: CS
#ifdef CONFIG_X86_SUPERVISOR_MODE_KERNEL
testw $2,%ax
- jnz FLT15
+ jnz .LFT15
and $~3,%ax # RPL 1 -> RPL 0
#endif
-FLT15: movl %eax,%gs:4(%esi)
+.LFT15: movl %eax,%gs:4(%esi)
test $0x00FF0000,%eax # Bits 16-23: saved_upcall_mask
setz %ch # %ch == !saved_upcall_mask
movl UREGS_eflags+4(%esp),%eax
andl $~X86_EFLAGS_IF,%eax
shlb $1,%ch # Bit 9 (EFLAGS.IF)
orb %ch,%ah # Fold EFLAGS.IF into %eax
-FLT16: movl %eax,%gs:8(%esi)
+.LFT16: movl %eax,%gs:8(%esi)
test $TBF_EXCEPTION_ERRCODE,%cl
jz 1f
subl $4,%esi # push error_code onto guest frame
movl TRAPBOUNCE_error_code(%edx),%eax
-FLT17: movl %eax,%gs:(%esi)
+.LFT17: movl %eax,%gs:(%esi)
1: testb $TBF_FAILSAFE,%cl
jz 2f
subl $16,%esi # add DS/ES/FS/GS to failsafe stack frame
testl $X86_EFLAGS_VM,UREGS_eflags+4(%esp)
- jz nvm86_2
+ jz .Lnvm86_2
xorl %eax,%eax # VM86: we write zero selector values
-FLT18: movl %eax,%gs:(%esi)
-FLT19: movl %eax,%gs:4(%esi)
-FLT20: movl %eax,%gs:8(%esi)
-FLT21: movl %eax,%gs:12(%esi)
+.LFT18: movl %eax,%gs:(%esi)
+.LFT19: movl %eax,%gs:4(%esi)
+.LFT20: movl %eax,%gs:8(%esi)
+.LFT21: movl %eax,%gs:12(%esi)
jmp 2f
-nvm86_2:movl UREGS_ds+4(%esp),%eax # non-VM86: write real selector values
-FLT22: movl %eax,%gs:(%esi)
+.Lnvm86_2:
+ movl UREGS_ds+4(%esp),%eax # non-VM86: write real selector values
+.LFT22: movl %eax,%gs:(%esi)
movl UREGS_es+4(%esp),%eax
-FLT23: movl %eax,%gs:4(%esi)
+.LFT23: movl %eax,%gs:4(%esi)
movl UREGS_fs+4(%esp),%eax
-FLT24: movl %eax,%gs:8(%esi)
+.LFT24: movl %eax,%gs:8(%esi)
movl UREGS_gs+4(%esp),%eax
-FLT25: movl %eax,%gs:12(%esi)
+.LFT25: movl %eax,%gs:12(%esi)
2: testl $X86_EFLAGS_VM,UREGS_eflags+4(%esp)
- jz nvm86_3
+ jz .Lnvm86_3
xorl %eax,%eax /* zero DS-GS, just as a real CPU would */
movl %eax,UREGS_ds+4(%esp)
movl %eax,UREGS_es+4(%esp)
movl %eax,UREGS_fs+4(%esp)
movl %eax,UREGS_gs+4(%esp)
-nvm86_3:/* Rewrite our stack frame and return to ring 1. */
+.Lnvm86_3:
+ /* Rewrite our stack frame and return to ring 1. */
/* IA32 Ref. Vol. 3: TF, VM, RF and NT flags are cleared on trap. */
andl $~(X86_EFLAGS_VM|X86_EFLAGS_RF|\
X86_EFLAGS_NT|X86_EFLAGS_TF),UREGS_eflags+4(%esp)
@@ -382,16 +385,16 @@ nvm86_3:/* Rewrite our stack frame and r
movb $0,TRAPBOUNCE_flags(%edx)
ret
.section __ex_table,"a"
- .long FLT6,domain_crash_synchronous , FLT7,domain_crash_synchronous
- .long FLT8,domain_crash_synchronous , FLT9,domain_crash_synchronous
- .long FLT10,domain_crash_synchronous , FLT11,domain_crash_synchronous
- .long FLT12,domain_crash_synchronous , FLT13,domain_crash_synchronous
- .long FLT14,domain_crash_synchronous , FLT15,domain_crash_synchronous
- .long FLT16,domain_crash_synchronous , FLT17,domain_crash_synchronous
- .long FLT18,domain_crash_synchronous , FLT19,domain_crash_synchronous
- .long FLT20,domain_crash_synchronous , FLT21,domain_crash_synchronous
- .long FLT22,domain_crash_synchronous , FLT23,domain_crash_synchronous
- .long FLT24,domain_crash_synchronous , FLT25,domain_crash_synchronous
+ .long .LFT6,domain_crash_synchronous , .LFT7,domain_crash_synchronous
+ .long .LFT8,domain_crash_synchronous , .LFT9,domain_crash_synchronous
+ .long .LFT10,domain_crash_synchronous , .LFT11,domain_crash_synchronous
+ .long .LFT12,domain_crash_synchronous , .LFT13,domain_crash_synchronous
+ .long .LFT14,domain_crash_synchronous , .LFT15,domain_crash_synchronous
+ .long .LFT16,domain_crash_synchronous , .LFT17,domain_crash_synchronous
+ .long .LFT18,domain_crash_synchronous , .LFT19,domain_crash_synchronous
+ .long .LFT20,domain_crash_synchronous , .LFT21,domain_crash_synchronous
+ .long .LFT22,domain_crash_synchronous , .LFT23,domain_crash_synchronous
+ .long .LFT24,domain_crash_synchronous , .LFT25,domain_crash_synchronous
.previous
domain_crash_synchronous_string:
Index: 2007-02-07/xen/arch/x86/x86_64/compat/entry.S
===================================================================
--- 2007-02-07.orig/xen/arch/x86/x86_64/compat/entry.S 2007-02-01 17:41:08.000000000 +0100
+++ 2007-02-07/xen/arch/x86/x86_64/compat/entry.S 2007-02-07 16:28:28.000000000 +0100
@@ -123,10 +123,10 @@ compat_bad_hypercall:
compat_restore_all_guest:
RESTORE_ALL
addq $8,%rsp
-CFLT0: iretq
+.Lft0: iretq
.section .fixup,"ax"
-CFIX0: popq -15*8-8(%rsp) # error_code/entry_vector
+.Lfx0: popq -15*8-8(%rsp) # error_code/entry_vector
SAVE_ALL # 15*8 bytes pushed
movq -8(%rsp),%rsi # error_code/entry_vector
sti # after stack abuse (-1024(%rsp))
@@ -135,11 +135,11 @@ CFIX0: popq -15*8-8(%rsp) #
pushq %rax # RSP
pushfq # RFLAGS
pushq $__HYPERVISOR_CS # CS
- leaq CDBLFLT0(%rip),%rax
+ leaq .Ldf0(%rip),%rax
pushq %rax # RIP
pushq %rsi # error_code/entry_vector
jmp handle_exception
-CDBLFLT0:GET_CURRENT(%rbx)
+.Ldf0: GET_CURRENT(%rbx)
jmp compat_test_all_events
compat_failsafe_callback:
GET_CURRENT(%rbx)
@@ -157,10 +157,10 @@ compat_failsafe_callback:
jmp compat_test_all_events
.previous
.section __pre_ex_table,"a"
- .quad CFLT0,CFIX0
+ .quad .Lft0,.Lfx0
.previous
.section __ex_table,"a"
- .quad CDBLFLT0,compat_failsafe_callback
+ .quad .Ldf0,compat_failsafe_callback
.previous
/* %rdx: trap_bounce, %rbx: struct vcpu */
@@ -180,16 +180,16 @@ compat_create_bounce_frame:
jz 1f
/* Push new frame at registered guest-OS stack base. */
movl VCPU_kernel_sp(%rbx),%esi
-CFLT1: mov VCPU_kernel_ss(%rbx),%fs
+.Lft1: mov VCPU_kernel_ss(%rbx),%fs
subl $2*4,%esi
movl UREGS_rsp+8(%rsp),%eax
-CFLT2: movl %eax,%fs:(%rsi)
+.Lft2: movl %eax,%fs:(%rsi)
movl UREGS_ss+8(%rsp),%eax
-CFLT3: movl %eax,%fs:4(%rsi)
+.Lft3: movl %eax,%fs:4(%rsi)
jmp 2f
1: /* In kernel context already: push new frame at existing %rsp. */
movl UREGS_rsp+8(%rsp),%esi
-CFLT4: mov UREGS_ss+8(%rsp),%fs
+.Lft4: mov UREGS_ss+8(%rsp),%fs
2:
movb TRAPBOUNCE_flags(%rdx),%cl
subl $3*4,%esi
@@ -201,7 +201,7 @@ CFLT4: mov UREGS_ss+8(%rsp),%fs
popq %rax
shll $16,%eax # Bits 16-23: saved_upcall_mask
movw UREGS_cs+8(%rsp),%ax # Bits 0-15: CS
-CFLT5: movl %eax,%fs:4(%rsi) # CS / saved_upcall_mask
+.Lft5: movl %eax,%fs:4(%rsi) # CS / saved_upcall_mask
shrl $16,%eax
testb %al,%al # Bits 0-7: saved_upcall_mask
setz %ch # %ch == !saved_upcall_mask
@@ -209,25 +209,25 @@ CFLT5: movl %eax,%fs:4(%rsi)
andl $~X86_EFLAGS_IF,%eax
shlb $1,%ch # Bit 9 (EFLAGS.IF)
orb %ch,%ah # Fold EFLAGS.IF into %eax
-CFLT6: movl %eax,%fs:2*4(%rsi) # EFLAGS
+.Lft6: movl %eax,%fs:2*4(%rsi) # EFLAGS
movl UREGS_rip+8(%rsp),%eax
-CFLT7: movl %eax,%fs:(%rsi) # EIP
+.Lft7: movl %eax,%fs:(%rsi) # EIP
testb $TBF_EXCEPTION_ERRCODE,%cl
jz 1f
subl $4,%esi
movl TRAPBOUNCE_error_code(%rdx),%eax
-CFLT8: movl %eax,%fs:(%rsi) # ERROR CODE
+.Lft8: movl %eax,%fs:(%rsi) # ERROR CODE
1:
testb $TBF_FAILSAFE,%cl
jz 2f
subl $4*4,%esi
movl %gs,%eax
-CFLT9: movl %eax,%fs:3*4(%rsi) # GS
-CFLT10: movl %edi,%fs:2*4(%rsi) # FS
+.Lft9: movl %eax,%fs:3*4(%rsi) # GS
+.Lft10: movl %edi,%fs:2*4(%rsi) # FS
movl %es,%eax
-CFLT11: movl %eax,%fs:1*4(%rsi) # ES
+.Lft11: movl %eax,%fs:1*4(%rsi) # ES
movl %ds,%eax
-CFLT12: movl %eax,%fs:0*4(%rsi) # DS
+.Lft12: movl %eax,%fs:0*4(%rsi) # DS
2:
/* Rewrite our stack frame and return to guest-OS mode. */
/* IA32 Ref. Vol. 3: TF, VM, RF and NT flags are cleared on trap. */
@@ -236,7 +236,7 @@ CFLT12: movl %eax,%fs:0*4(%rsi)
X86_EFLAGS_NT|X86_EFLAGS_TF),UREGS_eflags+8(%rsp)
mov %fs,UREGS_ss+8(%rsp)
movl %esi,UREGS_rsp+8(%rsp)
-CFLT13: mov %edi,%fs
+.Lft13: mov %edi,%fs
movzwl TRAPBOUNCE_cs(%rdx),%eax
/* Null selectors (0-3) are not allowed. */
testl $~3,%eax
@@ -247,18 +247,18 @@ CFLT13: mov %edi,%fs
movb $0,TRAPBOUNCE_flags(%rdx)
ret
.section .fixup,"ax"
-CFIX13:
+.Lfx13:
xorl %edi,%edi
- jmp CFLT13
+ jmp .Lft13
.previous
.section __ex_table,"a"
- .quad CFLT1,domain_crash_synchronous , CFLT2,compat_crash_page_fault
- .quad CFLT3,compat_crash_page_fault_4 , CFLT4,domain_crash_synchronous
- .quad CFLT5,compat_crash_page_fault_4 , CFLT6,compat_crash_page_fault_8
- .quad CFLT7,compat_crash_page_fault , CFLT8,compat_crash_page_fault
- .quad CFLT9,compat_crash_page_fault_12, CFLT10,compat_crash_page_fault_8
- .quad CFLT11,compat_crash_page_fault_4 , CFLT12,compat_crash_page_fault
- .quad CFLT13,CFIX13
+ .quad .Lft1,domain_crash_synchronous , .Lft2,compat_crash_page_fault
+ .quad .Lft3,compat_crash_page_fault_4 , .Lft4,domain_crash_synchronous
+ .quad .Lft5,compat_crash_page_fault_4 , .Lft6,compat_crash_page_fault_8
+ .quad .Lft7,compat_crash_page_fault , .Lft8,compat_crash_page_fault
+ .quad .Lft9,compat_crash_page_fault_12, .Lft10,compat_crash_page_fault_8
+ .quad .Lft11,compat_crash_page_fault_4 , .Lft12,compat_crash_page_fault
+ .quad .Lft13,.Lfx13
.previous
compat_crash_page_fault_12:
@@ -268,17 +268,17 @@ compat_crash_page_fault_8:
compat_crash_page_fault_4:
addl $4,%esi
compat_crash_page_fault:
-CFLT14: mov %edi,%fs
+.Lft14: mov %edi,%fs
movl %esi,%edi
call show_page_walk
jmp domain_crash_synchronous
.section .fixup,"ax"
-CFIX14:
+.Lfx14:
xorl %edi,%edi
- jmp CFLT14
+ jmp .Lft14
.previous
.section __ex_table,"a"
- .quad CFLT14,CFIX14
+ .quad .Lft14,.Lfx14
.previous
.section .rodata, "a", @progbits
Index: 2007-02-07/xen/arch/x86/x86_64/entry.S
===================================================================
--- 2007-02-07.orig/xen/arch/x86/x86_64/entry.S 2007-01-08 14:15:31.000000000 +0100
+++ 2007-02-07/xen/arch/x86/x86_64/entry.S 2007-02-07 16:31:20.000000000 +0100
@@ -56,10 +56,10 @@ restore_all_guest:
/* No special register assumptions. */
iret_exit_to_guest:
addq $8,%rsp
-FLT1: iretq
+.LFT1: iretq
.section .fixup,"ax"
-FIX1: popq -15*8-8(%rsp) # error_code/entry_vector
+.LFX1: popq -15*8-8(%rsp) # error_code/entry_vector
SAVE_ALL # 15*8 bytes pushed
movq -8(%rsp),%rsi # error_code/entry_vector
sti # after stack abuse (-1024(%rsp))
@@ -68,11 +68,11 @@ FIX1: popq -15*8-8(%rsp) #
pushq %rax # RSP
pushf # RFLAGS
pushq $__HYPERVISOR_CS # CS
- leaq DBLFLT1(%rip),%rax
+ leaq .LDF1(%rip),%rax
pushq %rax # RIP
pushq %rsi # error_code/entry_vector
jmp handle_exception
-DBLFLT1:GET_CURRENT(%rbx)
+.LDF1: GET_CURRENT(%rbx)
jmp test_all_events
failsafe_callback:
GET_CURRENT(%rbx)
@@ -87,10 +87,10 @@ failsafe_callback:
jmp test_all_events
.previous
.section __pre_ex_table,"a"
- .quad FLT1,FIX1
+ .quad .LFT1,.LFX1
.previous
.section __ex_table,"a"
- .quad DBLFLT1,failsafe_callback
+ .quad .LDF1,failsafe_callback
.previous
ALIGN
@@ -249,9 +249,9 @@ create_bounce_frame:
1: movb TRAPBOUNCE_flags(%rdx),%cl
subq $40,%rsi
movq UREGS_ss+8(%rsp),%rax
-FLT2: movq %rax,32(%rsi) # SS
+.LFT2: movq %rax,32(%rsi) # SS
movq UREGS_rsp+8(%rsp),%rax
-FLT3: movq %rax,24(%rsi) # RSP
+.LFT3: movq %rax,24(%rsi) # RSP
movq VCPU_vcpu_info(%rbx),%rax
pushq VCPUINFO_upcall_mask(%rax)
testb $TBF_INTERRUPT,%cl
@@ -260,7 +260,7 @@ FLT3: movq %rax,24(%rsi)
popq %rax
shlq $32,%rax # Bits 32-39: saved_upcall_mask
movw UREGS_cs+8(%rsp),%ax # Bits 0-15: CS
-FLT4: movq %rax,8(%rsi) # CS / saved_upcall_mask
+.LFT4: movq %rax,8(%rsi) # CS / saved_upcall_mask
shrq $32,%rax
testb $0xFF,%al # Bits 0-7: saved_upcall_mask
setz %ch # %ch == !saved_upcall_mask
@@ -268,30 +268,30 @@ FLT4: movq %rax,8(%rsi)
andq $~X86_EFLAGS_IF,%rax
shlb $1,%ch # Bit 9 (EFLAGS.IF)
orb %ch,%ah # Fold EFLAGS.IF into %eax
-FLT5: movq %rax,16(%rsi) # RFLAGS
+.LFT5: movq %rax,16(%rsi) # RFLAGS
movq UREGS_rip+8(%rsp),%rax
-FLT6: movq %rax,(%rsi) # RIP
+.LFT6: movq %rax,(%rsi) # RIP
testb $TBF_EXCEPTION_ERRCODE,%cl
jz 1f
subq $8,%rsi
movl TRAPBOUNCE_error_code(%rdx),%eax
-FLT7: movq %rax,(%rsi) # ERROR CODE
+.LFT7: movq %rax,(%rsi) # ERROR CODE
1: testb $TBF_FAILSAFE,%cl
jz 2f
subq $32,%rsi
movl %gs,%eax
-FLT8: movq %rax,24(%rsi) # GS
+.LFT8: movq %rax,24(%rsi) # GS
movl %fs,%eax
-FLT9: movq %rax,16(%rsi) # FS
+.LFT9: movq %rax,16(%rsi) # FS
movl %es,%eax
-FLT10: movq %rax,8(%rsi) # ES
+.LFT10: movq %rax,8(%rsi) # ES
movl %ds,%eax
-FLT11: movq %rax,(%rsi) # DS
+.LFT11: movq %rax,(%rsi) # DS
2: subq $16,%rsi
movq UREGS_r11+8(%rsp),%rax
-FLT12: movq %rax,8(%rsi) # R11
+.LFT12: movq %rax,8(%rsi) # R11
movq UREGS_rcx+8(%rsp),%rax
-FLT13: movq %rax,(%rsi) # RCX
+.LFT13: movq %rax,(%rsi) # RCX
/* Rewrite our stack frame and return to guest-OS mode. */
/* IA32 Ref. Vol. 3: TF, VM, RF and NT flags are cleared on trap. */
/* Also clear AC: alignment checks shouldn't trigger in kernel mode. */
@@ -308,12 +308,12 @@ FLT13: movq %rax,(%rsi)
movb $0,TRAPBOUNCE_flags(%rdx)
ret
.section __ex_table,"a"
- .quad FLT2,domain_crash_synchronous , FLT3,domain_crash_synchronous
- .quad FLT4,domain_crash_synchronous , FLT5,domain_crash_synchronous
- .quad FLT6,domain_crash_synchronous , FLT7,domain_crash_synchronous
- .quad FLT8,domain_crash_synchronous , FLT9,domain_crash_synchronous
- .quad FLT10,domain_crash_synchronous , FLT11,domain_crash_synchronous
- .quad FLT12,domain_crash_synchronous , FLT13,domain_crash_synchronous
+ .quad .LFT2,domain_crash_synchronous , .LFT3,domain_crash_synchronous
+ .quad .LFT4,domain_crash_synchronous , .LFT5,domain_crash_synchronous
+ .quad .LFT6,domain_crash_synchronous , .LFT7,domain_crash_synchronous
+ .quad .LFT8,domain_crash_synchronous , .LFT9,domain_crash_synchronous
+ .quad .LFT10,domain_crash_synchronous , .LFT11,domain_crash_synchronous
+ .quad .LFT12,domain_crash_synchronous , .LFT13,domain_crash_synchronous
.previous
domain_crash_synchronous_string: