WARNING - OLD ARCHIVES

This is an archived copy of the Xen.org mailing list, which we have preserved to ensure that existing links to archives are not broken. The live archive, which contains the latest emails, can be found at http://lists.xen.org/
   
 
 
Xen 
 
Home Products Support Community News
 
   
 

xen-changelog

[Xen-changelog] [xen-unstable] x86: hide assembly labels

To: xen-changelog@xxxxxxxxxxxxxxxxxxx
Subject: [Xen-changelog] [xen-unstable] x86: hide assembly labels
From: Xen patchbot-unstable <patchbot-unstable@xxxxxxxxxxxxxxxxxxx>
Date: Fri, 09 Feb 2007 09:41:01 -0800
Delivery-date: Fri, 09 Feb 2007 10:37:10 -0800
Envelope-to: www-data@xxxxxxxxxxxxxxxxxx
List-help: <mailto:xen-changelog-request@lists.xensource.com?subject=help>
List-id: BK change log <xen-changelog.lists.xensource.com>
List-post: <mailto:xen-changelog@lists.xensource.com>
List-subscribe: <http://lists.xensource.com/cgi-bin/mailman/listinfo/xen-changelog>, <mailto:xen-changelog-request@lists.xensource.com?subject=subscribe>
List-unsubscribe: <http://lists.xensource.com/cgi-bin/mailman/listinfo/xen-changelog>, <mailto:xen-changelog-request@lists.xensource.com?subject=unsubscribe>
Reply-to: xen-devel@xxxxxxxxxxxxxxxxxxx
Sender: xen-changelog-bounces@xxxxxxxxxxxxxxxxxxx
# HG changeset patch
# User Keir Fraser <keir@xxxxxxxxxxxxx>
# Date 1170958736 0
# Node ID 3050c8339da62896a7d3af07e6095a07545e486b
# Parent  0cbf1586a4325cceef93816fdd0353bd23e74e46
x86: hide assembly labels

Prevent the fault recovery labels from cluttering the symbol table and
the disassembly.

Signed-off-by: Jan Beulich <jbeulich@xxxxxxxxxx>
---
 xen/arch/x86/x86_32/entry.S        |  107 +++++++++++++++++++------------------
 xen/arch/x86/x86_64/compat/entry.S |   64 +++++++++++-----------
 xen/arch/x86/x86_64/entry.S        |   48 ++++++++--------
 3 files changed, 111 insertions(+), 108 deletions(-)

diff -r 0cbf1586a432 -r 3050c8339da6 xen/arch/x86/x86_32/entry.S
--- a/xen/arch/x86/x86_32/entry.S       Thu Feb 08 18:01:42 2007 +0000
+++ b/xen/arch/x86/x86_32/entry.S       Thu Feb 08 18:18:56 2007 +0000
@@ -84,10 +84,10 @@ restore_all_guest:
         jmp   restore_all_vm86
 1:
 #endif
-FLT1:   mov  UREGS_ds(%esp),%ds
-FLT2:   mov  UREGS_es(%esp),%es
-FLT3:   mov  UREGS_fs(%esp),%fs
-FLT4:   mov  UREGS_gs(%esp),%gs
+.Lft1:  mov  UREGS_ds(%esp),%ds
+.Lft2:  mov  UREGS_es(%esp),%es
+.Lft3:  mov  UREGS_fs(%esp),%fs
+.Lft4:  mov  UREGS_gs(%esp),%gs
 restore_all_vm86:
         popl %ebx
         popl %ecx
@@ -97,9 +97,9 @@ restore_all_vm86:
         popl %ebp
         popl %eax
         addl $4,%esp
-FLT5:   iret
+.Lft5:  iret
 .section .fixup,"ax"
-FIX5:   subl  $28,%esp
+.Lfx5:  subl  $28,%esp
         pushl 28(%esp)                 # error_code/entry_vector
         movl  %eax,UREGS_eax+4(%esp)
         movl  %ebp,UREGS_ebp+4(%esp)
@@ -108,7 +108,7 @@ FIX5:   subl  $28,%esp
         movl  %edx,UREGS_edx+4(%esp)
         movl  %ecx,UREGS_ecx+4(%esp)
         movl  %ebx,UREGS_ebx+4(%esp)
-FIX1:   SET_XEN_SEGMENTS(a)
+.Lfx1:  SET_XEN_SEGMENTS(a)
         movl  %eax,%fs
         movl  %eax,%gs
         sti
@@ -116,11 +116,11 @@ FIX1:   SET_XEN_SEGMENTS(a)
         pushfl                         # EFLAGS
         movl  $__HYPERVISOR_CS,%eax
         pushl %eax                     # CS
-        movl  $DBLFLT1,%eax
+        movl  $.Ldf1,%eax
         pushl %eax                     # EIP
         pushl %esi                     # error_code/entry_vector
         jmp   handle_exception
-DBLFLT1:GET_CURRENT(%ebx)
+.Ldf1:  GET_CURRENT(%ebx)
         jmp   test_all_events
 failsafe_callback:
         GET_CURRENT(%ebx)
@@ -142,14 +142,14 @@ 1:      call  create_bounce_frame
         jmp   test_all_events
 .previous
 .section __pre_ex_table,"a"
-        .long FLT1,FIX1
-        .long FLT2,FIX1
-        .long FLT3,FIX1
-        .long FLT4,FIX1
-        .long FLT5,FIX5
+        .long .Lft1,.Lfx1
+        .long .Lft2,.Lfx1
+        .long .Lft3,.Lfx1
+        .long .Lft4,.Lfx1
+        .long .Lft5,.Lfx5
 .previous
 .section __ex_table,"a"
-        .long DBLFLT1,failsafe_callback
+        .long .Ldf1,failsafe_callback
 .previous
 
         ALIGN
@@ -288,32 +288,33 @@ create_bounce_frame:
         testl $(2|X86_EFLAGS_VM),%ecx
         jz   ring1 /* jump if returning to an existing ring-1 activation */
         movl VCPU_kernel_sp(%ebx),%esi
-FLT6:   mov  VCPU_kernel_ss(%ebx),%gs
+.Lft6:  mov  VCPU_kernel_ss(%ebx),%gs
         testl $X86_EFLAGS_VM,UREGS_eflags+4(%esp)
-        jz   nvm86_1
+        jz   .Lnvm86_1
         subl $16,%esi       /* push ES/DS/FS/GS (VM86 stack frame) */
         movl UREGS_es+4(%esp),%eax
-FLT7:   movl %eax,%gs:(%esi)
+.Lft7:  movl %eax,%gs:(%esi)
         movl UREGS_ds+4(%esp),%eax
-FLT8:   movl %eax,%gs:4(%esi)
+.Lft8:  movl %eax,%gs:4(%esi)
         movl UREGS_fs+4(%esp),%eax
-FLT9:   movl %eax,%gs:8(%esi)
+.Lft9:  movl %eax,%gs:8(%esi)
         movl UREGS_gs+4(%esp),%eax
-FLT10:  movl %eax,%gs:12(%esi)
-nvm86_1:subl $8,%esi        /* push SS/ESP (inter-priv iret) */
+.Lft10: movl %eax,%gs:12(%esi)
+.Lnvm86_1:
+        subl $8,%esi        /* push SS/ESP (inter-priv iret) */
         movl UREGS_esp+4(%esp),%eax
-FLT11:  movl %eax,%gs:(%esi) 
+.Lft11: movl %eax,%gs:(%esi)
         movl UREGS_ss+4(%esp),%eax
-FLT12:  movl %eax,%gs:4(%esi) 
+.Lft12: movl %eax,%gs:4(%esi)
         jmp 1f
 ring1:  /* obtain ss/esp from oldss/oldesp -- a ring-1 activation exists */
         movl UREGS_esp+4(%esp),%esi
-FLT13:  mov  UREGS_ss+4(%esp),%gs 
+.Lft13: mov  UREGS_ss+4(%esp),%gs
 1:      /* Construct a stack frame: EFLAGS, CS/EIP */
         movb TRAPBOUNCE_flags(%edx),%cl
         subl $12,%esi
         movl UREGS_eip+4(%esp),%eax
-FLT14:  movl %eax,%gs:(%esi) 
+.Lft14: movl %eax,%gs:(%esi)
         movl VCPU_vcpu_info(%ebx),%eax
         pushl VCPUINFO_upcall_mask(%eax)
         testb $TBF_INTERRUPT,%cl
@@ -324,49 +325,51 @@ FLT14:  movl %eax,%gs:(%esi)
         movw UREGS_cs+4(%esp),%ax        # Bits  0-15: CS
 #ifdef CONFIG_X86_SUPERVISOR_MODE_KERNEL
         testw $2,%ax
-        jnz  FLT15
+        jnz  .Lft15
         and  $~3,%ax                     # RPL 1 -> RPL 0
 #endif
-FLT15:  movl %eax,%gs:4(%esi) 
+.Lft15: movl %eax,%gs:4(%esi)
         test $0x00FF0000,%eax            # Bits 16-23: saved_upcall_mask
         setz %ch                         # %ch == !saved_upcall_mask
         movl UREGS_eflags+4(%esp),%eax
         andl $~X86_EFLAGS_IF,%eax
         shlb $1,%ch                      # Bit 9 (EFLAGS.IF)
         orb  %ch,%ah                     # Fold EFLAGS.IF into %eax
-FLT16:  movl %eax,%gs:8(%esi)
+.Lft16: movl %eax,%gs:8(%esi)
         test $TBF_EXCEPTION_ERRCODE,%cl
         jz   1f
         subl $4,%esi                    # push error_code onto guest frame
         movl TRAPBOUNCE_error_code(%edx),%eax
-FLT17:  movl %eax,%gs:(%esi)
+.Lft17: movl %eax,%gs:(%esi)
 1:      testb $TBF_FAILSAFE,%cl
         jz   2f
         subl $16,%esi                # add DS/ES/FS/GS to failsafe stack frame
         testl $X86_EFLAGS_VM,UREGS_eflags+4(%esp)
-        jz   nvm86_2
+        jz   .Lnvm86_2
         xorl %eax,%eax               # VM86: we write zero selector values
-FLT18:  movl %eax,%gs:(%esi) 
-FLT19:  movl %eax,%gs:4(%esi)
-FLT20:  movl %eax,%gs:8(%esi) 
-FLT21:  movl %eax,%gs:12(%esi)
+.Lft18: movl %eax,%gs:(%esi)
+.Lft19: movl %eax,%gs:4(%esi)
+.Lft20: movl %eax,%gs:8(%esi)
+.Lft21: movl %eax,%gs:12(%esi)
         jmp  2f
-nvm86_2:movl UREGS_ds+4(%esp),%eax   # non-VM86: write real selector values
-FLT22:  movl %eax,%gs:(%esi) 
+.Lnvm86_2:
+        movl UREGS_ds+4(%esp),%eax   # non-VM86: write real selector values
+.Lft22: movl %eax,%gs:(%esi)
         movl UREGS_es+4(%esp),%eax
-FLT23:  movl %eax,%gs:4(%esi)
+.Lft23: movl %eax,%gs:4(%esi)
         movl UREGS_fs+4(%esp),%eax
-FLT24:  movl %eax,%gs:8(%esi) 
+.Lft24: movl %eax,%gs:8(%esi)
         movl UREGS_gs+4(%esp),%eax
-FLT25:  movl %eax,%gs:12(%esi)
+.Lft25: movl %eax,%gs:12(%esi)
 2:      testl $X86_EFLAGS_VM,UREGS_eflags+4(%esp)
-        jz   nvm86_3
+        jz   .Lnvm86_3
         xorl %eax,%eax      /* zero DS-GS, just as a real CPU would */
         movl %eax,UREGS_ds+4(%esp)
         movl %eax,UREGS_es+4(%esp)
         movl %eax,UREGS_fs+4(%esp)
         movl %eax,UREGS_gs+4(%esp)
-nvm86_3:/* Rewrite our stack frame and return to ring 1. */
+.Lnvm86_3:
+        /* Rewrite our stack frame and return to ring 1. */
         /* IA32 Ref. Vol. 3: TF, VM, RF and NT flags are cleared on trap. */
         andl $~(X86_EFLAGS_VM|X86_EFLAGS_RF|\
                 X86_EFLAGS_NT|X86_EFLAGS_TF),UREGS_eflags+4(%esp)
@@ -382,16 +385,16 @@ nvm86_3:/* Rewrite our stack frame and r
         movb $0,TRAPBOUNCE_flags(%edx)
         ret
 .section __ex_table,"a"
-        .long  FLT6,domain_crash_synchronous ,  FLT7,domain_crash_synchronous
-        .long  FLT8,domain_crash_synchronous ,  FLT9,domain_crash_synchronous
-        .long FLT10,domain_crash_synchronous , FLT11,domain_crash_synchronous
-        .long FLT12,domain_crash_synchronous , FLT13,domain_crash_synchronous
-        .long FLT14,domain_crash_synchronous , FLT15,domain_crash_synchronous
-        .long FLT16,domain_crash_synchronous , FLT17,domain_crash_synchronous
-        .long FLT18,domain_crash_synchronous , FLT19,domain_crash_synchronous
-        .long FLT20,domain_crash_synchronous , FLT21,domain_crash_synchronous
-        .long FLT22,domain_crash_synchronous , FLT23,domain_crash_synchronous
-        .long FLT24,domain_crash_synchronous , FLT25,domain_crash_synchronous
+        .long  .Lft6,domain_crash_synchronous ,  .Lft7,domain_crash_synchronous
+        .long  .Lft8,domain_crash_synchronous ,  .Lft9,domain_crash_synchronous
+        .long .Lft10,domain_crash_synchronous , .Lft11,domain_crash_synchronous
+        .long .Lft12,domain_crash_synchronous , .Lft13,domain_crash_synchronous
+        .long .Lft14,domain_crash_synchronous , .Lft15,domain_crash_synchronous
+        .long .Lft16,domain_crash_synchronous , .Lft17,domain_crash_synchronous
+        .long .Lft18,domain_crash_synchronous , .Lft19,domain_crash_synchronous
+        .long .Lft20,domain_crash_synchronous , .Lft21,domain_crash_synchronous
+        .long .Lft22,domain_crash_synchronous , .Lft23,domain_crash_synchronous
+        .long .Lft24,domain_crash_synchronous , .Lft25,domain_crash_synchronous
 .previous
 
 domain_crash_synchronous_string:
diff -r 0cbf1586a432 -r 3050c8339da6 xen/arch/x86/x86_64/compat/entry.S
--- a/xen/arch/x86/x86_64/compat/entry.S        Thu Feb 08 18:01:42 2007 +0000
+++ b/xen/arch/x86/x86_64/compat/entry.S        Thu Feb 08 18:18:56 2007 +0000
@@ -123,10 +123,10 @@ compat_restore_all_guest:
 compat_restore_all_guest:
         RESTORE_ALL
         addq  $8,%rsp
-CFLT0:  iretq
+.Lft0:  iretq
 
 .section .fixup,"ax"
-CFIX0:  popq  -15*8-8(%rsp)            # error_code/entry_vector
+.Lfx0:  popq  -15*8-8(%rsp)            # error_code/entry_vector
         SAVE_ALL                       # 15*8 bytes pushed
         movq  -8(%rsp),%rsi            # error_code/entry_vector
         sti                            # after stack abuse (-1024(%rsp))
@@ -135,11 +135,11 @@ CFIX0:  popq  -15*8-8(%rsp)            #
         pushq %rax                     # RSP
         pushfq                         # RFLAGS
         pushq $__HYPERVISOR_CS         # CS
-        leaq  CDBLFLT0(%rip),%rax
+        leaq  .Ldf0(%rip),%rax
         pushq %rax                     # RIP
         pushq %rsi                     # error_code/entry_vector
         jmp   handle_exception
-CDBLFLT0:GET_CURRENT(%rbx)
+.Ldf0:  GET_CURRENT(%rbx)
         jmp   compat_test_all_events
 compat_failsafe_callback:
         GET_CURRENT(%rbx)
@@ -157,10 +157,10 @@ 1:
         jmp   compat_test_all_events
 .previous
 .section __pre_ex_table,"a"
-       .quad CFLT0,CFIX0
+       .quad .Lft0,.Lfx0
 .previous
 .section __ex_table,"a"
-        .quad CDBLFLT0,compat_failsafe_callback
+        .quad .Ldf0,compat_failsafe_callback
 .previous
 
 /* %rdx: trap_bounce, %rbx: struct vcpu */
@@ -180,16 +180,16 @@ compat_create_bounce_frame:
         jz    1f
         /* Push new frame at registered guest-OS stack base. */
         movl  VCPU_kernel_sp(%rbx),%esi
-CFLT1:  mov   VCPU_kernel_ss(%rbx),%fs
+.Lft1:  mov   VCPU_kernel_ss(%rbx),%fs
         subl  $2*4,%esi
         movl  UREGS_rsp+8(%rsp),%eax
-CFLT2:  movl  %eax,%fs:(%rsi)
+.Lft2:  movl  %eax,%fs:(%rsi)
         movl  UREGS_ss+8(%rsp),%eax
-CFLT3:  movl  %eax,%fs:4(%rsi)
+.Lft3:  movl  %eax,%fs:4(%rsi)
         jmp   2f
 1:      /* In kernel context already: push new frame at existing %rsp. */
         movl  UREGS_rsp+8(%rsp),%esi
-CFLT4:  mov   UREGS_ss+8(%rsp),%fs
+.Lft4:  mov   UREGS_ss+8(%rsp),%fs
 2:
         movb  TRAPBOUNCE_flags(%rdx),%cl
         subl  $3*4,%esi
@@ -201,7 +201,7 @@ 2:
         popq  %rax
         shll  $16,%eax                  # Bits 16-23: saved_upcall_mask
         movw  UREGS_cs+8(%rsp),%ax      # Bits  0-15: CS
-CFLT5:  movl  %eax,%fs:4(%rsi)          # CS / saved_upcall_mask
+.Lft5:  movl  %eax,%fs:4(%rsi)          # CS / saved_upcall_mask
         shrl  $16,%eax
         testb %al,%al                   # Bits 0-7: saved_upcall_mask
         setz  %ch                       # %ch == !saved_upcall_mask
@@ -209,25 +209,25 @@ CFLT5:  movl  %eax,%fs:4(%rsi)          
         andl  $~X86_EFLAGS_IF,%eax
         shlb  $1,%ch                    # Bit 9 (EFLAGS.IF)
         orb   %ch,%ah                   # Fold EFLAGS.IF into %eax
-CFLT6:  movl  %eax,%fs:2*4(%rsi)        # EFLAGS
+.Lft6:  movl  %eax,%fs:2*4(%rsi)        # EFLAGS
         movl  UREGS_rip+8(%rsp),%eax
-CFLT7:  movl  %eax,%fs:(%rsi)           # EIP
+.Lft7:  movl  %eax,%fs:(%rsi)           # EIP
         testb $TBF_EXCEPTION_ERRCODE,%cl
         jz    1f
         subl  $4,%esi
         movl  TRAPBOUNCE_error_code(%rdx),%eax
-CFLT8:  movl  %eax,%fs:(%rsi)           # ERROR CODE
+.Lft8:  movl  %eax,%fs:(%rsi)           # ERROR CODE
 1:
         testb $TBF_FAILSAFE,%cl
         jz    2f
         subl  $4*4,%esi
         movl  %gs,%eax
-CFLT9:  movl  %eax,%fs:3*4(%rsi)        # GS
-CFLT10: movl  %edi,%fs:2*4(%rsi)        # FS
+.Lft9:  movl  %eax,%fs:3*4(%rsi)        # GS
+.Lft10: movl  %edi,%fs:2*4(%rsi)        # FS
         movl  %es,%eax
-CFLT11: movl  %eax,%fs:1*4(%rsi)        # ES
+.Lft11: movl  %eax,%fs:1*4(%rsi)        # ES
         movl  %ds,%eax
-CFLT12: movl  %eax,%fs:0*4(%rsi)        # DS
+.Lft12: movl  %eax,%fs:0*4(%rsi)        # DS
 2:
         /* Rewrite our stack frame and return to guest-OS mode. */
         /* IA32 Ref. Vol. 3: TF, VM, RF and NT flags are cleared on trap. */
@@ -236,7 +236,7 @@ 2:
                  X86_EFLAGS_NT|X86_EFLAGS_TF),UREGS_eflags+8(%rsp)
         mov   %fs,UREGS_ss+8(%rsp)
         movl  %esi,UREGS_rsp+8(%rsp)
-CFLT13: mov   %edi,%fs
+.Lft13: mov   %edi,%fs
         movzwl TRAPBOUNCE_cs(%rdx),%eax
         /* Null selectors (0-3) are not allowed. */
         testl $~3,%eax
@@ -247,18 +247,18 @@ CFLT13: mov   %edi,%fs
         movb  $0,TRAPBOUNCE_flags(%rdx)
         ret
 .section .fixup,"ax"
-CFIX13:
+.Lfx13:
         xorl  %edi,%edi
-        jmp   CFLT13
+        jmp   .Lft13
 .previous
 .section __ex_table,"a"
-        .quad  CFLT1,domain_crash_synchronous  ,  CFLT2,compat_crash_page_fault
-        .quad  CFLT3,compat_crash_page_fault_4 ,  
CFLT4,domain_crash_synchronous
-        .quad  CFLT5,compat_crash_page_fault_4 ,  
CFLT6,compat_crash_page_fault_8
-        .quad  CFLT7,compat_crash_page_fault   ,  CFLT8,compat_crash_page_fault
-        .quad  CFLT9,compat_crash_page_fault_12, 
CFLT10,compat_crash_page_fault_8
-        .quad CFLT11,compat_crash_page_fault_4 , CFLT12,compat_crash_page_fault
-        .quad CFLT13,CFIX13
+        .quad  .Lft1,domain_crash_synchronous  ,  .Lft2,compat_crash_page_fault
+        .quad  .Lft3,compat_crash_page_fault_4 ,  
.Lft4,domain_crash_synchronous
+        .quad  .Lft5,compat_crash_page_fault_4 ,  
.Lft6,compat_crash_page_fault_8
+        .quad  .Lft7,compat_crash_page_fault   ,  .Lft8,compat_crash_page_fault
+        .quad  .Lft9,compat_crash_page_fault_12, 
.Lft10,compat_crash_page_fault_8
+        .quad .Lft11,compat_crash_page_fault_4 , .Lft12,compat_crash_page_fault
+        .quad .Lft13,.Lfx13
 .previous
 
 compat_crash_page_fault_12:
@@ -268,17 +268,17 @@ compat_crash_page_fault_4:
 compat_crash_page_fault_4:
         addl  $4,%esi
 compat_crash_page_fault:
-CFLT14: mov   %edi,%fs
+.Lft14: mov   %edi,%fs
         movl  %esi,%edi
         call  show_page_walk
         jmp   domain_crash_synchronous
 .section .fixup,"ax"
-CFIX14:
+.Lfx14:
         xorl  %edi,%edi
-        jmp   CFLT14
+        jmp   .Lft14
 .previous
 .section __ex_table,"a"
-        .quad CFLT14,CFIX14
+        .quad .Lft14,.Lfx14
 .previous
 
 .section .rodata, "a", @progbits
diff -r 0cbf1586a432 -r 3050c8339da6 xen/arch/x86/x86_64/entry.S
--- a/xen/arch/x86/x86_64/entry.S       Thu Feb 08 18:01:42 2007 +0000
+++ b/xen/arch/x86/x86_64/entry.S       Thu Feb 08 18:18:56 2007 +0000
@@ -56,10 +56,10 @@ 1:      sysretl
 /* No special register assumptions. */
 iret_exit_to_guest:
         addq  $8,%rsp
-FLT1:   iretq
+.Lft1:  iretq
 
 .section .fixup,"ax"
-FIX1:   popq  -15*8-8(%rsp)            # error_code/entry_vector
+.Lfx1:  popq  -15*8-8(%rsp)            # error_code/entry_vector
         SAVE_ALL                       # 15*8 bytes pushed
         movq  -8(%rsp),%rsi            # error_code/entry_vector
         sti                            # after stack abuse (-1024(%rsp))
@@ -68,11 +68,11 @@ FIX1:   popq  -15*8-8(%rsp)            #
         pushq %rax                     # RSP
         pushf                          # RFLAGS
         pushq $__HYPERVISOR_CS         # CS
-        leaq  DBLFLT1(%rip),%rax
+        leaq  .Ldf1(%rip),%rax
         pushq %rax                     # RIP
         pushq %rsi                     # error_code/entry_vector
         jmp   handle_exception
-DBLFLT1:GET_CURRENT(%rbx)
+.Ldf1:  GET_CURRENT(%rbx)
         jmp   test_all_events
 failsafe_callback:
         GET_CURRENT(%rbx)
@@ -87,10 +87,10 @@ 1:      call  create_bounce_frame
         jmp   test_all_events
 .previous
 .section __pre_ex_table,"a"
-        .quad FLT1,FIX1
+        .quad .Lft1,.Lfx1
 .previous
 .section __ex_table,"a"
-        .quad DBLFLT1,failsafe_callback
+        .quad .Ldf1,failsafe_callback
 .previous
 
         ALIGN
@@ -249,9 +249,9 @@ 1:      movb  TRAPBOUNCE_flags(%rdx),%cl
 1:      movb  TRAPBOUNCE_flags(%rdx),%cl
         subq  $40,%rsi
         movq  UREGS_ss+8(%rsp),%rax
-FLT2:   movq  %rax,32(%rsi)             # SS
+.Lft2:  movq  %rax,32(%rsi)             # SS
         movq  UREGS_rsp+8(%rsp),%rax
-FLT3:   movq  %rax,24(%rsi)             # RSP
+.Lft3:  movq  %rax,24(%rsi)             # RSP
         movq  VCPU_vcpu_info(%rbx),%rax
         pushq VCPUINFO_upcall_mask(%rax)
         testb $TBF_INTERRUPT,%cl
@@ -260,7 +260,7 @@ FLT3:   movq  %rax,24(%rsi)             
         popq  %rax
         shlq  $32,%rax                  # Bits 32-39: saved_upcall_mask
         movw  UREGS_cs+8(%rsp),%ax      # Bits  0-15: CS
-FLT4:   movq  %rax,8(%rsi)              # CS / saved_upcall_mask
+.Lft4:  movq  %rax,8(%rsi)              # CS / saved_upcall_mask
         shrq  $32,%rax
         testb $0xFF,%al                 # Bits 0-7: saved_upcall_mask
         setz  %ch                       # %ch == !saved_upcall_mask
@@ -268,30 +268,30 @@ FLT4:   movq  %rax,8(%rsi)              
         andq  $~X86_EFLAGS_IF,%rax
         shlb  $1,%ch                    # Bit 9 (EFLAGS.IF)
         orb   %ch,%ah                   # Fold EFLAGS.IF into %eax
-FLT5:   movq  %rax,16(%rsi)             # RFLAGS
+.Lft5:  movq  %rax,16(%rsi)             # RFLAGS
         movq  UREGS_rip+8(%rsp),%rax
-FLT6:   movq  %rax,(%rsi)               # RIP
+.Lft6:  movq  %rax,(%rsi)               # RIP
         testb $TBF_EXCEPTION_ERRCODE,%cl
         jz    1f
         subq  $8,%rsi
         movl  TRAPBOUNCE_error_code(%rdx),%eax
-FLT7:   movq  %rax,(%rsi)               # ERROR CODE
+.Lft7:  movq  %rax,(%rsi)               # ERROR CODE
 1:      testb $TBF_FAILSAFE,%cl
         jz    2f
         subq  $32,%rsi
         movl  %gs,%eax
-FLT8:   movq  %rax,24(%rsi)             # GS
+.Lft8:  movq  %rax,24(%rsi)             # GS
         movl  %fs,%eax
-FLT9:   movq  %rax,16(%rsi)             # FS
+.Lft9:  movq  %rax,16(%rsi)             # FS
         movl  %es,%eax
-FLT10:  movq  %rax,8(%rsi)              # ES
+.Lft10: movq  %rax,8(%rsi)              # ES
         movl  %ds,%eax
-FLT11:  movq  %rax,(%rsi)               # DS
+.Lft11: movq  %rax,(%rsi)               # DS
 2:      subq  $16,%rsi
         movq  UREGS_r11+8(%rsp),%rax
-FLT12:  movq  %rax,8(%rsi)              # R11
+.Lft12: movq  %rax,8(%rsi)              # R11
         movq  UREGS_rcx+8(%rsp),%rax
-FLT13:  movq  %rax,(%rsi)               # RCX
+.Lft13: movq  %rax,(%rsi)               # RCX
         /* Rewrite our stack frame and return to guest-OS mode. */
         /* IA32 Ref. Vol. 3: TF, VM, RF and NT flags are cleared on trap. */
         /* Also clear AC: alignment checks shouldn't trigger in kernel mode. */
@@ -308,12 +308,12 @@ FLT13:  movq  %rax,(%rsi)               
         movb  $0,TRAPBOUNCE_flags(%rdx)
         ret
 .section __ex_table,"a"
-        .quad  FLT2,domain_crash_synchronous ,  FLT3,domain_crash_synchronous
-        .quad  FLT4,domain_crash_synchronous ,  FLT5,domain_crash_synchronous
-        .quad  FLT6,domain_crash_synchronous ,  FLT7,domain_crash_synchronous
-        .quad  FLT8,domain_crash_synchronous ,  FLT9,domain_crash_synchronous
-        .quad FLT10,domain_crash_synchronous , FLT11,domain_crash_synchronous
-        .quad FLT12,domain_crash_synchronous , FLT13,domain_crash_synchronous
+        .quad  .Lft2,domain_crash_synchronous ,  .Lft3,domain_crash_synchronous
+        .quad  .Lft4,domain_crash_synchronous ,  .Lft5,domain_crash_synchronous
+        .quad  .Lft6,domain_crash_synchronous ,  .Lft7,domain_crash_synchronous
+        .quad  .Lft8,domain_crash_synchronous ,  .Lft9,domain_crash_synchronous
+        .quad .Lft10,domain_crash_synchronous , .Lft11,domain_crash_synchronous
+        .quad .Lft12,domain_crash_synchronous , .Lft13,domain_crash_synchronous
 .previous
 
 domain_crash_synchronous_string:

_______________________________________________
Xen-changelog mailing list
Xen-changelog@xxxxxxxxxxxxxxxxxxx
http://lists.xensource.com/xen-changelog

<Prev in Thread] Current Thread [Next in Thread>
  • [Xen-changelog] [xen-unstable] x86: hide assembly labels, Xen patchbot-unstable <=