#PF is, in all "normal" usage models, the only potentially high frequency (and hence performance sensitive) exception. Thus make it the fall-through case into handle_exception (rather than divide_error for x86-32 and not having one at all for x86-64). Signed-off-by: Jan Beulich --- a/xen/arch/x86/x86_32/entry.S +++ b/xen/arch/x86/x86_32/entry.S @@ -438,9 +438,8 @@ ENTRY(ret_from_intr) jnz test_all_events jmp restore_all_xen -ENTRY(divide_error) - pushl $TRAP_divide_error<<16 - ALIGN +ENTRY(page_fault) + movw $TRAP_page_fault,2(%esp) handle_exception: FIXUP_RING0_GUEST_STACK SAVE_ALL(1f,2f) @@ -513,6 +512,10 @@ ENTRY(device_not_available) pushl $TRAP_no_device<<16 jmp handle_exception +ENTRY(divide_error) + pushl $TRAP_divide_error<<16 + jmp handle_exception + ENTRY(debug) pushl $TRAP_debug<<16 jmp handle_exception @@ -557,10 +560,6 @@ ENTRY(alignment_check) movw $TRAP_alignment_check,2(%esp) jmp handle_exception -ENTRY(page_fault) - movw $TRAP_page_fault,2(%esp) - jmp handle_exception - ENTRY(spurious_interrupt_bug) pushl $TRAP_spurious_int<<16 jmp handle_exception --- a/xen/arch/x86/x86_64/entry.S +++ b/xen/arch/x86/x86_64/entry.S @@ -474,9 +474,11 @@ ENTRY(ret_from_intr) jz test_all_events jmp compat_test_all_events - ALIGN +ENTRY(page_fault) + movl $TRAP_page_fault,4(%rsp) /* No special register assumptions. */ -ENTRY(handle_exception) + .globl handle_exception +handle_exception: SAVE_ALL handle_exception_saved: testb $X86_EFLAGS_IF>>8,UREGS_eflags+1(%rsp) @@ -598,10 +600,6 @@ ENTRY(alignment_check) movl $TRAP_alignment_check,4(%rsp) jmp handle_exception -ENTRY(page_fault) - movl $TRAP_page_fault,4(%rsp) - jmp handle_exception - ENTRY(spurious_interrupt_bug) pushq $0 movl $TRAP_spurious_int,4(%rsp)