Save/restore the missing volatile registers (XMM0-XMM5) in the Page Fault handler.

Signed-off-by: Jeff Fan <jeff.fan@intel.com>
Reviewed-by: Jiewen Yao <jiewen.yao@intel.com>



git-svn-id: https://edk2.svn.sourceforge.net/svnroot/edk2/trunk/edk2@13974 6f19259b-4bc3-4df7-8a09-765794883524
Author: vanjeff
Date:   2012-11-28 04:49:48 +00:00
parent 72ccedbd76
commit 53c76a6478
2 changed files with 42 additions and 6 deletions
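
Background on the fix: UEFI X64 firmware follows the Microsoft x64 calling convention, in which XMM0-XMM5 are volatile (caller-saved) registers. PageFaultHandler is compiled C code and is free to clobber them, so a hook that does not save and restore them hands corrupted XMM state back to the interrupted code. The sketch below illustrates the general pattern in standalone GAS syntax; the SamplePageFaultHook and SampleCHandler names are hypothetical, it assumes the hook sits directly on the #PF vector (so an error code is on the stack), and it ends with a plain iretq rather than the real hook's dispatch logic.

    .text
    .globl  SamplePageFaultHook
SamplePageFaultHook:
    pushq   %rax                    # save the volatile general-purpose registers
    pushq   %rcx
    pushq   %rdx
    pushq   %r8
    pushq   %r9
    pushq   %r10
    pushq   %r11
    addq    $-0x68, %rsp            # 6 * 16 bytes for XMM0-XMM5 plus 8 bytes of
                                    # padding so %rsp is 16-byte aligned
    movdqa  %xmm0, 0x00(%rsp)       # movdqa requires 16-byte aligned addresses,
    movdqa  %xmm1, 0x10(%rsp)       # hence the padding above
    movdqa  %xmm2, 0x20(%rsp)
    movdqa  %xmm3, 0x30(%rsp)
    movdqa  %xmm4, 0x40(%rsp)
    movdqa  %xmm5, 0x50(%rsp)
    addq    $-0x20, %rsp            # 32-byte shadow space required by the
    call    SampleCHandler          # Microsoft x64 ABI before calling C code
    addq    $0x20, %rsp
    movdqa  0x00(%rsp), %xmm0       # restore the XMM registers the C handler
    movdqa  0x10(%rsp), %xmm1       # may have clobbered
    movdqa  0x20(%rsp), %xmm2
    movdqa  0x30(%rsp), %xmm3
    movdqa  0x40(%rsp), %xmm4
    movdqa  0x50(%rsp), %xmm5
    addq    $0x68, %rsp
    popq    %r11
    popq    %r10
    popq    %r9
    popq    %r8
    popq    %rdx
    popq    %rcx
    popq    %rax
    addq    $8, %rsp                # drop the page-fault error code
    iretq                           # resume the interrupted code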


@@ -1,6 +1,6 @@
 #------------------------------------------------------------------------------
 #
-# Copyright (c) 2006 - 2011, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2006 - 2012, Intel Corporation. All rights reserved.<BR>
 # This program and the accompanying materials
 # are licensed and made available under the terms and conditions of the BSD License
 # which accompanies this distribution. The full text of the license may be found at
@@ -28,10 +28,28 @@ ASM_PFX(PageFaultHandlerHook):
     pushq   %r9
     pushq   %r10
     pushq   %r11
+    addq    $-0x68, %rsp            # reserve memory to store XMM registers and make address 16-byte alignment
+    movdqa  %xmm0, 0(%rsp)
+    movdqa  %xmm1, 0x10(%rsp)
+    movdqa  %xmm2, 0x20(%rsp)
+    movdqa  %xmm3, 0x30(%rsp)
+    movdqa  %xmm4, 0x40(%rsp)
+    movdqa  %xmm5, 0x50(%rsp)
     addq    $-0x20, %rsp
     call    ASM_PFX(PageFaultHandler)
     addq    $0x20, %rsp
-    testb   %al, %al
+    movdqa  0(%rsp), %xmm0
+    movdqa  0x10(%rsp), %xmm1
+    movdqa  0x20(%rsp), %xmm2
+    movdqa  0x30(%rsp), %xmm3
+    movdqa  0x40(%rsp), %xmm4
+    movdqa  0x50(%rsp), %xmm5
+    addq    $0x68, %rsp
+    testb   %al, %al                # set ZF flag
     popq    %r11
     popq    %r10
     popq    %r9
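
A note on the two stack adjustments in the hunk above (inferred from the instructions, not stated in the commit): the six XMM registers need 6 * 16 = 0x60 bytes, and the extra 8 bytes in the 0x68 reservation are padding that, per the comment on the addq, puts %rsp back on a 16-byte boundary; movdqa faults on a misaligned memory operand, so that padding is what makes the aligned form usable here. The separate addq $-0x20, %rsp around the call is the 32-byte shadow (register parameter) area that the Microsoft x64 calling convention, which UEFI X64 follows, requires the caller to allocate before calling a C function such as PageFaultHandler. The resulting frame, as read from the instructions above:

    # %rsp + 0x00 .. 0x5F   saved XMM0-XMM5, 16 bytes each
    # %rsp + 0x60 .. 0x67   8 bytes of padding for 16-byte alignment
    # %rsp + 0x68 .. up     volatile general-purpose registers saved by the
    #                       pushq sequence, then the page-fault error code
    #                       (if the hook sits directly on the #PF vector)
    #                       and the interrupt return frame
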
@@ -39,7 +57,7 @@ ASM_PFX(PageFaultHandlerHook):
     popq    %rdx
     popq    %rcx
     popq    %rax                    # restore all volatile registers
-    jnz     L1
+    jnz     L1                      # check ZF flag
 #ifdef __APPLE__
     int     $3
 #else