UefiCpuPkg/PiSmmCpuDxeSmm: Add paging protection.

PiSmmCpuDxeSmm consumes SmmAttributesTable and sets up the page table
(a PTE-level sketch follows the list):
1) The code region is marked read-only and the data region non-executable,
if the PE image is 4K aligned.
2) Important data structures, such as the GDT/IDT, are set to read-only.
3) SmmSaveState is set to non-executable,
and SmmEntrypoint is set to read-only.
4) If static paging is supported, the page table itself is read-only.
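As a rough illustration only (this is not the actual PiSmmCpuDxeSmm code;
the helper names are made up), the attributes above map onto IA-32e
page-table-entry bits: clearing bit 1 (R/W) makes a page read-only, and
setting bit 63 (XD) makes it non-executable once EFER.NXE is enabled:

    #include <Base.h>

    //
    // Illustrative helpers only; PiSmmCpuDxeSmm has its own attribute code.
    //
    #define IA32_PG_RW  BIT1    // page is writable when set
    #define IA32_PG_NX  BIT63   // page is non-executable when set (needs EFER.NXE)

    VOID
    SetPageReadOnly (
      IN OUT UINT64  *PageTableEntry
      )
    {
      *PageTableEntry &= ~(UINT64)IA32_PG_RW;   // code region, GDT/IDT, page table
    }

    VOID
    SetPageNonExecutable (
      IN OUT UINT64  *PageTableEntry
      )
    {
      *PageTableEntry |= IA32_PG_NX;            // data region, SmmSaveState
    }

This is also why 1) is conditional on 4K alignment: attributes can only be
applied at 4K page granularity.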

We use the page table to protect other components, and the page table itself.

If we use dynamic paging, we can still provide *partial* protection,
and hope that the page table is not modified by other components.

The XD enabling code is moved to SmiEntry to let NX take effect.
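For reference, a C-level sketch of the XD handling that the SmiEntry
assembly below performs (MSR indexes and bit positions per the Intel SDM;
AsmReadMsr64/AsmWriteMsr64 are BaseLib intrinsics; the function and
variable names here are illustrative, not the actual code):

    #include <Base.h>
    #include <Library/BaseLib.h>

    #define MSR_IA32_MISC_ENABLE    0x1A0
    #define MSR_EFER                0xC0000080
    #define MISC_ENABLE_XD_DISABLE  BIT34   // IA32_MISC_ENABLE[34]: XD bit disable
    #define MSR_EFER_XD             BIT11   // IA32_EFER.NXE

    UINT64  mSavedMiscEnable;

    VOID
    EnableNxOnSmiEntry (
      VOID
      )
    {
      mSavedMiscEnable = AsmReadMsr64 (MSR_IA32_MISC_ENABLE);
      if ((mSavedMiscEnable & MISC_ENABLE_XD_DISABLE) != 0) {
        //
        // The OS had XD disabled; clear the disable bit so EFER.NXE can work.
        //
        AsmWriteMsr64 (MSR_IA32_MISC_ENABLE, mSavedMiscEnable & ~MISC_ENABLE_XD_DISABLE);
      }
      AsmWriteMsr64 (MSR_EFER, AsmReadMsr64 (MSR_EFER) | MSR_EFER_XD);
    }

    VOID
    RestoreXdOnSmiExit (
      VOID
      )
    {
      if ((mSavedMiscEnable & MISC_ENABLE_XD_DISABLE) != 0) {
        //
        // Put the XD disable bit back the way it was before entering SMM.
        //
        AsmWriteMsr64 (MSR_IA32_MISC_ENABLE,
                       AsmReadMsr64 (MSR_IA32_MISC_ENABLE) | MISC_ENABLE_XD_DISABLE);
      }
    }

The assembly keeps the saved MSR high dword on the stack instead of in a
global, which is why the non-XD path reserves a matching 4-byte slot
(subl $4, %esp) so the stack layout is the same on both paths.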

Cc: Jeff Fan <jeff.fan@intel.com>
Cc: Feng Tian <feng.tian@intel.com>
Cc: Star Zeng <star.zeng@intel.com>
Cc: Michael D Kinney <michael.d.kinney@intel.com>
Cc: Laszlo Ersek <lersek@redhat.com>
Contributed-under: TianoCore Contribution Agreement 1.0
Signed-off-by: Jiewen Yao <jiewen.yao@intel.com>
Tested-by: Laszlo Ersek <lersek@redhat.com>
Reviewed-by: Jeff Fan <jeff.fan@intel.com>
Reviewed-by: Michael D Kinney <michael.d.kinney@intel.com>
Author: Jiewen Yao
Date:   2016-10-23 23:19:52 +08:00
parent 28b020b5de
commit 717fb60443
25 changed files with 2042 additions and 775 deletions

@@ -1,6 +1,6 @@
#------------------------------------------------------------------------------
#
# Copyright (c) 2009 - 2015, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2009 - 2016, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -24,9 +24,13 @@ ASM_GLOBAL ASM_PFX(gcSmiHandlerSize)
ASM_GLOBAL ASM_PFX(gSmiCr3)
ASM_GLOBAL ASM_PFX(gSmiStack)
ASM_GLOBAL ASM_PFX(gSmbase)
ASM_GLOBAL ASM_PFX(mXdSupported)
ASM_GLOBAL ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))
ASM_GLOBAL ASM_PFX(gSmiHandlerIdtr)
.equ MSR_EFER, 0xc0000080
.equ MSR_EFER_XD, 0x800
.equ DSC_OFFSET, 0xfb00
.equ DSC_GDTPTR, 0x30
.equ DSC_GDTSIZ, 0x38
@@ -122,8 +126,41 @@ L11:
orl $BIT10, %eax
L12: # as cr4.PGE is not set here, refresh cr3
movl %eax, %cr4 # in PreModifyMtrrs() to flush TLB.
cmpb $0, ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))
jz L5
# Load TSS
movb $0x89, (TSS_SEGMENT + 5)(%ebp) # clear busy flag
movl $TSS_SEGMENT, %eax
ltrw %ax
L5:
# enable NXE if supported
.byte 0xb0 # mov al, imm8
ASM_PFX(mXdSupported): .byte 1
cmpb $0, %al
jz SkipNxe
#
# Check XD disable bit
#
movl $MSR_IA32_MISC_ENABLE, %ecx
rdmsr
pushl %edx # save MSR_IA32_MISC_ENABLE[63-32]
testl $BIT2, %edx # MSR_IA32_MISC_ENABLE[34]
jz L13
andw $0x0FFFB, %dx # clear XD Disable bit if it is set
wrmsr
L13:
movl $MSR_EFER, %ecx
rdmsr
orw $MSR_EFER_XD,%ax # enable NXE
wrmsr
jmp     NxeDone
SkipNxe:
subl $4, %esp
NxeDone:
movl %cr0, %ebx
orl $0x080010000, %ebx # enable paging + WP
orl $0x080010023, %ebx # enable paging + WP + NE + MP + PE
movl %ebx, %cr0
leal DSC_OFFSET(%edi),%ebx
movw DSC_DS(%ebx),%ax
@@ -135,35 +172,39 @@ L12: # as cr4.PGE is not set here, refresh
movw DSC_SS(%ebx),%ax
movl %eax, %ss
cmpb $0, ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))
jz L5
# Load TSS
movb $0x89, (TSS_SEGMENT + 5)(%ebp) # clear busy flag
movl $TSS_SEGMENT, %eax
ltrw %ax
L5:
# jmp _SmiHandler # instruction is not needed
_SmiHandler:
movl (%esp), %ebx
movl 4(%esp), %ebx
pushl %ebx
movl $ASM_PFX(CpuSmmDebugEntry), %eax
call *%eax
popl %ecx
addl $4, %esp
pushl %ebx
movl $ASM_PFX(SmiRendezvous), %eax
call *%eax
popl %ecx
addl $4, %esp
pushl %ebx
movl $ASM_PFX(CpuSmmDebugExit), %eax
call *%eax
popl %ecx
addl $4, %esp
movl $ASM_PFX(mXdSupported), %eax
movb (%eax), %al
cmpb $0, %al
jz L16
popl %edx # get saved MSR_IA32_MISC_ENABLE[63-32]
testl $BIT2, %edx
jz L16
movl $MSR_IA32_MISC_ENABLE, %ecx
rdmsr
orw $BIT2, %dx # set XD Disable bit if it was set before entering into SMM
wrmsr
L16:
rsm
ASM_PFX(gcSmiHandlerSize): .word . - _SmiEntryPoint