/* SPDX-License-Identifier: GPL-2.0-only */

#include <console/console.h>
#include <cpu/amd/amd64_save_state.h>
#include <cpu/amd/msr.h>
#include <amdblocks/smm.h>
#include <cpu/cpu.h>
#include <cpu/x86/msr.h>
#include <stdint.h>

/*
 * For data stored in TSEG, ensure TValid is clear so R/W access can reach
 * the DRAM when not in SMM.
 */
void clear_tvalid(void)
{
	msr_t hwcr = rdmsr(HWCR_MSR);
	msr_t mask = rdmsr(SMM_MASK_MSR);
	int tvalid = !!(mask.lo & SMM_TSEG_VALID);

	if (hwcr.lo & SMM_LOCK) {
		if (!tvalid) /* not valid but locked means still accessible */
			return;

		printk(BIOS_ERR, "can't clear TValid, already locked\n");
		return;
	}

	mask.lo &= ~SMM_TSEG_VALID;
	wrmsr(SMM_MASK_MSR, mask);
}

/* Set TValid again so the TSEG range is protected from non-SMM access. */
void tseg_valid(void)
{
	msr_t mask = rdmsr(SMM_MASK_MSR);
	mask.lo |= SMM_TSEG_VALID;

	wrmsr(SMM_MASK_MSR, mask);
}

/* Report whether the SMM configuration has already been locked via HWCR. */
bool is_smm_locked(void)
{
	msr_t hwcr = rdmsr(HWCR_MSR);
	return hwcr.lo & SMM_LOCK ? true : false;
}

/* Set SMM_LOCK in HWCR so the SMM configuration can no longer be changed. */
void lock_smm(void)
{
	msr_t hwcr = rdmsr(HWCR_MSR);
	hwcr.lo |= SMM_LOCK;
	wrmsr(HWCR_MSR, hwcr);
}
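
/*
 * Illustrative usage sketch, kept out of the build: this is only an
 * assumption about how a caller might sequence the helpers above, not an
 * actual coreboot code path. The function name smm_helpers_example() and
 * the "update data" step are placeholders.
 */
#if 0
static void smm_helpers_example(void)
{
	if (is_smm_locked())
		return;		/* already locked; the MSRs above can't be changed */

	clear_tvalid();		/* clear TValid: normal R/W reaches TSEG DRAM */
	/* ... update the data stored in TSEG ... */
	tseg_valid();		/* set TValid: TSEG is protected again */
	lock_smm();		/* set SMM_LOCK: freeze the SMM configuration */
}
#endif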