/* SPDX-License-Identifier: GPL-2.0-only */

#ifndef CPU_X86_CACHE
#define CPU_X86_CACHE

#include <cpu/x86/cr.h>

#define CR0_CacheDisable	(CR0_CD)
#define CR0_NoWriteThrough	(CR0_NW)

#define CPUID_FEATURE_CLFLUSH_BIT 19
#define CPUID_FEATURE_SELF_SNOOP_BIT 27

#if !defined(__ASSEMBLER__)

#include <arch/cpuid.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

static inline void wbinvd(void)
{
	asm volatile ("wbinvd" ::: "memory");
}

static inline void invd(void)
{
	asm volatile ("invd" ::: "memory");
}

static inline void clflush(void *addr)
{
	asm volatile ("clflush (%0)" :: "r" (addr));
}

bool clflush_supported(void);
void clflush_region(const uintptr_t start, const size_t size);

/*
 * The following functions require __always_inline because of the AMD
 * function STOP_CAR_AND_CPU, which disables cache-as-RAM: once cache-as-RAM
 * is disabled, its stack can no longer be used, so called functions must be
 * inlined to avoid any stack usage. The compiler must also keep local
 * variables in registers rather than allocating them on the stack. With
 * gcc 4.5.0, some functions declared as inline were not being inlined, so
 * the __always_inline qualifier is used to force them to always be inlined.
 */
static __always_inline void enable_cache(void)
{
	write_cr0(read_cr0() & ~(CR0_CD | CR0_NW));
}

/*
 * Cache flushing is the most time-consuming step when programming the MTRRs.
 * However, if the processor supports cache self-snooping (ss), we can skip
 * this step and save time.
 */
static __always_inline bool self_snooping_supported(void)
{
	return (cpuid_edx(1) >> CPUID_FEATURE_SELF_SNOOP_BIT) & 1;
}

static __always_inline void disable_cache(void)
{
	/* Disable and write back the cache */
	write_cr0(read_cr0() | CR0_CD);
	if (!self_snooping_supported())
		wbinvd();
}

#endif /* !__ASSEMBLER__ */
#endif /* CPU_X86_CACHE */
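
/*
 * Illustrative usage sketch (not part of the header above): shows how these
 * helpers are commonly combined when reprogramming the MTRRs and when
 * flushing a buffer by cache line. program_mtrrs_sketch(),
 * flush_buffer_sketch() and the "write the MTRR MSRs" step are hypothetical
 * placeholders for this example only; the cache helpers they call are the
 * ones declared above.
 */
#include <cpu/x86/cache.h>
#include <stddef.h>
#include <stdint.h>

static void program_mtrrs_sketch(void)
{
	/* Set CR0.CD so no new cache fills occur; disable_cache() also runs
	   wbinvd() unless the CPU reports self-snooping via CPUID. */
	disable_cache();

	/* ... write the fixed/variable MTRR MSRs here ... */

	/* Clear CR0.CD and CR0.NW so normal caching resumes. */
	enable_cache();
}

static void flush_buffer_sketch(void *buf, size_t len)
{
	/* clflush is not available on every x86 CPU; fall back to a full
	   write-back-and-invalidate when CPUID does not advertise it. */
	if (clflush_supported())
		clflush_region((uintptr_t)buf, len);
	else
		wbinvd();
}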